From 20bd2f1cd8dd8c8da036d92cfdd2d578ad615b8b Mon Sep 17 00:00:00 2001 From: Lars <61980367+CodeBooster97@users.noreply.github.com> Date: Tue, 21 Jan 2025 10:44:14 +0100 Subject: [PATCH 01/65] Update lets_start.md --- docs/introduction/lets_start.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/introduction/lets_start.md b/docs/introduction/lets_start.md index db21c627..8152a8e9 100644 --- a/docs/introduction/lets_start.md +++ b/docs/introduction/lets_start.md @@ -40,7 +40,7 @@ Let's assume that we have table `users`: ```python import asyncio -from typing import Final +from typing import Final, Any from psqlpy import ConnectionPool, QueryResult @@ -55,7 +55,7 @@ async def main() -> None: ) dict_results: Final[list[dict[Any, Any]]] = results.result() - db.close() + db_pool.close() ``` ::: tip From b2011c8452140f934d9d735de9a3216523aee8c8 Mon Sep 17 00:00:00 2001 From: Aleksandr Kiselev <62915291+chandr-andr@users.noreply.github.com> Date: Sat, 25 Jan 2025 20:04:28 +0100 Subject: [PATCH 02/65] Update README.md --- README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 27ccecc8..081dfccb 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,5 @@ -[![PyPI - Python Version](https://img.shields.io/pypi/pyversions/psqlpy?style=for-the-badge)](https://pypi.org/project/psqlpy/) +[![PyPI - Python Version](https://img.shields.io/badge/PYTHON-3.9%20%7C%203.10%20%7C%203.11%20%7C%203.12%20%7C%203.13-blue?style=for-the-badge +)](https://pypi.org/project/psqlpy/) [![PyPI](https://img.shields.io/pypi/v/psqlpy?style=for-the-badge)](https://pypi.org/project/psqlpy/) [![PyPI - Downloads](https://img.shields.io/pypi/dm/psqlpy?style=for-the-badge)](https://pypistats.org/packages/psqlpy) From 10aa16d9ff5b11f02de813155be243a0ab5a0c7a Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Mon, 27 Jan 2025 00:31:07 +0100 Subject: [PATCH 03/65] Small changes in doc Signed-off-by: chandr-andr (Kiselev Aleksandr) --- docs/components/listener.md | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/docs/components/listener.md b/docs/components/listener.md index f2ecff30..1ad98787 100644 --- a/docs/components/listener.md +++ b/docs/components/listener.md @@ -196,9 +196,17 @@ In the background it creates task in Rust event loop. async def main() -> None: listener = db_pool.listener() await listener.startup() - await listener.listen() + listener.listen() ``` ### Abort Listen Abort listen. If `listen()` method was called, stop listening, else don't do anything. + +```python +async def main() -> None: + listener = db_pool.listener() + await listener.startup() + listener.listen() + listener.abort_listen() +``` From d67dcb307fc0fb9e06d5fea9740e18a214209812 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Mon, 27 Jan 2025 00:31:43 +0100 Subject: [PATCH 04/65] Small changes in doc Signed-off-by: chandr-andr (Kiselev Aleksandr) --- docs/components/listener.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/components/listener.md b/docs/components/listener.md index 1ad98787..000067ac 100644 --- a/docs/components/listener.md +++ b/docs/components/listener.md @@ -121,7 +121,7 @@ async def main() -> None: - `channel`: name of the channel to listen. - `callback`: coroutine callback. -Add new callback to the channel, can be called more than 1 times. +Add new callback to the channel, can be called multiple times (before or after `listen`). 
Callback signature is like this: ```python From f2884d184408ae6715b8f485a31f95e885aae83c Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Sat, 8 Feb 2025 19:05:57 +0100 Subject: [PATCH 05/65] Refactored execute-like methods Signed-off-by: chandr-andr (Kiselev Aleksandr) --- src/common.rs | 110 +--------- src/driver/connection.rs | 356 ++------------------------------- src/driver/connection_pool.rs | 3 +- src/driver/cursor.rs | 29 ++- src/driver/inner_connection.rs | 267 +++++++++++++++++++++++++ src/driver/listener/core.rs | 3 +- src/driver/mod.rs | 1 + src/driver/transaction.rs | 133 ++---------- 8 files changed, 312 insertions(+), 590 deletions(-) create mode 100644 src/driver/inner_connection.rs diff --git a/src/common.rs b/src/common.rs index 8dc70fc3..d0ec15e4 100644 --- a/src/common.rs +++ b/src/common.rs @@ -1,13 +1,6 @@ use pyo3::{ types::{PyAnyMethods, PyModule, PyModuleMethods}, - Bound, PyAny, PyResult, Python, -}; - -use crate::{ - driver::connection::PsqlpyConnection, - exceptions::rust_errors::RustPSQLDriverPyResult, - query_result::{PSQLDriverPyQueryResult, PSQLDriverSinglePyQueryResult}, - value_converter::{convert_parameters, PythonDTO, QueryParameter}, + Bound, PyResult, Python, }; /// Add new module to the parent one. @@ -33,104 +26,3 @@ pub fn add_module( )?; Ok(()) } - -pub trait ObjectQueryTrait { - fn psqlpy_query_one( - &self, - querystring: String, - parameters: Option>, - prepared: Option, - ) -> impl std::future::Future> + Send; - - fn psqlpy_query( - &self, - querystring: String, - parameters: Option>, - prepared: Option, - ) -> impl std::future::Future> + Send; - - fn psqlpy_query_simple( - &self, - querystring: String, - ) -> impl std::future::Future> + Send; -} - -impl ObjectQueryTrait for PsqlpyConnection { - async fn psqlpy_query_one( - &self, - querystring: String, - parameters: Option>, - prepared: Option, - ) -> RustPSQLDriverPyResult { - let mut params: Vec = vec![]; - if let Some(parameters) = parameters { - params = convert_parameters(parameters)?; - } - let prepared = prepared.unwrap_or(true); - - let result = if prepared { - self.query_one( - &self.prepare_cached(&querystring).await?, - ¶ms - .iter() - .map(|param| param as &QueryParameter) - .collect::>() - .into_boxed_slice(), - ) - .await? - } else { - self.query_one( - &querystring, - ¶ms - .iter() - .map(|param| param as &QueryParameter) - .collect::>() - .into_boxed_slice(), - ) - .await? - }; - - Ok(PSQLDriverSinglePyQueryResult::new(result)) - } - - async fn psqlpy_query( - &self, - querystring: String, - parameters: Option>, - prepared: Option, - ) -> RustPSQLDriverPyResult { - let mut params: Vec = vec![]; - if let Some(parameters) = parameters { - params = convert_parameters(parameters)?; - } - let prepared = prepared.unwrap_or(true); - - let result = if prepared { - self.query( - &self.prepare_cached(&querystring).await?, - ¶ms - .iter() - .map(|param| param as &QueryParameter) - .collect::>() - .into_boxed_slice(), - ) - .await? - } else { - self.query( - &querystring, - ¶ms - .iter() - .map(|param| param as &QueryParameter) - .collect::>() - .into_boxed_slice(), - ) - .await? 
- }; - - Ok(PSQLDriverPyQueryResult::new(result)) - } - - async fn psqlpy_query_simple(&self, querystring: String) -> RustPSQLDriverPyResult<()> { - self.batch_execute(querystring.as_str()).await - } -} diff --git a/src/driver/connection.rs b/src/driver/connection.rs index f10328c2..2d747225 100644 --- a/src/driver/connection.rs +++ b/src/driver/connection.rs @@ -1,114 +1,24 @@ -use bytes::{Buf, BytesMut}; -use deadpool_postgres::{Object, Pool}; +use bytes::BytesMut; +use deadpool_postgres::Pool; use futures_util::pin_mut; -use postgres_types::ToSql; use pyo3::{buffer::PyBuffer, pyclass, pymethods, Py, PyAny, PyErr, Python}; -use std::{collections::HashSet, sync::Arc, vec}; -use tokio_postgres::{ - binary_copy::BinaryCopyInWriter, Client, CopyInSink, Row, Statement, ToStatement, -}; +use std::{collections::HashSet, sync::Arc}; +use tokio_postgres::binary_copy::BinaryCopyInWriter; use crate::{ exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, format_helpers::quote_ident, query_result::{PSQLDriverPyQueryResult, PSQLDriverSinglePyQueryResult}, runtime::tokio_runtime, - value_converter::{convert_parameters, postgres_to_py, PythonDTO, QueryParameter}, }; use super::{ cursor::Cursor, + inner_connection::PsqlpyConnection, transaction::Transaction, transaction_options::{IsolationLevel, ReadVariant, SynchronousCommit}, }; -#[allow(clippy::module_name_repetitions)] -pub enum PsqlpyConnection { - PoolConn(Object), - SingleConn(Client), -} - -impl PsqlpyConnection { - /// Prepare cached statement. - /// - /// # Errors - /// May return Err if cannot prepare statement. - pub async fn prepare_cached(&self, query: &str) -> RustPSQLDriverPyResult { - match self { - PsqlpyConnection::PoolConn(pconn) => return Ok(pconn.prepare_cached(query).await?), - PsqlpyConnection::SingleConn(sconn) => return Ok(sconn.prepare(query).await?), - } - } - - /// Prepare cached statement. - /// - /// # Errors - /// May return Err if cannot execute statement. - pub async fn query( - &self, - statement: &T, - params: &[&(dyn ToSql + Sync)], - ) -> RustPSQLDriverPyResult> - where - T: ?Sized + ToStatement, - { - match self { - PsqlpyConnection::PoolConn(pconn) => return Ok(pconn.query(statement, params).await?), - PsqlpyConnection::SingleConn(sconn) => { - return Ok(sconn.query(statement, params).await?) - } - } - } - - /// Prepare cached statement. - /// - /// # Errors - /// May return Err if cannot execute statement. - pub async fn batch_execute(&self, query: &str) -> RustPSQLDriverPyResult<()> { - match self { - PsqlpyConnection::PoolConn(pconn) => return Ok(pconn.batch_execute(query).await?), - PsqlpyConnection::SingleConn(sconn) => return Ok(sconn.batch_execute(query).await?), - } - } - - /// Prepare cached statement. - /// - /// # Errors - /// May return Err if cannot execute statement. - pub async fn query_one( - &self, - statement: &T, - params: &[&(dyn ToSql + Sync)], - ) -> RustPSQLDriverPyResult - where - T: ?Sized + ToStatement, - { - match self { - PsqlpyConnection::PoolConn(pconn) => { - return Ok(pconn.query_one(statement, params).await?) - } - PsqlpyConnection::SingleConn(sconn) => { - return Ok(sconn.query_one(statement, params).await?) - } - } - } - - /// Prepare cached statement. - /// - /// # Errors - /// May return Err if cannot execute copy data. 
- pub async fn copy_in(&self, statement: &T) -> RustPSQLDriverPyResult> - where - T: ?Sized + ToStatement, - U: Buf + 'static + Send, - { - match self { - PsqlpyConnection::PoolConn(pconn) => return Ok(pconn.copy_in(statement).await?), - PsqlpyConnection::SingleConn(sconn) => return Ok(sconn.copy_in(statement).await?), - } - } -} - #[pyclass(subclass)] #[derive(Clone)] pub struct Connection { @@ -213,54 +123,7 @@ impl Connection { let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).db_client.clone()); if let Some(db_client) = db_client { - let mut params: Vec = vec![]; - if let Some(parameters) = parameters { - params = convert_parameters(parameters)?; - } - let prepared = prepared.unwrap_or(true); - - let result = if prepared { - db_client - .query( - &db_client - .prepare_cached(&querystring) - .await - .map_err(|err| { - RustPSQLDriverError::ConnectionExecuteError(format!( - "Cannot prepare statement, error - {err}" - )) - })?, - ¶ms - .iter() - .map(|param| param as &QueryParameter) - .collect::>() - .into_boxed_slice(), - ) - .await - .map_err(|err| { - RustPSQLDriverError::ConnectionExecuteError(format!( - "Cannot execute statement, error - {err}" - )) - })? - } else { - db_client - .query( - &querystring, - ¶ms - .iter() - .map(|param| param as &QueryParameter) - .collect::>() - .into_boxed_slice(), - ) - .await - .map_err(|err| { - RustPSQLDriverError::ConnectionExecuteError(format!( - "Cannot execute statement, error - {err}" - )) - })? - }; - - return Ok(PSQLDriverPyQueryResult::new(result)); + return db_client.execute(querystring, parameters, prepared).await; } Err(RustPSQLDriverError::ConnectionClosedError) @@ -311,60 +174,9 @@ impl Connection { let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).db_client.clone()); if let Some(db_client) = db_client { - let mut params: Vec> = vec![]; - if let Some(parameters) = parameters { - for vec_of_py_any in parameters { - params.push(convert_parameters(vec_of_py_any)?); - } - } - let prepared = prepared.unwrap_or(true); - - db_client.batch_execute("BEGIN;").await.map_err(|err| { - RustPSQLDriverError::TransactionBeginError(format!( - "Cannot start transaction to run execute_many: {err}" - )) - })?; - for param in params { - let querystring_result = if prepared { - let prepared_stmt = &db_client.prepare_cached(&querystring).await; - if let Err(error) = prepared_stmt { - return Err(RustPSQLDriverError::TransactionExecuteError(format!( - "Cannot prepare statement in execute_many, operation rolled back {error}", - ))); - } - db_client - .query( - &db_client.prepare_cached(&querystring).await?, - ¶m - .iter() - .map(|param| param as &QueryParameter) - .collect::>() - .into_boxed_slice(), - ) - .await - } else { - db_client - .query( - &querystring, - ¶m - .iter() - .map(|param| param as &QueryParameter) - .collect::>() - .into_boxed_slice(), - ) - .await - }; - - if let Err(error) = querystring_result { - db_client.batch_execute("ROLLBACK;").await?; - return Err(RustPSQLDriverError::TransactionExecuteError(format!( - "Error occured in `execute_many` statement, transaction is rolled back: {error}" - ))); - } - } - db_client.batch_execute("COMMIT;").await?; - - return Ok(()); + return db_client + .execute_many(querystring, parameters, prepared) + .await; } Err(RustPSQLDriverError::ConnectionClosedError) @@ -388,54 +200,7 @@ impl Connection { let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).db_client.clone()); if let Some(db_client) = db_client { - let mut params: Vec = vec![]; - if let Some(parameters) = 
parameters { - params = convert_parameters(parameters)?; - } - let prepared = prepared.unwrap_or(true); - - let result = if prepared { - db_client - .query( - &db_client - .prepare_cached(&querystring) - .await - .map_err(|err| { - RustPSQLDriverError::ConnectionExecuteError(format!( - "Cannot prepare statement, error - {err}" - )) - })?, - ¶ms - .iter() - .map(|param| param as &QueryParameter) - .collect::>() - .into_boxed_slice(), - ) - .await - .map_err(|err| { - RustPSQLDriverError::ConnectionExecuteError(format!( - "Cannot execute statement, error - {err}" - )) - })? - } else { - db_client - .query( - &querystring, - ¶ms - .iter() - .map(|param| param as &QueryParameter) - .collect::>() - .into_boxed_slice(), - ) - .await - .map_err(|err| { - RustPSQLDriverError::ConnectionExecuteError(format!( - "Cannot execute statement, error - {err}" - )) - })? - }; - - return Ok(PSQLDriverPyQueryResult::new(result)); + return db_client.execute(querystring, parameters, prepared).await; } Err(RustPSQLDriverError::ConnectionClosedError) @@ -465,54 +230,7 @@ impl Connection { let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).db_client.clone()); if let Some(db_client) = db_client { - let mut params: Vec = vec![]; - if let Some(parameters) = parameters { - params = convert_parameters(parameters)?; - } - let prepared = prepared.unwrap_or(true); - - let result = if prepared { - db_client - .query_one( - &db_client - .prepare_cached(&querystring) - .await - .map_err(|err| { - RustPSQLDriverError::ConnectionExecuteError(format!( - "Cannot prepare statement, error - {err}" - )) - })?, - ¶ms - .iter() - .map(|param| param as &QueryParameter) - .collect::>() - .into_boxed_slice(), - ) - .await - .map_err(|err| { - RustPSQLDriverError::ConnectionExecuteError(format!( - "Cannot execute statement, error - {err}" - )) - })? - } else { - db_client - .query_one( - &querystring, - ¶ms - .iter() - .map(|param| param as &QueryParameter) - .collect::>() - .into_boxed_slice(), - ) - .await - .map_err(|err| { - RustPSQLDriverError::ConnectionExecuteError(format!( - "Cannot execute statement, error - {err}" - )) - })? - }; - - return Ok(PSQLDriverSinglePyQueryResult::new(result)); + return db_client.fetch_row(querystring, parameters, prepared).await; } Err(RustPSQLDriverError::ConnectionClosedError) @@ -539,57 +257,7 @@ impl Connection { let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).db_client.clone()); if let Some(db_client) = db_client { - let mut params: Vec = vec![]; - if let Some(parameters) = parameters { - params = convert_parameters(parameters)?; - } - let prepared = prepared.unwrap_or(true); - - let result = if prepared { - db_client - .query_one( - &db_client - .prepare_cached(&querystring) - .await - .map_err(|err| { - RustPSQLDriverError::ConnectionExecuteError(format!( - "Cannot prepare statement, error - {err}" - )) - })?, - ¶ms - .iter() - .map(|param| param as &QueryParameter) - .collect::>() - .into_boxed_slice(), - ) - .await - .map_err(|err| { - RustPSQLDriverError::ConnectionExecuteError(format!( - "Cannot execute statement, error - {err}" - )) - })? - } else { - db_client - .query_one( - &querystring, - ¶ms - .iter() - .map(|param| param as &QueryParameter) - .collect::>() - .into_boxed_slice(), - ) - .await - .map_err(|err| { - RustPSQLDriverError::ConnectionExecuteError(format!( - "Cannot execute statement, error - {err}" - )) - })? 
- }; - - return Python::with_gil(|gil| match result.columns().first() { - Some(first_column) => postgres_to_py(gil, &result, first_column, 0, &None), - None => Ok(gil.None()), - }); + return db_client.fetch_val(querystring, parameters, prepared).await; } Err(RustPSQLDriverError::ConnectionClosedError) diff --git a/src/driver/connection_pool.rs b/src/driver/connection_pool.rs index 4f38407d..c2b3046e 100644 --- a/src/driver/connection_pool.rs +++ b/src/driver/connection_pool.rs @@ -12,7 +12,8 @@ use crate::{ use super::{ common_options::{ConnRecyclingMethod, LoadBalanceHosts, SslMode, TargetSessionAttrs}, - connection::{Connection, PsqlpyConnection}, + connection::Connection, + inner_connection::PsqlpyConnection, listener::core::Listener, utils::{build_connection_config, build_manager, build_tls}, }; diff --git a/src/driver/cursor.rs b/src/driver/cursor.rs index 7368d29a..e5147b89 100644 --- a/src/driver/cursor.rs +++ b/src/driver/cursor.rs @@ -5,13 +5,12 @@ use pyo3::{ }; use crate::{ - common::ObjectQueryTrait, exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, query_result::PSQLDriverPyQueryResult, runtime::rustdriver_future, }; -use super::connection::PsqlpyConnection; +use super::inner_connection::PsqlpyConnection; /// Additional implementation for the `Object` type. #[allow(clippy::ref_option)] @@ -55,7 +54,7 @@ impl CursorObjectTrait for PsqlpyConnection { cursor_init_query.push_str(format!(" CURSOR FOR {querystring}").as_str()); - self.psqlpy_query(cursor_init_query, parameters.clone(), *prepared) + self.execute(cursor_init_query, parameters.clone(), *prepared) .await .map_err(|err| { RustPSQLDriverError::CursorStartError(format!("Cannot start cursor, error - {err}")) @@ -77,7 +76,7 @@ impl CursorObjectTrait for PsqlpyConnection { )); } - self.psqlpy_query( + self.execute( format!("CLOSE {cursor_name}"), Option::default(), Some(false), @@ -220,7 +219,7 @@ impl Cursor { rustdriver_future(gil, async move { if let Some(db_transaction) = db_transaction { let result = db_transaction - .psqlpy_query( + .execute( format!("FETCH {fetch_number} FROM {cursor_name}"), None, Some(false), @@ -318,7 +317,7 @@ impl Cursor { }; let result = db_transaction - .psqlpy_query( + .execute( format!("FETCH {fetch_number} FROM {cursor_name}"), None, Some(false), @@ -350,7 +349,7 @@ impl Cursor { if let Some(db_transaction) = db_transaction { let result = db_transaction - .psqlpy_query(format!("FETCH NEXT FROM {cursor_name}"), None, Some(false)) + .execute(format!("FETCH NEXT FROM {cursor_name}"), None, Some(false)) .await .map_err(|err| { RustPSQLDriverError::CursorFetchError(format!( @@ -377,7 +376,7 @@ impl Cursor { if let Some(db_transaction) = db_transaction { let result = db_transaction - .psqlpy_query(format!("FETCH PRIOR FROM {cursor_name}"), None, Some(false)) + .execute(format!("FETCH PRIOR FROM {cursor_name}"), None, Some(false)) .await .map_err(|err| { RustPSQLDriverError::CursorFetchError(format!( @@ -404,7 +403,7 @@ impl Cursor { if let Some(db_transaction) = db_transaction { let result = db_transaction - .psqlpy_query(format!("FETCH FIRST FROM {cursor_name}"), None, Some(false)) + .execute(format!("FETCH FIRST FROM {cursor_name}"), None, Some(false)) .await .map_err(|err| { RustPSQLDriverError::CursorFetchError(format!( @@ -431,7 +430,7 @@ impl Cursor { if let Some(db_transaction) = db_transaction { let result = db_transaction - .psqlpy_query(format!("FETCH LAST FROM {cursor_name}"), None, Some(false)) + .execute(format!("FETCH LAST FROM {cursor_name}"), None, Some(false)) 
.await .map_err(|err| { RustPSQLDriverError::CursorFetchError(format!( @@ -461,7 +460,7 @@ impl Cursor { if let Some(db_transaction) = db_transaction { let result = db_transaction - .psqlpy_query( + .execute( format!("FETCH ABSOLUTE {absolute_number} FROM {cursor_name}"), None, Some(false), @@ -495,7 +494,7 @@ impl Cursor { if let Some(db_transaction) = db_transaction { let result = db_transaction - .psqlpy_query( + .execute( format!("FETCH RELATIVE {relative_number} FROM {cursor_name}"), None, Some(false), @@ -528,7 +527,7 @@ impl Cursor { if let Some(db_transaction) = db_transaction { let result = db_transaction - .psqlpy_query( + .execute( format!("FETCH FORWARD ALL FROM {cursor_name}"), None, Some(false), @@ -562,7 +561,7 @@ impl Cursor { if let Some(db_transaction) = db_transaction { let result = db_transaction - .psqlpy_query( + .execute( format!("FETCH BACKWARD {backward_count} FROM {cursor_name}",), None, Some(false), @@ -595,7 +594,7 @@ impl Cursor { if let Some(db_transaction) = db_transaction { let result = db_transaction - .psqlpy_query( + .execute( format!("FETCH BACKWARD ALL FROM {cursor_name}"), None, Some(false), diff --git a/src/driver/inner_connection.rs b/src/driver/inner_connection.rs new file mode 100644 index 00000000..c66006cc --- /dev/null +++ b/src/driver/inner_connection.rs @@ -0,0 +1,267 @@ +use bytes::Buf; +use deadpool_postgres::Object; +use postgres_types::ToSql; +use pyo3::{Py, PyAny, Python}; +use std::vec; +use tokio_postgres::{Client, CopyInSink, Row, Statement, ToStatement}; + +use crate::{ + exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, + query_result::{PSQLDriverPyQueryResult, PSQLDriverSinglePyQueryResult}, + value_converter::{convert_parameters, postgres_to_py, PythonDTO, QueryParameter}, +}; + +#[allow(clippy::module_name_repetitions)] +pub enum PsqlpyConnection { + PoolConn(Object), + SingleConn(Client), +} + +impl PsqlpyConnection { + /// Prepare cached statement. + /// + /// # Errors + /// May return Err if cannot prepare statement. + pub async fn prepare_cached(&self, query: &str) -> RustPSQLDriverPyResult { + match self { + PsqlpyConnection::PoolConn(pconn) => return Ok(pconn.prepare_cached(query).await?), + PsqlpyConnection::SingleConn(sconn) => return Ok(sconn.prepare(query).await?), + } + } + + /// Prepare cached statement. + /// + /// # Errors + /// May return Err if cannot execute statement. + pub async fn query( + &self, + statement: &T, + params: &[&(dyn ToSql + Sync)], + ) -> RustPSQLDriverPyResult> + where + T: ?Sized + ToStatement, + { + match self { + PsqlpyConnection::PoolConn(pconn) => return Ok(pconn.query(statement, params).await?), + PsqlpyConnection::SingleConn(sconn) => { + return Ok(sconn.query(statement, params).await?) + } + } + } + + /// Prepare cached statement. + /// + /// # Errors + /// May return Err if cannot execute statement. + pub async fn batch_execute(&self, query: &str) -> RustPSQLDriverPyResult<()> { + match self { + PsqlpyConnection::PoolConn(pconn) => return Ok(pconn.batch_execute(query).await?), + PsqlpyConnection::SingleConn(sconn) => return Ok(sconn.batch_execute(query).await?), + } + } + + /// Prepare cached statement. + /// + /// # Errors + /// May return Err if cannot execute statement. + pub async fn query_one( + &self, + statement: &T, + params: &[&(dyn ToSql + Sync)], + ) -> RustPSQLDriverPyResult + where + T: ?Sized + ToStatement, + { + match self { + PsqlpyConnection::PoolConn(pconn) => { + return Ok(pconn.query_one(statement, params).await?) 
+ } + PsqlpyConnection::SingleConn(sconn) => { + return Ok(sconn.query_one(statement, params).await?) + } + } + } + + pub async fn execute( + &self, + querystring: String, + parameters: Option>, + prepared: Option, + ) -> RustPSQLDriverPyResult { + let prepared = prepared.unwrap_or(true); + + let mut params: Vec = vec![]; + if let Some(parameters) = parameters { + params = convert_parameters(parameters)?; + } + + let boxed_params = ¶ms + .iter() + .map(|param| param as &QueryParameter) + .collect::>() + .into_boxed_slice(); + + let result = if prepared { + self.query( + &self.prepare_cached(&querystring).await.map_err(|err| { + RustPSQLDriverError::ConnectionExecuteError(format!( + "Cannot prepare statement, error - {err}" + )) + })?, + boxed_params, + ) + .await + .map_err(|err| { + RustPSQLDriverError::ConnectionExecuteError(format!( + "Cannot execute statement, error - {err}" + )) + })? + } else { + self.query(&querystring, boxed_params) + .await + .map_err(|err| { + RustPSQLDriverError::ConnectionExecuteError(format!( + "Cannot execute statement, error - {err}" + )) + })? + }; + + Ok(PSQLDriverPyQueryResult::new(result)) + } + + pub async fn execute_many( + &self, + querystring: String, + parameters: Option>>, + prepared: Option, + ) -> RustPSQLDriverPyResult<()> { + let prepared = prepared.unwrap_or(true); + + let mut params: Vec> = vec![]; + if let Some(parameters) = parameters { + for vec_of_py_any in parameters { + params.push(convert_parameters(vec_of_py_any)?); + } + } + + for param in params { + let boxed_params = ¶m + .iter() + .map(|param| param as &QueryParameter) + .collect::>() + .into_boxed_slice(); + + let querystring_result = if prepared { + let prepared_stmt = &self.prepare_cached(&querystring).await; + if let Err(error) = prepared_stmt { + return Err(RustPSQLDriverError::ConnectionExecuteError(format!( + "Cannot prepare statement in execute_many, operation rolled back {error}", + ))); + } + self.query(&self.prepare_cached(&querystring).await?, boxed_params) + .await + } else { + self.query(&querystring, boxed_params).await + }; + + if let Err(error) = querystring_result { + return Err(RustPSQLDriverError::ConnectionExecuteError(format!( + "Error occured in `execute_many` statement: {error}" + ))); + } + } + + return Ok(()); + } + + pub async fn fetch_row_raw( + &self, + querystring: String, + parameters: Option>, + prepared: Option, + ) -> RustPSQLDriverPyResult { + let prepared = prepared.unwrap_or(true); + + let mut params: Vec = vec![]; + if let Some(parameters) = parameters { + params = convert_parameters(parameters)?; + } + + let boxed_params = ¶ms + .iter() + .map(|param| param as &QueryParameter) + .collect::>() + .into_boxed_slice(); + + let result = if prepared { + self.query_one( + &self.prepare_cached(&querystring).await.map_err(|err| { + RustPSQLDriverError::ConnectionExecuteError(format!( + "Cannot prepare statement, error - {err}" + )) + })?, + boxed_params, + ) + .await + .map_err(|err| { + RustPSQLDriverError::ConnectionExecuteError(format!( + "Cannot execute statement, error - {err}" + )) + })? + } else { + self.query_one(&querystring, boxed_params) + .await + .map_err(|err| { + RustPSQLDriverError::ConnectionExecuteError(format!( + "Cannot execute statement, error - {err}" + )) + })? 
+ }; + + return Ok(result); + } + + pub async fn fetch_row( + &self, + querystring: String, + parameters: Option>, + prepared: Option, + ) -> RustPSQLDriverPyResult { + let result = self + .fetch_row_raw(querystring, parameters, prepared) + .await?; + + return Ok(PSQLDriverSinglePyQueryResult::new(result)); + } + + pub async fn fetch_val( + &self, + querystring: String, + parameters: Option>, + prepared: Option, + ) -> RustPSQLDriverPyResult> { + let result = self + .fetch_row_raw(querystring, parameters, prepared) + .await?; + + return Python::with_gil(|gil| match result.columns().first() { + Some(first_column) => postgres_to_py(gil, &result, first_column, 0, &None), + None => Ok(gil.None()), + }); + } + + /// Prepare cached statement. + /// + /// # Errors + /// May return Err if cannot execute copy data. + pub async fn copy_in(&self, statement: &T) -> RustPSQLDriverPyResult> + where + T: ?Sized + ToStatement, + U: Buf + 'static + Send, + { + match self { + PsqlpyConnection::PoolConn(pconn) => return Ok(pconn.copy_in(statement).await?), + PsqlpyConnection::SingleConn(sconn) => return Ok(sconn.copy_in(statement).await?), + } + } +} diff --git a/src/driver/listener/core.rs b/src/driver/listener/core.rs index c8fd271c..a79cdab6 100644 --- a/src/driver/listener/core.rs +++ b/src/driver/listener/core.rs @@ -14,7 +14,8 @@ use tokio_postgres::{AsyncMessage, Config}; use crate::{ driver::{ common_options::SslMode, - connection::{Connection, PsqlpyConnection}, + connection::Connection, + inner_connection::PsqlpyConnection, utils::{build_tls, is_coroutine_function, ConfiguredTLS}, }, exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, diff --git a/src/driver/mod.rs b/src/driver/mod.rs index 578bf2cd..e7827cd5 100644 --- a/src/driver/mod.rs +++ b/src/driver/mod.rs @@ -3,6 +3,7 @@ pub mod connection; pub mod connection_pool; pub mod connection_pool_builder; pub mod cursor; +pub mod inner_connection; pub mod listener; pub mod transaction; pub mod transaction_options; diff --git a/src/driver/transaction.rs b/src/driver/transaction.rs index 3fa59e4d..4cc3655a 100644 --- a/src/driver/transaction.rs +++ b/src/driver/transaction.rs @@ -12,15 +12,13 @@ use crate::{ exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, format_helpers::quote_ident, query_result::{PSQLDriverPyQueryResult, PSQLDriverSinglePyQueryResult}, - value_converter::{convert_parameters, postgres_to_py, PythonDTO, QueryParameter}, }; use super::{ - connection::PsqlpyConnection, cursor::Cursor, + inner_connection::PsqlpyConnection, transaction_options::{IsolationLevel, ReadVariant, SynchronousCommit}, }; -use crate::common::ObjectQueryTrait; use std::{collections::HashSet, sync::Arc}; #[allow(clippy::module_name_repetitions)] @@ -328,9 +326,7 @@ impl Transaction { }); is_transaction_ready?; if let Some(db_client) = db_client { - return db_client - .psqlpy_query(querystring, parameters, prepared) - .await; + return db_client.execute(querystring, parameters, prepared).await; } Err(RustPSQLDriverError::TransactionClosedError) @@ -384,9 +380,7 @@ impl Transaction { }); is_transaction_ready?; if let Some(db_client) = db_client { - return db_client - .psqlpy_query(querystring, parameters, prepared) - .await; + return db_client.execute(querystring, parameters, prepared).await; } Err(RustPSQLDriverError::TransactionClosedError) @@ -420,36 +414,7 @@ impl Transaction { is_transaction_ready?; if let Some(db_client) = db_client { - let mut params: Vec = vec![]; - if let Some(parameters) = parameters { - params = 
convert_parameters(parameters)?; - } - - let result = if prepared.unwrap_or(true) { - db_client - .query_one( - &db_client.prepare_cached(&querystring).await?, - ¶ms - .iter() - .map(|param| param as &QueryParameter) - .collect::>() - .into_boxed_slice(), - ) - .await? - } else { - db_client - .query_one( - &querystring, - ¶ms - .iter() - .map(|param| param as &QueryParameter) - .collect::>() - .into_boxed_slice(), - ) - .await? - }; - - return Ok(PSQLDriverSinglePyQueryResult::new(result)); + return db_client.fetch_row(querystring, parameters, prepared).await; } Err(RustPSQLDriverError::TransactionClosedError) @@ -476,41 +441,9 @@ impl Transaction { let self_ = self_.borrow(gil); (self_.check_is_transaction_ready(), self_.db_client.clone()) }); + is_transaction_ready?; if let Some(db_client) = db_client { - is_transaction_ready?; - let mut params: Vec = vec![]; - if let Some(parameters) = parameters { - params = convert_parameters(parameters)?; - } - - let result = if prepared.unwrap_or(true) { - db_client - .query_one( - &db_client.prepare_cached(&querystring).await?, - ¶ms - .iter() - .map(|param| param as &QueryParameter) - .collect::>() - .into_boxed_slice(), - ) - .await? - } else { - db_client - .query_one( - &querystring, - ¶ms - .iter() - .map(|param| param as &QueryParameter) - .collect::>() - .into_boxed_slice(), - ) - .await? - }; - - return Python::with_gil(|gil| match result.columns().first() { - Some(first_column) => postgres_to_py(gil, &result, first_column, 0, &None), - None => Ok(gil.None()), - }); + return db_client.fetch_val(querystring, parameters, prepared).await; } Err(RustPSQLDriverError::TransactionClosedError) @@ -537,51 +470,11 @@ impl Transaction { (self_.check_is_transaction_ready(), self_.db_client.clone()) }); + is_transaction_ready?; if let Some(db_client) = db_client { - is_transaction_ready?; - - let mut params: Vec> = vec![]; - if let Some(parameters) = parameters { - for vec_of_py_any in parameters { - params.push(convert_parameters(vec_of_py_any)?); - } - } - let prepared = prepared.unwrap_or(true); - - for param in params { - let is_query_result_ok = if prepared { - let prepared_stmt = &db_client.prepare_cached(&querystring).await; - if let Err(error) = prepared_stmt { - return Err(RustPSQLDriverError::TransactionExecuteError(format!( - "Cannot prepare statement in execute_many, operation rolled back {error}", - ))); - } - db_client - .query( - &db_client.prepare_cached(&querystring).await?, - ¶m - .iter() - .map(|param| param as &QueryParameter) - .collect::>() - .into_boxed_slice(), - ) - .await - } else { - db_client - .query( - &querystring, - ¶m - .iter() - .map(|param| param as &QueryParameter) - .collect::>() - .into_boxed_slice(), - ) - .await - }; - is_query_result_ok?; - } - - return Ok(()); + return db_client + .execute_many(querystring, parameters, prepared) + .await; } Err(RustPSQLDriverError::TransactionClosedError) @@ -804,9 +697,9 @@ impl Transaction { (self_.check_is_transaction_ready(), self_.db_client.clone()) }); - if let Some(db_client) = db_client { - is_transaction_ready?; + is_transaction_ready?; + if let Some(db_client) = db_client { let mut futures = vec![]; if let Some(queries) = queries { let gil_result = pyo3::Python::with_gil(|gil| -> PyResult<()> { @@ -822,7 +715,7 @@ impl Transaction { Ok(param) => Some(param.into()), Err(_) => None, }; - futures.push(db_client.psqlpy_query(querystring, params, prepared)); + futures.push(db_client.execute(querystring, params, prepared)); } Ok(()) }); From 
23aeb6674c478da6cff8d03a323bba009ede1806 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Sat, 8 Feb 2025 23:52:05 +0100 Subject: [PATCH 06/65] Prepared psqlpy for OTLP Signed-off-by: chandr-andr (Kiselev Aleksandr) --- python/psqlpy/_internal/__init__.pyi | 22 ++++++++ src/driver/connection.rs | 79 ++++++++++++++++++++++++++-- src/driver/connection_pool.rs | 26 +++++---- src/driver/cursor.rs | 79 +++++++++++++++++++++++++++- src/driver/listener/core.rs | 15 +++--- src/driver/transaction.rs | 61 ++++++++++++++++++++- 6 files changed, 257 insertions(+), 25 deletions(-) diff --git a/python/psqlpy/_internal/__init__.pyi b/python/psqlpy/_internal/__init__.pyi index 42b836b2..cd838038 100644 --- a/python/psqlpy/_internal/__init__.pyi +++ b/python/psqlpy/_internal/__init__.pyi @@ -288,6 +288,16 @@ class Cursor: It can be used as an asynchronous iterator. """ + cursor_name: str + querystring: str + parameters: Sequence[Any] + prepared: bool | None + conn_dbname: str | None + user: str | None + host_addrs: list[str] + hosts: list[str] + ports: list[int] + def __aiter__(self: Self) -> Self: ... async def __anext__(self: Self) -> QueryResult: ... async def __aenter__(self: Self) -> Self: ... @@ -424,6 +434,12 @@ class Transaction: `.transaction()`. """ + conn_dbname: str | None + user: str | None + host_addrs: list[str] + hosts: list[str] + ports: list[int] + async def __aenter__(self: Self) -> Self: ... async def __aexit__( self: Self, @@ -874,6 +890,12 @@ class Connection: It can be created only from connection pool. """ + conn_dbname: str | None + user: str | None + host_addrs: list[str] + hosts: list[str] + ports: list[int] + async def __aenter__(self: Self) -> Self: ... async def __aexit__( self: Self, diff --git a/src/driver/connection.rs b/src/driver/connection.rs index 2d747225..573a1348 100644 --- a/src/driver/connection.rs +++ b/src/driver/connection.rs @@ -2,8 +2,8 @@ use bytes::BytesMut; use deadpool_postgres::Pool; use futures_util::pin_mut; use pyo3::{buffer::PyBuffer, pyclass, pymethods, Py, PyAny, PyErr, Python}; -use std::{collections::HashSet, sync::Arc}; -use tokio_postgres::binary_copy::BinaryCopyInWriter; +use std::{collections::HashSet, net::IpAddr, sync::Arc}; +use tokio_postgres::{binary_copy::BinaryCopyInWriter, config::Host, Config}; use crate::{ exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, @@ -24,12 +24,21 @@ use super::{ pub struct Connection { db_client: Option>, db_pool: Option, + pg_config: Arc, } impl Connection { #[must_use] - pub fn new(db_client: Option>, db_pool: Option) -> Self { - Connection { db_client, db_pool } + pub fn new( + db_client: Option>, + db_pool: Option, + pg_config: Arc, + ) -> Self { + Connection { + db_client, + db_pool, + pg_config, + } } #[must_use] @@ -45,12 +54,70 @@ impl Connection { impl Default for Connection { fn default() -> Self { - Connection::new(None, None) + Connection::new(None, None, Arc::new(Config::default())) } } #[pymethods] impl Connection { + #[getter] + fn conn_dbname(&self) -> Option<&str> { + self.pg_config.get_dbname() + } + + #[getter] + fn user(&self) -> Option<&str> { + self.pg_config.get_user() + } + + #[getter] + fn host_addrs(&self) -> Vec { + let mut host_addrs_vec = vec![]; + + let host_addrs = self.pg_config.get_hostaddrs(); + for ip_addr in host_addrs { + match ip_addr { + IpAddr::V4(ipv4) => { + host_addrs_vec.push(ipv4.to_string()); + } + IpAddr::V6(ipv6) => { + host_addrs_vec.push(ipv6.to_string()); + } + } + } + + host_addrs_vec + } + + #[getter] + fn 
hosts(&self) -> Vec { + let mut hosts_vec = vec![]; + + let hosts = self.pg_config.get_hosts(); + for host in hosts { + match host { + Host::Tcp(host) => { + hosts_vec.push(host.to_string()); + } + Host::Unix(host) => { + hosts_vec.push(host.display().to_string()); + } + } + } + + hosts_vec + } + + #[getter] + fn ports(&self) -> Vec<&u16> { + return self.pg_config.get_ports().iter().collect::>(); + } + + #[getter] + fn options(&self) -> Option<&str> { + return self.pg_config.get_options(); + } + async fn __aenter__<'a>(self_: Py) -> RustPSQLDriverPyResult> { let (db_client, db_pool) = pyo3::Python::with_gil(|gil| { let self_ = self_.borrow(gil); @@ -283,6 +350,7 @@ impl Connection { if let Some(db_client) = &self.db_client { return Ok(Transaction::new( db_client.clone(), + self.pg_config.clone(), false, false, isolation_level, @@ -318,6 +386,7 @@ impl Connection { if let Some(db_client) = &self.db_client { return Ok(Cursor::new( db_client.clone(), + self.pg_config.clone(), querystring, parameters, "cur_name".into(), diff --git a/src/driver/connection_pool.rs b/src/driver/connection_pool.rs index c2b3046e..526f5593 100644 --- a/src/driver/connection_pool.rs +++ b/src/driver/connection_pool.rs @@ -139,10 +139,10 @@ pub fn connect( let pool = db_pool_builder.build()?; Ok(ConnectionPool { - pool, - pg_config, - ca_file, - ssl_mode, + pool: pool, + pg_config: Arc::new(pg_config), + ca_file: ca_file, + ssl_mode: ssl_mode, }) } @@ -208,7 +208,7 @@ impl ConnectionPoolStatus { #[pyclass(subclass)] pub struct ConnectionPool { pool: Pool, - pg_config: Config, + pg_config: Arc, ca_file: Option, ssl_mode: Option, } @@ -222,10 +222,10 @@ impl ConnectionPool { ssl_mode: Option, ) -> Self { ConnectionPool { - pool, - pg_config, - ca_file, - ssl_mode, + pool: pool, + pg_config: Arc::new(pg_config), + ca_file: ca_file, + ssl_mode: ssl_mode, } } } @@ -499,7 +499,7 @@ impl ConnectionPool { #[must_use] pub fn acquire(&self) -> Connection { - Connection::new(None, Some(self.pool.clone())) + Connection::new(None, Some(self.pool.clone()), self.pg_config.clone()) } #[must_use] @@ -522,7 +522,10 @@ impl ConnectionPool { /// # Errors /// May return Err Result if cannot get new connection from the pool. pub async fn connection(self_: pyo3::Py) -> RustPSQLDriverPyResult { - let db_pool = pyo3::Python::with_gil(|gil| self_.borrow(gil).pool.clone()); + let (db_pool, pg_config) = pyo3::Python::with_gil(|gil| { + let slf = self_.borrow(gil); + (slf.pool.clone(), slf.pg_config.clone()) + }); let db_connection = tokio_runtime() .spawn(async move { Ok::(db_pool.get().await?) 
@@ -532,6 +535,7 @@ impl ConnectionPool { Ok(Connection::new( Some(Arc::new(PsqlpyConnection::PoolConn(db_connection))), None, + pg_config, )) } diff --git a/src/driver/cursor.rs b/src/driver/cursor.rs index e5147b89..5ad4691d 100644 --- a/src/driver/cursor.rs +++ b/src/driver/cursor.rs @@ -1,8 +1,9 @@ -use std::sync::Arc; +use std::{net::IpAddr, sync::Arc}; use pyo3::{ exceptions::PyStopAsyncIteration, pyclass, pymethods, Py, PyAny, PyErr, PyObject, Python, }; +use tokio_postgres::{config::Host, Config}; use crate::{ exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, @@ -90,6 +91,7 @@ impl CursorObjectTrait for PsqlpyConnection { #[pyclass(subclass)] pub struct Cursor { db_transaction: Option>, + pg_config: Arc, querystring: String, parameters: Option>, cursor_name: String, @@ -104,6 +106,7 @@ impl Cursor { #[must_use] pub fn new( db_transaction: Arc, + pg_config: Arc, querystring: String, parameters: Option>, cursor_name: String, @@ -113,6 +116,7 @@ impl Cursor { ) -> Self { Cursor { db_transaction: Some(db_transaction), + pg_config, querystring, parameters, cursor_name, @@ -127,6 +131,79 @@ impl Cursor { #[pymethods] impl Cursor { + #[getter] + fn conn_dbname(&self) -> Option<&str> { + self.pg_config.get_dbname() + } + + #[getter] + fn user(&self) -> Option<&str> { + self.pg_config.get_user() + } + + #[getter] + fn host_addrs(&self) -> Vec { + let mut host_addrs_vec = vec![]; + + let host_addrs = self.pg_config.get_hostaddrs(); + for ip_addr in host_addrs { + match ip_addr { + IpAddr::V4(ipv4) => { + host_addrs_vec.push(ipv4.to_string()); + } + IpAddr::V6(ipv6) => { + host_addrs_vec.push(ipv6.to_string()); + } + } + } + + host_addrs_vec + } + + #[getter] + fn hosts(&self) -> Vec { + let mut hosts_vec = vec![]; + + let hosts = self.pg_config.get_hosts(); + for host in hosts { + match host { + Host::Tcp(host) => { + hosts_vec.push(host.to_string()); + } + Host::Unix(host) => { + hosts_vec.push(host.display().to_string()); + } + } + } + + hosts_vec + } + + #[getter] + fn ports(&self) -> Vec<&u16> { + return self.pg_config.get_ports().iter().collect::>(); + } + + #[getter] + fn cursor_name(&self) -> String { + return self.cursor_name.clone(); + } + + #[getter] + fn querystring(&self) -> String { + return self.querystring.clone(); + } + + #[getter] + fn parameters(&self) -> Option> { + return self.parameters.clone(); + } + + #[getter] + fn prepared(&self) -> Option { + return self.prepared.clone(); + } + #[must_use] fn __aiter__(slf: Py) -> Py { slf diff --git a/src/driver/listener/core.rs b/src/driver/listener/core.rs index a79cdab6..83aa9b3e 100644 --- a/src/driver/listener/core.rs +++ b/src/driver/listener/core.rs @@ -28,7 +28,7 @@ use super::structs::{ #[pyclass] pub struct Listener { - pg_config: Config, + pg_config: Arc, ca_file: Option, ssl_mode: Option, channel_callbacks: Arc>, @@ -42,14 +42,14 @@ pub struct Listener { impl Listener { #[must_use] - pub fn new(pg_config: Config, ca_file: Option, ssl_mode: Option) -> Self { + pub fn new(pg_config: Arc, ca_file: Option, ssl_mode: Option) -> Self { Listener { - pg_config, + pg_config: pg_config.clone(), ca_file, ssl_mode, channel_callbacks: Arc::default(), listen_abort_handler: Option::default(), - connection: Connection::new(None, None), + connection: Connection::new(None, None, pg_config.clone()), receiver: Option::default(), listen_query: Arc::default(), is_listened: Arc::new(RwLock::new(false)), @@ -218,8 +218,11 @@ impl Listener { tokio_runtime().spawn(connection); self.receiver = 
Some(Arc::new(RwLock::new(receiver))); - self.connection = - Connection::new(Some(Arc::new(PsqlpyConnection::SingleConn(client))), None); + self.connection = Connection::new( + Some(Arc::new(PsqlpyConnection::SingleConn(client))), + None, + self.pg_config.clone(), + ); self.is_started = true; diff --git a/src/driver/transaction.rs b/src/driver/transaction.rs index 4cc3655a..15567f2b 100644 --- a/src/driver/transaction.rs +++ b/src/driver/transaction.rs @@ -6,7 +6,7 @@ use pyo3::{ pyclass, types::{PyList, PyTuple}, }; -use tokio_postgres::binary_copy::BinaryCopyInWriter; +use tokio_postgres::{binary_copy::BinaryCopyInWriter, config::Host, Config}; use crate::{ exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, @@ -19,7 +19,7 @@ use super::{ inner_connection::PsqlpyConnection, transaction_options::{IsolationLevel, ReadVariant, SynchronousCommit}, }; -use std::{collections::HashSet, sync::Arc}; +use std::{collections::HashSet, net::IpAddr, sync::Arc}; #[allow(clippy::module_name_repetitions)] pub trait TransactionObjectTrait { @@ -105,6 +105,7 @@ impl TransactionObjectTrait for PsqlpyConnection { #[pyclass(subclass)] pub struct Transaction { pub db_client: Option>, + pg_config: Arc, is_started: bool, is_done: bool, @@ -121,6 +122,7 @@ impl Transaction { #[must_use] pub fn new( db_client: Arc, + pg_config: Arc, is_started: bool, is_done: bool, isolation_level: Option, @@ -131,6 +133,7 @@ impl Transaction { ) -> Self { Self { db_client: Some(db_client), + pg_config, is_started, is_done, isolation_level, @@ -158,6 +161,59 @@ impl Transaction { #[pymethods] impl Transaction { + #[getter] + fn conn_dbname(&self) -> Option<&str> { + self.pg_config.get_dbname() + } + + #[getter] + fn user(&self) -> Option<&str> { + self.pg_config.get_user() + } + + #[getter] + fn host_addrs(&self) -> Vec { + let mut host_addrs_vec = vec![]; + + let host_addrs = self.pg_config.get_hostaddrs(); + for ip_addr in host_addrs { + match ip_addr { + IpAddr::V4(ipv4) => { + host_addrs_vec.push(ipv4.to_string()); + } + IpAddr::V6(ipv6) => { + host_addrs_vec.push(ipv6.to_string()); + } + } + } + + host_addrs_vec + } + + #[getter] + fn hosts(&self) -> Vec { + let mut hosts_vec = vec![]; + + let hosts = self.pg_config.get_hosts(); + for host in hosts { + match host { + Host::Tcp(host) => { + hosts_vec.push(host.to_string()); + } + Host::Unix(host) => { + hosts_vec.push(host.display().to_string()); + } + } + } + + hosts_vec + } + + #[getter] + fn ports(&self) -> Vec<&u16> { + return self.pg_config.get_ports().iter().collect::>(); + } + #[must_use] pub fn __aiter__(self_: Py) -> Py { self_ @@ -756,6 +812,7 @@ impl Transaction { if let Some(db_client) = &self.db_client { return Ok(Cursor::new( db_client.clone(), + self.pg_config.clone(), querystring, parameters, "cur_name".into(), From 59c9c624884f41f17270d858af4ffc77ab091bf9 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Sun, 9 Feb 2025 00:24:16 +0100 Subject: [PATCH 07/65] Prepared psqlpy for OTLP Signed-off-by: chandr-andr (Kiselev Aleksandr) --- README.md | 7 +- docs/components/connection_pool.md | 49 ------- docs/integrations/opentelemetry.md | 8 + docs/integrations/taskiq.md | 8 + docs/introduction/lets_start.md | 16 +- psqlpy-stress/psqlpy_stress/mocker.py | 6 +- python/psqlpy/_internal/__init__.pyi | 54 ------- python/tests/conftest.py | 12 +- python/tests/test_binary_copy.py | 16 +- python/tests/test_connection.py | 5 +- python/tests/test_connection_pool.py | 62 +++----- python/tests/test_connection_pool_builder.py | 4 +- 
python/tests/test_listener.py | 8 +- python/tests/test_row_factories.py | 9 +- python/tests/test_ssl_mode.py | 9 +- python/tests/test_transaction.py | 12 +- python/tests/test_value_converter.py | 82 +++++------ src/driver/connection_pool.rs | 145 +------------------ 18 files changed, 135 insertions(+), 377 deletions(-) create mode 100644 docs/integrations/opentelemetry.md create mode 100644 docs/integrations/taskiq.md diff --git a/README.md b/README.md index 081dfccb..1272db9c 100644 --- a/README.md +++ b/README.md @@ -55,9 +55,10 @@ async def main() -> None: max_db_pool_size=2, ) - res: QueryResult = await db_pool.execute( - "SELECT * FROM users", - ) + async with db_pool.acquire() as conn: + res: QueryResult = await conn.execute( + "SELECT * FROM users", + ) print(res.result()) db_pool.close() diff --git a/docs/components/connection_pool.md b/docs/components/connection_pool.md index bcee4893..514f899d 100644 --- a/docs/components/connection_pool.md +++ b/docs/components/connection_pool.md @@ -178,55 +178,6 @@ It has 4 parameters: - `available` - available connection in the connection pool. - `waiting` - waiting requests to retrieve connection from connection pool. -### Execute - -#### Parameters: - -- `querystring`: Statement string. -- `parameters`: List of parameters for the statement string. -- `prepared`: Prepare statement before execution or not. - -You can execute any query directly from Connection Pool. -This method supports parameters, each parameter must be marked as `$` (number starts with 1). -Parameters must be passed as list after querystring. -::: caution -You must use `ConnectionPool.execute` method in high-load production code wisely! -It pulls connection from the pool each time you execute query. -Preferable way to execute statements with [Connection](./../components/connection.md) or [Transaction](./../components/transaction.md) -::: - -```python -async def main() -> None: - ... - results: QueryResult = await db_pool.execute( - "SELECT * FROM users WHERE id = $1 and username = $2", - [100, "Alex"], - ) - - dict_results: list[dict[str, Any]] = results.result() -``` - -### Fetch - -#### Parameters: - -- `querystring`: Statement string. -- `parameters`: List of parameters for the statement string. -- `prepared`: Prepare statement before execution or not. - -The same as the `execute` method, for some people this naming is preferable. - -```python -async def main() -> None: - ... - results: QueryResult = await db_pool.fetch( - "SELECT * FROM users WHERE id = $1 and username = $2", - [100, "Alex"], - ) - - dict_results: list[dict[str, Any]] = results.result() -``` - ### Acquire Get single connection for async context manager. diff --git a/docs/integrations/opentelemetry.md b/docs/integrations/opentelemetry.md new file mode 100644 index 00000000..a8461034 --- /dev/null +++ b/docs/integrations/opentelemetry.md @@ -0,0 +1,8 @@ +--- +title: Integration with OpenTelemetry +--- + +# OTLP-PSQLPy + +There is a library for OpenTelemetry support. +Please follow the [link](https://github.com/psqlpy-python/otlp-psqlpy) diff --git a/docs/integrations/taskiq.md b/docs/integrations/taskiq.md new file mode 100644 index 00000000..97579347 --- /dev/null +++ b/docs/integrations/taskiq.md @@ -0,0 +1,8 @@ +--- +title: Integration with TaskIQ +--- + +# TaskIQ-PSQLPy + +There is integration with [TaskIQ](https://github.com/taskiq-python/taskiq-psqlpy). +You can use PSQLPy for result backend. 
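The two "Prepared psqlpy for OTLP" patches expose connection metadata (`conn_dbname`, `user`, `host_addrs`, `hosts`, `ports`) on `Connection`, `Cursor`, and `Transaction`, and the new `docs/integrations/opentelemetry.md` page points at the separate `otlp-psqlpy` library. A minimal sketch of what that metadata enables, assuming the standard `opentelemetry-api` package is installed; the span name and attribute keys below are illustrative choices, not part of psqlpy or otlp-psqlpy:

```python
import asyncio
from typing import Final

from opentelemetry import trace
from psqlpy import ConnectionPool

tracer = trace.get_tracer(__name__)


async def main() -> None:
    db_pool: Final = ConnectionPool(
        dsn="postgres://postgres:postgres@localhost:5432/psqlpy_test",
    )
    connection = await db_pool.connection()

    # Span name and attribute keys are illustrative; the values come from
    # the getters added in PATCH 06 (conn_dbname, user, hosts, ports).
    with tracer.start_as_current_span("psqlpy.execute") as span:
        span.set_attribute("db.system", "postgresql")
        if connection.conn_dbname is not None:
            span.set_attribute("db.name", connection.conn_dbname)
        if connection.user is not None:
            span.set_attribute("db.user", connection.user)
        if connection.hosts:
            span.set_attribute("server.address", ",".join(connection.hosts))
        if connection.ports:
            span.set_attribute("server.port", connection.ports[0])

        await connection.execute("SELECT 1")

    db_pool.close()


asyncio.run(main())
```

The attribute keys loosely follow OpenTelemetry's database semantic conventions; the linked `otlp-psqlpy` library is meant to provide this kind of instrumentation without writing manual spans.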
diff --git a/docs/introduction/lets_start.md b/docs/introduction/lets_start.md index 8152a8e9..0ab5f4bc 100644 --- a/docs/introduction/lets_start.md +++ b/docs/introduction/lets_start.md @@ -49,20 +49,16 @@ async def main() -> None: # It uses default connection parameters db_pool: Final = ConnectionPool() - results: Final[QueryResult] = await db_pool.execute( - "SELECT * FROM users WHERE id = $1", - [2], - ) + async with db_pool.acquire() as conn: + results: Final[QueryResult] = await conn.execute( + "SELECT * FROM users WHERE id = $1", + [2], + ) dict_results: Final[list[dict[Any, Any]]] = results.result() db_pool.close() ``` ::: tip -You must call `close()` on database pool when you application is shutting down. -::: -::: caution -You must not use `ConnectionPool.execute` method in high-load production code! -It pulls new connection from connection pull each call. -Recommended way to make queries is executing them with `Connection`, `Transaction` or `Cursor`. +It's better to call `close()` on database pool when you application is shutting down. ::: diff --git a/psqlpy-stress/psqlpy_stress/mocker.py b/psqlpy-stress/psqlpy_stress/mocker.py index 156f10df..de55fc59 100644 --- a/psqlpy-stress/psqlpy_stress/mocker.py +++ b/psqlpy-stress/psqlpy_stress/mocker.py @@ -17,8 +17,9 @@ def get_pool() -> psqlpy.ConnectionPool: async def fill_users() -> None: pool = get_pool() users_amount = 10000000 + connection = await pool.connection() for _ in range(users_amount): - await pool.execute( + await connection.execute( querystring="INSERT INTO users (username) VALUES($1)", parameters=[str(uuid.uuid4())], ) @@ -35,8 +36,9 @@ def generate_random_dict() -> dict[str, str]: async def fill_big_table() -> None: pool = get_pool() big_table_amount = 10000000 + connection = await pool.connection() for _ in range(big_table_amount): - await pool.execute( + await connection.execute( "INSERT INTO big_table (string_field, integer_field, json_field, array_field) VALUES($1, $2, $3, $4)", parameters=[ str(uuid.uuid4()), diff --git a/python/psqlpy/_internal/__init__.pyi b/python/psqlpy/_internal/__init__.pyi index cd838038..77ec440d 100644 --- a/python/psqlpy/_internal/__init__.pyi +++ b/python/psqlpy/_internal/__init__.pyi @@ -1306,60 +1306,6 @@ class ConnectionPool: ### Parameters: - `new_max_size`: new size for the connection pool. """ - async def execute( - self: Self, - querystring: str, - parameters: Sequence[Any] | None = None, - prepared: bool = True, - ) -> QueryResult: - """Execute the query. - - Querystring can contain `$` parameters - for converting them in the driver side. - - ### Parameters: - - `querystring`: querystring to execute. - - `parameters`: list of parameters to pass in the query. - - `prepared`: should the querystring be prepared before the request. - By default any querystring will be prepared. - - ### Example: - ```python - import asyncio - - from psqlpy import PSQLPool, QueryResult - - async def main() -> None: - db_pool = PSQLPool() - query_result: QueryResult = await psqlpy.execute( - "SELECT username FROM users WHERE id = $1", - [100], - ) - dict_result: List[Dict[Any, Any]] = query_result.result() - # you don't need to close the pool, - # it will be dropped on Rust side. - ``` - """ - async def fetch( - self: Self, - querystring: str, - parameters: Sequence[Any] | None = None, - prepared: bool = True, - ) -> QueryResult: - """Fetch the result from database. - - It's the same as `execute` method, we made it because people are used - to `fetch` method name. 
- - Querystring can contain `$` parameters - for converting them in the driver side. - - ### Parameters: - - `querystring`: querystring to execute. - - `parameters`: list of parameters to pass in the query. - - `prepared`: should the querystring be prepared before the request. - By default any querystring will be prepared. - """ async def connection(self: Self) -> Connection: """Create new connection. diff --git a/python/tests/conftest.py b/python/tests/conftest.py index bfa3f650..4a388f62 100644 --- a/python/tests/conftest.py +++ b/python/tests/conftest.py @@ -126,18 +126,19 @@ async def create_default_data_for_tests( table_name: str, number_database_records: int, ) -> AsyncGenerator[None, None]: - await psql_pool.execute( + connection = await psql_pool.connection() + await connection.execute( f"CREATE TABLE {table_name} (id SERIAL, name VARCHAR(255))", ) for table_id in range(1, number_database_records + 1): new_name = random_string() - await psql_pool.execute( + await connection.execute( querystring=f"INSERT INTO {table_name} VALUES ($1, $2)", parameters=[table_id, new_name], ) yield - await psql_pool.execute( + await connection.execute( f"DROP TABLE {table_name}", ) @@ -147,14 +148,15 @@ async def create_table_for_listener_tests( psql_pool: ConnectionPool, listener_table_name: str, ) -> AsyncGenerator[None, None]: - await psql_pool.execute( + connection = await psql_pool.connection() + await connection.execute( f"CREATE TABLE {listener_table_name}" f"(id SERIAL, payload VARCHAR(255)," f"channel VARCHAR(255), process_id INT)", ) yield - await psql_pool.execute( + await connection.execute( f"DROP TABLE {listener_table_name}", ) diff --git a/python/tests/test_binary_copy.py b/python/tests/test_binary_copy.py index 93cc1335..a42671a8 100644 --- a/python/tests/test_binary_copy.py +++ b/python/tests/test_binary_copy.py @@ -15,8 +15,9 @@ async def test_binary_copy_to_table_in_connection( ) -> None: """Test binary copy in connection.""" table_name: typing.Final = "cars" - await psql_pool.execute(f"DROP TABLE IF EXISTS {table_name}") - await psql_pool.execute( + connection = await psql_pool.connection() + await connection.execute(f"DROP TABLE IF EXISTS {table_name}") + await connection.execute( """ CREATE TABLE IF NOT EXISTS cars ( model VARCHAR, @@ -56,7 +57,7 @@ async def test_binary_copy_to_table_in_connection( assert inserted_rows == expected_inserted_row - real_table_rows: typing.Final = await psql_pool.execute( + real_table_rows: typing.Final = await connection.execute( f"SELECT COUNT(*) AS rows_count FROM {table_name}", ) assert real_table_rows.result()[0]["rows_count"] == expected_inserted_row @@ -67,8 +68,10 @@ async def test_binary_copy_to_table_in_transaction( ) -> None: """Test binary copy in transaction.""" table_name: typing.Final = "cars" - await psql_pool.execute(f"DROP TABLE IF EXISTS {table_name}") - await psql_pool.execute( + + connection = await psql_pool.connection() + await connection.execute(f"DROP TABLE IF EXISTS {table_name}") + await connection.execute( """ CREATE TABLE IF NOT EXISTS cars ( model VARCHAR, @@ -108,7 +111,8 @@ async def test_binary_copy_to_table_in_transaction( assert inserted_rows == expected_inserted_row - real_table_rows: typing.Final = await psql_pool.execute( + connection = await psql_pool.connection() + real_table_rows: typing.Final = await connection.execute( f"SELECT COUNT(*) AS rows_count FROM {table_name}", ) assert real_table_rows.result()[0]["rows_count"] == expected_inserted_row diff --git a/python/tests/test_connection.py 
b/python/tests/test_connection.py index 3c15991a..898cc405 100644 --- a/python/tests/test_connection.py +++ b/python/tests/test_connection.py @@ -180,8 +180,9 @@ async def test_closed_connection_error( async def test_execute_batch_method(psql_pool: ConnectionPool) -> None: """Test `execute_batch` method.""" - await psql_pool.execute(querystring="DROP TABLE IF EXISTS execute_batch") - await psql_pool.execute(querystring="DROP TABLE IF EXISTS execute_batch2") + connection = await psql_pool.connection() + await connection.execute(querystring="DROP TABLE IF EXISTS execute_batch") + await connection.execute(querystring="DROP TABLE IF EXISTS execute_batch2") query = "CREATE TABLE execute_batch (name VARCHAR);CREATE TABLE execute_batch2 (name VARCHAR);" async with psql_pool.acquire() as conn: await conn.execute_batch(querystring=query) diff --git a/python/tests/test_connection_pool.py b/python/tests/test_connection_pool.py index cdf2fa48..405fceb7 100644 --- a/python/tests/test_connection_pool.py +++ b/python/tests/test_connection_pool.py @@ -4,7 +4,6 @@ ConnectionPool, ConnRecyclingMethod, LoadBalanceHosts, - QueryResult, TargetSessionAttrs, connect, ) @@ -22,7 +21,8 @@ async def test_connect_func() -> None: dsn="postgres://postgres:postgres@localhost:5432/psqlpy_test", ) - await pg_pool.execute("SELECT 1") + conn = await pg_pool.connection() + await conn.execute("SELECT 1") async def test_pool_dsn_startup() -> None: @@ -31,41 +31,8 @@ async def test_pool_dsn_startup() -> None: dsn="postgres://postgres:postgres@localhost:5432/psqlpy_test", ) - await pg_pool.execute("SELECT 1") - - -async def test_pool_execute( - psql_pool: ConnectionPool, - table_name: str, - number_database_records: int, -) -> None: - """Test that ConnectionPool can execute queries.""" - select_result = await psql_pool.execute( - f"SELECT * FROM {table_name}", - ) - - assert type(select_result) == QueryResult - - inner_result = select_result.result() - assert isinstance(inner_result, list) - assert len(inner_result) == number_database_records - - -async def test_pool_fetch( - psql_pool: ConnectionPool, - table_name: str, - number_database_records: int, -) -> None: - """Test that ConnectionPool can fetch queries.""" - select_result = await psql_pool.fetch( - f"SELECT * FROM {table_name}", - ) - - assert type(select_result) == QueryResult - - inner_result = select_result.result() - assert isinstance(inner_result, list) - assert len(inner_result) == number_database_records + conn = await pg_pool.connection() + await conn.execute("SELECT 1") async def test_pool_connection( @@ -92,7 +59,8 @@ async def test_pool_conn_recycling_method( conn_recycling_method=conn_recycling_method, ) - await pg_pool.execute("SELECT 1") + conn = await pg_pool.connection() + await conn.execute("SELECT 1") async def test_build_pool_failure() -> None: @@ -139,9 +107,10 @@ async def test_pool_target_session_attrs( if target_session_attrs == TargetSessionAttrs.ReadOnly: with pytest.raises(expected_exception=RustPSQLDriverPyBaseError): - await pg_pool.execute("SELECT 1") + await pg_pool.connection() else: - await pg_pool.execute("SELECT 1") + conn = await pg_pool.connection() + await conn.execute("SELECT 1") @pytest.mark.parametrize( @@ -159,7 +128,8 @@ async def test_pool_load_balance_hosts( load_balance_hosts=load_balance_hosts, ) - await pg_pool.execute("SELECT 1") + conn = await pg_pool.connection() + await conn.execute("SELECT 1") async def test_close_connection_pool() -> None: @@ -168,12 +138,13 @@ async def test_close_connection_pool() -> None: 
dsn="postgres://postgres:postgres@localhost:5432/psqlpy_test", ) - await pg_pool.execute("SELECT 1") + conn = await pg_pool.connection() + await conn.execute("SELECT 1") pg_pool.close() with pytest.raises(expected_exception=RustPSQLDriverPyBaseError): - await pg_pool.execute("SELECT 1") + await pg_pool.connection() async def test_connection_pool_as_context_manager() -> None: @@ -181,8 +152,9 @@ async def test_connection_pool_as_context_manager() -> None: with ConnectionPool( dsn="postgres://postgres:postgres@localhost:5432/psqlpy_test", ) as pg_pool: - res = await pg_pool.execute("SELECT 1") + conn = await pg_pool.connection() + res = await conn.execute("SELECT 1") assert res.result() with pytest.raises(expected_exception=RustPSQLDriverPyBaseError): - await pg_pool.execute("SELECT 1") + await pg_pool.connection() diff --git a/python/tests/test_connection_pool_builder.py b/python/tests/test_connection_pool_builder.py index f937bec3..7c9de409 100644 --- a/python/tests/test_connection_pool_builder.py +++ b/python/tests/test_connection_pool_builder.py @@ -48,8 +48,8 @@ async def test_connection_pool_builder( ) pool = builder.build() - - results = await pool.execute( + connection = await pool.connection() + results = await connection.execute( querystring=f"SELECT * FROM {table_name}", ) diff --git a/python/tests/test_listener.py b/python/tests/test_listener.py index 46722ca1..a2606e5a 100644 --- a/python/tests/test_listener.py +++ b/python/tests/test_listener.py @@ -62,7 +62,7 @@ async def notify( if with_delay: await asyncio.sleep(0.5) - await psql_pool.execute(f"NOTIFY {channel}, '{TEST_PAYLOAD}'") + await (await psql_pool.connection()).execute(f"NOTIFY {channel}, '{TEST_PAYLOAD}'") async def check_insert_callback( @@ -72,7 +72,7 @@ async def check_insert_callback( number_of_data: int = 1, ) -> None: test_data_seq = ( - await psql_pool.execute( + await (await psql_pool.connection()).execute( f"SELECT * FROM {listener_table_name}", ) ).result() @@ -93,7 +93,7 @@ async def clear_test_table( psql_pool: ConnectionPool, listener_table_name: str, ) -> None: - await psql_pool.execute( + await (await psql_pool.connection()).execute( f"DELETE FROM {listener_table_name}", ) @@ -244,7 +244,7 @@ async def test_listener_more_than_one_callback( number_of_data=2, ) - query_result = await psql_pool.execute( + query_result = await (await psql_pool.connection()).execute( querystring=(f"SELECT * FROM {listener_table_name} WHERE channel = $1"), parameters=(additional_channel,), ) diff --git a/python/tests/test_row_factories.py b/python/tests/test_row_factories.py index 75d03e5a..9b7f3121 100644 --- a/python/tests/test_row_factories.py +++ b/python/tests/test_row_factories.py @@ -13,7 +13,8 @@ async def test_tuple_row( table_name: str, number_database_records: int, ) -> None: - conn_result = await psql_pool.execute( + connection = await psql_pool.connection() + conn_result = await connection.execute( querystring=f"SELECT * FROM {table_name}", ) tuple_res = conn_result.row_factory(row_factory=tuple_row) @@ -32,7 +33,8 @@ class ValidationTestModel: id: int name: str - conn_result = await psql_pool.execute( + connection = await psql_pool.connection() + conn_result = await connection.execute( querystring=f"SELECT * FROM {table_name}", ) class_res = conn_result.row_factory(row_factory=class_row(ValidationTestModel)) @@ -58,7 +60,8 @@ def to_class_inner(row: Dict[str, Any]) -> ValidationTestModel: return to_class_inner - conn_result = await psql_pool.execute( + connection = await psql_pool.connection() + conn_result = 
await connection.execute( querystring=f"SELECT * FROM {table_name}", ) class_res = conn_result.row_factory(row_factory=to_class(ValidationTestModel)) diff --git a/python/tests/test_ssl_mode.py b/python/tests/test_ssl_mode.py index 53978d9e..4c72014e 100644 --- a/python/tests/test_ssl_mode.py +++ b/python/tests/test_ssl_mode.py @@ -35,7 +35,8 @@ async def test_ssl_mode_require( ca_file=ssl_cert_file, ) - await pg_pool.execute("SELECT 1") + conn = await pg_pool.connection() + await conn.execute("SELECT 1") @pytest.mark.parametrize( @@ -72,7 +73,8 @@ async def test_ssl_mode_require_pool_builder( pool = builder.build() - await pool.execute("SELECT 1") + connection = await pool.connection() + await connection.execute("SELECT 1") async def test_ssl_mode_require_without_ca_file( @@ -94,4 +96,5 @@ async def test_ssl_mode_require_without_ca_file( ) pool = builder.build() - await pool.execute("SELECT 1") + connection = await pool.connection() + await connection.execute("SELECT 1") diff --git a/python/tests/test_transaction.py b/python/tests/test_transaction.py index 7704393b..c597fb5d 100644 --- a/python/tests/test_transaction.py +++ b/python/tests/test_transaction.py @@ -97,7 +97,7 @@ async def test_transaction_commit( # Make request from other connection, it mustn't know # about new INSERT data before commit. - result = await psql_pool.execute( + result = await (await psql_pool.connection()).execute( f"SELECT * FROM {table_name} WHERE name = $1", parameters=[test_name], ) @@ -105,7 +105,7 @@ async def test_transaction_commit( await transaction.commit() - result = await psql_pool.execute( + result = await (await psql_pool.connection()).execute( f"SELECT * FROM {table_name} WHERE name = $1", parameters=[test_name], ) @@ -136,7 +136,7 @@ async def test_transaction_savepoint( assert result.result() await transaction.rollback_savepoint(savepoint_name=savepoint_name) - result = await psql_pool.execute( + result = await (await psql_pool.connection()).execute( f"SELECT * FROM {table_name} WHERE name = $1", parameters=[test_name], ) @@ -174,7 +174,7 @@ async def test_transaction_rollback( parameters=[test_name], ) - result_from_conn = await psql_pool.execute( + result_from_conn = await (await psql_pool.connection()).execute( f"INSERT INTO {table_name} VALUES ($1, $2)", parameters=[100, test_name], ) @@ -344,8 +344,8 @@ async def test_transaction_send_underlying_connection_to_pool_manually( async def test_execute_batch_method(psql_pool: ConnectionPool) -> None: """Test `execute_batch` method.""" - await psql_pool.execute(querystring="DROP TABLE IF EXISTS execute_batch") - await psql_pool.execute(querystring="DROP TABLE IF EXISTS execute_batch2") + await (await psql_pool.connection()).execute(querystring="DROP TABLE IF EXISTS execute_batch") + await (await psql_pool.connection()).execute(querystring="DROP TABLE IF EXISTS execute_batch2") query = "CREATE TABLE execute_batch (name VARCHAR);CREATE TABLE execute_batch2 (name VARCHAR);" async with psql_pool.acquire() as conn, conn.transaction() as transaction: await transaction.execute_batch(querystring=query) diff --git a/python/tests/test_value_converter.py b/python/tests/test_value_converter.py index 414b051f..da0fcb78 100644 --- a/python/tests/test_value_converter.py +++ b/python/tests/test_value_converter.py @@ -103,7 +103,7 @@ async def test_as_class( number_database_records: int, ) -> None: """Test `as_class()` method.""" - select_result = await psql_pool.execute( + select_result = await (await psql_pool.connection()).execute( f"SELECT * FROM 
{table_name}", ) @@ -649,20 +649,20 @@ async def test_deserialization_simple_into_python( expected_deserialized: Any, ) -> None: """Test how types can cast from Python and to Python.""" - await psql_pool.execute("DROP TABLE IF EXISTS for_test") + await (await psql_pool.connection()).execute("DROP TABLE IF EXISTS for_test") create_table_query = f""" CREATE TABLE for_test (test_field {postgres_type}) """ insert_data_query = """ INSERT INTO for_test VALUES ($1) """ - await psql_pool.execute(querystring=create_table_query) - await psql_pool.execute( + await (await psql_pool.connection()).execute(querystring=create_table_query) + await (await psql_pool.connection()).execute( querystring=insert_data_query, parameters=[py_value], ) - raw_result = await psql_pool.execute( + raw_result = await (await psql_pool.connection()).execute( querystring="SELECT test_field FROM for_test", ) @@ -673,12 +673,12 @@ async def test_deserialization_composite_into_python( psql_pool: ConnectionPool, ) -> None: """Test that it's possible to deserialize custom postgresql type.""" - await psql_pool.execute("DROP TABLE IF EXISTS for_test") - await psql_pool.execute("DROP TYPE IF EXISTS all_types") - await psql_pool.execute("DROP TYPE IF EXISTS inner_type") - await psql_pool.execute("DROP TYPE IF EXISTS enum_type") - await psql_pool.execute("CREATE TYPE enum_type AS ENUM ('sad', 'ok', 'happy')") - await psql_pool.execute("CREATE TYPE inner_type AS (inner_value VARCHAR, some_enum enum_type)") + await (await psql_pool.connection()).execute("DROP TABLE IF EXISTS for_test") + await (await psql_pool.connection()).execute("DROP TYPE IF EXISTS all_types") + await (await psql_pool.connection()).execute("DROP TYPE IF EXISTS inner_type") + await (await psql_pool.connection()).execute("DROP TYPE IF EXISTS enum_type") + await (await psql_pool.connection()).execute("CREATE TYPE enum_type AS ENUM ('sad', 'ok', 'happy')") + await (await psql_pool.connection()).execute("CREATE TYPE inner_type AS (inner_value VARCHAR, some_enum enum_type)") create_type_query = """ CREATE type all_types AS ( bytea_ BYTEA, @@ -736,10 +736,10 @@ async def test_deserialization_composite_into_python( CREATE table for_test (custom_type all_types) """ - await psql_pool.execute( + await (await psql_pool.connection()).execute( querystring=create_type_query, ) - await psql_pool.execute( + await (await psql_pool.connection()).execute( querystring=create_table_query, ) @@ -752,7 +752,7 @@ class TestEnum(Enum): row_values += ", ROW($41, $42), " row_values += ", ".join([f"${index}" for index in range(43, 50)]) - await psql_pool.execute( + await (await psql_pool.connection()).execute( querystring=f"INSERT INTO for_test VALUES (ROW({row_values}))", parameters=[ b"Bytes", @@ -914,7 +914,7 @@ class ValidateModelForCustomType(BaseModel): class TopLevelModel(BaseModel): custom_type: ValidateModelForCustomType - query_result = await psql_pool.execute( + query_result = await (await psql_pool.connection()).execute( "SELECT custom_type FROM for_test", ) @@ -938,21 +938,21 @@ class TestStrEnum(str, Enum): SAD = "sad" HAPPY = "happy" - await psql_pool.execute("DROP TABLE IF EXISTS for_test") - await psql_pool.execute("DROP TYPE IF EXISTS mood") - await psql_pool.execute( + await (await psql_pool.connection()).execute("DROP TABLE IF EXISTS for_test") + await (await psql_pool.connection()).execute("DROP TYPE IF EXISTS mood") + await (await psql_pool.connection()).execute( "CREATE TYPE mood AS ENUM ('sad', 'ok', 'happy')", ) - await psql_pool.execute( + await (await 
psql_pool.connection()).execute( "CREATE TABLE for_test (test_mood mood, test_mood2 mood)", ) - await psql_pool.execute( + await (await psql_pool.connection()).execute( querystring="INSERT INTO for_test VALUES ($1, $2)", parameters=[TestEnum.HAPPY, TestEnum.OK], ) - qs_result = await psql_pool.execute( + qs_result = await (await psql_pool.connection()).execute( "SELECT * FROM for_test", ) assert qs_result.result()[0]["test_mood"] == TestEnum.HAPPY.value @@ -964,17 +964,17 @@ async def test_custom_type_as_parameter( psql_pool: ConnectionPool, ) -> None: """Tests that we can use `PyCustomType`.""" - await psql_pool.execute("DROP TABLE IF EXISTS for_test") - await psql_pool.execute( + await (await psql_pool.connection()).execute("DROP TABLE IF EXISTS for_test") + await (await psql_pool.connection()).execute( "CREATE TABLE for_test (nickname VARCHAR)", ) - await psql_pool.execute( + await (await psql_pool.connection()).execute( querystring="INSERT INTO for_test VALUES ($1)", parameters=[CustomType(b"Some Real Nickname")], ) - qs_result = await psql_pool.execute( + qs_result = await (await psql_pool.connection()).execute( "SELECT * FROM for_test", ) @@ -985,19 +985,19 @@ async def test_custom_type_as_parameter( async def test_custom_decoder( psql_pool: ConnectionPool, ) -> None: - await psql_pool.execute("DROP TABLE IF EXISTS for_test") - await psql_pool.execute( + await (await psql_pool.connection()).execute("DROP TABLE IF EXISTS for_test") + await (await psql_pool.connection()).execute( "CREATE TABLE for_test (geo_point POINT)", ) - await psql_pool.execute( + await (await psql_pool.connection()).execute( "INSERT INTO for_test VALUES ('(1, 1)')", ) def point_encoder(point_bytes: bytes) -> str: # noqa: ARG001 return "Just An Example" - qs_result = await psql_pool.execute( + qs_result = await (await psql_pool.connection()).execute( "SELECT * FROM for_test", ) result = qs_result.result( @@ -1014,7 +1014,7 @@ async def test_row_factory_query_result( table_name: str, number_database_records: int, ) -> None: - select_result = await psql_pool.execute( + select_result = await (await psql_pool.connection()).execute( f"SELECT * FROM {table_name}", ) @@ -1053,11 +1053,11 @@ def row_factory(db_result: Dict[str, Any]) -> List[str]: async def test_incorrect_dimensions_array( psql_pool: ConnectionPool, ) -> None: - await psql_pool.execute("DROP TABLE IF EXISTS test_marr") - await psql_pool.execute("CREATE TABLE test_marr (var_array VARCHAR ARRAY)") + await (await psql_pool.connection()).execute("DROP TABLE IF EXISTS test_marr") + await (await psql_pool.connection()).execute("CREATE TABLE test_marr (var_array VARCHAR ARRAY)") with pytest.raises(expected_exception=PyToRustValueMappingError): - await psql_pool.execute( + await (await psql_pool.connection()).execute( querystring="INSERT INTO test_marr VALUES ($1)", parameters=[ [ @@ -1071,14 +1071,14 @@ async def test_incorrect_dimensions_array( async def test_empty_array( psql_pool: ConnectionPool, ) -> None: - await psql_pool.execute("DROP TABLE IF EXISTS test_earr") - await psql_pool.execute( + await (await psql_pool.connection()).execute("DROP TABLE IF EXISTS test_earr") + await (await psql_pool.connection()).execute( "CREATE TABLE test_earr (id serial NOT NULL PRIMARY KEY, e_array text[] NOT NULL DEFAULT array[]::text[])", ) - await psql_pool.execute("INSERT INTO test_earr(id) VALUES(2);") + await (await psql_pool.connection()).execute("INSERT INTO test_earr(id) VALUES(2);") - res = await psql_pool.execute( + res = await (await 
psql_pool.connection()).execute( "SELECT * FROM test_earr WHERE id = 2", ) @@ -1557,20 +1557,20 @@ async def test_array_types( py_value: Any, expected_deserialized: Any, ) -> None: - await psql_pool.execute("DROP TABLE IF EXISTS for_test") + await (await psql_pool.connection()).execute("DROP TABLE IF EXISTS for_test") create_table_query = f""" CREATE TABLE for_test (test_field {postgres_type}) """ insert_data_query = """ INSERT INTO for_test VALUES ($1) """ - await psql_pool.execute(querystring=create_table_query) - await psql_pool.execute( + await (await psql_pool.connection()).execute(querystring=create_table_query) + await (await psql_pool.connection()).execute( querystring=insert_data_query, parameters=[py_value], ) - raw_result = await psql_pool.execute( + raw_result = await (await psql_pool.connection()).execute( querystring="SELECT test_field FROM for_test", ) diff --git a/src/driver/connection_pool.rs b/src/driver/connection_pool.rs index 526f5593..24780a6a 100644 --- a/src/driver/connection_pool.rs +++ b/src/driver/connection_pool.rs @@ -1,14 +1,10 @@ use crate::runtime::tokio_runtime; -use deadpool_postgres::{Manager, ManagerConfig, Object, Pool, RecyclingMethod}; +use deadpool_postgres::{Manager, ManagerConfig, Pool, RecyclingMethod}; use pyo3::{pyclass, pyfunction, pymethods, Py, PyAny}; -use std::{sync::Arc, vec}; +use std::sync::Arc; use tokio_postgres::Config; -use crate::{ - exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, - query_result::PSQLDriverPyQueryResult, - value_converter::{convert_parameters, PythonDTO, QueryParameter}, -}; +use crate::exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}; use super::{ common_options::{ConnRecyclingMethod, LoadBalanceHosts, SslMode, TargetSessionAttrs}, @@ -362,141 +358,6 @@ impl ConnectionPool { self.pool.resize(new_max_size); } - /// Execute querystring with parameters. - /// - /// Prepare statement and cache it, then execute. - /// - /// # Errors - /// May return Err Result if cannot retrieve new connection - /// or prepare statement or execute statement. - #[pyo3(signature = (querystring, parameters=None, prepared=None))] - pub async fn execute<'a>( - self_: pyo3::Py, - querystring: String, - parameters: Option>, - prepared: Option, - ) -> RustPSQLDriverPyResult { - let db_pool = pyo3::Python::with_gil(|gil| self_.borrow(gil).pool.clone()); - - let db_pool_manager = tokio_runtime() - .spawn(async move { Ok::(db_pool.get().await?) }) - .await??; - let mut params: Vec = vec![]; - if let Some(parameters) = parameters { - params = convert_parameters(parameters)?; - } - let prepared = prepared.unwrap_or(true); - let result = if prepared { - tokio_runtime() - .spawn(async move { - db_pool_manager - .query( - &db_pool_manager.prepare_cached(&querystring).await?, - ¶ms - .iter() - .map(|param| param as &QueryParameter) - .collect::>(), - ) - .await - .map_err(|err| { - RustPSQLDriverError::ConnectionExecuteError(format!( - "Cannot execute statement from ConnectionPool, error - {err}" - )) - }) - }) - .await?? - } else { - tokio_runtime() - .spawn(async move { - db_pool_manager - .query( - &querystring, - ¶ms - .iter() - .map(|param| param as &QueryParameter) - .collect::>(), - ) - .await - .map_err(|err| { - RustPSQLDriverError::ConnectionExecuteError(format!( - "Cannot execute statement from ConnectionPool, error - {err}" - )) - }) - }) - .await?? - }; - Ok(PSQLDriverPyQueryResult::new(result)) - } - - /// Fetch result from the database. 
- /// - /// It's the same as `execute`, we made it for people who prefer - /// `fetch()`. - /// - /// Prepare statement and cache it, then execute. - /// - /// # Errors - /// May return Err Result if cannot retrieve new connection - /// or prepare statement or execute statement. - #[pyo3(signature = (querystring, parameters=None, prepared=None))] - pub async fn fetch<'a>( - self_: pyo3::Py, - querystring: String, - parameters: Option>, - prepared: Option, - ) -> RustPSQLDriverPyResult { - let db_pool = pyo3::Python::with_gil(|gil| self_.borrow(gil).pool.clone()); - - let db_pool_manager = tokio_runtime() - .spawn(async move { Ok::(db_pool.get().await?) }) - .await??; - let mut params: Vec = vec![]; - if let Some(parameters) = parameters { - params = convert_parameters(parameters)?; - } - let prepared = prepared.unwrap_or(true); - let result = if prepared { - tokio_runtime() - .spawn(async move { - db_pool_manager - .query( - &db_pool_manager.prepare_cached(&querystring).await?, - ¶ms - .iter() - .map(|param| param as &QueryParameter) - .collect::>(), - ) - .await - .map_err(|err| { - RustPSQLDriverError::ConnectionExecuteError(format!( - "Cannot execute statement from ConnectionPool, error - {err}" - )) - }) - }) - .await?? - } else { - tokio_runtime() - .spawn(async move { - db_pool_manager - .query( - &querystring, - ¶ms - .iter() - .map(|param| param as &QueryParameter) - .collect::>(), - ) - .await - .map_err(|err| { - RustPSQLDriverError::ConnectionExecuteError(format!( - "Cannot execute statement from ConnectionPool, error - {err}" - )) - }) - }) - .await?? - }; - Ok(PSQLDriverPyQueryResult::new(result)) - } - #[must_use] pub fn acquire(&self) -> Connection { Connection::new(None, Some(self.pool.clone()), self.pg_config.clone()) From 444464f1afe1377b1319dfd0fac3eaca988ce7a1 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Sun, 9 Feb 2025 01:25:06 +0100 Subject: [PATCH 08/65] Prepared psqlpy for OTLP Signed-off-by: chandr-andr (Kiselev Aleksandr) --- python/tests/test_binary_copy.py | 9 +- python/tests/test_listener.py | 16 +- python/tests/test_transaction.py | 26 ++-- python/tests/test_value_converter.py | 218 ++++++++++++++------------- 4 files changed, 147 insertions(+), 122 deletions(-) diff --git a/python/tests/test_binary_copy.py b/python/tests/test_binary_copy.py index a42671a8..3dcfe678 100644 --- a/python/tests/test_binary_copy.py +++ b/python/tests/test_binary_copy.py @@ -47,11 +47,10 @@ async def test_binary_copy_to_table_in_connection( buf.write(encoder.finish()) buf.seek(0) - async with psql_pool.acquire() as connection: - inserted_rows = await connection.binary_copy_to_table( - source=buf, - table_name=table_name, - ) + inserted_rows = await connection.binary_copy_to_table( + source=buf, + table_name=table_name, + ) expected_inserted_row: typing.Final = 32 diff --git a/python/tests/test_listener.py b/python/tests/test_listener.py index a2606e5a..c48c8974 100644 --- a/python/tests/test_listener.py +++ b/python/tests/test_listener.py @@ -62,7 +62,9 @@ async def notify( if with_delay: await asyncio.sleep(0.5) - await (await psql_pool.connection()).execute(f"NOTIFY {channel}, '{TEST_PAYLOAD}'") + connection = await psql_pool.connection() + await connection.execute(f"NOTIFY {channel}, '{TEST_PAYLOAD}'") + connection.back_to_pool() async def check_insert_callback( @@ -71,8 +73,9 @@ async def check_insert_callback( is_insert_exist: bool = True, number_of_data: int = 1, ) -> None: + connection = await psql_pool.connection() test_data_seq = ( - await (await 
psql_pool.connection()).execute( + await connection.execute( f"SELECT * FROM {listener_table_name}", ) ).result() @@ -88,14 +91,18 @@ async def check_insert_callback( assert data_record["payload"] == TEST_PAYLOAD assert data_record["channel"] == TEST_CHANNEL + connection.back_to_pool() + async def clear_test_table( psql_pool: ConnectionPool, listener_table_name: str, ) -> None: - await (await psql_pool.connection()).execute( + connection = await psql_pool.connection() + await connection.execute( f"DELETE FROM {listener_table_name}", ) + connection.back_to_pool() @pytest.mark.usefixtures("create_table_for_listener_tests") @@ -244,7 +251,8 @@ async def test_listener_more_than_one_callback( number_of_data=2, ) - query_result = await (await psql_pool.connection()).execute( + connection = await psql_pool.connection() + query_result = await connection.execute( querystring=(f"SELECT * FROM {listener_table_name} WHERE channel = $1"), parameters=(additional_channel,), ) diff --git a/python/tests/test_transaction.py b/python/tests/test_transaction.py index c597fb5d..151f5bb5 100644 --- a/python/tests/test_transaction.py +++ b/python/tests/test_transaction.py @@ -39,8 +39,7 @@ async def test_transaction_init_parameters( deferrable: bool | None, read_variant: ReadVariant | None, ) -> None: - connection = await psql_pool.connection() - async with connection.transaction( + async with psql_pool.acquire() as connection, connection.transaction( isolation_level=isolation_level, deferrable=deferrable, read_variant=read_variant, @@ -79,6 +78,8 @@ async def test_transaction_begin( assert len(result.result()) == number_database_records + await transaction.commit() + async def test_transaction_commit( psql_pool: ConnectionPool, @@ -97,7 +98,8 @@ async def test_transaction_commit( # Make request from other connection, it mustn't know # about new INSERT data before commit. 
- result = await (await psql_pool.connection()).execute( + connection = await psql_pool.connection() + result = await connection.execute( f"SELECT * FROM {table_name} WHERE name = $1", parameters=[test_name], ) @@ -105,7 +107,7 @@ async def test_transaction_commit( await transaction.commit() - result = await (await psql_pool.connection()).execute( + result = await connection.execute( f"SELECT * FROM {table_name} WHERE name = $1", parameters=[test_name], ) @@ -136,7 +138,8 @@ async def test_transaction_savepoint( assert result.result() await transaction.rollback_savepoint(savepoint_name=savepoint_name) - result = await (await psql_pool.connection()).execute( + connection = await psql_pool.connection() + result = await connection.execute( f"SELECT * FROM {table_name} WHERE name = $1", parameters=[test_name], ) @@ -174,10 +177,12 @@ async def test_transaction_rollback( parameters=[test_name], ) - result_from_conn = await (await psql_pool.connection()).execute( + connection = await psql_pool.connection() + result_from_conn = await connection.execute( f"INSERT INTO {table_name} VALUES ($1, $2)", parameters=[100, test_name], ) + connection.back_to_pool() assert not (result_from_conn.result()) @@ -344,14 +349,17 @@ async def test_transaction_send_underlying_connection_to_pool_manually( async def test_execute_batch_method(psql_pool: ConnectionPool) -> None: """Test `execute_batch` method.""" - await (await psql_pool.connection()).execute(querystring="DROP TABLE IF EXISTS execute_batch") - await (await psql_pool.connection()).execute(querystring="DROP TABLE IF EXISTS execute_batch2") + connection = await psql_pool.connection() + await connection.execute(querystring="DROP TABLE IF EXISTS execute_batch") + await connection.execute(querystring="DROP TABLE IF EXISTS execute_batch2") query = "CREATE TABLE execute_batch (name VARCHAR);CREATE TABLE execute_batch2 (name VARCHAR);" - async with psql_pool.acquire() as conn, conn.transaction() as transaction: + async with connection.transaction() as transaction: await transaction.execute_batch(querystring=query) await transaction.execute(querystring="SELECT * FROM execute_batch") await transaction.execute(querystring="SELECT * FROM execute_batch2") + connection.back_to_pool() + @pytest.mark.parametrize( "synchronous_commit", diff --git a/python/tests/test_value_converter.py b/python/tests/test_value_converter.py index da0fcb78..de62c554 100644 --- a/python/tests/test_value_converter.py +++ b/python/tests/test_value_converter.py @@ -103,7 +103,8 @@ async def test_as_class( number_database_records: int, ) -> None: """Test `as_class()` method.""" - select_result = await (await psql_pool.connection()).execute( + connection = await psql_pool.connection() + select_result = await connection.execute( f"SELECT * FROM {table_name}", ) @@ -649,20 +650,21 @@ async def test_deserialization_simple_into_python( expected_deserialized: Any, ) -> None: """Test how types can cast from Python and to Python.""" - await (await psql_pool.connection()).execute("DROP TABLE IF EXISTS for_test") + connection = await psql_pool.connection() + await connection.execute("DROP TABLE IF EXISTS for_test") create_table_query = f""" CREATE TABLE for_test (test_field {postgres_type}) """ insert_data_query = """ INSERT INTO for_test VALUES ($1) """ - await (await psql_pool.connection()).execute(querystring=create_table_query) - await (await psql_pool.connection()).execute( + await connection.execute(querystring=create_table_query) + await connection.execute( querystring=insert_data_query, 
parameters=[py_value], ) - raw_result = await (await psql_pool.connection()).execute( + raw_result = await connection.execute( querystring="SELECT test_field FROM for_test", ) @@ -673,12 +675,13 @@ async def test_deserialization_composite_into_python( psql_pool: ConnectionPool, ) -> None: """Test that it's possible to deserialize custom postgresql type.""" - await (await psql_pool.connection()).execute("DROP TABLE IF EXISTS for_test") - await (await psql_pool.connection()).execute("DROP TYPE IF EXISTS all_types") - await (await psql_pool.connection()).execute("DROP TYPE IF EXISTS inner_type") - await (await psql_pool.connection()).execute("DROP TYPE IF EXISTS enum_type") - await (await psql_pool.connection()).execute("CREATE TYPE enum_type AS ENUM ('sad', 'ok', 'happy')") - await (await psql_pool.connection()).execute("CREATE TYPE inner_type AS (inner_value VARCHAR, some_enum enum_type)") + connection = await psql_pool.connection() + await connection.execute("DROP TABLE IF EXISTS for_test") + await connection.execute("DROP TYPE IF EXISTS all_types") + await connection.execute("DROP TYPE IF EXISTS inner_type") + await connection.execute("DROP TYPE IF EXISTS enum_type") + await connection.execute("CREATE TYPE enum_type AS ENUM ('sad', 'ok', 'happy')") + await connection.execute("CREATE TYPE inner_type AS (inner_value VARCHAR, some_enum enum_type)") create_type_query = """ CREATE type all_types AS ( bytea_ BYTEA, @@ -736,10 +739,10 @@ async def test_deserialization_composite_into_python( CREATE table for_test (custom_type all_types) """ - await (await psql_pool.connection()).execute( + await connection.execute( querystring=create_type_query, ) - await (await psql_pool.connection()).execute( + await connection.execute( querystring=create_table_query, ) @@ -752,7 +755,7 @@ class TestEnum(Enum): row_values += ", ROW($41, $42), " row_values += ", ".join([f"${index}" for index in range(43, 50)]) - await (await psql_pool.connection()).execute( + await connection.execute( querystring=f"INSERT INTO for_test VALUES (ROW({row_values}))", parameters=[ b"Bytes", @@ -914,7 +917,7 @@ class ValidateModelForCustomType(BaseModel): class TopLevelModel(BaseModel): custom_type: ValidateModelForCustomType - query_result = await (await psql_pool.connection()).execute( + query_result = await connection.execute( "SELECT custom_type FROM for_test", ) @@ -938,21 +941,22 @@ class TestStrEnum(str, Enum): SAD = "sad" HAPPY = "happy" - await (await psql_pool.connection()).execute("DROP TABLE IF EXISTS for_test") - await (await psql_pool.connection()).execute("DROP TYPE IF EXISTS mood") - await (await psql_pool.connection()).execute( + connection = await psql_pool.connection() + await connection.execute("DROP TABLE IF EXISTS for_test") + await connection.execute("DROP TYPE IF EXISTS mood") + await connection.execute( "CREATE TYPE mood AS ENUM ('sad', 'ok', 'happy')", ) - await (await psql_pool.connection()).execute( + await connection.execute( "CREATE TABLE for_test (test_mood mood, test_mood2 mood)", ) - await (await psql_pool.connection()).execute( + await connection.execute( querystring="INSERT INTO for_test VALUES ($1, $2)", parameters=[TestEnum.HAPPY, TestEnum.OK], ) - qs_result = await (await psql_pool.connection()).execute( + qs_result = await connection.execute( "SELECT * FROM for_test", ) assert qs_result.result()[0]["test_mood"] == TestEnum.HAPPY.value @@ -964,17 +968,18 @@ async def test_custom_type_as_parameter( psql_pool: ConnectionPool, ) -> None: """Tests that we can use `PyCustomType`.""" - await (await 
psql_pool.connection()).execute("DROP TABLE IF EXISTS for_test") - await (await psql_pool.connection()).execute( + connection = await psql_pool.connection() + await connection.execute("DROP TABLE IF EXISTS for_test") + await connection.execute( "CREATE TABLE for_test (nickname VARCHAR)", ) - await (await psql_pool.connection()).execute( + await connection.execute( querystring="INSERT INTO for_test VALUES ($1)", parameters=[CustomType(b"Some Real Nickname")], ) - qs_result = await (await psql_pool.connection()).execute( + qs_result = await connection.execute( "SELECT * FROM for_test", ) @@ -985,28 +990,29 @@ async def test_custom_type_as_parameter( async def test_custom_decoder( psql_pool: ConnectionPool, ) -> None: - await (await psql_pool.connection()).execute("DROP TABLE IF EXISTS for_test") - await (await psql_pool.connection()).execute( - "CREATE TABLE for_test (geo_point POINT)", - ) - - await (await psql_pool.connection()).execute( - "INSERT INTO for_test VALUES ('(1, 1)')", - ) - def point_encoder(point_bytes: bytes) -> str: # noqa: ARG001 return "Just An Example" - qs_result = await (await psql_pool.connection()).execute( - "SELECT * FROM for_test", - ) - result = qs_result.result( - custom_decoders={ - "geo_point": point_encoder, - }, - ) + async with psql_pool.acquire() as conn: + await conn.execute("DROP TABLE IF EXISTS for_test") + await conn.execute( + "CREATE TABLE for_test (geo_point POINT)", + ) - assert result[0]["geo_point"] == "Just An Example" + await conn.execute( + "INSERT INTO for_test VALUES ('(1, 1)')", + ) + + qs_result = await conn.execute( + "SELECT * FROM for_test", + ) + result = qs_result.result( + custom_decoders={ + "geo_point": point_encoder, + }, + ) + + assert result[0]["geo_point"] == "Just An Example" async def test_row_factory_query_result( @@ -1014,77 +1020,80 @@ async def test_row_factory_query_result( table_name: str, number_database_records: int, ) -> None: - select_result = await (await psql_pool.connection()).execute( - f"SELECT * FROM {table_name}", - ) + async with psql_pool.acquire() as conn: + select_result = await conn.execute( + f"SELECT * FROM {table_name}", + ) - def row_factory(db_result: Dict[str, Any]) -> List[str]: - return list(db_result.keys()) + def row_factory(db_result: Dict[str, Any]) -> List[str]: + return list(db_result.keys()) - as_row_factory = select_result.row_factory( - row_factory=row_factory, - ) - assert len(as_row_factory) == number_database_records + as_row_factory = select_result.row_factory( + row_factory=row_factory, + ) + assert len(as_row_factory) == number_database_records - assert isinstance(as_row_factory[0], list) + assert isinstance(as_row_factory[0], list) async def test_row_factory_single_query_result( psql_pool: ConnectionPool, table_name: str, ) -> None: - connection = await psql_pool.connection() - select_result = await connection.fetch_row( - f"SELECT * FROM {table_name} LIMIT 1", - ) + async with psql_pool.acquire() as conn: + select_result = await conn.fetch_row( + f"SELECT * FROM {table_name} LIMIT 1", + ) - def row_factory(db_result: Dict[str, Any]) -> List[str]: - return list(db_result.keys()) + def row_factory(db_result: Dict[str, Any]) -> List[str]: + return list(db_result.keys()) - as_row_factory = select_result.row_factory( - row_factory=row_factory, - ) - expected_number_of_elements_in_result = 2 - assert len(as_row_factory) == expected_number_of_elements_in_result + as_row_factory = select_result.row_factory( + row_factory=row_factory, + ) + expected_number_of_elements_in_result = 2 + 
assert len(as_row_factory) == expected_number_of_elements_in_result - assert isinstance(as_row_factory, list) + assert isinstance(as_row_factory, list) async def test_incorrect_dimensions_array( psql_pool: ConnectionPool, ) -> None: - await (await psql_pool.connection()).execute("DROP TABLE IF EXISTS test_marr") - await (await psql_pool.connection()).execute("CREATE TABLE test_marr (var_array VARCHAR ARRAY)") - - with pytest.raises(expected_exception=PyToRustValueMappingError): - await (await psql_pool.connection()).execute( - querystring="INSERT INTO test_marr VALUES ($1)", - parameters=[ - [ - ["Len", "is", "Three"], - ["Len", "is", "Four", "Wow"], + async with psql_pool.acquire() as conn: + await conn.execute("DROP TABLE IF EXISTS test_marr") + await conn.execute("CREATE TABLE test_marr (var_array VARCHAR ARRAY)") + + with pytest.raises(expected_exception=PyToRustValueMappingError): + await conn.execute( + querystring="INSERT INTO test_marr VALUES ($1)", + parameters=[ + [ + ["Len", "is", "Three"], + ["Len", "is", "Four", "Wow"], + ], ], - ], - ) + ) async def test_empty_array( psql_pool: ConnectionPool, ) -> None: - await (await psql_pool.connection()).execute("DROP TABLE IF EXISTS test_earr") - await (await psql_pool.connection()).execute( - "CREATE TABLE test_earr (id serial NOT NULL PRIMARY KEY, e_array text[] NOT NULL DEFAULT array[]::text[])", - ) + async with psql_pool.acquire() as conn: + await conn.execute("DROP TABLE IF EXISTS test_earr") + await conn.execute( + "CREATE TABLE test_earr (id serial NOT NULL PRIMARY KEY, e_array text[] NOT NULL DEFAULT array[]::text[])", + ) - await (await psql_pool.connection()).execute("INSERT INTO test_earr(id) VALUES(2);") + await conn.execute("INSERT INTO test_earr(id) VALUES(2);") - res = await (await psql_pool.connection()).execute( - "SELECT * FROM test_earr WHERE id = 2", - ) + res = await conn.execute( + "SELECT * FROM test_earr WHERE id = 2", + ) - json_result = res.result() - assert json_result - assert not json_result[0]["e_array"] + json_result = res.result() + assert json_result + assert not json_result[0]["e_array"] @pytest.mark.parametrize( @@ -1557,21 +1566,22 @@ async def test_array_types( py_value: Any, expected_deserialized: Any, ) -> None: - await (await psql_pool.connection()).execute("DROP TABLE IF EXISTS for_test") - create_table_query = f""" - CREATE TABLE for_test (test_field {postgres_type}) - """ - insert_data_query = """ - INSERT INTO for_test VALUES ($1) - """ - await (await psql_pool.connection()).execute(querystring=create_table_query) - await (await psql_pool.connection()).execute( - querystring=insert_data_query, - parameters=[py_value], - ) + async with psql_pool.acquire() as conn: + await conn.execute("DROP TABLE IF EXISTS for_test") + create_table_query = f""" + CREATE TABLE for_test (test_field {postgres_type}) + """ + insert_data_query = """ + INSERT INTO for_test VALUES ($1) + """ + await conn.execute(querystring=create_table_query) + await conn.execute( + querystring=insert_data_query, + parameters=[py_value], + ) - raw_result = await (await psql_pool.connection()).execute( - querystring="SELECT test_field FROM for_test", - ) + raw_result = await conn.execute( + querystring="SELECT test_field FROM for_test", + ) - assert raw_result.result()[0]["test_field"] == expected_deserialized + assert raw_result.result()[0]["test_field"] == expected_deserialized From b77ce55b428791f436f8f01a4201ae22c1119bb5 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Sun, 9 Feb 2025 01:46:08 +0100 Subject: 
[PATCH 09/65] Docs update Signed-off-by: chandr-andr (Kiselev Aleksandr) --- docs/.vuepress/sidebar.ts | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/docs/.vuepress/sidebar.ts b/docs/.vuepress/sidebar.ts index 133833f7..94b082f9 100644 --- a/docs/.vuepress/sidebar.ts +++ b/docs/.vuepress/sidebar.ts @@ -67,6 +67,15 @@ export default sidebar({ }, ], }, + { + text: "Integrations", + prefix: "/integrations", + collapsible: true, + children: [ + "taskiq", + "opentelemetry", + ], + }, { text: "Contribution guide", prefix: "/contribution_guide", From 530c06436a830b345d4587a6b1e7709329d0f782 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Sun, 9 Feb 2025 01:49:18 +0100 Subject: [PATCH 10/65] Updated jobs Signed-off-by: chandr-andr (Kiselev Aleksandr) --- .github/workflows/release.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 077354e8..f7db1a98 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -43,7 +43,7 @@ jobs: apt update -y && apt-get install -y libssl-dev openssl pkg-config fi - name: Upload wheels - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: wheels path: dist @@ -73,7 +73,7 @@ jobs: args: --release --out dist -i 3.9 3.10 3.11 3.12 3.13 sccache: 'true' - name: Upload wheels - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: wheels path: dist @@ -113,7 +113,7 @@ jobs: args: --release --out dist -i 3.9 3.10 3.11 3.12 3.13 pypy3.9 pypy3.10 sccache: 'true' - name: Upload wheels - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: wheels path: dist @@ -135,7 +135,7 @@ jobs: command: sdist args: --out dist - name: Upload sdist - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: wheels path: dist @@ -167,7 +167,7 @@ jobs: args: --release --out dist -i 3.9 3.10 3.11 3.12 3.13 pypy3.9 pypy3.10 manylinux: musllinux_1_2 - name: Upload wheels - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: wheels path: dist From 617da8b4d9a1e133568d81eea9edd7e9d92f40d2 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Sun, 9 Feb 2025 03:04:57 +0100 Subject: [PATCH 11/65] Updated jobs Signed-off-by: chandr-andr (Kiselev Aleksandr) --- .github/workflows/release.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index f7db1a98..66d0445a 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -47,6 +47,7 @@ jobs: with: name: wheels path: dist + overwrite: true - name: Releasing assets uses: softprops/action-gh-release@v1 with: @@ -77,6 +78,7 @@ jobs: with: name: wheels path: dist + overwrite: true - name: Releasing assets uses: softprops/action-gh-release@v1 with: @@ -117,6 +119,7 @@ jobs: with: name: wheels path: dist + overwrite: true - name: Releasing assets uses: softprops/action-gh-release@v1 with: @@ -139,6 +142,7 @@ jobs: with: name: wheels path: dist + overwrite: true - name: Releasing assets uses: softprops/action-gh-release@v1 with: @@ -171,6 +175,7 @@ jobs: with: name: wheels path: dist + overwrite: true - name: Releasing assets uses: softprops/action-gh-release@v1 with: From e52ba3d3742e7e043434135f7e146c2ab1453cd6 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Sun, 9 Feb 2025 14:11:29 +0100 Subject: [PATCH 12/65] Split getter to unix and not unix 
systems Signed-off-by: chandr-andr (Kiselev Aleksandr) --- src/driver/connection.rs | 19 +++++++++++++++++++ src/driver/cursor.rs | 19 +++++++++++++++++++ src/driver/transaction.rs | 19 +++++++++++++++++++ 3 files changed, 57 insertions(+) diff --git a/src/driver/connection.rs b/src/driver/connection.rs index 573a1348..3c0595bb 100644 --- a/src/driver/connection.rs +++ b/src/driver/connection.rs @@ -89,6 +89,7 @@ impl Connection { host_addrs_vec } + #[cfg(unix)] #[getter] fn hosts(&self) -> Vec { let mut hosts_vec = vec![]; @@ -108,6 +109,24 @@ impl Connection { hosts_vec } + #[cfg(not(unix))] + #[getter] + fn hosts(&self) -> Vec { + let mut hosts_vec = vec![]; + + let hosts = self.pg_config.get_hosts(); + for host in hosts { + match host { + Host::Tcp(host) => { + hosts_vec.push(host.to_string()); + } + _ => unreachable!(), + } + } + + hosts_vec + } + #[getter] fn ports(&self) -> Vec<&u16> { return self.pg_config.get_ports().iter().collect::>(); diff --git a/src/driver/cursor.rs b/src/driver/cursor.rs index 5ad4691d..f391d1c1 100644 --- a/src/driver/cursor.rs +++ b/src/driver/cursor.rs @@ -160,6 +160,7 @@ impl Cursor { host_addrs_vec } + #[cfg(unix)] #[getter] fn hosts(&self) -> Vec { let mut hosts_vec = vec![]; @@ -179,6 +180,24 @@ impl Cursor { hosts_vec } + #[cfg(not(unix))] + #[getter] + fn hosts(&self) -> Vec { + let mut hosts_vec = vec![]; + + let hosts = self.pg_config.get_hosts(); + for host in hosts { + match host { + Host::Tcp(host) => { + hosts_vec.push(host.to_string()); + } + _ => unreachable!(), + } + } + + hosts_vec + } + #[getter] fn ports(&self) -> Vec<&u16> { return self.pg_config.get_ports().iter().collect::>(); diff --git a/src/driver/transaction.rs b/src/driver/transaction.rs index 15567f2b..2fa38ba5 100644 --- a/src/driver/transaction.rs +++ b/src/driver/transaction.rs @@ -190,6 +190,7 @@ impl Transaction { host_addrs_vec } + #[cfg(unix)] #[getter] fn hosts(&self) -> Vec { let mut hosts_vec = vec![]; @@ -209,6 +210,24 @@ impl Transaction { hosts_vec } + #[cfg(not(unix))] + #[getter] + fn hosts(&self) -> Vec { + let mut hosts_vec = vec![]; + + let hosts = self.pg_config.get_hosts(); + for host in hosts { + match host { + Host::Tcp(host) => { + hosts_vec.push(host.to_string()); + } + _ => unreachable!(), + } + } + + hosts_vec + } + #[getter] fn ports(&self) -> Vec<&u16> { return self.pg_config.get_ports().iter().collect::>(); From 69156e8a5908c63b0416cd3a4a0967fa79790c35 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Sun, 9 Feb 2025 14:37:37 +0100 Subject: [PATCH 13/65] Changed jobs Signed-off-by: chandr-andr (Kiselev Aleksandr) --- .github/workflows/release.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 66d0445a..a87e03e4 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -189,9 +189,10 @@ jobs: runs-on: ubuntu-latest needs: [linux, windows, macos, musllinux, sdist] steps: - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4 with: - name: wheels + pattern: wheel* + merge-multiple: true - name: Publish to PyPI uses: PyO3/maturin-action@v1 env: From 2bcfc0c330132b477f85eeb3d05793a9f966f3ca Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Sun, 9 Feb 2025 14:56:49 +0100 Subject: [PATCH 14/65] Bumped version to 0.9.1 Signed-off-by: chandr-andr (Kiselev Aleksandr) --- .github/workflows/release.yml | 12 ++++++------ Cargo.lock | 2 +- Cargo.toml | 2 +- 3 files changed, 8 
insertions(+), 8 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index a87e03e4..f7260aed 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -45,7 +45,7 @@ jobs: - name: Upload wheels uses: actions/upload-artifact@v4 with: - name: wheels + name: wheels-linux path: dist overwrite: true - name: Releasing assets @@ -76,7 +76,7 @@ jobs: - name: Upload wheels uses: actions/upload-artifact@v4 with: - name: wheels + name: wheels-windows path: dist overwrite: true - name: Releasing assets @@ -117,7 +117,7 @@ jobs: - name: Upload wheels uses: actions/upload-artifact@v4 with: - name: wheels + name: wheels-macos path: dist overwrite: true - name: Releasing assets @@ -140,7 +140,7 @@ jobs: - name: Upload sdist uses: actions/upload-artifact@v4 with: - name: wheels + name: wheels-sdist path: dist overwrite: true - name: Releasing assets @@ -173,7 +173,7 @@ jobs: - name: Upload wheels uses: actions/upload-artifact@v4 with: - name: wheels + name: wheels-musllinux path: dist overwrite: true - name: Releasing assets @@ -191,7 +191,7 @@ jobs: steps: - uses: actions/download-artifact@v4 with: - pattern: wheel* + pattern: wheels-* merge-multiple: true - name: Publish to PyPI uses: PyO3/maturin-action@v1 diff --git a/Cargo.lock b/Cargo.lock index abf26770..8ef797d6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -997,7 +997,7 @@ dependencies = [ [[package]] name = "psqlpy" -version = "0.9.0" +version = "0.9.1" dependencies = [ "byteorder", "bytes", diff --git a/Cargo.toml b/Cargo.toml index 5e7743de..b30cc53c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "psqlpy" -version = "0.9.0" +version = "0.9.1" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html From cd73b48b86a86c6f49b3d9ebc2b2407ee98bc4d9 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Mon, 10 Feb 2025 15:29:41 +0100 Subject: [PATCH 15/65] Fixed job Signed-off-by: chandr-andr (Kiselev Aleksandr) --- .github/workflows/release.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index f7260aed..0a53f792 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -45,7 +45,7 @@ jobs: - name: Upload wheels uses: actions/upload-artifact@v4 with: - name: wheels-linux + name: wheels-linux-${{ matrix.target }} path: dist overwrite: true - name: Releasing assets @@ -76,7 +76,7 @@ jobs: - name: Upload wheels uses: actions/upload-artifact@v4 with: - name: wheels-windows + name: wheels-windows-${{ matrix.target }} path: dist overwrite: true - name: Releasing assets @@ -117,7 +117,7 @@ jobs: - name: Upload wheels uses: actions/upload-artifact@v4 with: - name: wheels-macos + name: wheels-macos-${{ matrix.target }} path: dist overwrite: true - name: Releasing assets @@ -140,7 +140,7 @@ jobs: - name: Upload sdist uses: actions/upload-artifact@v4 with: - name: wheels-sdist + name: wheels-sdist-${{ matrix.target }} path: dist overwrite: true - name: Releasing assets @@ -173,7 +173,7 @@ jobs: - name: Upload wheels uses: actions/upload-artifact@v4 with: - name: wheels-musllinux + name: wheels-musllinux-${{ matrix.target }} path: dist overwrite: true - name: Releasing assets From 75e78c1a14beb5ed89d8f9a6028c747651344554 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Mon, 10 Feb 2025 15:47:20 +0100 Subject: [PATCH 16/65] Bumped version to 0.9.2 Signed-off-by: 
chandr-andr (Kiselev Aleksandr) --- Cargo.lock | 2 +- Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8ef797d6..85f93be3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -997,7 +997,7 @@ dependencies = [ [[package]] name = "psqlpy" -version = "0.9.1" +version = "0.9.2" dependencies = [ "byteorder", "bytes", diff --git a/Cargo.toml b/Cargo.toml index b30cc53c..710c59cf 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "psqlpy" -version = "0.9.1" +version = "0.9.2" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html From 414c816ffbbcf2a12fa6b05f2c7cba658682571d Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Sat, 22 Feb 2025 16:35:38 +0100 Subject: [PATCH 17/65] Added support for named parameters Signed-off-by: chandr-andr (Kiselev Aleksandr) --- Cargo.lock | 18 ++-- Cargo.toml | 2 + docs/.vuepress/sidebar.ts | 1 + docs/usage/parameters.md | 42 +++++++++ pyproject.toml | 8 +- python/psqlpy/_internal/__init__.pyi | 41 +++++---- python/psqlpy/_internal/extra_types.pyi | 18 +++- python/tests/conftest.py | 22 +++++ python/tests/test_connection.py | 5 +- python/tests/test_kwargs_parameters.py | 73 +++++++++++++++ python/tests/test_transaction.py | 9 +- python/tests/test_value_converter.py | 76 +++++++++++++--- src/driver/inner_connection.rs | 88 ++++++++++++------ src/value_converter.rs | 113 ++++++++++++++++++++---- 14 files changed, 426 insertions(+), 90 deletions(-) create mode 100644 docs/usage/parameters.md create mode 100644 python/tests/test_kwargs_parameters.py diff --git a/Cargo.lock b/Cargo.lock index 85f93be3..35713346 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -707,9 +707,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.19.0" +version = "1.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" +checksum = "945462a4b81e43c4e3ba96bd7b49d834c6f61198356aa858733bc4acf3cbe62e" [[package]] name = "openssl" @@ -1010,6 +1010,7 @@ dependencies = [ "geo-types", "itertools", "macaddr", + "once_cell", "openssl", "openssl-src", "openssl-sys", @@ -1021,6 +1022,7 @@ dependencies = [ "postgres_array", "pyo3", "pyo3-async-runtimes", + "regex", "rust_decimal 1.36.0", "serde", "serde_json", @@ -1192,9 +1194,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.10.6" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619" +checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" dependencies = [ "aho-corasick", "memchr", @@ -1204,9 +1206,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.7" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" +checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" dependencies = [ "aho-corasick", "memchr", @@ -1215,9 +1217,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.8.4" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b" +checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "rend" diff --git a/Cargo.toml b/Cargo.toml index 
710c59cf..93a46f66 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -54,3 +54,5 @@ pgvector = { git = "https://github.com/chandr-andr/pgvector-rust.git", branch =
 ] }
 futures-channel = "0.3.31"
 futures = "0.3.31"
+regex = "1.11.1"
+once_cell = "1.20.3"
diff --git a/docs/.vuepress/sidebar.ts b/docs/.vuepress/sidebar.ts
index 94b082f9..d3afac19 100644
--- a/docs/.vuepress/sidebar.ts
+++ b/docs/.vuepress/sidebar.ts
@@ -33,6 +33,7 @@ export default sidebar({
       prefix: "usage/",
       collapsible: true,
       children: [
+        "parameters",
         {
           text: "Types",
           prefix: "types/",
diff --git a/docs/usage/parameters.md b/docs/usage/parameters.md
new file mode 100644
index 00000000..596fbbc5
--- /dev/null
+++ b/docs/usage/parameters.md
@@ -0,0 +1,42 @@
+---
+title: Passing parameters to SQL queries
+---
+
+We support two variants of passing parameters to SQL queries.
+
+::: tabs
+@tab Parameters sequence
+
+You can pass parameters as a Python Sequence.
+
+Placeholders in the querystring must be marked as `$1`, `$2` and so on,
+depending on how many parameters you have.
+
+```python
+async def main():
+    ...
+
+    await connection.execute(
+        querystring="SELECT * FROM users WHERE id = $1",
+        parameters=(101,),
+    )
+```
+
+@tab Parameters mapping
+
+If you prefer to use named arguments, we support that too.
+Placeholders in the querystring must look like `$(parameter)p`.
+
+If a parameter appears in the querystring but is not passed, an exception will be raised.
+
+```python
+async def main():
+    ...
+
+    await connection.execute(
+        querystring="SELECT * FROM users WHERE id = $(user_id)p",
+        parameters=dict(user_id=101),
+    )
+```
+
+:::
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
index bc906612..5a61fd61 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -38,6 +38,12 @@ maintainers = [{ name = "Kiselev Aleksandr", email = "askiselev00@gmail.com" }]
 description = "Async PostgreSQL driver for Python written in Rust"
 dynamic = ["version"]
 
+# [tool.poetry.group.querybuilder]
+# optional = true
+
+# [tool.poetry.group.querybuilder.dependencies]
+# pypika = "*"
+
 [tool.maturin]
 python-source = "python"
 module-name = "psqlpy._internal"
@@ -64,7 +70,7 @@ warn_unused_ignores = false
 [tool.ruff]
 fix = true
 unsafe-fixes = true
-line-length = 120
+line-length = 89
 exclude = [".venv/", "psqlpy-stress"]
 
 [tool.ruff.format]
diff --git a/python/psqlpy/_internal/__init__.pyi b/python/psqlpy/_internal/__init__.pyi
index 77ec440d..8c391d96 100644
--- a/python/psqlpy/_internal/__init__.pyi
+++ b/python/psqlpy/_internal/__init__.pyi
@@ -2,9 +2,9 @@ import types
 from enum import Enum
 from io import BytesIO
 from ipaddress import IPv4Address, IPv6Address
-from typing import Any, Awaitable, Callable, Sequence, TypeVar
+from typing import Any, Awaitable, Callable, Mapping, Sequence, TypeVar
 
-from typing_extensions import Buffer, Self
+from typing_extensions import Buffer, Self, TypeAlias
 
 _CustomClass = TypeVar(
     "_CustomClass",
@@ -13,6 +13,8 @@ _RowFactoryRV = TypeVar(
     "_RowFactoryRV",
 )
 
+ParamsT: TypeAlias = Sequence[Any] | Mapping[str, Any] | None
+
 class QueryResult:
     """Result."""
 
@@ -150,7 +152,7 @@ class SingleQueryResult:
 
 class SynchronousCommit(Enum):
     """
-    Class for synchronous_commit option for transactions.
+    Synchronous_commit option for transactions.
### Variants: - `On`: The meaning may change based on whether you have @@ -181,7 +183,7 @@ class SynchronousCommit(Enum): RemoteApply = 5 class IsolationLevel(Enum): - """Class for Isolation Level for transactions.""" + """Isolation Level for transactions.""" ReadUncommitted = 1 ReadCommitted = 2 @@ -290,7 +292,7 @@ class Cursor: cursor_name: str querystring: str - parameters: Sequence[Any] + parameters: ParamsT = None prepared: bool | None conn_dbname: str | None user: str | None @@ -464,7 +466,7 @@ class Transaction: async def execute( self: Self, querystring: str, - parameters: Sequence[Any] | None = None, + parameters: ParamsT = None, prepared: bool = True, ) -> QueryResult: """Execute the query. @@ -554,7 +556,7 @@ class Transaction: async def fetch( self: Self, querystring: str, - parameters: Sequence[Any] | None = None, + parameters: ParamsT = None, prepared: bool = True, ) -> QueryResult: """Fetch the result from database. @@ -574,7 +576,7 @@ class Transaction: async def fetch_row( self: Self, querystring: str, - parameters: Sequence[Any] | None = None, + parameters: ParamsT = None, prepared: bool = True, ) -> SingleQueryResult: """Fetch exaclty single row from query. @@ -613,7 +615,7 @@ class Transaction: async def fetch_val( self: Self, querystring: str, - parameters: Sequence[Any] | None = None, + parameters: ParamsT = None, prepared: bool = True, ) -> Any | None: """Execute the query and return first value of the first row. @@ -814,7 +816,7 @@ class Transaction: def cursor( self: Self, querystring: str, - parameters: Sequence[Any] | None = None, + parameters: ParamsT = None, fetch_number: int | None = None, scroll: bool | None = None, prepared: bool = True, @@ -906,7 +908,7 @@ class Connection: async def execute( self: Self, querystring: str, - parameters: Sequence[Any] | None = None, + parameters: ParamsT = None, prepared: bool = True, ) -> QueryResult: """Execute the query. @@ -990,7 +992,7 @@ class Connection: async def fetch( self: Self, querystring: str, - parameters: Sequence[Any] | None = None, + parameters: ParamsT = None, prepared: bool = True, ) -> QueryResult: """Fetch the result from database. @@ -1010,7 +1012,7 @@ class Connection: async def fetch_row( self: Self, querystring: str, - parameters: Sequence[Any] | None = None, + parameters: ParamsT = None, prepared: bool = True, ) -> SingleQueryResult: """Fetch exaclty single row from query. @@ -1046,7 +1048,7 @@ class Connection: async def fetch_val( self: Self, querystring: str, - parameters: Sequence[Any] | None = None, + parameters: ParamsT = None, prepared: bool = True, ) -> Any: """Execute the query and return first value of the first row. @@ -1100,7 +1102,7 @@ class Connection: def cursor( self: Self, querystring: str, - parameters: Sequence[Any] | None = None, + parameters: ParamsT = None, fetch_number: int | None = None, scroll: bool | None = None, prepared: bool = True, @@ -1708,10 +1710,13 @@ class ConnectionPoolBuilder: self: Self, keepalives_retries: int, ) -> Self: - """ - Set the maximum number of TCP keepalive probes that will be sent before dropping a connection. + """Keepalives Retries. + + Set the maximum number of TCP keepalive probes + that will be sent before dropping a connection. - This is ignored for Unix domain sockets, or if the `keepalives` option is disabled. + This is ignored for Unix domain sockets, + or if the `keepalives` option is disabled. ### Parameters: - `keepalives_retries`: number of retries. 
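Worth pulling out of the stub diff above: `parameters` on `execute`, `fetch`, `fetch_row`, `fetch_val` and `cursor` is now annotated as `ParamsT` (`Sequence[Any] | Mapping[str, Any] | None`) instead of a plain optional sequence, so both placeholder styles type-check against the same methods. A minimal sketch of what that looks like from the caller's side, assuming a `users` table and an already-created pool (both invented for the example, not part of this patch):

```python
# Illustrative sketch only: the table, its columns and the surrounding
# application code are assumptions, not part of this patch.
from psqlpy import ConnectionPool, QueryResult


async def select_users(db_pool: ConnectionPool, min_age: int) -> QueryResult:
    async with db_pool.acquire() as conn:
        # A Mapping plus $(name)p placeholders (the new named style)...
        adults: QueryResult = await conn.execute(
            querystring="SELECT * FROM users WHERE age >= $(min_age)p",
            parameters={"min_age": min_age},
        )
        # ...or a Sequence plus $1, $2, ... placeholders (the existing style);
        # both now satisfy the ParamsT annotation.
        count = await conn.fetch_val(
            querystring="SELECT COUNT(*) FROM users WHERE age >= $1",
            parameters=(min_age,),
        )
        assert count == len(adults.result())
        return adults
```

Keeping the `None` default on `ParamsT` means existing calls that pass no parameters keep working unchanged.
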
diff --git a/python/psqlpy/_internal/extra_types.pyi b/python/psqlpy/_internal/extra_types.pyi index 93037639..e29c7573 100644 --- a/python/psqlpy/_internal/extra_types.pyi +++ b/python/psqlpy/_internal/extra_types.pyi @@ -143,9 +143,13 @@ class MacAddr8: class CustomType: def __init__(self, value: bytes) -> None: ... -Coordinates: TypeAlias = list[int | float] | set[int | float] | tuple[int | float, int | float] +Coordinates: TypeAlias = ( + list[int | float] | set[int | float] | tuple[int | float, int | float] +) PairsOfCoordinates: TypeAlias = ( - list[Coordinates | int | float] | set[Coordinates | int | float] | tuple[Coordinates | int | float, ...] + list[Coordinates | int | float] + | set[Coordinates | int | float] + | tuple[Coordinates | int | float, ...] ) class Point: @@ -227,7 +231,9 @@ class Circle: def __init__( self: Self, - value: list[int | float] | set[int | float] | tuple[int | float, int | float, int | float], + value: list[int | float] + | set[int | float] + | tuple[int | float, int | float, int | float], ) -> None: """Create new instance of Circle. @@ -374,7 +380,11 @@ class IpAddressArray: def __init__( self: Self, inner: typing.Sequence[ - IPv4Address | IPv6Address | typing.Sequence[IPv4Address] | typing.Sequence[IPv6Address] | typing.Any, + IPv4Address + | IPv6Address + | typing.Sequence[IPv4Address] + | typing.Sequence[IPv6Address] + | typing.Any, ], ) -> None: """Create new instance of IpAddressArray. diff --git a/python/tests/conftest.py b/python/tests/conftest.py index 4a388f62..30426e5f 100644 --- a/python/tests/conftest.py +++ b/python/tests/conftest.py @@ -73,6 +73,11 @@ def listener_table_name() -> str: return random_string() +@pytest.fixture +def map_parameters_table_name() -> str: + return random_string() + + @pytest.fixture def number_database_records() -> int: return random.randint(10, 35) @@ -161,6 +166,23 @@ async def create_table_for_listener_tests( ) +@pytest.fixture +async def create_table_for_map_parameters_test( + psql_pool: ConnectionPool, + map_parameters_table_name: str, +) -> AsyncGenerator[None, None]: + connection = await psql_pool.connection() + await connection.execute( + f"CREATE TABLE {map_parameters_table_name}" + "(id SERIAL, name VARCHAR(255),surname VARCHAR(255), age INT)", + ) + + yield + await connection.execute( + f"DROP TABLE {map_parameters_table_name}", + ) + + @pytest.fixture async def test_cursor( psql_pool: ConnectionPool, diff --git a/python/tests/test_connection.py b/python/tests/test_connection.py index 898cc405..7af208f2 100644 --- a/python/tests/test_connection.py +++ b/python/tests/test_connection.py @@ -183,7 +183,10 @@ async def test_execute_batch_method(psql_pool: ConnectionPool) -> None: connection = await psql_pool.connection() await connection.execute(querystring="DROP TABLE IF EXISTS execute_batch") await connection.execute(querystring="DROP TABLE IF EXISTS execute_batch2") - query = "CREATE TABLE execute_batch (name VARCHAR);CREATE TABLE execute_batch2 (name VARCHAR);" + query = ( + "CREATE TABLE execute_batch (name VARCHAR);" + "CREATE TABLE execute_batch2 (name VARCHAR);" + ) async with psql_pool.acquire() as conn: await conn.execute_batch(querystring=query) await conn.execute(querystring="SELECT * FROM execute_batch") diff --git a/python/tests/test_kwargs_parameters.py b/python/tests/test_kwargs_parameters.py new file mode 100644 index 00000000..d1fb1ebf --- /dev/null +++ b/python/tests/test_kwargs_parameters.py @@ -0,0 +1,73 @@ +import pytest +from psqlpy import ConnectionPool +from psqlpy.exceptions import 
( + PyToRustValueMappingError, +) + +pytestmark = pytest.mark.anyio + + +async def test_success_default_map_parameters( + psql_pool: ConnectionPool, + table_name: str, +) -> None: + async with psql_pool.acquire() as conn: + exist_records = await conn.execute( + f"SELECT * FROM {table_name}", + ) + result = exist_records.result() + + test_fetch = await conn.execute( + f"SELECT * FROM {table_name} WHERE id = $(id_)p", + parameters={ + "id_": result[0]["id"], + }, + ) + + assert test_fetch.result()[0]["id"] == result[0]["id"] + assert test_fetch.result()[0]["name"] == result[0]["name"] + + +@pytest.mark.usefixtures("create_table_for_map_parameters_test") +async def test_success_multiple_same_parameters( + psql_pool: ConnectionPool, + map_parameters_table_name: str, +) -> None: + test_name_surname = "Surname" + test_age = 1 + async with psql_pool.acquire() as conn: + await conn.execute( + querystring=( + f"INSERT INTO {map_parameters_table_name} " + "(name, surname, age) VALUES ($(name)p, $(surname)p, $(age)p)" + ), + parameters={ + "name": test_name_surname, + "surname": test_name_surname, + "age": test_age, + }, + ) + + res = await conn.execute( + querystring=( + f"SELECT * FROM {map_parameters_table_name} " + "WHERE name = $(name)p OR surname = $(name)p" + ), + parameters={"name": test_name_surname}, + ) + + assert res.result()[0]["name"] == test_name_surname + assert res.result()[0]["surname"] == test_name_surname + assert res.result()[0]["age"] == test_age + + +async def test_failed_no_parameter( + psql_pool: ConnectionPool, + table_name: str, +) -> None: + async with psql_pool.acquire() as conn: + with pytest.raises(expected_exception=PyToRustValueMappingError): + await conn.execute( + querystring=(f"SELECT * FROM {table_name} " "WHERE name = $(name)p"), # noqa: ISC001 + parameters={"mistake": "wow"}, + ) diff --git a/python/tests/test_transaction.py b/python/tests/test_transaction.py index 151f5bb5..3c60676a 100644 --- a/python/tests/test_transaction.py +++ b/python/tests/test_transaction.py @@ -352,7 +352,10 @@ async def test_execute_batch_method(psql_pool: ConnectionPool) -> None: connection = await psql_pool.connection() await connection.execute(querystring="DROP TABLE IF EXISTS execute_batch") await connection.execute(querystring="DROP TABLE IF EXISTS execute_batch2") - query = "CREATE TABLE execute_batch (name VARCHAR);CREATE TABLE execute_batch2 (name VARCHAR);" + query = ( + "CREATE TABLE execute_batch (name VARCHAR);" + "CREATE TABLE execute_batch2 (name VARCHAR);" + ) async with connection.transaction() as transaction: await transaction.execute_batch(querystring=query) await transaction.execute(querystring="SELECT * FROM execute_batch") @@ -377,7 +380,9 @@ async def test_synchronous_commit( table_name: str, number_database_records: int, ) -> None: - async with psql_pool.acquire() as conn, conn.transaction(synchronous_commit=synchronous_commit) as trans: + async with psql_pool.acquire() as conn, conn.transaction( + synchronous_commit=synchronous_commit, + ) as trans: res = await trans.execute( f"SELECT * FROM {table_name}", ) diff --git a/python/tests/test_value_converter.py b/python/tests/test_value_converter.py index de62c554..94b8dabe 100644 --- a/python/tests/test_value_converter.py +++ b/python/tests/test_value_converter.py @@ -134,7 +134,8 @@ async def test_as_class( ("TEXT", "Some String", "Some String"), ( "XML", - """Manual...""", + """Manual + ...""", """Manual...""", ), ("BOOL", True, True), @@ -151,7 +152,11 @@ async def test_as_class( ("TIME", now_datetime.time(), 
now_datetime.time()), ("TIMESTAMP", now_datetime, now_datetime), ("TIMESTAMPTZ", now_datetime_with_tz, now_datetime_with_tz), - ("TIMESTAMPTZ", now_datetime_with_tz_in_asia_jakarta, now_datetime_with_tz_in_asia_jakarta), + ( + "TIMESTAMPTZ", + now_datetime_with_tz_in_asia_jakarta, + now_datetime_with_tz_in_asia_jakarta, + ), ("UUID", uuid_, str(uuid_)), ("INET", IPv4Address("192.0.0.1"), IPv4Address("192.0.0.1")), ( @@ -246,7 +251,11 @@ async def test_as_class( ("INT2 ARRAY", [SmallInt(12), SmallInt(100)], [12, 100]), ("INT2 ARRAY", [[SmallInt(12)], [SmallInt(100)]], [[12], [100]]), ("INT4 ARRAY", [Integer(121231231), Integer(121231231)], [121231231, 121231231]), - ("INT4 ARRAY", [[Integer(121231231)], [Integer(121231231)]], [[121231231], [121231231]]), + ( + "INT4 ARRAY", + [[Integer(121231231)], [Integer(121231231)]], + [[121231231], [121231231]], + ), ( "INT8 ARRAY", [BigInt(99999999999999999), BigInt(99999999999999999)], @@ -308,7 +317,11 @@ async def test_as_class( [[now_datetime.time()], [now_datetime.time()]], ), ("TIMESTAMP ARRAY", [now_datetime, now_datetime], [now_datetime, now_datetime]), - ("TIMESTAMP ARRAY", [[now_datetime], [now_datetime]], [[now_datetime], [now_datetime]]), + ( + "TIMESTAMP ARRAY", + [[now_datetime], [now_datetime]], + [[now_datetime], [now_datetime]], + ), ( "TIMESTAMPTZ ARRAY", [now_datetime_with_tz, now_datetime_with_tz], @@ -638,8 +651,14 @@ async def test_as_class( ), ( "INTERVAL ARRAY", - [datetime.timedelta(days=100, microseconds=100), datetime.timedelta(days=100, microseconds=100)], - [datetime.timedelta(days=100, microseconds=100), datetime.timedelta(days=100, microseconds=100)], + [ + datetime.timedelta(days=100, microseconds=100), + datetime.timedelta(days=100, microseconds=100), + ], + [ + datetime.timedelta(days=100, microseconds=100), + datetime.timedelta(days=100, microseconds=100), + ], ), ], ) @@ -681,7 +700,9 @@ async def test_deserialization_composite_into_python( await connection.execute("DROP TYPE IF EXISTS inner_type") await connection.execute("DROP TYPE IF EXISTS enum_type") await connection.execute("CREATE TYPE enum_type AS ENUM ('sad', 'ok', 'happy')") - await connection.execute("CREATE TYPE inner_type AS (inner_value VARCHAR, some_enum enum_type)") + await connection.execute( + "CREATE TYPE inner_type AS (inner_value VARCHAR, some_enum enum_type)", + ) create_type_query = """ CREATE type all_types AS ( bytea_ BYTEA, @@ -1082,7 +1103,12 @@ async def test_empty_array( async with psql_pool.acquire() as conn: await conn.execute("DROP TABLE IF EXISTS test_earr") await conn.execute( - "CREATE TABLE test_earr (id serial NOT NULL PRIMARY KEY, e_array text[] NOT NULL DEFAULT array[]::text[])", + """ + CREATE TABLE test_earr ( + id serial NOT NULL PRIMARY KEY, + e_array text[] NOT NULL DEFAULT array[]::text[] + ) + """, ) await conn.execute("INSERT INTO test_earr(id) VALUES(2);") @@ -1125,8 +1151,16 @@ async def test_empty_array( ("INT2 ARRAY", Int16Array([]), []), ("INT2 ARRAY", Int16Array([SmallInt(12), SmallInt(100)]), [12, 100]), ("INT2 ARRAY", Int16Array([[SmallInt(12)], [SmallInt(100)]]), [[12], [100]]), - ("INT4 ARRAY", Int32Array([Integer(121231231), Integer(121231231)]), [121231231, 121231231]), - ("INT4 ARRAY", Int32Array([[Integer(121231231)], [Integer(121231231)]]), [[121231231], [121231231]]), + ( + "INT4 ARRAY", + Int32Array([Integer(121231231), Integer(121231231)]), + [121231231, 121231231], + ), + ( + "INT4 ARRAY", + Int32Array([[Integer(121231231)], [Integer(121231231)]]), + [[121231231], [121231231]], + ), ( "INT8 ARRAY", 
Int64Array([BigInt(99999999999999999), BigInt(99999999999999999)]), @@ -1187,8 +1221,16 @@ async def test_empty_array( TimeArray([[now_datetime.time()], [now_datetime.time()]]), [[now_datetime.time()], [now_datetime.time()]], ), - ("TIMESTAMP ARRAY", DateTimeArray([now_datetime, now_datetime]), [now_datetime, now_datetime]), - ("TIMESTAMP ARRAY", DateTimeArray([[now_datetime], [now_datetime]]), [[now_datetime], [now_datetime]]), + ( + "TIMESTAMP ARRAY", + DateTimeArray([now_datetime, now_datetime]), + [now_datetime, now_datetime], + ), + ( + "TIMESTAMP ARRAY", + DateTimeArray([[now_datetime], [now_datetime]]), + [[now_datetime], [now_datetime]], + ), ( "TIMESTAMPTZ ARRAY", DateTimeTZArray([now_datetime_with_tz, now_datetime_with_tz]), @@ -1554,9 +1596,15 @@ async def test_empty_array( ( "INTERVAL ARRAY", IntervalArray( - [[datetime.timedelta(days=100, microseconds=100)], [datetime.timedelta(days=100, microseconds=100)]], + [ + [datetime.timedelta(days=100, microseconds=100)], + [datetime.timedelta(days=100, microseconds=100)], + ], ), - [[datetime.timedelta(days=100, microseconds=100)], [datetime.timedelta(days=100, microseconds=100)]], + [ + [datetime.timedelta(days=100, microseconds=100)], + [datetime.timedelta(days=100, microseconds=100)], + ], ), ], ) diff --git a/src/driver/inner_connection.rs b/src/driver/inner_connection.rs index c66006cc..10b861f1 100644 --- a/src/driver/inner_connection.rs +++ b/src/driver/inner_connection.rs @@ -8,7 +8,7 @@ use tokio_postgres::{Client, CopyInSink, Row, Statement, ToStatement}; use crate::{ exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, query_result::{PSQLDriverPyQueryResult, PSQLDriverSinglePyQueryResult}, - value_converter::{convert_parameters, postgres_to_py, PythonDTO, QueryParameter}, + value_converter::{convert_parameters_and_qs, postgres_to_py, PythonDTO, QueryParameter}, }; #[allow(clippy::module_name_repetitions)] @@ -82,7 +82,7 @@ impl PsqlpyConnection { } } - pub async fn execute( + pub async fn cursor_execute( &self, querystring: String, parameters: Option>, @@ -90,10 +90,7 @@ impl PsqlpyConnection { ) -> RustPSQLDriverPyResult { let prepared = prepared.unwrap_or(true); - let mut params: Vec = vec![]; - if let Some(parameters) = parameters { - params = convert_parameters(parameters)?; - } + let (qs, params) = convert_parameters_and_qs(querystring, parameters)?; let boxed_params = ¶ms .iter() @@ -103,7 +100,7 @@ impl PsqlpyConnection { let result = if prepared { self.query( - &self.prepare_cached(&querystring).await.map_err(|err| { + &self.prepare_cached(&qs).await.map_err(|err| { RustPSQLDriverError::ConnectionExecuteError(format!( "Cannot prepare statement, error - {err}" )) @@ -117,13 +114,53 @@ impl PsqlpyConnection { )) })? } else { - self.query(&querystring, boxed_params) - .await - .map_err(|err| { + self.query(&qs, boxed_params).await.map_err(|err| { + RustPSQLDriverError::ConnectionExecuteError(format!( + "Cannot execute statement, error - {err}" + )) + })? 
+ }; + + Ok(PSQLDriverPyQueryResult::new(result)) + } + + pub async fn execute( + &self, + querystring: String, + parameters: Option>, + prepared: Option, + ) -> RustPSQLDriverPyResult { + let prepared = prepared.unwrap_or(true); + + let (qs, params) = convert_parameters_and_qs(querystring, parameters)?; + + let boxed_params = ¶ms + .iter() + .map(|param| param as &QueryParameter) + .collect::>() + .into_boxed_slice(); + + let result = if prepared { + self.query( + &self.prepare_cached(&qs).await.map_err(|err| { RustPSQLDriverError::ConnectionExecuteError(format!( - "Cannot execute statement, error - {err}" + "Cannot prepare statement, error - {err}" )) - })? + })?, + boxed_params, + ) + .await + .map_err(|err| { + RustPSQLDriverError::ConnectionExecuteError(format!( + "Cannot execute statement, error - {err}" + )) + })? + } else { + self.query(&qs, boxed_params).await.map_err(|err| { + RustPSQLDriverError::ConnectionExecuteError(format!( + "Cannot execute statement, error - {err}" + )) + })? }; Ok(PSQLDriverPyQueryResult::new(result)) @@ -131,7 +168,7 @@ impl PsqlpyConnection { pub async fn execute_many( &self, - querystring: String, + mut querystring: String, parameters: Option>>, prepared: Option, ) -> RustPSQLDriverPyResult<()> { @@ -140,7 +177,11 @@ impl PsqlpyConnection { let mut params: Vec> = vec![]; if let Some(parameters) = parameters { for vec_of_py_any in parameters { - params.push(convert_parameters(vec_of_py_any)?); + // TODO: Fix multiple qs creation + let (qs, parsed_params) = + convert_parameters_and_qs(querystring.clone(), Some(vec_of_py_any))?; + querystring = qs; + params.push(parsed_params); } } @@ -182,10 +223,7 @@ impl PsqlpyConnection { ) -> RustPSQLDriverPyResult { let prepared = prepared.unwrap_or(true); - let mut params: Vec = vec![]; - if let Some(parameters) = parameters { - params = convert_parameters(parameters)?; - } + let (qs, params) = convert_parameters_and_qs(querystring, parameters)?; let boxed_params = ¶ms .iter() @@ -195,7 +233,7 @@ impl PsqlpyConnection { let result = if prepared { self.query_one( - &self.prepare_cached(&querystring).await.map_err(|err| { + &self.prepare_cached(&qs).await.map_err(|err| { RustPSQLDriverError::ConnectionExecuteError(format!( "Cannot prepare statement, error - {err}" )) @@ -209,13 +247,11 @@ impl PsqlpyConnection { )) })? } else { - self.query_one(&querystring, boxed_params) - .await - .map_err(|err| { - RustPSQLDriverError::ConnectionExecuteError(format!( - "Cannot execute statement, error - {err}" - )) - })? + self.query_one(&qs, boxed_params).await.map_err(|err| { + RustPSQLDriverError::ConnectionExecuteError(format!( + "Cannot execute statement, error - {err}" + )) + })? 
}; return Ok(result); diff --git a/src/value_converter.rs b/src/value_converter.rs index fd9fca37..f3a95297 100644 --- a/src/value_converter.rs +++ b/src/value_converter.rs @@ -3,11 +3,12 @@ use chrono_tz::Tz; use geo_types::{coord, Coord, Line as LineSegment, LineString, Point, Rect}; use itertools::Itertools; use macaddr::{MacAddr6, MacAddr8}; +use once_cell::sync::Lazy; use pg_interval::Interval; use postgres_types::{Field, FromSql, Kind, ToSql}; use rust_decimal::Decimal; use serde_json::{json, Map, Value}; -use std::{fmt::Debug, net::IpAddr}; +use std::{collections::HashMap, fmt::Debug, net::IpAddr, sync::RwLock}; use uuid::Uuid; use bytes::{BufMut, BytesMut}; @@ -16,8 +17,8 @@ use pyo3::{ sync::GILOnceCell, types::{ PyAnyMethods, PyBool, PyBytes, PyDate, PyDateTime, PyDelta, PyDict, PyDictMethods, PyFloat, - PyInt, PyList, PyListMethods, PySequence, PySet, PyString, PyTime, PyTuple, PyType, - PyTypeMethods, + PyInt, PyList, PyListMethods, PyMapping, PySequence, PySet, PyString, PyTime, PyTuple, + PyType, PyTypeMethods, }, Bound, FromPyObject, IntoPy, Py, PyAny, PyObject, PyResult, Python, ToPyObject, }; @@ -39,16 +40,15 @@ use postgres_array::{array::Array, Dimension}; static DECIMAL_CLS: GILOnceCell> = GILOnceCell::new(); static TIMEDELTA_CLS: GILOnceCell> = GILOnceCell::new(); +static KWARGS_QUERYSTRINGS: Lazy)>>> = + Lazy::new(|| RwLock::new(Default::default())); pub type QueryParameter = (dyn ToSql + Sync); fn get_decimal_cls(py: Python<'_>) -> PyResult<&Bound<'_, PyType>> { DECIMAL_CLS .get_or_try_init(py, || { - let type_object = py - .import_bound("decimal")? - .getattr("Decimal")? - .downcast_into()?; + let type_object = py.import("decimal")?.getattr("Decimal")?.downcast_into()?; Ok(type_object.unbind()) }) .map(|ty| ty.bind(py)) @@ -58,7 +58,7 @@ fn get_timedelta_cls(py: Python<'_>) -> PyResult<&Bound<'_, PyType>> { TIMEDELTA_CLS .get_or_try_init(py, || { let type_object = py - .import_bound("datetime")? + .import("datetime")? .getattr("timedelta")? 
.downcast_into()?; Ok(type_object.unbind()) @@ -613,6 +613,73 @@ impl ToSql for PythonDTO { to_sql_checked!(); } +fn parse_kwargs_qs(querystring: &str) -> (String, Vec) { + let re = regex::Regex::new(r"\$\(([^)]+)\)p").unwrap(); + + { + let kq_read = KWARGS_QUERYSTRINGS.read().unwrap(); + let qs = kq_read.get(querystring); + + if let Some(qs) = qs { + return qs.clone(); + } + }; + + let mut counter = 0; + let mut sequence = Vec::new(); + + let result = re.replace_all(querystring, |caps: ®ex::Captures| { + let account_id = caps[1].to_string(); + + sequence.push(account_id.clone()); + counter += 1; + + format!("${}", &counter) + }); + + let mut kq_write = KWARGS_QUERYSTRINGS.write().unwrap(); + kq_write.insert( + querystring.to_string(), + (result.clone().into(), sequence.clone()), + ); + (result.into(), sequence) +} + +pub fn convert_kwargs_parameters<'a>( + kw_params: &Bound<'_, PyMapping>, + querystring: &'a str, +) -> RustPSQLDriverPyResult<(String, Vec)> { + let mut result_vec: Vec = vec![]; + let (changed_string, params_names) = parse_kwargs_qs(querystring); + + for param_name in params_names { + match kw_params.get_item(¶m_name) { + Ok(param) => result_vec.push(py_to_rust(¶m)?), + Err(_) => { + return Err(RustPSQLDriverError::PyToRustValueConversionError( + format!("Cannot find parameter with name <{param_name}> in parameters").into(), + )) + } + } + } + + Ok((changed_string, result_vec)) +} + +pub fn convert_seq_parameters( + seq_params: Vec>, +) -> RustPSQLDriverPyResult> { + let mut result_vec: Vec = vec![]; + Python::with_gil(|gil| { + for parameter in seq_params { + result_vec.push(py_to_rust(parameter.bind(gil))?); + } + Ok::<(), RustPSQLDriverError>(()) + })?; + + Ok(result_vec) +} + /// Convert parameters come from python. /// /// Parameters for `execute()` method can be either @@ -625,22 +692,36 @@ impl ToSql for PythonDTO { /// /// May return Err Result if can't convert python object. #[allow(clippy::needless_pass_by_value)] -pub fn convert_parameters(parameters: Py) -> RustPSQLDriverPyResult> { - let mut result_vec: Vec = vec![]; - Python::with_gil(|gil| { +pub fn convert_parameters_and_qs( + querystring: String, + parameters: Option>, +) -> RustPSQLDriverPyResult<(String, Vec)> { + let Some(parameters) = parameters else { + return Ok((querystring, vec![])); + }; + + let res = Python::with_gil(|gil| { let params = parameters.extract::>>(gil).map_err(|_| { RustPSQLDriverError::PyToRustValueConversionError( "Cannot convert you parameters argument into Rust type, please use List/Tuple" .into(), ) - })?; - for parameter in params { - result_vec.push(py_to_rust(parameter.bind(gil))?); + }); + if let Ok(params) = params { + return Ok((querystring, convert_seq_parameters(params)?)); } - Ok::<(), RustPSQLDriverError>(()) + + let kw_params = parameters.downcast_bound::(gil); + if let Ok(kw_params) = kw_params { + return convert_kwargs_parameters(kw_params, &querystring); + } + + Err(RustPSQLDriverError::PyToRustValueConversionError( + "Parameters must be sequence or mapping".into(), + )) })?; - Ok(result_vec) + Ok(res) } /// Convert Sequence from Python (except String) into flat vec. 
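For readers following the Rust side of this patch: `parse_kwargs_qs` rewrites every `$(name)p` placeholder, in order of appearance, into the next positional `$N`, records the names it saw, and caches the rewritten querystring in `KWARGS_QUERYSTRINGS`; `convert_kwargs_parameters` then resolves each recorded name against the supplied mapping and fails with a value-conversion error (surfaced in Python as `PyToRustValueMappingError`) when a name is missing. Below is a rough Python re-statement of that rewriting step, purely for illustration; the real work happens in Rust, and the helper shown is not part of the library.

```python
# Rough Python re-statement of parse_kwargs_qs / convert_kwargs_parameters.
# Illustration only: the driver performs this rewriting in Rust and caches it.
import re
from typing import Any, Mapping

PLACEHOLDER = re.compile(r"\$\(([^)]+)\)p")


def rewrite_named_query(
    querystring: str,
    parameters: Mapping[str, Any],
) -> tuple[str, list[Any]]:
    """Rewrite $(name)p placeholders to $1, $2, ... and order the values."""
    names: list[str] = []

    def _next_positional(match: re.Match) -> str:
        names.append(match.group(1))
        return f"${len(names)}"

    rewritten = PLACEHOLDER.sub(_next_positional, querystring)
    try:
        values = [parameters[name] for name in names]
    except KeyError as exc:  # surfaced by the driver as PyToRustValueMappingError
        raise KeyError(f"Cannot find parameter with name <{exc.args[0]}>") from exc
    return rewritten, values


# A name used twice is bound twice, matching the behaviour the tests exercise.
qs, values = rewrite_named_query(
    "SELECT * FROM users WHERE name = $(name)p OR surname = $(name)p",
    {"name": "Surname"},
)
assert qs == "SELECT * FROM users WHERE name = $1 OR surname = $2"
assert values == ["Surname", "Surname"]
```

Because every occurrence is rewritten and its name recorded again, a parameter used twice in the querystring is simply bound twice, which is what `test_success_multiple_same_parameters` checks; and because the rewritten form is cached per querystring, the regex pass runs once per distinct query text rather than on every execution.
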
From 7573b07783b94b3e98a02815bb52bcfca9aaa459 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Sat, 22 Feb 2025 16:37:09 +0100 Subject: [PATCH 18/65] Added support for named parameters Signed-off-by: chandr-andr (Kiselev Aleksandr) --- python/tests/test_value_converter.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/python/tests/test_value_converter.py b/python/tests/test_value_converter.py index 94b8dabe..34361b22 100644 --- a/python/tests/test_value_converter.py +++ b/python/tests/test_value_converter.py @@ -134,8 +134,7 @@ async def test_as_class( ("TEXT", "Some String", "Some String"), ( "XML", - """Manual - ...""", + """Manual...""", # noqa: E501 """Manual...""", ), ("BOOL", True, True), From 2404eae49f947e5b4d2f33fba9bb052bbceee07c Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Sat, 22 Feb 2025 16:46:43 +0100 Subject: [PATCH 19/65] Added support for named parameters Signed-off-by: chandr-andr (Kiselev Aleksandr) --- pyproject.toml | 6 ------ 1 file changed, 6 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 5a61fd61..84c00f42 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -38,12 +38,6 @@ maintainers = [{ name = "Kiselev Aleksandr", email = "askiselev00@gmail.com" }] description = "Async PostgreSQL driver for Python written in Rust" dynamic = ["version"] -# [tool.poetry.group.querybuilder] -# optional = true - -# [tool.poetry.group.querybuilder.dependencies] -# pypika = "*" - [tool.maturin] python-source = "python" module-name = "psqlpy._internal" From c9b9c8cf06051bc7d90bfbb59be6bac1bf269ee8 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Sat, 22 Feb 2025 18:26:54 +0100 Subject: [PATCH 20/65] Added support for named parameters Signed-off-by: chandr-andr (Kiselev Aleksandr) --- python/tests/test_listener.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/python/tests/test_listener.py b/python/tests/test_listener.py index c48c8974..a1ff0742 100644 --- a/python/tests/test_listener.py +++ b/python/tests/test_listener.py @@ -126,7 +126,7 @@ async def test_listener_listen( listener_table_name=listener_table_name, ) - listener.abort_listen() + await listener.shutdown() @pytest.mark.usefixtures("create_table_for_listener_tests") @@ -152,7 +152,7 @@ async def test_listener_asynciterator( assert listener_msg.payload == TEST_PAYLOAD break - listener.abort_listen() + await listener.shutdown() @pytest.mark.usefixtures("create_table_for_listener_tests") @@ -175,7 +175,7 @@ async def test_listener_abort( listener_table_name=listener_table_name, ) - listener.abort_listen() + await listener.shutdown() await clear_test_table( psql_pool=psql_pool, @@ -261,7 +261,7 @@ async def test_listener_more_than_one_callback( assert data_result["channel"] == additional_channel - listener.abort_listen() + await listener.shutdown() @pytest.mark.usefixtures("create_table_for_listener_tests") @@ -290,7 +290,7 @@ async def test_listener_clear_callbacks( is_insert_exist=False, ) - listener.abort_listen() + await listener.shutdown() @pytest.mark.usefixtures("create_table_for_listener_tests") @@ -317,4 +317,4 @@ async def test_listener_clear_all_callbacks( is_insert_exist=False, ) - listener.abort_listen() + await listener.shutdown() From 66450d5588f7b24375905f2394672d5bb22e9a5f Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Sat, 22 Feb 2025 18:41:22 +0100 Subject: [PATCH 21/65] Bumped version to 0.9.3 Signed-off-by: chandr-andr (Kiselev Aleksandr) --- 
Cargo.lock | 2 +- Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 35713346..fee82b45 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -997,7 +997,7 @@ dependencies = [ [[package]] name = "psqlpy" -version = "0.9.2" +version = "0.9.3" dependencies = [ "byteorder", "bytes", diff --git a/Cargo.toml b/Cargo.toml index 93a46f66..1846f8c2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "psqlpy" -version = "0.9.2" +version = "0.9.3" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html From 151815599149cd74906f91d4fd2b76ea1c859607 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Mon, 28 Apr 2025 21:41:09 +0200 Subject: [PATCH 22/65] First steps to refactor value converter --- src/driver/inner_connection.rs | 6 +- src/extra_types.rs | 6 +- src/query_result.rs | 5 +- src/value_converter.rs | 2232 --------------------- src/value_converter/consts.rs | 37 + src/value_converter/funcs/from_python.rs | 985 +++++++++ src/value_converter/funcs/mod.rs | 2 + src/value_converter/funcs/to_python.rs | 712 +++++++ src/value_converter/mod.rs | 3 + src/value_converter/models/decimal.rs | 30 + src/value_converter/models/dto.rs | 489 +++++ src/value_converter/models/interval.rs | 37 + src/value_converter/models/mod.rs | 5 + src/value_converter/models/serde_value.rs | 89 + src/value_converter/models/uuid.rs | 46 + 15 files changed, 2448 insertions(+), 2236 deletions(-) delete mode 100644 src/value_converter.rs create mode 100644 src/value_converter/consts.rs create mode 100644 src/value_converter/funcs/from_python.rs create mode 100644 src/value_converter/funcs/mod.rs create mode 100644 src/value_converter/funcs/to_python.rs create mode 100644 src/value_converter/mod.rs create mode 100644 src/value_converter/models/decimal.rs create mode 100644 src/value_converter/models/dto.rs create mode 100644 src/value_converter/models/interval.rs create mode 100644 src/value_converter/models/mod.rs create mode 100644 src/value_converter/models/serde_value.rs create mode 100644 src/value_converter/models/uuid.rs diff --git a/src/driver/inner_connection.rs b/src/driver/inner_connection.rs index 10b861f1..ae060baa 100644 --- a/src/driver/inner_connection.rs +++ b/src/driver/inner_connection.rs @@ -8,7 +8,11 @@ use tokio_postgres::{Client, CopyInSink, Row, Statement, ToStatement}; use crate::{ exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, query_result::{PSQLDriverPyQueryResult, PSQLDriverSinglePyQueryResult}, - value_converter::{convert_parameters_and_qs, postgres_to_py, PythonDTO, QueryParameter}, + value_converter::{ + consts::QueryParameter, + funcs::{from_python::convert_parameters_and_qs, to_python::postgres_to_py}, + models::dto::PythonDTO, + }, }; #[allow(clippy::module_name_repetitions)] diff --git a/src/extra_types.rs b/src/extra_types.rs index e0b33be8..ea4e35f5 100644 --- a/src/extra_types.rs +++ b/src/extra_types.rs @@ -13,8 +13,10 @@ use crate::{ additional_types::{Circle as RustCircle, Line as RustLine}, exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, value_converter::{ - build_flat_geo_coords, build_geo_coords, build_serde_value, - py_sequence_into_postgres_array, PythonDTO, + funcs::from_python::{ + build_flat_geo_coords, build_geo_coords, py_sequence_into_postgres_array, + }, + models::{dto::PythonDTO, serde_value::build_serde_value}, }, }; diff --git a/src/query_result.rs b/src/query_result.rs index 
162be3b5..da393f89 100644 --- a/src/query_result.rs +++ b/src/query_result.rs @@ -1,7 +1,10 @@ use pyo3::{prelude::*, pyclass, pymethods, types::PyDict, Py, PyAny, Python, ToPyObject}; use tokio_postgres::Row; -use crate::{exceptions::rust_errors::RustPSQLDriverPyResult, value_converter::postgres_to_py}; +use crate::{ + exceptions::rust_errors::RustPSQLDriverPyResult, + value_converter::funcs::to_python::postgres_to_py, +}; /// Convert postgres `Row` into Python Dict. /// diff --git a/src/value_converter.rs b/src/value_converter.rs deleted file mode 100644 index f3a95297..00000000 --- a/src/value_converter.rs +++ /dev/null @@ -1,2232 +0,0 @@ -use chrono::{self, DateTime, FixedOffset, NaiveDate, NaiveDateTime, NaiveTime, TimeZone}; -use chrono_tz::Tz; -use geo_types::{coord, Coord, Line as LineSegment, LineString, Point, Rect}; -use itertools::Itertools; -use macaddr::{MacAddr6, MacAddr8}; -use once_cell::sync::Lazy; -use pg_interval::Interval; -use postgres_types::{Field, FromSql, Kind, ToSql}; -use rust_decimal::Decimal; -use serde_json::{json, Map, Value}; -use std::{collections::HashMap, fmt::Debug, net::IpAddr, sync::RwLock}; -use uuid::Uuid; - -use bytes::{BufMut, BytesMut}; -use postgres_protocol::types; -use pyo3::{ - sync::GILOnceCell, - types::{ - PyAnyMethods, PyBool, PyBytes, PyDate, PyDateTime, PyDelta, PyDict, PyDictMethods, PyFloat, - PyInt, PyList, PyListMethods, PyMapping, PySequence, PySet, PyString, PyTime, PyTuple, - PyType, PyTypeMethods, - }, - Bound, FromPyObject, IntoPy, Py, PyAny, PyObject, PyResult, Python, ToPyObject, -}; -use tokio_postgres::{ - types::{to_sql_checked, Type}, - Column, Row, -}; - -use crate::{ - additional_types::{ - Circle, Line, RustLineSegment, RustLineString, RustMacAddr6, RustMacAddr8, RustPoint, - RustRect, - }, - exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, - extra_types, -}; -use pgvector::Vector as PgVector; -use postgres_array::{array::Array, Dimension}; - -static DECIMAL_CLS: GILOnceCell> = GILOnceCell::new(); -static TIMEDELTA_CLS: GILOnceCell> = GILOnceCell::new(); -static KWARGS_QUERYSTRINGS: Lazy)>>> = - Lazy::new(|| RwLock::new(Default::default())); - -pub type QueryParameter = (dyn ToSql + Sync); - -fn get_decimal_cls(py: Python<'_>) -> PyResult<&Bound<'_, PyType>> { - DECIMAL_CLS - .get_or_try_init(py, || { - let type_object = py.import("decimal")?.getattr("Decimal")?.downcast_into()?; - Ok(type_object.unbind()) - }) - .map(|ty| ty.bind(py)) -} - -fn get_timedelta_cls(py: Python<'_>) -> PyResult<&Bound<'_, PyType>> { - TIMEDELTA_CLS - .get_or_try_init(py, || { - let type_object = py - .import("datetime")? - .getattr("timedelta")? - .downcast_into()?; - Ok(type_object.unbind()) - }) - .map(|ty| ty.bind(py)) -} - -/// Struct for Uuid. -/// -/// We use custom struct because we need to implement external traits -/// to it. 
-#[derive(Clone, Copy)] -pub struct InternalUuid(Uuid); - -impl<'a> FromPyObject<'a> for InternalUuid { - fn extract_bound(obj: &Bound<'a, PyAny>) -> PyResult { - let uuid_value = Uuid::parse_str(obj.str()?.extract::<&str>()?).map_err(|_| { - RustPSQLDriverError::PyToRustValueConversionError( - "Cannot convert UUID Array to inner rust type, check you parameters.".into(), - ) - })?; - Ok(InternalUuid(uuid_value)) - } -} - -impl ToPyObject for InternalUuid { - fn to_object(&self, py: Python<'_>) -> PyObject { - self.0.to_string().as_str().to_object(py) - } -} - -impl<'a> FromSql<'a> for InternalUuid { - fn from_sql( - ty: &Type, - raw: &'a [u8], - ) -> Result> { - Ok(InternalUuid(::from_sql(ty, raw)?)) - } - - fn accepts(_ty: &Type) -> bool { - true - } -} - -/// Struct for Value. -/// -/// We use custom struct because we need to implement external traits -/// to it. -#[derive(Clone)] -pub struct InternalSerdeValue(Value); - -impl<'a> FromPyObject<'a> for InternalSerdeValue { - fn extract_bound(ob: &Bound<'a, PyAny>) -> PyResult { - let serde_value = build_serde_value(ob.clone().unbind())?; - - Ok(InternalSerdeValue(serde_value)) - } -} - -impl ToPyObject for InternalSerdeValue { - fn to_object(&self, py: Python<'_>) -> PyObject { - match build_python_from_serde_value(py, self.0.clone()) { - Ok(ok_value) => ok_value, - Err(_) => py.None(), - } - } -} - -impl<'a> FromSql<'a> for InternalSerdeValue { - fn from_sql( - ty: &Type, - raw: &'a [u8], - ) -> Result> { - Ok(InternalSerdeValue(::from_sql(ty, raw)?)) - } - - fn accepts(_ty: &Type) -> bool { - true - } -} - -/// Struct for Decimal. -/// -/// It's necessary because we use custom forks and there is -/// no implementation of `ToPyObject` for Decimal. -struct InnerDecimal(Decimal); - -impl ToPyObject for InnerDecimal { - fn to_object(&self, py: Python<'_>) -> PyObject { - let dec_cls = get_decimal_cls(py).expect("failed to load decimal.Decimal"); - let ret = dec_cls - .call1((self.0.to_string(),)) - .expect("failed to call decimal.Decimal(value)"); - ret.to_object(py) - } -} - -impl<'a> FromSql<'a> for InnerDecimal { - fn from_sql( - ty: &Type, - raw: &'a [u8], - ) -> Result> { - Ok(InnerDecimal(::from_sql(ty, raw)?)) - } - - fn accepts(_ty: &Type) -> bool { - true - } -} - -struct InnerInterval(Interval); - -impl ToPyObject for InnerInterval { - fn to_object(&self, py: Python<'_>) -> PyObject { - let td_cls = get_timedelta_cls(py).expect("failed to load datetime.timedelta"); - let pydict = PyDict::new_bound(py); - let months = self.0.months * 30; - let _ = pydict.set_item("days", self.0.days + months); - let _ = pydict.set_item("microseconds", self.0.microseconds); - let ret = td_cls - .call((), Some(&pydict)) - .expect("failed to call datetime.timedelta(days=<>, microseconds=<>)"); - ret.to_object(py) - } -} - -impl<'a> FromSql<'a> for InnerInterval { - fn from_sql( - ty: &Type, - raw: &'a [u8], - ) -> Result> { - Ok(InnerInterval(::from_sql(ty, raw)?)) - } - - fn accepts(_ty: &Type) -> bool { - true - } -} - -/// Additional type for types come from Python. -/// -/// It's necessary because we need to pass this -/// enum into `to_sql` method of `ToSql` trait from -/// `postgres` crate. 
-#[derive(Debug, Clone, PartialEq)] -pub enum PythonDTO { - // Primitive - PyNone, - PyBytes(Vec), - PyBool(bool), - PyUUID(Uuid), - PyVarChar(String), - PyText(String), - PyString(String), - PyIntI16(i16), - PyIntI32(i32), - PyIntI64(i64), - PyIntU32(u32), - PyIntU64(u64), - PyFloat32(f32), - PyFloat64(f64), - PyMoney(i64), - PyDate(NaiveDate), - PyTime(NaiveTime), - PyDateTime(NaiveDateTime), - PyDateTimeTz(DateTime), - PyInterval(Interval), - PyIpAddress(IpAddr), - PyList(Vec), - PyArray(Array), - PyTuple(Vec), - PyJsonb(Value), - PyJson(Value), - PyMacAddr6(MacAddr6), - PyMacAddr8(MacAddr8), - PyDecimal(Decimal), - PyCustomType(Vec), - PyPoint(Point), - PyBox(Rect), - PyPath(LineString), - PyLine(Line), - PyLineSegment(LineSegment), - PyCircle(Circle), - // Arrays - PyBoolArray(Array), - PyUuidArray(Array), - PyVarCharArray(Array), - PyTextArray(Array), - PyInt16Array(Array), - PyInt32Array(Array), - PyInt64Array(Array), - PyFloat32Array(Array), - PyFloat64Array(Array), - PyMoneyArray(Array), - PyIpAddressArray(Array), - PyJSONBArray(Array), - PyJSONArray(Array), - PyDateArray(Array), - PyTimeArray(Array), - PyDateTimeArray(Array), - PyDateTimeTZArray(Array), - PyMacAddr6Array(Array), - PyMacAddr8Array(Array), - PyNumericArray(Array), - PyPointArray(Array), - PyBoxArray(Array), - PyPathArray(Array), - PyLineArray(Array), - PyLsegArray(Array), - PyCircleArray(Array), - PyIntervalArray(Array), - // PgVector - PyPgVector(Vec), -} - -impl ToPyObject for PythonDTO { - fn to_object(&self, py: Python<'_>) -> PyObject { - match self { - PythonDTO::PyNone => py.None(), - PythonDTO::PyBool(pybool) => pybool.to_object(py), - PythonDTO::PyString(py_string) - | PythonDTO::PyText(py_string) - | PythonDTO::PyVarChar(py_string) => py_string.to_object(py), - PythonDTO::PyIntI32(pyint) => pyint.to_object(py), - PythonDTO::PyIntI64(pyint) => pyint.to_object(py), - PythonDTO::PyIntU64(pyint) => pyint.to_object(py), - PythonDTO::PyFloat32(pyfloat) => pyfloat.to_object(py), - PythonDTO::PyFloat64(pyfloat) => pyfloat.to_object(py), - _ => unreachable!(), - } - } -} - -impl PythonDTO { - /// Return type of the Array for `PostgreSQL`. - /// - /// Since every Array must have concrete type, - /// we must say exactly what type of array we try to pass into - /// postgres. - /// - /// # Errors - /// May return Err Result if there is no support for passed python type. 
- pub fn array_type(&self) -> RustPSQLDriverPyResult { - match self { - PythonDTO::PyBool(_) => Ok(tokio_postgres::types::Type::BOOL_ARRAY), - PythonDTO::PyUUID(_) => Ok(tokio_postgres::types::Type::UUID_ARRAY), - PythonDTO::PyVarChar(_) | PythonDTO::PyString(_) => { - Ok(tokio_postgres::types::Type::VARCHAR_ARRAY) - } - PythonDTO::PyText(_) => Ok(tokio_postgres::types::Type::TEXT_ARRAY), - PythonDTO::PyIntI16(_) => Ok(tokio_postgres::types::Type::INT2_ARRAY), - PythonDTO::PyIntI32(_) | PythonDTO::PyIntU32(_) => { - Ok(tokio_postgres::types::Type::INT4_ARRAY) - } - PythonDTO::PyIntI64(_) => Ok(tokio_postgres::types::Type::INT8_ARRAY), - PythonDTO::PyFloat32(_) => Ok(tokio_postgres::types::Type::FLOAT4_ARRAY), - PythonDTO::PyFloat64(_) => Ok(tokio_postgres::types::Type::FLOAT8_ARRAY), - PythonDTO::PyMoney(_) => Ok(tokio_postgres::types::Type::MONEY_ARRAY), - PythonDTO::PyIpAddress(_) => Ok(tokio_postgres::types::Type::INET_ARRAY), - PythonDTO::PyJsonb(_) => Ok(tokio_postgres::types::Type::JSONB_ARRAY), - PythonDTO::PyJson(_) => Ok(tokio_postgres::types::Type::JSON_ARRAY), - PythonDTO::PyDate(_) => Ok(tokio_postgres::types::Type::DATE_ARRAY), - PythonDTO::PyTime(_) => Ok(tokio_postgres::types::Type::TIME_ARRAY), - PythonDTO::PyDateTime(_) => Ok(tokio_postgres::types::Type::TIMESTAMP_ARRAY), - PythonDTO::PyDateTimeTz(_) => Ok(tokio_postgres::types::Type::TIMESTAMPTZ_ARRAY), - PythonDTO::PyMacAddr6(_) => Ok(tokio_postgres::types::Type::MACADDR_ARRAY), - PythonDTO::PyMacAddr8(_) => Ok(tokio_postgres::types::Type::MACADDR8_ARRAY), - PythonDTO::PyDecimal(_) => Ok(tokio_postgres::types::Type::NUMERIC_ARRAY), - PythonDTO::PyPoint(_) => Ok(tokio_postgres::types::Type::POINT_ARRAY), - PythonDTO::PyBox(_) => Ok(tokio_postgres::types::Type::BOX_ARRAY), - PythonDTO::PyPath(_) => Ok(tokio_postgres::types::Type::PATH_ARRAY), - PythonDTO::PyLine(_) => Ok(tokio_postgres::types::Type::LINE_ARRAY), - PythonDTO::PyLineSegment(_) => Ok(tokio_postgres::types::Type::LSEG_ARRAY), - PythonDTO::PyCircle(_) => Ok(tokio_postgres::types::Type::CIRCLE_ARRAY), - PythonDTO::PyInterval(_) => Ok(tokio_postgres::types::Type::INTERVAL_ARRAY), - _ => Err(RustPSQLDriverError::PyToRustValueConversionError( - "Can't process array type, your type doesn't have support yet".into(), - )), - } - } - - /// Convert enum into serde `Value`. - /// - /// # Errors - /// May return Err Result if cannot convert python type into rust. - pub fn to_serde_value(&self) -> RustPSQLDriverPyResult { - match self { - PythonDTO::PyNone => Ok(Value::Null), - PythonDTO::PyBool(pybool) => Ok(json!(pybool)), - PythonDTO::PyString(pystring) - | PythonDTO::PyText(pystring) - | PythonDTO::PyVarChar(pystring) => Ok(json!(pystring)), - PythonDTO::PyIntI32(pyint) => Ok(json!(pyint)), - PythonDTO::PyIntI64(pyint) => Ok(json!(pyint)), - PythonDTO::PyIntU64(pyint) => Ok(json!(pyint)), - PythonDTO::PyFloat32(pyfloat) => Ok(json!(pyfloat)), - PythonDTO::PyFloat64(pyfloat) => Ok(json!(pyfloat)), - PythonDTO::PyList(pylist) => { - let mut vec_serde_values: Vec = vec![]; - - for py_object in pylist { - vec_serde_values.push(py_object.to_serde_value()?); - } - - Ok(json!(vec_serde_values)) - } - PythonDTO::PyArray(array) => Ok(json!(pythondto_array_to_serde(Some(array.clone()))?)), - PythonDTO::PyJsonb(py_dict) | PythonDTO::PyJson(py_dict) => Ok(py_dict.clone()), - _ => Err(RustPSQLDriverError::PyToRustValueConversionError( - "Cannot convert your type into Rust type".into(), - )), - } - } -} - -/// Implement `ToSql` trait. 
-/// -/// It allows us to pass `PythonDTO` enum as parameter -/// directly into `.execute()` method in -/// `DatabasePool`, `Connection` and `Transaction`. -impl ToSql for PythonDTO { - /// Answer the question Is this type can be passed into sql? - /// - /// Always True. - fn accepts(_ty: &tokio_postgres::types::Type) -> bool - where - Self: Sized, - { - true - } - - /// Convert our `PythonDTO` enum into bytes. - /// - /// We convert every inner type of `PythonDTO` enum variant - /// into bytes and write them into bytes buffer. - /// - /// # Errors - /// - /// May return Err Result if cannot write bytes into buffer. - #[allow(clippy::too_many_lines)] - fn to_sql( - &self, - ty: &tokio_postgres::types::Type, - out: &mut BytesMut, - ) -> Result> - where - Self: Sized, - { - let mut return_is_null_true: bool = false; - if *self == PythonDTO::PyNone { - return_is_null_true = true; - } - - match self { - PythonDTO::PyNone => {} - PythonDTO::PyCustomType(some_bytes) => { - <&[u8] as ToSql>::to_sql(&some_bytes.as_slice(), ty, out)?; - } - PythonDTO::PyBytes(pybytes) => { - as ToSql>::to_sql(pybytes, ty, out)?; - } - PythonDTO::PyBool(boolean) => types::bool_to_sql(*boolean, out), - PythonDTO::PyVarChar(string) => { - <&str as ToSql>::to_sql(&string.as_str(), ty, out)?; - } - PythonDTO::PyText(string) => { - <&str as ToSql>::to_sql(&string.as_str(), ty, out)?; - } - PythonDTO::PyUUID(pyuuid) => { - ::to_sql(pyuuid, ty, out)?; - } - PythonDTO::PyString(string) => { - <&str as ToSql>::to_sql(&string.as_str(), ty, out)?; - } - PythonDTO::PyIntI16(int) => out.put_i16(*int), - PythonDTO::PyIntI32(int) => out.put_i32(*int), - PythonDTO::PyIntI64(int) | PythonDTO::PyMoney(int) => out.put_i64(*int), - PythonDTO::PyIntU32(int) => out.put_u32(*int), - PythonDTO::PyIntU64(int) => out.put_u64(*int), - PythonDTO::PyFloat32(float) => out.put_f32(*float), - PythonDTO::PyFloat64(float) => out.put_f64(*float), - PythonDTO::PyDate(pydate) => { - <&NaiveDate as ToSql>::to_sql(&pydate, ty, out)?; - } - PythonDTO::PyTime(pytime) => { - <&NaiveTime as ToSql>::to_sql(&pytime, ty, out)?; - } - PythonDTO::PyDateTime(pydatetime_no_tz) => { - <&NaiveDateTime as ToSql>::to_sql(&pydatetime_no_tz, ty, out)?; - } - PythonDTO::PyDateTimeTz(pydatetime_tz) => { - <&DateTime as ToSql>::to_sql(&pydatetime_tz, ty, out)?; - } - PythonDTO::PyInterval(pyinterval) => { - <&Interval as ToSql>::to_sql(&pyinterval, ty, out)?; - } - PythonDTO::PyIpAddress(pyidaddress) => { - <&IpAddr as ToSql>::to_sql(&pyidaddress, ty, out)?; - } - PythonDTO::PyMacAddr6(pymacaddr) => { - <&[u8] as ToSql>::to_sql(&pymacaddr.as_bytes(), ty, out)?; - } - PythonDTO::PyMacAddr8(pymacaddr) => { - <&[u8] as ToSql>::to_sql(&pymacaddr.as_bytes(), ty, out)?; - } - PythonDTO::PyPoint(pypoint) => { - <&RustPoint as ToSql>::to_sql(&&RustPoint::new(*pypoint), ty, out)?; - } - PythonDTO::PyBox(pybox) => { - <&RustRect as ToSql>::to_sql(&&RustRect::new(*pybox), ty, out)?; - } - PythonDTO::PyPath(pypath) => { - <&RustLineString as ToSql>::to_sql(&&RustLineString::new(pypath.clone()), ty, out)?; - } - PythonDTO::PyLine(pyline) => { - <&Line as ToSql>::to_sql(&pyline, ty, out)?; - } - PythonDTO::PyLineSegment(pylinesegment) => { - <&RustLineSegment as ToSql>::to_sql( - &&RustLineSegment::new(*pylinesegment), - ty, - out, - )?; - } - PythonDTO::PyCircle(pycircle) => { - <&Circle as ToSql>::to_sql(&pycircle, ty, out)?; - } - PythonDTO::PyList(py_iterable) | PythonDTO::PyTuple(py_iterable) => { - let mut items = Vec::new(); - for inner in py_iterable { - items.push(inner); - } - 
if items.is_empty() { - return_is_null_true = true; - } else { - items.to_sql(&items[0].array_type()?, out)?; - } - } - PythonDTO::PyArray(array) => { - if let Some(first_elem) = array.iter().nth(0) { - match first_elem.array_type() { - Ok(ok_type) => { - array.to_sql(&ok_type, out)?; - } - Err(_) => { - return Err(RustPSQLDriverError::PyToRustValueConversionError( - "Cannot define array type.".into(), - ))? - } - } - } - } - PythonDTO::PyJsonb(py_dict) | PythonDTO::PyJson(py_dict) => { - <&Value as ToSql>::to_sql(&py_dict, ty, out)?; - } - PythonDTO::PyDecimal(py_decimal) => { - ::to_sql(py_decimal, ty, out)?; - } - PythonDTO::PyBoolArray(array) => { - array.to_sql(&Type::BOOL_ARRAY, out)?; - } - PythonDTO::PyUuidArray(array) => { - array.to_sql(&Type::UUID_ARRAY, out)?; - } - PythonDTO::PyVarCharArray(array) => { - array.to_sql(&Type::VARCHAR_ARRAY, out)?; - } - PythonDTO::PyTextArray(array) => { - array.to_sql(&Type::TEXT_ARRAY, out)?; - } - PythonDTO::PyInt16Array(array) => { - array.to_sql(&Type::INT2_ARRAY, out)?; - } - PythonDTO::PyInt32Array(array) => { - array.to_sql(&Type::INT4_ARRAY, out)?; - } - PythonDTO::PyInt64Array(array) => { - array.to_sql(&Type::INT8_ARRAY, out)?; - } - PythonDTO::PyFloat32Array(array) => { - array.to_sql(&Type::FLOAT4, out)?; - } - PythonDTO::PyFloat64Array(array) => { - array.to_sql(&Type::FLOAT8_ARRAY, out)?; - } - PythonDTO::PyMoneyArray(array) => { - array.to_sql(&Type::MONEY_ARRAY, out)?; - } - PythonDTO::PyIpAddressArray(array) => { - array.to_sql(&Type::INET_ARRAY, out)?; - } - PythonDTO::PyJSONBArray(array) => { - array.to_sql(&Type::JSONB_ARRAY, out)?; - } - PythonDTO::PyJSONArray(array) => { - array.to_sql(&Type::JSON_ARRAY, out)?; - } - PythonDTO::PyDateArray(array) => { - array.to_sql(&Type::DATE_ARRAY, out)?; - } - PythonDTO::PyTimeArray(array) => { - array.to_sql(&Type::TIME_ARRAY, out)?; - } - PythonDTO::PyDateTimeArray(array) => { - array.to_sql(&Type::TIMESTAMP_ARRAY, out)?; - } - PythonDTO::PyDateTimeTZArray(array) => { - array.to_sql(&Type::TIMESTAMPTZ_ARRAY, out)?; - } - PythonDTO::PyMacAddr6Array(array) => { - array.to_sql(&Type::MACADDR_ARRAY, out)?; - } - PythonDTO::PyMacAddr8Array(array) => { - array.to_sql(&Type::MACADDR8_ARRAY, out)?; - } - PythonDTO::PyNumericArray(array) => { - array.to_sql(&Type::NUMERIC_ARRAY, out)?; - } - PythonDTO::PyPointArray(array) => { - array.to_sql(&Type::POINT_ARRAY, out)?; - } - PythonDTO::PyBoxArray(array) => { - array.to_sql(&Type::BOX_ARRAY, out)?; - } - PythonDTO::PyPathArray(array) => { - array.to_sql(&Type::PATH_ARRAY, out)?; - } - PythonDTO::PyLineArray(array) => { - array.to_sql(&Type::LINE_ARRAY, out)?; - } - PythonDTO::PyLsegArray(array) => { - array.to_sql(&Type::LSEG_ARRAY, out)?; - } - PythonDTO::PyCircleArray(array) => { - array.to_sql(&Type::CIRCLE_ARRAY, out)?; - } - PythonDTO::PyIntervalArray(array) => { - array.to_sql(&Type::INTERVAL_ARRAY, out)?; - } - PythonDTO::PyPgVector(vector) => { - ::to_sql(&PgVector::from(vector.clone()), ty, out)?; - } - } - - if return_is_null_true { - Ok(tokio_postgres::types::IsNull::Yes) - } else { - Ok(tokio_postgres::types::IsNull::No) - } - } - - to_sql_checked!(); -} - -fn parse_kwargs_qs(querystring: &str) -> (String, Vec) { - let re = regex::Regex::new(r"\$\(([^)]+)\)p").unwrap(); - - { - let kq_read = KWARGS_QUERYSTRINGS.read().unwrap(); - let qs = kq_read.get(querystring); - - if let Some(qs) = qs { - return qs.clone(); - } - }; - - let mut counter = 0; - let mut sequence = Vec::new(); - - let result = re.replace_all(querystring, |caps: 
®ex::Captures| { - let account_id = caps[1].to_string(); - - sequence.push(account_id.clone()); - counter += 1; - - format!("${}", &counter) - }); - - let mut kq_write = KWARGS_QUERYSTRINGS.write().unwrap(); - kq_write.insert( - querystring.to_string(), - (result.clone().into(), sequence.clone()), - ); - (result.into(), sequence) -} - -pub fn convert_kwargs_parameters<'a>( - kw_params: &Bound<'_, PyMapping>, - querystring: &'a str, -) -> RustPSQLDriverPyResult<(String, Vec)> { - let mut result_vec: Vec = vec![]; - let (changed_string, params_names) = parse_kwargs_qs(querystring); - - for param_name in params_names { - match kw_params.get_item(¶m_name) { - Ok(param) => result_vec.push(py_to_rust(¶m)?), - Err(_) => { - return Err(RustPSQLDriverError::PyToRustValueConversionError( - format!("Cannot find parameter with name <{param_name}> in parameters").into(), - )) - } - } - } - - Ok((changed_string, result_vec)) -} - -pub fn convert_seq_parameters( - seq_params: Vec>, -) -> RustPSQLDriverPyResult> { - let mut result_vec: Vec = vec![]; - Python::with_gil(|gil| { - for parameter in seq_params { - result_vec.push(py_to_rust(parameter.bind(gil))?); - } - Ok::<(), RustPSQLDriverError>(()) - })?; - - Ok(result_vec) -} - -/// Convert parameters come from python. -/// -/// Parameters for `execute()` method can be either -/// a list or a tuple or a set. -/// -/// We parse every parameter from python object and return -/// Vector of out `PythonDTO`. -/// -/// # Errors -/// -/// May return Err Result if can't convert python object. -#[allow(clippy::needless_pass_by_value)] -pub fn convert_parameters_and_qs( - querystring: String, - parameters: Option>, -) -> RustPSQLDriverPyResult<(String, Vec)> { - let Some(parameters) = parameters else { - return Ok((querystring, vec![])); - }; - - let res = Python::with_gil(|gil| { - let params = parameters.extract::>>(gil).map_err(|_| { - RustPSQLDriverError::PyToRustValueConversionError( - "Cannot convert you parameters argument into Rust type, please use List/Tuple" - .into(), - ) - }); - if let Ok(params) = params { - return Ok((querystring, convert_seq_parameters(params)?)); - } - - let kw_params = parameters.downcast_bound::(gil); - if let Ok(kw_params) = kw_params { - return convert_kwargs_parameters(kw_params, &querystring); - } - - Err(RustPSQLDriverError::PyToRustValueConversionError( - "Parameters must be sequence or mapping".into(), - )) - })?; - - Ok(res) -} - -/// Convert Sequence from Python (except String) into flat vec. -/// -/// # Errors -/// May return Err Result if cannot convert element into Rust one. -pub fn py_sequence_into_flat_vec( - parameter: &Bound, -) -> RustPSQLDriverPyResult> { - let py_seq = parameter.downcast::().map_err(|_| { - RustPSQLDriverError::PyToRustValueConversionError( - "PostgreSQL ARRAY type can be made only from python Sequence".into(), - ) - })?; - - let mut final_vec: Vec = vec![]; - - for seq_elem in py_seq.iter()? { - let ok_seq_elem = seq_elem?; - - // Check for the string because it's sequence too, - // and in the most cases it should be array type, not new dimension. - if ok_seq_elem.is_instance_of::() { - final_vec.push(py_to_rust(&ok_seq_elem)?); - continue; - } - - let possible_next_seq = ok_seq_elem.downcast::(); - - if let Ok(next_seq) = possible_next_seq { - let mut next_vec = py_sequence_into_flat_vec(next_seq)?; - final_vec.append(&mut next_vec); - } else { - final_vec.push(py_to_rust(&ok_seq_elem)?); - continue; - } - } - - Ok(final_vec) -} - -/// Convert Sequence from Python into Postgres ARRAY. 
-/// -/// # Errors -/// -/// May return Err Result if cannot convert at least one element. -#[allow(clippy::cast_possible_truncation)] -#[allow(clippy::cast_possible_wrap)] -pub fn py_sequence_into_postgres_array( - parameter: &Bound, -) -> RustPSQLDriverPyResult> { - let mut py_seq = parameter - .downcast::() - .map_err(|_| { - RustPSQLDriverError::PyToRustValueConversionError( - "PostgreSQL ARRAY type can be made only from python Sequence".into(), - ) - })? - .clone(); - - let mut dimensions: Vec = vec![]; - let mut continue_iteration = true; - - while continue_iteration { - dimensions.push(Dimension { - len: py_seq.len()? as i32, - lower_bound: 1, - }); - - let first_seq_elem = py_seq.iter()?.next(); - match first_seq_elem { - Some(first_seq_elem) => { - if let Ok(first_seq_elem) = first_seq_elem { - // Check for the string because it's sequence too, - // and in the most cases it should be array type, not new dimension. - if first_seq_elem.is_instance_of::() { - continue_iteration = false; - continue; - } - let possible_inner_seq = first_seq_elem.downcast::(); - - match possible_inner_seq { - Ok(possible_inner_seq) => { - py_seq = possible_inner_seq.clone(); - } - Err(_) => continue_iteration = false, - } - } - } - None => { - continue_iteration = false; - } - } - } - - let array_data = py_sequence_into_flat_vec(parameter)?; - match postgres_array::Array::from_parts_no_panic(array_data, dimensions) { - Ok(result_array) => Ok(result_array), - Err(err) => Err(RustPSQLDriverError::PyToRustValueConversionError(format!( - "Cannot convert python sequence to PostgreSQL ARRAY, error - {err}" - ))), - } -} - -/// Extract a value from a Python object, raising an error if missing or invalid -/// -/// # Errors -/// This function will return `Err` in the following cases: -/// - The Python object does not have the specified attribute -/// - The attribute exists but cannot be extracted into the specified Rust type -fn extract_value_from_python_object_or_raise<'py, T>( - parameter: &'py pyo3::Bound<'_, PyAny>, - attr_name: &str, -) -> Result -where - T: FromPyObject<'py>, -{ - parameter - .getattr(attr_name) - .ok() - .and_then(|attr| attr.extract::().ok()) - .ok_or_else(|| { - RustPSQLDriverError::PyToRustValueConversionError("Invalid attribute".into()) - }) -} - -/// Extract a timezone-aware datetime from a Python object. -/// This function retrieves various datetime components (`year`, `month`, `day`, etc.) 
-/// from a Python object and constructs a `DateTime` -/// -/// # Errors -/// This function will return `Err` in the following cases: -/// - The Python object does not contain or support one or more required datetime attributes -/// - The retrieved values are invalid for constructing a date, time, or datetime (e.g., invalid month or day) -/// - The timezone information (`tzinfo`) is not available or cannot be parsed -/// - The resulting datetime is ambiguous or invalid (e.g., due to DST transitions) -fn extract_datetime_from_python_object_attrs( - parameter: &pyo3::Bound<'_, PyAny>, -) -> Result, RustPSQLDriverError> { - let year = extract_value_from_python_object_or_raise::(parameter, "year")?; - let month = extract_value_from_python_object_or_raise::(parameter, "month")?; - let day = extract_value_from_python_object_or_raise::(parameter, "day")?; - let hour = extract_value_from_python_object_or_raise::(parameter, "hour")?; - let minute = extract_value_from_python_object_or_raise::(parameter, "minute")?; - let second = extract_value_from_python_object_or_raise::(parameter, "second")?; - let microsecond = extract_value_from_python_object_or_raise::(parameter, "microsecond")?; - - let date = NaiveDate::from_ymd_opt(year, month, day) - .ok_or_else(|| RustPSQLDriverError::PyToRustValueConversionError("Invalid date".into()))?; - let time = NaiveTime::from_hms_micro_opt(hour, minute, second, microsecond) - .ok_or_else(|| RustPSQLDriverError::PyToRustValueConversionError("Invalid time".into()))?; - let naive_datetime = NaiveDateTime::new(date, time); - - let raw_timestamp_tz = parameter - .getattr("tzinfo") - .ok() - .and_then(|tzinfo| tzinfo.getattr("key").ok()) - .and_then(|key| key.extract::().ok()) - .ok_or_else(|| { - RustPSQLDriverError::PyToRustValueConversionError("Invalid timezone info".into()) - })?; - - let fixed_offset_datetime = raw_timestamp_tz - .parse::() - .map_err(|_| { - RustPSQLDriverError::PyToRustValueConversionError("Failed to parse TZ".into()) - })? - .from_local_datetime(&naive_datetime) - .single() - .ok_or_else(|| { - RustPSQLDriverError::PyToRustValueConversionError( - "Ambiguous or invalid datetime".into(), - ) - })? - .fixed_offset(); - - Ok(fixed_offset_datetime) -} - -/// Convert single python parameter to `PythonDTO` enum. -/// -/// # Errors -/// -/// May return Err Result if python type doesn't have support yet -/// or value of the type is incorrect. -#[allow(clippy::too_many_lines)] -pub fn py_to_rust(parameter: &pyo3::Bound<'_, PyAny>) -> RustPSQLDriverPyResult { - if parameter.is_none() { - return Ok(PythonDTO::PyNone); - } - - if parameter.is_instance_of::() { - return Ok(PythonDTO::PyCustomType( - parameter.extract::()?.inner(), - )); - } - - if parameter.is_instance_of::() { - return Ok(PythonDTO::PyBool(parameter.extract::()?)); - } - - if parameter.is_instance_of::() { - return Ok(PythonDTO::PyBytes(parameter.extract::>()?)); - } - - if parameter.is_instance_of::() { - return Ok(PythonDTO::PyText( - parameter.extract::()?.inner(), - )); - } - - if parameter.is_instance_of::() { - return Ok(PythonDTO::PyVarChar( - parameter.extract::()?.inner(), - )); - } - - if parameter.is_instance_of::() { - return Ok(PythonDTO::PyString(parameter.extract::()?)); - } - - if parameter.is_instance_of::() { - return Ok(PythonDTO::PyFloat64(parameter.extract::()?)); - } - - if parameter.is_instance_of::() { - return Ok(PythonDTO::PyFloat32( - parameter - .extract::()? 
- .retrieve_value(), - )); - } - - if parameter.is_instance_of::() { - return Ok(PythonDTO::PyFloat64( - parameter - .extract::()? - .retrieve_value(), - )); - } - - if parameter.is_instance_of::() { - return Ok(PythonDTO::PyIntI16( - parameter - .extract::()? - .retrieve_value(), - )); - } - - if parameter.is_instance_of::() { - return Ok(PythonDTO::PyIntI32( - parameter - .extract::()? - .retrieve_value(), - )); - } - - if parameter.is_instance_of::() { - return Ok(PythonDTO::PyIntI64( - parameter.extract::()?.retrieve_value(), - )); - } - - if parameter.is_instance_of::() { - return Ok(PythonDTO::PyMoney( - parameter.extract::()?.retrieve_value(), - )); - } - - if parameter.is_instance_of::() { - return Ok(PythonDTO::PyIntI32(parameter.extract::()?)); - } - - if parameter.is_instance_of::() { - let timestamp_tz = parameter.extract::>(); - if let Ok(pydatetime_tz) = timestamp_tz { - return Ok(PythonDTO::PyDateTimeTz(pydatetime_tz)); - } - - let timestamp_no_tz = parameter.extract::(); - if let Ok(pydatetime_no_tz) = timestamp_no_tz { - return Ok(PythonDTO::PyDateTime(pydatetime_no_tz)); - } - - let timestamp_tz = extract_datetime_from_python_object_attrs(parameter); - if let Ok(pydatetime_tz) = timestamp_tz { - return Ok(PythonDTO::PyDateTimeTz(pydatetime_tz)); - } - - return Err(RustPSQLDriverError::PyToRustValueConversionError( - "Can not convert you datetime to rust type".into(), - )); - } - - if parameter.is_instance_of::() { - return Ok(PythonDTO::PyDate(parameter.extract::()?)); - } - - if parameter.is_instance_of::() { - return Ok(PythonDTO::PyTime(parameter.extract::()?)); - } - - if parameter.is_instance_of::() { - let duration = parameter.extract::()?; - if let Some(interval) = Interval::from_duration(duration) { - return Ok(PythonDTO::PyInterval(interval)); - } - return Err(RustPSQLDriverError::PyToRustValueConversionError( - "Cannot convert timedelta from Python to inner Rust type.".to_string(), - )); - } - - if parameter.is_instance_of::() | parameter.is_instance_of::() { - return Ok(PythonDTO::PyArray(py_sequence_into_postgres_array( - parameter, - )?)); - } - - if parameter.is_instance_of::() { - let dict = parameter.downcast::().map_err(|error| { - RustPSQLDriverError::PyToRustValueConversionError(format!( - "Can't cast to inner dict: {error}" - )) - })?; - - let mut serde_map: Map = Map::new(); - - for dict_item in dict.items() { - let py_list = dict_item.downcast::().map_err(|error| { - RustPSQLDriverError::PyToRustValueConversionError(format!( - "Cannot cast to list: {error}" - )) - })?; - - let key = py_list.get_item(0)?.extract::()?; - let value = py_to_rust(&py_list.get_item(1)?)?; - - serde_map.insert(key, value.to_serde_value()?); - } - - return Ok(PythonDTO::PyJsonb(Value::Object(serde_map))); - } - - if parameter.is_instance_of::() { - return Ok(PythonDTO::PyJsonb( - parameter.extract::()?.inner().clone(), - )); - } - - if parameter.is_instance_of::() { - return Ok(PythonDTO::PyJson( - parameter.extract::()?.inner().clone(), - )); - } - - if parameter.is_instance_of::() { - return Ok(PythonDTO::PyMacAddr6( - parameter.extract::()?.inner(), - )); - } - - if parameter.is_instance_of::() { - return Ok(PythonDTO::PyMacAddr8( - parameter.extract::()?.inner(), - )); - } - - if parameter.get_type().name()? == "UUID" { - return Ok(PythonDTO::PyUUID(Uuid::parse_str( - parameter.str()?.extract::<&str>()?, - )?)); - } - - if parameter.get_type().name()? == "decimal.Decimal" - || parameter.get_type().name()? 
== "Decimal" - { - return Ok(PythonDTO::PyDecimal(Decimal::from_str_exact( - parameter.str()?.extract::<&str>()?, - )?)); - } - - if parameter.is_instance_of::() { - return Ok(PythonDTO::PyPoint( - parameter.extract::()?.retrieve_value(), - )); - } - - if parameter.is_instance_of::() { - return Ok(PythonDTO::PyBox( - parameter.extract::()?.retrieve_value(), - )); - } - - if parameter.is_instance_of::() { - return Ok(PythonDTO::PyPath( - parameter.extract::()?.retrieve_value(), - )); - } - - if parameter.is_instance_of::() { - return Ok(PythonDTO::PyLine( - parameter.extract::()?.retrieve_value(), - )); - } - - if parameter.is_instance_of::() { - return Ok(PythonDTO::PyLineSegment( - parameter - .extract::()? - .retrieve_value(), - )); - } - - if parameter.is_instance_of::() { - return Ok(PythonDTO::PyCircle( - parameter.extract::()?.retrieve_value(), - )); - } - - if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); - } - - if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); - } - - if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); - } - - if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); - } - - if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); - } - - if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); - } - - if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); - } - - if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); - } - - if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); - } - - if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); - } - - if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); - } - - if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); - } - - if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); - } - - if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); - } - - if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); - } - - if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); - } - - if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); - } - - if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); - } - - if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); - } - - if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); - } - - if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); - } - - if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); - } - - if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); - } - - if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); - } - - if parameter.is_instance_of::() { - return parameter - .extract::()? 
- ._convert_to_python_dto(); - } - - if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); - } - - if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); - } - - if parameter.is_instance_of::() { - return Ok(PythonDTO::PyPgVector( - parameter.extract::()?.inner_value(), - )); - } - - if let Ok(id_address) = parameter.extract::() { - return Ok(PythonDTO::PyIpAddress(id_address)); - } - - // It's used for Enum. - // If StrEnum is used on Python side, - // we simply stop at the `is_instance_of::``. - if let Ok(value_attr) = parameter.getattr("value") { - if let Ok(possible_string) = value_attr.extract::() { - return Ok(PythonDTO::PyString(possible_string)); - } - } - - Err(RustPSQLDriverError::PyToRustValueConversionError(format!( - "Can not covert you type {parameter} into inner one", - ))) -} - -fn composite_field_postgres_to_py<'a, T: FromSql<'a>>( - type_: &Type, - buf: &mut &'a [u8], - is_simple: bool, -) -> RustPSQLDriverPyResult { - if is_simple { - return T::from_sql_nullable(type_, Some(buf)).map_err(|err| { - RustPSQLDriverError::RustToPyValueConversionError(format!( - "Cannot convert PostgreSQL type {type_} into Python type, err: {err}", - )) - }); - } - postgres_types::private::read_value::(type_, buf).map_err(|err| { - RustPSQLDriverError::RustToPyValueConversionError(format!( - "Cannot convert PostgreSQL type {type_} into Python type, err: {err}", - )) - }) -} - -/// Convert Array of `PythonDTO`s to serde `Value`. -/// -/// It can convert multidimensional arrays. -fn pythondto_array_to_serde(array: Option>) -> RustPSQLDriverPyResult { - match array { - Some(array) => inner_pythondto_array_to_serde( - array.dimensions(), - array.iter().collect::>().as_slice(), - 0, - 0, - ), - None => Ok(Value::Null), - } -} - -/// Inner conversion array of `PythonDTO`s to serde `Value`. -#[allow(clippy::cast_sign_loss)] -fn inner_pythondto_array_to_serde( - dimensions: &[Dimension], - data: &[&PythonDTO], - dimension_index: usize, - mut lower_bound: usize, -) -> RustPSQLDriverPyResult { - let current_dimension = dimensions.get(dimension_index); - - if let Some(current_dimension) = current_dimension { - let possible_next_dimension = dimensions.get(dimension_index + 1); - match possible_next_dimension { - Some(next_dimension) => { - let mut final_list: Value = Value::Array(vec![]); - - for _ in 0..current_dimension.len as usize { - if dimensions.get(dimension_index + 1).is_some() { - let inner_pylist = inner_pythondto_array_to_serde( - dimensions, - &data[lower_bound..next_dimension.len as usize + lower_bound], - dimension_index + 1, - 0, - )?; - match final_list { - Value::Array(ref mut array) => array.push(inner_pylist), - _ => unreachable!(), - } - lower_bound += next_dimension.len as usize; - }; - } - - return Ok(final_list); - } - None => { - return data.iter().map(|x| x.to_serde_value()).collect(); - } - } - } - - Ok(Value::Array(vec![])) -} - -/// Convert rust array to python list. -/// -/// It can convert multidimensional arrays. -fn postgres_array_to_py( - py: Python<'_>, - array: Option>, -) -> Option> { - array.map(|array| { - inner_postgres_array_to_py( - py, - array.dimensions(), - array.iter().collect::>().as_slice(), - 0, - 0, - ) - }) -} - -/// Inner postgres array conversion to python list. 
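// Illustrative sketch, not part of the patch: how a flat PostgreSQL ARRAY payload plus its
// per-dimension lengths is turned back into a nested structure, mirroring what
// `inner_postgres_array_to_py` / `inner_pythondto_array_to_serde` above do via pyo3/serde.
// `Nested` and `rebuild` are hypothetical helpers used only for this demonstration.
#[derive(Debug, PartialEq)]
enum Nested {
    Leaf(i64),
    List(Vec<Nested>),
}

fn rebuild(dims: &[usize], data: &[i64]) -> Nested {
    match dims {
        // Innermost (or missing) dimension: the remaining slice maps 1:1 onto leaves.
        [_] | [] => Nested::List(data.iter().copied().map(Nested::Leaf).collect()),
        // Outer dimension: split the flat data into equally sized chunks and recurse.
        [len, rest @ ..] => {
            let chunk = data.len() / *len;
            Nested::List(data.chunks(chunk).map(|part| rebuild(rest, part)).collect())
        }
    }
}

fn main() {
    // A 2x3 array stored flat, as the wire format delivers it.
    let flat = [1, 2, 3, 4, 5, 6];
    let nested = rebuild(&[2, 3], &flat);
    assert_eq!(
        nested,
        Nested::List(vec![
            Nested::List(vec![Nested::Leaf(1), Nested::Leaf(2), Nested::Leaf(3)]),
            Nested::List(vec![Nested::Leaf(4), Nested::Leaf(5), Nested::Leaf(6)]),
        ])
    );
}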
-#[allow(clippy::cast_sign_loss)] -fn inner_postgres_array_to_py( - py: Python<'_>, - dimensions: &[Dimension], - data: &[T], - dimension_index: usize, - mut lower_bound: usize, -) -> Py -where - T: ToPyObject, -{ - let current_dimension = dimensions.get(dimension_index); - - if let Some(current_dimension) = current_dimension { - let possible_next_dimension = dimensions.get(dimension_index + 1); - match possible_next_dimension { - Some(next_dimension) => { - let final_list = PyList::empty_bound(py); - - for _ in 0..current_dimension.len as usize { - if dimensions.get(dimension_index + 1).is_some() { - let inner_pylist = inner_postgres_array_to_py( - py, - dimensions, - &data[lower_bound..next_dimension.len as usize + lower_bound], - dimension_index + 1, - 0, - ); - final_list.append(inner_pylist).unwrap(); - lower_bound += next_dimension.len as usize; - }; - } - - return final_list.unbind(); - } - None => { - return PyList::new_bound(py, data).unbind(); - } - } - } - - PyList::empty_bound(py).unbind() -} - -#[allow(clippy::too_many_lines)] -fn postgres_bytes_to_py( - py: Python<'_>, - type_: &Type, - buf: &mut &[u8], - is_simple: bool, -) -> RustPSQLDriverPyResult> { - match *type_ { - // ---------- Bytes Types ---------- - // Convert BYTEA type into Vector, then into PyBytes - Type::BYTEA => { - let vec_of_bytes = - composite_field_postgres_to_py::>>(type_, buf, is_simple)?; - if let Some(vec_of_bytes) = vec_of_bytes { - return Ok(PyBytes::new_bound(py, &vec_of_bytes).to_object(py)); - } - Ok(py.None()) - } - // // ---------- String Types ---------- - // // Convert TEXT and VARCHAR type into String, then into str - Type::TEXT | Type::VARCHAR | Type::XML => Ok(composite_field_postgres_to_py::< - Option, - >(type_, buf, is_simple)? - .to_object(py)), - // ---------- Boolean Types ---------- - // Convert BOOL type into bool - Type::BOOL => Ok( - composite_field_postgres_to_py::>(type_, buf, is_simple)?.to_object(py), - ), - // ---------- Number Types ---------- - // Convert SmallInt into i16, then into int - Type::INT2 => { - Ok(composite_field_postgres_to_py::>(type_, buf, is_simple)?.to_object(py)) - } - // Convert Integer into i32, then into int - Type::INT4 => { - Ok(composite_field_postgres_to_py::>(type_, buf, is_simple)?.to_object(py)) - } - // Convert BigInt into i64, then into int - Type::INT8 | Type::MONEY => { - Ok(composite_field_postgres_to_py::>(type_, buf, is_simple)?.to_object(py)) - } - // Convert REAL into f32, then into float - Type::FLOAT4 => { - Ok(composite_field_postgres_to_py::>(type_, buf, is_simple)?.to_object(py)) - } - // Convert DOUBLE PRECISION into f64, then into float - Type::FLOAT8 => { - Ok(composite_field_postgres_to_py::>(type_, buf, is_simple)?.to_object(py)) - } - // ---------- Date Types ---------- - // Convert DATE into NaiveDate, then into datetime.date - Type::DATE => Ok(composite_field_postgres_to_py::>( - type_, buf, is_simple, - )? - .to_object(py)), - // Convert Time into NaiveTime, then into datetime.time - Type::TIME => Ok(composite_field_postgres_to_py::>( - type_, buf, is_simple, - )? - .to_object(py)), - // Convert TIMESTAMP into NaiveDateTime, then into datetime.datetime - Type::TIMESTAMP => Ok(composite_field_postgres_to_py::>( - type_, buf, is_simple, - )? - .to_object(py)), - // Convert TIMESTAMP into NaiveDateTime, then into datetime.datetime - Type::TIMESTAMPTZ => Ok( - composite_field_postgres_to_py::>>(type_, buf, is_simple)? 
- .to_object(py), - ), - // ---------- UUID Types ---------- - // Convert UUID into Uuid type, then into String if possible - Type::UUID => { - let rust_uuid = composite_field_postgres_to_py::>(type_, buf, is_simple)?; - match rust_uuid { - Some(rust_uuid) => { - return Ok(PyString::new_bound(py, &rust_uuid.to_string()).to_object(py)) - } - None => Ok(py.None()), - } - } - // ---------- IpAddress Types ---------- - Type::INET => Ok( - composite_field_postgres_to_py::>(type_, buf, is_simple)?.to_object(py), - ), - // Convert JSON/JSONB into Serde Value, then into list or dict - Type::JSONB | Type::JSON => { - let db_json = composite_field_postgres_to_py::>(type_, buf, is_simple)?; - - match db_json { - Some(value) => Ok(build_python_from_serde_value(py, value)?), - None => Ok(py.None().to_object(py)), - } - } - // Convert MACADDR into inner type for macaddr6, then into str - Type::MACADDR => { - let macaddr_ = - composite_field_postgres_to_py::>(type_, buf, is_simple)?; - if let Some(macaddr_) = macaddr_ { - Ok(macaddr_.inner().to_string().to_object(py)) - } else { - Ok(py.None().to_object(py)) - } - } - Type::MACADDR8 => { - let macaddr_ = - composite_field_postgres_to_py::>(type_, buf, is_simple)?; - if let Some(macaddr_) = macaddr_ { - Ok(macaddr_.inner().to_string().to_object(py)) - } else { - Ok(py.None().to_object(py)) - } - } - Type::NUMERIC => { - if let Some(numeric_) = - composite_field_postgres_to_py::>(type_, buf, is_simple)? - { - return Ok(InnerDecimal(numeric_).to_object(py)); - } - Ok(py.None().to_object(py)) - } - // ---------- Geo Types ---------- - Type::POINT => { - let point_ = - composite_field_postgres_to_py::>(type_, buf, is_simple)?; - - match point_ { - Some(point_) => Ok(point_.into_py(py)), - None => Ok(py.None().to_object(py)), - } - } - Type::BOX => { - let box_ = composite_field_postgres_to_py::>(type_, buf, is_simple)?; - - match box_ { - Some(box_) => Ok(box_.into_py(py)), - None => Ok(py.None().to_object(py)), - } - } - Type::PATH => { - let path_ = - composite_field_postgres_to_py::>(type_, buf, is_simple)?; - - match path_ { - Some(path_) => Ok(path_.into_py(py)), - None => Ok(py.None().to_object(py)), - } - } - Type::LINE => { - let line_ = composite_field_postgres_to_py::>(type_, buf, is_simple)?; - - match line_ { - Some(line_) => Ok(line_.into_py(py)), - None => Ok(py.None().to_object(py)), - } - } - Type::LSEG => { - let lseg_ = - composite_field_postgres_to_py::>(type_, buf, is_simple)?; - - match lseg_ { - Some(lseg_) => Ok(lseg_.into_py(py)), - None => Ok(py.None().to_object(py)), - } - } - Type::CIRCLE => { - let circle_ = composite_field_postgres_to_py::>(type_, buf, is_simple)?; - - match circle_ { - Some(circle_) => Ok(circle_.into_py(py)), - None => Ok(py.None().to_object(py)), - } - } - Type::INTERVAL => { - let interval = - composite_field_postgres_to_py::>(type_, buf, is_simple)?; - if let Some(interval) = interval { - return Ok(InnerInterval(interval).to_object(py)); - } - Ok(py.None()) - } - // ---------- Array Text Types ---------- - Type::BOOL_ARRAY => Ok(postgres_array_to_py( - py, - composite_field_postgres_to_py::>>(type_, buf, is_simple)?, - ) - .to_object(py)), - // Convert ARRAY of TEXT or VARCHAR into Vec, then into list[str] - Type::TEXT_ARRAY | Type::VARCHAR_ARRAY | Type::XML_ARRAY => Ok(postgres_array_to_py( - py, - composite_field_postgres_to_py::>>(type_, buf, is_simple)?, - ) - .to_object(py)), - // ---------- Array Integer Types ---------- - // Convert ARRAY of SmallInt into Vec, then into list[int] - Type::INT2_ARRAY => 
Ok(postgres_array_to_py( - py, - composite_field_postgres_to_py::>>(type_, buf, is_simple)?, - ) - .to_object(py)), - // Convert ARRAY of Integer into Vec, then into list[int] - Type::INT4_ARRAY => Ok(postgres_array_to_py( - py, - composite_field_postgres_to_py::>>(type_, buf, is_simple)?, - ) - .to_object(py)), - // Convert ARRAY of BigInt into Vec, then into list[int] - Type::INT8_ARRAY | Type::MONEY_ARRAY => Ok(postgres_array_to_py( - py, - composite_field_postgres_to_py::>>(type_, buf, is_simple)?, - ) - .to_object(py)), - // Convert ARRAY of Float4 into Vec, then into list[float] - Type::FLOAT4_ARRAY => Ok(postgres_array_to_py( - py, - composite_field_postgres_to_py::>>(type_, buf, is_simple)?, - ) - .to_object(py)), - // Convert ARRAY of Float8 into Vec, then into list[float] - Type::FLOAT8_ARRAY => Ok(postgres_array_to_py( - py, - composite_field_postgres_to_py::>>(type_, buf, is_simple)?, - ) - .to_object(py)), - // Convert ARRAY of Date into Vec, then into list[datetime.date] - Type::DATE_ARRAY => Ok(postgres_array_to_py( - py, - composite_field_postgres_to_py::>>(type_, buf, is_simple)?, - ) - .to_object(py)), - // Convert ARRAY of Time into Vec, then into list[datetime.date] - Type::TIME_ARRAY => Ok(postgres_array_to_py( - py, - composite_field_postgres_to_py::>>(type_, buf, is_simple)?, - ) - .to_object(py)), - // Convert ARRAY of TIMESTAMP into Vec, then into list[datetime.date] - Type::TIMESTAMP_ARRAY => Ok(postgres_array_to_py( - py, - composite_field_postgres_to_py::>>(type_, buf, is_simple)?, - ) - .to_object(py)), - // Convert ARRAY of TIMESTAMPTZ into Vec>, then into list[datetime.date] - Type::TIMESTAMPTZ_ARRAY => Ok(postgres_array_to_py( - py, - composite_field_postgres_to_py::>>>( - type_, buf, is_simple, - )?, - ) - .to_object(py)), - // Convert ARRAY of UUID into Vec>, then into list[UUID] - Type::UUID_ARRAY => { - let uuid_array = composite_field_postgres_to_py::>>( - type_, buf, is_simple, - )?; - Ok(postgres_array_to_py(py, uuid_array).to_object(py)) - } - // Convert ARRAY of INET into Vec, then into list[IPv4Address | IPv6Address] - Type::INET_ARRAY => Ok(postgres_array_to_py( - py, - composite_field_postgres_to_py::>>(type_, buf, is_simple)?, - ) - .to_object(py)), - Type::JSONB_ARRAY | Type::JSON_ARRAY => { - let db_json_array = composite_field_postgres_to_py::>>( - type_, buf, is_simple, - )?; - Ok(postgres_array_to_py(py, db_json_array).to_object(py)) - } - Type::NUMERIC_ARRAY => Ok(postgres_array_to_py( - py, - composite_field_postgres_to_py::>>(type_, buf, is_simple)?, - ) - .to_object(py)), - // ---------- Array Geo Types ---------- - Type::POINT_ARRAY => { - let point_array_ = - composite_field_postgres_to_py::>>(type_, buf, is_simple)?; - - Ok(postgres_array_to_py(py, point_array_).to_object(py)) - } - Type::BOX_ARRAY => { - let box_array_ = - composite_field_postgres_to_py::>>(type_, buf, is_simple)?; - - Ok(postgres_array_to_py(py, box_array_).to_object(py)) - } - Type::PATH_ARRAY => { - let path_array_ = composite_field_postgres_to_py::>>( - type_, buf, is_simple, - )?; - - Ok(postgres_array_to_py(py, path_array_).to_object(py)) - } - Type::LINE_ARRAY => { - let line_array_ = - composite_field_postgres_to_py::>>(type_, buf, is_simple)?; - - Ok(postgres_array_to_py(py, line_array_).to_object(py)) - } - Type::LSEG_ARRAY => { - let lseg_array_ = composite_field_postgres_to_py::>>( - type_, buf, is_simple, - )?; - - Ok(postgres_array_to_py(py, lseg_array_).to_object(py)) - } - Type::CIRCLE_ARRAY => { - let circle_array_ = - 
composite_field_postgres_to_py::>>(type_, buf, is_simple)?; - - Ok(postgres_array_to_py(py, circle_array_).to_object(py)) - } - Type::INTERVAL_ARRAY => { - let interval_array_ = composite_field_postgres_to_py::>>( - type_, buf, is_simple, - )?; - - Ok(postgres_array_to_py(py, interval_array_).to_object(py)) - } - _ => other_postgres_bytes_to_py(py, type_, buf, is_simple), - } -} - -/// Convert OTHER type to python. -/// -/// # Errors -/// May return result if type is unknown. -pub fn other_postgres_bytes_to_py( - py: Python<'_>, - type_: &Type, - buf: &mut &[u8], - is_simple: bool, -) -> RustPSQLDriverPyResult> { - if type_.name() == "vector" { - let vector = composite_field_postgres_to_py::>(type_, buf, is_simple)?; - match vector { - Some(real_vector) => { - return Ok(real_vector.to_vec().to_object(py)); - } - None => return Ok(py.None()), - } - } - - Err(RustPSQLDriverError::RustToPyValueConversionError( - format!("Cannot convert {type_} into Python type, please look at the custom_decoders functionality.") - )) -} - -/// Convert composite type from `PostgreSQL` to Python type. -/// -/// # Errors -/// May return error if there is any problem with bytes. -#[allow(clippy::cast_sign_loss)] -pub fn composite_postgres_to_py( - py: Python<'_>, - fields: &Vec, - buf: &mut &[u8], - custom_decoders: &Option>, -) -> RustPSQLDriverPyResult> { - let result_py_dict: Bound<'_, PyDict> = PyDict::new_bound(py); - - let num_fields = postgres_types::private::read_be_i32(buf).map_err(|err| { - RustPSQLDriverError::RustToPyValueConversionError(format!( - "Cannot read bytes data from PostgreSQL: {err}" - )) - })?; - if num_fields as usize != fields.len() { - return Err(RustPSQLDriverError::RustToPyValueConversionError(format!( - "invalid field count: {} vs {}", - num_fields, - fields.len() - ))); - } - - for field in fields { - let oid = postgres_types::private::read_be_i32(buf).map_err(|err| { - RustPSQLDriverError::RustToPyValueConversionError(format!( - "Cannot read bytes data from PostgreSQL: {err}" - )) - })? as u32; - - if oid != field.type_().oid() { - return Err(RustPSQLDriverError::RustToPyValueConversionError( - "unexpected OID".into(), - )); - } - - match field.type_().kind() { - Kind::Simple | Kind::Array(_) => { - result_py_dict.set_item( - field.name(), - postgres_bytes_to_py(py, field.type_(), buf, false)?.to_object(py), - )?; - } - Kind::Enum(_) => { - result_py_dict.set_item( - field.name(), - postgres_bytes_to_py(py, &Type::VARCHAR, buf, false)?.to_object(py), - )?; - } - _ => { - let (_, tail) = buf.split_at(4_usize); - *buf = tail; - result_py_dict.set_item( - field.name(), - raw_bytes_data_process(py, buf, field.name(), field.type_(), custom_decoders)? - .to_object(py), - )?; - } - } - } - - Ok(result_py_dict.to_object(py)) -} - -/// Process raw bytes from `PostgreSQL`. -/// -/// # Errors -/// -/// May return Err Result if cannot convert postgres -/// type into rust one. -pub fn raw_bytes_data_process( - py: Python<'_>, - raw_bytes_data: &mut &[u8], - column_name: &str, - column_type: &Type, - custom_decoders: &Option>, -) -> RustPSQLDriverPyResult> { - if let Some(custom_decoders) = custom_decoders { - let py_encoder_func = custom_decoders - .bind(py) - .get_item(column_name.to_lowercase()); - - if let Ok(Some(py_encoder_func)) = py_encoder_func { - return Ok(py_encoder_func - .call((raw_bytes_data.to_vec(),), None)? 
- .unbind()); - } - } - - match column_type.kind() { - Kind::Simple | Kind::Array(_) => { - postgres_bytes_to_py(py, column_type, raw_bytes_data, true) - } - Kind::Composite(fields) => { - composite_postgres_to_py(py, fields, raw_bytes_data, custom_decoders) - } - Kind::Enum(_) => postgres_bytes_to_py(py, &Type::VARCHAR, raw_bytes_data, true), - _ => Err(RustPSQLDriverError::RustToPyValueConversionError( - column_type.to_string(), - )), - } -} - -/// Convert type from postgres to python type. -/// -/// # Errors -/// -/// May return Err Result if cannot convert postgres -/// type into rust one. -pub fn postgres_to_py( - py: Python<'_>, - row: &Row, - column: &Column, - column_i: usize, - custom_decoders: &Option>, -) -> RustPSQLDriverPyResult> { - let raw_bytes_data = row.col_buffer(column_i); - if let Some(mut raw_bytes_data) = raw_bytes_data { - return raw_bytes_data_process( - py, - &mut raw_bytes_data, - column.name(), - column.type_(), - custom_decoders, - ); - } - Ok(py.None()) -} - -/// Convert python List of Dict type or just Dict into serde `Value`. -/// -/// # Errors -/// May return error if cannot convert Python type into Rust one. -#[allow(clippy::needless_pass_by_value)] -pub fn build_serde_value(value: Py) -> RustPSQLDriverPyResult { - Python::with_gil(|gil| { - let bind_value = value.bind(gil); - if bind_value.is_instance_of::() { - let mut result_vec: Vec = vec![]; - - let params = bind_value.extract::>>()?; - - for inner in params { - let inner_bind = inner.bind(gil); - if inner_bind.is_instance_of::() { - let python_dto = py_to_rust(inner_bind)?; - result_vec.push(python_dto.to_serde_value()?); - } else if inner_bind.is_instance_of::() { - let serde_value = build_serde_value(inner)?; - result_vec.push(serde_value); - } else { - return Err(RustPSQLDriverError::PyToRustValueConversionError( - "PyJSON must have dicts.".to_string(), - )); - } - } - Ok(json!(result_vec)) - } else if bind_value.is_instance_of::() { - return py_to_rust(bind_value)?.to_serde_value(); - } else { - return Err(RustPSQLDriverError::PyToRustValueConversionError( - "PyJSON must be dict value.".to_string(), - )); - } - }) -} - -/// Convert serde `Value` into Python object. -/// # Errors -/// May return Err Result if cannot add new value to Python Dict. -pub fn build_python_from_serde_value( - py: Python<'_>, - value: Value, -) -> RustPSQLDriverPyResult> { - match value { - Value::Array(massive) => { - let mut result_vec: Vec> = vec![]; - - for single_record in massive { - result_vec.push(build_python_from_serde_value(py, single_record)?); - } - - Ok(result_vec.to_object(py)) - } - Value::Object(mapping) => { - let py_dict = PyDict::new_bound(py); - - for (key, value) in mapping { - py_dict.set_item( - build_python_from_serde_value(py, Value::String(key))?, - build_python_from_serde_value(py, value)?, - )?; - } - - Ok(py_dict.to_object(py)) - } - Value::Bool(boolean) => Ok(boolean.to_object(py)), - Value::Number(number) => { - if number.is_f64() { - Ok(number.as_f64().to_object(py)) - } else if number.is_i64() { - Ok(number.as_i64().to_object(py)) - } else { - Ok(number.as_u64().to_object(py)) - } - } - Value::String(string) => Ok(string.to_object(py)), - Value::Null => Ok(py.None()), - } -} - -/// Convert Python sequence to Rust vector. -/// Also it checks that sequence has set/list/tuple type. -/// -/// # Errors -/// -/// May return error if cannot convert Python type into Rust one. -/// May return error if parameters type isn't correct. 
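// Illustrative sketch, not part of the patch: the binary layout that
// `composite_postgres_to_py` above walks. A composite (ROW) value arrives as a big-endian
// i32 field count, then for every field an i32 OID and an i32 byte length followed by the
// raw field bytes (-1 marks SQL NULL). The std-only helpers `read_be_i32` and
// `walk_composite` are hypothetical stand-ins; the real code delegates decoding of the
// field bytes to the per-type converters.
fn read_be_i32(buf: &mut &[u8]) -> Option<i32> {
    if buf.len() < 4 {
        return None;
    }
    let (head, tail) = buf.split_at(4);
    *buf = tail;
    Some(i32::from_be_bytes(head.try_into().ok()?))
}

fn walk_composite(mut buf: &[u8]) -> Option<Vec<(u32, Vec<u8>)>> {
    let num_fields = read_be_i32(&mut buf)?;
    if num_fields < 0 {
        return None;
    }
    let mut fields = Vec::with_capacity(num_fields as usize);
    for _ in 0..num_fields {
        let oid = read_be_i32(&mut buf)? as u32;
        let len = read_be_i32(&mut buf)?;
        // A length of -1 marks SQL NULL; otherwise `len` bytes of field data follow.
        let data = if len < 0 {
            Vec::new()
        } else {
            let len = len as usize;
            if buf.len() < len {
                return None;
            }
            let (head, tail) = buf.split_at(len);
            buf = tail;
            head.to_vec()
        };
        fields.push((oid, data));
    }
    Some(fields)
}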
-fn py_sequence_to_rust(bind_parameters: &Bound) -> RustPSQLDriverPyResult>> { - let mut coord_values_sequence_vec: Vec> = vec![]; - - if bind_parameters.is_instance_of::() { - let bind_pyset_parameters = bind_parameters.downcast::().unwrap(); - - for one_parameter in bind_pyset_parameters { - let extracted_parameter = one_parameter.extract::>().map_err(|_| { - RustPSQLDriverError::PyToRustValueConversionError( - format!("Error on sequence type extraction, please use correct list/tuple/set, {bind_parameters}") - ) - })?; - coord_values_sequence_vec.push(extracted_parameter); - } - } else if bind_parameters.is_instance_of::() - | bind_parameters.is_instance_of::() - { - coord_values_sequence_vec = bind_parameters.extract::>>().map_err(|_| { - RustPSQLDriverError::PyToRustValueConversionError( - format!("Error on sequence type extraction, please use correct list/tuple/set, {bind_parameters}") - ) - })?; - } else { - return Err(RustPSQLDriverError::PyToRustValueConversionError(format!( - "Invalid sequence type, please use list/tuple/set, {bind_parameters}" - ))); - }; - - Ok::>, RustPSQLDriverError>(coord_values_sequence_vec) -} - -/// Convert two python parameters(x and y) to Coord from `geo_type`. -/// Also it checks that passed values is int or float. -/// -/// # Errors -/// -/// May return error if cannot convert Python type into Rust one. -/// May return error if parameters type isn't correct. -fn convert_py_to_rust_coord_values(parameters: Vec>) -> RustPSQLDriverPyResult> { - Python::with_gil(|gil| { - let mut coord_values_vec: Vec = vec![]; - - for one_parameter in parameters { - let parameter_bind = one_parameter.bind(gil); - - if !parameter_bind.is_instance_of::() - & !parameter_bind.is_instance_of::() - { - return Err(RustPSQLDriverError::PyToRustValueConversionError( - "Incorrect types of coordinate values. It must be int or float".into(), - )); - } - - let python_dto = py_to_rust(parameter_bind)?; - match python_dto { - PythonDTO::PyIntI16(pyint) => coord_values_vec.push(f64::from(pyint)), - PythonDTO::PyIntI32(pyint) => coord_values_vec.push(f64::from(pyint)), - PythonDTO::PyIntU32(pyint) => coord_values_vec.push(f64::from(pyint)), - PythonDTO::PyFloat32(pyfloat) => coord_values_vec.push(f64::from(pyfloat)), - PythonDTO::PyFloat64(pyfloat) => coord_values_vec.push(pyfloat), - PythonDTO::PyIntI64(_) | PythonDTO::PyIntU64(_) => { - return Err(RustPSQLDriverError::PyToRustValueConversionError( - "Not implemented this type yet".into(), - )) - } - _ => { - return Err(RustPSQLDriverError::PyToRustValueConversionError( - "Incorrect types of coordinate values. It must be int or float".into(), - )) - } - }; - } - - Ok::, RustPSQLDriverError>(coord_values_vec) - }) -} - -/// Convert Python values with coordinates into vector of Coord's for building Geo types later. -/// -/// Passed parameter can be either a list or a tuple or a set. -/// Inside this parameter may be multiple list/tuple/set with int/float or only int/float values flat. -/// We parse every parameter from python object and make from them Coord's. -/// Additionally it checks for correct length of coordinates parsed from Python values. -/// -/// # Errors -/// -/// May return error if cannot convert Python type into Rust one. -/// May return error if parsed number of coordinates is not expected by allowed length. 
-#[allow(clippy::needless_pass_by_value)] -pub fn build_geo_coords( - py_parameters: Py, - allowed_length_option: Option, -) -> RustPSQLDriverPyResult> { - let mut result_vec: Vec = vec![]; - - result_vec = Python::with_gil(|gil| { - let bind_py_parameters = py_parameters.bind(gil); - let parameters = py_sequence_to_rust(bind_py_parameters)?; - - let first_inner_bind_py_parameters = parameters[0].bind(gil); - if first_inner_bind_py_parameters.is_instance_of::() - | first_inner_bind_py_parameters.is_instance_of::() - { - if parameters.len() % 2 != 0 { - return Err(RustPSQLDriverError::PyToRustValueConversionError( - "Length of coordinates that passed in flat structure must be a multiple of 2" - .into(), - )); - } - - for (pair_first_inner, pair_second_inner) in parameters.into_iter().tuples() { - let coord_values = - convert_py_to_rust_coord_values(vec![pair_first_inner, pair_second_inner])?; - result_vec.push(coord! {x: coord_values[0], y: coord_values[1]}); - } - } else if first_inner_bind_py_parameters.is_instance_of::() - | first_inner_bind_py_parameters.is_instance_of::() - | first_inner_bind_py_parameters.is_instance_of::() - { - for pair_inner_parameters in parameters { - let bind_pair_inner_parameters = pair_inner_parameters.bind(gil); - let pair_py_inner_parameters = py_sequence_to_rust(bind_pair_inner_parameters)?; - - if pair_py_inner_parameters.len() != 2 { - return Err(RustPSQLDriverError::PyToRustValueConversionError( - "Inner parameters must be pair(list/tuple/set) of int/float values".into(), - )); - } - - let coord_values = convert_py_to_rust_coord_values(pair_py_inner_parameters)?; - result_vec.push(coord! {x: coord_values[0], y: coord_values[1]}); - } - } else { - return Err(RustPSQLDriverError::PyToRustValueConversionError( - "Inner coordinates must be passed as pairs of int/float in list/tuple/set or as flat structure with int/float values".into(), - )); - }; - Ok::, RustPSQLDriverError>(result_vec) - })?; - - let number_of_coords = result_vec.len(); - let allowed_length = allowed_length_option.unwrap_or_default(); - - if (allowed_length != 0) & (number_of_coords != allowed_length) { - return Err(RustPSQLDriverError::PyToRustValueConversionError(format!( - "Invalid number of coordinates for this geo type, allowed {allowed_length}, got: {number_of_coords}" - ))); - } - - Ok(result_vec) -} - -/// Convert flat Python values with coordinates into vector of Geo values for building Geo types later. -/// -/// Passed parameter can be either a list or a tuple or a set with elements. -/// We parse every parameter from python object and prepare them for making geo type. -/// Additionally it checks for correct length of coordinates parsed from Python values. -/// -/// # Errors -/// -/// May return error if cannot convert Python type into Rust one. -/// May return error if parsed number of coordinates is not expected by allowed length. 
-#[allow(clippy::needless_pass_by_value)] -pub fn build_flat_geo_coords( - py_parameters: Py, - allowed_length_option: Option, -) -> RustPSQLDriverPyResult> { - Python::with_gil(|gil| { - let allowed_length = allowed_length_option.unwrap_or_default(); - - let bind_py_parameters = py_parameters.bind(gil); - let parameters = py_sequence_to_rust(bind_py_parameters)?; - let parameters_length = parameters.len(); - - if (allowed_length != 0) & (parameters.len() != allowed_length) { - return Err(RustPSQLDriverError::PyToRustValueConversionError(format!( - "Invalid number of values for this geo type, allowed {allowed_length}, got: {parameters_length}" - ))); - }; - - let result_vec = convert_py_to_rust_coord_values(parameters)?; - - let number_of_coords = result_vec.len(); - if (allowed_length != 0) & (number_of_coords != allowed_length) { - return Err(RustPSQLDriverError::PyToRustValueConversionError(format!( - "Invalid number of values for this geo type, allowed {allowed_length}, got: {parameters_length}" - ))); - }; - - Ok::, RustPSQLDriverError>(result_vec) - }) -} diff --git a/src/value_converter/consts.rs b/src/value_converter/consts.rs new file mode 100644 index 00000000..40fa932b --- /dev/null +++ b/src/value_converter/consts.rs @@ -0,0 +1,37 @@ +use once_cell::sync::Lazy; +use postgres_types::ToSql; +use std::{collections::HashMap, sync::RwLock}; + +use pyo3::{ + sync::GILOnceCell, + types::{PyAnyMethods, PyType}, + Bound, Py, PyResult, Python, +}; + +pub static DECIMAL_CLS: GILOnceCell> = GILOnceCell::new(); +pub static TIMEDELTA_CLS: GILOnceCell> = GILOnceCell::new(); +pub static KWARGS_QUERYSTRINGS: Lazy)>>> = + Lazy::new(|| RwLock::new(Default::default())); + +pub fn get_decimal_cls(py: Python<'_>) -> PyResult<&Bound<'_, PyType>> { + DECIMAL_CLS + .get_or_try_init(py, || { + let type_object = py.import("decimal")?.getattr("Decimal")?.downcast_into()?; + Ok(type_object.unbind()) + }) + .map(|ty| ty.bind(py)) +} + +pub fn get_timedelta_cls(py: Python<'_>) -> PyResult<&Bound<'_, PyType>> { + TIMEDELTA_CLS + .get_or_try_init(py, || { + let type_object = py + .import("datetime")? + .getattr("timedelta")? 
+ .downcast_into()?; + Ok(type_object.unbind()) + }) + .map(|ty| ty.bind(py)) +} + +pub type QueryParameter = (dyn ToSql + Sync); diff --git a/src/value_converter/funcs/from_python.rs b/src/value_converter/funcs/from_python.rs new file mode 100644 index 00000000..48fd0a4e --- /dev/null +++ b/src/value_converter/funcs/from_python.rs @@ -0,0 +1,985 @@ +use chrono::{self, DateTime, FixedOffset, NaiveDate, NaiveDateTime, NaiveTime, TimeZone}; +use chrono_tz::Tz; +use geo_types::{coord, Coord}; +use itertools::Itertools; +use pg_interval::Interval; +use postgres_array::{Array, Dimension}; +use postgres_types::{Field, FromSql, Kind, Type}; +use rust_decimal::Decimal; +use serde_json::{json, Map, Value}; +use std::net::IpAddr; +use tokio_postgres::{Column, Row}; +use uuid::Uuid; + +use pyo3::{ + types::{ + PyAnyMethods, PyBool, PyBytes, PyDate, PyDateTime, PyDelta, PyDict, PyDictMethods, PyFloat, + PyInt, PyList, PyListMethods, PyMapping, PySequence, PySet, PyString, PyTime, PyTuple, + PyTypeMethods, + }, + Bound, FromPyObject, IntoPy, Py, PyAny, Python, ToPyObject, +}; + +use crate::{ + additional_types::{ + Circle, Line, RustLineSegment, RustLineString, RustMacAddr6, RustMacAddr8, RustPoint, + RustRect, + }, + exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, + extra_types::{self}, + value_converter::{consts::KWARGS_QUERYSTRINGS, models::dto::PythonDTO}, +}; + +use pgvector::Vector as PgVector; + +/// Convert single python parameter to `PythonDTO` enum. +/// +/// # Errors +/// +/// May return Err Result if python type doesn't have support yet +/// or value of the type is incorrect. +#[allow(clippy::too_many_lines)] +pub fn py_to_rust(parameter: &pyo3::Bound<'_, PyAny>) -> RustPSQLDriverPyResult { + if parameter.is_none() { + return Ok(PythonDTO::PyNone); + } + + if parameter.is_instance_of::() { + return Ok(PythonDTO::PyCustomType( + parameter.extract::()?.inner(), + )); + } + + if parameter.is_instance_of::() { + return Ok(PythonDTO::PyBool(parameter.extract::()?)); + } + + if parameter.is_instance_of::() { + return Ok(PythonDTO::PyBytes(parameter.extract::>()?)); + } + + if parameter.is_instance_of::() { + return Ok(PythonDTO::PyText( + parameter.extract::()?.inner(), + )); + } + + if parameter.is_instance_of::() { + return Ok(PythonDTO::PyVarChar( + parameter.extract::()?.inner(), + )); + } + + if parameter.is_instance_of::() { + return Ok(PythonDTO::PyString(parameter.extract::()?)); + } + + if parameter.is_instance_of::() { + return Ok(PythonDTO::PyFloat64(parameter.extract::()?)); + } + + if parameter.is_instance_of::() { + return Ok(PythonDTO::PyFloat32( + parameter + .extract::()? + .retrieve_value(), + )); + } + + if parameter.is_instance_of::() { + return Ok(PythonDTO::PyFloat64( + parameter + .extract::()? + .retrieve_value(), + )); + } + + if parameter.is_instance_of::() { + return Ok(PythonDTO::PyIntI16( + parameter + .extract::()? + .retrieve_value(), + )); + } + + if parameter.is_instance_of::() { + return Ok(PythonDTO::PyIntI32( + parameter + .extract::()? 
+ .retrieve_value(), + )); + } + + if parameter.is_instance_of::() { + return Ok(PythonDTO::PyIntI64( + parameter.extract::()?.retrieve_value(), + )); + } + + if parameter.is_instance_of::() { + return Ok(PythonDTO::PyMoney( + parameter.extract::()?.retrieve_value(), + )); + } + + if parameter.is_instance_of::() { + return Ok(PythonDTO::PyIntI32(parameter.extract::()?)); + } + + if parameter.is_instance_of::() { + let timestamp_tz = parameter.extract::>(); + if let Ok(pydatetime_tz) = timestamp_tz { + return Ok(PythonDTO::PyDateTimeTz(pydatetime_tz)); + } + + let timestamp_no_tz = parameter.extract::(); + if let Ok(pydatetime_no_tz) = timestamp_no_tz { + return Ok(PythonDTO::PyDateTime(pydatetime_no_tz)); + } + + let timestamp_tz = extract_datetime_from_python_object_attrs(parameter); + if let Ok(pydatetime_tz) = timestamp_tz { + return Ok(PythonDTO::PyDateTimeTz(pydatetime_tz)); + } + + return Err(RustPSQLDriverError::PyToRustValueConversionError( + "Can not convert you datetime to rust type".into(), + )); + } + + if parameter.is_instance_of::() { + return Ok(PythonDTO::PyDate(parameter.extract::()?)); + } + + if parameter.is_instance_of::() { + return Ok(PythonDTO::PyTime(parameter.extract::()?)); + } + + if parameter.is_instance_of::() { + let duration = parameter.extract::()?; + if let Some(interval) = Interval::from_duration(duration) { + return Ok(PythonDTO::PyInterval(interval)); + } + return Err(RustPSQLDriverError::PyToRustValueConversionError( + "Cannot convert timedelta from Python to inner Rust type.".to_string(), + )); + } + + if parameter.is_instance_of::() | parameter.is_instance_of::() { + return Ok(PythonDTO::PyArray(py_sequence_into_postgres_array( + parameter, + )?)); + } + + if parameter.is_instance_of::() { + let dict = parameter.downcast::().map_err(|error| { + RustPSQLDriverError::PyToRustValueConversionError(format!( + "Can't cast to inner dict: {error}" + )) + })?; + + let mut serde_map: Map = Map::new(); + + for dict_item in dict.items() { + let py_list = dict_item.downcast::().map_err(|error| { + RustPSQLDriverError::PyToRustValueConversionError(format!( + "Cannot cast to list: {error}" + )) + })?; + + let key = py_list.get_item(0)?.extract::()?; + let value = py_to_rust(&py_list.get_item(1)?)?; + + serde_map.insert(key, value.to_serde_value()?); + } + + return Ok(PythonDTO::PyJsonb(Value::Object(serde_map))); + } + + if parameter.is_instance_of::() { + return Ok(PythonDTO::PyJsonb( + parameter.extract::()?.inner().clone(), + )); + } + + if parameter.is_instance_of::() { + return Ok(PythonDTO::PyJson( + parameter.extract::()?.inner().clone(), + )); + } + + if parameter.is_instance_of::() { + return Ok(PythonDTO::PyMacAddr6( + parameter.extract::()?.inner(), + )); + } + + if parameter.is_instance_of::() { + return Ok(PythonDTO::PyMacAddr8( + parameter.extract::()?.inner(), + )); + } + + if parameter.get_type().name()? == "UUID" { + return Ok(PythonDTO::PyUUID(Uuid::parse_str( + parameter.str()?.extract::<&str>()?, + )?)); + } + + if parameter.get_type().name()? == "decimal.Decimal" + || parameter.get_type().name()? 
== "Decimal" + { + return Ok(PythonDTO::PyDecimal(Decimal::from_str_exact( + parameter.str()?.extract::<&str>()?, + )?)); + } + + if parameter.is_instance_of::() { + return Ok(PythonDTO::PyPoint( + parameter.extract::()?.retrieve_value(), + )); + } + + if parameter.is_instance_of::() { + return Ok(PythonDTO::PyBox( + parameter.extract::()?.retrieve_value(), + )); + } + + if parameter.is_instance_of::() { + return Ok(PythonDTO::PyPath( + parameter.extract::()?.retrieve_value(), + )); + } + + if parameter.is_instance_of::() { + return Ok(PythonDTO::PyLine( + parameter.extract::()?.retrieve_value(), + )); + } + + if parameter.is_instance_of::() { + return Ok(PythonDTO::PyLineSegment( + parameter + .extract::()? + .retrieve_value(), + )); + } + + if parameter.is_instance_of::() { + return Ok(PythonDTO::PyCircle( + parameter.extract::()?.retrieve_value(), + )); + } + + if parameter.is_instance_of::() { + return parameter + .extract::()? + ._convert_to_python_dto(); + } + + if parameter.is_instance_of::() { + return parameter + .extract::()? + ._convert_to_python_dto(); + } + + if parameter.is_instance_of::() { + return parameter + .extract::()? + ._convert_to_python_dto(); + } + + if parameter.is_instance_of::() { + return parameter + .extract::()? + ._convert_to_python_dto(); + } + + if parameter.is_instance_of::() { + return parameter + .extract::()? + ._convert_to_python_dto(); + } + + if parameter.is_instance_of::() { + return parameter + .extract::()? + ._convert_to_python_dto(); + } + + if parameter.is_instance_of::() { + return parameter + .extract::()? + ._convert_to_python_dto(); + } + + if parameter.is_instance_of::() { + return parameter + .extract::()? + ._convert_to_python_dto(); + } + + if parameter.is_instance_of::() { + return parameter + .extract::()? + ._convert_to_python_dto(); + } + + if parameter.is_instance_of::() { + return parameter + .extract::()? + ._convert_to_python_dto(); + } + + if parameter.is_instance_of::() { + return parameter + .extract::()? + ._convert_to_python_dto(); + } + + if parameter.is_instance_of::() { + return parameter + .extract::()? + ._convert_to_python_dto(); + } + + if parameter.is_instance_of::() { + return parameter + .extract::()? + ._convert_to_python_dto(); + } + + if parameter.is_instance_of::() { + return parameter + .extract::()? + ._convert_to_python_dto(); + } + + if parameter.is_instance_of::() { + return parameter + .extract::()? + ._convert_to_python_dto(); + } + + if parameter.is_instance_of::() { + return parameter + .extract::()? + ._convert_to_python_dto(); + } + + if parameter.is_instance_of::() { + return parameter + .extract::()? + ._convert_to_python_dto(); + } + + if parameter.is_instance_of::() { + return parameter + .extract::()? + ._convert_to_python_dto(); + } + + if parameter.is_instance_of::() { + return parameter + .extract::()? + ._convert_to_python_dto(); + } + + if parameter.is_instance_of::() { + return parameter + .extract::()? + ._convert_to_python_dto(); + } + + if parameter.is_instance_of::() { + return parameter + .extract::()? + ._convert_to_python_dto(); + } + + if parameter.is_instance_of::() { + return parameter + .extract::()? + ._convert_to_python_dto(); + } + + if parameter.is_instance_of::() { + return parameter + .extract::()? + ._convert_to_python_dto(); + } + + if parameter.is_instance_of::() { + return parameter + .extract::()? + ._convert_to_python_dto(); + } + + if parameter.is_instance_of::() { + return parameter + .extract::()? 
+ ._convert_to_python_dto(); + } + + if parameter.is_instance_of::() { + return parameter + .extract::()? + ._convert_to_python_dto(); + } + + if parameter.is_instance_of::() { + return parameter + .extract::()? + ._convert_to_python_dto(); + } + + if parameter.is_instance_of::() { + return Ok(PythonDTO::PyPgVector( + parameter.extract::()?.inner_value(), + )); + } + + if let Ok(id_address) = parameter.extract::() { + return Ok(PythonDTO::PyIpAddress(id_address)); + } + + // It's used for Enum. + // If StrEnum is used on Python side, + // we simply stop at the `is_instance_of::``. + if let Ok(value_attr) = parameter.getattr("value") { + if let Ok(possible_string) = value_attr.extract::() { + return Ok(PythonDTO::PyString(possible_string)); + } + } + + Err(RustPSQLDriverError::PyToRustValueConversionError(format!( + "Can not covert you type {parameter} into inner one", + ))) +} + +/// Extract a value from a Python object, raising an error if missing or invalid +/// +/// # Errors +/// This function will return `Err` in the following cases: +/// - The Python object does not have the specified attribute +/// - The attribute exists but cannot be extracted into the specified Rust type +fn extract_value_from_python_object_or_raise<'py, T>( + parameter: &'py pyo3::Bound<'_, PyAny>, + attr_name: &str, +) -> Result +where + T: FromPyObject<'py>, +{ + parameter + .getattr(attr_name) + .ok() + .and_then(|attr| attr.extract::().ok()) + .ok_or_else(|| { + RustPSQLDriverError::PyToRustValueConversionError("Invalid attribute".into()) + }) +} + +/// Extract a timezone-aware datetime from a Python object. +/// This function retrieves various datetime components (`year`, `month`, `day`, etc.) +/// from a Python object and constructs a `DateTime` +/// +/// # Errors +/// This function will return `Err` in the following cases: +/// - The Python object does not contain or support one or more required datetime attributes +/// - The retrieved values are invalid for constructing a date, time, or datetime (e.g., invalid month or day) +/// - The timezone information (`tzinfo`) is not available or cannot be parsed +/// - The resulting datetime is ambiguous or invalid (e.g., due to DST transitions) +fn extract_datetime_from_python_object_attrs( + parameter: &pyo3::Bound<'_, PyAny>, +) -> Result, RustPSQLDriverError> { + let year = extract_value_from_python_object_or_raise::(parameter, "year")?; + let month = extract_value_from_python_object_or_raise::(parameter, "month")?; + let day = extract_value_from_python_object_or_raise::(parameter, "day")?; + let hour = extract_value_from_python_object_or_raise::(parameter, "hour")?; + let minute = extract_value_from_python_object_or_raise::(parameter, "minute")?; + let second = extract_value_from_python_object_or_raise::(parameter, "second")?; + let microsecond = extract_value_from_python_object_or_raise::(parameter, "microsecond")?; + + let date = NaiveDate::from_ymd_opt(year, month, day) + .ok_or_else(|| RustPSQLDriverError::PyToRustValueConversionError("Invalid date".into()))?; + let time = NaiveTime::from_hms_micro_opt(hour, minute, second, microsecond) + .ok_or_else(|| RustPSQLDriverError::PyToRustValueConversionError("Invalid time".into()))?; + let naive_datetime = NaiveDateTime::new(date, time); + + let raw_timestamp_tz = parameter + .getattr("tzinfo") + .ok() + .and_then(|tzinfo| tzinfo.getattr("key").ok()) + .and_then(|key| key.extract::().ok()) + .ok_or_else(|| { + RustPSQLDriverError::PyToRustValueConversionError("Invalid timezone info".into()) + })?; + + let 
fixed_offset_datetime = raw_timestamp_tz + .parse::() + .map_err(|_| { + RustPSQLDriverError::PyToRustValueConversionError("Failed to parse TZ".into()) + })? + .from_local_datetime(&naive_datetime) + .single() + .ok_or_else(|| { + RustPSQLDriverError::PyToRustValueConversionError( + "Ambiguous or invalid datetime".into(), + ) + })? + .fixed_offset(); + + Ok(fixed_offset_datetime) +} + +/// Convert Sequence from Python into Postgres ARRAY. +/// +/// # Errors +/// +/// May return Err Result if cannot convert at least one element. +#[allow(clippy::cast_possible_truncation)] +#[allow(clippy::cast_possible_wrap)] +pub fn py_sequence_into_postgres_array( + parameter: &Bound, +) -> RustPSQLDriverPyResult> { + let mut py_seq = parameter + .downcast::() + .map_err(|_| { + RustPSQLDriverError::PyToRustValueConversionError( + "PostgreSQL ARRAY type can be made only from python Sequence".into(), + ) + })? + .clone(); + + let mut dimensions: Vec = vec![]; + let mut continue_iteration = true; + + while continue_iteration { + dimensions.push(Dimension { + len: py_seq.len()? as i32, + lower_bound: 1, + }); + + let first_seq_elem = py_seq.iter()?.next(); + match first_seq_elem { + Some(first_seq_elem) => { + if let Ok(first_seq_elem) = first_seq_elem { + // Check for the string because it's sequence too, + // and in the most cases it should be array type, not new dimension. + if first_seq_elem.is_instance_of::() { + continue_iteration = false; + continue; + } + let possible_inner_seq = first_seq_elem.downcast::(); + + match possible_inner_seq { + Ok(possible_inner_seq) => { + py_seq = possible_inner_seq.clone(); + } + Err(_) => continue_iteration = false, + } + } + } + None => { + continue_iteration = false; + } + } + } + + let array_data = py_sequence_into_flat_vec(parameter)?; + match postgres_array::Array::from_parts_no_panic(array_data, dimensions) { + Ok(result_array) => Ok(result_array), + Err(err) => Err(RustPSQLDriverError::PyToRustValueConversionError(format!( + "Cannot convert python sequence to PostgreSQL ARRAY, error - {err}" + ))), + } +} + +/// Convert Sequence from Python (except String) into flat vec. +/// +/// # Errors +/// May return Err Result if cannot convert element into Rust one. +pub fn py_sequence_into_flat_vec( + parameter: &Bound, +) -> RustPSQLDriverPyResult> { + let py_seq = parameter.downcast::().map_err(|_| { + RustPSQLDriverError::PyToRustValueConversionError( + "PostgreSQL ARRAY type can be made only from python Sequence".into(), + ) + })?; + + let mut final_vec: Vec = vec![]; + + for seq_elem in py_seq.iter()? { + let ok_seq_elem = seq_elem?; + + // Check for the string because it's sequence too, + // and in the most cases it should be array type, not new dimension. + if ok_seq_elem.is_instance_of::() { + final_vec.push(py_to_rust(&ok_seq_elem)?); + continue; + } + + let possible_next_seq = ok_seq_elem.downcast::(); + + if let Ok(next_seq) = possible_next_seq { + let mut next_vec = py_sequence_into_flat_vec(next_seq)?; + final_vec.append(&mut next_vec); + } else { + final_vec.push(py_to_rust(&ok_seq_elem)?); + continue; + } + } + + Ok(final_vec) +} + +/// Convert parameters come from python. +/// +/// Parameters for `execute()` method can be either +/// a list or a tuple or a set. +/// +/// We parse every parameter from python object and return +/// Vector of out `PythonDTO`. +/// +/// # Errors +/// +/// May return Err Result if can't convert python object. 
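// Illustrative sketch, not part of the patch: the chrono/chrono-tz pipeline that
// `extract_datetime_from_python_object_attrs` above runs once the datetime components and
// the IANA timezone key have been pulled out of the Python object. `sample_fixed_offset`
// is hypothetical and the literal values are arbitrary sample data.
use chrono::{DateTime, FixedOffset, NaiveDate, NaiveDateTime, NaiveTime, TimeZone};
use chrono_tz::Tz;

fn sample_fixed_offset() -> Option<DateTime<FixedOffset>> {
    let date = NaiveDate::from_ymd_opt(2025, 1, 27)?;
    let time = NaiveTime::from_hms_micro_opt(12, 30, 0, 250_000)?;
    let naive = NaiveDateTime::new(date, time);

    // The `tzinfo.key` attribute of a zoneinfo-based datetime is an IANA name like this one.
    let tz: Tz = "Europe/Berlin".parse().ok()?;

    // `.single()` rejects ambiguous or non-existent local times (DST transitions).
    Some(tz.from_local_datetime(&naive).single()?.fixed_offset())
}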
+#[allow(clippy::needless_pass_by_value)] +pub fn convert_parameters_and_qs( + querystring: String, + parameters: Option>, +) -> RustPSQLDriverPyResult<(String, Vec)> { + let Some(parameters) = parameters else { + return Ok((querystring, vec![])); + }; + + let res = Python::with_gil(|gil| { + let params = parameters.extract::>>(gil).map_err(|_| { + RustPSQLDriverError::PyToRustValueConversionError( + "Cannot convert you parameters argument into Rust type, please use List/Tuple" + .into(), + ) + }); + if let Ok(params) = params { + return Ok((querystring, convert_seq_parameters(params)?)); + } + + let kw_params = parameters.downcast_bound::(gil); + if let Ok(kw_params) = kw_params { + return convert_kwargs_parameters(kw_params, &querystring); + } + + Err(RustPSQLDriverError::PyToRustValueConversionError( + "Parameters must be sequence or mapping".into(), + )) + })?; + + Ok(res) +} + +pub fn convert_kwargs_parameters<'a>( + kw_params: &Bound<'_, PyMapping>, + querystring: &'a str, +) -> RustPSQLDriverPyResult<(String, Vec)> { + let mut result_vec: Vec = vec![]; + let (changed_string, params_names) = parse_kwargs_qs(querystring); + + for param_name in params_names { + match kw_params.get_item(¶m_name) { + Ok(param) => result_vec.push(py_to_rust(¶m)?), + Err(_) => { + return Err(RustPSQLDriverError::PyToRustValueConversionError( + format!("Cannot find parameter with name <{param_name}> in parameters").into(), + )) + } + } + } + + Ok((changed_string, result_vec)) +} + +pub fn convert_seq_parameters( + seq_params: Vec>, +) -> RustPSQLDriverPyResult> { + let mut result_vec: Vec = vec![]; + Python::with_gil(|gil| { + for parameter in seq_params { + result_vec.push(py_to_rust(parameter.bind(gil))?); + } + Ok::<(), RustPSQLDriverError>(()) + })?; + + Ok(result_vec) +} + +/// Convert python List of Dict type or just Dict into serde `Value`. +/// +/// # Errors +/// May return error if cannot convert Python type into Rust one. +#[allow(clippy::needless_pass_by_value)] +pub fn build_serde_value(value: Py) -> RustPSQLDriverPyResult { + Python::with_gil(|gil| { + let bind_value = value.bind(gil); + if bind_value.is_instance_of::() { + let mut result_vec: Vec = vec![]; + + let params = bind_value.extract::>>()?; + + for inner in params { + let inner_bind = inner.bind(gil); + if inner_bind.is_instance_of::() { + let python_dto = py_to_rust(inner_bind)?; + result_vec.push(python_dto.to_serde_value()?); + } else if inner_bind.is_instance_of::() { + let serde_value = build_serde_value(inner)?; + result_vec.push(serde_value); + } else { + return Err(RustPSQLDriverError::PyToRustValueConversionError( + "PyJSON must have dicts.".to_string(), + )); + } + } + Ok(json!(result_vec)) + } else if bind_value.is_instance_of::() { + return py_to_rust(bind_value)?.to_serde_value(); + } else { + return Err(RustPSQLDriverError::PyToRustValueConversionError( + "PyJSON must be dict value.".to_string(), + )); + } + }) +} + +/// Convert two python parameters(x and y) to Coord from `geo_type`. +/// Also it checks that passed values is int or float. +/// +/// # Errors +/// +/// May return error if cannot convert Python type into Rust one. +/// May return error if parameters type isn't correct. 
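// Illustrative sketch, not part of the patch: why `convert_kwargs_parameters` above emits
// the parameter values in the order the placeholder names appear in the rewritten
// querystring — they must line up with the generated $1, $2, ... positions.
// `order_named_params` and the String value type are hypothetical; the real code produces
// `PythonDTO`s.
use std::collections::HashMap;

fn order_named_params(
    ordered_names: &[String],
    named: &HashMap<String, String>,
) -> Result<Vec<String>, String> {
    ordered_names
        .iter()
        .map(|name| {
            named
                .get(name)
                .cloned()
                .ok_or_else(|| format!("Cannot find parameter with name <{name}> in parameters"))
        })
        .collect()
}

fn main() {
    let named = HashMap::from([
        ("name".to_string(), "Alex".to_string()),
        ("id".to_string(), "42".to_string()),
    ]);
    // Placeholder order as discovered while rewriting the querystring into $1, $2.
    let ordered = vec!["id".to_string(), "name".to_string()];
    assert_eq!(
        order_named_params(&ordered, &named).unwrap(),
        vec!["42".to_string(), "Alex".to_string()],
    );
}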
+fn convert_py_to_rust_coord_values(parameters: Vec>) -> RustPSQLDriverPyResult> { + Python::with_gil(|gil| { + let mut coord_values_vec: Vec = vec![]; + + for one_parameter in parameters { + let parameter_bind = one_parameter.bind(gil); + + if !parameter_bind.is_instance_of::() + & !parameter_bind.is_instance_of::() + { + return Err(RustPSQLDriverError::PyToRustValueConversionError( + "Incorrect types of coordinate values. It must be int or float".into(), + )); + } + + let python_dto = py_to_rust(parameter_bind)?; + match python_dto { + PythonDTO::PyIntI16(pyint) => coord_values_vec.push(f64::from(pyint)), + PythonDTO::PyIntI32(pyint) => coord_values_vec.push(f64::from(pyint)), + PythonDTO::PyIntU32(pyint) => coord_values_vec.push(f64::from(pyint)), + PythonDTO::PyFloat32(pyfloat) => coord_values_vec.push(f64::from(pyfloat)), + PythonDTO::PyFloat64(pyfloat) => coord_values_vec.push(pyfloat), + PythonDTO::PyIntI64(_) | PythonDTO::PyIntU64(_) => { + return Err(RustPSQLDriverError::PyToRustValueConversionError( + "Not implemented this type yet".into(), + )) + } + _ => { + return Err(RustPSQLDriverError::PyToRustValueConversionError( + "Incorrect types of coordinate values. It must be int or float".into(), + )) + } + }; + } + + Ok::, RustPSQLDriverError>(coord_values_vec) + }) +} + +/// Convert Python values with coordinates into vector of Coord's for building Geo types later. +/// +/// Passed parameter can be either a list or a tuple or a set. +/// Inside this parameter may be multiple list/tuple/set with int/float or only int/float values flat. +/// We parse every parameter from python object and make from them Coord's. +/// Additionally it checks for correct length of coordinates parsed from Python values. +/// +/// # Errors +/// +/// May return error if cannot convert Python type into Rust one. +/// May return error if parsed number of coordinates is not expected by allowed length. +#[allow(clippy::needless_pass_by_value)] +pub fn build_geo_coords( + py_parameters: Py, + allowed_length_option: Option, +) -> RustPSQLDriverPyResult> { + let mut result_vec: Vec = vec![]; + + result_vec = Python::with_gil(|gil| { + let bind_py_parameters = py_parameters.bind(gil); + let parameters = py_sequence_to_rust(bind_py_parameters)?; + + let first_inner_bind_py_parameters = parameters[0].bind(gil); + if first_inner_bind_py_parameters.is_instance_of::() + | first_inner_bind_py_parameters.is_instance_of::() + { + if parameters.len() % 2 != 0 { + return Err(RustPSQLDriverError::PyToRustValueConversionError( + "Length of coordinates that passed in flat structure must be a multiple of 2" + .into(), + )); + } + + for (pair_first_inner, pair_second_inner) in parameters.into_iter().tuples() { + let coord_values = + convert_py_to_rust_coord_values(vec![pair_first_inner, pair_second_inner])?; + result_vec.push(coord! 
{x: coord_values[0], y: coord_values[1]}); + } + } else if first_inner_bind_py_parameters.is_instance_of::() + | first_inner_bind_py_parameters.is_instance_of::() + | first_inner_bind_py_parameters.is_instance_of::() + { + for pair_inner_parameters in parameters { + let bind_pair_inner_parameters = pair_inner_parameters.bind(gil); + let pair_py_inner_parameters = py_sequence_to_rust(bind_pair_inner_parameters)?; + + if pair_py_inner_parameters.len() != 2 { + return Err(RustPSQLDriverError::PyToRustValueConversionError( + "Inner parameters must be pair(list/tuple/set) of int/float values".into(), + )); + } + + let coord_values = convert_py_to_rust_coord_values(pair_py_inner_parameters)?; + result_vec.push(coord! {x: coord_values[0], y: coord_values[1]}); + } + } else { + return Err(RustPSQLDriverError::PyToRustValueConversionError( + "Inner coordinates must be passed as pairs of int/float in list/tuple/set or as flat structure with int/float values".into(), + )); + }; + Ok::, RustPSQLDriverError>(result_vec) + })?; + + let number_of_coords = result_vec.len(); + let allowed_length = allowed_length_option.unwrap_or_default(); + + if (allowed_length != 0) & (number_of_coords != allowed_length) { + return Err(RustPSQLDriverError::PyToRustValueConversionError(format!( + "Invalid number of coordinates for this geo type, allowed {allowed_length}, got: {number_of_coords}" + ))); + } + + Ok(result_vec) +} + +/// Convert flat Python values with coordinates into vector of Geo values for building Geo types later. +/// +/// Passed parameter can be either a list or a tuple or a set with elements. +/// We parse every parameter from python object and prepare them for making geo type. +/// Additionally it checks for correct length of coordinates parsed from Python values. +/// +/// # Errors +/// +/// May return error if cannot convert Python type into Rust one. +/// May return error if parsed number of coordinates is not expected by allowed length. +#[allow(clippy::needless_pass_by_value)] +pub fn build_flat_geo_coords( + py_parameters: Py, + allowed_length_option: Option, +) -> RustPSQLDriverPyResult> { + Python::with_gil(|gil| { + let allowed_length = allowed_length_option.unwrap_or_default(); + + let bind_py_parameters = py_parameters.bind(gil); + let parameters = py_sequence_to_rust(bind_py_parameters)?; + let parameters_length = parameters.len(); + + if (allowed_length != 0) & (parameters.len() != allowed_length) { + return Err(RustPSQLDriverError::PyToRustValueConversionError(format!( + "Invalid number of values for this geo type, allowed {allowed_length}, got: {parameters_length}" + ))); + }; + + let result_vec = convert_py_to_rust_coord_values(parameters)?; + + let number_of_coords = result_vec.len(); + if (allowed_length != 0) & (number_of_coords != allowed_length) { + return Err(RustPSQLDriverError::PyToRustValueConversionError(format!( + "Invalid number of values for this geo type, allowed {allowed_length}, got: {parameters_length}" + ))); + }; + + Ok::, RustPSQLDriverError>(result_vec) + }) +} + +/// Convert Python sequence to Rust vector. +/// Also it checks that sequence has set/list/tuple type. +/// +/// # Errors +/// +/// May return error if cannot convert Python type into Rust one. +/// May return error if parameters type isn't correct. 
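`build_geo_coords` and `build_flat_geo_coords` above accept coordinates either as one flat run of int/float values or as inner pairs inside a list/tuple/set, and validate the resulting count. A sketch of both layouts; the `Path` wrapper and its constructor are assumptions here, only the accepted coordinate layouts come from the code above:

```python
from psqlpy.extra_types import Path  # assumption: exact class name and import location may differ

flat_path = Path([1.0, 2.0, 3.0, 4.0])        # flat values: length must be a multiple of 2
paired_path = Path([(1.0, 2.0), (3.0, 4.0)])  # pairs: every inner sequence must hold exactly 2 numbers
# Only int/float coordinate values are accepted; anything else raises a conversion error.
```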
+fn py_sequence_to_rust(bind_parameters: &Bound) -> RustPSQLDriverPyResult>> { + let mut coord_values_sequence_vec: Vec> = vec![]; + + if bind_parameters.is_instance_of::() { + let bind_pyset_parameters = bind_parameters.downcast::().unwrap(); + + for one_parameter in bind_pyset_parameters { + let extracted_parameter = one_parameter.extract::>().map_err(|_| { + RustPSQLDriverError::PyToRustValueConversionError( + format!("Error on sequence type extraction, please use correct list/tuple/set, {bind_parameters}") + ) + })?; + coord_values_sequence_vec.push(extracted_parameter); + } + } else if bind_parameters.is_instance_of::() + | bind_parameters.is_instance_of::() + { + coord_values_sequence_vec = bind_parameters.extract::>>().map_err(|_| { + RustPSQLDriverError::PyToRustValueConversionError( + format!("Error on sequence type extraction, please use correct list/tuple/set, {bind_parameters}") + ) + })?; + } else { + return Err(RustPSQLDriverError::PyToRustValueConversionError(format!( + "Invalid sequence type, please use list/tuple/set, {bind_parameters}" + ))); + }; + + Ok::>, RustPSQLDriverError>(coord_values_sequence_vec) +} + +fn parse_kwargs_qs(querystring: &str) -> (String, Vec) { + let re = regex::Regex::new(r"\$\(([^)]+)\)p").unwrap(); + + { + let kq_read = KWARGS_QUERYSTRINGS.read().unwrap(); + let qs = kq_read.get(querystring); + + if let Some(qs) = qs { + return qs.clone(); + } + }; + + let mut counter = 0; + let mut sequence = Vec::new(); + + let result = re.replace_all(querystring, |caps: ®ex::Captures| { + let account_id = caps[1].to_string(); + + sequence.push(account_id.clone()); + counter += 1; + + format!("${}", &counter) + }); + + let mut kq_write = KWARGS_QUERYSTRINGS.write().unwrap(); + kq_write.insert( + querystring.to_string(), + (result.clone().into(), sequence.clone()), + ); + (result.into(), sequence) +} diff --git a/src/value_converter/funcs/mod.rs b/src/value_converter/funcs/mod.rs new file mode 100644 index 00000000..4db4cd38 --- /dev/null +++ b/src/value_converter/funcs/mod.rs @@ -0,0 +1,2 @@ +pub mod from_python; +pub mod to_python; diff --git a/src/value_converter/funcs/to_python.rs b/src/value_converter/funcs/to_python.rs new file mode 100644 index 00000000..fce887b1 --- /dev/null +++ b/src/value_converter/funcs/to_python.rs @@ -0,0 +1,712 @@ +use chrono::{self, DateTime, FixedOffset, NaiveDate, NaiveDateTime, NaiveTime, TimeZone}; +use chrono_tz::Tz; +use pg_interval::Interval; +use postgres_array::{Array, Dimension}; +use postgres_types::{Field, FromSql, Kind, Type}; +use rust_decimal::Decimal; +use serde_json::Value; +use std::net::IpAddr; +use tokio_postgres::{Column, Row}; +use uuid::Uuid; + +use pyo3::{ + types::{ + PyAnyMethods, PyBytes, PyDict, PyDictMethods, PyList, PyListMethods, PySet, PyString, + PyTuple, + }, + Bound, FromPyObject, IntoPy, Py, PyAny, Python, ToPyObject, +}; + +use crate::{ + additional_types::{ + Circle, Line, RustLineSegment, RustLineString, RustMacAddr6, RustMacAddr8, RustPoint, + RustRect, + }, + exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, + value_converter::{ + consts::KWARGS_QUERYSTRINGS, + models::{ + decimal::InnerDecimal, interval::InnerInterval, serde_value::InternalSerdeValue, + uuid::InternalUuid, + }, + }, +}; +use pgvector::Vector as PgVector; + +/// Convert serde `Value` into Python object. +/// # Errors +/// May return Err Result if cannot add new value to Python Dict. 
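`build_python_from_serde_value`, declared by the comment above and defined next, maps serde `Value` variants onto Python builtins: objects become dicts, arrays become lists, strings become `str`, numbers become `int` or `float`, booleans become `bool`, and null becomes `None`. A round-trip sketch with a placeholder DSN:

```python
import asyncio
from psqlpy import ConnectionPool

db_pool = ConnectionPool(dsn="postgres://postgres:postgres@localhost:5432/postgres")  # placeholder DSN


async def main() -> None:
    result = await db_pool.execute(
        """SELECT '{"tags": ["a", "b"], "count": 2, "ratio": 0.5, "extra": null}'::jsonb AS payload"""
    )
    row = result.result()[0]
    # Object -> dict, Array -> list, Number -> int/float, Null -> None
    assert row["payload"] == {"tags": ["a", "b"], "count": 2, "ratio": 0.5, "extra": None}
    db_pool.close()


asyncio.run(main())
```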
+pub fn build_python_from_serde_value( + py: Python<'_>, + value: Value, +) -> RustPSQLDriverPyResult> { + match value { + Value::Array(massive) => { + let mut result_vec: Vec> = vec![]; + + for single_record in massive { + result_vec.push(build_python_from_serde_value(py, single_record)?); + } + + Ok(result_vec.to_object(py)) + } + Value::Object(mapping) => { + let py_dict = PyDict::new_bound(py); + + for (key, value) in mapping { + py_dict.set_item( + build_python_from_serde_value(py, Value::String(key))?, + build_python_from_serde_value(py, value)?, + )?; + } + + Ok(py_dict.to_object(py)) + } + Value::Bool(boolean) => Ok(boolean.to_object(py)), + Value::Number(number) => { + if number.is_f64() { + Ok(number.as_f64().to_object(py)) + } else if number.is_i64() { + Ok(number.as_i64().to_object(py)) + } else { + Ok(number.as_u64().to_object(py)) + } + } + Value::String(string) => Ok(string.to_object(py)), + Value::Null => Ok(py.None()), + } +} + +fn parse_kwargs_qs(querystring: &str) -> (String, Vec) { + let re = regex::Regex::new(r"\$\(([^)]+)\)p").unwrap(); + + { + let kq_read = KWARGS_QUERYSTRINGS.read().unwrap(); + let qs = kq_read.get(querystring); + + if let Some(qs) = qs { + return qs.clone(); + } + }; + + let mut counter = 0; + let mut sequence = Vec::new(); + + let result = re.replace_all(querystring, |caps: ®ex::Captures| { + let account_id = caps[1].to_string(); + + sequence.push(account_id.clone()); + counter += 1; + + format!("${}", &counter) + }); + + let mut kq_write = KWARGS_QUERYSTRINGS.write().unwrap(); + kq_write.insert( + querystring.to_string(), + (result.clone().into(), sequence.clone()), + ); + (result.into(), sequence) +} + +fn composite_field_postgres_to_py<'a, T: FromSql<'a>>( + type_: &Type, + buf: &mut &'a [u8], + is_simple: bool, +) -> RustPSQLDriverPyResult { + if is_simple { + return T::from_sql_nullable(type_, Some(buf)).map_err(|err| { + RustPSQLDriverError::RustToPyValueConversionError(format!( + "Cannot convert PostgreSQL type {type_} into Python type, err: {err}", + )) + }); + } + postgres_types::private::read_value::(type_, buf).map_err(|err| { + RustPSQLDriverError::RustToPyValueConversionError(format!( + "Cannot convert PostgreSQL type {type_} into Python type, err: {err}", + )) + }) +} + +/// Convert rust array to python list. +/// +/// It can convert multidimensional arrays. +fn postgres_array_to_py( + py: Python<'_>, + array: Option>, +) -> Option> { + array.map(|array| { + inner_postgres_array_to_py( + py, + array.dimensions(), + array.iter().collect::>().as_slice(), + 0, + 0, + ) + }) +} + +/// Inner postgres array conversion to python list. 
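`inner_postgres_array_to_py`, defined next, walks the array dimensions recursively, so a multidimensional PostgreSQL array comes back as nested Python lists, one nesting level per dimension. A small sketch of the expected shape, with a placeholder DSN:

```python
import asyncio
from psqlpy import ConnectionPool

db_pool = ConnectionPool(dsn="postgres://postgres:postgres@localhost:5432/postgres")  # placeholder DSN


async def main() -> None:
    result = await db_pool.execute("SELECT '{{1,2},{3,4}}'::int4[] AS matrix")
    # A 2x2 int4[] decodes to nested lists, one level per array dimension.
    assert result.result()[0]["matrix"] == [[1, 2], [3, 4]]
    db_pool.close()


asyncio.run(main())
```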
+#[allow(clippy::cast_sign_loss)] +fn inner_postgres_array_to_py( + py: Python<'_>, + dimensions: &[Dimension], + data: &[T], + dimension_index: usize, + mut lower_bound: usize, +) -> Py +where + T: ToPyObject, +{ + let current_dimension = dimensions.get(dimension_index); + + if let Some(current_dimension) = current_dimension { + let possible_next_dimension = dimensions.get(dimension_index + 1); + match possible_next_dimension { + Some(next_dimension) => { + let final_list = PyList::empty_bound(py); + + for _ in 0..current_dimension.len as usize { + if dimensions.get(dimension_index + 1).is_some() { + let inner_pylist = inner_postgres_array_to_py( + py, + dimensions, + &data[lower_bound..next_dimension.len as usize + lower_bound], + dimension_index + 1, + 0, + ); + final_list.append(inner_pylist).unwrap(); + lower_bound += next_dimension.len as usize; + }; + } + + return final_list.unbind(); + } + None => { + return PyList::new_bound(py, data).unbind(); + } + } + } + + PyList::empty_bound(py).unbind() +} + +#[allow(clippy::too_many_lines)] +fn postgres_bytes_to_py( + py: Python<'_>, + type_: &Type, + buf: &mut &[u8], + is_simple: bool, +) -> RustPSQLDriverPyResult> { + match *type_ { + // ---------- Bytes Types ---------- + // Convert BYTEA type into Vector, then into PyBytes + Type::BYTEA => { + let vec_of_bytes = + composite_field_postgres_to_py::>>(type_, buf, is_simple)?; + if let Some(vec_of_bytes) = vec_of_bytes { + return Ok(PyBytes::new_bound(py, &vec_of_bytes).to_object(py)); + } + Ok(py.None()) + } + // // ---------- String Types ---------- + // // Convert TEXT and VARCHAR type into String, then into str + Type::TEXT | Type::VARCHAR | Type::XML => Ok(composite_field_postgres_to_py::< + Option, + >(type_, buf, is_simple)? + .to_object(py)), + // ---------- Boolean Types ---------- + // Convert BOOL type into bool + Type::BOOL => Ok( + composite_field_postgres_to_py::>(type_, buf, is_simple)?.to_object(py), + ), + // ---------- Number Types ---------- + // Convert SmallInt into i16, then into int + Type::INT2 => { + Ok(composite_field_postgres_to_py::>(type_, buf, is_simple)?.to_object(py)) + } + // Convert Integer into i32, then into int + Type::INT4 => { + Ok(composite_field_postgres_to_py::>(type_, buf, is_simple)?.to_object(py)) + } + // Convert BigInt into i64, then into int + Type::INT8 | Type::MONEY => { + Ok(composite_field_postgres_to_py::>(type_, buf, is_simple)?.to_object(py)) + } + // Convert REAL into f32, then into float + Type::FLOAT4 => { + Ok(composite_field_postgres_to_py::>(type_, buf, is_simple)?.to_object(py)) + } + // Convert DOUBLE PRECISION into f64, then into float + Type::FLOAT8 => { + Ok(composite_field_postgres_to_py::>(type_, buf, is_simple)?.to_object(py)) + } + // ---------- Date Types ---------- + // Convert DATE into NaiveDate, then into datetime.date + Type::DATE => Ok(composite_field_postgres_to_py::>( + type_, buf, is_simple, + )? + .to_object(py)), + // Convert Time into NaiveTime, then into datetime.time + Type::TIME => Ok(composite_field_postgres_to_py::>( + type_, buf, is_simple, + )? + .to_object(py)), + // Convert TIMESTAMP into NaiveDateTime, then into datetime.datetime + Type::TIMESTAMP => Ok(composite_field_postgres_to_py::>( + type_, buf, is_simple, + )? + .to_object(py)), + // Convert TIMESTAMP into NaiveDateTime, then into datetime.datetime + Type::TIMESTAMPTZ => Ok( + composite_field_postgres_to_py::>>(type_, buf, is_simple)? 
+ .to_object(py), + ), + // ---------- UUID Types ---------- + // Convert UUID into Uuid type, then into String if possible + Type::UUID => { + let rust_uuid = composite_field_postgres_to_py::>(type_, buf, is_simple)?; + match rust_uuid { + Some(rust_uuid) => { + return Ok(PyString::new_bound(py, &rust_uuid.to_string()).to_object(py)) + } + None => Ok(py.None()), + } + } + // ---------- IpAddress Types ---------- + Type::INET => Ok( + composite_field_postgres_to_py::>(type_, buf, is_simple)?.to_object(py), + ), + // Convert JSON/JSONB into Serde Value, then into list or dict + Type::JSONB | Type::JSON => { + let db_json = composite_field_postgres_to_py::>(type_, buf, is_simple)?; + + match db_json { + Some(value) => Ok(build_python_from_serde_value(py, value)?), + None => Ok(py.None().to_object(py)), + } + } + // Convert MACADDR into inner type for macaddr6, then into str + Type::MACADDR => { + let macaddr_ = + composite_field_postgres_to_py::>(type_, buf, is_simple)?; + if let Some(macaddr_) = macaddr_ { + Ok(macaddr_.inner().to_string().to_object(py)) + } else { + Ok(py.None().to_object(py)) + } + } + Type::MACADDR8 => { + let macaddr_ = + composite_field_postgres_to_py::>(type_, buf, is_simple)?; + if let Some(macaddr_) = macaddr_ { + Ok(macaddr_.inner().to_string().to_object(py)) + } else { + Ok(py.None().to_object(py)) + } + } + Type::NUMERIC => { + if let Some(numeric_) = + composite_field_postgres_to_py::>(type_, buf, is_simple)? + { + return Ok(InnerDecimal(numeric_).to_object(py)); + } + Ok(py.None().to_object(py)) + } + // ---------- Geo Types ---------- + Type::POINT => { + let point_ = + composite_field_postgres_to_py::>(type_, buf, is_simple)?; + + match point_ { + Some(point_) => Ok(point_.into_py(py)), + None => Ok(py.None().to_object(py)), + } + } + Type::BOX => { + let box_ = composite_field_postgres_to_py::>(type_, buf, is_simple)?; + + match box_ { + Some(box_) => Ok(box_.into_py(py)), + None => Ok(py.None().to_object(py)), + } + } + Type::PATH => { + let path_ = + composite_field_postgres_to_py::>(type_, buf, is_simple)?; + + match path_ { + Some(path_) => Ok(path_.into_py(py)), + None => Ok(py.None().to_object(py)), + } + } + Type::LINE => { + let line_ = composite_field_postgres_to_py::>(type_, buf, is_simple)?; + + match line_ { + Some(line_) => Ok(line_.into_py(py)), + None => Ok(py.None().to_object(py)), + } + } + Type::LSEG => { + let lseg_ = + composite_field_postgres_to_py::>(type_, buf, is_simple)?; + + match lseg_ { + Some(lseg_) => Ok(lseg_.into_py(py)), + None => Ok(py.None().to_object(py)), + } + } + Type::CIRCLE => { + let circle_ = composite_field_postgres_to_py::>(type_, buf, is_simple)?; + + match circle_ { + Some(circle_) => Ok(circle_.into_py(py)), + None => Ok(py.None().to_object(py)), + } + } + Type::INTERVAL => { + let interval = + composite_field_postgres_to_py::>(type_, buf, is_simple)?; + if let Some(interval) = interval { + return Ok(InnerInterval(interval).to_object(py)); + } + Ok(py.None()) + } + // ---------- Array Text Types ---------- + Type::BOOL_ARRAY => Ok(postgres_array_to_py( + py, + composite_field_postgres_to_py::>>(type_, buf, is_simple)?, + ) + .to_object(py)), + // Convert ARRAY of TEXT or VARCHAR into Vec, then into list[str] + Type::TEXT_ARRAY | Type::VARCHAR_ARRAY | Type::XML_ARRAY => Ok(postgres_array_to_py( + py, + composite_field_postgres_to_py::>>(type_, buf, is_simple)?, + ) + .to_object(py)), + // ---------- Array Integer Types ---------- + // Convert ARRAY of SmallInt into Vec, then into list[int] + Type::INT2_ARRAY => 
Ok(postgres_array_to_py( + py, + composite_field_postgres_to_py::>>(type_, buf, is_simple)?, + ) + .to_object(py)), + // Convert ARRAY of Integer into Vec, then into list[int] + Type::INT4_ARRAY => Ok(postgres_array_to_py( + py, + composite_field_postgres_to_py::>>(type_, buf, is_simple)?, + ) + .to_object(py)), + // Convert ARRAY of BigInt into Vec, then into list[int] + Type::INT8_ARRAY | Type::MONEY_ARRAY => Ok(postgres_array_to_py( + py, + composite_field_postgres_to_py::>>(type_, buf, is_simple)?, + ) + .to_object(py)), + // Convert ARRAY of Float4 into Vec, then into list[float] + Type::FLOAT4_ARRAY => Ok(postgres_array_to_py( + py, + composite_field_postgres_to_py::>>(type_, buf, is_simple)?, + ) + .to_object(py)), + // Convert ARRAY of Float8 into Vec, then into list[float] + Type::FLOAT8_ARRAY => Ok(postgres_array_to_py( + py, + composite_field_postgres_to_py::>>(type_, buf, is_simple)?, + ) + .to_object(py)), + // Convert ARRAY of Date into Vec, then into list[datetime.date] + Type::DATE_ARRAY => Ok(postgres_array_to_py( + py, + composite_field_postgres_to_py::>>(type_, buf, is_simple)?, + ) + .to_object(py)), + // Convert ARRAY of Time into Vec, then into list[datetime.date] + Type::TIME_ARRAY => Ok(postgres_array_to_py( + py, + composite_field_postgres_to_py::>>(type_, buf, is_simple)?, + ) + .to_object(py)), + // Convert ARRAY of TIMESTAMP into Vec, then into list[datetime.date] + Type::TIMESTAMP_ARRAY => Ok(postgres_array_to_py( + py, + composite_field_postgres_to_py::>>(type_, buf, is_simple)?, + ) + .to_object(py)), + // Convert ARRAY of TIMESTAMPTZ into Vec>, then into list[datetime.date] + Type::TIMESTAMPTZ_ARRAY => Ok(postgres_array_to_py( + py, + composite_field_postgres_to_py::>>>( + type_, buf, is_simple, + )?, + ) + .to_object(py)), + // Convert ARRAY of UUID into Vec>, then into list[UUID] + Type::UUID_ARRAY => { + let uuid_array = composite_field_postgres_to_py::>>( + type_, buf, is_simple, + )?; + Ok(postgres_array_to_py(py, uuid_array).to_object(py)) + } + // Convert ARRAY of INET into Vec, then into list[IPv4Address | IPv6Address] + Type::INET_ARRAY => Ok(postgres_array_to_py( + py, + composite_field_postgres_to_py::>>(type_, buf, is_simple)?, + ) + .to_object(py)), + Type::JSONB_ARRAY | Type::JSON_ARRAY => { + let db_json_array = composite_field_postgres_to_py::>>( + type_, buf, is_simple, + )?; + Ok(postgres_array_to_py(py, db_json_array).to_object(py)) + } + Type::NUMERIC_ARRAY => Ok(postgres_array_to_py( + py, + composite_field_postgres_to_py::>>(type_, buf, is_simple)?, + ) + .to_object(py)), + // ---------- Array Geo Types ---------- + Type::POINT_ARRAY => { + let point_array_ = + composite_field_postgres_to_py::>>(type_, buf, is_simple)?; + + Ok(postgres_array_to_py(py, point_array_).to_object(py)) + } + Type::BOX_ARRAY => { + let box_array_ = + composite_field_postgres_to_py::>>(type_, buf, is_simple)?; + + Ok(postgres_array_to_py(py, box_array_).to_object(py)) + } + Type::PATH_ARRAY => { + let path_array_ = composite_field_postgres_to_py::>>( + type_, buf, is_simple, + )?; + + Ok(postgres_array_to_py(py, path_array_).to_object(py)) + } + Type::LINE_ARRAY => { + let line_array_ = + composite_field_postgres_to_py::>>(type_, buf, is_simple)?; + + Ok(postgres_array_to_py(py, line_array_).to_object(py)) + } + Type::LSEG_ARRAY => { + let lseg_array_ = composite_field_postgres_to_py::>>( + type_, buf, is_simple, + )?; + + Ok(postgres_array_to_py(py, lseg_array_).to_object(py)) + } + Type::CIRCLE_ARRAY => { + let circle_array_ = + 
composite_field_postgres_to_py::>>(type_, buf, is_simple)?; + + Ok(postgres_array_to_py(py, circle_array_).to_object(py)) + } + Type::INTERVAL_ARRAY => { + let interval_array_ = composite_field_postgres_to_py::>>( + type_, buf, is_simple, + )?; + + Ok(postgres_array_to_py(py, interval_array_).to_object(py)) + } + _ => other_postgres_bytes_to_py(py, type_, buf, is_simple), + } +} + +/// Convert OTHER type to python. +/// +/// # Errors +/// May return result if type is unknown. +pub fn other_postgres_bytes_to_py( + py: Python<'_>, + type_: &Type, + buf: &mut &[u8], + is_simple: bool, +) -> RustPSQLDriverPyResult> { + if type_.name() == "vector" { + let vector = composite_field_postgres_to_py::>(type_, buf, is_simple)?; + match vector { + Some(real_vector) => { + return Ok(real_vector.to_vec().to_object(py)); + } + None => return Ok(py.None()), + } + } + + Err(RustPSQLDriverError::RustToPyValueConversionError( + format!("Cannot convert {type_} into Python type, please look at the custom_decoders functionality.") + )) +} + +/// Convert composite type from `PostgreSQL` to Python type. +/// +/// # Errors +/// May return error if there is any problem with bytes. +#[allow(clippy::cast_sign_loss)] +pub fn composite_postgres_to_py( + py: Python<'_>, + fields: &Vec, + buf: &mut &[u8], + custom_decoders: &Option>, +) -> RustPSQLDriverPyResult> { + let result_py_dict: Bound<'_, PyDict> = PyDict::new_bound(py); + + let num_fields = postgres_types::private::read_be_i32(buf).map_err(|err| { + RustPSQLDriverError::RustToPyValueConversionError(format!( + "Cannot read bytes data from PostgreSQL: {err}" + )) + })?; + if num_fields as usize != fields.len() { + return Err(RustPSQLDriverError::RustToPyValueConversionError(format!( + "invalid field count: {} vs {}", + num_fields, + fields.len() + ))); + } + + for field in fields { + let oid = postgres_types::private::read_be_i32(buf).map_err(|err| { + RustPSQLDriverError::RustToPyValueConversionError(format!( + "Cannot read bytes data from PostgreSQL: {err}" + )) + })? as u32; + + if oid != field.type_().oid() { + return Err(RustPSQLDriverError::RustToPyValueConversionError( + "unexpected OID".into(), + )); + } + + match field.type_().kind() { + Kind::Simple | Kind::Array(_) => { + result_py_dict.set_item( + field.name(), + postgres_bytes_to_py(py, field.type_(), buf, false)?.to_object(py), + )?; + } + Kind::Enum(_) => { + result_py_dict.set_item( + field.name(), + postgres_bytes_to_py(py, &Type::VARCHAR, buf, false)?.to_object(py), + )?; + } + _ => { + let (_, tail) = buf.split_at(4_usize); + *buf = tail; + result_py_dict.set_item( + field.name(), + raw_bytes_data_process(py, buf, field.name(), field.type_(), custom_decoders)? + .to_object(py), + )?; + } + } + } + + Ok(result_py_dict.to_object(py)) +} + +/// Process raw bytes from `PostgreSQL`. +/// +/// # Errors +/// +/// May return Err Result if cannot convert postgres +/// type into rust one. +pub fn raw_bytes_data_process( + py: Python<'_>, + raw_bytes_data: &mut &[u8], + column_name: &str, + column_type: &Type, + custom_decoders: &Option>, +) -> RustPSQLDriverPyResult> { + if let Some(custom_decoders) = custom_decoders { + let py_encoder_func = custom_decoders + .bind(py) + .get_item(column_name.to_lowercase()); + + if let Ok(Some(py_encoder_func)) = py_encoder_func { + return Ok(py_encoder_func + .call((raw_bytes_data.to_vec(),), None)? 
+ .unbind()); + } + } + + match column_type.kind() { + Kind::Simple | Kind::Array(_) => { + postgres_bytes_to_py(py, column_type, raw_bytes_data, true) + } + Kind::Composite(fields) => { + composite_postgres_to_py(py, fields, raw_bytes_data, custom_decoders) + } + Kind::Enum(_) => postgres_bytes_to_py(py, &Type::VARCHAR, raw_bytes_data, true), + _ => Err(RustPSQLDriverError::RustToPyValueConversionError( + column_type.to_string(), + )), + } +} + +/// Convert type from postgres to python type. +/// +/// # Errors +/// +/// May return Err Result if cannot convert postgres +/// type into rust one. +pub fn postgres_to_py( + py: Python<'_>, + row: &Row, + column: &Column, + column_i: usize, + custom_decoders: &Option>, +) -> RustPSQLDriverPyResult> { + let raw_bytes_data = row.col_buffer(column_i); + if let Some(mut raw_bytes_data) = raw_bytes_data { + return raw_bytes_data_process( + py, + &mut raw_bytes_data, + column.name(), + column.type_(), + custom_decoders, + ); + } + Ok(py.None()) +} + +/// Convert Python sequence to Rust vector. +/// Also it checks that sequence has set/list/tuple type. +/// +/// # Errors +/// +/// May return error if cannot convert Python type into Rust one. +/// May return error if parameters type isn't correct. +fn py_sequence_to_rust(bind_parameters: &Bound) -> RustPSQLDriverPyResult>> { + let mut coord_values_sequence_vec: Vec> = vec![]; + + if bind_parameters.is_instance_of::() { + let bind_pyset_parameters = bind_parameters.downcast::().unwrap(); + + for one_parameter in bind_pyset_parameters { + let extracted_parameter = one_parameter.extract::>().map_err(|_| { + RustPSQLDriverError::PyToRustValueConversionError( + format!("Error on sequence type extraction, please use correct list/tuple/set, {bind_parameters}") + ) + })?; + coord_values_sequence_vec.push(extracted_parameter); + } + } else if bind_parameters.is_instance_of::() + | bind_parameters.is_instance_of::() + { + coord_values_sequence_vec = bind_parameters.extract::>>().map_err(|_| { + RustPSQLDriverError::PyToRustValueConversionError( + format!("Error on sequence type extraction, please use correct list/tuple/set, {bind_parameters}") + ) + })?; + } else { + return Err(RustPSQLDriverError::PyToRustValueConversionError(format!( + "Invalid sequence type, please use list/tuple/set, {bind_parameters}" + ))); + }; + + Ok::>, RustPSQLDriverError>(coord_values_sequence_vec) +} diff --git a/src/value_converter/mod.rs b/src/value_converter/mod.rs new file mode 100644 index 00000000..9ca56447 --- /dev/null +++ b/src/value_converter/mod.rs @@ -0,0 +1,3 @@ +pub mod consts; +pub mod funcs; +pub mod models; diff --git a/src/value_converter/models/decimal.rs b/src/value_converter/models/decimal.rs new file mode 100644 index 00000000..13d009cc --- /dev/null +++ b/src/value_converter/models/decimal.rs @@ -0,0 +1,30 @@ +use postgres_types::{FromSql, Type}; +use pyo3::{types::PyAnyMethods, PyObject, Python, ToPyObject}; +use rust_decimal::Decimal; + +use crate::value_converter::consts::get_decimal_cls; + +pub struct InnerDecimal(pub Decimal); + +impl ToPyObject for InnerDecimal { + fn to_object(&self, py: Python<'_>) -> PyObject { + let dec_cls = get_decimal_cls(py).expect("failed to load decimal.Decimal"); + let ret = dec_cls + .call1((self.0.to_string(),)) + .expect("failed to call decimal.Decimal(value)"); + ret.to_object(py) + } +} + +impl<'a> FromSql<'a> for InnerDecimal { + fn from_sql( + ty: &Type, + raw: &'a [u8], + ) -> Result> { + Ok(InnerDecimal(::from_sql(ty, raw)?)) + } + + fn accepts(_ty: &Type) -> bool { + 
true + } +} diff --git a/src/value_converter/models/dto.rs b/src/value_converter/models/dto.rs new file mode 100644 index 00000000..5dfabbcf --- /dev/null +++ b/src/value_converter/models/dto.rs @@ -0,0 +1,489 @@ +use chrono::{self, DateTime, FixedOffset, NaiveDate, NaiveDateTime, NaiveTime}; +use geo_types::{Line as LineSegment, LineString, Point, Rect}; +use macaddr::{MacAddr6, MacAddr8}; +use pg_interval::Interval; +use postgres_types::ToSql; +use rust_decimal::Decimal; +use serde_json::{json, Value}; +use std::{fmt::Debug, net::IpAddr}; +use uuid::Uuid; + +use bytes::{BufMut, BytesMut}; +use postgres_protocol::types; +use pyo3::{PyObject, Python, ToPyObject}; +use tokio_postgres::types::{to_sql_checked, Type}; + +use crate::{ + additional_types::{Circle, Line, RustLineSegment, RustLineString, RustPoint, RustRect}, + exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, +}; +use pgvector::Vector as PgVector; +use postgres_array::{array::Array, Dimension}; + +#[derive(Debug, Clone, PartialEq)] +pub enum PythonDTO { + // Primitive + PyNone, + PyBytes(Vec), + PyBool(bool), + PyUUID(Uuid), + PyVarChar(String), + PyText(String), + PyString(String), + PyIntI16(i16), + PyIntI32(i32), + PyIntI64(i64), + PyIntU32(u32), + PyIntU64(u64), + PyFloat32(f32), + PyFloat64(f64), + PyMoney(i64), + PyDate(NaiveDate), + PyTime(NaiveTime), + PyDateTime(NaiveDateTime), + PyDateTimeTz(DateTime), + PyInterval(Interval), + PyIpAddress(IpAddr), + PyList(Vec), + PyArray(Array), + PyTuple(Vec), + PyJsonb(Value), + PyJson(Value), + PyMacAddr6(MacAddr6), + PyMacAddr8(MacAddr8), + PyDecimal(Decimal), + PyCustomType(Vec), + PyPoint(Point), + PyBox(Rect), + PyPath(LineString), + PyLine(Line), + PyLineSegment(LineSegment), + PyCircle(Circle), + // Arrays + PyBoolArray(Array), + PyUuidArray(Array), + PyVarCharArray(Array), + PyTextArray(Array), + PyInt16Array(Array), + PyInt32Array(Array), + PyInt64Array(Array), + PyFloat32Array(Array), + PyFloat64Array(Array), + PyMoneyArray(Array), + PyIpAddressArray(Array), + PyJSONBArray(Array), + PyJSONArray(Array), + PyDateArray(Array), + PyTimeArray(Array), + PyDateTimeArray(Array), + PyDateTimeTZArray(Array), + PyMacAddr6Array(Array), + PyMacAddr8Array(Array), + PyNumericArray(Array), + PyPointArray(Array), + PyBoxArray(Array), + PyPathArray(Array), + PyLineArray(Array), + PyLsegArray(Array), + PyCircleArray(Array), + PyIntervalArray(Array), + // PgVector + PyPgVector(Vec), +} + +impl ToPyObject for PythonDTO { + fn to_object(&self, py: Python<'_>) -> PyObject { + match self { + PythonDTO::PyNone => py.None(), + PythonDTO::PyBool(pybool) => pybool.to_object(py), + PythonDTO::PyString(py_string) + | PythonDTO::PyText(py_string) + | PythonDTO::PyVarChar(py_string) => py_string.to_object(py), + PythonDTO::PyIntI32(pyint) => pyint.to_object(py), + PythonDTO::PyIntI64(pyint) => pyint.to_object(py), + PythonDTO::PyIntU64(pyint) => pyint.to_object(py), + PythonDTO::PyFloat32(pyfloat) => pyfloat.to_object(py), + PythonDTO::PyFloat64(pyfloat) => pyfloat.to_object(py), + _ => unreachable!(), + } + } +} + +impl PythonDTO { + /// Return type of the Array for `PostgreSQL`. + /// + /// Since every Array must have concrete type, + /// we must say exactly what type of array we try to pass into + /// postgres. + /// + /// # Errors + /// May return Err Result if there is no support for passed python type. 
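`array_type` above is what gives a Python list or tuple parameter its concrete PostgreSQL array type: the `ToSql` implementation further down takes the type from the first element, and an empty sequence, having nothing to infer a type from, is written as SQL `NULL`. A sketch under the assumption of a table `items` with a `bigint[]` column `numbers` (both names are illustrative):

```python
import asyncio
from psqlpy import ConnectionPool

db_pool = ConnectionPool(dsn="postgres://postgres:postgres@localhost:5432/postgres")  # placeholder DSN


async def main() -> None:
    # The element type of the first item determines the PostgreSQL array type for the whole list.
    await db_pool.execute("INSERT INTO items (numbers) VALUES ($1)", parameters=[[1, 2, 3]])
    # An empty list has no first element to infer a type from, so NULL is sent instead.
    await db_pool.execute("INSERT INTO items (numbers) VALUES ($1)", parameters=[[]])
    db_pool.close()


asyncio.run(main())
```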
+ pub fn array_type(&self) -> RustPSQLDriverPyResult { + match self { + PythonDTO::PyBool(_) => Ok(tokio_postgres::types::Type::BOOL_ARRAY), + PythonDTO::PyUUID(_) => Ok(tokio_postgres::types::Type::UUID_ARRAY), + PythonDTO::PyVarChar(_) | PythonDTO::PyString(_) => { + Ok(tokio_postgres::types::Type::VARCHAR_ARRAY) + } + PythonDTO::PyText(_) => Ok(tokio_postgres::types::Type::TEXT_ARRAY), + PythonDTO::PyIntI16(_) => Ok(tokio_postgres::types::Type::INT2_ARRAY), + PythonDTO::PyIntI32(_) | PythonDTO::PyIntU32(_) => { + Ok(tokio_postgres::types::Type::INT4_ARRAY) + } + PythonDTO::PyIntI64(_) => Ok(tokio_postgres::types::Type::INT8_ARRAY), + PythonDTO::PyFloat32(_) => Ok(tokio_postgres::types::Type::FLOAT4_ARRAY), + PythonDTO::PyFloat64(_) => Ok(tokio_postgres::types::Type::FLOAT8_ARRAY), + PythonDTO::PyMoney(_) => Ok(tokio_postgres::types::Type::MONEY_ARRAY), + PythonDTO::PyIpAddress(_) => Ok(tokio_postgres::types::Type::INET_ARRAY), + PythonDTO::PyJsonb(_) => Ok(tokio_postgres::types::Type::JSONB_ARRAY), + PythonDTO::PyJson(_) => Ok(tokio_postgres::types::Type::JSON_ARRAY), + PythonDTO::PyDate(_) => Ok(tokio_postgres::types::Type::DATE_ARRAY), + PythonDTO::PyTime(_) => Ok(tokio_postgres::types::Type::TIME_ARRAY), + PythonDTO::PyDateTime(_) => Ok(tokio_postgres::types::Type::TIMESTAMP_ARRAY), + PythonDTO::PyDateTimeTz(_) => Ok(tokio_postgres::types::Type::TIMESTAMPTZ_ARRAY), + PythonDTO::PyMacAddr6(_) => Ok(tokio_postgres::types::Type::MACADDR_ARRAY), + PythonDTO::PyMacAddr8(_) => Ok(tokio_postgres::types::Type::MACADDR8_ARRAY), + PythonDTO::PyDecimal(_) => Ok(tokio_postgres::types::Type::NUMERIC_ARRAY), + PythonDTO::PyPoint(_) => Ok(tokio_postgres::types::Type::POINT_ARRAY), + PythonDTO::PyBox(_) => Ok(tokio_postgres::types::Type::BOX_ARRAY), + PythonDTO::PyPath(_) => Ok(tokio_postgres::types::Type::PATH_ARRAY), + PythonDTO::PyLine(_) => Ok(tokio_postgres::types::Type::LINE_ARRAY), + PythonDTO::PyLineSegment(_) => Ok(tokio_postgres::types::Type::LSEG_ARRAY), + PythonDTO::PyCircle(_) => Ok(tokio_postgres::types::Type::CIRCLE_ARRAY), + PythonDTO::PyInterval(_) => Ok(tokio_postgres::types::Type::INTERVAL_ARRAY), + _ => Err(RustPSQLDriverError::PyToRustValueConversionError( + "Can't process array type, your type doesn't have support yet".into(), + )), + } + } + + /// Convert enum into serde `Value`. + /// + /// # Errors + /// May return Err Result if cannot convert python type into rust. + pub fn to_serde_value(&self) -> RustPSQLDriverPyResult { + match self { + PythonDTO::PyNone => Ok(Value::Null), + PythonDTO::PyBool(pybool) => Ok(json!(pybool)), + PythonDTO::PyString(pystring) + | PythonDTO::PyText(pystring) + | PythonDTO::PyVarChar(pystring) => Ok(json!(pystring)), + PythonDTO::PyIntI32(pyint) => Ok(json!(pyint)), + PythonDTO::PyIntI64(pyint) => Ok(json!(pyint)), + PythonDTO::PyIntU64(pyint) => Ok(json!(pyint)), + PythonDTO::PyFloat32(pyfloat) => Ok(json!(pyfloat)), + PythonDTO::PyFloat64(pyfloat) => Ok(json!(pyfloat)), + PythonDTO::PyList(pylist) => { + let mut vec_serde_values: Vec = vec![]; + + for py_object in pylist { + vec_serde_values.push(py_object.to_serde_value()?); + } + + Ok(json!(vec_serde_values)) + } + PythonDTO::PyArray(array) => Ok(json!(pythondto_array_to_serde(Some(array.clone()))?)), + PythonDTO::PyJsonb(py_dict) | PythonDTO::PyJson(py_dict) => Ok(py_dict.clone()), + _ => Err(RustPSQLDriverError::PyToRustValueConversionError( + "Cannot convert your type into Rust type".into(), + )), + } + } +} + +/// Implement `ToSql` trait. 
+/// +/// It allows us to pass `PythonDTO` enum as parameter +/// directly into `.execute()` method in +/// `DatabasePool`, `Connection` and `Transaction`. +impl ToSql for PythonDTO { + /// Answer the question Is this type can be passed into sql? + /// + /// Always True. + fn accepts(_ty: &tokio_postgres::types::Type) -> bool + where + Self: Sized, + { + true + } + + /// Convert our `PythonDTO` enum into bytes. + /// + /// We convert every inner type of `PythonDTO` enum variant + /// into bytes and write them into bytes buffer. + /// + /// # Errors + /// + /// May return Err Result if cannot write bytes into buffer. + #[allow(clippy::too_many_lines)] + fn to_sql( + &self, + ty: &tokio_postgres::types::Type, + out: &mut BytesMut, + ) -> Result> + where + Self: Sized, + { + let mut return_is_null_true: bool = false; + if *self == PythonDTO::PyNone { + return_is_null_true = true; + } + + match self { + PythonDTO::PyNone => {} + PythonDTO::PyCustomType(some_bytes) => { + <&[u8] as ToSql>::to_sql(&some_bytes.as_slice(), ty, out)?; + } + PythonDTO::PyBytes(pybytes) => { + as ToSql>::to_sql(pybytes, ty, out)?; + } + PythonDTO::PyBool(boolean) => types::bool_to_sql(*boolean, out), + PythonDTO::PyVarChar(string) => { + <&str as ToSql>::to_sql(&string.as_str(), ty, out)?; + } + PythonDTO::PyText(string) => { + <&str as ToSql>::to_sql(&string.as_str(), ty, out)?; + } + PythonDTO::PyUUID(pyuuid) => { + ::to_sql(pyuuid, ty, out)?; + } + PythonDTO::PyString(string) => { + <&str as ToSql>::to_sql(&string.as_str(), ty, out)?; + } + PythonDTO::PyIntI16(int) => out.put_i16(*int), + PythonDTO::PyIntI32(int) => out.put_i32(*int), + PythonDTO::PyIntI64(int) | PythonDTO::PyMoney(int) => out.put_i64(*int), + PythonDTO::PyIntU32(int) => out.put_u32(*int), + PythonDTO::PyIntU64(int) => out.put_u64(*int), + PythonDTO::PyFloat32(float) => out.put_f32(*float), + PythonDTO::PyFloat64(float) => out.put_f64(*float), + PythonDTO::PyDate(pydate) => { + <&NaiveDate as ToSql>::to_sql(&pydate, ty, out)?; + } + PythonDTO::PyTime(pytime) => { + <&NaiveTime as ToSql>::to_sql(&pytime, ty, out)?; + } + PythonDTO::PyDateTime(pydatetime_no_tz) => { + <&NaiveDateTime as ToSql>::to_sql(&pydatetime_no_tz, ty, out)?; + } + PythonDTO::PyDateTimeTz(pydatetime_tz) => { + <&DateTime as ToSql>::to_sql(&pydatetime_tz, ty, out)?; + } + PythonDTO::PyInterval(pyinterval) => { + <&Interval as ToSql>::to_sql(&pyinterval, ty, out)?; + } + PythonDTO::PyIpAddress(pyidaddress) => { + <&IpAddr as ToSql>::to_sql(&pyidaddress, ty, out)?; + } + PythonDTO::PyMacAddr6(pymacaddr) => { + <&[u8] as ToSql>::to_sql(&pymacaddr.as_bytes(), ty, out)?; + } + PythonDTO::PyMacAddr8(pymacaddr) => { + <&[u8] as ToSql>::to_sql(&pymacaddr.as_bytes(), ty, out)?; + } + PythonDTO::PyPoint(pypoint) => { + <&RustPoint as ToSql>::to_sql(&&RustPoint::new(*pypoint), ty, out)?; + } + PythonDTO::PyBox(pybox) => { + <&RustRect as ToSql>::to_sql(&&RustRect::new(*pybox), ty, out)?; + } + PythonDTO::PyPath(pypath) => { + <&RustLineString as ToSql>::to_sql(&&RustLineString::new(pypath.clone()), ty, out)?; + } + PythonDTO::PyLine(pyline) => { + <&Line as ToSql>::to_sql(&pyline, ty, out)?; + } + PythonDTO::PyLineSegment(pylinesegment) => { + <&RustLineSegment as ToSql>::to_sql( + &&RustLineSegment::new(*pylinesegment), + ty, + out, + )?; + } + PythonDTO::PyCircle(pycircle) => { + <&Circle as ToSql>::to_sql(&pycircle, ty, out)?; + } + PythonDTO::PyList(py_iterable) | PythonDTO::PyTuple(py_iterable) => { + let mut items = Vec::new(); + for inner in py_iterable { + items.push(inner); + } + 
if items.is_empty() { + return_is_null_true = true; + } else { + items.to_sql(&items[0].array_type()?, out)?; + } + } + PythonDTO::PyArray(array) => { + if let Some(first_elem) = array.iter().nth(0) { + match first_elem.array_type() { + Ok(ok_type) => { + array.to_sql(&ok_type, out)?; + } + Err(_) => { + return Err(RustPSQLDriverError::PyToRustValueConversionError( + "Cannot define array type.".into(), + ))? + } + } + } + } + PythonDTO::PyJsonb(py_dict) | PythonDTO::PyJson(py_dict) => { + <&Value as ToSql>::to_sql(&py_dict, ty, out)?; + } + PythonDTO::PyDecimal(py_decimal) => { + ::to_sql(py_decimal, ty, out)?; + } + PythonDTO::PyBoolArray(array) => { + array.to_sql(&Type::BOOL_ARRAY, out)?; + } + PythonDTO::PyUuidArray(array) => { + array.to_sql(&Type::UUID_ARRAY, out)?; + } + PythonDTO::PyVarCharArray(array) => { + array.to_sql(&Type::VARCHAR_ARRAY, out)?; + } + PythonDTO::PyTextArray(array) => { + array.to_sql(&Type::TEXT_ARRAY, out)?; + } + PythonDTO::PyInt16Array(array) => { + array.to_sql(&Type::INT2_ARRAY, out)?; + } + PythonDTO::PyInt32Array(array) => { + array.to_sql(&Type::INT4_ARRAY, out)?; + } + PythonDTO::PyInt64Array(array) => { + array.to_sql(&Type::INT8_ARRAY, out)?; + } + PythonDTO::PyFloat32Array(array) => { + array.to_sql(&Type::FLOAT4, out)?; + } + PythonDTO::PyFloat64Array(array) => { + array.to_sql(&Type::FLOAT8_ARRAY, out)?; + } + PythonDTO::PyMoneyArray(array) => { + array.to_sql(&Type::MONEY_ARRAY, out)?; + } + PythonDTO::PyIpAddressArray(array) => { + array.to_sql(&Type::INET_ARRAY, out)?; + } + PythonDTO::PyJSONBArray(array) => { + array.to_sql(&Type::JSONB_ARRAY, out)?; + } + PythonDTO::PyJSONArray(array) => { + array.to_sql(&Type::JSON_ARRAY, out)?; + } + PythonDTO::PyDateArray(array) => { + array.to_sql(&Type::DATE_ARRAY, out)?; + } + PythonDTO::PyTimeArray(array) => { + array.to_sql(&Type::TIME_ARRAY, out)?; + } + PythonDTO::PyDateTimeArray(array) => { + array.to_sql(&Type::TIMESTAMP_ARRAY, out)?; + } + PythonDTO::PyDateTimeTZArray(array) => { + array.to_sql(&Type::TIMESTAMPTZ_ARRAY, out)?; + } + PythonDTO::PyMacAddr6Array(array) => { + array.to_sql(&Type::MACADDR_ARRAY, out)?; + } + PythonDTO::PyMacAddr8Array(array) => { + array.to_sql(&Type::MACADDR8_ARRAY, out)?; + } + PythonDTO::PyNumericArray(array) => { + array.to_sql(&Type::NUMERIC_ARRAY, out)?; + } + PythonDTO::PyPointArray(array) => { + array.to_sql(&Type::POINT_ARRAY, out)?; + } + PythonDTO::PyBoxArray(array) => { + array.to_sql(&Type::BOX_ARRAY, out)?; + } + PythonDTO::PyPathArray(array) => { + array.to_sql(&Type::PATH_ARRAY, out)?; + } + PythonDTO::PyLineArray(array) => { + array.to_sql(&Type::LINE_ARRAY, out)?; + } + PythonDTO::PyLsegArray(array) => { + array.to_sql(&Type::LSEG_ARRAY, out)?; + } + PythonDTO::PyCircleArray(array) => { + array.to_sql(&Type::CIRCLE_ARRAY, out)?; + } + PythonDTO::PyIntervalArray(array) => { + array.to_sql(&Type::INTERVAL_ARRAY, out)?; + } + PythonDTO::PyPgVector(vector) => { + ::to_sql(&PgVector::from(vector.clone()), ty, out)?; + } + } + + if return_is_null_true { + Ok(tokio_postgres::types::IsNull::Yes) + } else { + Ok(tokio_postgres::types::IsNull::No) + } + } + + to_sql_checked!(); +} + +/// Convert Array of `PythonDTO`s to serde `Value`. +/// +/// It can convert multidimensional arrays. 
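The `PyPgVector` arm a few lines up encodes a `pgvector` value; on the Python side these patches expose it through the `PgVector` wrapper in `extra_types`. The constructor shape, import location, presence of the `pgvector` extension and the `documents.embedding` column are assumptions in this sketch:

```python
import asyncio
from psqlpy import ConnectionPool
from psqlpy.extra_types import PgVector  # assumption: exact import location may differ

db_pool = ConnectionPool(dsn="postgres://postgres:postgres@localhost:5432/postgres")  # placeholder DSN


async def main() -> None:
    # Requires the pgvector extension and a vector(3) column named `embedding` (illustrative names).
    await db_pool.execute(
        "INSERT INTO documents (embedding) VALUES ($1)",
        parameters=[PgVector([0.1, 0.2, 0.3])],
    )
    db_pool.close()


asyncio.run(main())
```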
+fn pythondto_array_to_serde(array: Option>) -> RustPSQLDriverPyResult { + match array { + Some(array) => inner_pythondto_array_to_serde( + array.dimensions(), + array.iter().collect::>().as_slice(), + 0, + 0, + ), + None => Ok(Value::Null), + } +} + +/// Inner conversion array of `PythonDTO`s to serde `Value`. +#[allow(clippy::cast_sign_loss)] +fn inner_pythondto_array_to_serde( + dimensions: &[Dimension], + data: &[&PythonDTO], + dimension_index: usize, + mut lower_bound: usize, +) -> RustPSQLDriverPyResult { + let current_dimension = dimensions.get(dimension_index); + + if let Some(current_dimension) = current_dimension { + let possible_next_dimension = dimensions.get(dimension_index + 1); + match possible_next_dimension { + Some(next_dimension) => { + let mut final_list: Value = Value::Array(vec![]); + + for _ in 0..current_dimension.len as usize { + if dimensions.get(dimension_index + 1).is_some() { + let inner_pylist = inner_pythondto_array_to_serde( + dimensions, + &data[lower_bound..next_dimension.len as usize + lower_bound], + dimension_index + 1, + 0, + )?; + match final_list { + Value::Array(ref mut array) => array.push(inner_pylist), + _ => unreachable!(), + } + lower_bound += next_dimension.len as usize; + }; + } + + return Ok(final_list); + } + None => { + return data.iter().map(|x| x.to_serde_value()).collect(); + } + } + } + + Ok(Value::Array(vec![])) +} diff --git a/src/value_converter/models/interval.rs b/src/value_converter/models/interval.rs new file mode 100644 index 00000000..7259e20d --- /dev/null +++ b/src/value_converter/models/interval.rs @@ -0,0 +1,37 @@ +use pg_interval::Interval; +use postgres_types::{FromSql, Type}; +use pyo3::{ + types::{PyAnyMethods, PyDict, PyDictMethods}, + PyObject, Python, ToPyObject, +}; + +use crate::value_converter::consts::get_timedelta_cls; + +pub struct InnerInterval(pub Interval); + +impl ToPyObject for InnerInterval { + fn to_object(&self, py: Python<'_>) -> PyObject { + let td_cls = get_timedelta_cls(py).expect("failed to load datetime.timedelta"); + let pydict = PyDict::new_bound(py); + let months = self.0.months * 30; + let _ = pydict.set_item("days", self.0.days + months); + let _ = pydict.set_item("microseconds", self.0.microseconds); + let ret = td_cls + .call((), Some(&pydict)) + .expect("failed to call datetime.timedelta(days=<>, microseconds=<>)"); + ret.to_object(py) + } +} + +impl<'a> FromSql<'a> for InnerInterval { + fn from_sql( + ty: &Type, + raw: &'a [u8], + ) -> Result> { + Ok(InnerInterval(::from_sql(ty, raw)?)) + } + + fn accepts(_ty: &Type) -> bool { + true + } +} diff --git a/src/value_converter/models/mod.rs b/src/value_converter/models/mod.rs new file mode 100644 index 00000000..92d26e49 --- /dev/null +++ b/src/value_converter/models/mod.rs @@ -0,0 +1,5 @@ +pub mod decimal; +pub mod dto; +pub mod interval; +pub mod serde_value; +pub mod uuid; diff --git a/src/value_converter/models/serde_value.rs b/src/value_converter/models/serde_value.rs new file mode 100644 index 00000000..b39f7737 --- /dev/null +++ b/src/value_converter/models/serde_value.rs @@ -0,0 +1,89 @@ +use postgres_types::FromSql; +use serde_json::{json, Value}; +use uuid::Uuid; + +use pyo3::{ + types::{PyAnyMethods, PyDict, PyList}, + Bound, FromPyObject, Py, PyAny, PyObject, PyResult, Python, ToPyObject, +}; +use tokio_postgres::types::Type; + +use crate::{ + exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, + value_converter::funcs::{from_python::py_to_rust, to_python::build_python_from_serde_value}, +}; + +/// Struct for 
Value. +/// +/// We use custom struct because we need to implement external traits +/// to it. +#[derive(Clone)] +pub struct InternalSerdeValue(Value); + +impl<'a> FromPyObject<'a> for InternalSerdeValue { + fn extract_bound(ob: &Bound<'a, PyAny>) -> PyResult { + let serde_value = build_serde_value(ob.clone().unbind())?; + + Ok(InternalSerdeValue(serde_value)) + } +} + +impl ToPyObject for InternalSerdeValue { + fn to_object(&self, py: Python<'_>) -> PyObject { + match build_python_from_serde_value(py, self.0.clone()) { + Ok(ok_value) => ok_value, + Err(_) => py.None(), + } + } +} + +impl<'a> FromSql<'a> for InternalSerdeValue { + fn from_sql( + ty: &Type, + raw: &'a [u8], + ) -> Result> { + Ok(InternalSerdeValue(::from_sql(ty, raw)?)) + } + + fn accepts(_ty: &Type) -> bool { + true + } +} + +/// Convert python List of Dict type or just Dict into serde `Value`. +/// +/// # Errors +/// May return error if cannot convert Python type into Rust one. +#[allow(clippy::needless_pass_by_value)] +pub fn build_serde_value(value: Py) -> RustPSQLDriverPyResult { + Python::with_gil(|gil| { + let bind_value = value.bind(gil); + if bind_value.is_instance_of::() { + let mut result_vec: Vec = vec![]; + + let params = bind_value.extract::>>()?; + + for inner in params { + let inner_bind = inner.bind(gil); + if inner_bind.is_instance_of::() { + let python_dto = py_to_rust(inner_bind)?; + result_vec.push(python_dto.to_serde_value()?); + } else if inner_bind.is_instance_of::() { + let serde_value = build_serde_value(inner)?; + result_vec.push(serde_value); + } else { + return Err(RustPSQLDriverError::PyToRustValueConversionError( + "PyJSON must have dicts.".to_string(), + )); + } + } + Ok(json!(result_vec)) + } else if bind_value.is_instance_of::() { + return py_to_rust(bind_value)?.to_serde_value(); + } else { + return Err(RustPSQLDriverError::PyToRustValueConversionError( + "PyJSON must be dict value.".to_string(), + )); + } + }) +} diff --git a/src/value_converter/models/uuid.rs b/src/value_converter/models/uuid.rs new file mode 100644 index 00000000..100bfbf8 --- /dev/null +++ b/src/value_converter/models/uuid.rs @@ -0,0 +1,46 @@ +use postgres_types::FromSql; +use uuid::Uuid; + +use pyo3::{ + types::PyAnyMethods, Bound, FromPyObject, PyAny, PyObject, PyResult, Python, ToPyObject, +}; +use tokio_postgres::types::Type; + +use crate::exceptions::rust_errors::RustPSQLDriverError; + +/// Struct for Uuid. +/// +/// We use custom struct because we need to implement external traits +/// to it. 
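`build_serde_value` above accepts either a dict or a list whose elements are themselves dicts or lists, which is what lets a plain Python dict be bound straight into a `jsonb` parameter. A sketch; the `users.metadata` column and the DSN are illustrative:

```python
import asyncio
from psqlpy import ConnectionPool

db_pool = ConnectionPool(dsn="postgres://postgres:postgres@localhost:5432/postgres")  # placeholder DSN


async def main() -> None:
    # A plain dict is converted through build_serde_value into a JSONB parameter.
    await db_pool.execute(
        "UPDATE users SET metadata = $1 WHERE id = $2",
        parameters=[{"roles": ["admin"], "active": True}, 100],
    )
    db_pool.close()


asyncio.run(main())
```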
+#[derive(Clone, Copy)] +pub struct InternalUuid(Uuid); + +impl<'a> FromPyObject<'a> for InternalUuid { + fn extract_bound(obj: &Bound<'a, PyAny>) -> PyResult { + let uuid_value = Uuid::parse_str(obj.str()?.extract::<&str>()?).map_err(|_| { + RustPSQLDriverError::PyToRustValueConversionError( + "Cannot convert UUID Array to inner rust type, check you parameters.".into(), + ) + })?; + Ok(InternalUuid(uuid_value)) + } +} + +impl ToPyObject for InternalUuid { + fn to_object(&self, py: Python<'_>) -> PyObject { + self.0.to_string().as_str().to_object(py) + } +} + +impl<'a> FromSql<'a> for InternalUuid { + fn from_sql( + ty: &Type, + raw: &'a [u8], + ) -> Result> { + Ok(InternalUuid(::from_sql(ty, raw)?)) + } + + fn accepts(_ty: &Type) -> bool { + true + } +} From e69a383c90baee2483430ec296bf1d054072c7ab Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Mon, 28 Apr 2025 23:23:01 +0200 Subject: [PATCH 23/65] Moved additional_types to value_converter --- src/extra_types.rs | 2 +- src/lib.rs | 1 - src/{ => value_converter}/additional_types.rs | 0 src/value_converter/funcs/from_python.rs | 13 ++----------- src/value_converter/funcs/to_python.rs | 13 ++++++------- src/value_converter/mod.rs | 1 + src/value_converter/models/dto.rs | 4 +++- 7 files changed, 13 insertions(+), 21 deletions(-) rename src/{ => value_converter}/additional_types.rs (100%) diff --git a/src/extra_types.rs b/src/extra_types.rs index ea4e35f5..1e8d22b4 100644 --- a/src/extra_types.rs +++ b/src/extra_types.rs @@ -10,9 +10,9 @@ use pyo3::{ use serde_json::Value; use crate::{ - additional_types::{Circle as RustCircle, Line as RustLine}, exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, value_converter::{ + additional_types::{Circle as RustCircle, Line as RustLine}, funcs::from_python::{ build_flat_geo_coords, build_geo_coords, py_sequence_into_postgres_array, }, diff --git a/src/lib.rs b/src/lib.rs index e3602311..e0e1fe11 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,4 +1,3 @@ -pub mod additional_types; pub mod common; pub mod driver; pub mod exceptions; diff --git a/src/additional_types.rs b/src/value_converter/additional_types.rs similarity index 100% rename from src/additional_types.rs rename to src/value_converter/additional_types.rs diff --git a/src/value_converter/funcs/from_python.rs b/src/value_converter/funcs/from_python.rs index 48fd0a4e..636ef5f6 100644 --- a/src/value_converter/funcs/from_python.rs +++ b/src/value_converter/funcs/from_python.rs @@ -4,34 +4,25 @@ use geo_types::{coord, Coord}; use itertools::Itertools; use pg_interval::Interval; use postgres_array::{Array, Dimension}; -use postgres_types::{Field, FromSql, Kind, Type}; use rust_decimal::Decimal; use serde_json::{json, Map, Value}; use std::net::IpAddr; -use tokio_postgres::{Column, Row}; use uuid::Uuid; use pyo3::{ types::{ PyAnyMethods, PyBool, PyBytes, PyDate, PyDateTime, PyDelta, PyDict, PyDictMethods, PyFloat, - PyInt, PyList, PyListMethods, PyMapping, PySequence, PySet, PyString, PyTime, PyTuple, - PyTypeMethods, + PyInt, PyList, PyMapping, PySequence, PySet, PyString, PyTime, PyTuple, PyTypeMethods, }, - Bound, FromPyObject, IntoPy, Py, PyAny, Python, ToPyObject, + Bound, FromPyObject, Py, PyAny, Python, }; use crate::{ - additional_types::{ - Circle, Line, RustLineSegment, RustLineString, RustMacAddr6, RustMacAddr8, RustPoint, - RustRect, - }, exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, extra_types::{self}, value_converter::{consts::KWARGS_QUERYSTRINGS, 
models::dto::PythonDTO}, }; -use pgvector::Vector as PgVector; - /// Convert single python parameter to `PythonDTO` enum. /// /// # Errors diff --git a/src/value_converter/funcs/to_python.rs b/src/value_converter/funcs/to_python.rs index fce887b1..e65a0085 100644 --- a/src/value_converter/funcs/to_python.rs +++ b/src/value_converter/funcs/to_python.rs @@ -1,5 +1,4 @@ -use chrono::{self, DateTime, FixedOffset, NaiveDate, NaiveDateTime, NaiveTime, TimeZone}; -use chrono_tz::Tz; +use chrono::{self, DateTime, FixedOffset, NaiveDate, NaiveDateTime, NaiveTime}; use pg_interval::Interval; use postgres_array::{Array, Dimension}; use postgres_types::{Field, FromSql, Kind, Type}; @@ -14,16 +13,16 @@ use pyo3::{ PyAnyMethods, PyBytes, PyDict, PyDictMethods, PyList, PyListMethods, PySet, PyString, PyTuple, }, - Bound, FromPyObject, IntoPy, Py, PyAny, Python, ToPyObject, + Bound, IntoPy, Py, PyAny, Python, ToPyObject, }; use crate::{ - additional_types::{ - Circle, Line, RustLineSegment, RustLineString, RustMacAddr6, RustMacAddr8, RustPoint, - RustRect, - }, exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, value_converter::{ + additional_types::{ + Circle, Line, RustLineSegment, RustLineString, RustMacAddr6, RustMacAddr8, RustPoint, + RustRect, + }, consts::KWARGS_QUERYSTRINGS, models::{ decimal::InnerDecimal, interval::InnerInterval, serde_value::InternalSerdeValue, diff --git a/src/value_converter/mod.rs b/src/value_converter/mod.rs index 9ca56447..c9ed8d79 100644 --- a/src/value_converter/mod.rs +++ b/src/value_converter/mod.rs @@ -1,3 +1,4 @@ +pub mod additional_types; pub mod consts; pub mod funcs; pub mod models; diff --git a/src/value_converter/models/dto.rs b/src/value_converter/models/dto.rs index 5dfabbcf..8609a600 100644 --- a/src/value_converter/models/dto.rs +++ b/src/value_converter/models/dto.rs @@ -14,8 +14,10 @@ use pyo3::{PyObject, Python, ToPyObject}; use tokio_postgres::types::{to_sql_checked, Type}; use crate::{ - additional_types::{Circle, Line, RustLineSegment, RustLineString, RustPoint, RustRect}, exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, + value_converter::additional_types::{ + Circle, Line, RustLineSegment, RustLineString, RustPoint, RustRect, + }, }; use pgvector::Vector as PgVector; use postgres_array::{array::Array, Dimension}; From 2f1576935d5b81614a21c684e467d8a32fe8b003 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Tue, 29 Apr 2025 18:21:34 +0200 Subject: [PATCH 24/65] Moved common func to utils --- src/value_converter/funcs/from_python.rs | 33 +++++------------------- src/value_converter/mod.rs | 1 + src/value_converter/utils.rs | 25 ++++++++++++++++++ 3 files changed, 33 insertions(+), 26 deletions(-) create mode 100644 src/value_converter/utils.rs diff --git a/src/value_converter/funcs/from_python.rs b/src/value_converter/funcs/from_python.rs index 636ef5f6..4fe73290 100644 --- a/src/value_converter/funcs/from_python.rs +++ b/src/value_converter/funcs/from_python.rs @@ -14,13 +14,16 @@ use pyo3::{ PyAnyMethods, PyBool, PyBytes, PyDate, PyDateTime, PyDelta, PyDict, PyDictMethods, PyFloat, PyInt, PyList, PyMapping, PySequence, PySet, PyString, PyTime, PyTuple, PyTypeMethods, }, - Bound, FromPyObject, Py, PyAny, Python, + Bound, Py, PyAny, Python, }; use crate::{ exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, extra_types::{self}, - value_converter::{consts::KWARGS_QUERYSTRINGS, models::dto::PythonDTO}, + value_converter::{ + consts::KWARGS_QUERYSTRINGS, 
models::dto::PythonDTO, + utils::extract_value_from_python_object_or_raise, + }, }; /// Convert single python parameter to `PythonDTO` enum. @@ -449,28 +452,6 @@ pub fn py_to_rust(parameter: &pyo3::Bound<'_, PyAny>) -> RustPSQLDriverPyResult< ))) } -/// Extract a value from a Python object, raising an error if missing or invalid -/// -/// # Errors -/// This function will return `Err` in the following cases: -/// - The Python object does not have the specified attribute -/// - The attribute exists but cannot be extracted into the specified Rust type -fn extract_value_from_python_object_or_raise<'py, T>( - parameter: &'py pyo3::Bound<'_, PyAny>, - attr_name: &str, -) -> Result -where - T: FromPyObject<'py>, -{ - parameter - .getattr(attr_name) - .ok() - .and_then(|attr| attr.extract::().ok()) - .ok_or_else(|| { - RustPSQLDriverError::PyToRustValueConversionError("Invalid attribute".into()) - }) -} - /// Extract a timezone-aware datetime from a Python object. /// This function retrieves various datetime components (`year`, `month`, `day`, etc.) /// from a Python object and constructs a `DateTime` @@ -552,7 +533,7 @@ pub fn py_sequence_into_postgres_array( lower_bound: 1, }); - let first_seq_elem = py_seq.iter()?.next(); + let first_seq_elem = py_seq.try_iter()?.next(); match first_seq_elem { Some(first_seq_elem) => { if let Ok(first_seq_elem) = first_seq_elem { @@ -602,7 +583,7 @@ pub fn py_sequence_into_flat_vec( let mut final_vec: Vec = vec![]; - for seq_elem in py_seq.iter()? { + for seq_elem in py_seq.try_iter()? { let ok_seq_elem = seq_elem?; // Check for the string because it's sequence too, diff --git a/src/value_converter/mod.rs b/src/value_converter/mod.rs index c9ed8d79..e8cbc82b 100644 --- a/src/value_converter/mod.rs +++ b/src/value_converter/mod.rs @@ -2,3 +2,4 @@ pub mod additional_types; pub mod consts; pub mod funcs; pub mod models; +pub mod utils; diff --git a/src/value_converter/utils.rs b/src/value_converter/utils.rs new file mode 100644 index 00000000..c94b2669 --- /dev/null +++ b/src/value_converter/utils.rs @@ -0,0 +1,25 @@ +use pyo3::{types::PyAnyMethods, FromPyObject, PyAny}; + +use crate::exceptions::rust_errors::RustPSQLDriverError; + +/// Extract a value from a Python object, raising an error if missing or invalid +/// +/// # Errors +/// This function will return `Err` in the following cases: +/// - The Python object does not have the specified attribute +/// - The attribute exists but cannot be extracted into the specified Rust type +pub fn extract_value_from_python_object_or_raise<'py, T>( + parameter: &'py pyo3::Bound<'_, PyAny>, + attr_name: &str, +) -> Result +where + T: FromPyObject<'py>, +{ + parameter + .getattr(attr_name) + .ok() + .and_then(|attr| attr.extract::().ok()) + .ok_or_else(|| { + RustPSQLDriverError::PyToRustValueConversionError("Invalid attribute".into()) + }) +} From d29bd6bd3f242b6cffb3c3a2d3b49f33228039e8 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Tue, 29 Apr 2025 22:48:56 +0200 Subject: [PATCH 25/65] Started implementing new traits and realizations for convertions from Python to Rust types --- src/driver/inner_connection.rs | 2 +- src/extra_types.rs | 23 +- src/value_converter/additional_types.rs | 2 + src/value_converter/dto/converter_impls.rs | 199 ++++++++++++++++++ src/value_converter/dto/enums.rs | 82 ++++++++ .../{models/dto.rs => dto/impls.rs} | 165 ++------------- src/value_converter/dto/mod.rs | 3 + src/value_converter/funcs/from_python.rs | 112 +++------- src/value_converter/mod.rs | 2 + 
src/value_converter/models/mod.rs | 1 - src/value_converter/models/serde_value.rs | 149 ++++++++++--- src/value_converter/traits.rs | 9 + 12 files changed, 488 insertions(+), 261 deletions(-) create mode 100644 src/value_converter/dto/converter_impls.rs create mode 100644 src/value_converter/dto/enums.rs rename src/value_converter/{models/dto.rs => dto/impls.rs} (74%) create mode 100644 src/value_converter/dto/mod.rs create mode 100644 src/value_converter/traits.rs diff --git a/src/driver/inner_connection.rs b/src/driver/inner_connection.rs index ae060baa..2dfbcbb7 100644 --- a/src/driver/inner_connection.rs +++ b/src/driver/inner_connection.rs @@ -10,8 +10,8 @@ use crate::{ query_result::{PSQLDriverPyQueryResult, PSQLDriverSinglePyQueryResult}, value_converter::{ consts::QueryParameter, + dto::enums::PythonDTO, funcs::{from_python::convert_parameters_and_qs, to_python::postgres_to_py}, - models::dto::PythonDTO, }, }; diff --git a/src/extra_types.rs b/src/extra_types.rs index 1e8d22b4..48058ff4 100644 --- a/src/extra_types.rs +++ b/src/extra_types.rs @@ -13,13 +13,19 @@ use crate::{ exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, value_converter::{ additional_types::{Circle as RustCircle, Line as RustLine}, + dto::enums::PythonDTO, funcs::from_python::{ build_flat_geo_coords, build_geo_coords, py_sequence_into_postgres_array, }, - models::{dto::PythonDTO, serde_value::build_serde_value}, + models::serde_value::build_serde_value, }, }; +pub struct PythonArray; +pub struct PythonDecimal; +pub struct PythonUUID; +pub struct PythonEnum; + #[pyclass] #[derive(Clone)] pub struct PgVector(Vec); @@ -34,7 +40,7 @@ impl PgVector { impl PgVector { #[must_use] - pub fn inner_value(self) -> Vec { + pub fn inner(self) -> Vec { self.0 } } @@ -49,7 +55,7 @@ macro_rules! build_python_type { impl $st_name { #[must_use] - pub fn retrieve_value(&self) -> $rust_type { + pub fn inner(&self) -> $rust_type { self.inner_value } } @@ -135,7 +141,12 @@ macro_rules! build_json_py_type { impl $st_name { #[must_use] - pub fn inner(&self) -> &$rust_type { + pub fn inner(&self) -> $rust_type { + self.inner.clone() + } + + #[must_use] + pub fn inner_ref(&self) -> &$rust_type { &self.inner } } @@ -144,7 +155,7 @@ macro_rules! build_json_py_type { impl $st_name { #[new] #[allow(clippy::missing_errors_doc)] - pub fn new_class(value: Py) -> RustPSQLDriverPyResult { + pub fn new_class(value: &Bound<'_, PyAny>) -> RustPSQLDriverPyResult { Ok(Self { inner: build_serde_value(value)?, }) @@ -223,7 +234,7 @@ macro_rules! build_geo_type { impl $st_name { #[must_use] - pub fn retrieve_value(&self) -> $rust_type { + pub fn inner(&self) -> $rust_type { self.inner.clone() } } diff --git a/src/value_converter/additional_types.rs b/src/value_converter/additional_types.rs index 5dd435a0..1159939a 100644 --- a/src/value_converter/additional_types.rs +++ b/src/value_converter/additional_types.rs @@ -13,6 +13,8 @@ use pyo3::{ use serde::{Deserialize, Serialize}; use tokio_postgres::types::{FromSql, Type}; +pub struct NonePyType; + macro_rules! 
build_additional_rust_type { ($st_name:ident, $rust_type:ty) => { #[derive(Debug)] diff --git a/src/value_converter/dto/converter_impls.rs b/src/value_converter/dto/converter_impls.rs new file mode 100644 index 00000000..97675af3 --- /dev/null +++ b/src/value_converter/dto/converter_impls.rs @@ -0,0 +1,199 @@ +use std::net::IpAddr; + +use chrono::{DateTime, FixedOffset, NaiveDate, NaiveDateTime, NaiveTime}; +use pg_interval::Interval; +use pyo3::{ + types::{PyAnyMethods, PyDateTime, PyDelta, PyDict}, + Bound, PyAny, +}; +use rust_decimal::Decimal; +use uuid::Uuid; + +use crate::{ + exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, + extra_types::{self, PythonDecimal, PythonUUID}, + value_converter::{ + additional_types::NonePyType, + funcs::from_python::{ + extract_datetime_from_python_object_attrs, py_sequence_into_postgres_array, + }, + models::serde_value::build_serde_value, + traits::PythonToDTO, + }, +}; + +use super::enums::PythonDTO; + +impl PythonToDTO for NonePyType { + fn to_python_dto(_python_param: &pyo3::Bound<'_, PyAny>) -> RustPSQLDriverPyResult { + Ok(PythonDTO::PyNone) + } +} + +macro_rules! construct_simple_type_matcher { + ($match_type:ty, $kind:path) => { + impl PythonToDTO for $match_type { + fn to_python_dto(python_param: &Bound<'_, PyAny>) -> RustPSQLDriverPyResult { + Ok($kind(python_param.extract::<$match_type>()?)) + } + } + }; +} + +construct_simple_type_matcher!(bool, PythonDTO::PyBool); +construct_simple_type_matcher!(Vec, PythonDTO::PyBytes); +construct_simple_type_matcher!(String, PythonDTO::PyString); +construct_simple_type_matcher!(f32, PythonDTO::PyFloat32); +construct_simple_type_matcher!(f64, PythonDTO::PyFloat64); +construct_simple_type_matcher!(i16, PythonDTO::PyIntI16); +construct_simple_type_matcher!(i32, PythonDTO::PyIntI32); +construct_simple_type_matcher!(i64, PythonDTO::PyIntI64); +construct_simple_type_matcher!(NaiveDate, PythonDTO::PyDate); +construct_simple_type_matcher!(NaiveTime, PythonDTO::PyTime); + +impl PythonToDTO for PyDateTime { + fn to_python_dto(python_param: &pyo3::Bound<'_, PyAny>) -> RustPSQLDriverPyResult { + let timestamp_tz = python_param.extract::>(); + if let Ok(pydatetime_tz) = timestamp_tz { + return Ok(PythonDTO::PyDateTimeTz(pydatetime_tz)); + } + + let timestamp_no_tz = python_param.extract::(); + if let Ok(pydatetime_no_tz) = timestamp_no_tz { + return Ok(PythonDTO::PyDateTime(pydatetime_no_tz)); + } + + let timestamp_tz = extract_datetime_from_python_object_attrs(python_param); + if let Ok(pydatetime_tz) = timestamp_tz { + return Ok(PythonDTO::PyDateTimeTz(pydatetime_tz)); + } + + return Err(RustPSQLDriverError::PyToRustValueConversionError( + "Can not convert you datetime to rust type".into(), + )); + } +} + +impl PythonToDTO for PyDelta { + fn to_python_dto(python_param: &pyo3::Bound<'_, PyAny>) -> RustPSQLDriverPyResult { + let duration = python_param.extract::()?; + if let Some(interval) = Interval::from_duration(duration) { + return Ok(PythonDTO::PyInterval(interval)); + } + return Err(RustPSQLDriverError::PyToRustValueConversionError( + "Cannot convert timedelta from Python to inner Rust type.".to_string(), + )); + } +} + +impl PythonToDTO for PyDict { + fn to_python_dto(python_param: &pyo3::Bound<'_, PyAny>) -> RustPSQLDriverPyResult { + let serde_value = build_serde_value(python_param)?; + + return Ok(PythonDTO::PyJsonb(serde_value)); + } +} + +macro_rules! 
construct_extra_type_matcher { + ($match_type:ty, $kind:path) => { + impl PythonToDTO for $match_type { + fn to_python_dto(python_param: &Bound<'_, PyAny>) -> RustPSQLDriverPyResult { + Ok($kind(python_param.extract::<$match_type>()?.inner())) + } + } + }; +} + +construct_extra_type_matcher!(extra_types::JSONB, PythonDTO::PyJsonb); +construct_extra_type_matcher!(extra_types::JSON, PythonDTO::PyJson); +construct_extra_type_matcher!(extra_types::MacAddr6, PythonDTO::PyMacAddr6); +construct_extra_type_matcher!(extra_types::MacAddr8, PythonDTO::PyMacAddr8); +construct_extra_type_matcher!(extra_types::Point, PythonDTO::PyPoint); +construct_extra_type_matcher!(extra_types::Box, PythonDTO::PyBox); +construct_extra_type_matcher!(extra_types::Path, PythonDTO::PyPath); +construct_extra_type_matcher!(extra_types::Line, PythonDTO::PyLine); +construct_extra_type_matcher!(extra_types::LineSegment, PythonDTO::PyLineSegment); +construct_extra_type_matcher!(extra_types::Circle, PythonDTO::PyCircle); + +impl PythonToDTO for PythonDecimal { + fn to_python_dto(python_param: &pyo3::Bound<'_, PyAny>) -> RustPSQLDriverPyResult { + Ok(PythonDTO::PyDecimal(Decimal::from_str_exact( + python_param.str()?.extract::<&str>()?, + )?)) + } +} + +impl PythonToDTO for PythonUUID { + fn to_python_dto(python_param: &pyo3::Bound<'_, PyAny>) -> RustPSQLDriverPyResult { + Ok(PythonDTO::PyUUID(Uuid::parse_str( + python_param.str()?.extract::<&str>()?, + )?)) + } +} + +impl PythonToDTO for extra_types::PythonArray { + fn to_python_dto(python_param: &pyo3::Bound<'_, PyAny>) -> RustPSQLDriverPyResult { + Ok(PythonDTO::PyArray(py_sequence_into_postgres_array( + python_param, + )?)) + } +} + +impl PythonToDTO for IpAddr { + fn to_python_dto(python_param: &pyo3::Bound<'_, PyAny>) -> RustPSQLDriverPyResult { + if let Ok(id_address) = python_param.extract::() { + return Ok(PythonDTO::PyIpAddress(id_address)); + } + + Err(RustPSQLDriverError::PyToRustValueConversionError( + "Parameter passed to IpAddr is incorrect.".to_string(), + )) + } +} + +impl PythonToDTO for extra_types::PythonEnum { + fn to_python_dto(python_param: &pyo3::Bound<'_, PyAny>) -> RustPSQLDriverPyResult { + let string = python_param.extract::()?; + return Ok(PythonDTO::PyString(string)); + } +} + +macro_rules! construct_array_type_matcher { + ($match_type:ty) => { + impl PythonToDTO for $match_type { + fn to_python_dto(python_param: &Bound<'_, PyAny>) -> RustPSQLDriverPyResult { + python_param + .extract::<$match_type>()? 
+ ._convert_to_python_dto() + } + } + }; +} + +construct_array_type_matcher!(extra_types::BoolArray); +construct_array_type_matcher!(extra_types::UUIDArray); +construct_array_type_matcher!(extra_types::VarCharArray); +construct_array_type_matcher!(extra_types::TextArray); +construct_array_type_matcher!(extra_types::Int16Array); +construct_array_type_matcher!(extra_types::Int32Array); +construct_array_type_matcher!(extra_types::Int64Array); +construct_array_type_matcher!(extra_types::Float32Array); +construct_array_type_matcher!(extra_types::Float64Array); +construct_array_type_matcher!(extra_types::MoneyArray); +construct_array_type_matcher!(extra_types::IpAddressArray); +construct_array_type_matcher!(extra_types::JSONBArray); +construct_array_type_matcher!(extra_types::JSONArray); +construct_array_type_matcher!(extra_types::DateArray); +construct_array_type_matcher!(extra_types::TimeArray); +construct_array_type_matcher!(extra_types::DateTimeArray); +construct_array_type_matcher!(extra_types::DateTimeTZArray); +construct_array_type_matcher!(extra_types::MacAddr6Array); +construct_array_type_matcher!(extra_types::MacAddr8Array); +construct_array_type_matcher!(extra_types::NumericArray); +construct_array_type_matcher!(extra_types::PointArray); +construct_array_type_matcher!(extra_types::BoxArray); +construct_array_type_matcher!(extra_types::PathArray); +construct_array_type_matcher!(extra_types::LineArray); +construct_array_type_matcher!(extra_types::LsegArray); +construct_array_type_matcher!(extra_types::CircleArray); +construct_array_type_matcher!(extra_types::IntervalArray); diff --git a/src/value_converter/dto/enums.rs b/src/value_converter/dto/enums.rs new file mode 100644 index 00000000..00e88a10 --- /dev/null +++ b/src/value_converter/dto/enums.rs @@ -0,0 +1,82 @@ +use chrono::{self, DateTime, FixedOffset, NaiveDate, NaiveDateTime, NaiveTime}; +use geo_types::{Line as LineSegment, LineString, Point, Rect}; +use macaddr::{MacAddr6, MacAddr8}; +use pg_interval::Interval; +use rust_decimal::Decimal; +use serde_json::Value; +use std::{fmt::Debug, net::IpAddr}; +use uuid::Uuid; + +use crate::value_converter::additional_types::{Circle, Line}; +use postgres_array::array::Array; + +#[derive(Debug, Clone, PartialEq)] +pub enum PythonDTO { + // Primitive + PyNone, + PyBytes(Vec), + PyBool(bool), + PyUUID(Uuid), + PyVarChar(String), + PyText(String), + PyString(String), + PyIntI16(i16), + PyIntI32(i32), + PyIntI64(i64), + PyIntU32(u32), + PyIntU64(u64), + PyFloat32(f32), + PyFloat64(f64), + PyMoney(i64), + PyDate(NaiveDate), + PyTime(NaiveTime), + PyDateTime(NaiveDateTime), + PyDateTimeTz(DateTime), + PyInterval(Interval), + PyIpAddress(IpAddr), + PyList(Vec), + PyArray(Array), + PyTuple(Vec), + PyJsonb(Value), + PyJson(Value), + PyMacAddr6(MacAddr6), + PyMacAddr8(MacAddr8), + PyDecimal(Decimal), + PyCustomType(Vec), + PyPoint(Point), + PyBox(Rect), + PyPath(LineString), + PyLine(Line), + PyLineSegment(LineSegment), + PyCircle(Circle), + // Arrays + PyBoolArray(Array), + PyUuidArray(Array), + PyVarCharArray(Array), + PyTextArray(Array), + PyInt16Array(Array), + PyInt32Array(Array), + PyInt64Array(Array), + PyFloat32Array(Array), + PyFloat64Array(Array), + PyMoneyArray(Array), + PyIpAddressArray(Array), + PyJSONBArray(Array), + PyJSONArray(Array), + PyDateArray(Array), + PyTimeArray(Array), + PyDateTimeArray(Array), + PyDateTimeTZArray(Array), + PyMacAddr6Array(Array), + PyMacAddr8Array(Array), + PyNumericArray(Array), + PyPointArray(Array), + PyBoxArray(Array), + PyPathArray(Array), + 
PyLineArray(Array), + PyLsegArray(Array), + PyCircleArray(Array), + PyIntervalArray(Array), + // PgVector + PyPgVector(Vec), +} diff --git a/src/value_converter/models/dto.rs b/src/value_converter/dto/impls.rs similarity index 74% rename from src/value_converter/models/dto.rs rename to src/value_converter/dto/impls.rs index 8609a600..b634d8b8 100644 --- a/src/value_converter/models/dto.rs +++ b/src/value_converter/dto/impls.rs @@ -1,111 +1,44 @@ use chrono::{self, DateTime, FixedOffset, NaiveDate, NaiveDateTime, NaiveTime}; -use geo_types::{Line as LineSegment, LineString, Point, Rect}; -use macaddr::{MacAddr6, MacAddr8}; use pg_interval::Interval; use postgres_types::ToSql; use rust_decimal::Decimal; use serde_json::{json, Value}; -use std::{fmt::Debug, net::IpAddr}; +use std::net::IpAddr; use uuid::Uuid; use bytes::{BufMut, BytesMut}; use postgres_protocol::types; -use pyo3::{PyObject, Python, ToPyObject}; +use pyo3::{Bound, IntoPyObject, PyAny, Python}; use tokio_postgres::types::{to_sql_checked, Type}; use crate::{ exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, - value_converter::additional_types::{ - Circle, Line, RustLineSegment, RustLineString, RustPoint, RustRect, + value_converter::{ + additional_types::{Circle, Line, RustLineSegment, RustLineString, RustPoint, RustRect}, + models::serde_value::pythondto_array_to_serde, }, }; use pgvector::Vector as PgVector; -use postgres_array::{array::Array, Dimension}; -#[derive(Debug, Clone, PartialEq)] -pub enum PythonDTO { - // Primitive - PyNone, - PyBytes(Vec), - PyBool(bool), - PyUUID(Uuid), - PyVarChar(String), - PyText(String), - PyString(String), - PyIntI16(i16), - PyIntI32(i32), - PyIntI64(i64), - PyIntU32(u32), - PyIntU64(u64), - PyFloat32(f32), - PyFloat64(f64), - PyMoney(i64), - PyDate(NaiveDate), - PyTime(NaiveTime), - PyDateTime(NaiveDateTime), - PyDateTimeTz(DateTime), - PyInterval(Interval), - PyIpAddress(IpAddr), - PyList(Vec), - PyArray(Array), - PyTuple(Vec), - PyJsonb(Value), - PyJson(Value), - PyMacAddr6(MacAddr6), - PyMacAddr8(MacAddr8), - PyDecimal(Decimal), - PyCustomType(Vec), - PyPoint(Point), - PyBox(Rect), - PyPath(LineString), - PyLine(Line), - PyLineSegment(LineSegment), - PyCircle(Circle), - // Arrays - PyBoolArray(Array), - PyUuidArray(Array), - PyVarCharArray(Array), - PyTextArray(Array), - PyInt16Array(Array), - PyInt32Array(Array), - PyInt64Array(Array), - PyFloat32Array(Array), - PyFloat64Array(Array), - PyMoneyArray(Array), - PyIpAddressArray(Array), - PyJSONBArray(Array), - PyJSONArray(Array), - PyDateArray(Array), - PyTimeArray(Array), - PyDateTimeArray(Array), - PyDateTimeTZArray(Array), - PyMacAddr6Array(Array), - PyMacAddr8Array(Array), - PyNumericArray(Array), - PyPointArray(Array), - PyBoxArray(Array), - PyPathArray(Array), - PyLineArray(Array), - PyLsegArray(Array), - PyCircleArray(Array), - PyIntervalArray(Array), - // PgVector - PyPgVector(Vec), -} +use super::enums::PythonDTO; + +impl<'py> IntoPyObject<'py> for PythonDTO { + type Target = PyAny; + type Output = Bound<'py, Self::Target>; + type Error = std::convert::Infallible; -impl ToPyObject for PythonDTO { - fn to_object(&self, py: Python<'_>) -> PyObject { + fn into_pyobject(self, py: Python<'py>) -> Result { match self { - PythonDTO::PyNone => py.None(), - PythonDTO::PyBool(pybool) => pybool.to_object(py), + PythonDTO::PyNone => Ok(py.None().into_bound(py)), + PythonDTO::PyBool(pybool) => Ok(pybool.into_pyobject(py)?.to_owned().into_any()), PythonDTO::PyString(py_string) | PythonDTO::PyText(py_string) - | 
PythonDTO::PyVarChar(py_string) => py_string.to_object(py), - PythonDTO::PyIntI32(pyint) => pyint.to_object(py), - PythonDTO::PyIntI64(pyint) => pyint.to_object(py), - PythonDTO::PyIntU64(pyint) => pyint.to_object(py), - PythonDTO::PyFloat32(pyfloat) => pyfloat.to_object(py), - PythonDTO::PyFloat64(pyfloat) => pyfloat.to_object(py), + | PythonDTO::PyVarChar(py_string) => Ok(py_string.into_pyobject(py)?.into_any()), + PythonDTO::PyIntI32(pyint) => Ok(pyint.into_pyobject(py)?.into_any()), + PythonDTO::PyIntI64(pyint) => Ok(pyint.into_pyobject(py)?.into_any()), + PythonDTO::PyIntU64(pyint) => Ok(pyint.into_pyobject(py)?.into_any()), + PythonDTO::PyFloat32(pyfloat) => Ok(pyfloat.into_pyobject(py)?.into_any()), + PythonDTO::PyFloat64(pyfloat) => Ok(pyfloat.into_pyobject(py)?.into_any()), _ => unreachable!(), } } @@ -431,61 +364,3 @@ impl ToSql for PythonDTO { to_sql_checked!(); } - -/// Convert Array of `PythonDTO`s to serde `Value`. -/// -/// It can convert multidimensional arrays. -fn pythondto_array_to_serde(array: Option>) -> RustPSQLDriverPyResult { - match array { - Some(array) => inner_pythondto_array_to_serde( - array.dimensions(), - array.iter().collect::>().as_slice(), - 0, - 0, - ), - None => Ok(Value::Null), - } -} - -/// Inner conversion array of `PythonDTO`s to serde `Value`. -#[allow(clippy::cast_sign_loss)] -fn inner_pythondto_array_to_serde( - dimensions: &[Dimension], - data: &[&PythonDTO], - dimension_index: usize, - mut lower_bound: usize, -) -> RustPSQLDriverPyResult { - let current_dimension = dimensions.get(dimension_index); - - if let Some(current_dimension) = current_dimension { - let possible_next_dimension = dimensions.get(dimension_index + 1); - match possible_next_dimension { - Some(next_dimension) => { - let mut final_list: Value = Value::Array(vec![]); - - for _ in 0..current_dimension.len as usize { - if dimensions.get(dimension_index + 1).is_some() { - let inner_pylist = inner_pythondto_array_to_serde( - dimensions, - &data[lower_bound..next_dimension.len as usize + lower_bound], - dimension_index + 1, - 0, - )?; - match final_list { - Value::Array(ref mut array) => array.push(inner_pylist), - _ => unreachable!(), - } - lower_bound += next_dimension.len as usize; - }; - } - - return Ok(final_list); - } - None => { - return data.iter().map(|x| x.to_serde_value()).collect(); - } - } - } - - Ok(Value::Array(vec![])) -} diff --git a/src/value_converter/dto/mod.rs b/src/value_converter/dto/mod.rs new file mode 100644 index 00000000..5be9ae5b --- /dev/null +++ b/src/value_converter/dto/mod.rs @@ -0,0 +1,3 @@ +pub mod converter_impls; +pub mod enums; +pub mod impls; diff --git a/src/value_converter/funcs/from_python.rs b/src/value_converter/funcs/from_python.rs index 4fe73290..adad8879 100644 --- a/src/value_converter/funcs/from_python.rs +++ b/src/value_converter/funcs/from_python.rs @@ -5,7 +5,7 @@ use itertools::Itertools; use pg_interval::Interval; use postgres_array::{Array, Dimension}; use rust_decimal::Decimal; -use serde_json::{json, Map, Value}; +use serde_json::{Map, Value}; use std::net::IpAddr; use uuid::Uuid; @@ -21,7 +21,7 @@ use crate::{ exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, extra_types::{self}, value_converter::{ - consts::KWARGS_QUERYSTRINGS, models::dto::PythonDTO, + consts::KWARGS_QUERYSTRINGS, dto::enums::PythonDTO, utils::extract_value_from_python_object_or_raise, }, }; @@ -74,45 +74,37 @@ pub fn py_to_rust(parameter: &pyo3::Bound<'_, PyAny>) -> RustPSQLDriverPyResult< if parameter.is_instance_of::() { return 
Ok(PythonDTO::PyFloat32( - parameter - .extract::()? - .retrieve_value(), + parameter.extract::()?.inner(), )); } if parameter.is_instance_of::() { return Ok(PythonDTO::PyFloat64( - parameter - .extract::()? - .retrieve_value(), + parameter.extract::()?.inner(), )); } if parameter.is_instance_of::() { return Ok(PythonDTO::PyIntI16( - parameter - .extract::()? - .retrieve_value(), + parameter.extract::()?.inner(), )); } if parameter.is_instance_of::() { return Ok(PythonDTO::PyIntI32( - parameter - .extract::()? - .retrieve_value(), + parameter.extract::()?.inner(), )); } if parameter.is_instance_of::() { return Ok(PythonDTO::PyIntI64( - parameter.extract::()?.retrieve_value(), + parameter.extract::()?.inner(), )); } if parameter.is_instance_of::() { return Ok(PythonDTO::PyMoney( - parameter.extract::()?.retrieve_value(), + parameter.extract::()?.inner(), )); } @@ -192,13 +184,13 @@ pub fn py_to_rust(parameter: &pyo3::Bound<'_, PyAny>) -> RustPSQLDriverPyResult< if parameter.is_instance_of::() { return Ok(PythonDTO::PyJsonb( - parameter.extract::()?.inner().clone(), + parameter.extract::()?.inner(), )); } if parameter.is_instance_of::() { return Ok(PythonDTO::PyJson( - parameter.extract::()?.inner().clone(), + parameter.extract::()?.inner(), )); } @@ -214,58 +206,56 @@ pub fn py_to_rust(parameter: &pyo3::Bound<'_, PyAny>) -> RustPSQLDriverPyResult< )); } - if parameter.get_type().name()? == "UUID" { - return Ok(PythonDTO::PyUUID(Uuid::parse_str( - parameter.str()?.extract::<&str>()?, - )?)); - } - - if parameter.get_type().name()? == "decimal.Decimal" - || parameter.get_type().name()? == "Decimal" - { - return Ok(PythonDTO::PyDecimal(Decimal::from_str_exact( - parameter.str()?.extract::<&str>()?, - )?)); - } - if parameter.is_instance_of::() { return Ok(PythonDTO::PyPoint( - parameter.extract::()?.retrieve_value(), + parameter.extract::()?.inner(), )); } if parameter.is_instance_of::() { return Ok(PythonDTO::PyBox( - parameter.extract::()?.retrieve_value(), + parameter.extract::()?.inner(), )); } if parameter.is_instance_of::() { return Ok(PythonDTO::PyPath( - parameter.extract::()?.retrieve_value(), + parameter.extract::()?.inner(), )); } if parameter.is_instance_of::() { return Ok(PythonDTO::PyLine( - parameter.extract::()?.retrieve_value(), + parameter.extract::()?.inner(), )); } if parameter.is_instance_of::() { return Ok(PythonDTO::PyLineSegment( - parameter - .extract::()? - .retrieve_value(), + parameter.extract::()?.inner(), )); } if parameter.is_instance_of::() { return Ok(PythonDTO::PyCircle( - parameter.extract::()?.retrieve_value(), + parameter.extract::()?.inner(), )); } + if parameter.get_type().name()? == "UUID" { + return Ok(PythonDTO::PyUUID(Uuid::parse_str( + parameter.str()?.extract::<&str>()?, + )?)); + } + + if parameter.get_type().name()? == "decimal.Decimal" + || parameter.get_type().name()? == "Decimal" + { + return Ok(PythonDTO::PyDecimal(Decimal::from_str_exact( + parameter.str()?.extract::<&str>()?, + )?)); + } + if parameter.is_instance_of::() { return parameter .extract::()? 
@@ -430,7 +420,7 @@ pub fn py_to_rust(parameter: &pyo3::Bound<'_, PyAny>) -> RustPSQLDriverPyResult< if parameter.is_instance_of::() { return Ok(PythonDTO::PyPgVector( - parameter.extract::()?.inner_value(), + parameter.extract::()?.inner(), )); } @@ -462,7 +452,7 @@ pub fn py_to_rust(parameter: &pyo3::Bound<'_, PyAny>) -> RustPSQLDriverPyResult< /// - The retrieved values are invalid for constructing a date, time, or datetime (e.g., invalid month or day) /// - The timezone information (`tzinfo`) is not available or cannot be parsed /// - The resulting datetime is ambiguous or invalid (e.g., due to DST transitions) -fn extract_datetime_from_python_object_attrs( +pub fn extract_datetime_from_python_object_attrs( parameter: &pyo3::Bound<'_, PyAny>, ) -> Result, RustPSQLDriverError> { let year = extract_value_from_python_object_or_raise::(parameter, "year")?; @@ -686,44 +676,6 @@ pub fn convert_seq_parameters( Ok(result_vec) } -/// Convert python List of Dict type or just Dict into serde `Value`. -/// -/// # Errors -/// May return error if cannot convert Python type into Rust one. -#[allow(clippy::needless_pass_by_value)] -pub fn build_serde_value(value: Py) -> RustPSQLDriverPyResult { - Python::with_gil(|gil| { - let bind_value = value.bind(gil); - if bind_value.is_instance_of::() { - let mut result_vec: Vec = vec![]; - - let params = bind_value.extract::>>()?; - - for inner in params { - let inner_bind = inner.bind(gil); - if inner_bind.is_instance_of::() { - let python_dto = py_to_rust(inner_bind)?; - result_vec.push(python_dto.to_serde_value()?); - } else if inner_bind.is_instance_of::() { - let serde_value = build_serde_value(inner)?; - result_vec.push(serde_value); - } else { - return Err(RustPSQLDriverError::PyToRustValueConversionError( - "PyJSON must have dicts.".to_string(), - )); - } - } - Ok(json!(result_vec)) - } else if bind_value.is_instance_of::() { - return py_to_rust(bind_value)?.to_serde_value(); - } else { - return Err(RustPSQLDriverError::PyToRustValueConversionError( - "PyJSON must be dict value.".to_string(), - )); - } - }) -} - /// Convert two python parameters(x and y) to Coord from `geo_type`. /// Also it checks that passed values is int or float. 
/// diff --git a/src/value_converter/mod.rs b/src/value_converter/mod.rs index e8cbc82b..7d08bf3f 100644 --- a/src/value_converter/mod.rs +++ b/src/value_converter/mod.rs @@ -1,5 +1,7 @@ pub mod additional_types; pub mod consts; +pub mod dto; pub mod funcs; pub mod models; +pub mod traits; pub mod utils; diff --git a/src/value_converter/models/mod.rs b/src/value_converter/models/mod.rs index 92d26e49..b36f3bff 100644 --- a/src/value_converter/models/mod.rs +++ b/src/value_converter/models/mod.rs @@ -1,5 +1,4 @@ pub mod decimal; -pub mod dto; pub mod interval; pub mod serde_value; pub mod uuid; diff --git a/src/value_converter/models/serde_value.rs b/src/value_converter/models/serde_value.rs index b39f7737..0bf6652f 100644 --- a/src/value_converter/models/serde_value.rs +++ b/src/value_converter/models/serde_value.rs @@ -1,16 +1,20 @@ +use postgres_array::{Array, Dimension}; use postgres_types::FromSql; -use serde_json::{json, Value}; +use serde_json::{json, Map, Value}; use uuid::Uuid; use pyo3::{ - types::{PyAnyMethods, PyDict, PyList}, + types::{PyAnyMethods, PyDict, PyDictMethods, PyList, PyTuple}, Bound, FromPyObject, Py, PyAny, PyObject, PyResult, Python, ToPyObject, }; use tokio_postgres::types::Type; use crate::{ exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, - value_converter::funcs::{from_python::py_to_rust, to_python::build_python_from_serde_value}, + value_converter::{ + dto::enums::PythonDTO, + funcs::{from_python::py_to_rust, to_python::build_python_from_serde_value}, + }, }; /// Struct for Value. @@ -22,7 +26,7 @@ pub struct InternalSerdeValue(Value); impl<'a> FromPyObject<'a> for InternalSerdeValue { fn extract_bound(ob: &Bound<'a, PyAny>) -> PyResult { - let serde_value = build_serde_value(ob.clone().unbind())?; + let serde_value = build_serde_value(ob)?; Ok(InternalSerdeValue(serde_value)) } @@ -50,36 +54,67 @@ impl<'a> FromSql<'a> for InternalSerdeValue { } } +fn serde_value_from_list( + gil: Python<'_>, + bind_value: &Bound<'_, PyAny>, +) -> RustPSQLDriverPyResult { + let mut result_vec: Vec = vec![]; + + let params = bind_value.extract::>>()?; + + for inner in params { + let inner_bind = inner.bind(gil); + if inner_bind.is_instance_of::() { + let python_dto = py_to_rust(inner_bind)?; + result_vec.push(python_dto.to_serde_value()?); + } else if inner_bind.is_instance_of::() { + let serde_value = build_serde_value(inner.bind(gil))?; + result_vec.push(serde_value); + } else { + return Err(RustPSQLDriverError::PyToRustValueConversionError( + "PyJSON must have dicts.".to_string(), + )); + } + } + Ok(json!(result_vec)) +} + +fn serde_value_from_dict(bind_value: &Bound<'_, PyAny>) -> RustPSQLDriverPyResult { + let dict = bind_value.downcast::().map_err(|error| { + RustPSQLDriverError::PyToRustValueConversionError(format!( + "Can't cast to inner dict: {error}" + )) + })?; + + let mut serde_map: Map = Map::new(); + + for dict_item in dict.items() { + let py_list = dict_item.downcast::().map_err(|error| { + RustPSQLDriverError::PyToRustValueConversionError(format!( + "Cannot cast to list: {error}" + )) + })?; + + let key = py_list.get_item(0)?.extract::()?; + let value = py_to_rust(&py_list.get_item(1)?)?; + + serde_map.insert(key, value.to_serde_value()?); + } + + return Ok(Value::Object(serde_map)); +} + /// Convert python List of Dict type or just Dict into serde `Value`. /// /// # Errors /// May return error if cannot convert Python type into Rust one. 
#[allow(clippy::needless_pass_by_value)] -pub fn build_serde_value(value: Py) -> RustPSQLDriverPyResult { +pub fn build_serde_value(value: &Bound<'_, PyAny>) -> RustPSQLDriverPyResult { Python::with_gil(|gil| { - let bind_value = value.bind(gil); - if bind_value.is_instance_of::() { - let mut result_vec: Vec = vec![]; - - let params = bind_value.extract::>>()?; - - for inner in params { - let inner_bind = inner.bind(gil); - if inner_bind.is_instance_of::() { - let python_dto = py_to_rust(inner_bind)?; - result_vec.push(python_dto.to_serde_value()?); - } else if inner_bind.is_instance_of::() { - let serde_value = build_serde_value(inner)?; - result_vec.push(serde_value); - } else { - return Err(RustPSQLDriverError::PyToRustValueConversionError( - "PyJSON must have dicts.".to_string(), - )); - } - } - Ok(json!(result_vec)) - } else if bind_value.is_instance_of::() { - return py_to_rust(bind_value)?.to_serde_value(); + if value.is_instance_of::() { + return serde_value_from_list(gil, value); + } else if value.is_instance_of::() { + return serde_value_from_dict(value); } else { return Err(RustPSQLDriverError::PyToRustValueConversionError( "PyJSON must be dict value.".to_string(), @@ -87,3 +122,61 @@ pub fn build_serde_value(value: Py) -> RustPSQLDriverPyResult { } }) } + +/// Convert Array of `PythonDTO`s to serde `Value`. +/// +/// It can convert multidimensional arrays. +pub fn pythondto_array_to_serde(array: Option>) -> RustPSQLDriverPyResult { + match array { + Some(array) => inner_pythondto_array_to_serde( + array.dimensions(), + array.iter().collect::>().as_slice(), + 0, + 0, + ), + None => Ok(Value::Null), + } +} + +/// Inner conversion array of `PythonDTO`s to serde `Value`. +#[allow(clippy::cast_sign_loss)] +fn inner_pythondto_array_to_serde( + dimensions: &[Dimension], + data: &[&PythonDTO], + dimension_index: usize, + mut lower_bound: usize, +) -> RustPSQLDriverPyResult { + let current_dimension = dimensions.get(dimension_index); + + if let Some(current_dimension) = current_dimension { + let possible_next_dimension = dimensions.get(dimension_index + 1); + match possible_next_dimension { + Some(next_dimension) => { + let mut final_list: Value = Value::Array(vec![]); + + for _ in 0..current_dimension.len as usize { + if dimensions.get(dimension_index + 1).is_some() { + let inner_pylist = inner_pythondto_array_to_serde( + dimensions, + &data[lower_bound..next_dimension.len as usize + lower_bound], + dimension_index + 1, + 0, + )?; + match final_list { + Value::Array(ref mut array) => array.push(inner_pylist), + _ => unreachable!(), + } + lower_bound += next_dimension.len as usize; + }; + } + + return Ok(final_list); + } + None => { + return data.iter().map(|x| x.to_serde_value()).collect(); + } + } + } + + Ok(Value::Array(vec![])) +} diff --git a/src/value_converter/traits.rs b/src/value_converter/traits.rs new file mode 100644 index 00000000..ca44a7d0 --- /dev/null +++ b/src/value_converter/traits.rs @@ -0,0 +1,9 @@ +use pyo3::PyAny; + +use crate::exceptions::rust_errors::RustPSQLDriverPyResult; + +use super::dto::enums::PythonDTO; + +pub trait PythonToDTO { + fn to_python_dto(python_param: &pyo3::Bound<'_, PyAny>) -> RustPSQLDriverPyResult; +} From 50855b5a5c227dea79f671d306bcae115b85a674 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Fri, 2 May 2025 22:35:06 +0200 Subject: [PATCH 26/65] Full value converter refactor --- Cargo.lock | 10 +- src/driver/connection.rs | 27 +- src/driver/connection_pool.rs | 8 +- src/driver/connection_pool_builder.rs | 6 +- 
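
The `PythonToDTO` trait added in `src/value_converter/traits.rs` above is what the `construct_simple_type_matcher!` invocations in `converter_impls.rs` implement; one expansion looks roughly like the sketch below (hand-written for illustration, assuming the crate's `PythonDTO`, `PythonToDTO`, and `RustPSQLDriverPyResult` are in scope along with pyo3's `Bound` and `PyAny`):

    // Approximate expansion of construct_simple_type_matcher!(i64, PythonDTO::PyIntI64).
    impl PythonToDTO for i64 {
        fn to_python_dto(python_param: &Bound<'_, PyAny>) -> RustPSQLDriverPyResult<PythonDTO> {
            Ok(PythonDTO::PyIntI64(python_param.extract::<i64>()?))
        }
    }
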
src/driver/cursor.rs | 44 +- src/driver/inner_connection.rs | 184 ++++----- src/driver/listener/core.rs | 26 +- src/driver/listener/structs.rs | 4 +- src/driver/transaction.rs | 59 ++- src/driver/utils.rs | 8 +- src/exceptions/rust_errors.rs | 8 +- src/extra_types.rs | 25 +- src/lib.rs | 1 + src/query_result.rs | 29 +- src/row_factories.rs | 6 +- src/runtime.rs | 6 +- src/statement/cache.rs | 50 +++ src/statement/mod.rs | 7 + src/statement/parameters.rs | 255 ++++++++++++ src/statement/query.rs | 92 +++++ src/statement/statement.rs | 30 ++ src/statement/statement_builder.rs | 100 +++++ src/statement/traits.rs | 8 + src/statement/utils.rs | 1 + src/value_converter/consts.rs | 2 + src/value_converter/dto/converter_impls.rs | 65 +-- src/value_converter/dto/impls.rs | 6 +- .../{funcs => }/from_python.rs | 375 ++++-------------- src/value_converter/funcs/mod.rs | 2 - src/value_converter/mod.rs | 3 +- src/value_converter/models/serde_value.rs | 19 +- src/value_converter/params_converters.rs | 0 src/value_converter/{funcs => }/to_python.rs | 21 +- src/value_converter/traits.rs | 6 +- 34 files changed, 890 insertions(+), 603 deletions(-) create mode 100644 src/statement/cache.rs create mode 100644 src/statement/mod.rs create mode 100644 src/statement/parameters.rs create mode 100644 src/statement/query.rs create mode 100644 src/statement/statement.rs create mode 100644 src/statement/statement_builder.rs create mode 100644 src/statement/traits.rs create mode 100644 src/statement/utils.rs rename src/value_converter/{funcs => }/from_python.rs (64%) delete mode 100644 src/value_converter/funcs/mod.rs create mode 100644 src/value_converter/params_converters.rs rename src/value_converter/{funcs => }/to_python.rs (97%) diff --git a/Cargo.lock b/Cargo.lock index fee82b45..df4dc951 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -881,7 +881,7 @@ checksum = "da544ee218f0d287a911e9c99a39a8c9bc8fcad3cb8db5959940044ecfc67265" [[package]] name = "postgres-derive" version = "0.4.5" -source = "git+https://github.com/chandr-andr/rust-postgres.git?branch=psqlpy#e4e1047e701318b31c61330e428ebd8ade7ed1cb" +source = "git+https://github.com/chandr-andr/rust-postgres.git?branch=psqlpy#5780895bfa8a0b9142df225b65bc6e59f7dbee61" dependencies = [ "heck", "proc-macro2", @@ -892,7 +892,7 @@ dependencies = [ [[package]] name = "postgres-openssl" version = "0.5.0" -source = "git+https://github.com/chandr-andr/rust-postgres.git?branch=psqlpy#e4e1047e701318b31c61330e428ebd8ade7ed1cb" +source = "git+https://github.com/chandr-andr/rust-postgres.git?branch=psqlpy#5780895bfa8a0b9142df225b65bc6e59f7dbee61" dependencies = [ "openssl", "tokio", @@ -903,7 +903,7 @@ dependencies = [ [[package]] name = "postgres-protocol" version = "0.6.7" -source = "git+https://github.com/chandr-andr/rust-postgres.git?branch=psqlpy#e4e1047e701318b31c61330e428ebd8ade7ed1cb" +source = "git+https://github.com/chandr-andr/rust-postgres.git?branch=psqlpy#5780895bfa8a0b9142df225b65bc6e59f7dbee61" dependencies = [ "base64", "byteorder", @@ -920,7 +920,7 @@ dependencies = [ [[package]] name = "postgres-types" version = "0.2.7" -source = "git+https://github.com/chandr-andr/rust-postgres.git?branch=psqlpy#e4e1047e701318b31c61330e428ebd8ade7ed1cb" +source = "git+https://github.com/chandr-andr/rust-postgres.git?branch=psqlpy#5780895bfa8a0b9142df225b65bc6e59f7dbee61" dependencies = [ "array-init", "bytes", @@ -1540,7 +1540,7 @@ dependencies = [ [[package]] name = "tokio-postgres" version = "0.7.11" -source = 
"git+https://github.com/chandr-andr/rust-postgres.git?branch=psqlpy#e4e1047e701318b31c61330e428ebd8ade7ed1cb" +source = "git+https://github.com/chandr-andr/rust-postgres.git?branch=psqlpy#5780895bfa8a0b9142df225b65bc6e59f7dbee61" dependencies = [ "async-trait", "byteorder", diff --git a/src/driver/connection.rs b/src/driver/connection.rs index 3c0595bb..8f2a4b40 100644 --- a/src/driver/connection.rs +++ b/src/driver/connection.rs @@ -6,7 +6,7 @@ use std::{collections::HashSet, net::IpAddr, sync::Arc}; use tokio_postgres::{binary_copy::BinaryCopyInWriter, config::Host, Config}; use crate::{ - exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, + exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, format_helpers::quote_ident, query_result::{PSQLDriverPyQueryResult, PSQLDriverSinglePyQueryResult}, runtime::tokio_runtime, @@ -137,7 +137,7 @@ impl Connection { return self.pg_config.get_options(); } - async fn __aenter__<'a>(self_: Py) -> RustPSQLDriverPyResult> { + async fn __aenter__<'a>(self_: Py) -> PSQLPyResult> { let (db_client, db_pool) = pyo3::Python::with_gil(|gil| { let self_ = self_.borrow(gil); (self_.db_client.clone(), self_.db_pool.clone()) @@ -169,7 +169,7 @@ impl Connection { _exception_type: Py, exception: Py, _traceback: Py, - ) -> RustPSQLDriverPyResult<()> { + ) -> PSQLPyResult<()> { let (is_exception_none, py_err) = pyo3::Python::with_gil(|gil| { ( exception.is_none(gil), @@ -205,7 +205,7 @@ impl Connection { querystring: String, parameters: Option>, prepared: Option, - ) -> RustPSQLDriverPyResult { + ) -> PSQLPyResult { let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).db_client.clone()); if let Some(db_client) = db_client { @@ -227,10 +227,7 @@ impl Connection { /// May return Err Result if: /// 1) Connection is closed. /// 2) Cannot execute querystring. 
- pub async fn execute_batch( - self_: pyo3::Py, - querystring: String, - ) -> RustPSQLDriverPyResult<()> { + pub async fn execute_batch(self_: pyo3::Py, querystring: String) -> PSQLPyResult<()> { let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).db_client.clone()); if let Some(db_client) = db_client { @@ -256,7 +253,7 @@ impl Connection { querystring: String, parameters: Option>>, prepared: Option, - ) -> RustPSQLDriverPyResult<()> { + ) -> PSQLPyResult<()> { let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).db_client.clone()); if let Some(db_client) = db_client { @@ -282,7 +279,7 @@ impl Connection { querystring: String, parameters: Option>, prepared: Option, - ) -> RustPSQLDriverPyResult { + ) -> PSQLPyResult { let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).db_client.clone()); if let Some(db_client) = db_client { @@ -312,7 +309,7 @@ impl Connection { querystring: String, parameters: Option>, prepared: Option, - ) -> RustPSQLDriverPyResult { + ) -> PSQLPyResult { let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).db_client.clone()); if let Some(db_client) = db_client { @@ -339,7 +336,7 @@ impl Connection { querystring: String, parameters: Option>, prepared: Option, - ) -> RustPSQLDriverPyResult> { + ) -> PSQLPyResult> { let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).db_client.clone()); if let Some(db_client) = db_client { @@ -365,7 +362,7 @@ impl Connection { read_variant: Option, deferrable: Option, synchronous_commit: Option, - ) -> RustPSQLDriverPyResult { + ) -> PSQLPyResult { if let Some(db_client) = &self.db_client { return Ok(Transaction::new( db_client.clone(), @@ -401,7 +398,7 @@ impl Connection { fetch_number: Option, scroll: Option, prepared: Option, - ) -> RustPSQLDriverPyResult { + ) -> PSQLPyResult { if let Some(db_client) = &self.db_client { return Ok(Cursor::new( db_client.clone(), @@ -446,7 +443,7 @@ impl Connection { table_name: String, columns: Option>, schema_name: Option, - ) -> RustPSQLDriverPyResult { + ) -> PSQLPyResult { let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).db_client.clone()); let mut table_name = quote_ident(&table_name); if let Some(schema_name) = schema_name { diff --git a/src/driver/connection_pool.rs b/src/driver/connection_pool.rs index 24780a6a..0c52c256 100644 --- a/src/driver/connection_pool.rs +++ b/src/driver/connection_pool.rs @@ -4,7 +4,7 @@ use pyo3::{pyclass, pyfunction, pymethods, Py, PyAny}; use std::sync::Arc; use tokio_postgres::Config; -use crate::exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}; +use crate::exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}; use super::{ common_options::{ConnRecyclingMethod, LoadBalanceHosts, SslMode, TargetSessionAttrs}, @@ -75,7 +75,7 @@ pub fn connect( ca_file: Option, max_db_pool_size: Option, conn_recycling_method: Option, -) -> RustPSQLDriverPyResult { +) -> PSQLPyResult { if let Some(max_db_pool_size) = max_db_pool_size { if max_db_pool_size < 2 { return Err(RustPSQLDriverError::ConnectionPoolConfigurationError( @@ -289,7 +289,7 @@ impl ConnectionPool { conn_recycling_method: Option, ssl_mode: Option, ca_file: Option, - ) -> RustPSQLDriverPyResult { + ) -> PSQLPyResult { connect( dsn, username, @@ -382,7 +382,7 @@ impl ConnectionPool { /// /// # Errors /// May return Err Result if cannot get new connection from the pool. 
- pub async fn connection(self_: pyo3::Py) -> RustPSQLDriverPyResult { + pub async fn connection(self_: pyo3::Py) -> PSQLPyResult { let (db_pool, pg_config) = pyo3::Python::with_gil(|gil| { let slf = self_.borrow(gil); (slf.pool.clone(), slf.pg_config.clone()) diff --git a/src/driver/connection_pool_builder.rs b/src/driver/connection_pool_builder.rs index e0610942..42cdd641 100644 --- a/src/driver/connection_pool_builder.rs +++ b/src/driver/connection_pool_builder.rs @@ -3,7 +3,7 @@ use std::{net::IpAddr, time::Duration}; use deadpool_postgres::{Manager, ManagerConfig, Pool, RecyclingMethod}; use pyo3::{pyclass, pymethods, Py, Python}; -use crate::exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}; +use crate::exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}; use super::{ common_options, @@ -38,7 +38,7 @@ impl ConnectionPoolBuilder { /// /// # Errors /// May return error if cannot build new connection pool. - fn build(&self) -> RustPSQLDriverPyResult { + fn build(&self) -> PSQLPyResult { let mgr_config: ManagerConfig; if let Some(conn_recycling_method) = self.conn_recycling_method.as_ref() { mgr_config = ManagerConfig { @@ -84,7 +84,7 @@ impl ConnectionPoolBuilder { /// /// # Error /// If size more than 2. - fn max_pool_size(self_: Py, pool_size: usize) -> RustPSQLDriverPyResult> { + fn max_pool_size(self_: Py, pool_size: usize) -> PSQLPyResult> { if pool_size < 2 { return Err(RustPSQLDriverError::ConnectionPoolConfigurationError( "Maximum database pool size must be more than 1".into(), diff --git a/src/driver/cursor.rs b/src/driver/cursor.rs index f391d1c1..1f435ef5 100644 --- a/src/driver/cursor.rs +++ b/src/driver/cursor.rs @@ -6,7 +6,7 @@ use pyo3::{ use tokio_postgres::{config::Host, Config}; use crate::{ - exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, + exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, query_result::PSQLDriverPyQueryResult, runtime::rustdriver_future, }; @@ -23,9 +23,9 @@ trait CursorObjectTrait { querystring: &str, prepared: &Option, parameters: &Option>, - ) -> RustPSQLDriverPyResult<()>; + ) -> PSQLPyResult<()>; - async fn cursor_close(&self, closed: &bool, cursor_name: &str) -> RustPSQLDriverPyResult<()>; + async fn cursor_close(&self, closed: &bool, cursor_name: &str) -> PSQLPyResult<()>; } impl CursorObjectTrait for PsqlpyConnection { @@ -43,7 +43,7 @@ impl CursorObjectTrait for PsqlpyConnection { querystring: &str, prepared: &Option, parameters: &Option>, - ) -> RustPSQLDriverPyResult<()> { + ) -> PSQLPyResult<()> { let mut cursor_init_query = format!("DECLARE {cursor_name}"); if let Some(scroll) = scroll { if *scroll { @@ -70,7 +70,7 @@ impl CursorObjectTrait for PsqlpyConnection { /// /// # Errors /// May return Err Result if cannot execute querystring. 
- async fn cursor_close(&self, closed: &bool, cursor_name: &str) -> RustPSQLDriverPyResult<()> { + async fn cursor_close(&self, closed: &bool, cursor_name: &str) -> PSQLPyResult<()> { if *closed { return Err(RustPSQLDriverError::CursorCloseError( "Cursor is already closed".into(), @@ -232,7 +232,7 @@ impl Cursor { slf } - async fn __aenter__<'a>(slf: Py) -> RustPSQLDriverPyResult> { + async fn __aenter__<'a>(slf: Py) -> PSQLPyResult> { let (db_transaction, cursor_name, scroll, querystring, prepared, parameters) = Python::with_gil(|gil| { let self_ = slf.borrow(gil); @@ -265,7 +265,7 @@ impl Cursor { _exception_type: Py, exception: Py, _traceback: Py, - ) -> RustPSQLDriverPyResult<()> { + ) -> PSQLPyResult<()> { let (db_transaction, closed, cursor_name, is_exception_none, py_err) = pyo3::Python::with_gil(|gil| { let self_ = slf.borrow(gil); @@ -307,7 +307,7 @@ impl Cursor { /// we didn't find any solution how to implement it without /// # Errors /// May return Err Result if can't execute querystring. - fn __anext__(&self) -> RustPSQLDriverPyResult> { + fn __anext__(&self) -> PSQLPyResult> { let db_transaction = self.db_transaction.clone(); let fetch_number = self.fetch_number; let cursor_name = self.cursor_name.clone(); @@ -343,7 +343,7 @@ impl Cursor { /// # Errors /// May return Err Result /// if cannot execute querystring for cursor declaration. - pub async fn start(&mut self) -> RustPSQLDriverPyResult<()> { + pub async fn start(&mut self) -> PSQLPyResult<()> { let db_transaction_arc = self.db_transaction.clone(); if let Some(db_transaction) = db_transaction_arc { @@ -370,7 +370,7 @@ impl Cursor { /// /// # Errors /// May return Err Result if cannot execute query. - pub async fn close(&mut self) -> RustPSQLDriverPyResult<()> { + pub async fn close(&mut self) -> PSQLPyResult<()> { let db_transaction_arc = self.db_transaction.clone(); if let Some(db_transaction) = db_transaction_arc { @@ -396,7 +396,7 @@ impl Cursor { pub async fn fetch<'a>( slf: Py, fetch_number: Option, - ) -> RustPSQLDriverPyResult { + ) -> PSQLPyResult { let (db_transaction, inner_fetch_number, cursor_name) = Python::with_gil(|gil| { let self_ = slf.borrow(gil); ( @@ -437,7 +437,7 @@ impl Cursor { /// /// # Errors /// May return Err Result if cannot execute query. - pub async fn fetch_next<'a>(slf: Py) -> RustPSQLDriverPyResult { + pub async fn fetch_next<'a>(slf: Py) -> PSQLPyResult { let (db_transaction, cursor_name) = Python::with_gil(|gil| { let self_ = slf.borrow(gil); (self_.db_transaction.clone(), self_.cursor_name.clone()) @@ -464,7 +464,7 @@ impl Cursor { /// /// # Errors /// May return Err Result if cannot execute query. - pub async fn fetch_prior<'a>(slf: Py) -> RustPSQLDriverPyResult { + pub async fn fetch_prior<'a>(slf: Py) -> PSQLPyResult { let (db_transaction, cursor_name) = Python::with_gil(|gil| { let self_ = slf.borrow(gil); (self_.db_transaction.clone(), self_.cursor_name.clone()) @@ -491,7 +491,7 @@ impl Cursor { /// /// # Errors /// May return Err Result if cannot execute query. - pub async fn fetch_first<'a>(slf: Py) -> RustPSQLDriverPyResult { + pub async fn fetch_first<'a>(slf: Py) -> PSQLPyResult { let (db_transaction, cursor_name) = Python::with_gil(|gil| { let self_ = slf.borrow(gil); (self_.db_transaction.clone(), self_.cursor_name.clone()) @@ -518,7 +518,7 @@ impl Cursor { /// /// # Errors /// May return Err Result if cannot execute query. 
- pub async fn fetch_last<'a>(slf: Py) -> RustPSQLDriverPyResult { + pub async fn fetch_last<'a>(slf: Py) -> PSQLPyResult { let (db_transaction, cursor_name) = Python::with_gil(|gil| { let self_ = slf.borrow(gil); (self_.db_transaction.clone(), self_.cursor_name.clone()) @@ -548,7 +548,7 @@ impl Cursor { pub async fn fetch_absolute<'a>( slf: Py, absolute_number: i64, - ) -> RustPSQLDriverPyResult { + ) -> PSQLPyResult { let (db_transaction, cursor_name) = Python::with_gil(|gil| { let self_ = slf.borrow(gil); (self_.db_transaction.clone(), self_.cursor_name.clone()) @@ -582,7 +582,7 @@ impl Cursor { pub async fn fetch_relative<'a>( slf: Py, relative_number: i64, - ) -> RustPSQLDriverPyResult { + ) -> PSQLPyResult { let (db_transaction, cursor_name) = Python::with_gil(|gil| { let self_ = slf.borrow(gil); (self_.db_transaction.clone(), self_.cursor_name.clone()) @@ -613,9 +613,7 @@ impl Cursor { /// /// # Errors /// May return Err Result if cannot execute query. - pub async fn fetch_forward_all<'a>( - slf: Py, - ) -> RustPSQLDriverPyResult { + pub async fn fetch_forward_all<'a>(slf: Py) -> PSQLPyResult { let (db_transaction, cursor_name) = Python::with_gil(|gil| { let self_ = slf.borrow(gil); (self_.db_transaction.clone(), self_.cursor_name.clone()) @@ -649,7 +647,7 @@ impl Cursor { pub async fn fetch_backward<'a>( slf: Py, backward_count: i64, - ) -> RustPSQLDriverPyResult { + ) -> PSQLPyResult { let (db_transaction, cursor_name) = Python::with_gil(|gil| { let self_ = slf.borrow(gil); (self_.db_transaction.clone(), self_.cursor_name.clone()) @@ -680,9 +678,7 @@ impl Cursor { /// /// # Errors /// May return Err Result if cannot execute query. - pub async fn fetch_backward_all<'a>( - slf: Py, - ) -> RustPSQLDriverPyResult { + pub async fn fetch_backward_all<'a>(slf: Py) -> PSQLPyResult { let (db_transaction, cursor_name) = Python::with_gil(|gil| { let self_ = slf.borrow(gil); (self_.db_transaction.clone(), self_.cursor_name.clone()) diff --git a/src/driver/inner_connection.rs b/src/driver/inner_connection.rs index 2dfbcbb7..a7e9d233 100644 --- a/src/driver/inner_connection.rs +++ b/src/driver/inner_connection.rs @@ -1,18 +1,15 @@ use bytes::Buf; use deadpool_postgres::Object; -use postgres_types::ToSql; +use postgres_types::{ToSql, Type}; use pyo3::{Py, PyAny, Python}; use std::vec; use tokio_postgres::{Client, CopyInSink, Row, Statement, ToStatement}; use crate::{ - exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, + exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, query_result::{PSQLDriverPyQueryResult, PSQLDriverSinglePyQueryResult}, - value_converter::{ - consts::QueryParameter, - dto::enums::PythonDTO, - funcs::{from_python::convert_parameters_and_qs, to_python::postgres_to_py}, - }, + statement::{statement::PsqlpyStatement, statement_builder::StatementBuilder}, + value_converter::to_python::postgres_to_py, }; #[allow(clippy::module_name_repetitions)] @@ -26,13 +23,39 @@ impl PsqlpyConnection { /// /// # Errors /// May return Err if cannot prepare statement. - pub async fn prepare_cached(&self, query: &str) -> RustPSQLDriverPyResult { + pub async fn prepare(&self, query: &str) -> PSQLPyResult { match self { PsqlpyConnection::PoolConn(pconn) => return Ok(pconn.prepare_cached(query).await?), PsqlpyConnection::SingleConn(sconn) => return Ok(sconn.prepare(query).await?), } } + /// Delete prepared statement. + /// + /// # Errors + /// May return Err if cannot prepare statement. 
+ pub async fn drop_prepared(&self, stmt: &Statement) -> PSQLPyResult<()> { + let query = format!("DEALLOCATE PREPARE {}", stmt.name()); + match self { + PsqlpyConnection::PoolConn(pconn) => return Ok(pconn.batch_execute(&query).await?), + PsqlpyConnection::SingleConn(sconn) => return Ok(sconn.batch_execute(&query).await?), + } + } + + /// Prepare and delete statement. + /// + /// # Errors + /// Can return Err if cannot prepare statement. + pub async fn prepare_then_drop(&self, query: &str) -> PSQLPyResult> { + let types: Vec; + + let stmt = self.prepare(query).await?; + types = stmt.params().to_vec(); + self.drop_prepared(&stmt).await?; + + Ok(types) + } + /// Prepare cached statement. /// /// # Errors @@ -41,7 +64,7 @@ impl PsqlpyConnection { &self, statement: &T, params: &[&(dyn ToSql + Sync)], - ) -> RustPSQLDriverPyResult> + ) -> PSQLPyResult> where T: ?Sized + ToStatement, { @@ -57,7 +80,7 @@ impl PsqlpyConnection { /// /// # Errors /// May return Err if cannot execute statement. - pub async fn batch_execute(&self, query: &str) -> RustPSQLDriverPyResult<()> { + pub async fn batch_execute(&self, query: &str) -> PSQLPyResult<()> { match self { PsqlpyConnection::PoolConn(pconn) => return Ok(pconn.batch_execute(query).await?), PsqlpyConnection::SingleConn(sconn) => return Ok(sconn.batch_execute(query).await?), @@ -72,7 +95,7 @@ impl PsqlpyConnection { &self, statement: &T, params: &[&(dyn ToSql + Sync)], - ) -> RustPSQLDriverPyResult + ) -> PSQLPyResult where T: ?Sized + ToStatement, { @@ -91,38 +114,28 @@ impl PsqlpyConnection { querystring: String, parameters: Option>, prepared: Option, - ) -> RustPSQLDriverPyResult { - let prepared = prepared.unwrap_or(true); - - let (qs, params) = convert_parameters_and_qs(querystring, parameters)?; + ) -> PSQLPyResult { + let statement = StatementBuilder::new(querystring, parameters, self, prepared) + .build() + .await?; - let boxed_params = ¶ms - .iter() - .map(|param| param as &QueryParameter) - .collect::>() - .into_boxed_slice(); + let prepared = prepared.unwrap_or(true); let result = if prepared { self.query( - &self.prepare_cached(&qs).await.map_err(|err| { + &self.prepare(&statement.sql_stmt()).await.map_err(|err| { RustPSQLDriverError::ConnectionExecuteError(format!( "Cannot prepare statement, error - {err}" )) })?, - boxed_params, + &statement.params(), ) .await - .map_err(|err| { - RustPSQLDriverError::ConnectionExecuteError(format!( - "Cannot execute statement, error - {err}" - )) - })? + .map_err(|err| RustPSQLDriverError::ConnectionExecuteError(format!("{err}")))? } else { - self.query(&qs, boxed_params).await.map_err(|err| { - RustPSQLDriverError::ConnectionExecuteError(format!( - "Cannot execute statement, error - {err}" - )) - })? + self.query(statement.sql_stmt(), &statement.params()) + .await + .map_err(|err| RustPSQLDriverError::ConnectionExecuteError(format!("{err}")))? 
}; Ok(PSQLDriverPyQueryResult::new(result)) @@ -133,38 +146,28 @@ impl PsqlpyConnection { querystring: String, parameters: Option>, prepared: Option, - ) -> RustPSQLDriverPyResult { - let prepared = prepared.unwrap_or(true); - - let (qs, params) = convert_parameters_and_qs(querystring, parameters)?; + ) -> PSQLPyResult { + let statement = StatementBuilder::new(querystring, parameters, self, prepared) + .build() + .await?; - let boxed_params = ¶ms - .iter() - .map(|param| param as &QueryParameter) - .collect::>() - .into_boxed_slice(); + let prepared = prepared.unwrap_or(true); let result = if prepared { self.query( - &self.prepare_cached(&qs).await.map_err(|err| { + &self.prepare(statement.sql_stmt()).await.map_err(|err| { RustPSQLDriverError::ConnectionExecuteError(format!( "Cannot prepare statement, error - {err}" )) })?, - boxed_params, + &statement.params(), ) .await - .map_err(|err| { - RustPSQLDriverError::ConnectionExecuteError(format!( - "Cannot execute statement, error - {err}" - )) - })? + .map_err(|err| RustPSQLDriverError::ConnectionExecuteError(format!("{err}")))? } else { - self.query(&qs, boxed_params).await.map_err(|err| { - RustPSQLDriverError::ConnectionExecuteError(format!( - "Cannot execute statement, error - {err}" - )) - })? + self.query(statement.sql_stmt(), &statement.params()) + .await + .map_err(|err| RustPSQLDriverError::ConnectionExecuteError(format!("{err}")))? }; Ok(PSQLDriverPyQueryResult::new(result)) @@ -172,41 +175,40 @@ impl PsqlpyConnection { pub async fn execute_many( &self, - mut querystring: String, + querystring: String, parameters: Option>>, prepared: Option, - ) -> RustPSQLDriverPyResult<()> { - let prepared = prepared.unwrap_or(true); - - let mut params: Vec> = vec![]; + ) -> PSQLPyResult<()> { + let mut statements: Vec = vec![]; if let Some(parameters) = parameters { for vec_of_py_any in parameters { // TODO: Fix multiple qs creation - let (qs, parsed_params) = - convert_parameters_and_qs(querystring.clone(), Some(vec_of_py_any))?; - querystring = qs; - params.push(parsed_params); + let statement = + StatementBuilder::new(querystring.clone(), Some(vec_of_py_any), self, prepared) + .build() + .await?; + + statements.push(statement); } } - for param in params { - let boxed_params = ¶m - .iter() - .map(|param| param as &QueryParameter) - .collect::>() - .into_boxed_slice(); + let prepared = prepared.unwrap_or(true); + for statement in statements { let querystring_result = if prepared { - let prepared_stmt = &self.prepare_cached(&querystring).await; + let prepared_stmt = &self.prepare(&statement.sql_stmt()).await; if let Err(error) = prepared_stmt { return Err(RustPSQLDriverError::ConnectionExecuteError(format!( "Cannot prepare statement in execute_many, operation rolled back {error}", ))); } - self.query(&self.prepare_cached(&querystring).await?, boxed_params) - .await + self.query( + &self.prepare(&statement.sql_stmt()).await?, + &statement.params(), + ) + .await } else { - self.query(&querystring, boxed_params).await + self.query(statement.sql_stmt(), &statement.params()).await }; if let Err(error) = querystring_result { @@ -224,38 +226,28 @@ impl PsqlpyConnection { querystring: String, parameters: Option>, prepared: Option, - ) -> RustPSQLDriverPyResult { - let prepared = prepared.unwrap_or(true); - - let (qs, params) = convert_parameters_and_qs(querystring, parameters)?; + ) -> PSQLPyResult { + let statement = StatementBuilder::new(querystring, parameters, self, prepared) + .build() + .await?; - let boxed_params = ¶ms - .iter() - .map(|param| 
param as &QueryParameter) - .collect::>() - .into_boxed_slice(); + let prepared = prepared.unwrap_or(true); let result = if prepared { self.query_one( - &self.prepare_cached(&qs).await.map_err(|err| { + &self.prepare(&statement.sql_stmt()).await.map_err(|err| { RustPSQLDriverError::ConnectionExecuteError(format!( "Cannot prepare statement, error - {err}" )) })?, - boxed_params, + &statement.params(), ) .await - .map_err(|err| { - RustPSQLDriverError::ConnectionExecuteError(format!( - "Cannot execute statement, error - {err}" - )) - })? + .map_err(|err| RustPSQLDriverError::ConnectionExecuteError(format!("{err}")))? } else { - self.query_one(&qs, boxed_params).await.map_err(|err| { - RustPSQLDriverError::ConnectionExecuteError(format!( - "Cannot execute statement, error - {err}" - )) - })? + self.query_one(statement.sql_stmt(), &statement.params()) + .await + .map_err(|err| RustPSQLDriverError::ConnectionExecuteError(format!("{err}")))? }; return Ok(result); @@ -266,7 +258,7 @@ impl PsqlpyConnection { querystring: String, parameters: Option>, prepared: Option, - ) -> RustPSQLDriverPyResult { + ) -> PSQLPyResult { let result = self .fetch_row_raw(querystring, parameters, prepared) .await?; @@ -279,7 +271,7 @@ impl PsqlpyConnection { querystring: String, parameters: Option>, prepared: Option, - ) -> RustPSQLDriverPyResult> { + ) -> PSQLPyResult> { let result = self .fetch_row_raw(querystring, parameters, prepared) .await?; @@ -294,7 +286,7 @@ impl PsqlpyConnection { /// /// # Errors /// May return Err if cannot execute copy data. - pub async fn copy_in(&self, statement: &T) -> RustPSQLDriverPyResult> + pub async fn copy_in(&self, statement: &T) -> PSQLPyResult> where T: ?Sized + ToStatement, U: Buf + 'static + Send, diff --git a/src/driver/listener/core.rs b/src/driver/listener/core.rs index 83aa9b3e..16b323d8 100644 --- a/src/driver/listener/core.rs +++ b/src/driver/listener/core.rs @@ -18,7 +18,7 @@ use crate::{ inner_connection::PsqlpyConnection, utils::{build_tls, is_coroutine_function, ConfiguredTLS}, }, - exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, + exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, runtime::{rustdriver_future, tokio_runtime}, }; @@ -89,7 +89,7 @@ impl Listener { } #[allow(clippy::unused_async)] - async fn __aenter__<'a>(slf: Py) -> RustPSQLDriverPyResult> { + async fn __aenter__<'a>(slf: Py) -> PSQLPyResult> { Ok(slf) } @@ -99,7 +99,7 @@ impl Listener { _exception_type: Py, exception: Py, _traceback: Py, - ) -> RustPSQLDriverPyResult<()> { + ) -> PSQLPyResult<()> { let (client, is_exception_none, py_err) = pyo3::Python::with_gil(|gil| { let self_ = slf.borrow(gil); ( @@ -126,7 +126,7 @@ impl Listener { Err(RustPSQLDriverError::ListenerClosedError) } - fn __anext__(&self) -> RustPSQLDriverPyResult>> { + fn __anext__(&self) -> PSQLPyResult>> { let Some(client) = self.connection.db_client() else { return Err(RustPSQLDriverError::ListenerStartError( "Listener doesn't have underlying client, please call startup".into(), @@ -167,7 +167,7 @@ impl Listener { } #[getter] - fn connection(&self) -> RustPSQLDriverPyResult { + fn connection(&self) -> PSQLPyResult { if !self.is_started { return Err(RustPSQLDriverError::ListenerStartError( "Listener isn't started up".into(), @@ -177,7 +177,7 @@ impl Listener { Ok(self.connection.clone()) } - async fn startup(&mut self) -> RustPSQLDriverPyResult<()> { + async fn startup(&mut self) -> PSQLPyResult<()> { if self.is_started { return Err(RustPSQLDriverError::ListenerStartError( "Listener is already 
started".into(), @@ -238,11 +238,7 @@ impl Listener { } #[pyo3(signature = (channel, callback))] - async fn add_callback( - &mut self, - channel: String, - callback: Py, - ) -> RustPSQLDriverPyResult<()> { + async fn add_callback(&mut self, channel: String, callback: Py) -> PSQLPyResult<()> { if !is_coroutine_function(callback.clone())? { return Err(RustPSQLDriverError::ListenerCallbackError); } @@ -279,7 +275,7 @@ impl Listener { self.update_listen_query().await; } - fn listen(&mut self) -> RustPSQLDriverPyResult<()> { + fn listen(&mut self) -> PSQLPyResult<()> { let Some(client) = self.connection.db_client() else { return Err(RustPSQLDriverError::ListenerStartError( "Cannot start listening, underlying connection doesn't exist".into(), @@ -343,7 +339,7 @@ async fn dispatch_callback( listener_callback: &ListenerCallback, listener_notification: ListenerNotification, connection: Connection, -) -> RustPSQLDriverPyResult<()> { +) -> PSQLPyResult<()> { listener_callback .call(listener_notification.clone(), connection) .await?; @@ -355,7 +351,7 @@ async fn execute_listen( is_listened: &Arc>, listen_query: &Arc>, client: &Arc, -) -> RustPSQLDriverPyResult<()> { +) -> PSQLPyResult<()> { let mut write_is_listened = is_listened.write().await; if !write_is_listened.eq(&true) { @@ -371,7 +367,7 @@ async fn execute_listen( Ok(()) } -fn process_message(message: Option) -> RustPSQLDriverPyResult { +fn process_message(message: Option) -> PSQLPyResult { let Some(async_message) = message else { return Err(RustPSQLDriverError::ListenerError("Wow".into())); }; diff --git a/src/driver/listener/structs.rs b/src/driver/listener/structs.rs index 4d53a408..6236547e 100644 --- a/src/driver/listener/structs.rs +++ b/src/driver/listener/structs.rs @@ -6,7 +6,7 @@ use tokio_postgres::Notification; use crate::{ driver::connection::Connection, - exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, + exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, runtime::tokio_runtime, }; @@ -126,7 +126,7 @@ impl ListenerCallback { &self, lister_notification: ListenerNotification, connection: Connection, - ) -> RustPSQLDriverPyResult<()> { + ) -> PSQLPyResult<()> { let (callback, task_locals) = Python::with_gil(|py| (self.callback.clone(), self.task_locals.clone_ref(py))); diff --git a/src/driver/transaction.rs b/src/driver/transaction.rs index 2fa38ba5..60f054b7 100644 --- a/src/driver/transaction.rs +++ b/src/driver/transaction.rs @@ -9,7 +9,7 @@ use pyo3::{ use tokio_postgres::{binary_copy::BinaryCopyInWriter, config::Host, Config}; use crate::{ - exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, + exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, format_helpers::quote_ident, query_result::{PSQLDriverPyQueryResult, PSQLDriverSinglePyQueryResult}, }; @@ -29,9 +29,9 @@ pub trait TransactionObjectTrait { read_variant: Option, defferable: Option, synchronous_commit: Option, - ) -> impl std::future::Future> + Send; - fn commit(&self) -> impl std::future::Future> + Send; - fn rollback(&self) -> impl std::future::Future> + Send; + ) -> impl std::future::Future> + Send; + fn commit(&self) -> impl std::future::Future> + Send; + fn rollback(&self) -> impl std::future::Future> + Send; } impl TransactionObjectTrait for PsqlpyConnection { @@ -41,7 +41,7 @@ impl TransactionObjectTrait for PsqlpyConnection { read_variant: Option, deferrable: Option, synchronous_commit: Option, - ) -> RustPSQLDriverPyResult<()> { + ) -> PSQLPyResult<()> { let mut querystring = "START 
TRANSACTION".to_string(); if let Some(level) = isolation_level { @@ -84,7 +84,7 @@ impl TransactionObjectTrait for PsqlpyConnection { Ok(()) } - async fn commit(&self) -> RustPSQLDriverPyResult<()> { + async fn commit(&self) -> PSQLPyResult<()> { self.batch_execute("COMMIT;").await.map_err(|err| { RustPSQLDriverError::TransactionCommitError(format!( "Cannot execute COMMIT statement, error - {err}" @@ -92,7 +92,7 @@ impl TransactionObjectTrait for PsqlpyConnection { })?; Ok(()) } - async fn rollback(&self) -> RustPSQLDriverPyResult<()> { + async fn rollback(&self) -> PSQLPyResult<()> { self.batch_execute("ROLLBACK;").await.map_err(|err| { RustPSQLDriverError::TransactionRollbackError(format!( "Cannot execute ROLLBACK statement, error - {err}" @@ -144,7 +144,7 @@ impl Transaction { } } - fn check_is_transaction_ready(&self) -> RustPSQLDriverPyResult<()> { + fn check_is_transaction_ready(&self) -> PSQLPyResult<()> { if !self.is_started { return Err(RustPSQLDriverError::TransactionBeginError( "Transaction is not started, please call begin() on transaction".into(), @@ -242,7 +242,7 @@ impl Transaction { self_ } - async fn __aenter__<'a>(self_: Py) -> RustPSQLDriverPyResult> { + async fn __aenter__<'a>(self_: Py) -> PSQLPyResult> { let ( is_started, is_done, @@ -302,7 +302,7 @@ impl Transaction { _exception_type: Py, exception: Py, _traceback: Py, - ) -> RustPSQLDriverPyResult<()> { + ) -> PSQLPyResult<()> { let (is_transaction_ready, is_exception_none, py_err, db_client) = pyo3::Python::with_gil(|gil| { let self_ = self_.borrow(gil); @@ -345,7 +345,7 @@ impl Transaction { /// 1) Transaction is not started /// 2) Transaction is done /// 3) Cannot execute `COMMIT` command - pub async fn commit(&mut self) -> RustPSQLDriverPyResult<()> { + pub async fn commit(&mut self) -> PSQLPyResult<()> { self.check_is_transaction_ready()?; if let Some(db_client) = &self.db_client { db_client.commit().await?; @@ -366,7 +366,7 @@ impl Transaction { /// 1) Transaction is not started /// 2) Transaction is done /// 3) Can not execute ROLLBACK command - pub async fn rollback(&mut self) -> RustPSQLDriverPyResult<()> { + pub async fn rollback(&mut self) -> PSQLPyResult<()> { self.check_is_transaction_ready()?; if let Some(db_client) = &self.db_client { db_client.rollback().await?; @@ -394,7 +394,7 @@ impl Transaction { querystring: String, parameters: Option>, prepared: Option, - ) -> RustPSQLDriverPyResult { + ) -> PSQLPyResult { let (is_transaction_ready, db_client) = pyo3::Python::with_gil(|gil| { let self_ = self_.borrow(gil); (self_.check_is_transaction_ready(), self_.db_client.clone()) @@ -419,7 +419,7 @@ impl Transaction { /// May return Err Result if: /// 1) Transaction is closed. /// 2) Cannot execute querystring. 
- pub async fn execute_batch(self_: Py, querystring: String) -> RustPSQLDriverPyResult<()> { + pub async fn execute_batch(self_: Py, querystring: String) -> PSQLPyResult<()> { let (is_transaction_ready, db_client) = pyo3::Python::with_gil(|gil| { let self_ = self_.borrow(gil); (self_.check_is_transaction_ready(), self_.db_client.clone()) @@ -448,7 +448,7 @@ impl Transaction { querystring: String, parameters: Option>, prepared: Option, - ) -> RustPSQLDriverPyResult { + ) -> PSQLPyResult { let (is_transaction_ready, db_client) = pyo3::Python::with_gil(|gil| { let self_ = self_.borrow(gil); (self_.check_is_transaction_ready(), self_.db_client.clone()) @@ -481,7 +481,7 @@ impl Transaction { querystring: String, parameters: Option>, prepared: Option, - ) -> RustPSQLDriverPyResult { + ) -> PSQLPyResult { let (is_transaction_ready, db_client) = pyo3::Python::with_gil(|gil| { let self_ = self_.borrow(gil); (self_.check_is_transaction_ready(), self_.db_client.clone()) @@ -511,7 +511,7 @@ impl Transaction { querystring: String, parameters: Option>, prepared: Option, - ) -> RustPSQLDriverPyResult> { + ) -> PSQLPyResult> { let (is_transaction_ready, db_client) = pyo3::Python::with_gil(|gil| { let self_ = self_.borrow(gil); (self_.check_is_transaction_ready(), self_.db_client.clone()) @@ -539,7 +539,7 @@ impl Transaction { querystring: String, parameters: Option>>, prepared: Option, - ) -> RustPSQLDriverPyResult<()> { + ) -> PSQLPyResult<()> { let (is_transaction_ready, db_client) = pyo3::Python::with_gil(|gil| { let self_ = self_.borrow(gil); (self_.check_is_transaction_ready(), self_.db_client.clone()) @@ -564,7 +564,7 @@ impl Transaction { /// 1) Transaction is already started. /// 2) Transaction is done. /// 3) Cannot execute `BEGIN` command. - pub async fn begin(self_: Py) -> RustPSQLDriverPyResult<()> { + pub async fn begin(self_: Py) -> PSQLPyResult<()> { let ( is_started, is_done, @@ -629,10 +629,7 @@ impl Transaction { /// 2) Transaction is done /// 3) Specified savepoint name is exists /// 4) Can not execute SAVEPOINT command - pub async fn create_savepoint( - self_: Py, - savepoint_name: String, - ) -> RustPSQLDriverPyResult<()> { + pub async fn create_savepoint(self_: Py, savepoint_name: String) -> PSQLPyResult<()> { let (is_transaction_ready, is_savepoint_name_exists, db_client) = pyo3::Python::with_gil(|gil| { let self_ = self_.borrow(gil); @@ -673,10 +670,7 @@ impl Transaction { /// 2) Transaction is done /// 3) Specified savepoint name doesn't exists /// 4) Can not execute RELEASE SAVEPOINT command - pub async fn release_savepoint( - self_: Py, - savepoint_name: String, - ) -> RustPSQLDriverPyResult<()> { + pub async fn release_savepoint(self_: Py, savepoint_name: String) -> PSQLPyResult<()> { let (is_transaction_ready, is_savepoint_name_exists, db_client) = pyo3::Python::with_gil(|gil| { let self_ = self_.borrow(gil); @@ -717,10 +711,7 @@ impl Transaction { /// 2) Transaction is done /// 3) Specified savepoint name doesn't exist /// 4) Can not execute ROLLBACK TO SAVEPOINT command - pub async fn rollback_savepoint( - self_: Py, - savepoint_name: String, - ) -> RustPSQLDriverPyResult<()> { + pub async fn rollback_savepoint(self_: Py, savepoint_name: String) -> PSQLPyResult<()> { let (is_transaction_ready, is_savepoint_name_exists, db_client) = pyo3::Python::with_gil(|gil| { let self_ = self_.borrow(gil); @@ -765,7 +756,7 @@ impl Transaction { self_: Py, queries: Option>, prepared: Option, - ) -> RustPSQLDriverPyResult> { + ) -> PSQLPyResult> { let (is_transaction_ready, db_client) = 
pyo3::Python::with_gil(|gil| { let self_ = self_.borrow(gil); @@ -827,7 +818,7 @@ impl Transaction { fetch_number: Option, scroll: Option, prepared: Option, - ) -> RustPSQLDriverPyResult { + ) -> PSQLPyResult { if let Some(db_client) = &self.db_client { return Ok(Cursor::new( db_client.clone(), @@ -857,7 +848,7 @@ impl Transaction { table_name: String, columns: Option>, schema_name: Option, - ) -> RustPSQLDriverPyResult { + ) -> PSQLPyResult { let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).db_client.clone()); let mut table_name = quote_ident(&table_name); if let Some(schema_name) = schema_name { diff --git a/src/driver/utils.rs b/src/driver/utils.rs index 3d0d59e3..15ca4123 100644 --- a/src/driver/utils.rs +++ b/src/driver/utils.rs @@ -6,7 +6,7 @@ use postgres_openssl::MakeTlsConnector; use pyo3::{types::PyAnyMethods, Py, PyAny, Python}; use tokio_postgres::{Config, NoTls}; -use crate::exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}; +use crate::exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}; use super::common_options::{self, LoadBalanceHosts, SslMode, TargetSessionAttrs}; @@ -40,7 +40,7 @@ pub fn build_connection_config( keepalives_retries: Option, load_balance_hosts: Option, ssl_mode: Option, -) -> RustPSQLDriverPyResult { +) -> PSQLPyResult { if tcp_user_timeout_nanosec.is_some() && tcp_user_timeout_sec.is_none() { return Err(RustPSQLDriverError::ConnectionPoolConfigurationError( "tcp_user_timeout_nanosec must be used with tcp_user_timeout_sec param.".into(), @@ -182,7 +182,7 @@ pub enum ConfiguredTLS { pub fn build_tls( ca_file: &Option, ssl_mode: &Option, -) -> RustPSQLDriverPyResult { +) -> PSQLPyResult { if let Some(ca_file) = ca_file { let mut builder = SslConnector::builder(SslMethod::tls())?; builder.set_ca_file(ca_file)?; @@ -224,7 +224,7 @@ pub fn build_manager( /// May return Err Result if cannot /// 1) import inspect /// 2) extract boolean -pub fn is_coroutine_function(function: Py) -> RustPSQLDriverPyResult { +pub fn is_coroutine_function(function: Py) -> PSQLPyResult { let is_coroutine_function: bool = Python::with_gil(|py| { let inspect = py.import("inspect")?; diff --git a/src/exceptions/rust_errors.rs b/src/exceptions/rust_errors.rs index 48af50cb..b6694da1 100644 --- a/src/exceptions/rust_errors.rs +++ b/src/exceptions/rust_errors.rs @@ -14,7 +14,7 @@ use super::python_errors::{ TransactionRollbackError, TransactionSavepointError, UUIDValueConvertError, }; -pub type RustPSQLDriverPyResult = Result; +pub type PSQLPyResult = Result; #[derive(Error, Debug)] pub enum RustPSQLDriverError { @@ -29,9 +29,9 @@ pub enum RustPSQLDriverError { ConnectionPoolExecuteError(String), // Connection Errors - #[error("Connection error: {0}.")] + #[error("{0}")] BaseConnectionError(String), - #[error("Connection execute error: {0}.")] + #[error("{0}")] ConnectionExecuteError(String), #[error("Underlying connection is returned to the pool")] ConnectionClosedError, @@ -81,7 +81,7 @@ pub enum RustPSQLDriverError { #[error("Python exception: {0}.")] RustPyError(#[from] pyo3::PyErr), - #[error("Database engine exception: {0}.")] + #[error("{0}")] RustDriverError(#[from] deadpool_postgres::tokio_postgres::Error), #[error("Database engine pool exception: {0}")] RustConnectionPoolError(#[from] deadpool_postgres::PoolError), diff --git a/src/extra_types.rs b/src/extra_types.rs index 48058ff4..c3b2d832 100644 --- a/src/extra_types.rs +++ b/src/extra_types.rs @@ -3,6 +3,7 @@ use std::str::FromStr; use geo_types::{Line as RustLineSegment, 
LineString, Point as RustPoint, Rect as RustRect}; use macaddr::{MacAddr6 as RustMacAddr6, MacAddr8 as RustMacAddr8}; use pyo3::{ + conversion::FromPyObjectBound, pyclass, pymethods, types::{PyModule, PyModuleMethods}, Bound, Py, PyAny, PyResult, Python, @@ -10,13 +11,11 @@ use pyo3::{ use serde_json::Value; use crate::{ - exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, + exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, value_converter::{ additional_types::{Circle as RustCircle, Line as RustLine}, dto::enums::PythonDTO, - funcs::from_python::{ - build_flat_geo_coords, build_geo_coords, py_sequence_into_postgres_array, - }, + from_python::{build_flat_geo_coords, build_geo_coords, py_sequence_into_postgres_array}, models::serde_value::build_serde_value, }, }; @@ -155,7 +154,7 @@ macro_rules! build_json_py_type { impl $st_name { #[new] #[allow(clippy::missing_errors_doc)] - pub fn new_class(value: &Bound<'_, PyAny>) -> RustPSQLDriverPyResult { + pub fn new_class(value: &Bound<'_, PyAny>) -> PSQLPyResult { Ok(Self { inner: build_serde_value(value)?, }) @@ -191,7 +190,7 @@ macro_rules! build_macaddr_type { impl $st_name { #[new] #[allow(clippy::missing_errors_doc)] - pub fn new_class(value: &str) -> RustPSQLDriverPyResult { + pub fn new_class(value: &str) -> PSQLPyResult { Ok(Self { inner: <$rust_type>::from_str(value)?, }) @@ -252,7 +251,7 @@ build_geo_type!(Circle, RustCircle); impl Point { #[new] #[allow(clippy::missing_errors_doc)] - pub fn new_point(value: Py) -> RustPSQLDriverPyResult { + pub fn new_point(value: Py) -> PSQLPyResult { let point_coords = build_geo_coords(value, Some(1))?; Ok(Self { @@ -265,7 +264,7 @@ impl Point { impl Box { #[new] #[allow(clippy::missing_errors_doc)] - pub fn new_box(value: Py) -> RustPSQLDriverPyResult { + pub fn new_box(value: Py) -> PSQLPyResult { let box_coords = build_geo_coords(value, Some(2))?; Ok(Self { @@ -278,7 +277,7 @@ impl Box { impl Path { #[new] #[allow(clippy::missing_errors_doc)] - pub fn new_path(value: Py) -> RustPSQLDriverPyResult { + pub fn new_path(value: Py) -> PSQLPyResult { let path_coords = build_geo_coords(value, None)?; Ok(Self { @@ -291,7 +290,7 @@ impl Path { impl Line { #[new] #[allow(clippy::missing_errors_doc)] - pub fn new_line(value: Py) -> RustPSQLDriverPyResult { + pub fn new_line(value: Py) -> PSQLPyResult { let line_coords = build_flat_geo_coords(value, Some(3))?; Ok(Self { @@ -304,7 +303,7 @@ impl Line { impl LineSegment { #[new] #[allow(clippy::missing_errors_doc)] - pub fn new_line_segment(value: Py) -> RustPSQLDriverPyResult { + pub fn new_line_segment(value: Py) -> PSQLPyResult { let line_segment_coords = build_geo_coords(value, Some(2))?; Ok(Self { @@ -317,7 +316,7 @@ impl LineSegment { impl Circle { #[new] #[allow(clippy::missing_errors_doc)] - pub fn new_circle(value: Py) -> RustPSQLDriverPyResult { + pub fn new_circle(value: Py) -> PSQLPyResult { let circle_coords = build_flat_geo_coords(value, Some(3))?; Ok(Self { inner: RustCircle::new(circle_coords[0], circle_coords[1], circle_coords[2]), @@ -352,7 +351,7 @@ macro_rules! build_array_type { /// /// # Errors /// May return Err Result if cannot convert sequence to array. 
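// Editor's note: illustrative sketch, not part of this patch. The geometry
// constructors above share one pattern: pull coordinates out of the Python value,
// enforce the expected arity, then wrap the rust-side geo type. Only the arity
// argument differs per type (Point -> Some(1), Box/LineSegment -> Some(2),
// Line/Circle -> Some(3) flat values, Path -> None for any length). A condensed
// restatement of the Circle case, using only names that appear in this hunk:
fn circle_from_py(value: Py<PyAny>) -> PSQLPyResult<RustCircle> {
    // `Some(3)` enforces exactly [x, y, radius], mirroring `Circle::new_circle` above.
    let coords = build_flat_geo_coords(value, Some(3))?;
    Ok(RustCircle::new(coords[0], coords[1], coords[2]))
}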
- pub fn _convert_to_python_dto(&self) -> RustPSQLDriverPyResult { + pub fn _convert_to_python_dto(&self) -> PSQLPyResult { return Python::with_gil(|gil| { let binding = &self.inner; let bound_inner = Ok::<&pyo3::Bound<'_, pyo3::PyAny>, RustPSQLDriverError>( diff --git a/src/lib.rs b/src/lib.rs index e0e1fe11..6be59c75 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -6,6 +6,7 @@ pub mod format_helpers; pub mod query_result; pub mod row_factories; pub mod runtime; +pub mod statement; pub mod value_converter; use common::add_module; diff --git a/src/query_result.rs b/src/query_result.rs index da393f89..cda02a8b 100644 --- a/src/query_result.rs +++ b/src/query_result.rs @@ -1,10 +1,7 @@ use pyo3::{prelude::*, pyclass, pymethods, types::PyDict, Py, PyAny, Python, ToPyObject}; use tokio_postgres::Row; -use crate::{ - exceptions::rust_errors::RustPSQLDriverPyResult, - value_converter::funcs::to_python::postgres_to_py, -}; +use crate::{exceptions::rust_errors::PSQLPyResult, value_converter::to_python::postgres_to_py}; /// Convert postgres `Row` into Python Dict. /// @@ -18,7 +15,7 @@ fn row_to_dict<'a>( py: Python<'a>, postgres_row: &'a Row, custom_decoders: &Option>, -) -> RustPSQLDriverPyResult> { +) -> PSQLPyResult> { let python_dict = PyDict::new(py); for (column_idx, column) in postgres_row.columns().iter().enumerate() { let python_type = postgres_to_py(py, postgres_row, column, column_idx, custom_decoders)?; @@ -30,7 +27,7 @@ fn row_to_dict<'a>( #[pyclass(name = "QueryResult")] #[allow(clippy::module_name_repetitions)] pub struct PSQLDriverPyQueryResult { - inner: Vec, + pub inner: Vec, } impl PSQLDriverPyQueryResult { @@ -65,7 +62,7 @@ impl PSQLDriverPyQueryResult { &self, py: Python<'_>, custom_decoders: Option>, - ) -> RustPSQLDriverPyResult> { + ) -> PSQLPyResult> { let mut result: Vec> = vec![]; for row in &self.inner { result.push(row_to_dict(py, row, &custom_decoders)?); @@ -80,11 +77,7 @@ impl PSQLDriverPyQueryResult { /// May return Err Result if can not convert /// postgres type to python or create new Python class. #[allow(clippy::needless_pass_by_value)] - pub fn as_class<'a>( - &'a self, - py: Python<'a>, - as_class: Py, - ) -> RustPSQLDriverPyResult> { + pub fn as_class<'a>(&'a self, py: Python<'a>, as_class: Py) -> PSQLPyResult> { let mut res: Vec> = vec![]; for row in &self.inner { let pydict: pyo3::Bound<'_, PyDict> = row_to_dict(py, row, &None)?; @@ -108,7 +101,7 @@ impl PSQLDriverPyQueryResult { py: Python<'a>, row_factory: Py, custom_decoders: Option>, - ) -> RustPSQLDriverPyResult> { + ) -> PSQLPyResult> { let mut res: Vec> = vec![]; for row in &self.inner { let pydict: pyo3::Bound<'_, PyDict> = row_to_dict(py, row, &custom_decoders)?; @@ -155,7 +148,7 @@ impl PSQLDriverSinglePyQueryResult { &self, py: Python<'_>, custom_decoders: Option>, - ) -> RustPSQLDriverPyResult> { + ) -> PSQLPyResult> { Ok(row_to_dict(py, &self.inner, &custom_decoders)?.to_object(py)) } @@ -167,11 +160,7 @@ impl PSQLDriverSinglePyQueryResult { /// postgres type to python, can not create new Python class /// or there are no results. #[allow(clippy::needless_pass_by_value)] - pub fn as_class<'a>( - &'a self, - py: Python<'a>, - as_class: Py, - ) -> RustPSQLDriverPyResult> { + pub fn as_class<'a>(&'a self, py: Python<'a>, as_class: Py) -> PSQLPyResult> { let pydict: pyo3::Bound<'_, PyDict> = row_to_dict(py, &self.inner, &None)?; Ok(as_class.call(py, (), Some(&pydict))?) 
} @@ -189,7 +178,7 @@ impl PSQLDriverSinglePyQueryResult { py: Python<'a>, row_factory: Py, custom_decoders: Option>, - ) -> RustPSQLDriverPyResult> { + ) -> PSQLPyResult> { let pydict = row_to_dict(py, &self.inner, &custom_decoders)?.to_object(py); Ok(row_factory.call(py, (pydict,), None)?) } diff --git a/src/row_factories.rs b/src/row_factories.rs index 3a2d2de8..e867df0a 100644 --- a/src/row_factories.rs +++ b/src/row_factories.rs @@ -4,11 +4,11 @@ use pyo3::{ wrap_pyfunction, Bound, Py, PyAny, PyResult, Python, ToPyObject, }; -use crate::exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}; +use crate::exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}; #[pyfunction] #[allow(clippy::needless_pass_by_value)] -fn tuple_row(py: Python<'_>, dict_: Py) -> RustPSQLDriverPyResult> { +fn tuple_row(py: Python<'_>, dict_: Py) -> PSQLPyResult> { let dict_ = dict_.downcast_bound::(py).map_err(|_| { RustPSQLDriverError::RustToPyValueConversionError( "as_tuple accepts only dict as a parameter".into(), @@ -29,7 +29,7 @@ impl class_row { } #[allow(clippy::needless_pass_by_value)] - fn __call__(&self, py: Python<'_>, dict_: Py) -> RustPSQLDriverPyResult> { + fn __call__(&self, py: Python<'_>, dict_: Py) -> PSQLPyResult> { let dict_ = dict_.downcast_bound::(py).map_err(|_| { RustPSQLDriverError::RustToPyValueConversionError( "as_tuple accepts only dict as a parameter".into(), diff --git a/src/runtime.rs b/src/runtime.rs index 05889d99..ee6281de 100644 --- a/src/runtime.rs +++ b/src/runtime.rs @@ -1,7 +1,7 @@ use futures_util::Future; use pyo3::{IntoPyObject, Py, PyAny, Python}; -use crate::exceptions::rust_errors::RustPSQLDriverPyResult; +use crate::exceptions::rust_errors::PSQLPyResult; #[allow(clippy::missing_panics_doc)] #[allow(clippy::module_name_repetitions)] @@ -18,9 +18,9 @@ pub fn tokio_runtime() -> &'static tokio::runtime::Runtime { /// # Errors /// /// May return Err Result if future acts incorrect. 
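// Editor's note: illustrative sketch, not part of this patch (only the result alias
// is renamed in this hunk). Based on the signature below, `rustdriver_future` wraps
// any `Send + 'static` future resolving to `PSQLPyResult<T>` into a Python awaitable,
// with `T` converted through `IntoPyObject`. A hedged usage sketch:
fn answer_later(py: Python<'_>) -> PSQLPyResult<Py<PyAny>> {
    rustdriver_future(py, async move {
        // Any value convertible to a Python object can be returned from the future.
        Ok::<i32, RustPSQLDriverError>(42)
    })
}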
-pub fn rustdriver_future(py: Python<'_>, future: F) -> RustPSQLDriverPyResult> +pub fn rustdriver_future(py: Python<'_>, future: F) -> PSQLPyResult> where - F: Future> + Send + 'static, + F: Future> + Send + 'static, T: for<'py> IntoPyObject<'py>, { let res = diff --git a/src/statement/cache.rs b/src/statement/cache.rs new file mode 100644 index 00000000..a6fbc131 --- /dev/null +++ b/src/statement/cache.rs @@ -0,0 +1,50 @@ +use std::collections::HashMap; + +use once_cell::sync::Lazy; +use postgres_types::Type; +use tokio::sync::RwLock; +use tokio_postgres::Statement; + +use super::{query::QueryString, traits::hash_str}; + +#[derive(Default)] +pub(crate) struct StatementsCache(HashMap); + +impl StatementsCache { + pub fn add_cache(&mut self, query: &QueryString, inner_stmt: &Statement) { + self.0 + .insert(query.hash(), StatementCacheInfo::new(query, inner_stmt)); + } + + pub fn get_cache(&self, querystring: &String) -> Option { + let qs_hash = hash_str(&querystring); + + if let Some(cache_info) = self.0.get(&qs_hash) { + return Some(cache_info.clone()); + } + + None + } +} + +#[derive(Clone)] +pub(crate) struct StatementCacheInfo { + pub(crate) query: QueryString, + pub(crate) inner_stmt: Statement, +} + +impl StatementCacheInfo { + fn new(query: &QueryString, inner_stmt: &Statement) -> Self { + return Self { + query: query.clone(), + inner_stmt: inner_stmt.clone(), + }; + } + + pub(crate) fn types(&self) -> Vec { + self.inner_stmt.params().to_vec() + } +} + +pub(crate) static STMTS_CACHE: Lazy> = + Lazy::new(|| RwLock::new(Default::default())); diff --git a/src/statement/mod.rs b/src/statement/mod.rs new file mode 100644 index 00000000..e027eaea --- /dev/null +++ b/src/statement/mod.rs @@ -0,0 +1,7 @@ +pub mod cache; +pub mod parameters; +pub mod query; +pub mod statement; +pub mod statement_builder; +pub mod traits; +pub mod utils; diff --git a/src/statement/parameters.rs b/src/statement/parameters.rs new file mode 100644 index 00000000..baeded5d --- /dev/null +++ b/src/statement/parameters.rs @@ -0,0 +1,255 @@ +use std::iter::zip; + +use postgres_types::{ToSql, Type}; +use pyo3::{ + conversion::FromPyObjectBound, + types::{PyAnyMethods, PyMapping}, + Py, PyObject, PyTypeCheck, Python, +}; + +use crate::{ + exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, + value_converter::{dto::enums::PythonDTO, from_python::py_to_rust}, +}; + +pub type QueryParameter = (dyn ToSql + Sync); + +pub(crate) struct ParametersBuilder { + parameters: Option, + types: Option>, +} + +impl ParametersBuilder { + pub fn new(parameters: &Option, types: Option>) -> Self { + Self { + parameters: parameters.clone(), + types, + } + } + + pub fn prepare( + self, + parameters_names: Option>, + ) -> PSQLPyResult { + let prepared_parameters = + Python::with_gil(|gil| self.prepare_parameters(gil, parameters_names))?; + + Ok(prepared_parameters) + } + + fn prepare_parameters( + self, + gil: Python<'_>, + parameters_names: Option>, + ) -> PSQLPyResult { + if self.parameters.is_none() { + return Ok(PreparedParameters::default()); + } + + let sequence_typed = self.as_type::>(gil); + let mapping_typed = self.downcast_as::(gil); + let mut prepared_parameters: Option = None; + + match (sequence_typed, mapping_typed) { + (Some(sequence), None) => { + prepared_parameters = + Some(SequenceParametersBuilder::new(sequence, self.types).prepare(gil)?); + } + (None, Some(mapping)) => { + if let Some(parameters_names) = parameters_names { + prepared_parameters = Some( + MappingParametersBuilder::new(mapping, self.types) + 
.prepare(gil, parameters_names)?, + ) + } + } + _ => {} + } + + if let Some(prepared_parameters) = prepared_parameters { + return Ok(prepared_parameters); + } + + return Err(RustPSQLDriverError::PyToRustValueConversionError( + "Parameters must be sequence or mapping".into(), + )); + } + + fn as_type FromPyObjectBound<'a, 'py>>(&self, gil: Python<'_>) -> Option { + if let Some(parameters) = &self.parameters { + let extracted_param = parameters.extract::(gil); + + if let Ok(extracted_param) = extracted_param { + return Some(extracted_param); + } + + return None; + } + + None + } + + fn downcast_as(&self, gil: Python<'_>) -> Option> { + if let Some(parameters) = &self.parameters { + let extracted_param = parameters.downcast_bound::(gil); + + if let Ok(extracted_param) = extracted_param { + return Some(extracted_param.clone().unbind()); + } + + return None; + } + + None + } +} + +pub(crate) struct MappingParametersBuilder { + map_parameters: Py, + types: Option>, +} + +impl MappingParametersBuilder { + fn new(map_parameters: Py, types: Option>) -> Self { + Self { + map_parameters, + types, + } + } + + fn prepare( + self, + gil: Python<'_>, + parameters_names: Vec, + ) -> PSQLPyResult { + if self.types.is_some() { + return self.prepare_typed(gil, parameters_names); + } + + self.prepare_not_typed(gil, parameters_names) + } + + fn prepare_typed( + self, + gil: Python<'_>, + parameters_names: Vec, + ) -> PSQLPyResult { + let converted_parameters = self + .extract_parameters(gil, parameters_names)? + .iter() + .map(|parameter| py_to_rust(parameter.bind(gil))) + .collect::>>()?; + + Ok(PreparedParameters::new(converted_parameters, vec![])) // TODO: change vec![] to real types. + } + + fn prepare_not_typed( + self, + gil: Python<'_>, + parameters_names: Vec, + ) -> PSQLPyResult { + let converted_parameters = self + .extract_parameters(gil, parameters_names)? + .iter() + .map(|parameter| py_to_rust(parameter.bind(gil))) + .collect::>>()?; + + Ok(PreparedParameters::new(converted_parameters, vec![])) // TODO: change vec![] to real types. + } + + fn extract_parameters( + &self, + gil: Python<'_>, + parameters_names: Vec, + ) -> PSQLPyResult> { + let mut params_as_pyobject: Vec = vec![]; + + for param_name in parameters_names { + match self.map_parameters.bind(gil).get_item(¶m_name) { + Ok(param_value) => params_as_pyobject.push(param_value.unbind()), + Err(_) => { + return Err(RustPSQLDriverError::PyToRustValueConversionError( + format!("Cannot find parameter with name <{}>", param_name).into(), + )) + } + } + } + + Ok(params_as_pyobject) + } +} + +pub(crate) struct SequenceParametersBuilder { + seq_parameters: Vec, + types: Option>, +} + +impl SequenceParametersBuilder { + fn new(seq_parameters: Vec, types: Option>) -> Self { + Self { + seq_parameters: seq_parameters, + types, + } + } + + fn prepare(self, gil: Python<'_>) -> PSQLPyResult { + if self.types.is_some() { + return self.prepare_typed(gil); + } + + self.prepare_not_typed(gil) + } + + fn prepare_typed(self, gil: Python<'_>) -> PSQLPyResult { + let converted_parameters = self + .seq_parameters + .iter() + .map(|parameter| py_to_rust(parameter.bind(gil))) + .collect::>>()?; + + Ok(PreparedParameters::new(converted_parameters, vec![])) // TODO: change vec![] to real types. 
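// Editor's note (annotation only, not part of this patch): both parameter builders
// in this new module funnel every Python value through `py_to_rust` and, for now,
// pass `vec![]` instead of real Postgres types; the repeated TODOs above and below
// mark that typed binding is not wired up yet. Dispatch happens once in
// `ParametersBuilder::prepare_parameters`: a value that extracts as a plain Python
// sequence goes to `SequenceParametersBuilder`, a `PyMapping` goes to
// `MappingParametersBuilder` (which needs the parameter names recovered from the
// `$(name)p` placeholders, see `QueryString::process_qs` below), and anything else
// raises `PyToRustValueConversionError("Parameters must be sequence or mapping")`.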
+ + // Ok(prepared_parameters) // TODO: put there normal convert with types + } + + fn prepare_not_typed(self, gil: Python<'_>) -> PSQLPyResult { + let converted_parameters = self + .seq_parameters + .iter() + .map(|parameter| py_to_rust(parameter.bind(gil))) + .collect::>>()?; + + Ok(PreparedParameters::new(converted_parameters, vec![])) // TODO: change vec![] to real types. + } +} + +#[derive(Default, Clone, Debug)] +pub struct PreparedParameters { + parameters: Vec, + types: Vec, +} + +impl PreparedParameters { + pub fn new(parameters: Vec, types: Vec) -> Self { + Self { parameters, types } + } + + pub fn params(&self) -> Box<[&(dyn ToSql + Sync)]> { + let params_ref = &self.parameters; + params_ref + .iter() + .map(|param| param as &QueryParameter) + .collect::>() + .into_boxed_slice() + } + + pub fn params_typed(&self) -> Box<[(&(dyn ToSql + Sync), Type)]> { + let params_ref = &self.parameters; + let types = self.types.clone(); + let params_types = zip(params_ref, types); + params_types + .map(|(param, type_)| (param as &QueryParameter, type_)) + .collect::>() + .into_boxed_slice() + } +} diff --git a/src/statement/query.rs b/src/statement/query.rs new file mode 100644 index 00000000..7f87cede --- /dev/null +++ b/src/statement/query.rs @@ -0,0 +1,92 @@ +use std::fmt::Display; + +use regex::Regex; + +use crate::value_converter::consts::KWARGS_PARAMS_REGEXP; + +use super::traits::hash_str; + +#[derive(Clone)] +pub struct QueryString { + pub(crate) initial_qs: String, + // This field are used when kwargs passed + // from python side as parameters. + pub(crate) converted_qs: Option, +} + +impl Display for QueryString { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.query()) + } +} + +impl QueryString { + pub fn new(initial_qs: &String) -> Self { + return Self { + initial_qs: initial_qs.clone(), + converted_qs: None, + }; + } + + pub(crate) fn query(&self) -> &str { + if let Some(converted_qs) = &self.converted_qs { + return converted_qs.query(); + } + + return &self.initial_qs; + } + + pub(crate) fn hash(&self) -> u64 { + hash_str(&self.initial_qs) + } + + pub(crate) fn process_qs(&mut self) { + if !self.is_kwargs_parametrized() { + return (); + } + + let mut counter = 0; + let mut parameters_names = Vec::new(); + + let re = Regex::new(KWARGS_PARAMS_REGEXP).unwrap(); + let result = re.replace_all(&self.initial_qs, |caps: ®ex::Captures| { + let parameter_idx = caps[1].to_string(); + + parameters_names.push(parameter_idx.clone()); + counter += 1; + + format!("${}", &counter) + }); + + self.converted_qs = Some(ConvertedQueryString::new(result.into(), parameters_names)); + } + + fn is_kwargs_parametrized(&self) -> bool { + Regex::new(KWARGS_PARAMS_REGEXP) + .unwrap() + .is_match(&self.initial_qs) + } +} + +#[derive(Clone)] +pub(crate) struct ConvertedQueryString { + converted_qs: String, + params_names: Vec, +} + +impl ConvertedQueryString { + fn new(converted_qs: String, params_names: Vec) -> Self { + Self { + converted_qs, + params_names, + } + } + + fn query(&self) -> &str { + &self.converted_qs + } + + pub(crate) fn params_names(&self) -> &Vec { + &self.params_names + } +} diff --git a/src/statement/statement.rs b/src/statement/statement.rs new file mode 100644 index 00000000..4c3a6e9b --- /dev/null +++ b/src/statement/statement.rs @@ -0,0 +1,30 @@ +use postgres_types::{ToSql, Type}; + +use super::{parameters::PreparedParameters, query::QueryString}; + +#[derive(Clone)] +pub struct PsqlpyStatement { + query: QueryString, + 
prepared_parameters: PreparedParameters, +} + +impl PsqlpyStatement { + pub fn new(query: QueryString, prepared_parameters: PreparedParameters) -> Self { + Self { + query, + prepared_parameters, + } + } + + pub fn sql_stmt(&self) -> &str { + self.query.query() + } + + pub fn params(&self) -> Box<[&(dyn ToSql + Sync)]> { + self.prepared_parameters.params() + } + + pub fn params_typed(&self) -> Box<[(&(dyn ToSql + Sync), Type)]> { + self.prepared_parameters.params_typed() + } +} diff --git a/src/statement/statement_builder.rs b/src/statement/statement_builder.rs new file mode 100644 index 00000000..07e003da --- /dev/null +++ b/src/statement/statement_builder.rs @@ -0,0 +1,100 @@ +use pyo3::PyObject; +use tokio_postgres::Statement; + +use crate::{driver::inner_connection::PsqlpyConnection, exceptions::rust_errors::PSQLPyResult}; + +use super::{ + cache::{StatementCacheInfo, STMTS_CACHE}, + parameters::ParametersBuilder, + query::QueryString, + statement::PsqlpyStatement, +}; + +pub struct StatementBuilder<'a> { + querystring: String, + parameters: Option, + inner_conn: &'a PsqlpyConnection, + prepared: bool, +} + +impl<'a> StatementBuilder<'a> { + pub fn new( + querystring: String, + parameters: Option, + inner_conn: &'a PsqlpyConnection, + prepared: Option, + ) -> Self { + Self { + querystring, + parameters, + inner_conn, + prepared: prepared.unwrap_or(true), + } + } + + pub async fn build(self) -> PSQLPyResult { + { + let stmt_cache_guard = STMTS_CACHE.read().await; + if let Some(cached) = stmt_cache_guard.get_cache(&self.querystring) { + return self.build_with_cached(cached); + } + } + + self.build_no_cached().await + } + + fn build_with_cached(self, cached: StatementCacheInfo) -> PSQLPyResult { + let raw_parameters = ParametersBuilder::new(&self.parameters, Some(cached.types())); + + let parameters_names = if let Some(converted_qs) = &cached.query.converted_qs { + Some(converted_qs.params_names().clone()) + } else { + None + }; + + let prepared_parameters = raw_parameters.prepare(parameters_names)?; + + return Ok(PsqlpyStatement::new(cached.query, prepared_parameters)); + } + + async fn build_no_cached(self) -> PSQLPyResult { + let mut querystring = QueryString::new(&self.querystring); + querystring.process_qs(); + + let prepared_stmt = self.prepare_query(&querystring).await?; + let parameters_builder = + ParametersBuilder::new(&self.parameters, Some(prepared_stmt.params().to_vec())); + + if !self.prepared { + Self::drop_prepared(self.inner_conn, &prepared_stmt).await?; + } + + let parameters_names = if let Some(converted_qs) = &querystring.converted_qs { + Some(converted_qs.params_names().clone()) + } else { + None + }; + + let prepared_parameters = parameters_builder.prepare(parameters_names)?; + + { + self.write_to_cache(&querystring, &prepared_stmt).await; + } + let statement = PsqlpyStatement::new(querystring, prepared_parameters); + + return Ok(statement); + } + + async fn write_to_cache(&self, query: &QueryString, inner_stmt: &Statement) { + let mut stmt_cache_guard = STMTS_CACHE.write().await; + stmt_cache_guard.add_cache(query, inner_stmt); + } + + async fn prepare_query(&self, query: &QueryString) -> PSQLPyResult { + self.inner_conn.prepare(query.query()).await + } + + async fn drop_prepared(inner_conn: &PsqlpyConnection, stmt: &Statement) -> PSQLPyResult<()> { + inner_conn.drop_prepared(stmt).await + } +} diff --git a/src/statement/traits.rs b/src/statement/traits.rs new file mode 100644 index 00000000..a79f8bdd --- /dev/null +++ b/src/statement/traits.rs @@ -0,0 +1,8 @@ +use 
std::hash::{DefaultHasher, Hash, Hasher}; + +pub(crate) fn hash_str(string: &String) -> u64 { + let mut hasher = DefaultHasher::new(); + string.hash(&mut hasher); + + hasher.finish() +} diff --git a/src/statement/utils.rs b/src/statement/utils.rs new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/src/statement/utils.rs @@ -0,0 +1 @@ + diff --git a/src/value_converter/consts.rs b/src/value_converter/consts.rs index 40fa932b..e5ff56e4 100644 --- a/src/value_converter/consts.rs +++ b/src/value_converter/consts.rs @@ -8,6 +8,8 @@ use pyo3::{ Bound, Py, PyResult, Python, }; +pub static KWARGS_PARAMS_REGEXP: &str = r"\$\(([^)]+)\)p"; + pub static DECIMAL_CLS: GILOnceCell> = GILOnceCell::new(); pub static TIMEDELTA_CLS: GILOnceCell> = GILOnceCell::new(); pub static KWARGS_QUERYSTRINGS: Lazy)>>> = diff --git a/src/value_converter/dto/converter_impls.rs b/src/value_converter/dto/converter_impls.rs index 97675af3..64948f29 100644 --- a/src/value_converter/dto/converter_impls.rs +++ b/src/value_converter/dto/converter_impls.rs @@ -10,30 +10,28 @@ use rust_decimal::Decimal; use uuid::Uuid; use crate::{ - exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, + exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, extra_types::{self, PythonDecimal, PythonUUID}, value_converter::{ additional_types::NonePyType, - funcs::from_python::{ - extract_datetime_from_python_object_attrs, py_sequence_into_postgres_array, - }, + from_python::{extract_datetime_from_python_object_attrs, py_sequence_into_postgres_array}, models::serde_value::build_serde_value, - traits::PythonToDTO, + traits::ToPythonDTO, }, }; use super::enums::PythonDTO; -impl PythonToDTO for NonePyType { - fn to_python_dto(_python_param: &pyo3::Bound<'_, PyAny>) -> RustPSQLDriverPyResult { +impl ToPythonDTO for NonePyType { + fn to_python_dto(_python_param: &pyo3::Bound<'_, PyAny>) -> PSQLPyResult { Ok(PythonDTO::PyNone) } } macro_rules! 
construct_simple_type_matcher { ($match_type:ty, $kind:path) => { - impl PythonToDTO for $match_type { - fn to_python_dto(python_param: &Bound<'_, PyAny>) -> RustPSQLDriverPyResult { + impl ToPythonDTO for $match_type { + fn to_python_dto(python_param: &Bound<'_, PyAny>) -> PSQLPyResult { Ok($kind(python_param.extract::<$match_type>()?)) } } @@ -51,8 +49,8 @@ construct_simple_type_matcher!(i64, PythonDTO::PyIntI64); construct_simple_type_matcher!(NaiveDate, PythonDTO::PyDate); construct_simple_type_matcher!(NaiveTime, PythonDTO::PyTime); -impl PythonToDTO for PyDateTime { - fn to_python_dto(python_param: &pyo3::Bound<'_, PyAny>) -> RustPSQLDriverPyResult { +impl ToPythonDTO for PyDateTime { + fn to_python_dto(python_param: &pyo3::Bound<'_, PyAny>) -> PSQLPyResult { let timestamp_tz = python_param.extract::>(); if let Ok(pydatetime_tz) = timestamp_tz { return Ok(PythonDTO::PyDateTimeTz(pydatetime_tz)); @@ -74,8 +72,8 @@ impl PythonToDTO for PyDateTime { } } -impl PythonToDTO for PyDelta { - fn to_python_dto(python_param: &pyo3::Bound<'_, PyAny>) -> RustPSQLDriverPyResult { +impl ToPythonDTO for PyDelta { + fn to_python_dto(python_param: &pyo3::Bound<'_, PyAny>) -> PSQLPyResult { let duration = python_param.extract::()?; if let Some(interval) = Interval::from_duration(duration) { return Ok(PythonDTO::PyInterval(interval)); @@ -86,8 +84,8 @@ impl PythonToDTO for PyDelta { } } -impl PythonToDTO for PyDict { - fn to_python_dto(python_param: &pyo3::Bound<'_, PyAny>) -> RustPSQLDriverPyResult { +impl ToPythonDTO for PyDict { + fn to_python_dto(python_param: &pyo3::Bound<'_, PyAny>) -> PSQLPyResult { let serde_value = build_serde_value(python_param)?; return Ok(PythonDTO::PyJsonb(serde_value)); @@ -96,14 +94,22 @@ impl PythonToDTO for PyDict { macro_rules! 
construct_extra_type_matcher { ($match_type:ty, $kind:path) => { - impl PythonToDTO for $match_type { - fn to_python_dto(python_param: &Bound<'_, PyAny>) -> RustPSQLDriverPyResult { + impl ToPythonDTO for $match_type { + fn to_python_dto(python_param: &Bound<'_, PyAny>) -> PSQLPyResult { Ok($kind(python_param.extract::<$match_type>()?.inner())) } } }; } +construct_extra_type_matcher!(extra_types::Text, PythonDTO::PyText); +construct_extra_type_matcher!(extra_types::VarChar, PythonDTO::PyVarChar); +construct_extra_type_matcher!(extra_types::SmallInt, PythonDTO::PyIntI16); +construct_extra_type_matcher!(extra_types::Integer, PythonDTO::PyIntI32); +construct_extra_type_matcher!(extra_types::BigInt, PythonDTO::PyIntI64); +construct_extra_type_matcher!(extra_types::Float32, PythonDTO::PyFloat32); +construct_extra_type_matcher!(extra_types::Float64, PythonDTO::PyFloat64); +construct_extra_type_matcher!(extra_types::Money, PythonDTO::PyMoney); construct_extra_type_matcher!(extra_types::JSONB, PythonDTO::PyJsonb); construct_extra_type_matcher!(extra_types::JSON, PythonDTO::PyJson); construct_extra_type_matcher!(extra_types::MacAddr6, PythonDTO::PyMacAddr6); @@ -114,33 +120,34 @@ construct_extra_type_matcher!(extra_types::Path, PythonDTO::PyPath); construct_extra_type_matcher!(extra_types::Line, PythonDTO::PyLine); construct_extra_type_matcher!(extra_types::LineSegment, PythonDTO::PyLineSegment); construct_extra_type_matcher!(extra_types::Circle, PythonDTO::PyCircle); +construct_extra_type_matcher!(extra_types::PgVector, PythonDTO::PyPgVector); -impl PythonToDTO for PythonDecimal { - fn to_python_dto(python_param: &pyo3::Bound<'_, PyAny>) -> RustPSQLDriverPyResult { +impl ToPythonDTO for PythonDecimal { + fn to_python_dto(python_param: &pyo3::Bound<'_, PyAny>) -> PSQLPyResult { Ok(PythonDTO::PyDecimal(Decimal::from_str_exact( python_param.str()?.extract::<&str>()?, )?)) } } -impl PythonToDTO for PythonUUID { - fn to_python_dto(python_param: &pyo3::Bound<'_, PyAny>) -> RustPSQLDriverPyResult { +impl ToPythonDTO for PythonUUID { + fn to_python_dto(python_param: &pyo3::Bound<'_, PyAny>) -> PSQLPyResult { Ok(PythonDTO::PyUUID(Uuid::parse_str( python_param.str()?.extract::<&str>()?, )?)) } } -impl PythonToDTO for extra_types::PythonArray { - fn to_python_dto(python_param: &pyo3::Bound<'_, PyAny>) -> RustPSQLDriverPyResult { +impl ToPythonDTO for extra_types::PythonArray { + fn to_python_dto(python_param: &pyo3::Bound<'_, PyAny>) -> PSQLPyResult { Ok(PythonDTO::PyArray(py_sequence_into_postgres_array( python_param, )?)) } } -impl PythonToDTO for IpAddr { - fn to_python_dto(python_param: &pyo3::Bound<'_, PyAny>) -> RustPSQLDriverPyResult { +impl ToPythonDTO for IpAddr { + fn to_python_dto(python_param: &pyo3::Bound<'_, PyAny>) -> PSQLPyResult { if let Ok(id_address) = python_param.extract::() { return Ok(PythonDTO::PyIpAddress(id_address)); } @@ -151,8 +158,8 @@ impl PythonToDTO for IpAddr { } } -impl PythonToDTO for extra_types::PythonEnum { - fn to_python_dto(python_param: &pyo3::Bound<'_, PyAny>) -> RustPSQLDriverPyResult { +impl ToPythonDTO for extra_types::PythonEnum { + fn to_python_dto(python_param: &pyo3::Bound<'_, PyAny>) -> PSQLPyResult { let string = python_param.extract::()?; return Ok(PythonDTO::PyString(string)); } @@ -160,8 +167,8 @@ impl PythonToDTO for extra_types::PythonEnum { macro_rules! 
construct_array_type_matcher { ($match_type:ty) => { - impl PythonToDTO for $match_type { - fn to_python_dto(python_param: &Bound<'_, PyAny>) -> RustPSQLDriverPyResult { + impl ToPythonDTO for $match_type { + fn to_python_dto(python_param: &Bound<'_, PyAny>) -> PSQLPyResult { python_param .extract::<$match_type>()? ._convert_to_python_dto() diff --git a/src/value_converter/dto/impls.rs b/src/value_converter/dto/impls.rs index b634d8b8..58debfdc 100644 --- a/src/value_converter/dto/impls.rs +++ b/src/value_converter/dto/impls.rs @@ -12,7 +12,7 @@ use pyo3::{Bound, IntoPyObject, PyAny, Python}; use tokio_postgres::types::{to_sql_checked, Type}; use crate::{ - exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, + exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, value_converter::{ additional_types::{Circle, Line, RustLineSegment, RustLineString, RustPoint, RustRect}, models::serde_value::pythondto_array_to_serde, @@ -53,7 +53,7 @@ impl PythonDTO { /// /// # Errors /// May return Err Result if there is no support for passed python type. - pub fn array_type(&self) -> RustPSQLDriverPyResult { + pub fn array_type(&self) -> PSQLPyResult { match self { PythonDTO::PyBool(_) => Ok(tokio_postgres::types::Type::BOOL_ARRAY), PythonDTO::PyUUID(_) => Ok(tokio_postgres::types::Type::UUID_ARRAY), @@ -96,7 +96,7 @@ impl PythonDTO { /// /// # Errors /// May return Err Result if cannot convert python type into rust. - pub fn to_serde_value(&self) -> RustPSQLDriverPyResult { + pub fn to_serde_value(&self) -> PSQLPyResult { match self { PythonDTO::PyNone => Ok(Value::Null), PythonDTO::PyBool(pybool) => Ok(json!(pybool)), diff --git a/src/value_converter/funcs/from_python.rs b/src/value_converter/from_python.rs similarity index 64% rename from src/value_converter/funcs/from_python.rs rename to src/value_converter/from_python.rs index adad8879..b104c993 100644 --- a/src/value_converter/funcs/from_python.rs +++ b/src/value_converter/from_python.rs @@ -18,7 +18,7 @@ use pyo3::{ }; use crate::{ - exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, + exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, extra_types::{self}, value_converter::{ consts::KWARGS_QUERYSTRINGS, dto::enums::PythonDTO, @@ -26,6 +26,8 @@ use crate::{ }, }; +use super::{additional_types::NonePyType, consts::KWARGS_PARAMS_REGEXP, traits::ToPythonDTO}; + /// Convert single python parameter to `PythonDTO` enum. /// /// # Errors @@ -33,9 +35,9 @@ use crate::{ /// May return Err Result if python type doesn't have support yet /// or value of the type is incorrect. 
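// Editor's note: illustrative reconstruction, not part of this patch. The macros in
// the hunk above generate one `ToPythonDTO` impl per supported type; for a plain
// `bool`, `construct_simple_type_matcher!` would expand to roughly:
impl ToPythonDTO for bool {
    fn to_python_dto(python_param: &Bound<'_, PyAny>) -> PSQLPyResult<PythonDTO> {
        Ok(PythonDTO::PyBool(python_param.extract::<bool>()?))
    }
}
// ...and the rewritten `py_to_rust` below then dispatches to it with a line of the
// form `return <bool as ToPythonDTO>::to_python_dto(parameter);` instead of the old
// inline `parameter.extract::<bool>()` conversions.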
#[allow(clippy::too_many_lines)] -pub fn py_to_rust(parameter: &pyo3::Bound<'_, PyAny>) -> RustPSQLDriverPyResult { +pub fn py_to_rust(parameter: &pyo3::Bound<'_, PyAny>) -> PSQLPyResult { if parameter.is_none() { - return Ok(PythonDTO::PyNone); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { @@ -45,387 +47,251 @@ pub fn py_to_rust(parameter: &pyo3::Bound<'_, PyAny>) -> RustPSQLDriverPyResult< } if parameter.is_instance_of::() { - return Ok(PythonDTO::PyBool(parameter.extract::()?)); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return Ok(PythonDTO::PyBytes(parameter.extract::>()?)); + return as ToPythonDTO>::to_python_dto(parameter); } if parameter.is_instance_of::() { - return Ok(PythonDTO::PyText( - parameter.extract::()?.inner(), - )); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return Ok(PythonDTO::PyVarChar( - parameter.extract::()?.inner(), - )); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return Ok(PythonDTO::PyString(parameter.extract::()?)); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return Ok(PythonDTO::PyFloat64(parameter.extract::()?)); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return Ok(PythonDTO::PyFloat32( - parameter.extract::()?.inner(), - )); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return Ok(PythonDTO::PyFloat64( - parameter.extract::()?.inner(), - )); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return Ok(PythonDTO::PyIntI16( - parameter.extract::()?.inner(), - )); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return Ok(PythonDTO::PyIntI32( - parameter.extract::()?.inner(), - )); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return Ok(PythonDTO::PyIntI64( - parameter.extract::()?.inner(), - )); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return Ok(PythonDTO::PyMoney( - parameter.extract::()?.inner(), - )); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return Ok(PythonDTO::PyIntI32(parameter.extract::()?)); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - let timestamp_tz = parameter.extract::>(); - if let Ok(pydatetime_tz) = timestamp_tz { - return Ok(PythonDTO::PyDateTimeTz(pydatetime_tz)); - } - - let timestamp_no_tz = parameter.extract::(); - if let Ok(pydatetime_no_tz) = timestamp_no_tz { - return Ok(PythonDTO::PyDateTime(pydatetime_no_tz)); - } - - let timestamp_tz = extract_datetime_from_python_object_attrs(parameter); - if let Ok(pydatetime_tz) = timestamp_tz { - return Ok(PythonDTO::PyDateTimeTz(pydatetime_tz)); - } - - return Err(RustPSQLDriverError::PyToRustValueConversionError( - "Can not convert you datetime to rust type".into(), - )); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return Ok(PythonDTO::PyDate(parameter.extract::()?)); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return Ok(PythonDTO::PyTime(parameter.extract::()?)); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - let duration = parameter.extract::()?; - if let Some(interval) = Interval::from_duration(duration) { - return Ok(PythonDTO::PyInterval(interval)); - } - return Err(RustPSQLDriverError::PyToRustValueConversionError( - "Cannot convert timedelta from Python to inner Rust type.".to_string(), - )); + return 
::to_python_dto(parameter); } if parameter.is_instance_of::() | parameter.is_instance_of::() { - return Ok(PythonDTO::PyArray(py_sequence_into_postgres_array( - parameter, - )?)); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - let dict = parameter.downcast::().map_err(|error| { - RustPSQLDriverError::PyToRustValueConversionError(format!( - "Can't cast to inner dict: {error}" - )) - })?; - - let mut serde_map: Map = Map::new(); - - for dict_item in dict.items() { - let py_list = dict_item.downcast::().map_err(|error| { - RustPSQLDriverError::PyToRustValueConversionError(format!( - "Cannot cast to list: {error}" - )) - })?; - - let key = py_list.get_item(0)?.extract::()?; - let value = py_to_rust(&py_list.get_item(1)?)?; - - serde_map.insert(key, value.to_serde_value()?); - } - - return Ok(PythonDTO::PyJsonb(Value::Object(serde_map))); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return Ok(PythonDTO::PyJsonb( - parameter.extract::()?.inner(), - )); + return ::to_python_dto(parameter); + // return Ok(PythonDTO::PyJsonb( + // parameter.extract::()?.inner(), + // )); } if parameter.is_instance_of::() { - return Ok(PythonDTO::PyJson( - parameter.extract::()?.inner(), - )); + return ::to_python_dto(parameter); + // return Ok(PythonDTO::PyJson( + // parameter.extract::()?.inner(), + // )); } if parameter.is_instance_of::() { - return Ok(PythonDTO::PyMacAddr6( - parameter.extract::()?.inner(), - )); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return Ok(PythonDTO::PyMacAddr8( - parameter.extract::()?.inner(), - )); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return Ok(PythonDTO::PyPoint( - parameter.extract::()?.inner(), - )); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return Ok(PythonDTO::PyBox( - parameter.extract::()?.inner(), - )); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return Ok(PythonDTO::PyPath( - parameter.extract::()?.inner(), - )); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return Ok(PythonDTO::PyLine( - parameter.extract::()?.inner(), - )); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return Ok(PythonDTO::PyLineSegment( - parameter.extract::()?.inner(), - )); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return Ok(PythonDTO::PyCircle( - parameter.extract::()?.inner(), - )); + return ::to_python_dto(parameter); } if parameter.get_type().name()? == "UUID" { - return Ok(PythonDTO::PyUUID(Uuid::parse_str( - parameter.str()?.extract::<&str>()?, - )?)); + return ::to_python_dto(parameter); } if parameter.get_type().name()? == "decimal.Decimal" || parameter.get_type().name()? == "Decimal" { - return Ok(PythonDTO::PyDecimal(Decimal::from_str_exact( - parameter.str()?.extract::<&str>()?, - )?)); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return parameter - .extract::()? 
- ._convert_to_python_dto(); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return parameter - .extract::()? - ._convert_to_python_dto(); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return parameter - .extract::()? 
- ._convert_to_python_dto(); + return ::to_python_dto(parameter); } if parameter.is_instance_of::() { - return Ok(PythonDTO::PyPgVector( - parameter.extract::()?.inner(), - )); + return ::to_python_dto(parameter); } - if let Ok(id_address) = parameter.extract::() { - return Ok(PythonDTO::PyIpAddress(id_address)); + if let Ok(_) = parameter.extract::() { + return ::to_python_dto(parameter); } // It's used for Enum. @@ -502,9 +368,7 @@ pub fn extract_datetime_from_python_object_attrs( /// May return Err Result if cannot convert at least one element. #[allow(clippy::cast_possible_truncation)] #[allow(clippy::cast_possible_wrap)] -pub fn py_sequence_into_postgres_array( - parameter: &Bound, -) -> RustPSQLDriverPyResult> { +pub fn py_sequence_into_postgres_array(parameter: &Bound) -> PSQLPyResult> { let mut py_seq = parameter .downcast::() .map_err(|_| { @@ -562,9 +426,7 @@ pub fn py_sequence_into_postgres_array( /// /// # Errors /// May return Err Result if cannot convert element into Rust one. -pub fn py_sequence_into_flat_vec( - parameter: &Bound, -) -> RustPSQLDriverPyResult> { +pub fn py_sequence_into_flat_vec(parameter: &Bound) -> PSQLPyResult> { let py_seq = parameter.downcast::().map_err(|_| { RustPSQLDriverError::PyToRustValueConversionError( "PostgreSQL ARRAY type can be made only from python Sequence".into(), @@ -597,85 +459,6 @@ pub fn py_sequence_into_flat_vec( Ok(final_vec) } -/// Convert parameters come from python. -/// -/// Parameters for `execute()` method can be either -/// a list or a tuple or a set. -/// -/// We parse every parameter from python object and return -/// Vector of out `PythonDTO`. -/// -/// # Errors -/// -/// May return Err Result if can't convert python object. -#[allow(clippy::needless_pass_by_value)] -pub fn convert_parameters_and_qs( - querystring: String, - parameters: Option>, -) -> RustPSQLDriverPyResult<(String, Vec)> { - let Some(parameters) = parameters else { - return Ok((querystring, vec![])); - }; - - let res = Python::with_gil(|gil| { - let params = parameters.extract::>>(gil).map_err(|_| { - RustPSQLDriverError::PyToRustValueConversionError( - "Cannot convert you parameters argument into Rust type, please use List/Tuple" - .into(), - ) - }); - if let Ok(params) = params { - return Ok((querystring, convert_seq_parameters(params)?)); - } - - let kw_params = parameters.downcast_bound::(gil); - if let Ok(kw_params) = kw_params { - return convert_kwargs_parameters(kw_params, &querystring); - } - - Err(RustPSQLDriverError::PyToRustValueConversionError( - "Parameters must be sequence or mapping".into(), - )) - })?; - - Ok(res) -} - -pub fn convert_kwargs_parameters<'a>( - kw_params: &Bound<'_, PyMapping>, - querystring: &'a str, -) -> RustPSQLDriverPyResult<(String, Vec)> { - let mut result_vec: Vec = vec![]; - let (changed_string, params_names) = parse_kwargs_qs(querystring); - - for param_name in params_names { - match kw_params.get_item(¶m_name) { - Ok(param) => result_vec.push(py_to_rust(¶m)?), - Err(_) => { - return Err(RustPSQLDriverError::PyToRustValueConversionError( - format!("Cannot find parameter with name <{param_name}> in parameters").into(), - )) - } - } - } - - Ok((changed_string, result_vec)) -} - -pub fn convert_seq_parameters( - seq_params: Vec>, -) -> RustPSQLDriverPyResult> { - let mut result_vec: Vec = vec![]; - Python::with_gil(|gil| { - for parameter in seq_params { - result_vec.push(py_to_rust(parameter.bind(gil))?); - } - Ok::<(), RustPSQLDriverError>(()) - })?; - - Ok(result_vec) -} - /// Convert two python parameters(x and y) 
to Coord from `geo_type`. /// Also it checks that passed values is int or float. /// @@ -683,7 +466,7 @@ pub fn convert_seq_parameters( /// /// May return error if cannot convert Python type into Rust one. /// May return error if parameters type isn't correct. -fn convert_py_to_rust_coord_values(parameters: Vec>) -> RustPSQLDriverPyResult> { +fn convert_py_to_rust_coord_values(parameters: Vec>) -> PSQLPyResult> { Python::with_gil(|gil| { let mut coord_values_vec: Vec = vec![]; @@ -737,7 +520,7 @@ fn convert_py_to_rust_coord_values(parameters: Vec>) -> RustPSQLDriver pub fn build_geo_coords( py_parameters: Py, allowed_length_option: Option, -) -> RustPSQLDriverPyResult> { +) -> PSQLPyResult> { let mut result_vec: Vec = vec![]; result_vec = Python::with_gil(|gil| { @@ -811,7 +594,7 @@ pub fn build_geo_coords( pub fn build_flat_geo_coords( py_parameters: Py, allowed_length_option: Option, -) -> RustPSQLDriverPyResult> { +) -> PSQLPyResult> { Python::with_gil(|gil| { let allowed_length = allowed_length_option.unwrap_or_default(); @@ -845,7 +628,7 @@ pub fn build_flat_geo_coords( /// /// May return error if cannot convert Python type into Rust one. /// May return error if parameters type isn't correct. -fn py_sequence_to_rust(bind_parameters: &Bound) -> RustPSQLDriverPyResult>> { +fn py_sequence_to_rust(bind_parameters: &Bound) -> PSQLPyResult>> { let mut coord_values_sequence_vec: Vec> = vec![]; if bind_parameters.is_instance_of::() { @@ -877,7 +660,7 @@ fn py_sequence_to_rust(bind_parameters: &Bound) -> RustPSQLDriverPyResult } fn parse_kwargs_qs(querystring: &str) -> (String, Vec) { - let re = regex::Regex::new(r"\$\(([^)]+)\)p").unwrap(); + let re = regex::Regex::new(KWARGS_PARAMS_REGEXP).unwrap(); { let kq_read = KWARGS_QUERYSTRINGS.read().unwrap(); diff --git a/src/value_converter/funcs/mod.rs b/src/value_converter/funcs/mod.rs deleted file mode 100644 index 4db4cd38..00000000 --- a/src/value_converter/funcs/mod.rs +++ /dev/null @@ -1,2 +0,0 @@ -pub mod from_python; -pub mod to_python; diff --git a/src/value_converter/mod.rs b/src/value_converter/mod.rs index 7d08bf3f..41c42284 100644 --- a/src/value_converter/mod.rs +++ b/src/value_converter/mod.rs @@ -1,7 +1,8 @@ pub mod additional_types; pub mod consts; pub mod dto; -pub mod funcs; +pub mod from_python; pub mod models; +pub mod to_python; pub mod traits; pub mod utils; diff --git a/src/value_converter/models/serde_value.rs b/src/value_converter/models/serde_value.rs index 0bf6652f..71239c2b 100644 --- a/src/value_converter/models/serde_value.rs +++ b/src/value_converter/models/serde_value.rs @@ -1,7 +1,6 @@ use postgres_array::{Array, Dimension}; use postgres_types::FromSql; use serde_json::{json, Map, Value}; -use uuid::Uuid; use pyo3::{ types::{PyAnyMethods, PyDict, PyDictMethods, PyList, PyTuple}, @@ -10,10 +9,9 @@ use pyo3::{ use tokio_postgres::types::Type; use crate::{ - exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, + exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, value_converter::{ - dto::enums::PythonDTO, - funcs::{from_python::py_to_rust, to_python::build_python_from_serde_value}, + dto::enums::PythonDTO, from_python::py_to_rust, to_python::build_python_from_serde_value, }, }; @@ -54,10 +52,7 @@ impl<'a> FromSql<'a> for InternalSerdeValue { } } -fn serde_value_from_list( - gil: Python<'_>, - bind_value: &Bound<'_, PyAny>, -) -> RustPSQLDriverPyResult { +fn serde_value_from_list(gil: Python<'_>, bind_value: &Bound<'_, PyAny>) -> PSQLPyResult { let mut result_vec: Vec = vec![]; let params 
= bind_value.extract::>>()?; @@ -79,7 +74,7 @@ fn serde_value_from_list( Ok(json!(result_vec)) } -fn serde_value_from_dict(bind_value: &Bound<'_, PyAny>) -> RustPSQLDriverPyResult { +fn serde_value_from_dict(bind_value: &Bound<'_, PyAny>) -> PSQLPyResult { let dict = bind_value.downcast::().map_err(|error| { RustPSQLDriverError::PyToRustValueConversionError(format!( "Can't cast to inner dict: {error}" @@ -109,7 +104,7 @@ fn serde_value_from_dict(bind_value: &Bound<'_, PyAny>) -> RustPSQLDriverPyResul /// # Errors /// May return error if cannot convert Python type into Rust one. #[allow(clippy::needless_pass_by_value)] -pub fn build_serde_value(value: &Bound<'_, PyAny>) -> RustPSQLDriverPyResult { +pub fn build_serde_value(value: &Bound<'_, PyAny>) -> PSQLPyResult { Python::with_gil(|gil| { if value.is_instance_of::() { return serde_value_from_list(gil, value); @@ -126,7 +121,7 @@ pub fn build_serde_value(value: &Bound<'_, PyAny>) -> RustPSQLDriverPyResult>) -> RustPSQLDriverPyResult { +pub fn pythondto_array_to_serde(array: Option>) -> PSQLPyResult { match array { Some(array) => inner_pythondto_array_to_serde( array.dimensions(), @@ -145,7 +140,7 @@ fn inner_pythondto_array_to_serde( data: &[&PythonDTO], dimension_index: usize, mut lower_bound: usize, -) -> RustPSQLDriverPyResult { +) -> PSQLPyResult { let current_dimension = dimensions.get(dimension_index); if let Some(current_dimension) = current_dimension { diff --git a/src/value_converter/params_converters.rs b/src/value_converter/params_converters.rs new file mode 100644 index 00000000..e69de29b diff --git a/src/value_converter/funcs/to_python.rs b/src/value_converter/to_python.rs similarity index 97% rename from src/value_converter/funcs/to_python.rs rename to src/value_converter/to_python.rs index e65a0085..5dbfd7ce 100644 --- a/src/value_converter/funcs/to_python.rs +++ b/src/value_converter/to_python.rs @@ -17,7 +17,7 @@ use pyo3::{ }; use crate::{ - exceptions::rust_errors::{RustPSQLDriverError, RustPSQLDriverPyResult}, + exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, value_converter::{ additional_types::{ Circle, Line, RustLineSegment, RustLineString, RustMacAddr6, RustMacAddr8, RustPoint, @@ -35,10 +35,7 @@ use pgvector::Vector as PgVector; /// Convert serde `Value` into Python object. /// # Errors /// May return Err Result if cannot add new value to Python Dict. 
-pub fn build_python_from_serde_value( - py: Python<'_>, - value: Value, -) -> RustPSQLDriverPyResult> { +pub fn build_python_from_serde_value(py: Python<'_>, value: Value) -> PSQLPyResult> { match value { Value::Array(massive) => { let mut result_vec: Vec> = vec![]; @@ -112,7 +109,7 @@ fn composite_field_postgres_to_py<'a, T: FromSql<'a>>( type_: &Type, buf: &mut &'a [u8], is_simple: bool, -) -> RustPSQLDriverPyResult { +) -> PSQLPyResult { if is_simple { return T::from_sql_nullable(type_, Some(buf)).map_err(|err| { RustPSQLDriverError::RustToPyValueConversionError(format!( @@ -196,7 +193,7 @@ fn postgres_bytes_to_py( type_: &Type, buf: &mut &[u8], is_simple: bool, -) -> RustPSQLDriverPyResult> { +) -> PSQLPyResult> { match *type_ { // ---------- Bytes Types ---------- // Convert BYTEA type into Vector, then into PyBytes @@ -524,7 +521,7 @@ pub fn other_postgres_bytes_to_py( type_: &Type, buf: &mut &[u8], is_simple: bool, -) -> RustPSQLDriverPyResult> { +) -> PSQLPyResult> { if type_.name() == "vector" { let vector = composite_field_postgres_to_py::>(type_, buf, is_simple)?; match vector { @@ -550,7 +547,7 @@ pub fn composite_postgres_to_py( fields: &Vec, buf: &mut &[u8], custom_decoders: &Option>, -) -> RustPSQLDriverPyResult> { +) -> PSQLPyResult> { let result_py_dict: Bound<'_, PyDict> = PyDict::new_bound(py); let num_fields = postgres_types::private::read_be_i32(buf).map_err(|err| { @@ -619,7 +616,7 @@ pub fn raw_bytes_data_process( column_name: &str, column_type: &Type, custom_decoders: &Option>, -) -> RustPSQLDriverPyResult> { +) -> PSQLPyResult> { if let Some(custom_decoders) = custom_decoders { let py_encoder_func = custom_decoders .bind(py) @@ -658,7 +655,7 @@ pub fn postgres_to_py( column: &Column, column_i: usize, custom_decoders: &Option>, -) -> RustPSQLDriverPyResult> { +) -> PSQLPyResult> { let raw_bytes_data = row.col_buffer(column_i); if let Some(mut raw_bytes_data) = raw_bytes_data { return raw_bytes_data_process( @@ -679,7 +676,7 @@ pub fn postgres_to_py( /// /// May return error if cannot convert Python type into Rust one. /// May return error if parameters type isn't correct. 
-fn py_sequence_to_rust(bind_parameters: &Bound) -> RustPSQLDriverPyResult>> { +fn py_sequence_to_rust(bind_parameters: &Bound) -> PSQLPyResult>> { let mut coord_values_sequence_vec: Vec> = vec![]; if bind_parameters.is_instance_of::() { diff --git a/src/value_converter/traits.rs b/src/value_converter/traits.rs index ca44a7d0..261ee16d 100644 --- a/src/value_converter/traits.rs +++ b/src/value_converter/traits.rs @@ -1,9 +1,9 @@ use pyo3::PyAny; -use crate::exceptions::rust_errors::RustPSQLDriverPyResult; +use crate::exceptions::rust_errors::PSQLPyResult; use super::dto::enums::PythonDTO; -pub trait PythonToDTO { - fn to_python_dto(python_param: &pyo3::Bound<'_, PyAny>) -> RustPSQLDriverPyResult; +pub trait ToPythonDTO { + fn to_python_dto(python_param: &pyo3::Bound<'_, PyAny>) -> PSQLPyResult; } From af51f438c5e9e0f7685bd37ab38453153b137da8 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Sat, 3 May 2025 18:12:59 +0200 Subject: [PATCH 27/65] Full value converter refactor --- python/tests/test_value_converter.py | 42 +++- src/exceptions/rust_errors.rs | 2 +- src/extra_types.rs | 69 +++--- src/statement/parameters.rs | 30 +-- src/statement/statement.rs | 2 +- src/value_converter/consts.rs | 3 - src/value_converter/dto/converter_impls.rs | 153 ++++++------ src/value_converter/dto/enums.rs | 7 +- src/value_converter/dto/funcs.rs | 33 +++ src/value_converter/dto/impls.rs | 60 ++--- src/value_converter/dto/mod.rs | 1 + src/value_converter/from_python.rs | 262 ++++++++++++++------- src/value_converter/models/serde_value.rs | 7 +- src/value_converter/params_converters.rs | 0 src/value_converter/to_python.rs | 70 ------ src/value_converter/traits.rs | 8 + 16 files changed, 435 insertions(+), 314 deletions(-) create mode 100644 src/value_converter/dto/funcs.rs delete mode 100644 src/value_converter/params_converters.rs diff --git a/python/tests/test_value_converter.py b/python/tests/test_value_converter.py index 34361b22..c35baec1 100644 --- a/python/tests/test_value_converter.py +++ b/python/tests/test_value_converter.py @@ -141,7 +141,6 @@ async def test_as_class( ("INT2", SmallInt(12), 12), ("INT4", Integer(121231231), 121231231), ("INT8", BigInt(99999999999999999), 99999999999999999), - ("MONEY", BigInt(99999999999999999), 99999999999999999), ("MONEY", Money(99999999999999999), 99999999999999999), ("NUMERIC(5, 2)", Decimal("120.12"), Decimal("120.12")), ("FLOAT8", 32.12329864501953, 32.12329864501953), @@ -270,11 +269,6 @@ async def test_as_class( [Money(99999999999999999), Money(99999999999999999)], [99999999999999999, 99999999999999999], ), - ( - "MONEY ARRAY", - [[Money(99999999999999999)], [Money(99999999999999999)]], - [[99999999999999999], [99999999999999999]], - ), ( "NUMERIC(5, 2) ARRAY", [Decimal("121.23"), Decimal("188.99")], @@ -666,6 +660,37 @@ async def test_deserialization_simple_into_python( postgres_type: str, py_value: Any, expected_deserialized: Any, +) -> None: + """Test how types can cast from Python and to Python.""" + connection = await psql_pool.connection() + table_name = f"for_test{uuid.uuid4().hex}" + await connection.execute(f"DROP TABLE IF EXISTS {table_name}") + create_table_query = f""" + CREATE TABLE {table_name} (test_field {postgres_type}) + """ + insert_data_query = f""" + INSERT INTO {table_name} VALUES ($1) + """ + await connection.execute(querystring=create_table_query) + await connection.execute( + querystring=insert_data_query, + parameters=[py_value], + ) + + raw_result = await connection.execute( + querystring=f"SELECT test_field 
FROM {table_name}", + ) + + assert raw_result.result()[0]["test_field"] == expected_deserialized + + await connection.execute(f"DROP TABLE IF EXISTS {table_name}") + + +async def test_aboba( + psql_pool: ConnectionPool, + postgres_type: str = "INT2", + py_value: Any = 2, + expected_deserialized: Any = 2, ) -> None: """Test how types can cast from Python and to Python.""" connection = await psql_pool.connection() @@ -1175,11 +1200,6 @@ async def test_empty_array( MoneyArray([Money(99999999999999999), Money(99999999999999999)]), [99999999999999999, 99999999999999999], ), - ( - "MONEY ARRAY", - MoneyArray([[Money(99999999999999999)], [Money(99999999999999999)]]), - [[99999999999999999], [99999999999999999]], - ), ( "NUMERIC(5, 2) ARRAY", NumericArray([Decimal("121.23"), Decimal("188.99")]), diff --git a/src/exceptions/rust_errors.rs b/src/exceptions/rust_errors.rs index b6694da1..94b89fa0 100644 --- a/src/exceptions/rust_errors.rs +++ b/src/exceptions/rust_errors.rs @@ -76,7 +76,7 @@ pub enum RustPSQLDriverError { #[error("Can't convert value from driver to python type: {0}")] RustToPyValueConversionError(String), - #[error("Can't convert value from python to rust type: {0}")] + #[error("{0}")] PyToRustValueConversionError(String), #[error("Python exception: {0}.")] diff --git a/src/extra_types.rs b/src/extra_types.rs index c3b2d832..b3411eae 100644 --- a/src/extra_types.rs +++ b/src/extra_types.rs @@ -2,8 +2,8 @@ use std::str::FromStr; use geo_types::{Line as RustLineSegment, LineString, Point as RustPoint, Rect as RustRect}; use macaddr::{MacAddr6 as RustMacAddr6, MacAddr8 as RustMacAddr8}; +use postgres_types::Type; use pyo3::{ - conversion::FromPyObjectBound, pyclass, pymethods, types::{PyModule, PyModuleMethods}, Bound, Py, PyAny, PyResult, Python, @@ -325,7 +325,7 @@ impl Circle { } macro_rules! build_array_type { - ($st_name:ident, $kind:path) => { + ($st_name:ident, $kind:path, $elem_kind:path) => { #[pyclass] #[derive(Clone)] pub struct $st_name { @@ -347,11 +347,15 @@ macro_rules! build_array_type { self.inner.clone() } + pub fn element_type() -> Type { + $elem_kind + } + /// Convert incoming sequence from python to internal `PythonDTO`. /// /// # Errors /// May return Err Result if cannot convert sequence to array. - pub fn _convert_to_python_dto(&self) -> PSQLPyResult { + pub fn _convert_to_python_dto(&self, elem_type: &Type) -> PSQLPyResult { return Python::with_gil(|gil| { let binding = &self.inner; let bound_inner = Ok::<&pyo3::Bound<'_, pyo3::PyAny>, RustPSQLDriverError>( @@ -359,6 +363,7 @@ macro_rules! build_array_type { )?; Ok::($kind(py_sequence_into_postgres_array( bound_inner, + elem_type, )?)) }); } @@ -366,33 +371,37 @@ macro_rules! 
build_array_type { }; } -build_array_type!(BoolArray, PythonDTO::PyBoolArray); -build_array_type!(UUIDArray, PythonDTO::PyUuidArray); -build_array_type!(VarCharArray, PythonDTO::PyVarCharArray); -build_array_type!(TextArray, PythonDTO::PyTextArray); -build_array_type!(Int16Array, PythonDTO::PyInt16Array); -build_array_type!(Int32Array, PythonDTO::PyInt32Array); -build_array_type!(Int64Array, PythonDTO::PyInt64Array); -build_array_type!(Float32Array, PythonDTO::PyFloat32Array); -build_array_type!(Float64Array, PythonDTO::PyFloat64Array); -build_array_type!(MoneyArray, PythonDTO::PyMoneyArray); -build_array_type!(IpAddressArray, PythonDTO::PyIpAddressArray); -build_array_type!(JSONBArray, PythonDTO::PyJSONBArray); -build_array_type!(JSONArray, PythonDTO::PyJSONArray); -build_array_type!(DateArray, PythonDTO::PyDateArray); -build_array_type!(TimeArray, PythonDTO::PyTimeArray); -build_array_type!(DateTimeArray, PythonDTO::PyDateTimeArray); -build_array_type!(DateTimeTZArray, PythonDTO::PyDateTimeTZArray); -build_array_type!(MacAddr6Array, PythonDTO::PyMacAddr6Array); -build_array_type!(MacAddr8Array, PythonDTO::PyMacAddr8Array); -build_array_type!(NumericArray, PythonDTO::PyNumericArray); -build_array_type!(PointArray, PythonDTO::PyPointArray); -build_array_type!(BoxArray, PythonDTO::PyBoxArray); -build_array_type!(PathArray, PythonDTO::PyPathArray); -build_array_type!(LineArray, PythonDTO::PyLineArray); -build_array_type!(LsegArray, PythonDTO::PyLsegArray); -build_array_type!(CircleArray, PythonDTO::PyCircleArray); -build_array_type!(IntervalArray, PythonDTO::PyIntervalArray); +build_array_type!(BoolArray, PythonDTO::PyBoolArray, Type::BOOL); +build_array_type!(UUIDArray, PythonDTO::PyUuidArray, Type::UUID); +build_array_type!(VarCharArray, PythonDTO::PyVarCharArray, Type::VARCHAR); +build_array_type!(TextArray, PythonDTO::PyTextArray, Type::TEXT); +build_array_type!(Int16Array, PythonDTO::PyInt16Array, Type::INT2); +build_array_type!(Int32Array, PythonDTO::PyInt32Array, Type::INT4); +build_array_type!(Int64Array, PythonDTO::PyInt64Array, Type::INT8); +build_array_type!(Float32Array, PythonDTO::PyFloat32Array, Type::FLOAT4); +build_array_type!(Float64Array, PythonDTO::PyFloat64Array, Type::FLOAT8); +build_array_type!(MoneyArray, PythonDTO::PyMoneyArray, Type::MONEY); +build_array_type!(IpAddressArray, PythonDTO::PyIpAddressArray, Type::INET); +build_array_type!(JSONBArray, PythonDTO::PyJSONBArray, Type::JSONB); +build_array_type!(JSONArray, PythonDTO::PyJSONArray, Type::JSON); +build_array_type!(DateArray, PythonDTO::PyDateArray, Type::DATE); +build_array_type!(TimeArray, PythonDTO::PyTimeArray, Type::TIME); +build_array_type!(DateTimeArray, PythonDTO::PyDateTimeArray, Type::TIMESTAMP); +build_array_type!( + DateTimeTZArray, + PythonDTO::PyDateTimeTZArray, + Type::TIMESTAMPTZ +); +build_array_type!(MacAddr6Array, PythonDTO::PyMacAddr6Array, Type::MACADDR); +build_array_type!(MacAddr8Array, PythonDTO::PyMacAddr8Array, Type::MACADDR8); +build_array_type!(NumericArray, PythonDTO::PyNumericArray, Type::NUMERIC); +build_array_type!(PointArray, PythonDTO::PyPointArray, Type::POINT); +build_array_type!(BoxArray, PythonDTO::PyBoxArray, Type::BOX); +build_array_type!(PathArray, PythonDTO::PyPathArray, Type::PATH); +build_array_type!(LineArray, PythonDTO::PyLineArray, Type::LINE); +build_array_type!(LsegArray, PythonDTO::PyLsegArray, Type::LSEG); +build_array_type!(CircleArray, PythonDTO::PyCircleArray, Type::CIRCLE); +build_array_type!(IntervalArray, PythonDTO::PyIntervalArray, Type::INTERVAL); 
#[allow(clippy::module_name_repetitions)] #[allow(clippy::missing_errors_doc)] diff --git a/src/statement/parameters.rs b/src/statement/parameters.rs index baeded5d..0a2d9105 100644 --- a/src/statement/parameters.rs +++ b/src/statement/parameters.rs @@ -9,7 +9,10 @@ use pyo3::{ use crate::{ exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, - value_converter::{dto::enums::PythonDTO, from_python::py_to_rust}, + value_converter::{ + dto::enums::PythonDTO, + from_python::{from_python_typed, from_python_untyped}, + }, }; pub type QueryParameter = (dyn ToSql + Sync); @@ -137,7 +140,7 @@ impl MappingParametersBuilder { let converted_parameters = self .extract_parameters(gil, parameters_names)? .iter() - .map(|parameter| py_to_rust(parameter.bind(gil))) + .map(|parameter| from_python_untyped(parameter.bind(gil))) .collect::>>()?; Ok(PreparedParameters::new(converted_parameters, vec![])) // TODO: change vec![] to real types. @@ -151,7 +154,7 @@ impl MappingParametersBuilder { let converted_parameters = self .extract_parameters(gil, parameters_names)? .iter() - .map(|parameter| py_to_rust(parameter.bind(gil))) + .map(|parameter| from_python_untyped(parameter.bind(gil))) .collect::>>()?; Ok(PreparedParameters::new(converted_parameters, vec![])) // TODO: change vec![] to real types. @@ -193,30 +196,29 @@ impl SequenceParametersBuilder { } fn prepare(self, gil: Python<'_>) -> PSQLPyResult { - if self.types.is_some() { - return self.prepare_typed(gil); + let types = self.types.clone(); + + if types.is_some() { + return self.prepare_typed(gil, types.clone().unwrap()); } self.prepare_not_typed(gil) } - fn prepare_typed(self, gil: Python<'_>) -> PSQLPyResult { - let converted_parameters = self - .seq_parameters - .iter() - .map(|parameter| py_to_rust(parameter.bind(gil))) + fn prepare_typed(self, gil: Python<'_>, types: Vec) -> PSQLPyResult { + let zipped_params_types = zip(self.seq_parameters, &types); + let converted_parameters = zipped_params_types + .map(|(parameter, type_)| from_python_typed(parameter.bind(gil), &type_)) .collect::>>()?; - Ok(PreparedParameters::new(converted_parameters, vec![])) // TODO: change vec![] to real types. - - // Ok(prepared_parameters) // TODO: put there normal convert with types + Ok(PreparedParameters::new(converted_parameters, types)) } fn prepare_not_typed(self, gil: Python<'_>) -> PSQLPyResult { let converted_parameters = self .seq_parameters .iter() - .map(|parameter| py_to_rust(parameter.bind(gil))) + .map(|parameter| from_python_untyped(parameter.bind(gil))) .collect::>>()?; Ok(PreparedParameters::new(converted_parameters, vec![])) // TODO: change vec![] to real types. 
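Note on the parameter refactor above: `SequenceParametersBuilder::prepare` now splits into a typed path (`from_python_typed`, converting each value against a known PostgreSQL `Type`) and an untyped fallback (`from_python_untyped`), which is why the updated tests in this patch bind plain Python `int`/`float` values to `INT2`/`INT4`/`INT8`/`MONEY`/`FLOAT4`/`FLOAT8` columns alongside the existing wrapper types. A minimal usage sketch, not part of the patch — the DSN, table name, and the `psqlpy.extra_types` import path are assumptions for illustration:

```python
# Sketch only: shows that both wrapper types and plain Python values bind
# after this refactor. DSN and table are placeholders.
import asyncio

from psqlpy import ConnectionPool
from psqlpy.extra_types import BigInt, Float64, SmallInt  # assumed import path


async def main() -> None:
    db_pool = ConnectionPool(dsn="postgres://postgres:postgres@localhost:5432/postgres")
    connection = await db_pool.connection()
    await connection.execute(
        "CREATE TABLE IF NOT EXISTS demo (i2 INT2, i8 INT8, f8 FLOAT8)",
    )

    # Wrapper types pin the exact PostgreSQL type, as before the refactor.
    await connection.execute(
        querystring="INSERT INTO demo VALUES ($1, $2, $3)",
        parameters=[SmallInt(12), BigInt(99999999999999999), Float64(32.123)],
    )

    # Plain Python values are converted against the statement's parameter types
    # (the new from_python_typed path exercised by the updated tests).
    await connection.execute(
        querystring="INSERT INTO demo VALUES ($1, $2, $3)",
        parameters=[12, 99999999999999999, 32.123],
    )

    db_pool.close()


asyncio.run(main())
```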
diff --git a/src/statement/statement.rs b/src/statement/statement.rs index 4c3a6e9b..4cfdc09c 100644 --- a/src/statement/statement.rs +++ b/src/statement/statement.rs @@ -9,7 +9,7 @@ pub struct PsqlpyStatement { } impl PsqlpyStatement { - pub fn new(query: QueryString, prepared_parameters: PreparedParameters) -> Self { + pub(crate) fn new(query: QueryString, prepared_parameters: PreparedParameters) -> Self { Self { query, prepared_parameters, diff --git a/src/value_converter/consts.rs b/src/value_converter/consts.rs index e5ff56e4..82a34f0f 100644 --- a/src/value_converter/consts.rs +++ b/src/value_converter/consts.rs @@ -1,5 +1,4 @@ use once_cell::sync::Lazy; -use postgres_types::ToSql; use std::{collections::HashMap, sync::RwLock}; use pyo3::{ @@ -35,5 +34,3 @@ pub fn get_timedelta_cls(py: Python<'_>) -> PyResult<&Bound<'_, PyType>> { }) .map(|ty| ty.bind(py)) } - -pub type QueryParameter = (dyn ToSql + Sync); diff --git a/src/value_converter/dto/converter_impls.rs b/src/value_converter/dto/converter_impls.rs index 64948f29..1e6fa7be 100644 --- a/src/value_converter/dto/converter_impls.rs +++ b/src/value_converter/dto/converter_impls.rs @@ -2,11 +2,13 @@ use std::net::IpAddr; use chrono::{DateTime, FixedOffset, NaiveDate, NaiveDateTime, NaiveTime}; use pg_interval::Interval; +use postgres_types::Type; use pyo3::{ types::{PyAnyMethods, PyDateTime, PyDelta, PyDict}, Bound, PyAny, }; use rust_decimal::Decimal; +use serde::de::IntoDeserializer; use uuid::Uuid; use crate::{ @@ -16,11 +18,11 @@ use crate::{ additional_types::NonePyType, from_python::{extract_datetime_from_python_object_attrs, py_sequence_into_postgres_array}, models::serde_value::build_serde_value, - traits::ToPythonDTO, + traits::{ToPythonDTO, ToPythonDTOArray}, }, }; -use super::enums::PythonDTO; +use super::{enums::PythonDTO, funcs::array_type_to_single_type}; impl ToPythonDTO for NonePyType { fn to_python_dto(_python_param: &pyo3::Bound<'_, PyAny>) -> PSQLPyResult { @@ -28,7 +30,7 @@ impl ToPythonDTO for NonePyType { } } -macro_rules! construct_simple_type_matcher { +macro_rules! construct_simple_type_converter { ($match_type:ty, $kind:path) => { impl ToPythonDTO for $match_type { fn to_python_dto(python_param: &Bound<'_, PyAny>) -> PSQLPyResult { @@ -38,16 +40,16 @@ macro_rules! 
construct_simple_type_matcher { }; } -construct_simple_type_matcher!(bool, PythonDTO::PyBool); -construct_simple_type_matcher!(Vec, PythonDTO::PyBytes); -construct_simple_type_matcher!(String, PythonDTO::PyString); -construct_simple_type_matcher!(f32, PythonDTO::PyFloat32); -construct_simple_type_matcher!(f64, PythonDTO::PyFloat64); -construct_simple_type_matcher!(i16, PythonDTO::PyIntI16); -construct_simple_type_matcher!(i32, PythonDTO::PyIntI32); -construct_simple_type_matcher!(i64, PythonDTO::PyIntI64); -construct_simple_type_matcher!(NaiveDate, PythonDTO::PyDate); -construct_simple_type_matcher!(NaiveTime, PythonDTO::PyTime); +construct_simple_type_converter!(bool, PythonDTO::PyBool); +construct_simple_type_converter!(Vec, PythonDTO::PyBytes); +construct_simple_type_converter!(String, PythonDTO::PyString); +construct_simple_type_converter!(f32, PythonDTO::PyFloat32); +construct_simple_type_converter!(f64, PythonDTO::PyFloat64); +construct_simple_type_converter!(i16, PythonDTO::PyIntI16); +construct_simple_type_converter!(i32, PythonDTO::PyIntI32); +construct_simple_type_converter!(i64, PythonDTO::PyIntI64); +construct_simple_type_converter!(NaiveDate, PythonDTO::PyDate); +construct_simple_type_converter!(NaiveTime, PythonDTO::PyTime); impl ToPythonDTO for PyDateTime { fn to_python_dto(python_param: &pyo3::Bound<'_, PyAny>) -> PSQLPyResult { @@ -92,7 +94,7 @@ impl ToPythonDTO for PyDict { } } -macro_rules! construct_extra_type_matcher { +macro_rules! construct_extra_type_converter { ($match_type:ty, $kind:path) => { impl ToPythonDTO for $match_type { fn to_python_dto(python_param: &Bound<'_, PyAny>) -> PSQLPyResult { @@ -102,25 +104,26 @@ macro_rules! construct_extra_type_matcher { }; } -construct_extra_type_matcher!(extra_types::Text, PythonDTO::PyText); -construct_extra_type_matcher!(extra_types::VarChar, PythonDTO::PyVarChar); -construct_extra_type_matcher!(extra_types::SmallInt, PythonDTO::PyIntI16); -construct_extra_type_matcher!(extra_types::Integer, PythonDTO::PyIntI32); -construct_extra_type_matcher!(extra_types::BigInt, PythonDTO::PyIntI64); -construct_extra_type_matcher!(extra_types::Float32, PythonDTO::PyFloat32); -construct_extra_type_matcher!(extra_types::Float64, PythonDTO::PyFloat64); -construct_extra_type_matcher!(extra_types::Money, PythonDTO::PyMoney); -construct_extra_type_matcher!(extra_types::JSONB, PythonDTO::PyJsonb); -construct_extra_type_matcher!(extra_types::JSON, PythonDTO::PyJson); -construct_extra_type_matcher!(extra_types::MacAddr6, PythonDTO::PyMacAddr6); -construct_extra_type_matcher!(extra_types::MacAddr8, PythonDTO::PyMacAddr8); -construct_extra_type_matcher!(extra_types::Point, PythonDTO::PyPoint); -construct_extra_type_matcher!(extra_types::Box, PythonDTO::PyBox); -construct_extra_type_matcher!(extra_types::Path, PythonDTO::PyPath); -construct_extra_type_matcher!(extra_types::Line, PythonDTO::PyLine); -construct_extra_type_matcher!(extra_types::LineSegment, PythonDTO::PyLineSegment); -construct_extra_type_matcher!(extra_types::Circle, PythonDTO::PyCircle); -construct_extra_type_matcher!(extra_types::PgVector, PythonDTO::PyPgVector); +construct_extra_type_converter!(extra_types::Text, PythonDTO::PyText); +construct_extra_type_converter!(extra_types::VarChar, PythonDTO::PyVarChar); +construct_extra_type_converter!(extra_types::SmallInt, PythonDTO::PyIntI16); +construct_extra_type_converter!(extra_types::Integer, PythonDTO::PyIntI32); +construct_extra_type_converter!(extra_types::BigInt, PythonDTO::PyIntI64); 
+construct_extra_type_converter!(extra_types::Float32, PythonDTO::PyFloat32); +construct_extra_type_converter!(extra_types::Float64, PythonDTO::PyFloat64); +construct_extra_type_converter!(extra_types::Money, PythonDTO::PyMoney); +construct_extra_type_converter!(extra_types::JSONB, PythonDTO::PyJsonb); +construct_extra_type_converter!(extra_types::JSON, PythonDTO::PyJson); +construct_extra_type_converter!(extra_types::MacAddr6, PythonDTO::PyMacAddr6); +construct_extra_type_converter!(extra_types::MacAddr8, PythonDTO::PyMacAddr8); +construct_extra_type_converter!(extra_types::Point, PythonDTO::PyPoint); +construct_extra_type_converter!(extra_types::Box, PythonDTO::PyBox); +construct_extra_type_converter!(extra_types::Path, PythonDTO::PyPath); +construct_extra_type_converter!(extra_types::Line, PythonDTO::PyLine); +construct_extra_type_converter!(extra_types::LineSegment, PythonDTO::PyLineSegment); +construct_extra_type_converter!(extra_types::Circle, PythonDTO::PyCircle); +construct_extra_type_converter!(extra_types::PgVector, PythonDTO::PyPgVector); +construct_extra_type_converter!(extra_types::CustomType, PythonDTO::PyCustomType); impl ToPythonDTO for PythonDecimal { fn to_python_dto(python_param: &pyo3::Bound<'_, PyAny>) -> PSQLPyResult { @@ -138,11 +141,16 @@ impl ToPythonDTO for PythonUUID { } } -impl ToPythonDTO for extra_types::PythonArray { - fn to_python_dto(python_param: &pyo3::Bound<'_, PyAny>) -> PSQLPyResult { - Ok(PythonDTO::PyArray(py_sequence_into_postgres_array( - python_param, - )?)) +impl ToPythonDTOArray for extra_types::PythonArray { + fn to_python_dto( + python_param: &pyo3::Bound<'_, PyAny>, + array_type: Type, + ) -> PSQLPyResult { + let elem_type = array_type_to_single_type(&array_type); + Ok(PythonDTO::PyArray( + py_sequence_into_postgres_array(python_param, &elem_type)?, + array_type, + )) } } @@ -160,47 +168,54 @@ impl ToPythonDTO for IpAddr { impl ToPythonDTO for extra_types::PythonEnum { fn to_python_dto(python_param: &pyo3::Bound<'_, PyAny>) -> PSQLPyResult { - let string = python_param.extract::()?; - return Ok(PythonDTO::PyString(string)); + if let Ok(value_attr) = python_param.getattr("value") { + if let Ok(possible_string) = value_attr.extract::() { + return Ok(PythonDTO::PyString(possible_string)); + } + } + + Err(RustPSQLDriverError::PyToRustValueConversionError( + "Cannot convert Enum to inner type".into(), + )) } } -macro_rules! construct_array_type_matcher { +macro_rules! construct_array_type_converter { ($match_type:ty) => { impl ToPythonDTO for $match_type { fn to_python_dto(python_param: &Bound<'_, PyAny>) -> PSQLPyResult { python_param .extract::<$match_type>()? 
- ._convert_to_python_dto() + ._convert_to_python_dto(&Self::element_type()) } } }; } -construct_array_type_matcher!(extra_types::BoolArray); -construct_array_type_matcher!(extra_types::UUIDArray); -construct_array_type_matcher!(extra_types::VarCharArray); -construct_array_type_matcher!(extra_types::TextArray); -construct_array_type_matcher!(extra_types::Int16Array); -construct_array_type_matcher!(extra_types::Int32Array); -construct_array_type_matcher!(extra_types::Int64Array); -construct_array_type_matcher!(extra_types::Float32Array); -construct_array_type_matcher!(extra_types::Float64Array); -construct_array_type_matcher!(extra_types::MoneyArray); -construct_array_type_matcher!(extra_types::IpAddressArray); -construct_array_type_matcher!(extra_types::JSONBArray); -construct_array_type_matcher!(extra_types::JSONArray); -construct_array_type_matcher!(extra_types::DateArray); -construct_array_type_matcher!(extra_types::TimeArray); -construct_array_type_matcher!(extra_types::DateTimeArray); -construct_array_type_matcher!(extra_types::DateTimeTZArray); -construct_array_type_matcher!(extra_types::MacAddr6Array); -construct_array_type_matcher!(extra_types::MacAddr8Array); -construct_array_type_matcher!(extra_types::NumericArray); -construct_array_type_matcher!(extra_types::PointArray); -construct_array_type_matcher!(extra_types::BoxArray); -construct_array_type_matcher!(extra_types::PathArray); -construct_array_type_matcher!(extra_types::LineArray); -construct_array_type_matcher!(extra_types::LsegArray); -construct_array_type_matcher!(extra_types::CircleArray); -construct_array_type_matcher!(extra_types::IntervalArray); +construct_array_type_converter!(extra_types::BoolArray); +construct_array_type_converter!(extra_types::UUIDArray); +construct_array_type_converter!(extra_types::VarCharArray); +construct_array_type_converter!(extra_types::TextArray); +construct_array_type_converter!(extra_types::Int16Array); +construct_array_type_converter!(extra_types::Int32Array); +construct_array_type_converter!(extra_types::Int64Array); +construct_array_type_converter!(extra_types::Float32Array); +construct_array_type_converter!(extra_types::Float64Array); +construct_array_type_converter!(extra_types::MoneyArray); +construct_array_type_converter!(extra_types::IpAddressArray); +construct_array_type_converter!(extra_types::JSONBArray); +construct_array_type_converter!(extra_types::JSONArray); +construct_array_type_converter!(extra_types::DateArray); +construct_array_type_converter!(extra_types::TimeArray); +construct_array_type_converter!(extra_types::DateTimeArray); +construct_array_type_converter!(extra_types::DateTimeTZArray); +construct_array_type_converter!(extra_types::MacAddr6Array); +construct_array_type_converter!(extra_types::MacAddr8Array); +construct_array_type_converter!(extra_types::NumericArray); +construct_array_type_converter!(extra_types::PointArray); +construct_array_type_converter!(extra_types::BoxArray); +construct_array_type_converter!(extra_types::PathArray); +construct_array_type_converter!(extra_types::LineArray); +construct_array_type_converter!(extra_types::LsegArray); +construct_array_type_converter!(extra_types::CircleArray); +construct_array_type_converter!(extra_types::IntervalArray); diff --git a/src/value_converter/dto/enums.rs b/src/value_converter/dto/enums.rs index 00e88a10..a90f1527 100644 --- a/src/value_converter/dto/enums.rs +++ b/src/value_converter/dto/enums.rs @@ -2,6 +2,7 @@ use chrono::{self, DateTime, FixedOffset, NaiveDate, NaiveDateTime, NaiveTime}; use 
geo_types::{Line as LineSegment, LineString, Point, Rect}; use macaddr::{MacAddr6, MacAddr8}; use pg_interval::Interval; +use postgres_types::Type; use rust_decimal::Decimal; use serde_json::Value; use std::{fmt::Debug, net::IpAddr}; @@ -34,9 +35,9 @@ pub enum PythonDTO { PyDateTimeTz(DateTime), PyInterval(Interval), PyIpAddress(IpAddr), - PyList(Vec), - PyArray(Array), - PyTuple(Vec), + PyList(Vec, Type), + PyArray(Array, Type), + PyTuple(Vec, Type), PyJsonb(Value), PyJson(Value), PyMacAddr6(MacAddr6), diff --git a/src/value_converter/dto/funcs.rs b/src/value_converter/dto/funcs.rs new file mode 100644 index 00000000..116db7d0 --- /dev/null +++ b/src/value_converter/dto/funcs.rs @@ -0,0 +1,33 @@ +use postgres_types::Type; + +pub fn array_type_to_single_type(array_type: &Type) -> Type { + match *array_type { + Type::BOOL_ARRAY => Type::BOOL, + Type::UUID_ARRAY => Type::UUID_ARRAY, + Type::VARCHAR_ARRAY => Type::VARCHAR, + Type::TEXT_ARRAY => Type::TEXT, + Type::INT2_ARRAY => Type::INT2, + Type::INT4_ARRAY => Type::INT4, + Type::INT8_ARRAY => Type::INT8, + Type::FLOAT4_ARRAY => Type::FLOAT4, + Type::FLOAT8_ARRAY => Type::FLOAT8, + Type::MONEY_ARRAY => Type::MONEY, + Type::INET_ARRAY => Type::INET, + Type::JSON_ARRAY => Type::JSON, + Type::JSONB_ARRAY => Type::JSONB, + Type::DATE_ARRAY => Type::DATE, + Type::TIME_ARRAY => Type::TIME, + Type::TIMESTAMP_ARRAY => Type::TIMESTAMP, + Type::TIMESTAMPTZ_ARRAY => Type::TIMESTAMPTZ, + Type::INTERVAL_ARRAY => Type::INTERVAL, + Type::MACADDR_ARRAY => Type::MACADDR, + Type::MACADDR8_ARRAY => Type::MACADDR8, + Type::POINT_ARRAY => Type::POINT, + Type::BOX_ARRAY => Type::BOX, + Type::PATH_ARRAY => Type::PATH, + Type::LINE_ARRAY => Type::LINE, + Type::LSEG_ARRAY => Type::LSEG, + Type::CIRCLE_ARRAY => Type::CIRCLE, + _ => Type::ANY, + } +} diff --git a/src/value_converter/dto/impls.rs b/src/value_converter/dto/impls.rs index 58debfdc..bd48ddb3 100644 --- a/src/value_converter/dto/impls.rs +++ b/src/value_converter/dto/impls.rs @@ -39,7 +39,9 @@ impl<'py> IntoPyObject<'py> for PythonDTO { PythonDTO::PyIntU64(pyint) => Ok(pyint.into_pyobject(py)?.into_any()), PythonDTO::PyFloat32(pyfloat) => Ok(pyfloat.into_pyobject(py)?.into_any()), PythonDTO::PyFloat64(pyfloat) => Ok(pyfloat.into_pyobject(py)?.into_any()), - _ => unreachable!(), + _ => { + unreachable!() + } } } } @@ -108,7 +110,7 @@ impl PythonDTO { PythonDTO::PyIntU64(pyint) => Ok(json!(pyint)), PythonDTO::PyFloat32(pyfloat) => Ok(json!(pyfloat)), PythonDTO::PyFloat64(pyfloat) => Ok(json!(pyfloat)), - PythonDTO::PyList(pylist) => { + PythonDTO::PyList(pylist, _) => { let mut vec_serde_values: Vec = vec![]; for py_object in pylist { @@ -117,7 +119,9 @@ impl PythonDTO { Ok(json!(vec_serde_values)) } - PythonDTO::PyArray(array) => Ok(json!(pythondto_array_to_serde(Some(array.clone()))?)), + PythonDTO::PyArray(array, _) => { + Ok(json!(pythondto_array_to_serde(Some(array.clone()))?)) + } PythonDTO::PyJsonb(py_dict) | PythonDTO::PyJson(py_dict) => Ok(py_dict.clone()), _ => Err(RustPSQLDriverError::PyToRustValueConversionError( "Cannot convert your type into Rust type".into(), @@ -238,30 +242,32 @@ impl ToSql for PythonDTO { PythonDTO::PyCircle(pycircle) => { <&Circle as ToSql>::to_sql(&pycircle, ty, out)?; } - PythonDTO::PyList(py_iterable) | PythonDTO::PyTuple(py_iterable) => { - let mut items = Vec::new(); - for inner in py_iterable { - items.push(inner); - } - if items.is_empty() { - return_is_null_true = true; - } else { - items.to_sql(&items[0].array_type()?, out)?; - } - } - PythonDTO::PyArray(array) => 
{ - if let Some(first_elem) = array.iter().nth(0) { - match first_elem.array_type() { - Ok(ok_type) => { - array.to_sql(&ok_type, out)?; - } - Err(_) => { - return Err(RustPSQLDriverError::PyToRustValueConversionError( - "Cannot define array type.".into(), - ))? - } - } - } + PythonDTO::PyList(py_iterable, type_) | PythonDTO::PyTuple(py_iterable, type_) => { + return py_iterable.to_sql(type_, out); + // let mut items = Vec::new(); + // for inner in py_iterable { + // items.push(inner); + // } + // if items.is_empty() { + // return_is_null_true = true; + // } else { + // items.to_sql(&items[0].array_type()?, out)?; + // } + } + PythonDTO::PyArray(array, type_) => { + return array.to_sql(type_, out); + // if let Some(first_elem) = array.iter().nth(0) { + // match first_elem.array_type() { + // Ok(ok_type) => { + // array.to_sql(&ok_type, out)?; + // } + // Err(_) => { + // return Err(RustPSQLDriverError::PyToRustValueConversionError( + // "Cannot define array type.".into(), + // ))? + // } + // } + // } } PythonDTO::PyJsonb(py_dict) | PythonDTO::PyJson(py_dict) => { <&Value as ToSql>::to_sql(&py_dict, ty, out)?; diff --git a/src/value_converter/dto/mod.rs b/src/value_converter/dto/mod.rs index 5be9ae5b..49985cf1 100644 --- a/src/value_converter/dto/mod.rs +++ b/src/value_converter/dto/mod.rs @@ -1,3 +1,4 @@ pub mod converter_impls; pub mod enums; +pub mod funcs; pub mod impls; diff --git a/src/value_converter/from_python.rs b/src/value_converter/from_python.rs index b104c993..57307f29 100644 --- a/src/value_converter/from_python.rs +++ b/src/value_converter/from_python.rs @@ -2,17 +2,14 @@ use chrono::{self, DateTime, FixedOffset, NaiveDate, NaiveDateTime, NaiveTime, T use chrono_tz::Tz; use geo_types::{coord, Coord}; use itertools::Itertools; -use pg_interval::Interval; use postgres_array::{Array, Dimension}; -use rust_decimal::Decimal; -use serde_json::{Map, Value}; +use postgres_types::Type; use std::net::IpAddr; -use uuid::Uuid; use pyo3::{ types::{ - PyAnyMethods, PyBool, PyBytes, PyDate, PyDateTime, PyDelta, PyDict, PyDictMethods, PyFloat, - PyInt, PyList, PyMapping, PySequence, PySet, PyString, PyTime, PyTuple, PyTypeMethods, + PyAnyMethods, PyBool, PyBytes, PyDate, PyDateTime, PyDelta, PyDict, PyFloat, PyInt, PyList, + PySequence, PySet, PyString, PyTime, PyTuple, PyTypeMethods, }, Bound, Py, PyAny, Python, }; @@ -20,13 +17,13 @@ use pyo3::{ use crate::{ exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, extra_types::{self}, - value_converter::{ - consts::KWARGS_QUERYSTRINGS, dto::enums::PythonDTO, - utils::extract_value_from_python_object_or_raise, - }, + value_converter::{dto::enums::PythonDTO, utils::extract_value_from_python_object_or_raise}, }; -use super::{additional_types::NonePyType, consts::KWARGS_PARAMS_REGEXP, traits::ToPythonDTO}; +use super::{ + additional_types::NonePyType, + traits::{ToPythonDTO, ToPythonDTOArray}, +}; /// Convert single python parameter to `PythonDTO` enum. /// @@ -35,17 +32,11 @@ use super::{additional_types::NonePyType, consts::KWARGS_PARAMS_REGEXP, traits:: /// May return Err Result if python type doesn't have support yet /// or value of the type is incorrect. 
#[allow(clippy::too_many_lines)] -pub fn py_to_rust(parameter: &pyo3::Bound<'_, PyAny>) -> PSQLPyResult { +pub fn from_python_untyped(parameter: &pyo3::Bound<'_, PyAny>) -> PSQLPyResult { if parameter.is_none() { return ::to_python_dto(parameter); } - if parameter.is_instance_of::() { - return Ok(PythonDTO::PyCustomType( - parameter.extract::()?.inner(), - )); - } - if parameter.is_instance_of::() { return ::to_python_dto(parameter); } @@ -115,7 +106,7 @@ pub fn py_to_rust(parameter: &pyo3::Bound<'_, PyAny>) -> PSQLPyResult } if parameter.is_instance_of::() | parameter.is_instance_of::() { - return ::to_python_dto(parameter); + return ::to_python_dto(parameter, Type::ANY); } if parameter.is_instance_of::() { @@ -124,16 +115,10 @@ pub fn py_to_rust(parameter: &pyo3::Bound<'_, PyAny>) -> PSQLPyResult if parameter.is_instance_of::() { return ::to_python_dto(parameter); - // return Ok(PythonDTO::PyJsonb( - // parameter.extract::()?.inner(), - // )); } if parameter.is_instance_of::() { return ::to_python_dto(parameter); - // return Ok(PythonDTO::PyJson( - // parameter.extract::()?.inner(), - // )); } if parameter.is_instance_of::() { @@ -178,6 +163,162 @@ pub fn py_to_rust(parameter: &pyo3::Bound<'_, PyAny>) -> PSQLPyResult return ::to_python_dto(parameter); } + if let Ok(converted_array) = from_python_array_typed(parameter) { + return Ok(converted_array); + } + + if parameter.is_instance_of::() { + return ::to_python_dto(parameter); + } + + if parameter.extract::().is_ok() { + return ::to_python_dto(parameter); + } + + if parameter.getattr("value").is_ok() { + return ::to_python_dto(parameter); + } + + Err(RustPSQLDriverError::PyToRustValueConversionError(format!( + "Can not covert you type {parameter} into inner one", + ))) +} + +/// Convert single python parameter to `PythonDTO` enum. +/// +/// # Errors +/// +/// May return Err Result if python type doesn't have support yet +/// or value of the type is incorrect. +#[allow(clippy::too_many_lines)] +pub fn from_python_typed( + parameter: &pyo3::Bound<'_, PyAny>, + type_: &Type, +) -> PSQLPyResult { + println!("{:?} {:?}", type_, parameter); + if parameter.is_instance_of::() { + return ::to_python_dto(parameter); + } + + if parameter.is_none() { + return ::to_python_dto(parameter); + } + + if parameter.get_type().name()? == "UUID" { + return ::to_python_dto(parameter); + } + + if parameter.get_type().name()? == "decimal.Decimal" + || parameter.get_type().name()? 
== "Decimal" + { + return ::to_python_dto(parameter); + } + + if parameter.is_instance_of::() | parameter.is_instance_of::() { + return ::to_python_dto( + parameter, + type_.clone(), + ); + } + + if let Ok(converted_array) = from_python_array_typed(parameter) { + return Ok(converted_array); + } + + match *type_ { + Type::BYTEA => return as ToPythonDTO>::to_python_dto(parameter), + Type::TEXT => { + if parameter.is_instance_of::() { + return ::to_python_dto(parameter); + } + return ::to_python_dto(parameter); + } + Type::VARCHAR => { + if parameter.is_instance_of::() { + return ::to_python_dto(parameter); + } + return ::to_python_dto(parameter); + } + Type::XML => return ::to_python_dto(parameter), + Type::BOOL => return ::to_python_dto(parameter), + Type::INT2 => { + if parameter.is_instance_of::() { + return ::to_python_dto(parameter); + } + return ::to_python_dto(parameter); + } + Type::INT4 => { + if parameter.is_instance_of::() { + return ::to_python_dto(parameter); + } + return ::to_python_dto(parameter); + } + Type::INT8 => { + if parameter.is_instance_of::() { + return ::to_python_dto(parameter); + } + return ::to_python_dto(parameter); + } + Type::MONEY => { + if parameter.is_instance_of::() { + return ::to_python_dto(parameter); + } + return ::to_python_dto(parameter); + } + Type::FLOAT4 => { + if parameter.is_instance_of::() { + return ::to_python_dto(parameter); + } + return ::to_python_dto(parameter); + } + Type::FLOAT8 => { + if parameter.is_instance_of::() { + return ::to_python_dto(parameter); + } + return ::to_python_dto(parameter); + } + Type::INET => return ::to_python_dto(parameter), + Type::DATE => return ::to_python_dto(parameter), + Type::TIME => return ::to_python_dto(parameter), + Type::TIMESTAMP | Type::TIMESTAMPTZ => { + return ::to_python_dto(parameter) + } + Type::INTERVAL => return ::to_python_dto(parameter), + Type::JSONB => { + if parameter.is_instance_of::() { + return ::to_python_dto(parameter); + } + + return ::to_python_dto(parameter); + } + Type::JSON => { + if parameter.is_instance_of::() { + return ::to_python_dto(parameter); + } + + return ::to_python_dto(parameter); + } + Type::MACADDR => return ::to_python_dto(parameter), + Type::MACADDR8 => return ::to_python_dto(parameter), + Type::POINT => return ::to_python_dto(parameter), + Type::BOX => return ::to_python_dto(parameter), + Type::PATH => return ::to_python_dto(parameter), + Type::LINE => return ::to_python_dto(parameter), + Type::LSEG => return ::to_python_dto(parameter), + Type::CIRCLE => return ::to_python_dto(parameter), + _ => {} + } + + if let Ok(converted_value) = from_python_untyped(parameter) { + return Ok(converted_value); + } + + Err(RustPSQLDriverError::PyToRustValueConversionError(format!( + "Can not covert you type {parameter} into {type_}", + ))) +} + +fn from_python_array_typed(parameter: &pyo3::Bound<'_, PyAny>) -> PSQLPyResult { if parameter.is_instance_of::() { return ::to_python_dto(parameter); } @@ -286,25 +427,8 @@ pub fn py_to_rust(parameter: &pyo3::Bound<'_, PyAny>) -> PSQLPyResult return ::to_python_dto(parameter); } - if parameter.is_instance_of::() { - return ::to_python_dto(parameter); - } - - if let Ok(_) = parameter.extract::() { - return ::to_python_dto(parameter); - } - - // It's used for Enum. - // If StrEnum is used on Python side, - // we simply stop at the `is_instance_of::``. 
- if let Ok(value_attr) = parameter.getattr("value") { - if let Ok(possible_string) = value_attr.extract::() { - return Ok(PythonDTO::PyString(possible_string)); - } - } - Err(RustPSQLDriverError::PyToRustValueConversionError(format!( - "Can not covert you type {parameter} into inner one", + "Cannot convert parameter in extra types Array", ))) } @@ -368,7 +492,10 @@ pub fn extract_datetime_from_python_object_attrs( /// May return Err Result if cannot convert at least one element. #[allow(clippy::cast_possible_truncation)] #[allow(clippy::cast_possible_wrap)] -pub fn py_sequence_into_postgres_array(parameter: &Bound) -> PSQLPyResult> { +pub fn py_sequence_into_postgres_array( + parameter: &Bound, + type_: &Type, +) -> PSQLPyResult> { let mut py_seq = parameter .downcast::() .map_err(|_| { @@ -413,7 +540,7 @@ pub fn py_sequence_into_postgres_array(parameter: &Bound) -> PSQLPyResult } } - let array_data = py_sequence_into_flat_vec(parameter)?; + let array_data = py_sequence_into_flat_vec(parameter, type_)?; match postgres_array::Array::from_parts_no_panic(array_data, dimensions) { Ok(result_array) => Ok(result_array), Err(err) => Err(RustPSQLDriverError::PyToRustValueConversionError(format!( @@ -426,7 +553,10 @@ pub fn py_sequence_into_postgres_array(parameter: &Bound) -> PSQLPyResult /// /// # Errors /// May return Err Result if cannot convert element into Rust one. -pub fn py_sequence_into_flat_vec(parameter: &Bound) -> PSQLPyResult> { +pub fn py_sequence_into_flat_vec( + parameter: &Bound, + type_: &Type, +) -> PSQLPyResult> { let py_seq = parameter.downcast::().map_err(|_| { RustPSQLDriverError::PyToRustValueConversionError( "PostgreSQL ARRAY type can be made only from python Sequence".into(), @@ -441,17 +571,17 @@ pub fn py_sequence_into_flat_vec(parameter: &Bound) -> PSQLPyResult() { - final_vec.push(py_to_rust(&ok_seq_elem)?); + final_vec.push(from_python_typed(&ok_seq_elem, type_)?); continue; } let possible_next_seq = ok_seq_elem.downcast::(); if let Ok(next_seq) = possible_next_seq { - let mut next_vec = py_sequence_into_flat_vec(next_seq)?; + let mut next_vec = py_sequence_into_flat_vec(next_seq, type_)?; final_vec.append(&mut next_vec); } else { - final_vec.push(py_to_rust(&ok_seq_elem)?); + final_vec.push(from_python_typed(&ok_seq_elem, type_)?); continue; } } @@ -481,7 +611,7 @@ fn convert_py_to_rust_coord_values(parameters: Vec>) -> PSQLPyResult coord_values_vec.push(f64::from(pyint)), PythonDTO::PyIntI32(pyint) => coord_values_vec.push(f64::from(pyint)), @@ -658,35 +788,3 @@ fn py_sequence_to_rust(bind_parameters: &Bound) -> PSQLPyResult>, RustPSQLDriverError>(coord_values_sequence_vec) } - -fn parse_kwargs_qs(querystring: &str) -> (String, Vec) { - let re = regex::Regex::new(KWARGS_PARAMS_REGEXP).unwrap(); - - { - let kq_read = KWARGS_QUERYSTRINGS.read().unwrap(); - let qs = kq_read.get(querystring); - - if let Some(qs) = qs { - return qs.clone(); - } - }; - - let mut counter = 0; - let mut sequence = Vec::new(); - - let result = re.replace_all(querystring, |caps: ®ex::Captures| { - let account_id = caps[1].to_string(); - - sequence.push(account_id.clone()); - counter += 1; - - format!("${}", &counter) - }); - - let mut kq_write = KWARGS_QUERYSTRINGS.write().unwrap(); - kq_write.insert( - querystring.to_string(), - (result.clone().into(), sequence.clone()), - ); - (result.into(), sequence) -} diff --git a/src/value_converter/models/serde_value.rs b/src/value_converter/models/serde_value.rs index 71239c2b..392e3fd0 100644 --- a/src/value_converter/models/serde_value.rs +++ 
b/src/value_converter/models/serde_value.rs @@ -11,7 +11,8 @@ use tokio_postgres::types::Type; use crate::{ exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, value_converter::{ - dto::enums::PythonDTO, from_python::py_to_rust, to_python::build_python_from_serde_value, + dto::enums::PythonDTO, from_python::from_python_untyped, + to_python::build_python_from_serde_value, }, }; @@ -60,7 +61,7 @@ fn serde_value_from_list(gil: Python<'_>, bind_value: &Bound<'_, PyAny>) -> PSQL for inner in params { let inner_bind = inner.bind(gil); if inner_bind.is_instance_of::() { - let python_dto = py_to_rust(inner_bind)?; + let python_dto = from_python_untyped(inner_bind)?; result_vec.push(python_dto.to_serde_value()?); } else if inner_bind.is_instance_of::() { let serde_value = build_serde_value(inner.bind(gil))?; @@ -91,7 +92,7 @@ fn serde_value_from_dict(bind_value: &Bound<'_, PyAny>) -> PSQLPyResult { })?; let key = py_list.get_item(0)?.extract::()?; - let value = py_to_rust(&py_list.get_item(1)?)?; + let value = from_python_untyped(&py_list.get_item(1)?)?; serde_map.insert(key, value.to_serde_value()?); } diff --git a/src/value_converter/params_converters.rs b/src/value_converter/params_converters.rs deleted file mode 100644 index e69de29b..00000000 diff --git a/src/value_converter/to_python.rs b/src/value_converter/to_python.rs index 5dbfd7ce..b3bf2af5 100644 --- a/src/value_converter/to_python.rs +++ b/src/value_converter/to_python.rs @@ -73,38 +73,6 @@ pub fn build_python_from_serde_value(py: Python<'_>, value: Value) -> PSQLPyResu } } -fn parse_kwargs_qs(querystring: &str) -> (String, Vec) { - let re = regex::Regex::new(r"\$\(([^)]+)\)p").unwrap(); - - { - let kq_read = KWARGS_QUERYSTRINGS.read().unwrap(); - let qs = kq_read.get(querystring); - - if let Some(qs) = qs { - return qs.clone(); - } - }; - - let mut counter = 0; - let mut sequence = Vec::new(); - - let result = re.replace_all(querystring, |caps: ®ex::Captures| { - let account_id = caps[1].to_string(); - - sequence.push(account_id.clone()); - counter += 1; - - format!("${}", &counter) - }); - - let mut kq_write = KWARGS_QUERYSTRINGS.write().unwrap(); - kq_write.insert( - querystring.to_string(), - (result.clone().into(), sequence.clone()), - ); - (result.into(), sequence) -} - fn composite_field_postgres_to_py<'a, T: FromSql<'a>>( type_: &Type, buf: &mut &'a [u8], @@ -668,41 +636,3 @@ pub fn postgres_to_py( } Ok(py.None()) } - -/// Convert Python sequence to Rust vector. -/// Also it checks that sequence has set/list/tuple type. -/// -/// # Errors -/// -/// May return error if cannot convert Python type into Rust one. -/// May return error if parameters type isn't correct. 
-fn py_sequence_to_rust(bind_parameters: &Bound) -> PSQLPyResult>> { - let mut coord_values_sequence_vec: Vec> = vec![]; - - if bind_parameters.is_instance_of::() { - let bind_pyset_parameters = bind_parameters.downcast::().unwrap(); - - for one_parameter in bind_pyset_parameters { - let extracted_parameter = one_parameter.extract::>().map_err(|_| { - RustPSQLDriverError::PyToRustValueConversionError( - format!("Error on sequence type extraction, please use correct list/tuple/set, {bind_parameters}") - ) - })?; - coord_values_sequence_vec.push(extracted_parameter); - } - } else if bind_parameters.is_instance_of::() - | bind_parameters.is_instance_of::() - { - coord_values_sequence_vec = bind_parameters.extract::>>().map_err(|_| { - RustPSQLDriverError::PyToRustValueConversionError( - format!("Error on sequence type extraction, please use correct list/tuple/set, {bind_parameters}") - ) - })?; - } else { - return Err(RustPSQLDriverError::PyToRustValueConversionError(format!( - "Invalid sequence type, please use list/tuple/set, {bind_parameters}" - ))); - }; - - Ok::>, RustPSQLDriverError>(coord_values_sequence_vec) -} diff --git a/src/value_converter/traits.rs b/src/value_converter/traits.rs index 261ee16d..d9d3512e 100644 --- a/src/value_converter/traits.rs +++ b/src/value_converter/traits.rs @@ -1,3 +1,4 @@ +use postgres_types::Type; use pyo3::PyAny; use crate::exceptions::rust_errors::PSQLPyResult; @@ -7,3 +8,10 @@ use super::dto::enums::PythonDTO; pub trait ToPythonDTO { fn to_python_dto(python_param: &pyo3::Bound<'_, PyAny>) -> PSQLPyResult; } + +pub trait ToPythonDTOArray { + fn to_python_dto( + python_param: &pyo3::Bound<'_, PyAny>, + array_type_: Type, + ) -> PSQLPyResult; +} From 602f00e727b2ef2cb272cff9eb75b0126ca89ffc Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Sun, 4 May 2025 18:44:55 +0200 Subject: [PATCH 28/65] Full value converter refactor --- python/tests/test_value_converter.py | 30 +++- src/driver/connection.rs | 23 ++- src/driver/connection_pool.rs | 38 ++++- src/driver/connection_pool_builder.rs | 12 ++ src/driver/inner_connection.rs | 158 ++++++++++++--------- src/driver/listener/core.rs | 10 +- src/statement/cache.rs | 2 +- src/statement/mod.rs | 1 - src/statement/query.rs | 2 +- src/statement/statement.rs | 20 ++- src/statement/statement_builder.rs | 71 +++++---- src/statement/traits.rs | 8 -- src/statement/utils.rs | 7 + src/value_converter/dto/converter_impls.rs | 1 - src/value_converter/dto/impls.rs | 21 --- src/value_converter/from_python.rs | 1 - src/value_converter/models/decimal.rs | 2 +- src/value_converter/to_python.rs | 38 +++++ 18 files changed, 301 insertions(+), 144 deletions(-) delete mode 100644 src/statement/traits.rs diff --git a/python/tests/test_value_converter.py b/python/tests/test_value_converter.py index c35baec1..b0ec5c8d 100644 --- a/python/tests/test_value_converter.py +++ b/python/tests/test_value_converter.py @@ -139,13 +139,18 @@ async def test_as_class( ), ("BOOL", True, True), ("INT2", SmallInt(12), 12), + ("INT2", 12, 12), ("INT4", Integer(121231231), 121231231), + ("INT4", 121231231, 121231231), ("INT8", BigInt(99999999999999999), 99999999999999999), + ("INT8", 99999999999999999, 99999999999999999), ("MONEY", Money(99999999999999999), 99999999999999999), + ("MONEY", 99999999999999999, 99999999999999999), ("NUMERIC(5, 2)", Decimal("120.12"), Decimal("120.12")), - ("FLOAT8", 32.12329864501953, 32.12329864501953), ("FLOAT4", Float32(32.12329864501953), 32.12329864501953), + ("FLOAT4", 32.12329864501953, 
32.12329864501953), ("FLOAT8", Float64(32.12329864501953), 32.12329864501953), + ("FLOAT8", 32.12329864501953, 32.12329864501953), ("DATE", now_datetime.date(), now_datetime.date()), ("TIME", now_datetime.time(), now_datetime.time()), ("TIMESTAMP", now_datetime, now_datetime), @@ -426,6 +431,29 @@ async def test_as_class( [[{"array": "json"}], [{"one more": "test"}]], ], ), + ( + "JSON ARRAY", + [ + { + "test": ["something", 123, "here"], + "nested": ["JSON"], + }, + { + "test": ["something", 123, "here"], + "nested": ["JSON"], + }, + ], + [ + { + "test": ["something", 123, "here"], + "nested": ["JSON"], + }, + { + "test": ["something", 123, "here"], + "nested": ["JSON"], + }, + ], + ), ( "JSON ARRAY", [ diff --git a/src/driver/connection.rs b/src/driver/connection.rs index 8f2a4b40..469ece0b 100644 --- a/src/driver/connection.rs +++ b/src/driver/connection.rs @@ -25,6 +25,7 @@ pub struct Connection { db_client: Option>, db_pool: Option, pg_config: Arc, + prepare: bool, } impl Connection { @@ -33,11 +34,13 @@ impl Connection { db_client: Option>, db_pool: Option, pg_config: Arc, + prepare: bool, ) -> Self { Connection { db_client, db_pool, pg_config, + prepare, } } @@ -54,7 +57,7 @@ impl Connection { impl Default for Connection { fn default() -> Self { - Connection::new(None, None, Arc::new(Config::default())) + Connection::new(None, None, Arc::new(Config::default()), true) } } @@ -138,11 +141,16 @@ impl Connection { } async fn __aenter__<'a>(self_: Py) -> PSQLPyResult> { - let (db_client, db_pool) = pyo3::Python::with_gil(|gil| { + let (db_client, db_pool, prepare) = pyo3::Python::with_gil(|gil| { let self_ = self_.borrow(gil); - (self_.db_client.clone(), self_.db_pool.clone()) + ( + self_.db_client.clone(), + self_.db_pool.clone(), + self_.prepare, + ) }); + let db_pool_2 = db_pool.clone(); if db_client.is_some() { return Ok(self_); } @@ -155,7 +163,11 @@ impl Connection { .await??; pyo3::Python::with_gil(|gil| { let mut self_ = self_.borrow_mut(gil); - self_.db_client = Some(Arc::new(PsqlpyConnection::PoolConn(db_connection))); + self_.db_client = Some(Arc::new(PsqlpyConnection::PoolConn( + db_connection, + db_pool_2.unwrap(), + prepare, + ))); }); return Ok(self_); } @@ -209,7 +221,8 @@ impl Connection { let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).db_client.clone()); if let Some(db_client) = db_client { - return db_client.execute(querystring, parameters, prepared).await; + let res = db_client.execute(querystring, parameters, prepared).await; + return res; } Err(RustPSQLDriverError::ConnectionClosedError) diff --git a/src/driver/connection_pool.rs b/src/driver/connection_pool.rs index 0c52c256..1ef2d8f9 100644 --- a/src/driver/connection_pool.rs +++ b/src/driver/connection_pool.rs @@ -1,5 +1,6 @@ use crate::runtime::tokio_runtime; use deadpool_postgres::{Manager, ManagerConfig, Pool, RecyclingMethod}; +use postgres_types::Type; use pyo3::{pyclass, pyfunction, pymethods, Py, PyAny}; use std::sync::Arc; use tokio_postgres::Config; @@ -46,6 +47,7 @@ use super::{ ca_file=None, max_db_pool_size=None, conn_recycling_method=None, + prepare=None, ))] #[allow(clippy::too_many_arguments)] pub fn connect( @@ -75,6 +77,7 @@ pub fn connect( ca_file: Option, max_db_pool_size: Option, conn_recycling_method: Option, + prepare: Option, ) -> PSQLPyResult { if let Some(max_db_pool_size) = max_db_pool_size { if max_db_pool_size < 2 { @@ -139,6 +142,7 @@ pub fn connect( pg_config: Arc::new(pg_config), ca_file: ca_file, ssl_mode: ssl_mode, + prepare: prepare.unwrap_or(true), }) } @@ -207,6 
+211,7 @@ pub struct ConnectionPool { pg_config: Arc, ca_file: Option, ssl_mode: Option, + prepare: bool, } impl ConnectionPool { @@ -216,14 +221,20 @@ impl ConnectionPool { pg_config: Config, ca_file: Option, ssl_mode: Option, + prepare: Option, ) -> Self { ConnectionPool { pool: pool, pg_config: Arc::new(pg_config), ca_file: ca_file, ssl_mode: ssl_mode, + prepare: prepare.unwrap_or(true), } } + + pub fn remove_prepared_stmt(&mut self, query: &str, types: &[Type]) { + self.pool.manager().statement_caches.remove(query, types); + } } #[pymethods] @@ -260,6 +271,7 @@ impl ConnectionPool { conn_recycling_method=None, ssl_mode=None, ca_file=None, + prepare=None, ))] #[allow(clippy::too_many_arguments)] pub fn new( @@ -289,6 +301,7 @@ impl ConnectionPool { conn_recycling_method: Option, ssl_mode: Option, ca_file: Option, + prepare: Option, ) -> PSQLPyResult { connect( dsn, @@ -317,6 +330,7 @@ impl ConnectionPool { ca_file, max_db_pool_size, conn_recycling_method, + prepare, ) } @@ -360,22 +374,28 @@ impl ConnectionPool { #[must_use] pub fn acquire(&self) -> Connection { - Connection::new(None, Some(self.pool.clone()), self.pg_config.clone()) + Connection::new( + None, + Some(self.pool.clone()), + self.pg_config.clone(), + self.prepare, + ) } #[must_use] #[allow(clippy::needless_pass_by_value)] pub fn listener(self_: pyo3::Py) -> Listener { - let (pg_config, ca_file, ssl_mode) = pyo3::Python::with_gil(|gil| { + let (pg_config, ca_file, ssl_mode, prepare) = pyo3::Python::with_gil(|gil| { let b_gil = self_.borrow(gil); ( b_gil.pg_config.clone(), b_gil.ca_file.clone(), b_gil.ssl_mode, + b_gil.prepare, ) }); - Listener::new(pg_config, ca_file, ssl_mode) + Listener::new(pg_config, ca_file, ssl_mode, prepare) } /// Return new single connection. @@ -383,10 +403,11 @@ impl ConnectionPool { /// # Errors /// May return Err Result if cannot get new connection from the pool. pub async fn connection(self_: pyo3::Py) -> PSQLPyResult { - let (db_pool, pg_config) = pyo3::Python::with_gil(|gil| { + let (db_pool, pg_config, prepare) = pyo3::Python::with_gil(|gil| { let slf = self_.borrow(gil); - (slf.pool.clone(), slf.pg_config.clone()) + (slf.pool.clone(), slf.pg_config.clone(), slf.prepare) }); + let db_pool_2 = db_pool.clone(); let db_connection = tokio_runtime() .spawn(async move { Ok::(db_pool.get().await?) @@ -394,9 +415,14 @@ impl ConnectionPool { .await??; Ok(Connection::new( - Some(Arc::new(PsqlpyConnection::PoolConn(db_connection))), + Some(Arc::new(PsqlpyConnection::PoolConn( + db_connection, + db_pool_2.clone(), + prepare, + ))), None, pg_config, + prepare, )) } diff --git a/src/driver/connection_pool_builder.rs b/src/driver/connection_pool_builder.rs index 42cdd641..ea311642 100644 --- a/src/driver/connection_pool_builder.rs +++ b/src/driver/connection_pool_builder.rs @@ -18,6 +18,7 @@ pub struct ConnectionPoolBuilder { conn_recycling_method: Option, ca_file: Option, ssl_mode: Option, + prepare: Option, } #[pymethods] @@ -31,6 +32,7 @@ impl ConnectionPoolBuilder { conn_recycling_method: None, ca_file: None, ssl_mode: None, + prepare: None, } } @@ -68,6 +70,7 @@ impl ConnectionPoolBuilder { self.config.clone(), self.ca_file.clone(), self.ssl_mode, + self.prepare, )) } @@ -80,6 +83,15 @@ impl ConnectionPoolBuilder { self_ } + /// Set ca_file for ssl_mode in PostgreSQL. + fn prepare(self_: Py, prepare: bool) -> Py { + Python::with_gil(|gil| { + let mut self_ = self_.borrow_mut(gil); + self_.prepare = Some(prepare); + }); + self_ + } + /// Set size to the connection pool. 
/// /// # Error diff --git a/src/driver/inner_connection.rs b/src/driver/inner_connection.rs index a7e9d233..c463be64 100644 --- a/src/driver/inner_connection.rs +++ b/src/driver/inner_connection.rs @@ -1,5 +1,5 @@ use bytes::Buf; -use deadpool_postgres::Object; +use deadpool_postgres::{Object, Pool}; use postgres_types::{ToSql, Type}; use pyo3::{Py, PyAny, Python}; use std::vec; @@ -14,7 +14,7 @@ use crate::{ #[allow(clippy::module_name_repetitions)] pub enum PsqlpyConnection { - PoolConn(Object), + PoolConn(Object, Pool, bool), SingleConn(Client), } @@ -23,9 +23,18 @@ impl PsqlpyConnection { /// /// # Errors /// May return Err if cannot prepare statement. - pub async fn prepare(&self, query: &str) -> PSQLPyResult { + pub async fn prepare(&self, query: &str, prepared: bool) -> PSQLPyResult { match self { - PsqlpyConnection::PoolConn(pconn) => return Ok(pconn.prepare_cached(query).await?), + PsqlpyConnection::PoolConn(pconn, _, _) => { + if prepared { + return Ok(pconn.prepare_cached(query).await?); + } else { + println!("999999"); + let prepared = pconn.prepare(query).await?; + self.drop_prepared(&prepared).await?; + return Ok(prepared); + } + } PsqlpyConnection::SingleConn(sconn) => return Ok(sconn.prepare(query).await?), } } @@ -35,28 +44,18 @@ impl PsqlpyConnection { /// # Errors /// May return Err if cannot prepare statement. pub async fn drop_prepared(&self, stmt: &Statement) -> PSQLPyResult<()> { - let query = format!("DEALLOCATE PREPARE {}", stmt.name()); + let deallocate_query = format!("DEALLOCATE PREPARE {}", stmt.name()); match self { - PsqlpyConnection::PoolConn(pconn) => return Ok(pconn.batch_execute(&query).await?), - PsqlpyConnection::SingleConn(sconn) => return Ok(sconn.batch_execute(&query).await?), + PsqlpyConnection::PoolConn(pconn, _, _) => { + return Ok(pconn.batch_execute(&deallocate_query).await?) + } + PsqlpyConnection::SingleConn(sconn) => { + return Ok(sconn.batch_execute(&deallocate_query).await?) + } } } - /// Prepare and delete statement. - /// - /// # Errors - /// Can return Err if cannot prepare statement. - pub async fn prepare_then_drop(&self, query: &str) -> PSQLPyResult> { - let types: Vec; - - let stmt = self.prepare(query).await?; - types = stmt.params().to_vec(); - self.drop_prepared(&stmt).await?; - - Ok(types) - } - - /// Prepare cached statement. + /// Execute statement with parameters. /// /// # Errors /// May return Err if cannot execute statement. @@ -69,20 +68,43 @@ impl PsqlpyConnection { T: ?Sized + ToStatement, { match self { - PsqlpyConnection::PoolConn(pconn) => return Ok(pconn.query(statement, params).await?), + PsqlpyConnection::PoolConn(pconn, _, _) => { + return Ok(pconn.query(statement, params).await?) + } PsqlpyConnection::SingleConn(sconn) => { return Ok(sconn.query(statement, params).await?) } } } - /// Prepare cached statement. + /// Execute statement with parameters. + /// + /// # Errors + /// May return Err if cannot execute statement. + pub async fn query_typed( + &self, + statement: &str, + params: &[(&(dyn ToSql + Sync), Type)], + ) -> PSQLPyResult> { + match self { + PsqlpyConnection::PoolConn(pconn, _, _) => { + return Ok(pconn.query_typed(statement, params).await?) + } + PsqlpyConnection::SingleConn(sconn) => { + return Ok(sconn.query_typed(statement, params).await?) + } + } + } + + /// Batch execute statement. /// /// # Errors /// May return Err if cannot execute statement. 
pub async fn batch_execute(&self, query: &str) -> PSQLPyResult<()> { match self { - PsqlpyConnection::PoolConn(pconn) => return Ok(pconn.batch_execute(query).await?), + PsqlpyConnection::PoolConn(pconn, _, _) => { + return Ok(pconn.batch_execute(query).await?) + } PsqlpyConnection::SingleConn(sconn) => return Ok(sconn.batch_execute(query).await?), } } @@ -90,6 +112,21 @@ impl PsqlpyConnection { /// Prepare cached statement. /// /// # Errors + /// May return Err if cannot execute copy data. + pub async fn copy_in(&self, statement: &T) -> PSQLPyResult> + where + T: ?Sized + ToStatement, + U: Buf + 'static + Send, + { + match self { + PsqlpyConnection::PoolConn(pconn, _, _) => return Ok(pconn.copy_in(statement).await?), + PsqlpyConnection::SingleConn(sconn) => return Ok(sconn.copy_in(statement).await?), + } + } + + /// Executes a statement which returns a single row, returning it. + /// + /// # Errors /// May return Err if cannot execute statement. pub async fn query_one( &self, @@ -100,7 +137,7 @@ impl PsqlpyConnection { T: ?Sized + ToStatement, { match self { - PsqlpyConnection::PoolConn(pconn) => { + PsqlpyConnection::PoolConn(pconn, _, _) => { return Ok(pconn.query_one(statement, params).await?) } PsqlpyConnection::SingleConn(sconn) => { @@ -123,17 +160,20 @@ impl PsqlpyConnection { let result = if prepared { self.query( - &self.prepare(&statement.sql_stmt()).await.map_err(|err| { - RustPSQLDriverError::ConnectionExecuteError(format!( - "Cannot prepare statement, error - {err}" - )) - })?, + &self + .prepare(&statement.raw_query(), true) + .await + .map_err(|err| { + RustPSQLDriverError::ConnectionExecuteError(format!( + "Cannot prepare statement, error - {err}" + )) + })?, &statement.params(), ) .await .map_err(|err| RustPSQLDriverError::ConnectionExecuteError(format!("{err}")))? } else { - self.query(statement.sql_stmt(), &statement.params()) + self.query(statement.raw_query(), &statement.params()) .await .map_err(|err| RustPSQLDriverError::ConnectionExecuteError(format!("{err}")))? }; @@ -153,21 +193,19 @@ impl PsqlpyConnection { let prepared = prepared.unwrap_or(true); - let result = if prepared { - self.query( - &self.prepare(statement.sql_stmt()).await.map_err(|err| { + let result = match prepared { + true => self + .query(statement.statement_query()?, &statement.params()) + .await + .map_err(|err| { RustPSQLDriverError::ConnectionExecuteError(format!( "Cannot prepare statement, error - {err}" )) })?, - &statement.params(), - ) - .await - .map_err(|err| RustPSQLDriverError::ConnectionExecuteError(format!("{err}")))? - } else { - self.query(statement.sql_stmt(), &statement.params()) + false => self + .query_typed(statement.raw_query(), &statement.params_typed()) .await - .map_err(|err| RustPSQLDriverError::ConnectionExecuteError(format!("{err}")))? 
+ .map_err(|err| RustPSQLDriverError::ConnectionExecuteError(format!("{err}")))?, }; Ok(PSQLDriverPyQueryResult::new(result)) @@ -196,19 +234,19 @@ impl PsqlpyConnection { for statement in statements { let querystring_result = if prepared { - let prepared_stmt = &self.prepare(&statement.sql_stmt()).await; + let prepared_stmt = &self.prepare(&statement.raw_query(), true).await; if let Err(error) = prepared_stmt { return Err(RustPSQLDriverError::ConnectionExecuteError(format!( "Cannot prepare statement in execute_many, operation rolled back {error}", ))); } self.query( - &self.prepare(&statement.sql_stmt()).await?, + &self.prepare(&statement.raw_query(), true).await?, &statement.params(), ) .await } else { - self.query(statement.sql_stmt(), &statement.params()).await + self.query(statement.raw_query(), &statement.params()).await }; if let Err(error) = querystring_result { @@ -235,17 +273,20 @@ impl PsqlpyConnection { let result = if prepared { self.query_one( - &self.prepare(&statement.sql_stmt()).await.map_err(|err| { - RustPSQLDriverError::ConnectionExecuteError(format!( - "Cannot prepare statement, error - {err}" - )) - })?, + &self + .prepare(&statement.raw_query(), true) + .await + .map_err(|err| { + RustPSQLDriverError::ConnectionExecuteError(format!( + "Cannot prepare statement, error - {err}" + )) + })?, &statement.params(), ) .await .map_err(|err| RustPSQLDriverError::ConnectionExecuteError(format!("{err}")))? } else { - self.query_one(statement.sql_stmt(), &statement.params()) + self.query_one(statement.raw_query(), &statement.params()) .await .map_err(|err| RustPSQLDriverError::ConnectionExecuteError(format!("{err}")))? }; @@ -281,19 +322,4 @@ impl PsqlpyConnection { None => Ok(gil.None()), }); } - - /// Prepare cached statement. - /// - /// # Errors - /// May return Err if cannot execute copy data. 
- pub async fn copy_in(&self, statement: &T) -> PSQLPyResult> - where - T: ?Sized + ToStatement, - U: Buf + 'static + Send, - { - match self { - PsqlpyConnection::PoolConn(pconn) => return Ok(pconn.copy_in(statement).await?), - PsqlpyConnection::SingleConn(sconn) => return Ok(sconn.copy_in(statement).await?), - } - } } diff --git a/src/driver/listener/core.rs b/src/driver/listener/core.rs index 16b323d8..4a9580af 100644 --- a/src/driver/listener/core.rs +++ b/src/driver/listener/core.rs @@ -42,14 +42,19 @@ pub struct Listener { impl Listener { #[must_use] - pub fn new(pg_config: Arc, ca_file: Option, ssl_mode: Option) -> Self { + pub fn new( + pg_config: Arc, + ca_file: Option, + ssl_mode: Option, + prepare: bool, + ) -> Self { Listener { pg_config: pg_config.clone(), ca_file, ssl_mode, channel_callbacks: Arc::default(), listen_abort_handler: Option::default(), - connection: Connection::new(None, None, pg_config.clone()), + connection: Connection::new(None, None, pg_config.clone(), prepare), receiver: Option::default(), listen_query: Arc::default(), is_listened: Arc::new(RwLock::new(false)), @@ -222,6 +227,7 @@ impl Listener { Some(Arc::new(PsqlpyConnection::SingleConn(client))), None, self.pg_config.clone(), + false, ); self.is_started = true; diff --git a/src/statement/cache.rs b/src/statement/cache.rs index a6fbc131..7d78898d 100644 --- a/src/statement/cache.rs +++ b/src/statement/cache.rs @@ -5,7 +5,7 @@ use postgres_types::Type; use tokio::sync::RwLock; use tokio_postgres::Statement; -use super::{query::QueryString, traits::hash_str}; +use super::{query::QueryString, utils::hash_str}; #[derive(Default)] pub(crate) struct StatementsCache(HashMap); diff --git a/src/statement/mod.rs b/src/statement/mod.rs index e027eaea..c894b9a8 100644 --- a/src/statement/mod.rs +++ b/src/statement/mod.rs @@ -3,5 +3,4 @@ pub mod parameters; pub mod query; pub mod statement; pub mod statement_builder; -pub mod traits; pub mod utils; diff --git a/src/statement/query.rs b/src/statement/query.rs index 7f87cede..2b08aa62 100644 --- a/src/statement/query.rs +++ b/src/statement/query.rs @@ -4,7 +4,7 @@ use regex::Regex; use crate::value_converter::consts::KWARGS_PARAMS_REGEXP; -use super::traits::hash_str; +use super::utils::hash_str; #[derive(Clone)] pub struct QueryString { diff --git a/src/statement/statement.rs b/src/statement/statement.rs index 4cfdc09c..a93d9cd5 100644 --- a/src/statement/statement.rs +++ b/src/statement/statement.rs @@ -1,4 +1,7 @@ use postgres_types::{ToSql, Type}; +use tokio_postgres::Statement; + +use crate::exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}; use super::{parameters::PreparedParameters, query::QueryString}; @@ -6,20 +9,33 @@ use super::{parameters::PreparedParameters, query::QueryString}; pub struct PsqlpyStatement { query: QueryString, prepared_parameters: PreparedParameters, + prepared_statement: Option, } impl PsqlpyStatement { - pub(crate) fn new(query: QueryString, prepared_parameters: PreparedParameters) -> Self { + pub(crate) fn new( + query: QueryString, + prepared_parameters: PreparedParameters, + prepared_statement: Option, + ) -> Self { Self { query, prepared_parameters, + prepared_statement, } } - pub fn sql_stmt(&self) -> &str { + pub fn raw_query(&self) -> &str { self.query.query() } + pub fn statement_query(&self) -> PSQLPyResult<&Statement> { + match &self.prepared_statement { + Some(prepared_stmt) => return Ok(prepared_stmt), + None => return Err(RustPSQLDriverError::ConnectionExecuteError("No".into())), + } + } + pub fn params(&self) -> 
Box<[&(dyn ToSql + Sync)]> { self.prepared_parameters.params() } diff --git a/src/statement/statement_builder.rs b/src/statement/statement_builder.rs index 07e003da..863ba400 100644 --- a/src/statement/statement_builder.rs +++ b/src/statement/statement_builder.rs @@ -1,10 +1,11 @@ use pyo3::PyObject; +use tokio::sync::RwLockWriteGuard; use tokio_postgres::Statement; use crate::{driver::inner_connection::PsqlpyConnection, exceptions::rust_errors::PSQLPyResult}; use super::{ - cache::{StatementCacheInfo, STMTS_CACHE}, + cache::{StatementCacheInfo, StatementsCache, STMTS_CACHE}, parameters::ParametersBuilder, query::QueryString, statement::PsqlpyStatement, @@ -33,14 +34,17 @@ impl<'a> StatementBuilder<'a> { } pub async fn build(self) -> PSQLPyResult { - { - let stmt_cache_guard = STMTS_CACHE.read().await; - if let Some(cached) = stmt_cache_guard.get_cache(&self.querystring) { - return self.build_with_cached(cached); + if !self.prepared { + { + let stmt_cache_guard = STMTS_CACHE.read().await; + if let Some(cached) = stmt_cache_guard.get_cache(&self.querystring) { + return self.build_with_cached(cached); + } } } - self.build_no_cached().await + let stmt_cache_guard = STMTS_CACHE.write().await; + self.build_no_cached(stmt_cache_guard).await } fn build_with_cached(self, cached: StatementCacheInfo) -> PSQLPyResult { @@ -54,21 +58,24 @@ impl<'a> StatementBuilder<'a> { let prepared_parameters = raw_parameters.prepare(parameters_names)?; - return Ok(PsqlpyStatement::new(cached.query, prepared_parameters)); + return Ok(PsqlpyStatement::new( + cached.query, + prepared_parameters, + None, + )); } - async fn build_no_cached(self) -> PSQLPyResult { + async fn build_no_cached( + self, + cache_guard: RwLockWriteGuard<'_, StatementsCache>, + ) -> PSQLPyResult { let mut querystring = QueryString::new(&self.querystring); querystring.process_qs(); - let prepared_stmt = self.prepare_query(&querystring).await?; + let prepared_stmt = self.prepare_query(&querystring, self.prepared).await?; let parameters_builder = ParametersBuilder::new(&self.parameters, Some(prepared_stmt.params().to_vec())); - if !self.prepared { - Self::drop_prepared(self.inner_conn, &prepared_stmt).await?; - } - let parameters_names = if let Some(converted_qs) = &querystring.converted_qs { Some(converted_qs.params_names().clone()) } else { @@ -77,24 +84,34 @@ impl<'a> StatementBuilder<'a> { let prepared_parameters = parameters_builder.prepare(parameters_names)?; - { - self.write_to_cache(&querystring, &prepared_stmt).await; + match self.prepared { + true => { + return Ok(PsqlpyStatement::new( + querystring, + prepared_parameters, + Some(prepared_stmt), + )) + } + false => { + { + self.write_to_cache(cache_guard, &querystring, &prepared_stmt) + .await; + } + return Ok(PsqlpyStatement::new(querystring, prepared_parameters, None)); + } } - let statement = PsqlpyStatement::new(querystring, prepared_parameters); - - return Ok(statement); - } - - async fn write_to_cache(&self, query: &QueryString, inner_stmt: &Statement) { - let mut stmt_cache_guard = STMTS_CACHE.write().await; - stmt_cache_guard.add_cache(query, inner_stmt); } - async fn prepare_query(&self, query: &QueryString) -> PSQLPyResult { - self.inner_conn.prepare(query.query()).await + async fn write_to_cache( + &self, + mut cache_guard: RwLockWriteGuard<'_, StatementsCache>, + query: &QueryString, + inner_stmt: &Statement, + ) { + cache_guard.add_cache(query, inner_stmt); } - async fn drop_prepared(inner_conn: &PsqlpyConnection, stmt: &Statement) -> PSQLPyResult<()> { - 
inner_conn.drop_prepared(stmt).await + async fn prepare_query(&self, query: &QueryString, prepared: bool) -> PSQLPyResult { + self.inner_conn.prepare(query.query(), prepared).await } } diff --git a/src/statement/traits.rs b/src/statement/traits.rs deleted file mode 100644 index a79f8bdd..00000000 --- a/src/statement/traits.rs +++ /dev/null @@ -1,8 +0,0 @@ -use std::hash::{DefaultHasher, Hash, Hasher}; - -pub(crate) fn hash_str(string: &String) -> u64 { - let mut hasher = DefaultHasher::new(); - string.hash(&mut hasher); - - hasher.finish() -} diff --git a/src/statement/utils.rs b/src/statement/utils.rs index 8b137891..a79f8bdd 100644 --- a/src/statement/utils.rs +++ b/src/statement/utils.rs @@ -1 +1,8 @@ +use std::hash::{DefaultHasher, Hash, Hasher}; +pub(crate) fn hash_str(string: &String) -> u64 { + let mut hasher = DefaultHasher::new(); + string.hash(&mut hasher); + + hasher.finish() +} diff --git a/src/value_converter/dto/converter_impls.rs b/src/value_converter/dto/converter_impls.rs index 1e6fa7be..f50529bc 100644 --- a/src/value_converter/dto/converter_impls.rs +++ b/src/value_converter/dto/converter_impls.rs @@ -8,7 +8,6 @@ use pyo3::{ Bound, PyAny, }; use rust_decimal::Decimal; -use serde::de::IntoDeserializer; use uuid::Uuid; use crate::{ diff --git a/src/value_converter/dto/impls.rs b/src/value_converter/dto/impls.rs index bd48ddb3..3450dfd0 100644 --- a/src/value_converter/dto/impls.rs +++ b/src/value_converter/dto/impls.rs @@ -244,30 +244,9 @@ impl ToSql for PythonDTO { } PythonDTO::PyList(py_iterable, type_) | PythonDTO::PyTuple(py_iterable, type_) => { return py_iterable.to_sql(type_, out); - // let mut items = Vec::new(); - // for inner in py_iterable { - // items.push(inner); - // } - // if items.is_empty() { - // return_is_null_true = true; - // } else { - // items.to_sql(&items[0].array_type()?, out)?; - // } } PythonDTO::PyArray(array, type_) => { return array.to_sql(type_, out); - // if let Some(first_elem) = array.iter().nth(0) { - // match first_elem.array_type() { - // Ok(ok_type) => { - // array.to_sql(&ok_type, out)?; - // } - // Err(_) => { - // return Err(RustPSQLDriverError::PyToRustValueConversionError( - // "Cannot define array type.".into(), - // ))? 
- // } - // } - // } } PythonDTO::PyJsonb(py_dict) | PythonDTO::PyJson(py_dict) => { <&Value as ToSql>::to_sql(&py_dict, ty, out)?; diff --git a/src/value_converter/from_python.rs b/src/value_converter/from_python.rs index 57307f29..fa1d5c60 100644 --- a/src/value_converter/from_python.rs +++ b/src/value_converter/from_python.rs @@ -195,7 +195,6 @@ pub fn from_python_typed( parameter: &pyo3::Bound<'_, PyAny>, type_: &Type, ) -> PSQLPyResult { - println!("{:?} {:?}", type_, parameter); if parameter.is_instance_of::() { return ::to_python_dto(parameter); } diff --git a/src/value_converter/models/decimal.rs b/src/value_converter/models/decimal.rs index 13d009cc..44a898a1 100644 --- a/src/value_converter/models/decimal.rs +++ b/src/value_converter/models/decimal.rs @@ -1,5 +1,5 @@ use postgres_types::{FromSql, Type}; -use pyo3::{types::PyAnyMethods, PyObject, Python, ToPyObject}; +use pyo3::{types::PyAnyMethods, Bound, IntoPyObject, PyAny, PyObject, Python, ToPyObject}; use rust_decimal::Decimal; use crate::value_converter::consts::get_decimal_cls; diff --git a/src/value_converter/to_python.rs b/src/value_converter/to_python.rs index b3bf2af5..047cd4c4 100644 --- a/src/value_converter/to_python.rs +++ b/src/value_converter/to_python.rs @@ -636,3 +636,41 @@ pub fn postgres_to_py( } Ok(py.None()) } + +/// Convert Python sequence to Rust vector. +/// Also it checks that sequence has set/list/tuple type. +/// +/// # Errors +/// +/// May return error if cannot convert Python type into Rust one. +/// May return error if parameters type isn't correct. +fn py_sequence_to_rust(bind_parameters: &Bound) -> PSQLPyResult>> { + let mut coord_values_sequence_vec: Vec> = vec![]; + + if bind_parameters.is_instance_of::() { + let bind_pyset_parameters = bind_parameters.downcast::().unwrap(); + + for one_parameter in bind_pyset_parameters { + let extracted_parameter = one_parameter.extract::>().map_err(|_| { + RustPSQLDriverError::PyToRustValueConversionError( + format!("Error on sequence type extraction, please use correct list/tuple/set, {bind_parameters}") + ) + })?; + coord_values_sequence_vec.push(extracted_parameter); + } + } else if bind_parameters.is_instance_of::() + | bind_parameters.is_instance_of::() + { + coord_values_sequence_vec = bind_parameters.extract::>>().map_err(|_| { + RustPSQLDriverError::PyToRustValueConversionError( + format!("Error on sequence type extraction, please use correct list/tuple/set, {bind_parameters}") + ) + })?; + } else { + return Err(RustPSQLDriverError::PyToRustValueConversionError(format!( + "Invalid sequence type, please use list/tuple/set, {bind_parameters}" + ))); + }; + + Ok::>, RustPSQLDriverError>(coord_values_sequence_vec) +} From 19fe58d52ffa03761e01fd207b4a2790d9e7f607 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Sun, 4 May 2025 20:36:53 +0200 Subject: [PATCH 29/65] Full value converter refactor --- src/driver/connection.rs | 8 ++------ src/driver/connection_pool.rs | 7 +------ src/driver/inner_connection.rs | 32 ++++++++++++++++++------------ src/statement/query.rs | 4 ++-- src/statement/statement.rs | 2 +- src/statement/statement_builder.rs | 6 ++---- src/value_converter/to_python.rs | 1 - 7 files changed, 27 insertions(+), 33 deletions(-) diff --git a/src/driver/connection.rs b/src/driver/connection.rs index 469ece0b..d38b71f9 100644 --- a/src/driver/connection.rs +++ b/src/driver/connection.rs @@ -150,7 +150,6 @@ impl Connection { ) }); - let db_pool_2 = db_pool.clone(); if db_client.is_some() { return Ok(self_); } @@ -163,11 
+162,8 @@ impl Connection { .await??; pyo3::Python::with_gil(|gil| { let mut self_ = self_.borrow_mut(gil); - self_.db_client = Some(Arc::new(PsqlpyConnection::PoolConn( - db_connection, - db_pool_2.unwrap(), - prepare, - ))); + self_.db_client = + Some(Arc::new(PsqlpyConnection::PoolConn(db_connection, prepare))); }); return Ok(self_); } diff --git a/src/driver/connection_pool.rs b/src/driver/connection_pool.rs index 1ef2d8f9..16454de0 100644 --- a/src/driver/connection_pool.rs +++ b/src/driver/connection_pool.rs @@ -407,7 +407,6 @@ impl ConnectionPool { let slf = self_.borrow(gil); (slf.pool.clone(), slf.pg_config.clone(), slf.prepare) }); - let db_pool_2 = db_pool.clone(); let db_connection = tokio_runtime() .spawn(async move { Ok::(db_pool.get().await?) @@ -415,11 +414,7 @@ impl ConnectionPool { .await??; Ok(Connection::new( - Some(Arc::new(PsqlpyConnection::PoolConn( - db_connection, - db_pool_2.clone(), - prepare, - ))), + Some(Arc::new(PsqlpyConnection::PoolConn(db_connection, prepare))), None, pg_config, prepare, diff --git a/src/driver/inner_connection.rs b/src/driver/inner_connection.rs index c463be64..797c9749 100644 --- a/src/driver/inner_connection.rs +++ b/src/driver/inner_connection.rs @@ -14,7 +14,7 @@ use crate::{ #[allow(clippy::module_name_repetitions)] pub enum PsqlpyConnection { - PoolConn(Object, Pool, bool), + PoolConn(Object, bool), SingleConn(Client), } @@ -25,13 +25,14 @@ impl PsqlpyConnection { /// May return Err if cannot prepare statement. pub async fn prepare(&self, query: &str, prepared: bool) -> PSQLPyResult { match self { - PsqlpyConnection::PoolConn(pconn, _, _) => { + PsqlpyConnection::PoolConn(pconn, _) => { if prepared { return Ok(pconn.prepare_cached(query).await?); } else { - println!("999999"); + pconn.batch_execute("BEGIN").await?; let prepared = pconn.prepare(query).await?; self.drop_prepared(&prepared).await?; + pconn.batch_execute("COMMIT").await?; return Ok(prepared); } } @@ -46,8 +47,9 @@ impl PsqlpyConnection { pub async fn drop_prepared(&self, stmt: &Statement) -> PSQLPyResult<()> { let deallocate_query = format!("DEALLOCATE PREPARE {}", stmt.name()); match self { - PsqlpyConnection::PoolConn(pconn, _, _) => { - return Ok(pconn.batch_execute(&deallocate_query).await?) + PsqlpyConnection::PoolConn(pconn, _) => { + let res = Ok(pconn.batch_execute(&deallocate_query).await?); + res } PsqlpyConnection::SingleConn(sconn) => { return Ok(sconn.batch_execute(&deallocate_query).await?) @@ -68,7 +70,7 @@ impl PsqlpyConnection { T: ?Sized + ToStatement, { match self { - PsqlpyConnection::PoolConn(pconn, _, _) => { + PsqlpyConnection::PoolConn(pconn, _) => { return Ok(pconn.query(statement, params).await?) } PsqlpyConnection::SingleConn(sconn) => { @@ -87,7 +89,7 @@ impl PsqlpyConnection { params: &[(&(dyn ToSql + Sync), Type)], ) -> PSQLPyResult> { match self { - PsqlpyConnection::PoolConn(pconn, _, _) => { + PsqlpyConnection::PoolConn(pconn, _) => { return Ok(pconn.query_typed(statement, params).await?) } PsqlpyConnection::SingleConn(sconn) => { @@ -102,9 +104,7 @@ impl PsqlpyConnection { /// May return Err if cannot execute statement. pub async fn batch_execute(&self, query: &str) -> PSQLPyResult<()> { match self { - PsqlpyConnection::PoolConn(pconn, _, _) => { - return Ok(pconn.batch_execute(query).await?) 
- } + PsqlpyConnection::PoolConn(pconn, _) => return Ok(pconn.batch_execute(query).await?), PsqlpyConnection::SingleConn(sconn) => return Ok(sconn.batch_execute(query).await?), } } @@ -119,7 +119,7 @@ impl PsqlpyConnection { U: Buf + 'static + Send, { match self { - PsqlpyConnection::PoolConn(pconn, _, _) => return Ok(pconn.copy_in(statement).await?), + PsqlpyConnection::PoolConn(pconn, _) => return Ok(pconn.copy_in(statement).await?), PsqlpyConnection::SingleConn(sconn) => return Ok(sconn.copy_in(statement).await?), } } @@ -137,7 +137,7 @@ impl PsqlpyConnection { T: ?Sized + ToStatement, { match self { - PsqlpyConnection::PoolConn(pconn, _, _) => { + PsqlpyConnection::PoolConn(pconn, _) => { return Ok(pconn.query_one(statement, params).await?) } PsqlpyConnection::SingleConn(sconn) => { @@ -202,8 +202,14 @@ impl PsqlpyConnection { "Cannot prepare statement, error - {err}" )) })?, + // false => { + // self + // .query_typed(statement.raw_query(), &statement.params_typed()) + // .await + // .map_err(|err| RustPSQLDriverError::ConnectionExecuteError(format!("{err}")))? + // }, false => self - .query_typed(statement.raw_query(), &statement.params_typed()) + .query_typed("SELECT * FROM users", &[]) .await .map_err(|err| RustPSQLDriverError::ConnectionExecuteError(format!("{err}")))?, }; diff --git a/src/statement/query.rs b/src/statement/query.rs index 2b08aa62..108fe756 100644 --- a/src/statement/query.rs +++ b/src/statement/query.rs @@ -6,7 +6,7 @@ use crate::value_converter::consts::KWARGS_PARAMS_REGEXP; use super::utils::hash_str; -#[derive(Clone)] +#[derive(Clone, Debug)] pub struct QueryString { pub(crate) initial_qs: String, // This field are used when kwargs passed @@ -68,7 +68,7 @@ impl QueryString { } } -#[derive(Clone)] +#[derive(Clone, Debug)] pub(crate) struct ConvertedQueryString { converted_qs: String, params_names: Vec, diff --git a/src/statement/statement.rs b/src/statement/statement.rs index a93d9cd5..addaae89 100644 --- a/src/statement/statement.rs +++ b/src/statement/statement.rs @@ -5,7 +5,7 @@ use crate::exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}; use super::{parameters::PreparedParameters, query::QueryString}; -#[derive(Clone)] +#[derive(Clone, Debug)] pub struct PsqlpyStatement { query: QueryString, prepared_parameters: PreparedParameters, diff --git a/src/statement/statement_builder.rs b/src/statement/statement_builder.rs index 863ba400..5954f88c 100644 --- a/src/statement/statement_builder.rs +++ b/src/statement/statement_builder.rs @@ -93,10 +93,8 @@ impl<'a> StatementBuilder<'a> { )) } false => { - { - self.write_to_cache(cache_guard, &querystring, &prepared_stmt) - .await; - } + self.write_to_cache(cache_guard, &querystring, &prepared_stmt) + .await; return Ok(PsqlpyStatement::new(querystring, prepared_parameters, None)); } } diff --git a/src/value_converter/to_python.rs b/src/value_converter/to_python.rs index 047cd4c4..3d65565b 100644 --- a/src/value_converter/to_python.rs +++ b/src/value_converter/to_python.rs @@ -23,7 +23,6 @@ use crate::{ Circle, Line, RustLineSegment, RustLineString, RustMacAddr6, RustMacAddr8, RustPoint, RustRect, }, - consts::KWARGS_QUERYSTRINGS, models::{ decimal::InnerDecimal, interval::InnerInterval, serde_value::InternalSerdeValue, uuid::InternalUuid, From 3e89ffd50bab16355228e4d8ae26db3fcf9c0715 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Sun, 4 May 2025 20:38:12 +0200 Subject: [PATCH 30/65] Full value converter refactor --- src/driver/inner_connection.rs | 8 +------- 1 file changed, 1 
insertion(+), 7 deletions(-) diff --git a/src/driver/inner_connection.rs b/src/driver/inner_connection.rs index 797c9749..5b28d12b 100644 --- a/src/driver/inner_connection.rs +++ b/src/driver/inner_connection.rs @@ -202,14 +202,8 @@ impl PsqlpyConnection { "Cannot prepare statement, error - {err}" )) })?, - // false => { - // self - // .query_typed(statement.raw_query(), &statement.params_typed()) - // .await - // .map_err(|err| RustPSQLDriverError::ConnectionExecuteError(format!("{err}")))? - // }, false => self - .query_typed("SELECT * FROM users", &[]) + .query_typed(statement.raw_query(), &statement.params_typed()) .await .map_err(|err| RustPSQLDriverError::ConnectionExecuteError(format!("{err}")))?, }; From 8453ab7128d4d61a27bc941bdd9921b9fed4a704 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Sun, 4 May 2025 21:06:13 +0200 Subject: [PATCH 31/65] Full value converter refactor --- python/tests/test_value_converter.py | 5 +++++ src/driver/inner_connection.rs | 2 -- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/python/tests/test_value_converter.py b/python/tests/test_value_converter.py index b0ec5c8d..022afea2 100644 --- a/python/tests/test_value_converter.py +++ b/python/tests/test_value_converter.py @@ -1187,6 +1187,11 @@ async def test_empty_array( VarCharArray([]), [], ), + ( + "VARCHAR ARRAY", + [], + [], + ), ( "TEXT ARRAY", TextArray([]), diff --git a/src/driver/inner_connection.rs b/src/driver/inner_connection.rs index 5b28d12b..c671229a 100644 --- a/src/driver/inner_connection.rs +++ b/src/driver/inner_connection.rs @@ -29,10 +29,8 @@ impl PsqlpyConnection { if prepared { return Ok(pconn.prepare_cached(query).await?); } else { - pconn.batch_execute("BEGIN").await?; let prepared = pconn.prepare(query).await?; self.drop_prepared(&prepared).await?; - pconn.batch_execute("COMMIT").await?; return Ok(prepared); } } From 284cc344c278a70d0e892c5ae87ffc1c51b2497c Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Sun, 4 May 2025 22:58:57 +0200 Subject: [PATCH 32/65] Full value converter refactor --- python/tests/test_value_converter.py | 685 +++++++++----------------- src/driver/common_options.rs | 2 +- src/driver/connection_pool.rs | 73 +-- src/driver/connection_pool_builder.rs | 9 - 4 files changed, 266 insertions(+), 503 deletions(-) diff --git a/python/tests/test_value_converter.py b/python/tests/test_value_converter.py index 022afea2..ce2f05ed 100644 --- a/python/tests/test_value_converter.py +++ b/python/tests/test_value_converter.py @@ -239,448 +239,6 @@ async def test_as_class( datetime.timedelta(days=100, microseconds=100), datetime.timedelta(days=100, microseconds=100), ), - ( - "VARCHAR ARRAY", - ["Some String", "Some String"], - ["Some String", "Some String"], - ), - ( - "TEXT ARRAY", - [Text("Some String"), Text("Some String")], - ["Some String", "Some String"], - ), - ("BOOL ARRAY", [True, False], [True, False]), - ("BOOL ARRAY", [[True], [False]], [[True], [False]]), - ("INT2 ARRAY", [SmallInt(12), SmallInt(100)], [12, 100]), - ("INT2 ARRAY", [[SmallInt(12)], [SmallInt(100)]], [[12], [100]]), - ("INT4 ARRAY", [Integer(121231231), Integer(121231231)], [121231231, 121231231]), - ( - "INT4 ARRAY", - [[Integer(121231231)], [Integer(121231231)]], - [[121231231], [121231231]], - ), - ( - "INT8 ARRAY", - [BigInt(99999999999999999), BigInt(99999999999999999)], - [99999999999999999, 99999999999999999], - ), - ( - "INT8 ARRAY", - [[BigInt(99999999999999999)], [BigInt(99999999999999999)]], - [[99999999999999999], 
[99999999999999999]], - ), - ( - "MONEY ARRAY", - [Money(99999999999999999), Money(99999999999999999)], - [99999999999999999, 99999999999999999], - ), - ( - "NUMERIC(5, 2) ARRAY", - [Decimal("121.23"), Decimal("188.99")], - [Decimal("121.23"), Decimal("188.99")], - ), - ( - "NUMERIC(5, 2) ARRAY", - [[Decimal("121.23")], [Decimal("188.99")]], - [[Decimal("121.23")], [Decimal("188.99")]], - ), - ( - "FLOAT8 ARRAY", - [32.12329864501953, 32.12329864501953], - [32.12329864501953, 32.12329864501953], - ), - ( - "FLOAT8 ARRAY", - [[32.12329864501953], [32.12329864501953]], - [[32.12329864501953], [32.12329864501953]], - ), - ( - "DATE ARRAY", - [now_datetime.date(), now_datetime.date()], - [now_datetime.date(), now_datetime.date()], - ), - ( - "DATE ARRAY", - [[now_datetime.date()], [now_datetime.date()]], - [[now_datetime.date()], [now_datetime.date()]], - ), - ( - "TIME ARRAY", - [now_datetime.time(), now_datetime.time()], - [now_datetime.time(), now_datetime.time()], - ), - ( - "TIME ARRAY", - [[now_datetime.time()], [now_datetime.time()]], - [[now_datetime.time()], [now_datetime.time()]], - ), - ("TIMESTAMP ARRAY", [now_datetime, now_datetime], [now_datetime, now_datetime]), - ( - "TIMESTAMP ARRAY", - [[now_datetime], [now_datetime]], - [[now_datetime], [now_datetime]], - ), - ( - "TIMESTAMPTZ ARRAY", - [now_datetime_with_tz, now_datetime_with_tz], - [now_datetime_with_tz, now_datetime_with_tz], - ), - ( - "TIMESTAMPTZ ARRAY", - [now_datetime_with_tz, now_datetime_with_tz_in_asia_jakarta], - [now_datetime_with_tz, now_datetime_with_tz_in_asia_jakarta], - ), - ( - "TIMESTAMPTZ ARRAY", - [[now_datetime_with_tz], [now_datetime_with_tz]], - [[now_datetime_with_tz], [now_datetime_with_tz]], - ), - ( - "UUID ARRAY", - [uuid_, uuid_], - [str(uuid_), str(uuid_)], - ), - ( - "UUID ARRAY", - [[uuid_], [uuid_]], - [[str(uuid_)], [str(uuid_)]], - ), - ( - "INET ARRAY", - [IPv4Address("192.0.0.1"), IPv4Address("192.0.0.1")], - [IPv4Address("192.0.0.1"), IPv4Address("192.0.0.1")], - ), - ( - "INET ARRAY", - [[IPv4Address("192.0.0.1")], [IPv4Address("192.0.0.1")]], - [[IPv4Address("192.0.0.1")], [IPv4Address("192.0.0.1")]], - ), - ( - "JSONB ARRAY", - [ - { - "test": ["something", 123, "here"], - "nested": ["JSON"], - }, - { - "test": ["something", 123, "here"], - "nested": ["JSON"], - }, - ], - [ - { - "test": ["something", 123, "here"], - "nested": ["JSON"], - }, - { - "test": ["something", 123, "here"], - "nested": ["JSON"], - }, - ], - ), - ( - "JSONB ARRAY", - [ - [ - { - "test": ["something", 123, "here"], - "nested": ["JSON"], - }, - ], - [ - { - "test": ["something", 123, "here"], - "nested": ["JSON"], - }, - ], - ], - [ - [ - { - "test": ["something", 123, "here"], - "nested": ["JSON"], - }, - ], - [ - { - "test": ["something", 123, "here"], - "nested": ["JSON"], - }, - ], - ], - ), - ( - "JSONB ARRAY", - [ - JSONB([{"array": "json"}, {"one more": "test"}]), - JSONB([{"array": "json"}, {"one more": "test"}]), - ], - [ - [{"array": "json"}, {"one more": "test"}], - [{"array": "json"}, {"one more": "test"}], - ], - ), - ( - "JSONB ARRAY", - [ - JSONB([[{"array": "json"}], [{"one more": "test"}]]), - JSONB([[{"array": "json"}], [{"one more": "test"}]]), - ], - [ - [[{"array": "json"}], [{"one more": "test"}]], - [[{"array": "json"}], [{"one more": "test"}]], - ], - ), - ( - "JSON ARRAY", - [ - { - "test": ["something", 123, "here"], - "nested": ["JSON"], - }, - { - "test": ["something", 123, "here"], - "nested": ["JSON"], - }, - ], - [ - { - "test": ["something", 123, "here"], - "nested": ["JSON"], - 
}, - { - "test": ["something", 123, "here"], - "nested": ["JSON"], - }, - ], - ), - ( - "JSON ARRAY", - [ - JSON( - { - "test": ["something", 123, "here"], - "nested": ["JSON"], - }, - ), - JSON( - { - "test": ["something", 123, "here"], - "nested": ["JSON"], - }, - ), - ], - [ - { - "test": ["something", 123, "here"], - "nested": ["JSON"], - }, - { - "test": ["something", 123, "here"], - "nested": ["JSON"], - }, - ], - ), - ( - "JSON ARRAY", - [ - [ - JSON( - { - "test": ["something", 123, "here"], - "nested": ["JSON"], - }, - ), - ], - [ - JSON( - { - "test": ["something", 123, "here"], - "nested": ["JSON"], - }, - ), - ], - ], - [ - [ - { - "test": ["something", 123, "here"], - "nested": ["JSON"], - }, - ], - [ - { - "test": ["something", 123, "here"], - "nested": ["JSON"], - }, - ], - ], - ), - ( - "JSON ARRAY", - [ - JSON([{"array": "json"}, {"one more": "test"}]), - JSON([{"array": "json"}, {"one more": "test"}]), - ], - [ - [{"array": "json"}, {"one more": "test"}], - [{"array": "json"}, {"one more": "test"}], - ], - ), - ( - "JSON ARRAY", - [ - JSON([[{"array": "json"}], [{"one more": "test"}]]), - JSON([[{"array": "json"}], [{"one more": "test"}]]), - ], - [ - [[{"array": "json"}], [{"one more": "test"}]], - [[{"array": "json"}], [{"one more": "test"}]], - ], - ), - ( - "POINT ARRAY", - [ - Point([1.5, 2]), - Point([2, 3]), - ], - [ - (1.5, 2.0), - (2.0, 3.0), - ], - ), - ( - "POINT ARRAY", - [ - [Point([1.5, 2])], - [Point([2, 3])], - ], - [ - [(1.5, 2.0)], - [(2.0, 3.0)], - ], - ), - ( - "BOX ARRAY", - [ - Box([3.5, 3, 9, 9]), - Box([8.5, 8, 9, 9]), - ], - [ - ((9.0, 9.0), (3.5, 3.0)), - ((9.0, 9.0), (8.5, 8.0)), - ], - ), - ( - "BOX ARRAY", - [ - [Box([3.5, 3, 9, 9])], - [Box([8.5, 8, 9, 9])], - ], - [ - [((9.0, 9.0), (3.5, 3.0))], - [((9.0, 9.0), (8.5, 8.0))], - ], - ), - ( - "PATH ARRAY", - [ - Path([(3.5, 3), (9, 9), (8, 8)]), - Path([(3.5, 3), (6, 6), (3.5, 3)]), - ], - [ - [(3.5, 3.0), (9.0, 9.0), (8.0, 8.0)], - ((3.5, 3.0), (6.0, 6.0), (3.5, 3.0)), - ], - ), - ( - "PATH ARRAY", - [ - [Path([(3.5, 3), (9, 9), (8, 8)])], - [Path([(3.5, 3), (6, 6), (3.5, 3)])], - ], - [ - [[(3.5, 3.0), (9.0, 9.0), (8.0, 8.0)]], - [((3.5, 3.0), (6.0, 6.0), (3.5, 3.0))], - ], - ), - ( - "LINE ARRAY", - [ - Line([-2, 1, 2]), - Line([1, -2, 3]), - ], - [ - (-2.0, 1.0, 2.0), - (1.0, -2.0, 3.0), - ], - ), - ( - "LINE ARRAY", - [ - [Line([-2, 1, 2])], - [Line([1, -2, 3])], - ], - [ - [(-2.0, 1.0, 2.0)], - [(1.0, -2.0, 3.0)], - ], - ), - ( - "LSEG ARRAY", - [ - LineSegment({(1, 2), (9, 9)}), - LineSegment([(5.6, 3.1), (4, 5)]), - ], - [ - [(1.0, 2.0), (9.0, 9.0)], - [(5.6, 3.1), (4.0, 5.0)], - ], - ), - ( - "LSEG ARRAY", - [ - [LineSegment({(1, 2), (9, 9)})], - [LineSegment([(5.6, 3.1), (4, 5)])], - ], - [ - [[(1.0, 2.0), (9.0, 9.0)]], - [[(5.6, 3.1), (4.0, 5.0)]], - ], - ), - ( - "CIRCLE ARRAY", - [ - Circle([1.7, 2.8, 3]), - Circle([5, 1.8, 10]), - ], - [ - ((1.7, 2.8), 3.0), - ((5.0, 1.8), 10.0), - ], - ), - ( - "CIRCLE ARRAY", - [ - [Circle([1.7, 2.8, 3])], - [Circle([5, 1.8, 10])], - ], - [ - [((1.7, 2.8), 3.0)], - [((5.0, 1.8), 10.0)], - ], - ), - ( - "INTERVAL ARRAY", - [ - datetime.timedelta(days=100, microseconds=100), - datetime.timedelta(days=100, microseconds=100), - ], - [ - datetime.timedelta(days=100, microseconds=100), - datetime.timedelta(days=100, microseconds=100), - ], - ), ], ) async def test_deserialization_simple_into_python( @@ -1177,37 +735,29 @@ async def test_empty_array( @pytest.mark.parametrize( ("postgres_type", "py_value", "expected_deserialized"), [ + ("VARCHAR 
ARRAY", [], []), ( "VARCHAR ARRAY", VarCharArray(["Some String", "Some String"]), ["Some String", "Some String"], ), - ( - "VARCHAR ARRAY", - VarCharArray([]), - [], - ), - ( - "VARCHAR ARRAY", - [], - [], - ), - ( - "TEXT ARRAY", - TextArray([]), - [], - ), + ("VARCHAR ARRAY", VarCharArray([]), []), + ("TEXT ARRAY", [], []), + ("TEXT ARRAY", TextArray([]), []), ( "TEXT ARRAY", TextArray([Text("Some String"), Text("Some String")]), ["Some String", "Some String"], ), + ("BOOL ARRAY", [], []), ("BOOL ARRAY", BoolArray([]), []), ("BOOL ARRAY", BoolArray([True, False]), [True, False]), ("BOOL ARRAY", BoolArray([[True], [False]]), [[True], [False]]), + ("INT2 ARRAY", [], []), ("INT2 ARRAY", Int16Array([]), []), ("INT2 ARRAY", Int16Array([SmallInt(12), SmallInt(100)]), [12, 100]), ("INT2 ARRAY", Int16Array([[SmallInt(12)], [SmallInt(100)]]), [[12], [100]]), + ("INT4 ARRAY", [], []), ( "INT4 ARRAY", Int32Array([Integer(121231231), Integer(121231231)]), @@ -1218,6 +768,7 @@ async def test_empty_array( Int32Array([[Integer(121231231)], [Integer(121231231)]]), [[121231231], [121231231]], ), + ("INT8 ARRAY", [], []), ( "INT8 ARRAY", Int64Array([BigInt(99999999999999999), BigInt(99999999999999999)]), @@ -1228,11 +779,13 @@ async def test_empty_array( Int64Array([[BigInt(99999999999999999)], [BigInt(99999999999999999)]]), [[99999999999999999], [99999999999999999]], ), + ("MONEY ARRAY", [], []), ( "MONEY ARRAY", MoneyArray([Money(99999999999999999), Money(99999999999999999)]), [99999999999999999, 99999999999999999], ), + ("NUMERIC(5, 2) ARRAY", [], []), ( "NUMERIC(5, 2) ARRAY", NumericArray([Decimal("121.23"), Decimal("188.99")]), @@ -1243,6 +796,13 @@ async def test_empty_array( NumericArray([[Decimal("121.23")], [Decimal("188.99")]]), [[Decimal("121.23")], [Decimal("188.99")]], ), + ("FLOAT4 ARRAY", [], []), + ( + "FLOAT4 ARRAY", + [32.12329864501953, 32.12329864501953], + [32.12329864501953, 32.12329864501953], + ), + ("FLOAT8 ARRAY", [], []), ( "FLOAT8 ARRAY", Float64Array([32.12329864501953, 32.12329864501953]), @@ -1253,6 +813,7 @@ async def test_empty_array( Float64Array([[32.12329864501953], [32.12329864501953]]), [[32.12329864501953], [32.12329864501953]], ), + ("DATE ARRAY", [], []), ( "DATE ARRAY", DateArray([now_datetime.date(), now_datetime.date()]), @@ -1263,6 +824,7 @@ async def test_empty_array( DateArray([[now_datetime.date()], [now_datetime.date()]]), [[now_datetime.date()], [now_datetime.date()]], ), + ("TIME ARRAY", [], []), ( "TIME ARRAY", TimeArray([now_datetime.time(), now_datetime.time()]), @@ -1273,6 +835,7 @@ async def test_empty_array( TimeArray([[now_datetime.time()], [now_datetime.time()]]), [[now_datetime.time()], [now_datetime.time()]], ), + ("TIMESTAMP ARRAY", [], []), ( "TIMESTAMP ARRAY", DateTimeArray([now_datetime, now_datetime]), @@ -1283,6 +846,7 @@ async def test_empty_array( DateTimeArray([[now_datetime], [now_datetime]]), [[now_datetime], [now_datetime]], ), + ("TIMESTAMPTZ ARRAY", [], []), ( "TIMESTAMPTZ ARRAY", DateTimeTZArray([now_datetime_with_tz, now_datetime_with_tz]), @@ -1293,16 +857,13 @@ async def test_empty_array( DateTimeTZArray([[now_datetime_with_tz], [now_datetime_with_tz]]), [[now_datetime_with_tz], [now_datetime_with_tz]], ), - ( - "UUID ARRAY", - UUIDArray([uuid_, uuid_]), - [str(uuid_), str(uuid_)], - ), + ("UUID ARRAY", [], []), ( "UUID ARRAY", UUIDArray([[uuid_], [uuid_]]), [[str(uuid_)], [str(uuid_)]], ), + ("INET ARRAY", [], []), ( "INET ARRAY", IpAddressArray([IPv4Address("192.0.0.1"), IPv4Address("192.0.0.1")]), @@ -1313,6 +874,30 @@ async 
def test_empty_array( IpAddressArray([[IPv4Address("192.0.0.1")], [IPv4Address("192.0.0.1")]]), [[IPv4Address("192.0.0.1")], [IPv4Address("192.0.0.1")]], ), + ("JSONB ARRAY", [], []), + ( + "JSONB ARRAY", + [ + { + "test": ["something", 123, "here"], + "nested": ["JSON"], + }, + { + "test": ["something", 123, "here"], + "nested": ["JSON"], + }, + ], + [ + { + "test": ["something", 123, "here"], + "nested": ["JSON"], + }, + { + "test": ["something", 123, "here"], + "nested": ["JSON"], + }, + ], + ), ( "JSONB ARRAY", JSONBArray( @@ -1397,6 +982,55 @@ async def test_empty_array( [[{"array": "json"}], [{"one more": "test"}]], ], ), + ("JSON ARRAY", [], []), + ( + "JSON ARRAY", + [ + { + "test": ["something", 123, "here"], + "nested": ["JSON"], + }, + { + "test": ["something", 123, "here"], + "nested": ["JSON"], + }, + ], + [ + { + "test": ["something", 123, "here"], + "nested": ["JSON"], + }, + { + "test": ["something", 123, "here"], + "nested": ["JSON"], + }, + ], + ), + ( + "JSON ARRAY", + JSONArray( + [ + { + "test": ["something", 123, "here"], + "nested": ["JSON"], + }, + { + "test": ["something", 123, "here"], + "nested": ["JSON"], + }, + ], + ), + [ + { + "test": ["something", 123, "here"], + "nested": ["JSON"], + }, + { + "test": ["something", 123, "here"], + "nested": ["JSON"], + }, + ], + ), ( "JSON ARRAY", JSONArray( @@ -1489,6 +1123,17 @@ async def test_empty_array( [[{"array": "json"}], [{"one more": "test"}]], ], ), + ( + "POINT ARRAY", + [ + Point([1.5, 2]), + Point([2, 3]), + ], + [ + (1.5, 2.0), + (2.0, 3.0), + ], + ), ( "POINT ARRAY", PointArray( @@ -1502,6 +1147,17 @@ async def test_empty_array( (2.0, 3.0), ], ), + ( + "POINT ARRAY", + [ + [Point([1.5, 2])], + [Point([2, 3])], + ], + [ + [(1.5, 2.0)], + [(2.0, 3.0)], + ], + ), ( "POINT ARRAY", PointArray( @@ -1515,6 +1171,18 @@ async def test_empty_array( [(2.0, 3.0)], ], ), + ("BOX ARRAY", [], []), + ( + "BOX ARRAY", + [ + Box([3.5, 3, 9, 9]), + Box([8.5, 8, 9, 9]), + ], + [ + ((9.0, 9.0), (3.5, 3.0)), + ((9.0, 9.0), (8.5, 8.0)), + ], + ), ( "BOX ARRAY", BoxArray( @@ -1541,6 +1209,18 @@ async def test_empty_array( [((9.0, 9.0), (8.5, 8.0))], ], ), + ("PATH ARRAY", [], []), + ( + "PATH ARRAY", + [ + Path([(3.5, 3), (9, 9), (8, 8)]), + Path([(3.5, 3), (6, 6), (3.5, 3)]), + ], + [ + [(3.5, 3.0), (9.0, 9.0), (8.0, 8.0)], + ((3.5, 3.0), (6.0, 6.0), (3.5, 3.0)), + ], + ), ( "PATH ARRAY", PathArray( @@ -1554,6 +1234,17 @@ async def test_empty_array( ((3.5, 3.0), (6.0, 6.0), (3.5, 3.0)), ], ), + ( + "PATH ARRAY", + [ + [Path([(3.5, 3), (9, 9), (8, 8)])], + [Path([(3.5, 3), (6, 6), (3.5, 3)])], + ], + [ + [[(3.5, 3.0), (9.0, 9.0), (8.0, 8.0)]], + [((3.5, 3.0), (6.0, 6.0), (3.5, 3.0))], + ], + ), ( "PATH ARRAY", PathArray( @@ -1567,6 +1258,18 @@ async def test_empty_array( [((3.5, 3.0), (6.0, 6.0), (3.5, 3.0))], ], ), + ("LINE ARRAY", [], []), + ( + "LINE ARRAY", + [ + Line([-2, 1, 2]), + Line([1, -2, 3]), + ], + [ + (-2.0, 1.0, 2.0), + (1.0, -2.0, 3.0), + ], + ), ( "LINE ARRAY", LineArray( @@ -1580,6 +1283,17 @@ async def test_empty_array( (1.0, -2.0, 3.0), ], ), + ( + "LINE ARRAY", + [ + [Line([-2, 1, 2])], + [Line([1, -2, 3])], + ], + [ + [(-2.0, 1.0, 2.0)], + [(1.0, -2.0, 3.0)], + ], + ), ( "LINE ARRAY", LineArray( @@ -1593,6 +1307,18 @@ async def test_empty_array( [(1.0, -2.0, 3.0)], ], ), + ("LSEG ARRAY", [], []), + ( + "LSEG ARRAY", + [ + LineSegment({(1, 2), (9, 9)}), + LineSegment([(5.6, 3.1), (4, 5)]), + ], + [ + [(1.0, 2.0), (9.0, 9.0)], + [(5.6, 3.1), (4.0, 5.0)], + ], + ), ( "LSEG ARRAY", LsegArray( @@ -1606,6 +1332,17 @@ 
async def test_empty_array( [(5.6, 3.1), (4.0, 5.0)], ], ), + ( + "LSEG ARRAY", + [ + [LineSegment({(1, 2), (9, 9)})], + [LineSegment([(5.6, 3.1), (4, 5)])], + ], + [ + [[(1.0, 2.0), (9.0, 9.0)]], + [[(5.6, 3.1), (4.0, 5.0)]], + ], + ), ( "LSEG ARRAY", LsegArray( @@ -1619,6 +1356,18 @@ async def test_empty_array( [[(5.6, 3.1), (4.0, 5.0)]], ], ), + ("CIRCLE ARRAY", [], []), + ( + "CIRCLE ARRAY", + [ + Circle([1.7, 2.8, 3]), + Circle([5, 1.8, 10]), + ], + [ + ((1.7, 2.8), 3.0), + ((5.0, 1.8), 10.0), + ], + ), ( "CIRCLE ARRAY", CircleArray( @@ -1645,6 +1394,18 @@ async def test_empty_array( [((5.0, 1.8), 10.0)], ], ), + ("INTERVAL ARRAY", [], []), + ( + "INTERVAL ARRAY", + [ + [datetime.timedelta(days=100, microseconds=100)], + [datetime.timedelta(days=100, microseconds=100)], + ], + [ + [datetime.timedelta(days=100, microseconds=100)], + [datetime.timedelta(days=100, microseconds=100)], + ], + ), ( "INTERVAL ARRAY", IntervalArray( diff --git a/src/driver/common_options.rs b/src/driver/common_options.rs index aebc5837..a76d37dd 100644 --- a/src/driver/common_options.rs +++ b/src/driver/common_options.rs @@ -64,7 +64,7 @@ impl TargetSessionAttrs { } #[pyclass(eq, eq_int)] -#[derive(Clone, Copy, PartialEq)] +#[derive(Clone, Copy, PartialEq, Debug)] pub enum SslMode { /// Do not use TLS. Disable, diff --git a/src/driver/connection_pool.rs b/src/driver/connection_pool.rs index 16454de0..aa897012 100644 --- a/src/driver/connection_pool.rs +++ b/src/driver/connection_pool.rs @@ -15,6 +15,23 @@ use super::{ utils::{build_connection_config, build_manager, build_tls}, }; +#[derive(Debug, Clone)] +pub struct ConnectionPoolConf { + pub ca_file: Option, + pub ssl_mode: Option, + pub prepare: bool, +} + +impl ConnectionPoolConf { + fn new(ca_file: Option, ssl_mode: Option, prepare: bool) -> Self { + Self { + ca_file, + ssl_mode, + prepare, + } + } +} + /// Make new connection pool. 
/// /// # Errors @@ -47,7 +64,6 @@ use super::{ ca_file=None, max_db_pool_size=None, conn_recycling_method=None, - prepare=None, ))] #[allow(clippy::too_many_arguments)] pub fn connect( @@ -77,7 +93,6 @@ pub fn connect( ca_file: Option, max_db_pool_size: Option, conn_recycling_method: Option, - prepare: Option, ) -> PSQLPyResult { if let Some(max_db_pool_size) = max_db_pool_size { if max_db_pool_size < 2 { @@ -137,13 +152,9 @@ pub fn connect( let pool = db_pool_builder.build()?; - Ok(ConnectionPool { - pool: pool, - pg_config: Arc::new(pg_config), - ca_file: ca_file, - ssl_mode: ssl_mode, - prepare: prepare.unwrap_or(true), - }) + Ok(ConnectionPool::build( + pool, pg_config, ca_file, ssl_mode, None, + )) } #[pyclass] @@ -209,9 +220,7 @@ impl ConnectionPoolStatus { pub struct ConnectionPool { pool: Pool, pg_config: Arc, - ca_file: Option, - ssl_mode: Option, - prepare: bool, + pool_conf: ConnectionPoolConf, } impl ConnectionPool { @@ -226,9 +235,7 @@ impl ConnectionPool { ConnectionPool { pool: pool, pg_config: Arc::new(pg_config), - ca_file: ca_file, - ssl_mode: ssl_mode, - prepare: prepare.unwrap_or(true), + pool_conf: ConnectionPoolConf::new(ca_file, ssl_mode, prepare.unwrap_or(true)), } } @@ -271,7 +278,6 @@ impl ConnectionPool { conn_recycling_method=None, ssl_mode=None, ca_file=None, - prepare=None, ))] #[allow(clippy::too_many_arguments)] pub fn new( @@ -301,7 +307,6 @@ impl ConnectionPool { conn_recycling_method: Option, ssl_mode: Option, ca_file: Option, - prepare: Option, ) -> PSQLPyResult { connect( dsn, @@ -330,7 +335,6 @@ impl ConnectionPool { ca_file, max_db_pool_size, conn_recycling_method, - prepare, ) } @@ -378,24 +382,24 @@ impl ConnectionPool { None, Some(self.pool.clone()), self.pg_config.clone(), - self.prepare, + self.pool_conf.prepare, ) } #[must_use] #[allow(clippy::needless_pass_by_value)] pub fn listener(self_: pyo3::Py) -> Listener { - let (pg_config, ca_file, ssl_mode, prepare) = pyo3::Python::with_gil(|gil| { + let (pg_config, pool_conf) = pyo3::Python::with_gil(|gil| { let b_gil = self_.borrow(gil); - ( - b_gil.pg_config.clone(), - b_gil.ca_file.clone(), - b_gil.ssl_mode, - b_gil.prepare, - ) + (b_gil.pg_config.clone(), b_gil.pool_conf.clone()) }); - Listener::new(pg_config, ca_file, ssl_mode, prepare) + Listener::new( + pg_config, + pool_conf.ca_file, + pool_conf.ssl_mode, + pool_conf.prepare, + ) } /// Return new single connection. @@ -403,9 +407,13 @@ impl ConnectionPool { /// # Errors /// May return Err Result if cannot get new connection from the pool. pub async fn connection(self_: pyo3::Py) -> PSQLPyResult { - let (db_pool, pg_config, prepare) = pyo3::Python::with_gil(|gil| { + let (db_pool, pg_config, pool_conf) = pyo3::Python::with_gil(|gil| { let slf = self_.borrow(gil); - (slf.pool.clone(), slf.pg_config.clone(), slf.prepare) + ( + slf.pool.clone(), + slf.pg_config.clone(), + slf.pool_conf.clone(), + ) }); let db_connection = tokio_runtime() .spawn(async move { @@ -414,10 +422,13 @@ impl ConnectionPool { .await??; Ok(Connection::new( - Some(Arc::new(PsqlpyConnection::PoolConn(db_connection, prepare))), + Some(Arc::new(PsqlpyConnection::PoolConn( + db_connection, + pool_conf.prepare, + ))), None, pg_config, - prepare, + pool_conf.prepare, )) } diff --git a/src/driver/connection_pool_builder.rs b/src/driver/connection_pool_builder.rs index ea311642..0cd7432b 100644 --- a/src/driver/connection_pool_builder.rs +++ b/src/driver/connection_pool_builder.rs @@ -83,15 +83,6 @@ impl ConnectionPoolBuilder { self_ } - /// Set ca_file for ssl_mode in PostgreSQL. 
- fn prepare(self_: Py, prepare: bool) -> Py { - Python::with_gil(|gil| { - let mut self_ = self_.borrow_mut(gil); - self_.prepare = Some(prepare); - }); - self_ - } - /// Set size to the connection pool. /// /// # Error From b5bfec2aac8addb161b80644bc856c73f325dedf Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Sun, 4 May 2025 23:04:20 +0200 Subject: [PATCH 33/65] Full value converter refactor --- src/driver/inner_connection.rs | 2 +- src/value_converter/to_python.rs | 38 -------------------------------- 2 files changed, 1 insertion(+), 39 deletions(-) diff --git a/src/driver/inner_connection.rs b/src/driver/inner_connection.rs index c671229a..d8acc4d8 100644 --- a/src/driver/inner_connection.rs +++ b/src/driver/inner_connection.rs @@ -1,5 +1,5 @@ use bytes::Buf; -use deadpool_postgres::{Object, Pool}; +use deadpool_postgres::Object; use postgres_types::{ToSql, Type}; use pyo3::{Py, PyAny, Python}; use std::vec; diff --git a/src/value_converter/to_python.rs b/src/value_converter/to_python.rs index 3d65565b..c0801bac 100644 --- a/src/value_converter/to_python.rs +++ b/src/value_converter/to_python.rs @@ -635,41 +635,3 @@ pub fn postgres_to_py( } Ok(py.None()) } - -/// Convert Python sequence to Rust vector. -/// Also it checks that sequence has set/list/tuple type. -/// -/// # Errors -/// -/// May return error if cannot convert Python type into Rust one. -/// May return error if parameters type isn't correct. -fn py_sequence_to_rust(bind_parameters: &Bound) -> PSQLPyResult>> { - let mut coord_values_sequence_vec: Vec> = vec![]; - - if bind_parameters.is_instance_of::() { - let bind_pyset_parameters = bind_parameters.downcast::().unwrap(); - - for one_parameter in bind_pyset_parameters { - let extracted_parameter = one_parameter.extract::>().map_err(|_| { - RustPSQLDriverError::PyToRustValueConversionError( - format!("Error on sequence type extraction, please use correct list/tuple/set, {bind_parameters}") - ) - })?; - coord_values_sequence_vec.push(extracted_parameter); - } - } else if bind_parameters.is_instance_of::() - | bind_parameters.is_instance_of::() - { - coord_values_sequence_vec = bind_parameters.extract::>>().map_err(|_| { - RustPSQLDriverError::PyToRustValueConversionError( - format!("Error on sequence type extraction, please use correct list/tuple/set, {bind_parameters}") - ) - })?; - } else { - return Err(RustPSQLDriverError::PyToRustValueConversionError(format!( - "Invalid sequence type, please use list/tuple/set, {bind_parameters}" - ))); - }; - - Ok::>, RustPSQLDriverError>(coord_values_sequence_vec) -} From 0464162b0c296b8701f42bce529cef1bf48807b7 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Mon, 5 May 2025 00:28:58 +0200 Subject: [PATCH 34/65] Added 14, 15, 16, 17 version of PostgreSQL to tests --- .github/workflows/test.yaml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 7d81fbd3..76dbdda8 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -36,6 +36,7 @@ jobs: strategy: matrix: py_version: ["3.9", "3.10", "3.11", "3.12", "3.13"] + postgres_version: ["14", "15", "16", "17"] job: - os: ubuntu-latest ssl_cmd: sudo apt-get update && sudo apt-get install libssl-dev openssl @@ -43,12 +44,13 @@ jobs: steps: - uses: actions/checkout@v1 - name: Setup Postgres - uses: ./.github/actions/setup_postgres/ + uses: ikalnytskyi/action-setup-postgres@v7 with: username: postgres password: postgres database: psqlpy_test - 
ssl_on: "on" + ssl: true + postgres-version: ${{ matrix.postgres_version }} id: postgres - uses: actions-rs/toolchain@v1 with: From 7c6d373e1c8e72698891e768751ee9fdab534430 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Mon, 5 May 2025 00:35:11 +0200 Subject: [PATCH 35/65] Added 14, 15, 16, 17 version of PostgreSQL to tests --- .github/workflows/test.yaml | 2 +- python/tests/conftest.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 76dbdda8..f23ba46c 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -32,7 +32,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} args: -p psqlpy --all-features -- -W clippy::all -W clippy::pedantic pytest: - name: ${{matrix.job.os}}-${{matrix.py_version}} + name: ${{matrix.job.os}}-${{matrix.py_version}}-${{ matrix.postgres_version }} strategy: matrix: py_version: ["3.9", "3.10", "3.11", "3.12", "3.13"] diff --git a/python/tests/conftest.py b/python/tests/conftest.py index 30426e5f..1ee7e9b4 100644 --- a/python/tests/conftest.py +++ b/python/tests/conftest.py @@ -85,7 +85,7 @@ def number_database_records() -> int: @pytest.fixture def ssl_cert_file() -> str: - return os.environ.get("POSTGRES_CERT_FILE", "./root.crt") + return os.environ.get("POSTGRES_CERT_FILE", "./server.crt") @pytest.fixture From 47a441c37cc89aa446ef3a0984f6fe166d161635 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Mon, 5 May 2025 00:39:38 +0200 Subject: [PATCH 36/65] Added 14, 15, 16, 17 version of PostgreSQL to tests --- .github/workflows/test.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index f23ba46c..aee295cd 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -44,6 +44,7 @@ jobs: steps: - uses: actions/checkout@v1 - name: Setup Postgres + id: postgres uses: ikalnytskyi/action-setup-postgres@v7 with: username: postgres @@ -66,4 +67,6 @@ jobs: - name: Install tox run: pip install "tox-gh>=1.2,<2" - name: Run pytest + env: + POSTGRES_CERT_FILE: "${{ steps.postgres.outputs.certificate-path }}" run: tox -v -c tox.ini From 28c6d4ee4eba2eca5e4ebf5ac726bcbdcc3397f9 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Mon, 5 May 2025 00:40:22 +0200 Subject: [PATCH 37/65] Added 14, 15, 16, 17 version of PostgreSQL to tests --- .github/workflows/test.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index aee295cd..eb8f6197 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -52,7 +52,6 @@ jobs: database: psqlpy_test ssl: true postgres-version: ${{ matrix.postgres_version }} - id: postgres - uses: actions-rs/toolchain@v1 with: toolchain: stable From 7a5362b9055450318e0cc512d51cc8bd9e1abe17 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Mon, 5 May 2025 00:46:04 +0200 Subject: [PATCH 38/65] Added 14, 15, 16, 17 version of PostgreSQL to tests --- .github/workflows/test.yaml | 6 ++++-- python/tests/conftest.py | 5 ++++- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index eb8f6197..144e3bfd 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -35,8 +35,10 @@ jobs: name: ${{matrix.job.os}}-${{matrix.py_version}}-${{ matrix.postgres_version }} strategy: matrix: - py_version: ["3.9", "3.10", "3.11", "3.12", "3.13"] - 
postgres_version: ["14", "15", "16", "17"] + # py_version: ["3.9", "3.10", "3.11", "3.12", "3.13"] + py_version: ["3.9"] + # postgres_version: ["14", "15", "16", "17"] + postgres_version: ["14"] job: - os: ubuntu-latest ssl_cmd: sudo apt-get update && sudo apt-get install libssl-dev openssl diff --git a/python/tests/conftest.py b/python/tests/conftest.py index 1ee7e9b4..a9bfc4d3 100644 --- a/python/tests/conftest.py +++ b/python/tests/conftest.py @@ -85,7 +85,10 @@ def number_database_records() -> int: @pytest.fixture def ssl_cert_file() -> str: - return os.environ.get("POSTGRES_CERT_FILE", "./server.crt") + return os.environ.get( + "POSTGRES_CERT_FILE", + "/home/runner/work/_temp/pgdata/server.crt", + ) @pytest.fixture From 936529ae75e97120c57819b2bbeacb0ff82321ec Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Mon, 5 May 2025 00:50:28 +0200 Subject: [PATCH 39/65] Added 14, 15, 16, 17 version of PostgreSQL to tests --- .github/workflows/test.yaml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 144e3bfd..eb8f6197 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -35,10 +35,8 @@ jobs: name: ${{matrix.job.os}}-${{matrix.py_version}}-${{ matrix.postgres_version }} strategy: matrix: - # py_version: ["3.9", "3.10", "3.11", "3.12", "3.13"] - py_version: ["3.9"] - # postgres_version: ["14", "15", "16", "17"] - postgres_version: ["14"] + py_version: ["3.9", "3.10", "3.11", "3.12", "3.13"] + postgres_version: ["14", "15", "16", "17"] job: - os: ubuntu-latest ssl_cmd: sudo apt-get update && sudo apt-get install libssl-dev openssl From 65e762f5a1526a7a10fb2c6014db020f771ad534 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Mon, 5 May 2025 01:20:46 +0200 Subject: [PATCH 40/65] Added information about deprecated functionaly to docs --- docs/usage/types/array_types.md | 9 ++++++++- docs/usage/types/extra_types.md | 21 ++++++++++++++------- 2 files changed, 22 insertions(+), 8 deletions(-) diff --git a/docs/usage/types/array_types.md b/docs/usage/types/array_types.md index 312b4bb1..f2584804 100644 --- a/docs/usage/types/array_types.md +++ b/docs/usage/types/array_types.md @@ -1,6 +1,13 @@ --- -title: Array Types +title: Array Types (Deprecated) --- + +::: important +Deprecated functionality. + +Use any Python Sequence as a parameter. +::: + For type safety and better performance we have predefined array types. | PSQLPy Array Type | PostgreSQL Array Type | diff --git a/docs/usage/types/extra_types.md b/docs/usage/types/extra_types.md index 52431843..e4528831 100644 --- a/docs/usage/types/extra_types.md +++ b/docs/usage/types/extra_types.md @@ -5,15 +5,22 @@ title: Extra Types PSQLPy has additional types due to the inability to accurately recognize the type passed from Python. All extra types available from Python with mapping to PostgreSQL type and Rust type. + +::: important +Some of the types are deprecated. + +Use standard python types instead of deprecated ones. 
+::: + | PSQLPy type | PostgreSQL type | Rust Type | | :---: | :---: | :---: | -| BigInt | BigInt | i64 | -| Integer | Integer | i32 | -| SmallInt | SmallInt | i16 | -| Float32 | FLOAT4 | f32 | -| Float64 | FLOAT8 | f64 | -| VarChar | VarChar | String | -| Text | Text | String | +| BigInt (Deprecated) | BigInt | i64 | +| Integer (Deprecated) | Integer | i32 | +| SmallInt (Deprecated) | SmallInt | i16 | +| Float32 (Deprecated) | FLOAT4 | f32 | +| Float64 (Deprecated) | FLOAT8 | f64 | +| VarChar (Deprecated) | VarChar | String | +| Text (Deprecated) | Text | String | | JSON | JSON | serde::Value | | JSONB | JSONB | serde::Value | | MacAddr6 | MacAddr | MacAddr6 | From df6d1e7ba2cc45f3ce0cd7564afd3e8f53867f3c Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Mon, 5 May 2025 01:33:07 +0200 Subject: [PATCH 41/65] Added information about external connection pools --- docs/.vuepress/sidebar.ts | 5 +++++ docs/external_connection_pools.md | 25 +++++++++++++++++++++++++ 2 files changed, 30 insertions(+) create mode 100644 docs/external_connection_pools.md diff --git a/docs/.vuepress/sidebar.ts b/docs/.vuepress/sidebar.ts index d3afac19..1a3c1efd 100644 --- a/docs/.vuepress/sidebar.ts +++ b/docs/.vuepress/sidebar.ts @@ -77,6 +77,11 @@ export default sidebar({ "opentelemetry", ], }, + { + text: "External connection pools", + prefix: "/extra_conn_pools", + link: "/external_connection_pools.md" + }, { text: "Contribution guide", prefix: "/contribution_guide", diff --git a/docs/external_connection_pools.md b/docs/external_connection_pools.md new file mode 100644 index 00000000..70a892a2 --- /dev/null +++ b/docs/external_connection_pools.md @@ -0,0 +1,25 @@ +--- +title: External connection pools +--- + +PSQLPy supports external connection pools like [PgBouncer](https://www.pgbouncer.org/) or [Supavisor](https://github.com/supabase/supavisor). + +Usually, external connection pools have 3 main [modes](https://www.pgbouncer.org/features.html): `Session`, `Transaction` and `Statement`. + +If you use `Session` mode, there is nothing you have to do, just use PSQLPy as usual. + +But there are a few conditions that must be met to make `Transaction` and `Statement` work. + +### Disable statement preparation +Disable statement preparation for any sql statement execution (if a method has `prepared` parameter, set it to `False`). + +### Execute statement only in transaction +Each statement must be executed in a transaction. + +```python +db_pool = ConnectionPool(...) + +async with db_pool.acquire() as conn: + async with conn.transaction() as transaction: + await transaction.execute("SELECT 1", prepared=False) +``` From b7812c90a95d95d479b514b20f3bb5a6be06c3e2 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Mon, 5 May 2025 11:33:04 +0200 Subject: [PATCH 42/65] Fixed broken links in the docs --- docs/components/results.md | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/docs/components/results.md b/docs/components/results.md index 5b5d5392..15bc3690 100644 --- a/docs/components/results.md +++ b/docs/components/results.md @@ -14,7 +14,7 @@ Currently there are two results: ### Result #### Parameters -- `custom_decoders`: custom decoders for unsupported types. [Read more](./../../usage/types/advanced_type_usage.md) +- `custom_decoders`: custom decoders for unsupported types. 
[Read more](/usage/types/advanced_type_usage.md) Get the result as a list of dicts @@ -34,7 +34,7 @@ async def main() -> None: #### Parameters - `as_class`: Custom class from Python. -- `custom_decoders`: custom decoders for unsupported types. [Read more](./../../usage/types/advanced_type_usage.md) +- `custom_decoders`: custom decoders for unsupported types. [Read more](/usage/types/advanced_type_usage.md) Get the result as a list of passed class instances. Passed class can easily be either pydantic or msgspec model. @@ -62,16 +62,16 @@ async def main() -> None: #### Parameters - `row_factory`: custom callable object. -- `custom_decoders`: custom decoders for unsupported types. [Read more](./../../usage/types/advanced_type_usage.md) +- `custom_decoders`: custom decoders for unsupported types. [Read more](/usage/types/advanced_type_usage.md) -[Read more](./../../usage/row_factories/overall_usage.md) +[Read more](/usage/row_factories/row_factories.md) ## SingleQueryResult methods ### Result #### Parameters -- `custom_decoders`: custom decoders for unsupported types. [Read more](./../../usage/types/advanced_type_usage.md) +- `custom_decoders`: custom decoders for unsupported types. [Read more](/usage/types/advanced_type_usage.md) Get the result as a dict @@ -91,7 +91,7 @@ async def main() -> None: #### Parameters - `as_class`: Custom class from Python. -- `custom_decoders`: custom decoders for unsupported types. [Read more](./../../usage/types/advanced_type_usage.md) +- `custom_decoders`: custom decoders for unsupported types. [Read more](/usage/types/advanced_type_usage.md) Get the result as a passed class instance. Passed class can easily be either pydantic or msgspec model. @@ -118,6 +118,6 @@ async def main() -> None: #### Parameters - `row_factory`: custom callable object. -- `custom_decoders`: custom decoders for unsupported types. [Read more](./../../usage/types/advanced_type_usage.md) +- `custom_decoders`: custom decoders for unsupported types. [Read more](/usage/types/advanced_type_usage.md) -[Read more](./../../usage/row_factories/overall_usage.md) +[Read more](/usage/row_factories/row_factories.md) From b55aa6beb7347f9be6254fd7ae31ab41ae228ec4 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Mon, 5 May 2025 21:52:56 +0200 Subject: [PATCH 43/65] Bumped version to 0.10.0 --- Cargo.lock | 2 +- Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index df4dc951..3d925ff1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -997,7 +997,7 @@ dependencies = [ [[package]] name = "psqlpy" -version = "0.9.3" +version = "0.10.0" dependencies = [ "byteorder", "bytes", diff --git a/Cargo.toml b/Cargo.toml index 1846f8c2..8a4c5807 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "psqlpy" -version = "0.9.3" +version = "0.10.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html From de851761fc315873d6a23bec80e2912b3b435531 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Tue, 6 May 2025 00:16:38 +0200 Subject: [PATCH 44/65] Fixed doc --- docs/usage/types/extra_types.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/usage/types/extra_types.md b/docs/usage/types/extra_types.md index e4528831..311a7294 100644 --- a/docs/usage/types/extra_types.md +++ b/docs/usage/types/extra_types.md @@ -21,8 +21,8 @@ Use standard python types instead of deprecated ones. 
| Float64 (Deprecated) | FLOAT8 | f64 | | VarChar (Deprecated) | VarChar | String | | Text (Deprecated) | Text | String | -| JSON | JSON | serde::Value | -| JSONB | JSONB | serde::Value | +| JSON (Deprecated) | JSON | serde::Value | +| JSONB (Deprecated) | JSONB | serde::Value | | MacAddr6 | MacAddr | MacAddr6 | | MacAddr8 | MacAddr8 | MacAddr8 | | Point | Point | Point | From 093d78c38a4712af00dd12feee3577d2bc14ee3f Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Tue, 6 May 2025 12:40:48 +0200 Subject: [PATCH 45/65] Small fixes with kwargs parameters --- python/tests/conftest.py | 2 +- src/statement/parameters.rs | 25 +++++++++++-------------- 2 files changed, 12 insertions(+), 15 deletions(-) diff --git a/python/tests/conftest.py b/python/tests/conftest.py index a9bfc4d3..efb7f6e3 100644 --- a/python/tests/conftest.py +++ b/python/tests/conftest.py @@ -177,7 +177,7 @@ async def create_table_for_map_parameters_test( connection = await psql_pool.connection() await connection.execute( f"CREATE TABLE {map_parameters_table_name}" - "(id SERIAL, name VARCHAR(255),surname VARCHAR(255), age INT)", + "(id SERIAL, name VARCHAR(255),surname VARCHAR(255), age SMALLINT)", ) yield diff --git a/src/statement/parameters.rs b/src/statement/parameters.rs index 0a2d9105..5ca0f1b3 100644 --- a/src/statement/parameters.rs +++ b/src/statement/parameters.rs @@ -125,22 +125,22 @@ impl MappingParametersBuilder { gil: Python<'_>, parameters_names: Vec, ) -> PSQLPyResult { - if self.types.is_some() { - return self.prepare_typed(gil, parameters_names); + match self.types.clone() { + Some(types) => return self.prepare_typed(gil, parameters_names, types), + None => return self.prepare_not_typed(gil, parameters_names), } - - self.prepare_not_typed(gil, parameters_names) } fn prepare_typed( self, gil: Python<'_>, parameters_names: Vec, + types: Vec, ) -> PSQLPyResult { - let converted_parameters = self - .extract_parameters(gil, parameters_names)? - .iter() - .map(|parameter| from_python_untyped(parameter.bind(gil))) + let extracted_parameters = self.extract_parameters(gil, parameters_names)?; + let zipped_params_types = zip(extracted_parameters, types); + let converted_parameters = zipped_params_types + .map(|(parameter, type_)| from_python_typed(parameter.bind(gil), &type_)) .collect::>>()?; Ok(PreparedParameters::new(converted_parameters, vec![])) // TODO: change vec![] to real types. 
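For context, this mapping builder backs the Python-side named-parameter syntax: placeholders written as `$(name)p` are bound from a `dict` passed via `parameters`, as exercised in the kwargs tests further below. A minimal sketch, with the `users` table and its columns assumed purely for illustration:

```python
from psqlpy import ConnectionPool


async def find_user(db_pool: ConnectionPool, name: str) -> None:
    connection = await db_pool.connection()
    # Both placeholders are bound from the same "name" key of the dict.
    result = await connection.execute(
        "SELECT * FROM users WHERE name = $(name)p OR surname = $(name)p",
        parameters={"name": name},
    )
    print(result.result())
```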
@@ -196,13 +196,10 @@ impl SequenceParametersBuilder { } fn prepare(self, gil: Python<'_>) -> PSQLPyResult { - let types = self.types.clone(); - - if types.is_some() { - return self.prepare_typed(gil, types.clone().unwrap()); + match self.types.clone() { + Some(types) => return self.prepare_typed(gil, types), + None => return self.prepare_not_typed(gil), } - - self.prepare_not_typed(gil) } fn prepare_typed(self, gil: Python<'_>, types: Vec) -> PSQLPyResult { From c98ae531aecbc7938df240792a26007af531baee Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Tue, 6 May 2025 12:43:13 +0200 Subject: [PATCH 46/65] Small fixes with kwargs parameters --- python/tests/test_kwargs_parameters.py | 9 +++++++++ src/statement/parameters.rs | 8 ++++---- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/python/tests/test_kwargs_parameters.py b/python/tests/test_kwargs_parameters.py index d1fb1ebf..885e99f9 100644 --- a/python/tests/test_kwargs_parameters.py +++ b/python/tests/test_kwargs_parameters.py @@ -29,7 +29,15 @@ async def test_success_default_map_parameters( @pytest.mark.usefixtures("create_table_for_map_parameters_test") +@pytest.mark.parametrize( + "prepared", + [ + True, + False, + ], +) async def test_success_multiple_same_parameters( + prepared: bool, psql_pool: ConnectionPool, map_parameters_table_name: str, ) -> None: @@ -54,6 +62,7 @@ async def test_success_multiple_same_parameters( "WHERE name = $(name)p OR surname = $(name)p" ), parameters={"name": test_name_surname}, + prepared=prepared, ) assert res.result()[0]["name"] == test_name_surname diff --git a/src/statement/parameters.rs b/src/statement/parameters.rs index 5ca0f1b3..09e0cbef 100644 --- a/src/statement/parameters.rs +++ b/src/statement/parameters.rs @@ -138,12 +138,12 @@ impl MappingParametersBuilder { types: Vec, ) -> PSQLPyResult { let extracted_parameters = self.extract_parameters(gil, parameters_names)?; - let zipped_params_types = zip(extracted_parameters, types); + let zipped_params_types = zip(extracted_parameters, &types); let converted_parameters = zipped_params_types .map(|(parameter, type_)| from_python_typed(parameter.bind(gil), &type_)) .collect::>>()?; - Ok(PreparedParameters::new(converted_parameters, vec![])) // TODO: change vec![] to real types. + Ok(PreparedParameters::new(converted_parameters, types)) } fn prepare_not_typed( @@ -157,7 +157,7 @@ impl MappingParametersBuilder { .map(|parameter| from_python_untyped(parameter.bind(gil))) .collect::>>()?; - Ok(PreparedParameters::new(converted_parameters, vec![])) // TODO: change vec![] to real types. + Ok(PreparedParameters::new(converted_parameters, vec![])) } fn extract_parameters( @@ -218,7 +218,7 @@ impl SequenceParametersBuilder { .map(|parameter| from_python_untyped(parameter.bind(gil))) .collect::>>()?; - Ok(PreparedParameters::new(converted_parameters, vec![])) // TODO: change vec![] to real types. 
+ Ok(PreparedParameters::new(converted_parameters, vec![])) } } From 2e4b134e9d97e2d2def15e01f714f8748f29b5ed Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Tue, 6 May 2025 12:43:35 +0200 Subject: [PATCH 47/65] Bumped version to 0.10.1 --- Cargo.lock | 2 +- Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3d925ff1..fc0b9c9b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -997,7 +997,7 @@ dependencies = [ [[package]] name = "psqlpy" -version = "0.10.0" +version = "0.10.1" dependencies = [ "byteorder", "bytes", diff --git a/Cargo.toml b/Cargo.toml index 8a4c5807..b33f08c2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "psqlpy" -version = "0.10.0" +version = "0.10.1" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html From e51b58b5fffe9c4c11b74236ae6ab6c70854d248 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Tue, 6 May 2025 23:31:18 +0200 Subject: [PATCH 48/65] Added DBAPI exceptions and connect method to single connection creation --- docs/components/connection_pool.md | 6 +- pyproject.toml | 4 + python/psqlpy/__init__.py | 2 + python/psqlpy/_internal/__init__.pyi | 30 ++++++- python/psqlpy/_internal/exceptions.pyi | 85 +++++++++++++++--- python/psqlpy/exceptions.py | 22 ++++- python/tests/test_connection_pool.py | 12 +-- python/tests/test_transaction.py | 8 +- src/driver/connection.rs | 101 ++++++++++++++++++++- src/driver/connection_pool.rs | 18 +++- src/driver/cursor.rs | 2 - src/exceptions/python_errors.rs | 120 +++++++++++++------------ src/exceptions/rust_errors.rs | 4 +- src/lib.rs | 6 +- 14 files changed, 326 insertions(+), 94 deletions(-) diff --git a/docs/components/connection_pool.md b/docs/components/connection_pool.md index 514f899d..b731135f 100644 --- a/docs/components/connection_pool.md +++ b/docs/components/connection_pool.md @@ -125,15 +125,15 @@ db_pool: Final = ConnectionPool( ```py from typing import Final -from psqlpy import connect +from psqlpy import connect_pool -db_pool: Final = connect( +db_pool: Final = connect_pool( dsn="postgres://postgres:postgres@localhost:5432/postgres", max_db_pool_size=10, ) ``` -`connect` function has the same parameters as `ConnectionPool`. +`connect_pool` function has the same parameters as `ConnectionPool`. 
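The same change also exposes an async `connect` function that returns a single standalone connection instead of a pool. A minimal sketch, reusing the local DSN from the example above:

```py
from psqlpy import connect


async def main() -> None:
    connection = await connect(
        dsn="postgres://postgres:postgres@localhost:5432/postgres",
    )
    await connection.execute("SELECT 1")
```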
### Use Connection Pool as context manager ```py diff --git a/pyproject.toml b/pyproject.toml index 84c00f42..cd2b2f42 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -96,6 +96,10 @@ ignore = [ "D103", # Missing docstring in public function "S311", # Standard pseudo-random generators are not suitable for security/cryptographic purposes ] +"python/psqlpy/_internal/exceptions.pyi" = [ + "D205", + "RUF002", +] "./psqlpy-stress/psqlpy_stress/migrations/env.py" = ["INP001"] "examples/*" = ["INP001"] diff --git a/python/psqlpy/__init__.py b/python/psqlpy/__init__.py index 6f899719..41ede3fe 100644 --- a/python/psqlpy/__init__.py +++ b/python/psqlpy/__init__.py @@ -17,6 +17,7 @@ TargetSessionAttrs, Transaction, connect, + connect_pool, ) __all__ = [ @@ -38,4 +39,5 @@ "TargetSessionAttrs", "Transaction", "connect", + "connect_pool", ] diff --git a/python/psqlpy/_internal/__init__.pyi b/python/psqlpy/_internal/__init__.pyi index 8c391d96..8cf394b7 100644 --- a/python/psqlpy/_internal/__init__.pyi +++ b/python/psqlpy/_internal/__init__.pyi @@ -886,6 +886,34 @@ class Transaction: number of inserted rows; """ +async def connect( + dsn: str | None = None, + username: str | None = None, + password: str | None = None, + host: str | None = None, + hosts: list[str] | None = None, + port: int | None = None, + ports: list[int] | None = None, + db_name: str | None = None, + target_session_attrs: TargetSessionAttrs | None = None, + options: str | None = None, + application_name: str | None = None, + connect_timeout_sec: int | None = None, + connect_timeout_nanosec: int | None = None, + tcp_user_timeout_sec: int | None = None, + tcp_user_timeout_nanosec: int | None = None, + keepalives: bool | None = None, + keepalives_idle_sec: int | None = None, + keepalives_idle_nanosec: int | None = None, + keepalives_interval_sec: int | None = None, + keepalives_interval_nanosec: int | None = None, + keepalives_retries: int | None = None, + load_balance_hosts: LoadBalanceHosts | None = None, + ssl_mode: SslMode | None = None, + ca_file: str | None = None, +) -> Connection: + """Create new standalone connection.""" + class Connection: """Connection from Database Connection Pool. @@ -1336,7 +1364,7 @@ class ConnectionPool: def close(self: Self) -> None: """Close the connection pool.""" -def connect( +def connect_pool( dsn: str | None = None, username: str | None = None, password: str | None = None, diff --git a/python/psqlpy/_internal/exceptions.pyi b/python/psqlpy/_internal/exceptions.pyi index a0588e9f..0aabd342 100644 --- a/python/psqlpy/_internal/exceptions.pyi +++ b/python/psqlpy/_internal/exceptions.pyi @@ -1,7 +1,68 @@ -class RustPSQLDriverPyBaseError(Exception): - """Base PSQL-Rust-Engine exception.""" +class WarningError(Exception): + """ + Exception raised for important warnings + like data truncations while inserting, etc. + """ + +class Error(Exception): + """ + Exception that is the base class of all other error exceptions. -class BaseConnectionPoolError(RustPSQLDriverPyBaseError): + You can use this to catch all errors with one single except statement. + """ + +class InterfaceError(Error): + """ + Exception raised for errors that are related to the + database interface rather than the database itself. + """ + +class DatabaseError(Error): + """Exception raised for errors that are related to the database.""" + +class DataError(DatabaseError): + """ + Exception raised for errors that are due to problems with + the processed data like division by zero, numeric value out of range, etc. 
+ """ + +class OperationalError(DatabaseError): + """ + Exception raised for errors that are related to the database’s operation + and not necessarily under the control of the programmer, + e.g. an unexpected disconnect occurs, the data source name is not found, + a transaction could not be processed, a memory allocation error + occurred during processing, etc. + """ + +class IntegrityError(DatabaseError): + """ + Exception raised when the relational integrity of the + database is affected, e.g. a foreign key check fails. + """ + +class InternalError(DatabaseError): + """ + Exception raised when the database encounters an internal error, + e.g. the cursor is not valid anymore, the transaction is out of sync, etc. + """ + +class ProgrammingError(DatabaseError): + """ + Exception raised for programming errors, e.g. table not found or + already exists, syntax error in the SQL statement, + wrong number of parameters specified, etc. + """ + +class NotSupportedError(DatabaseError): + """ + Exception raised in case a method or database API was used which + is not supported by the database, e.g. requesting a .rollback() + on a connection that does not support transaction + or has transactions turned off. + """ + +class BaseConnectionPoolError(InterfaceError): """Base error for all Connection Pool errors.""" class ConnectionPoolBuildError(BaseConnectionPoolError): @@ -13,7 +74,7 @@ class ConnectionPoolConfigurationError(BaseConnectionPoolError): class ConnectionPoolExecuteError(BaseConnectionPoolError): """Error in connection pool execution.""" -class BaseConnectionError(RustPSQLDriverPyBaseError): +class BaseConnectionError(InterfaceError): """Base error for Connection errors.""" class ConnectionExecuteError(BaseConnectionError): @@ -22,7 +83,7 @@ class ConnectionExecuteError(BaseConnectionError): class ConnectionClosedError(BaseConnectionError): """Error if underlying connection is already closed.""" -class BaseTransactionError(RustPSQLDriverPyBaseError): +class BaseTransactionError(InterfaceError): """Base error for all transaction errors.""" class TransactionBeginError(BaseTransactionError): @@ -43,7 +104,7 @@ class TransactionExecuteError(BaseTransactionError): class TransactionClosedError(BaseTransactionError): """Error if underlying connection is already closed.""" -class BaseCursorError(RustPSQLDriverPyBaseError): +class BaseCursorError(InterfaceError): """Base error for Cursor errors.""" class CursorStartError(BaseCursorError): @@ -58,29 +119,27 @@ class CursorFetchError(BaseCursorError): class CursorClosedError(BaseCursorError): """Error if underlying connection is already closed.""" -class UUIDValueConvertError(RustPSQLDriverPyBaseError): +class UUIDValueConvertError(DataError): """Error if it's impossible to convert py string UUID into rust UUID.""" -class MacAddrConversionError(RustPSQLDriverPyBaseError): +class MacAddrConversionError(DataError): """Error if cannot convert MacAddr string value to rust type.""" -class RustToPyValueMappingError(RustPSQLDriverPyBaseError): +class RustToPyValueMappingError(DataError): """Error if it is not possible to covert rust type to python. You can get it if you database contains data type that it not supported by this library. - - It's better to handle this exception. """ -class PyToRustValueMappingError(RustPSQLDriverPyBaseError): +class PyToRustValueMappingError(DataError): """Error if it is not possible to covert python type to rust. You can get this exception when executing queries with parameters. 
So, if there are no parameters for the query, don't handle this error. """ -class BaseListenerError(RustPSQLDriverPyBaseError): +class BaseListenerError(InterfaceError): """Base error for all Listener errors.""" class ListenerStartError(BaseListenerError): diff --git a/python/psqlpy/exceptions.py b/python/psqlpy/exceptions.py index 2d981ef3..da5f51a0 100644 --- a/python/psqlpy/exceptions.py +++ b/python/psqlpy/exceptions.py @@ -13,12 +13,20 @@ CursorCloseError, CursorFetchError, CursorStartError, + DatabaseError, + DataError, + Error, + IntegrityError, + InterfaceError, + InternalError, ListenerCallbackError, ListenerClosedError, ListenerStartError, MacAddrConversionError, + NotSupportedError, + OperationalError, + ProgrammingError, PyToRustValueMappingError, - RustPSQLDriverPyBaseError, RustToPyValueMappingError, TransactionBeginError, TransactionClosedError, @@ -27,6 +35,7 @@ TransactionRollbackError, TransactionSavepointError, UUIDValueConvertError, + WarningError, ) __all__ = [ @@ -44,12 +53,20 @@ "CursorClosedError", "CursorFetchError", "CursorStartError", + "DataError", + "DatabaseError", + "Error", + "IntegrityError", + "InterfaceError", + "InternalError", "ListenerCallbackError", "ListenerClosedError", "ListenerStartError", "MacAddrConversionError", + "NotSupportedError", + "OperationalError", + "ProgrammingError", "PyToRustValueMappingError", - "RustPSQLDriverPyBaseError", "RustToPyValueMappingError", "TransactionBeginError", "TransactionClosedError", @@ -58,4 +75,5 @@ "TransactionRollbackError", "TransactionSavepointError", "UUIDValueConvertError", + "WarningError", ] diff --git a/python/tests/test_connection_pool.py b/python/tests/test_connection_pool.py index 405fceb7..dee61e86 100644 --- a/python/tests/test_connection_pool.py +++ b/python/tests/test_connection_pool.py @@ -5,11 +5,11 @@ ConnRecyclingMethod, LoadBalanceHosts, TargetSessionAttrs, - connect, + connect_pool, ) from psqlpy.exceptions import ( ConnectionPoolConfigurationError, - RustPSQLDriverPyBaseError, + InterfaceError, ) pytestmark = pytest.mark.anyio @@ -17,7 +17,7 @@ async def test_connect_func() -> None: """Test that connect function makes new connection pool.""" - pg_pool = connect( + pg_pool = connect_pool( dsn="postgres://postgres:postgres@localhost:5432/psqlpy_test", ) @@ -106,7 +106,7 @@ async def test_pool_target_session_attrs( ) if target_session_attrs == TargetSessionAttrs.ReadOnly: - with pytest.raises(expected_exception=RustPSQLDriverPyBaseError): + with pytest.raises(expected_exception=InterfaceError): await pg_pool.connection() else: conn = await pg_pool.connection() @@ -143,7 +143,7 @@ async def test_close_connection_pool() -> None: pg_pool.close() - with pytest.raises(expected_exception=RustPSQLDriverPyBaseError): + with pytest.raises(expected_exception=InterfaceError): await pg_pool.connection() @@ -156,5 +156,5 @@ async def test_connection_pool_as_context_manager() -> None: res = await conn.execute("SELECT 1") assert res.result() - with pytest.raises(expected_exception=RustPSQLDriverPyBaseError): + with pytest.raises(expected_exception=InterfaceError): await pg_pool.connection() diff --git a/python/tests/test_transaction.py b/python/tests/test_transaction.py index 3c60676a..280d21be 100644 --- a/python/tests/test_transaction.py +++ b/python/tests/test_transaction.py @@ -11,7 +11,7 @@ SynchronousCommit, ) from psqlpy.exceptions import ( - RustPSQLDriverPyBaseError, + InterfaceError, TransactionBeginError, TransactionExecuteError, TransactionSavepointError, @@ -50,7 +50,7 @@ async def 
test_transaction_init_parameters( f"INSERT INTO {table_name} VALUES ($1, $2)", parameters=[100, "test_name"], ) - except RustPSQLDriverPyBaseError: + except InterfaceError: assert read_variant is ReadVariant.ReadOnly else: assert read_variant is not ReadVariant.ReadOnly @@ -287,7 +287,7 @@ async def test_transaction_fetch_row_more_than_one_row( ) -> None: connection = await psql_pool.connection() async with connection.transaction() as transaction: - with pytest.raises(RustPSQLDriverPyBaseError): + with pytest.raises(InterfaceError): await transaction.fetch_row( f"SELECT * FROM {table_name}", [], @@ -313,7 +313,7 @@ async def test_transaction_fetch_val_more_than_one_row( ) -> None: connection = await psql_pool.connection() async with connection.transaction() as transaction: - with pytest.raises(RustPSQLDriverPyBaseError): + with pytest.raises(InterfaceError): await transaction.fetch_row( f"SELECT * FROM {table_name}", [], diff --git a/src/driver/connection.rs b/src/driver/connection.rs index d38b71f9..ded325a2 100644 --- a/src/driver/connection.rs +++ b/src/driver/connection.rs @@ -1,7 +1,7 @@ use bytes::BytesMut; use deadpool_postgres::Pool; use futures_util::pin_mut; -use pyo3::{buffer::PyBuffer, pyclass, pymethods, Py, PyAny, PyErr, Python}; +use pyo3::{buffer::PyBuffer, pyclass, pyfunction, pymethods, Py, PyAny, PyErr, Python}; use std::{collections::HashSet, net::IpAddr, sync::Arc}; use tokio_postgres::{binary_copy::BinaryCopyInWriter, config::Host, Config}; @@ -9,16 +9,113 @@ use crate::{ exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, format_helpers::quote_ident, query_result::{PSQLDriverPyQueryResult, PSQLDriverSinglePyQueryResult}, - runtime::tokio_runtime, + runtime::{rustdriver_future, tokio_runtime}, }; use super::{ + common_options::{LoadBalanceHosts, SslMode, TargetSessionAttrs}, + connection_pool::{connect_pool, ConnectionPool}, cursor::Cursor, inner_connection::PsqlpyConnection, transaction::Transaction, transaction_options::{IsolationLevel, ReadVariant, SynchronousCommit}, + utils::build_connection_config, }; +/// Make new connection pool. +/// +/// # Errors +/// May return error if cannot build new connection pool. 
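+///
+/// Internally this builds a small helper pool (max size 2) and returns a single
+/// standalone `Connection` retrieved from it.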
+#[pyfunction] +#[pyo3(signature = ( + dsn=None, + username=None, + password=None, + host=None, + hosts=None, + port=None, + ports=None, + db_name=None, + target_session_attrs=None, + options=None, + application_name=None, + connect_timeout_sec=None, + connect_timeout_nanosec=None, + tcp_user_timeout_sec=None, + tcp_user_timeout_nanosec=None, + keepalives=None, + keepalives_idle_sec=None, + keepalives_idle_nanosec=None, + keepalives_interval_sec=None, + keepalives_interval_nanosec=None, + keepalives_retries=None, + load_balance_hosts=None, + ssl_mode=None, + ca_file=None, +))] +#[allow(clippy::too_many_arguments)] +pub async fn connect( + dsn: Option, + username: Option, + password: Option, + host: Option, + hosts: Option>, + port: Option, + ports: Option>, + db_name: Option, + target_session_attrs: Option, + options: Option, + application_name: Option, + connect_timeout_sec: Option, + connect_timeout_nanosec: Option, + tcp_user_timeout_sec: Option, + tcp_user_timeout_nanosec: Option, + keepalives: Option, + keepalives_idle_sec: Option, + keepalives_idle_nanosec: Option, + keepalives_interval_sec: Option, + keepalives_interval_nanosec: Option, + keepalives_retries: Option, + load_balance_hosts: Option, + ssl_mode: Option, + ca_file: Option, +) -> PSQLPyResult { + let mut connection_pool = connect_pool( + dsn, + username, + password, + host, + hosts, + port, + ports, + db_name, + target_session_attrs, + options, + application_name, + connect_timeout_sec, + connect_timeout_nanosec, + tcp_user_timeout_sec, + tcp_user_timeout_nanosec, + keepalives, + keepalives_idle_sec, + keepalives_idle_nanosec, + keepalives_interval_sec, + keepalives_interval_nanosec, + keepalives_retries, + load_balance_hosts, + ssl_mode, + ca_file, + Some(2), + None, + )?; + + let db_connection = tokio_runtime() + .spawn(async move { connection_pool.retrieve_connection().await }) + .await??; + + Ok(db_connection) +} + #[pyclass(subclass)] #[derive(Clone)] pub struct Connection { diff --git a/src/driver/connection_pool.rs b/src/driver/connection_pool.rs index aa897012..a764cea3 100644 --- a/src/driver/connection_pool.rs +++ b/src/driver/connection_pool.rs @@ -66,7 +66,7 @@ impl ConnectionPoolConf { conn_recycling_method=None, ))] #[allow(clippy::too_many_arguments)] -pub fn connect( +pub fn connect_pool( dsn: Option, username: Option, password: Option, @@ -239,6 +239,20 @@ impl ConnectionPool { } } + pub async fn retrieve_connection(&mut self) -> PSQLPyResult { + let connection = self.pool.get().await?; + + Ok(Connection::new( + Some(Arc::new(PsqlpyConnection::PoolConn( + connection, + self.pool_conf.prepare, + ))), + None, + self.pg_config.clone(), + self.pool_conf.prepare, + )) + } + pub fn remove_prepared_stmt(&mut self, query: &str, types: &[Type]) { self.pool.manager().statement_caches.remove(query, types); } @@ -308,7 +322,7 @@ impl ConnectionPool { ssl_mode: Option, ca_file: Option, ) -> PSQLPyResult { - connect( + connect_pool( dsn, username, password, diff --git a/src/driver/cursor.rs b/src/driver/cursor.rs index 1f435ef5..54aee852 100644 --- a/src/driver/cursor.rs +++ b/src/driver/cursor.rs @@ -303,8 +303,6 @@ impl Cursor { /// /// Execute FETCH FROM /// - /// This is the only place where we use `rustdriver_future` cuz - /// we didn't find any solution how to implement it without /// # Errors /// May return Err Result if can't execute querystring. 
fn __anext__(&self) -> PSQLPyResult> { diff --git a/src/exceptions/python_errors.rs b/src/exceptions/python_errors.rs index 4d3798cb..716e2f04 100644 --- a/src/exceptions/python_errors.rs +++ b/src/exceptions/python_errors.rs @@ -4,27 +4,61 @@ use pyo3::{ Bound, PyResult, Python, }; -// Main exception. +// Exception raised for important warnings like data truncations while inserting, etc. create_exception!( psqlpy.exceptions, - RustPSQLDriverPyBaseError, + WarningError, pyo3::exceptions::PyException ); +// Exception that is the base class of all other error exceptions. +// You can use this to catch all errors with one single except statement. +create_exception!(psqlpy.exceptions, Error, pyo3::exceptions::PyException); + +// Exception raised for errors that are related to the +// database interface rather than the database itself. +create_exception!(psqlpy.exceptions, InterfaceError, Error); + +// Exception raised for errors that are related to the database. +create_exception!(psqlpy.exceptions, DatabaseError, Error); + +// Exception raised for errors that are due to problems with +// the processed data like division by zero, numeric value out of range, etc. +create_exception!(psqlpy.exceptions, DataError, DatabaseError); + +// Exception raised for errors that are related to the database’s operation +// and not necessarily under the control of the programmer, +// e.g. an unexpected disconnect occurs, the data source name is not found, +// a transaction could not be processed, a memory allocation error +// occurred during processing, etc. +create_exception!(psqlpy.exceptions, OperationalError, DatabaseError); + +// Exception raised when the relational integrity of the +// database is affected, e.g. a foreign key check fails. +create_exception!(psqlpy.exceptions, IntegrityError, DatabaseError); + +// Exception raised when the database encounters an internal error, +// e.g. the cursor is not valid anymore, the transaction is out of sync, etc. +create_exception!(psqlpy.exceptions, InternalError, DatabaseError); + +// Exception raised for programming errors, e.g. table not found or +// already exists, syntax error in the SQL statement, +// wrong number of parameters specified, etc. +create_exception!(psqlpy.exceptions, ProgrammingError, DatabaseError); +// Exception raised in case a method or database API was used which +// is not supported by the database, e.g. requesting a .rollback() +// on a connection that does not support transaction +// or has transactions turned off. +create_exception!(psqlpy.exceptions, NotSupportedError, DatabaseError); + // Rust exceptions // `Rust` means thats these exceptions come from external rust crates, // not from the code of the library. 
-create_exception!(psqlpy.exceptions, RustException, RustPSQLDriverPyBaseError); -create_exception!(psqlpy.exceptions, DriverError, RustException); -create_exception!(psqlpy.exceptions, MacAddrParseError, RustException); -create_exception!(psqlpy.exceptions, RuntimeJoinError, RustException); +create_exception!(psqlpy.exceptions, MacAddrParseError, DataError); +create_exception!(psqlpy.exceptions, RuntimeJoinError, DataError); // ConnectionPool exceptions -create_exception!( - psqlpy.exceptions, - BaseConnectionPoolError, - RustPSQLDriverPyBaseError -); +create_exception!(psqlpy.exceptions, BaseConnectionPoolError, InterfaceError); create_exception!( psqlpy.exceptions, ConnectionPoolBuildError, @@ -42,11 +76,7 @@ create_exception!( ); // Connection exceptions -create_exception!( - psqlpy.exceptions, - BaseConnectionError, - RustPSQLDriverPyBaseError -); +create_exception!(psqlpy.exceptions, BaseConnectionError, InterfaceError); create_exception!( psqlpy.exceptions, ConnectionExecuteError, @@ -59,11 +89,7 @@ create_exception!( ); // Transaction exceptions -create_exception!( - psqlpy.exceptions, - BaseTransactionError, - RustPSQLDriverPyBaseError -); +create_exception!(psqlpy.exceptions, BaseTransactionError, InterfaceError); create_exception!( psqlpy.exceptions, TransactionBeginError, @@ -96,59 +122,41 @@ create_exception!( ); // Cursor exceptions -create_exception!( - psqlpy.exceptions, - BaseCursorError, - RustPSQLDriverPyBaseError -); +create_exception!(psqlpy.exceptions, BaseCursorError, InterfaceError); create_exception!(psqlpy.exceptions, CursorStartError, BaseCursorError); create_exception!(psqlpy.exceptions, CursorCloseError, BaseCursorError); create_exception!(psqlpy.exceptions, CursorFetchError, BaseCursorError); create_exception!(psqlpy.exceptions, CursorClosedError, BaseCursorError); // Listener Error -create_exception!( - psqlpy.exceptions, - BaseListenerError, - RustPSQLDriverPyBaseError -); +create_exception!(psqlpy.exceptions, BaseListenerError, InterfaceError); create_exception!(psqlpy.exceptions, ListenerStartError, BaseListenerError); create_exception!(psqlpy.exceptions, ListenerClosedError, BaseListenerError); create_exception!(psqlpy.exceptions, ListenerCallbackError, BaseListenerError); // Inner exceptions -create_exception!( - psqlpy.exceptions, - RustToPyValueMappingError, - RustPSQLDriverPyBaseError -); -create_exception!( - psqlpy.exceptions, - PyToRustValueMappingError, - RustPSQLDriverPyBaseError -); +create_exception!(psqlpy.exceptions, RustToPyValueMappingError, DataError); +create_exception!(psqlpy.exceptions, PyToRustValueMappingError, DataError); -create_exception!( - psqlpy.exceptions, - UUIDValueConvertError, - RustPSQLDriverPyBaseError -); +create_exception!(psqlpy.exceptions, UUIDValueConvertError, DataError); -create_exception!( - psqlpy.exceptions, - MacAddrConversionError, - RustPSQLDriverPyBaseError -); +create_exception!(psqlpy.exceptions, MacAddrConversionError, DataError); -create_exception!(psqlpy.exceptions, SSLError, RustPSQLDriverPyBaseError); +create_exception!(psqlpy.exceptions, SSLError, DatabaseError); #[allow(clippy::missing_errors_doc)] #[allow(clippy::too_many_lines)] pub fn python_exceptions_module(py: Python<'_>, pymod: &Bound<'_, PyModule>) -> PyResult<()> { - pymod.add( - "RustPSQLDriverPyBaseError", - py.get_type::(), - )?; + pymod.add("Warning", py.get_type::())?; + pymod.add("Error", py.get_type::())?; + pymod.add("InterfaceError", py.get_type::())?; + pymod.add("DatabaseError", py.get_type::())?; + pymod.add("DataError", 
py.get_type::())?; + pymod.add("OperationalError", py.get_type::())?; + pymod.add("IntegrityError", py.get_type::())?; + pymod.add("InternalError", py.get_type::())?; + pymod.add("ProgrammingError", py.get_type::())?; + pymod.add("NotSupportedError", py.get_type::())?; pymod.add( "BaseConnectionPoolError", diff --git a/src/exceptions/rust_errors.rs b/src/exceptions/rust_errors.rs index 94b89fa0..f133321b 100644 --- a/src/exceptions/rust_errors.rs +++ b/src/exceptions/rust_errors.rs @@ -8,7 +8,7 @@ use super::python_errors::{ BaseConnectionError, BaseConnectionPoolError, BaseCursorError, BaseListenerError, BaseTransactionError, ConnectionClosedError, ConnectionExecuteError, ConnectionPoolBuildError, ConnectionPoolConfigurationError, ConnectionPoolExecuteError, CursorCloseError, - CursorClosedError, CursorFetchError, CursorStartError, DriverError, ListenerCallbackError, + CursorClosedError, CursorFetchError, CursorStartError, DatabaseError, ListenerCallbackError, ListenerClosedError, ListenerStartError, MacAddrParseError, RuntimeJoinError, SSLError, TransactionBeginError, TransactionClosedError, TransactionCommitError, TransactionExecuteError, TransactionRollbackError, TransactionSavepointError, UUIDValueConvertError, @@ -104,7 +104,7 @@ impl From for pyo3::PyErr { let error_desc = error.to_string(); match error { RustPSQLDriverError::RustPyError(err) => err, - RustPSQLDriverError::RustDriverError(_) => DriverError::new_err((error_desc,)), + RustPSQLDriverError::RustDriverError(_) => DatabaseError::new_err((error_desc,)), RustPSQLDriverError::RustMacAddrConversionError(_) => { MacAddrParseError::new_err((error_desc,)) } diff --git a/src/lib.rs b/src/lib.rs index 6be59c75..d6ae473a 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -25,8 +25,12 @@ fn psqlpy(py: Python<'_>, pymod: &Bound<'_, PyModule>) -> PyResult<()> { pymod.add_class::()?; pymod.add_class::()?; pymod.add_class::()?; - pymod.add_function(wrap_pyfunction!(driver::connection_pool::connect, pymod)?)?; + pymod.add_function(wrap_pyfunction!( + driver::connection_pool::connect_pool, + pymod + )?)?; pymod.add_class::()?; + pymod.add_function(wrap_pyfunction!(driver::connection::connect, pymod)?)?; pymod.add_class::()?; pymod.add_class::()?; pymod.add_class::()?; From 0a484a8f6ecbe22903a4ba5803e0971a37d1bf8c Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Wed, 7 May 2025 00:45:36 +0200 Subject: [PATCH 49/65] Added DBAPI exceptions and connect method to single connection creation --- src/exceptions/python_errors.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/exceptions/python_errors.rs b/src/exceptions/python_errors.rs index 716e2f04..e7c0a214 100644 --- a/src/exceptions/python_errors.rs +++ b/src/exceptions/python_errors.rs @@ -147,7 +147,7 @@ create_exception!(psqlpy.exceptions, SSLError, DatabaseError); #[allow(clippy::missing_errors_doc)] #[allow(clippy::too_many_lines)] pub fn python_exceptions_module(py: Python<'_>, pymod: &Bound<'_, PyModule>) -> PyResult<()> { - pymod.add("Warning", py.get_type::())?; + pymod.add("WarningError", py.get_type::())?; pymod.add("Error", py.get_type::())?; pymod.add("InterfaceError", py.get_type::())?; pymod.add("DatabaseError", py.get_type::())?; From c793b32f747ce5b3be4aace8f8f91ebac019063a Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Tue, 13 May 2025 23:00:12 +0200 Subject: [PATCH 50/65] Added inner transaction impl --- src/driver/inner_transaction.rs | 83 +++++++++++++++++++++++++++++++++ 1 file changed, 83 insertions(+) create 
mode 100644 src/driver/inner_transaction.rs diff --git a/src/driver/inner_transaction.rs b/src/driver/inner_transaction.rs new file mode 100644 index 00000000..2dd7f009 --- /dev/null +++ b/src/driver/inner_transaction.rs @@ -0,0 +1,83 @@ +use deadpool_postgres::Transaction as dp_Transaction; +use postgres_types::ToSql; +use tokio_postgres::{Portal, Row, ToStatement, Transaction as tp_Transaction}; + +use crate::exceptions::rust_errors::PSQLPyResult; + +pub enum PsqlpyTransaction { + PoolTrans(dp_Transaction<'static>), + SingleConnTrans(tp_Transaction<'static>) +} + +impl PsqlpyTransaction { + async fn commit(self) -> PSQLPyResult<()> { + match self { + PsqlpyTransaction::PoolTrans(p_txid) => Ok(p_txid.commit().await?), + PsqlpyTransaction::SingleConnTrans(s_txid) => Ok(s_txid.commit().await?) + } + } + + async fn rollback(self) -> PSQLPyResult<()> { + match self { + PsqlpyTransaction::PoolTrans(p_txid) => Ok(p_txid.rollback().await?), + PsqlpyTransaction::SingleConnTrans(s_txid) => Ok(s_txid.rollback().await?) + } + } + + async fn savepoint(&mut self, sp_name: &str) -> PSQLPyResult<()> { + match self { + PsqlpyTransaction::PoolTrans(p_txid) => { + p_txid.savepoint(sp_name).await?; + Ok(()) + }, + PsqlpyTransaction::SingleConnTrans(s_txid) => { + s_txid.savepoint(sp_name).await?; + Ok(()) + } + } + } + + async fn release_savepoint(&self, sp_name: &str) -> PSQLPyResult<()> { + match self { + PsqlpyTransaction::PoolTrans(p_txid) => { + p_txid.batch_execute(format!("RELEASE SAVEPOINT {sp_name}").as_str()).await?; + Ok(()) + }, + PsqlpyTransaction::SingleConnTrans(s_txid) => { + s_txid.batch_execute(format!("RELEASE SAVEPOINT {sp_name}").as_str()).await?; + Ok(()) + } + } + } + + async fn rollback_savepoint(&self, sp_name: &str) -> PSQLPyResult<()> { + match self { + PsqlpyTransaction::PoolTrans(p_txid) => { + p_txid.batch_execute(format!("ROLLBACK TO SAVEPOINT {sp_name}").as_str()).await?; + Ok(()) + }, + PsqlpyTransaction::SingleConnTrans(s_txid) => { + s_txid.batch_execute(format!("ROLLBACK TO SAVEPOINT {sp_name}").as_str()).await?; + Ok(()) + } + } + } + + async fn bind(&self, statement: &T, params: &[&(dyn ToSql + Sync)]) -> PSQLPyResult + where + T: ?Sized + ToStatement { + match self { + PsqlpyTransaction::PoolTrans(p_txid) => Ok(p_txid.bind(statement, params).await?), + PsqlpyTransaction::SingleConnTrans(s_txid) => Ok(s_txid.bind(statement, params).await?) + } + } + + pub async fn query_portal(&self, portal: &Portal, size: i32) -> PSQLPyResult> { + match self { + PsqlpyTransaction::PoolTrans(p_txid) + => Ok(p_txid.query_portal(portal, size).await?), + PsqlpyTransaction::SingleConnTrans(s_txid) + => Ok(s_txid.query_portal(portal, size).await?) 
+ } + } +} From 82b9fc5213fa531ec1f19edfbda5e739ef487d51 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Tue, 13 May 2025 23:00:54 +0200 Subject: [PATCH 51/65] Made small use corrections --- src/driver/connection.rs | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/driver/connection.rs b/src/driver/connection.rs index ded325a2..2210e303 100644 --- a/src/driver/connection.rs +++ b/src/driver/connection.rs @@ -9,17 +9,16 @@ use crate::{ exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, format_helpers::quote_ident, query_result::{PSQLDriverPyQueryResult, PSQLDriverSinglePyQueryResult}, - runtime::{rustdriver_future, tokio_runtime}, + runtime::tokio_runtime, }; use super::{ common_options::{LoadBalanceHosts, SslMode, TargetSessionAttrs}, - connection_pool::{connect_pool, ConnectionPool}, + connection_pool::connect_pool, cursor::Cursor, inner_connection::PsqlpyConnection, transaction::Transaction, transaction_options::{IsolationLevel, ReadVariant, SynchronousCommit}, - utils::build_connection_config, }; /// Make new connection pool. From b260ba342e8651ca5e7713d3d8444b3f88389da0 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Tue, 13 May 2025 23:01:25 +0200 Subject: [PATCH 52/65] Made small use corrections --- src/driver/inner_connection.rs | 28 ++++++++++++++++++++++++++-- 1 file changed, 26 insertions(+), 2 deletions(-) diff --git a/src/driver/inner_connection.rs b/src/driver/inner_connection.rs index d8acc4d8..bb591de7 100644 --- a/src/driver/inner_connection.rs +++ b/src/driver/inner_connection.rs @@ -1,7 +1,7 @@ use bytes::Buf; -use deadpool_postgres::Object; +use deadpool_postgres::{Object, Transaction}; use postgres_types::{ToSql, Type}; -use pyo3::{Py, PyAny, Python}; +use pyo3::{pyclass, Py, PyAny, Python}; use std::vec; use tokio_postgres::{Client, CopyInSink, Row, Statement, ToStatement}; @@ -18,6 +18,11 @@ pub enum PsqlpyConnection { SingleConn(Client), } +// #[pyclass] +// struct Portal { +// trans: Transaction<'static>, +// } + impl PsqlpyConnection { /// Prepare cached statement. /// @@ -38,6 +43,25 @@ impl PsqlpyConnection { } } + // pub async fn transaction(&mut self) -> Portal { + // match self { + // PsqlpyConnection::PoolConn(pconn, _) => { + // let b = unsafe { + // std::mem::transmute::, Transaction<'static>>(pconn.transaction().await.unwrap()) + // }; + // Portal {trans: b} + // // let c = b.bind("SELECT 1", &[]).await.unwrap(); + // // b.query_portal(&c, 1).await; + // } + // PsqlpyConnection::SingleConn(sconn) => { + // let b = unsafe { + // std::mem::transmute::, Transaction<'static>>(sconn.transaction().await.unwrap()) + // }; + // Portal {trans: b} + // }, + // } + // } + /// Delete prepared statement. 
/// /// # Errors From 519c4d4afabe93d296b23c8596bae683bf08e2a7 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Tue, 13 May 2025 23:02:13 +0200 Subject: [PATCH 53/65] Added Portal impl --- src/driver/inner_transaction.rs | 43 +++++++++++++++++---------- src/driver/mod.rs | 2 ++ src/driver/portal.rs | 52 +++++++++++++++++++++++++++++++++ 3 files changed, 81 insertions(+), 16 deletions(-) create mode 100644 src/driver/portal.rs diff --git a/src/driver/inner_transaction.rs b/src/driver/inner_transaction.rs index 2dd7f009..a23f0536 100644 --- a/src/driver/inner_transaction.rs +++ b/src/driver/inner_transaction.rs @@ -6,21 +6,21 @@ use crate::exceptions::rust_errors::PSQLPyResult; pub enum PsqlpyTransaction { PoolTrans(dp_Transaction<'static>), - SingleConnTrans(tp_Transaction<'static>) + SingleConnTrans(tp_Transaction<'static>), } impl PsqlpyTransaction { async fn commit(self) -> PSQLPyResult<()> { match self { PsqlpyTransaction::PoolTrans(p_txid) => Ok(p_txid.commit().await?), - PsqlpyTransaction::SingleConnTrans(s_txid) => Ok(s_txid.commit().await?) + PsqlpyTransaction::SingleConnTrans(s_txid) => Ok(s_txid.commit().await?), } } async fn rollback(self) -> PSQLPyResult<()> { match self { PsqlpyTransaction::PoolTrans(p_txid) => Ok(p_txid.rollback().await?), - PsqlpyTransaction::SingleConnTrans(s_txid) => Ok(s_txid.rollback().await?) + PsqlpyTransaction::SingleConnTrans(s_txid) => Ok(s_txid.rollback().await?), } } @@ -29,7 +29,7 @@ impl PsqlpyTransaction { PsqlpyTransaction::PoolTrans(p_txid) => { p_txid.savepoint(sp_name).await?; Ok(()) - }, + } PsqlpyTransaction::SingleConnTrans(s_txid) => { s_txid.savepoint(sp_name).await?; Ok(()) @@ -40,11 +40,15 @@ impl PsqlpyTransaction { async fn release_savepoint(&self, sp_name: &str) -> PSQLPyResult<()> { match self { PsqlpyTransaction::PoolTrans(p_txid) => { - p_txid.batch_execute(format!("RELEASE SAVEPOINT {sp_name}").as_str()).await?; + p_txid + .batch_execute(format!("RELEASE SAVEPOINT {sp_name}").as_str()) + .await?; Ok(()) - }, + } PsqlpyTransaction::SingleConnTrans(s_txid) => { - s_txid.batch_execute(format!("RELEASE SAVEPOINT {sp_name}").as_str()).await?; + s_txid + .batch_execute(format!("RELEASE SAVEPOINT {sp_name}").as_str()) + .await?; Ok(()) } } @@ -53,11 +57,15 @@ impl PsqlpyTransaction { async fn rollback_savepoint(&self, sp_name: &str) -> PSQLPyResult<()> { match self { PsqlpyTransaction::PoolTrans(p_txid) => { - p_txid.batch_execute(format!("ROLLBACK TO SAVEPOINT {sp_name}").as_str()).await?; + p_txid + .batch_execute(format!("ROLLBACK TO SAVEPOINT {sp_name}").as_str()) + .await?; Ok(()) - }, + } PsqlpyTransaction::SingleConnTrans(s_txid) => { - s_txid.batch_execute(format!("ROLLBACK TO SAVEPOINT {sp_name}").as_str()).await?; + s_txid + .batch_execute(format!("ROLLBACK TO SAVEPOINT {sp_name}").as_str()) + .await?; Ok(()) } } @@ -65,19 +73,22 @@ impl PsqlpyTransaction { async fn bind(&self, statement: &T, params: &[&(dyn ToSql + Sync)]) -> PSQLPyResult where - T: ?Sized + ToStatement { + T: ?Sized + ToStatement, + { match self { PsqlpyTransaction::PoolTrans(p_txid) => Ok(p_txid.bind(statement, params).await?), - PsqlpyTransaction::SingleConnTrans(s_txid) => Ok(s_txid.bind(statement, params).await?) + PsqlpyTransaction::SingleConnTrans(s_txid) => { + Ok(s_txid.bind(statement, params).await?) 
+ } } } pub async fn query_portal(&self, portal: &Portal, size: i32) -> PSQLPyResult> { match self { - PsqlpyTransaction::PoolTrans(p_txid) - => Ok(p_txid.query_portal(portal, size).await?), - PsqlpyTransaction::SingleConnTrans(s_txid) - => Ok(s_txid.query_portal(portal, size).await?) + PsqlpyTransaction::PoolTrans(p_txid) => Ok(p_txid.query_portal(portal, size).await?), + PsqlpyTransaction::SingleConnTrans(s_txid) => { + Ok(s_txid.query_portal(portal, size).await?) + } } } } diff --git a/src/driver/mod.rs b/src/driver/mod.rs index e7827cd5..416bfa97 100644 --- a/src/driver/mod.rs +++ b/src/driver/mod.rs @@ -4,7 +4,9 @@ pub mod connection_pool; pub mod connection_pool_builder; pub mod cursor; pub mod inner_connection; +pub mod inner_transaction; pub mod listener; +pub mod portal; pub mod transaction; pub mod transaction_options; pub mod utils; diff --git a/src/driver/portal.rs b/src/driver/portal.rs new file mode 100644 index 00000000..d90138b0 --- /dev/null +++ b/src/driver/portal.rs @@ -0,0 +1,52 @@ +use std::sync::Arc; + +use pyo3::{pyclass, pymethods}; +use tokio_postgres::Portal as tp_Portal; + +use crate::{exceptions::rust_errors::PSQLPyResult, query_result::PSQLDriverPyQueryResult}; + +use super::inner_transaction::PsqlpyTransaction; + +#[pyclass] +struct Portal { + transaction: Arc, + inner: tp_Portal, + array_size: i32, +} + +impl Portal { + async fn query_portal(&self, size: i32) -> PSQLPyResult { + let result = self.transaction.query_portal(&self.inner, size).await?; + Ok(PSQLDriverPyQueryResult::new(result)) + } +} + +#[pymethods] +impl Portal { + #[getter] + fn get_array_size(&self) -> i32 { + self.array_size + } + + #[setter] + fn set_array_size(&mut self, value: i32) { + self.array_size = value; + } + + async fn fetch_one(&self) -> PSQLPyResult { + self.query_portal(1).await + } + + #[pyo3(signature = (size=None))] + async fn fetch_many(&self, size: Option) -> PSQLPyResult { + self.query_portal(size.unwrap_or(self.array_size)).await + } + + async fn fetch_all(&self) -> PSQLPyResult { + self.query_portal(-1).await + } + + async fn close(&mut self) { + let _ = Arc::downgrade(&self.transaction); + } +} From 55a44e7c7050f118f50e38af48e6a30677ad4222 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Wed, 14 May 2025 20:10:08 +0200 Subject: [PATCH 54/65] Started making PSQLPy SQLAlchemy compatible --- python/psqlpy/__init__.py | 2 - python/psqlpy/_internal/__init__.pyi | 34 -- python/tests/test_transaction.py | 27 -- src/connection/impls.rs | 485 +++++++++++++++++++++++++++ src/connection/mod.rs | 3 + src/connection/structs.rs | 15 + src/connection/traits.rs | 87 +++++ src/driver/connection.rs | 34 +- src/driver/connection_pool.rs | 44 +-- src/driver/cursor.rs | 9 +- src/driver/inner_connection.rs | 347 ------------------- src/driver/inner_transaction.rs | 94 ------ src/driver/listener/core.rs | 21 +- src/driver/mod.rs | 3 - src/driver/portal.rs | 139 +++++--- src/driver/transaction.rs | 177 ++-------- src/driver/transaction_options.rs | 81 ----- src/lib.rs | 7 +- src/options.rs | 221 ++++++++++++ src/statement/statement_builder.rs | 9 +- 20 files changed, 984 insertions(+), 855 deletions(-) create mode 100644 src/connection/impls.rs create mode 100644 src/connection/mod.rs create mode 100644 src/connection/structs.rs create mode 100644 src/connection/traits.rs delete mode 100644 src/driver/inner_connection.rs delete mode 100644 src/driver/inner_transaction.rs delete mode 100644 src/driver/transaction_options.rs create mode 100644 src/options.rs diff --git 
a/python/psqlpy/__init__.py b/python/psqlpy/__init__.py index 41ede3fe..fbaf123d 100644 --- a/python/psqlpy/__init__.py +++ b/python/psqlpy/__init__.py @@ -13,7 +13,6 @@ ReadVariant, SingleQueryResult, SslMode, - SynchronousCommit, TargetSessionAttrs, Transaction, connect, @@ -35,7 +34,6 @@ "ReadVariant", "SingleQueryResult", "SslMode", - "SynchronousCommit", "TargetSessionAttrs", "Transaction", "connect", diff --git a/python/psqlpy/_internal/__init__.pyi b/python/psqlpy/_internal/__init__.pyi index 8cf394b7..d900d228 100644 --- a/python/psqlpy/_internal/__init__.pyi +++ b/python/psqlpy/_internal/__init__.pyi @@ -150,38 +150,6 @@ class SingleQueryResult: Type that return passed function. """ -class SynchronousCommit(Enum): - """ - Synchronous_commit option for transactions. - - ### Variants: - - `On`: The meaning may change based on whether you have - a synchronous standby or not. - If there is a synchronous standby, - setting the value to on will result in waiting till “remote flush”. - - `Off`: As the name indicates, the commit acknowledgment can come before - flushing the records to disk. - This is generally called as an asynchronous commit. - If the PostgreSQL instance crashes, - the last few asynchronous commits might be lost. - - `Local`: WAL records are written and flushed to local disks. - In this case, the commit will be acknowledged after the - local WAL Write and WAL flush completes. - - `RemoteWrite`: WAL records are successfully handed over to - remote instances which acknowledged back - about the write (not flush). - - `RemoteApply`: This will result in commits waiting until replies from the - current synchronous standby(s) indicate they have received - the commit record of the transaction and applied it so - that it has become visible to queries on the standby(s). - """ - - On = 1 - Off = 2 - Local = 3 - RemoteWrite = 4 - RemoteApply = 5 - class IsolationLevel(Enum): """Isolation Level for transactions.""" @@ -1117,7 +1085,6 @@ class Connection: isolation_level: IsolationLevel | None = None, read_variant: ReadVariant | None = None, deferrable: bool | None = None, - synchronous_commit: SynchronousCommit | None = None, ) -> Transaction: """Create new transaction. @@ -1125,7 +1092,6 @@ class Connection: - `isolation_level`: configure isolation level of the transaction. - `read_variant`: configure read variant of the transaction. - `deferrable`: configure deferrable of the transaction. - - `synchronous_commit`: configure synchronous_commit option for transaction. 
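With `SynchronousCommit` removed from the `transaction()` signature, the same per-transaction behaviour can presumably still be requested through plain SQL: the Rust code deleted in this patch simply executed `SET LOCAL synchronous_commit = '<value>'` right after starting the transaction. A minimal sketch of that replacement, assuming an already-built `ConnectionPool` named `db_pool` and an illustrative table name:

```python
async def main() -> None:
    connection = await db_pool.connection()
    async with connection.transaction() as transaction:
        # Stands in for the removed `synchronous_commit=` argument;
        # SET LOCAL is scoped to the current transaction only.
        await transaction.execute("SET LOCAL synchronous_commit = 'off'")
        await transaction.execute(
            "INSERT INTO some_table (id) VALUES ($1)",
            parameters=[1],
        )
```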
""" def cursor( self: Self, diff --git a/python/tests/test_transaction.py b/python/tests/test_transaction.py index 280d21be..a6dfd191 100644 --- a/python/tests/test_transaction.py +++ b/python/tests/test_transaction.py @@ -8,7 +8,6 @@ Cursor, IsolationLevel, ReadVariant, - SynchronousCommit, ) from psqlpy.exceptions import ( InterfaceError, @@ -362,29 +361,3 @@ async def test_execute_batch_method(psql_pool: ConnectionPool) -> None: await transaction.execute(querystring="SELECT * FROM execute_batch2") connection.back_to_pool() - - -@pytest.mark.parametrize( - "synchronous_commit", - [ - SynchronousCommit.On, - SynchronousCommit.Off, - SynchronousCommit.Local, - SynchronousCommit.RemoteWrite, - SynchronousCommit.RemoteApply, - ], -) -async def test_synchronous_commit( - synchronous_commit: SynchronousCommit, - psql_pool: ConnectionPool, - table_name: str, - number_database_records: int, -) -> None: - async with psql_pool.acquire() as conn, conn.transaction( - synchronous_commit=synchronous_commit, - ) as trans: - res = await trans.execute( - f"SELECT * FROM {table_name}", - ) - - assert len(res.result()) == number_database_records diff --git a/src/connection/impls.rs b/src/connection/impls.rs new file mode 100644 index 00000000..50b195a0 --- /dev/null +++ b/src/connection/impls.rs @@ -0,0 +1,485 @@ +use bytes::Buf; +use pyo3::{PyAny, Python}; +use tokio_postgres::{CopyInSink, Row, Statement, ToStatement}; + +use crate::{ + exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, + options::{IsolationLevel, ReadVariant}, + query_result::{PSQLDriverPyQueryResult, PSQLDriverSinglePyQueryResult}, + statement::{statement::PsqlpyStatement, statement_builder::StatementBuilder}, + value_converter::to_python::postgres_to_py, +}; + +use super::{ + structs::{PSQLPyConnection, PoolConnection, SingleConnection}, + traits::{Connection, Transaction}, +}; + +impl Transaction for T +where + T: Connection, +{ + async fn start( + &self, + isolation_level: Option, + read_variant: Option, + deferrable: Option, + ) -> PSQLPyResult<()> { + let start_qs = self.build_start_qs(isolation_level, read_variant, deferrable); + self.batch_execute(start_qs.as_str()).await.map_err(|err| { + RustPSQLDriverError::TransactionBeginError( + format!("Cannot start transaction due to - {err}").into(), + ) + })?; + + Ok(()) + } + + async fn commit(&self) -> PSQLPyResult<()> { + self.batch_execute("COMMIT;").await.map_err(|err| { + RustPSQLDriverError::TransactionCommitError(format!( + "Cannot execute COMMIT statement, error - {err}" + )) + })?; + Ok(()) + } + + async fn rollback(&self) -> PSQLPyResult<()> { + self.batch_execute("ROLLBACK;").await.map_err(|err| { + RustPSQLDriverError::TransactionRollbackError(format!( + "Cannot execute ROLLBACK statement, error - {err}" + )) + })?; + Ok(()) + } +} + +impl Connection for SingleConnection { + async fn prepare(&self, query: &str, prepared: bool) -> PSQLPyResult { + let prepared_stmt = self.connection.prepare(query).await?; + + if !prepared { + self.drop_prepared(&prepared_stmt).await?; + } + return Ok(prepared_stmt); + } + + async fn drop_prepared(&self, stmt: &Statement) -> PSQLPyResult<()> { + let deallocate_query = format!("DEALLOCATE PREPARE {}", stmt.name()); + + Ok(self.connection.batch_execute(&deallocate_query).await?) + } + + async fn query( + &self, + statement: &T, + params: &[&(dyn postgres_types::ToSql + Sync)], + ) -> PSQLPyResult> + where + T: ?Sized + ToStatement, + { + Ok(self.connection.query(statement, params).await?) 
+ } + + async fn query_typed( + &self, + statement: &str, + params: &[(&(dyn postgres_types::ToSql + Sync), postgres_types::Type)], + ) -> PSQLPyResult> { + Ok(self.connection.query_typed(statement, params).await?) + } + + async fn batch_execute(&self, query: &str) -> PSQLPyResult<()> { + Ok(self.connection.batch_execute(query).await?) + } + + async fn query_one( + &self, + statement: &T, + params: &[&(dyn postgres_types::ToSql + Sync)], + ) -> PSQLPyResult + where + T: ?Sized + ToStatement, + { + Ok(self.connection.query_one(statement, params).await?) + } +} + +// impl Transaction for SingleConnection { +// async fn start( +// &self, +// isolation_level: Option, +// read_variant: Option, +// deferrable: Option, +// ) -> PSQLPyResult<()> { +// let start_qs = self.build_start_qs(isolation_level, read_variant, deferrable); +// self.batch_execute(start_qs.as_str()).await.map_err(|err| { +// RustPSQLDriverError::TransactionBeginError( +// format!("Cannot start transaction due to - {err}").into(), +// ) +// })?; + +// Ok(()) +// } + +// async fn commit(&self) -> PSQLPyResult<()> { +// self.batch_execute("COMMIT;").await.map_err(|err| { +// RustPSQLDriverError::TransactionCommitError(format!( +// "Cannot execute COMMIT statement, error - {err}" +// )) +// })?; +// Ok(()) +// } + +// async fn rollback(&self) -> PSQLPyResult<()> { +// self.batch_execute("ROLLBACK;").await.map_err(|err| { +// RustPSQLDriverError::TransactionRollbackError(format!( +// "Cannot execute ROLLBACK statement, error - {err}" +// )) +// })?; +// Ok(()) +// } +// } + +impl Connection for PoolConnection { + async fn prepare(&self, query: &str, prepared: bool) -> PSQLPyResult { + if prepared { + return Ok(self.connection.prepare_cached(query).await?); + } + + let prepared = self.connection.prepare(query).await?; + self.drop_prepared(&prepared).await?; + return Ok(prepared); + } + + async fn drop_prepared(&self, stmt: &Statement) -> PSQLPyResult<()> { + let deallocate_query = format!("DEALLOCATE PREPARE {}", stmt.name()); + + Ok(self.connection.batch_execute(&deallocate_query).await?) + } + + async fn query( + &self, + statement: &T, + params: &[&(dyn postgres_types::ToSql + Sync)], + ) -> PSQLPyResult> + where + T: ?Sized + ToStatement, + { + Ok(self.connection.query(statement, params).await?) + } + + async fn query_typed( + &self, + statement: &str, + params: &[(&(dyn postgres_types::ToSql + Sync), postgres_types::Type)], + ) -> PSQLPyResult> { + Ok(self.connection.query_typed(statement, params).await?) + } + + async fn batch_execute(&self, query: &str) -> PSQLPyResult<()> { + Ok(self.connection.batch_execute(query).await?) + } + + async fn query_one( + &self, + statement: &T, + params: &[&(dyn postgres_types::ToSql + Sync)], + ) -> PSQLPyResult + where + T: ?Sized + ToStatement, + { + Ok(self.connection.query_one(statement, params).await?) 
+ } +} + +// impl Transaction for PoolConnection { +// async fn start( +// &self, +// isolation_level: Option, +// read_variant: Option, +// deferrable: Option, +// ) -> PSQLPyResult<()> { +// let start_qs = self.build_start_qs(isolation_level, read_variant, deferrable); +// self.batch_execute(start_qs.as_str()).await.map_err(|err| { +// RustPSQLDriverError::TransactionBeginError( +// format!("Cannot start transaction due to - {err}").into(), +// ) +// })?; + +// Ok(()) +// } + +// async fn commit(&self) -> PSQLPyResult<()> { +// self.batch_execute("COMMIT;").await.map_err(|err| { +// RustPSQLDriverError::TransactionCommitError(format!( +// "Cannot execute COMMIT statement, error - {err}" +// )) +// })?; +// Ok(()) +// } + +// async fn rollback(&self) -> PSQLPyResult<()> { +// self.batch_execute("ROLLBACK;").await.map_err(|err| { +// RustPSQLDriverError::TransactionRollbackError(format!( +// "Cannot execute ROLLBACK statement, error - {err}" +// )) +// })?; +// Ok(()) +// } +// } + +impl Connection for PSQLPyConnection { + async fn prepare(&self, query: &str, prepared: bool) -> PSQLPyResult { + match self { + PSQLPyConnection::PoolConn(p_conn) => p_conn.prepare(query, prepared).await, + PSQLPyConnection::SingleConnection(s_conn) => s_conn.prepare(query, prepared).await, + } + } + + async fn drop_prepared(&self, stmt: &Statement) -> PSQLPyResult<()> { + match self { + PSQLPyConnection::PoolConn(p_conn) => p_conn.drop_prepared(stmt).await, + PSQLPyConnection::SingleConnection(s_conn) => s_conn.drop_prepared(stmt).await, + } + } + + async fn query( + &self, + statement: &T, + params: &[&(dyn postgres_types::ToSql + Sync)], + ) -> PSQLPyResult> + where + T: ?Sized + ToStatement, + { + match self { + PSQLPyConnection::PoolConn(p_conn) => p_conn.query(statement, params).await, + PSQLPyConnection::SingleConnection(s_conn) => s_conn.query(statement, params).await, + } + } + + async fn query_typed( + &self, + statement: &str, + params: &[(&(dyn postgres_types::ToSql + Sync), postgres_types::Type)], + ) -> PSQLPyResult> { + match self { + PSQLPyConnection::PoolConn(p_conn) => p_conn.query_typed(statement, params).await, + PSQLPyConnection::SingleConnection(s_conn) => { + s_conn.query_typed(statement, params).await + } + } + } + + async fn batch_execute(&self, query: &str) -> PSQLPyResult<()> { + match self { + PSQLPyConnection::PoolConn(p_conn) => p_conn.batch_execute(query).await, + PSQLPyConnection::SingleConnection(s_conn) => s_conn.batch_execute(query).await, + } + } + + async fn query_one( + &self, + statement: &T, + params: &[&(dyn postgres_types::ToSql + Sync)], + ) -> PSQLPyResult + where + T: ?Sized + ToStatement, + { + match self { + PSQLPyConnection::PoolConn(p_conn) => p_conn.query_one(statement, params).await, + PSQLPyConnection::SingleConnection(s_conn) => s_conn.query_one(statement, params).await, + } + } +} + +// impl Transaction for PSQLPyConnection { +// async fn start( +// &self, +// isolation_level: Option, +// read_variant: Option, +// deferrable: Option, +// ) -> PSQLPyResult<()> { +// match self { +// PSQLPyConnection::PoolConn(p_conn) => p_conn.start(isolation_level, read_variant, deferrable).await, +// PSQLPyConnection::SingleConnection(s_conn) => s_conn.start(isolation_level, read_variant, deferrable).await, +// } +// } + +// async fn commit(&self) -> PSQLPyResult<()> { +// self.batch_execute("COMMIT;").await.map_err(|err| { +// RustPSQLDriverError::TransactionCommitError(format!( +// "Cannot execute COMMIT statement, error - {err}" +// )) +// })?; +// Ok(()) +// } + +// 
async fn rollback(&self) -> PSQLPyResult<()> { +// self.batch_execute("ROLLBACK;").await.map_err(|err| { +// RustPSQLDriverError::TransactionRollbackError(format!( +// "Cannot execute ROLLBACK statement, error - {err}" +// )) +// })?; +// Ok(()) +// } +// } + +impl PSQLPyConnection { + pub async fn execute( + &self, + querystring: String, + parameters: Option>, + prepared: Option, + ) -> PSQLPyResult { + let statement = StatementBuilder::new(querystring, parameters, self, prepared) + .build() + .await?; + + let prepared = prepared.unwrap_or(true); + + let result = match prepared { + true => self + .query(statement.statement_query()?, &statement.params()) + .await + .map_err(|err| { + RustPSQLDriverError::ConnectionExecuteError(format!( + "Cannot prepare statement, error - {err}" + )) + })?, + false => self + .query_typed(statement.raw_query(), &statement.params_typed()) + .await + .map_err(|err| RustPSQLDriverError::ConnectionExecuteError(format!("{err}")))?, + }; + + Ok(PSQLDriverPyQueryResult::new(result)) + } + + pub async fn execute_many( + &self, + querystring: String, + parameters: Option>>, + prepared: Option, + ) -> PSQLPyResult<()> { + let mut statements: Vec = vec![]; + if let Some(parameters) = parameters { + for vec_of_py_any in parameters { + // TODO: Fix multiple qs creation + let statement = + StatementBuilder::new(querystring.clone(), Some(vec_of_py_any), self, prepared) + .build() + .await?; + + statements.push(statement); + } + } + + let prepared = prepared.unwrap_or(true); + + for statement in statements { + let querystring_result = if prepared { + let prepared_stmt = &self.prepare(&statement.raw_query(), true).await; + if let Err(error) = prepared_stmt { + return Err(RustPSQLDriverError::ConnectionExecuteError(format!( + "Cannot prepare statement in execute_many, operation rolled back {error}", + ))); + } + self.query( + &self.prepare(&statement.raw_query(), true).await?, + &statement.params(), + ) + .await + } else { + self.query(statement.raw_query(), &statement.params()).await + }; + + if let Err(error) = querystring_result { + return Err(RustPSQLDriverError::ConnectionExecuteError(format!( + "Error occured in `execute_many` statement: {error}" + ))); + } + } + + return Ok(()); + } + + pub async fn fetch_row_raw( + &self, + querystring: String, + parameters: Option>, + prepared: Option, + ) -> PSQLPyResult { + let statement = StatementBuilder::new(querystring, parameters, self, prepared) + .build() + .await?; + + let prepared = prepared.unwrap_or(true); + + let result = if prepared { + self.query_one( + &self + .prepare(&statement.raw_query(), true) + .await + .map_err(|err| { + RustPSQLDriverError::ConnectionExecuteError(format!( + "Cannot prepare statement, error - {err}" + )) + })?, + &statement.params(), + ) + .await + .map_err(|err| RustPSQLDriverError::ConnectionExecuteError(format!("{err}")))? + } else { + self.query_one(statement.raw_query(), &statement.params()) + .await + .map_err(|err| RustPSQLDriverError::ConnectionExecuteError(format!("{err}")))? 
+ }; + + return Ok(result); + } + + pub async fn fetch_row( + &self, + querystring: String, + parameters: Option>, + prepared: Option, + ) -> PSQLPyResult { + let result = self + .fetch_row_raw(querystring, parameters, prepared) + .await?; + + return Ok(PSQLDriverSinglePyQueryResult::new(result)); + } + + pub async fn fetch_val( + &self, + querystring: String, + parameters: Option>, + prepared: Option, + ) -> PSQLPyResult> { + let result = self + .fetch_row_raw(querystring, parameters, prepared) + .await?; + + return Python::with_gil(|gil| match result.columns().first() { + Some(first_column) => postgres_to_py(gil, &result, first_column, 0, &None), + None => Ok(gil.None()), + }); + } + + pub async fn copy_in(&self, statement: &T) -> PSQLPyResult> + where + T: ?Sized + ToStatement, + U: Buf + 'static + Send, + { + match self { + PSQLPyConnection::PoolConn(pconn) => { + return Ok(pconn.connection.copy_in(statement).await?) + } + PSQLPyConnection::SingleConnection(sconn) => { + return Ok(sconn.connection.copy_in(statement).await?) + } + } + } +} diff --git a/src/connection/mod.rs b/src/connection/mod.rs new file mode 100644 index 00000000..c8f176fa --- /dev/null +++ b/src/connection/mod.rs @@ -0,0 +1,3 @@ +pub mod impls; +pub mod structs; +pub mod traits; diff --git a/src/connection/structs.rs b/src/connection/structs.rs new file mode 100644 index 00000000..9e713bfd --- /dev/null +++ b/src/connection/structs.rs @@ -0,0 +1,15 @@ +use deadpool_postgres::Object; +use tokio_postgres::Client; + +pub struct PoolConnection { + pub connection: Object, +} + +pub struct SingleConnection { + pub connection: Client, +} + +pub enum PSQLPyConnection { + PoolConn(PoolConnection), + SingleConnection(SingleConnection), +} diff --git a/src/connection/traits.rs b/src/connection/traits.rs new file mode 100644 index 00000000..9428eb70 --- /dev/null +++ b/src/connection/traits.rs @@ -0,0 +1,87 @@ +use postgres_types::{ToSql, Type}; +use tokio_postgres::{Row, Statement, ToStatement}; + +use crate::exceptions::rust_errors::PSQLPyResult; + +use crate::options::{IsolationLevel, ReadVariant}; + +pub trait Connection { + fn prepare( + &self, + query: &str, + prepared: bool, + ) -> impl std::future::Future> + Send; + + fn drop_prepared( + &self, + stmt: &Statement, + ) -> impl std::future::Future> + Send; + + fn query( + &self, + statement: &T, + params: &[&(dyn ToSql + Sync)], + ) -> impl std::future::Future>> + where + T: ?Sized + ToStatement; + + fn query_typed( + &self, + statement: &str, + params: &[(&(dyn ToSql + Sync), Type)], + ) -> impl std::future::Future>>; + + fn batch_execute( + &self, + query: &str, + ) -> impl std::future::Future> + Send; + + fn query_one( + &self, + statement: &T, + params: &[&(dyn ToSql + Sync)], + ) -> impl std::future::Future> + where + T: ?Sized + ToStatement; +} + +pub trait Transaction { + fn build_start_qs( + &self, + isolation_level: Option, + read_variant: Option, + deferrable: Option, + ) -> String { + let mut querystring = "START TRANSACTION".to_string(); + + if let Some(level) = isolation_level { + let level = &level.to_str_level(); + querystring.push_str(format!(" ISOLATION LEVEL {level}").as_str()); + }; + + querystring.push_str(match read_variant { + Some(ReadVariant::ReadOnly) => " READ ONLY", + Some(ReadVariant::ReadWrite) => " READ WRITE", + None => "", + }); + + querystring.push_str(match deferrable { + Some(true) => " DEFERRABLE", + Some(false) => " NOT DEFERRABLE", + None => "", + }); + + querystring + } + + fn start( + &self, + isolation_level: Option, + 
read_variant: Option, + deferrable: Option, + ) -> impl std::future::Future>; + + fn commit(&self) -> impl std::future::Future>; + + fn rollback(&self) -> impl std::future::Future>; +} diff --git a/src/driver/connection.rs b/src/driver/connection.rs index 2210e303..3b5a4ab5 100644 --- a/src/driver/connection.rs +++ b/src/driver/connection.rs @@ -6,8 +6,13 @@ use std::{collections::HashSet, net::IpAddr, sync::Arc}; use tokio_postgres::{binary_copy::BinaryCopyInWriter, config::Host, Config}; use crate::{ + connection::{ + structs::{PSQLPyConnection, PoolConnection}, + traits::Connection as _, + }, exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, format_helpers::quote_ident, + options::{IsolationLevel, ReadVariant}, query_result::{PSQLDriverPyQueryResult, PSQLDriverSinglePyQueryResult}, runtime::tokio_runtime, }; @@ -16,9 +21,7 @@ use super::{ common_options::{LoadBalanceHosts, SslMode, TargetSessionAttrs}, connection_pool::connect_pool, cursor::Cursor, - inner_connection::PsqlpyConnection, transaction::Transaction, - transaction_options::{IsolationLevel, ReadVariant, SynchronousCommit}, }; /// Make new connection pool. @@ -118,30 +121,27 @@ pub async fn connect( #[pyclass(subclass)] #[derive(Clone)] pub struct Connection { - db_client: Option>, + db_client: Option>, db_pool: Option, pg_config: Arc, - prepare: bool, } impl Connection { #[must_use] pub fn new( - db_client: Option>, + db_client: Option>, db_pool: Option, pg_config: Arc, - prepare: bool, ) -> Self { Connection { db_client, db_pool, pg_config, - prepare, } } #[must_use] - pub fn db_client(&self) -> Option> { + pub fn db_client(&self) -> Option> { self.db_client.clone() } @@ -153,7 +153,7 @@ impl Connection { impl Default for Connection { fn default() -> Self { - Connection::new(None, None, Arc::new(Config::default()), true) + Connection::new(None, None, Arc::new(Config::default())) } } @@ -237,13 +237,9 @@ impl Connection { } async fn __aenter__<'a>(self_: Py) -> PSQLPyResult> { - let (db_client, db_pool, prepare) = pyo3::Python::with_gil(|gil| { + let (db_client, db_pool) = pyo3::Python::with_gil(|gil| { let self_ = self_.borrow(gil); - ( - self_.db_client.clone(), - self_.db_pool.clone(), - self_.prepare, - ) + (self_.db_client.clone(), self_.db_pool.clone()) }); if db_client.is_some() { @@ -258,8 +254,9 @@ impl Connection { .await??; pyo3::Python::with_gil(|gil| { let mut self_ = self_.borrow_mut(gil); - self_.db_client = - Some(Arc::new(PsqlpyConnection::PoolConn(db_connection, prepare))); + self_.db_client = Some(Arc::new(PSQLPyConnection::PoolConn(PoolConnection { + connection: db_connection, + }))); }); return Ok(self_); } @@ -459,14 +456,12 @@ impl Connection { isolation_level=None, read_variant=None, deferrable=None, - synchronous_commit=None, ))] pub fn transaction( &self, isolation_level: Option, read_variant: Option, deferrable: Option, - synchronous_commit: Option, ) -> PSQLPyResult { if let Some(db_client) = &self.db_client { return Ok(Transaction::new( @@ -475,7 +470,6 @@ impl Connection { false, false, isolation_level, - synchronous_commit, read_variant, deferrable, HashSet::new(), diff --git a/src/driver/connection_pool.rs b/src/driver/connection_pool.rs index a764cea3..c66be3da 100644 --- a/src/driver/connection_pool.rs +++ b/src/driver/connection_pool.rs @@ -1,4 +1,7 @@ -use crate::runtime::tokio_runtime; +use crate::{ + connection::structs::{PSQLPyConnection, PoolConnection}, + runtime::tokio_runtime, +}; use deadpool_postgres::{Manager, ManagerConfig, Pool, RecyclingMethod}; use 
postgres_types::Type; use pyo3::{pyclass, pyfunction, pymethods, Py, PyAny}; @@ -10,7 +13,6 @@ use crate::exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}; use super::{ common_options::{ConnRecyclingMethod, LoadBalanceHosts, SslMode, TargetSessionAttrs}, connection::Connection, - inner_connection::PsqlpyConnection, listener::core::Listener, utils::{build_connection_config, build_manager, build_tls}, }; @@ -243,13 +245,11 @@ impl ConnectionPool { let connection = self.pool.get().await?; Ok(Connection::new( - Some(Arc::new(PsqlpyConnection::PoolConn( + Some(Arc::new(PSQLPyConnection::PoolConn(PoolConnection { connection, - self.pool_conf.prepare, - ))), + }))), None, self.pg_config.clone(), - self.pool_conf.prepare, )) } @@ -392,12 +392,7 @@ impl ConnectionPool { #[must_use] pub fn acquire(&self) -> Connection { - Connection::new( - None, - Some(self.pool.clone()), - self.pg_config.clone(), - self.pool_conf.prepare, - ) + Connection::new(None, Some(self.pool.clone()), self.pg_config.clone()) } #[must_use] @@ -408,12 +403,7 @@ impl ConnectionPool { (b_gil.pg_config.clone(), b_gil.pool_conf.clone()) }); - Listener::new( - pg_config, - pool_conf.ca_file, - pool_conf.ssl_mode, - pool_conf.prepare, - ) + Listener::new(pg_config, pool_conf.ca_file, pool_conf.ssl_mode) } /// Return new single connection. @@ -421,28 +411,22 @@ impl ConnectionPool { /// # Errors /// May return Err Result if cannot get new connection from the pool. pub async fn connection(self_: pyo3::Py) -> PSQLPyResult { - let (db_pool, pg_config, pool_conf) = pyo3::Python::with_gil(|gil| { + let (db_pool, pg_config) = pyo3::Python::with_gil(|gil| { let slf = self_.borrow(gil); - ( - slf.pool.clone(), - slf.pg_config.clone(), - slf.pool_conf.clone(), - ) + (slf.pool.clone(), slf.pg_config.clone()) }); - let db_connection = tokio_runtime() + let connection = tokio_runtime() .spawn(async move { Ok::(db_pool.get().await?) }) .await??; Ok(Connection::new( - Some(Arc::new(PsqlpyConnection::PoolConn( - db_connection, - pool_conf.prepare, - ))), + Some(Arc::new(PSQLPyConnection::PoolConn(PoolConnection { + connection, + }))), None, pg_config, - pool_conf.prepare, )) } diff --git a/src/driver/cursor.rs b/src/driver/cursor.rs index 54aee852..7229c6ee 100644 --- a/src/driver/cursor.rs +++ b/src/driver/cursor.rs @@ -6,13 +6,12 @@ use pyo3::{ use tokio_postgres::{config::Host, Config}; use crate::{ + connection::structs::PSQLPyConnection, exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, query_result::PSQLDriverPyQueryResult, runtime::rustdriver_future, }; -use super::inner_connection::PsqlpyConnection; - /// Additional implementation for the `Object` type. #[allow(clippy::ref_option)] trait CursorObjectTrait { @@ -28,7 +27,7 @@ trait CursorObjectTrait { async fn cursor_close(&self, closed: &bool, cursor_name: &str) -> PSQLPyResult<()>; } -impl CursorObjectTrait for PsqlpyConnection { +impl CursorObjectTrait for PSQLPyConnection { /// Start the cursor. /// /// Execute `DECLARE` command with parameters. 
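Both acquisition paths now end up with the same `PSQLPyConnection::PoolConn` wrapper: `acquire()` defers the checkout until the async context manager is entered, while `connection()` checks a pool object out immediately. From Python the two remain interchangeable; a short usage sketch, assuming an existing `ConnectionPool` named `db_pool` and that `Connection.execute` is used for the query:

```python
async def main() -> None:
    # Eager checkout: the pool object is taken right away.
    connection = await db_pool.connection()
    await connection.execute("SELECT 1")

    # Lazy checkout: taken on __aenter__, returned on __aexit__.
    async with db_pool.acquire() as connection:
        await connection.execute("SELECT 1")
```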
@@ -90,7 +89,7 @@ impl CursorObjectTrait for PsqlpyConnection { #[pyclass(subclass)] pub struct Cursor { - db_transaction: Option>, + db_transaction: Option>, pg_config: Arc, querystring: String, parameters: Option>, @@ -105,7 +104,7 @@ pub struct Cursor { impl Cursor { #[must_use] pub fn new( - db_transaction: Arc, + db_transaction: Arc, pg_config: Arc, querystring: String, parameters: Option>, diff --git a/src/driver/inner_connection.rs b/src/driver/inner_connection.rs deleted file mode 100644 index bb591de7..00000000 --- a/src/driver/inner_connection.rs +++ /dev/null @@ -1,347 +0,0 @@ -use bytes::Buf; -use deadpool_postgres::{Object, Transaction}; -use postgres_types::{ToSql, Type}; -use pyo3::{pyclass, Py, PyAny, Python}; -use std::vec; -use tokio_postgres::{Client, CopyInSink, Row, Statement, ToStatement}; - -use crate::{ - exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, - query_result::{PSQLDriverPyQueryResult, PSQLDriverSinglePyQueryResult}, - statement::{statement::PsqlpyStatement, statement_builder::StatementBuilder}, - value_converter::to_python::postgres_to_py, -}; - -#[allow(clippy::module_name_repetitions)] -pub enum PsqlpyConnection { - PoolConn(Object, bool), - SingleConn(Client), -} - -// #[pyclass] -// struct Portal { -// trans: Transaction<'static>, -// } - -impl PsqlpyConnection { - /// Prepare cached statement. - /// - /// # Errors - /// May return Err if cannot prepare statement. - pub async fn prepare(&self, query: &str, prepared: bool) -> PSQLPyResult { - match self { - PsqlpyConnection::PoolConn(pconn, _) => { - if prepared { - return Ok(pconn.prepare_cached(query).await?); - } else { - let prepared = pconn.prepare(query).await?; - self.drop_prepared(&prepared).await?; - return Ok(prepared); - } - } - PsqlpyConnection::SingleConn(sconn) => return Ok(sconn.prepare(query).await?), - } - } - - // pub async fn transaction(&mut self) -> Portal { - // match self { - // PsqlpyConnection::PoolConn(pconn, _) => { - // let b = unsafe { - // std::mem::transmute::, Transaction<'static>>(pconn.transaction().await.unwrap()) - // }; - // Portal {trans: b} - // // let c = b.bind("SELECT 1", &[]).await.unwrap(); - // // b.query_portal(&c, 1).await; - // } - // PsqlpyConnection::SingleConn(sconn) => { - // let b = unsafe { - // std::mem::transmute::, Transaction<'static>>(sconn.transaction().await.unwrap()) - // }; - // Portal {trans: b} - // }, - // } - // } - - /// Delete prepared statement. - /// - /// # Errors - /// May return Err if cannot prepare statement. - pub async fn drop_prepared(&self, stmt: &Statement) -> PSQLPyResult<()> { - let deallocate_query = format!("DEALLOCATE PREPARE {}", stmt.name()); - match self { - PsqlpyConnection::PoolConn(pconn, _) => { - let res = Ok(pconn.batch_execute(&deallocate_query).await?); - res - } - PsqlpyConnection::SingleConn(sconn) => { - return Ok(sconn.batch_execute(&deallocate_query).await?) - } - } - } - - /// Execute statement with parameters. - /// - /// # Errors - /// May return Err if cannot execute statement. - pub async fn query( - &self, - statement: &T, - params: &[&(dyn ToSql + Sync)], - ) -> PSQLPyResult> - where - T: ?Sized + ToStatement, - { - match self { - PsqlpyConnection::PoolConn(pconn, _) => { - return Ok(pconn.query(statement, params).await?) - } - PsqlpyConnection::SingleConn(sconn) => { - return Ok(sconn.query(statement, params).await?) - } - } - } - - /// Execute statement with parameters. - /// - /// # Errors - /// May return Err if cannot execute statement. 
- pub async fn query_typed( - &self, - statement: &str, - params: &[(&(dyn ToSql + Sync), Type)], - ) -> PSQLPyResult> { - match self { - PsqlpyConnection::PoolConn(pconn, _) => { - return Ok(pconn.query_typed(statement, params).await?) - } - PsqlpyConnection::SingleConn(sconn) => { - return Ok(sconn.query_typed(statement, params).await?) - } - } - } - - /// Batch execute statement. - /// - /// # Errors - /// May return Err if cannot execute statement. - pub async fn batch_execute(&self, query: &str) -> PSQLPyResult<()> { - match self { - PsqlpyConnection::PoolConn(pconn, _) => return Ok(pconn.batch_execute(query).await?), - PsqlpyConnection::SingleConn(sconn) => return Ok(sconn.batch_execute(query).await?), - } - } - - /// Prepare cached statement. - /// - /// # Errors - /// May return Err if cannot execute copy data. - pub async fn copy_in(&self, statement: &T) -> PSQLPyResult> - where - T: ?Sized + ToStatement, - U: Buf + 'static + Send, - { - match self { - PsqlpyConnection::PoolConn(pconn, _) => return Ok(pconn.copy_in(statement).await?), - PsqlpyConnection::SingleConn(sconn) => return Ok(sconn.copy_in(statement).await?), - } - } - - /// Executes a statement which returns a single row, returning it. - /// - /// # Errors - /// May return Err if cannot execute statement. - pub async fn query_one( - &self, - statement: &T, - params: &[&(dyn ToSql + Sync)], - ) -> PSQLPyResult - where - T: ?Sized + ToStatement, - { - match self { - PsqlpyConnection::PoolConn(pconn, _) => { - return Ok(pconn.query_one(statement, params).await?) - } - PsqlpyConnection::SingleConn(sconn) => { - return Ok(sconn.query_one(statement, params).await?) - } - } - } - - pub async fn cursor_execute( - &self, - querystring: String, - parameters: Option>, - prepared: Option, - ) -> PSQLPyResult { - let statement = StatementBuilder::new(querystring, parameters, self, prepared) - .build() - .await?; - - let prepared = prepared.unwrap_or(true); - - let result = if prepared { - self.query( - &self - .prepare(&statement.raw_query(), true) - .await - .map_err(|err| { - RustPSQLDriverError::ConnectionExecuteError(format!( - "Cannot prepare statement, error - {err}" - )) - })?, - &statement.params(), - ) - .await - .map_err(|err| RustPSQLDriverError::ConnectionExecuteError(format!("{err}")))? - } else { - self.query(statement.raw_query(), &statement.params()) - .await - .map_err(|err| RustPSQLDriverError::ConnectionExecuteError(format!("{err}")))? 
- }; - - Ok(PSQLDriverPyQueryResult::new(result)) - } - - pub async fn execute( - &self, - querystring: String, - parameters: Option>, - prepared: Option, - ) -> PSQLPyResult { - let statement = StatementBuilder::new(querystring, parameters, self, prepared) - .build() - .await?; - - let prepared = prepared.unwrap_or(true); - - let result = match prepared { - true => self - .query(statement.statement_query()?, &statement.params()) - .await - .map_err(|err| { - RustPSQLDriverError::ConnectionExecuteError(format!( - "Cannot prepare statement, error - {err}" - )) - })?, - false => self - .query_typed(statement.raw_query(), &statement.params_typed()) - .await - .map_err(|err| RustPSQLDriverError::ConnectionExecuteError(format!("{err}")))?, - }; - - Ok(PSQLDriverPyQueryResult::new(result)) - } - - pub async fn execute_many( - &self, - querystring: String, - parameters: Option>>, - prepared: Option, - ) -> PSQLPyResult<()> { - let mut statements: Vec = vec![]; - if let Some(parameters) = parameters { - for vec_of_py_any in parameters { - // TODO: Fix multiple qs creation - let statement = - StatementBuilder::new(querystring.clone(), Some(vec_of_py_any), self, prepared) - .build() - .await?; - - statements.push(statement); - } - } - - let prepared = prepared.unwrap_or(true); - - for statement in statements { - let querystring_result = if prepared { - let prepared_stmt = &self.prepare(&statement.raw_query(), true).await; - if let Err(error) = prepared_stmt { - return Err(RustPSQLDriverError::ConnectionExecuteError(format!( - "Cannot prepare statement in execute_many, operation rolled back {error}", - ))); - } - self.query( - &self.prepare(&statement.raw_query(), true).await?, - &statement.params(), - ) - .await - } else { - self.query(statement.raw_query(), &statement.params()).await - }; - - if let Err(error) = querystring_result { - return Err(RustPSQLDriverError::ConnectionExecuteError(format!( - "Error occured in `execute_many` statement: {error}" - ))); - } - } - - return Ok(()); - } - - pub async fn fetch_row_raw( - &self, - querystring: String, - parameters: Option>, - prepared: Option, - ) -> PSQLPyResult { - let statement = StatementBuilder::new(querystring, parameters, self, prepared) - .build() - .await?; - - let prepared = prepared.unwrap_or(true); - - let result = if prepared { - self.query_one( - &self - .prepare(&statement.raw_query(), true) - .await - .map_err(|err| { - RustPSQLDriverError::ConnectionExecuteError(format!( - "Cannot prepare statement, error - {err}" - )) - })?, - &statement.params(), - ) - .await - .map_err(|err| RustPSQLDriverError::ConnectionExecuteError(format!("{err}")))? - } else { - self.query_one(statement.raw_query(), &statement.params()) - .await - .map_err(|err| RustPSQLDriverError::ConnectionExecuteError(format!("{err}")))? 
- }; - - return Ok(result); - } - - pub async fn fetch_row( - &self, - querystring: String, - parameters: Option>, - prepared: Option, - ) -> PSQLPyResult { - let result = self - .fetch_row_raw(querystring, parameters, prepared) - .await?; - - return Ok(PSQLDriverSinglePyQueryResult::new(result)); - } - - pub async fn fetch_val( - &self, - querystring: String, - parameters: Option>, - prepared: Option, - ) -> PSQLPyResult> { - let result = self - .fetch_row_raw(querystring, parameters, prepared) - .await?; - - return Python::with_gil(|gil| match result.columns().first() { - Some(first_column) => postgres_to_py(gil, &result, first_column, 0, &None), - None => Ok(gil.None()), - }); - } -} diff --git a/src/driver/inner_transaction.rs b/src/driver/inner_transaction.rs deleted file mode 100644 index a23f0536..00000000 --- a/src/driver/inner_transaction.rs +++ /dev/null @@ -1,94 +0,0 @@ -use deadpool_postgres::Transaction as dp_Transaction; -use postgres_types::ToSql; -use tokio_postgres::{Portal, Row, ToStatement, Transaction as tp_Transaction}; - -use crate::exceptions::rust_errors::PSQLPyResult; - -pub enum PsqlpyTransaction { - PoolTrans(dp_Transaction<'static>), - SingleConnTrans(tp_Transaction<'static>), -} - -impl PsqlpyTransaction { - async fn commit(self) -> PSQLPyResult<()> { - match self { - PsqlpyTransaction::PoolTrans(p_txid) => Ok(p_txid.commit().await?), - PsqlpyTransaction::SingleConnTrans(s_txid) => Ok(s_txid.commit().await?), - } - } - - async fn rollback(self) -> PSQLPyResult<()> { - match self { - PsqlpyTransaction::PoolTrans(p_txid) => Ok(p_txid.rollback().await?), - PsqlpyTransaction::SingleConnTrans(s_txid) => Ok(s_txid.rollback().await?), - } - } - - async fn savepoint(&mut self, sp_name: &str) -> PSQLPyResult<()> { - match self { - PsqlpyTransaction::PoolTrans(p_txid) => { - p_txid.savepoint(sp_name).await?; - Ok(()) - } - PsqlpyTransaction::SingleConnTrans(s_txid) => { - s_txid.savepoint(sp_name).await?; - Ok(()) - } - } - } - - async fn release_savepoint(&self, sp_name: &str) -> PSQLPyResult<()> { - match self { - PsqlpyTransaction::PoolTrans(p_txid) => { - p_txid - .batch_execute(format!("RELEASE SAVEPOINT {sp_name}").as_str()) - .await?; - Ok(()) - } - PsqlpyTransaction::SingleConnTrans(s_txid) => { - s_txid - .batch_execute(format!("RELEASE SAVEPOINT {sp_name}").as_str()) - .await?; - Ok(()) - } - } - } - - async fn rollback_savepoint(&self, sp_name: &str) -> PSQLPyResult<()> { - match self { - PsqlpyTransaction::PoolTrans(p_txid) => { - p_txid - .batch_execute(format!("ROLLBACK TO SAVEPOINT {sp_name}").as_str()) - .await?; - Ok(()) - } - PsqlpyTransaction::SingleConnTrans(s_txid) => { - s_txid - .batch_execute(format!("ROLLBACK TO SAVEPOINT {sp_name}").as_str()) - .await?; - Ok(()) - } - } - } - - async fn bind(&self, statement: &T, params: &[&(dyn ToSql + Sync)]) -> PSQLPyResult - where - T: ?Sized + ToStatement, - { - match self { - PsqlpyTransaction::PoolTrans(p_txid) => Ok(p_txid.bind(statement, params).await?), - PsqlpyTransaction::SingleConnTrans(s_txid) => { - Ok(s_txid.bind(statement, params).await?) - } - } - } - - pub async fn query_portal(&self, portal: &Portal, size: i32) -> PSQLPyResult> { - match self { - PsqlpyTransaction::PoolTrans(p_txid) => Ok(p_txid.query_portal(portal, size).await?), - PsqlpyTransaction::SingleConnTrans(s_txid) => { - Ok(s_txid.query_portal(portal, size).await?) 
- } - } - } -} diff --git a/src/driver/listener/core.rs b/src/driver/listener/core.rs index 4a9580af..7837478f 100644 --- a/src/driver/listener/core.rs +++ b/src/driver/listener/core.rs @@ -12,10 +12,13 @@ use tokio::{ use tokio_postgres::{AsyncMessage, Config}; use crate::{ + connection::{ + structs::{PSQLPyConnection, SingleConnection}, + traits::Connection as _, + }, driver::{ common_options::SslMode, connection::Connection, - inner_connection::PsqlpyConnection, utils::{build_tls, is_coroutine_function, ConfiguredTLS}, }, exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, @@ -42,19 +45,14 @@ pub struct Listener { impl Listener { #[must_use] - pub fn new( - pg_config: Arc, - ca_file: Option, - ssl_mode: Option, - prepare: bool, - ) -> Self { + pub fn new(pg_config: Arc, ca_file: Option, ssl_mode: Option) -> Self { Listener { pg_config: pg_config.clone(), ca_file, ssl_mode, channel_callbacks: Arc::default(), listen_abort_handler: Option::default(), - connection: Connection::new(None, None, pg_config.clone(), prepare), + connection: Connection::new(None, None, pg_config.clone()), receiver: Option::default(), listen_query: Arc::default(), is_listened: Arc::new(RwLock::new(false)), @@ -224,10 +222,11 @@ impl Listener { self.receiver = Some(Arc::new(RwLock::new(receiver))); self.connection = Connection::new( - Some(Arc::new(PsqlpyConnection::SingleConn(client))), + Some(Arc::new(PSQLPyConnection::SingleConnection( + SingleConnection { connection: client }, + ))), None, self.pg_config.clone(), - false, ); self.is_started = true; @@ -356,7 +355,7 @@ async fn dispatch_callback( async fn execute_listen( is_listened: &Arc>, listen_query: &Arc>, - client: &Arc, + client: &Arc, ) -> PSQLPyResult<()> { let mut write_is_listened = is_listened.write().await; diff --git a/src/driver/mod.rs b/src/driver/mod.rs index 416bfa97..1cff9f57 100644 --- a/src/driver/mod.rs +++ b/src/driver/mod.rs @@ -3,10 +3,7 @@ pub mod connection; pub mod connection_pool; pub mod connection_pool_builder; pub mod cursor; -pub mod inner_connection; -pub mod inner_transaction; pub mod listener; pub mod portal; pub mod transaction; -pub mod transaction_options; pub mod utils; diff --git a/src/driver/portal.rs b/src/driver/portal.rs index d90138b0..0c280637 100644 --- a/src/driver/portal.rs +++ b/src/driver/portal.rs @@ -1,52 +1,87 @@ -use std::sync::Arc; - -use pyo3::{pyclass, pymethods}; -use tokio_postgres::Portal as tp_Portal; - -use crate::{exceptions::rust_errors::PSQLPyResult, query_result::PSQLDriverPyQueryResult}; - -use super::inner_transaction::PsqlpyTransaction; - -#[pyclass] -struct Portal { - transaction: Arc, - inner: tp_Portal, - array_size: i32, -} - -impl Portal { - async fn query_portal(&self, size: i32) -> PSQLPyResult { - let result = self.transaction.query_portal(&self.inner, size).await?; - Ok(PSQLDriverPyQueryResult::new(result)) - } -} - -#[pymethods] -impl Portal { - #[getter] - fn get_array_size(&self) -> i32 { - self.array_size - } - - #[setter] - fn set_array_size(&mut self, value: i32) { - self.array_size = value; - } - - async fn fetch_one(&self) -> PSQLPyResult { - self.query_portal(1).await - } - - #[pyo3(signature = (size=None))] - async fn fetch_many(&self, size: Option) -> PSQLPyResult { - self.query_portal(size.unwrap_or(self.array_size)).await - } - - async fn fetch_all(&self) -> PSQLPyResult { - self.query_portal(-1).await - } - - async fn close(&mut self) { - let _ = Arc::downgrade(&self.transaction); - } -} +// use std::sync::Arc; + +// use pyo3::{pyclass, pymethods, Py, PyObject, 
Python}; +// use tokio_postgres::Portal as tp_Portal; + +// use crate::{ +// exceptions::rust_errors::PSQLPyResult, query_result::PSQLDriverPyQueryResult, +// runtime::rustdriver_future, +// }; + +// use super::inner_transaction::PsqlpyTransaction; + +// #[pyclass] +// pub struct Portal { +// transaction: Arc, +// inner: tp_Portal, +// array_size: i32, +// } + +// impl Portal { +// pub fn new(transaction: Arc, inner: tp_Portal, array_size: i32) -> Self { +// Self { +// transaction, +// inner, +// array_size, +// } +// } + +// async fn query_portal(&self, size: i32) -> PSQLPyResult { +// let result = self.transaction.query_portal(&self.inner, size).await?; +// Ok(PSQLDriverPyQueryResult::new(result)) +// } +// } + +// #[pymethods] +// impl Portal { +// #[getter] +// fn get_array_size(&self) -> i32 { +// self.array_size +// } + +// #[setter] +// fn set_array_size(&mut self, value: i32) { +// self.array_size = value; +// } + +// fn __aiter__(slf: Py) -> Py { +// slf +// } + +// fn __await__(slf: Py) -> Py { +// slf +// } + +// fn __anext__(&self) -> PSQLPyResult> { +// let transaction = self.transaction.clone(); +// let portal = self.inner.clone(); +// let size = self.array_size.clone(); + +// let py_future = Python::with_gil(move |gil| { +// rustdriver_future(gil, async move { +// let result = transaction.query_portal(&portal, size).await?; + +// Ok(PSQLDriverPyQueryResult::new(result)) +// }) +// }); + +// Ok(Some(py_future?)) +// } + +// async fn fetch_one(&self) -> PSQLPyResult { +// self.query_portal(1).await +// } + +// #[pyo3(signature = (size=None))] +// async fn fetch_many(&self, size: Option) -> PSQLPyResult { +// self.query_portal(size.unwrap_or(self.array_size)).await +// } + +// async fn fetch_all(&self) -> PSQLPyResult { +// self.query_portal(-1).await +// } + +// async fn close(&mut self) { +// let _ = Arc::downgrade(&self.transaction); +// } +// } diff --git a/src/driver/transaction.rs b/src/driver/transaction.rs index 60f054b7..50bbfb74 100644 --- a/src/driver/transaction.rs +++ b/src/driver/transaction.rs @@ -9,108 +9,27 @@ use pyo3::{ use tokio_postgres::{binary_copy::BinaryCopyInWriter, config::Host, Config}; use crate::{ + connection::{ + structs::PSQLPyConnection, + traits::{Connection as _, Transaction as _}, + }, exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, format_helpers::quote_ident, + options::{IsolationLevel, ReadVariant}, query_result::{PSQLDriverPyQueryResult, PSQLDriverSinglePyQueryResult}, }; -use super::{ - cursor::Cursor, - inner_connection::PsqlpyConnection, - transaction_options::{IsolationLevel, ReadVariant, SynchronousCommit}, -}; +use super::cursor::Cursor; use std::{collections::HashSet, net::IpAddr, sync::Arc}; -#[allow(clippy::module_name_repetitions)] -pub trait TransactionObjectTrait { - fn start_transaction( - &self, - isolation_level: Option, - read_variant: Option, - defferable: Option, - synchronous_commit: Option, - ) -> impl std::future::Future> + Send; - fn commit(&self) -> impl std::future::Future> + Send; - fn rollback(&self) -> impl std::future::Future> + Send; -} - -impl TransactionObjectTrait for PsqlpyConnection { - async fn start_transaction( - &self, - isolation_level: Option, - read_variant: Option, - deferrable: Option, - synchronous_commit: Option, - ) -> PSQLPyResult<()> { - let mut querystring = "START TRANSACTION".to_string(); - - if let Some(level) = isolation_level { - let level = &level.to_str_level(); - querystring.push_str(format!(" ISOLATION LEVEL {level}").as_str()); - }; - - querystring.push_str(match 
read_variant { - Some(ReadVariant::ReadOnly) => " READ ONLY", - Some(ReadVariant::ReadWrite) => " READ WRITE", - None => "", - }); - - querystring.push_str(match deferrable { - Some(true) => " DEFERRABLE", - Some(false) => " NOT DEFERRABLE", - None => "", - }); - - self.batch_execute(&querystring).await.map_err(|err| { - RustPSQLDriverError::TransactionBeginError(format!( - "Cannot execute statement to start transaction, err - {err}" - )) - })?; - - if let Some(synchronous_commit) = synchronous_commit { - let str_synchronous_commit = synchronous_commit.to_str_level(); - - let synchronous_commit_query = - format!("SET LOCAL synchronous_commit = '{str_synchronous_commit}'"); - - self.batch_execute(&synchronous_commit_query) - .await - .map_err(|err| { - RustPSQLDriverError::TransactionBeginError(format!( - "Cannot set synchronous_commit parameter, err - {err}" - )) - })?; - } - - Ok(()) - } - async fn commit(&self) -> PSQLPyResult<()> { - self.batch_execute("COMMIT;").await.map_err(|err| { - RustPSQLDriverError::TransactionCommitError(format!( - "Cannot execute COMMIT statement, error - {err}" - )) - })?; - Ok(()) - } - async fn rollback(&self) -> PSQLPyResult<()> { - self.batch_execute("ROLLBACK;").await.map_err(|err| { - RustPSQLDriverError::TransactionRollbackError(format!( - "Cannot execute ROLLBACK statement, error - {err}" - )) - })?; - Ok(()) - } -} - #[pyclass(subclass)] pub struct Transaction { - pub db_client: Option>, + pub db_client: Option>, pg_config: Arc, is_started: bool, is_done: bool, isolation_level: Option, - synchronous_commit: Option, read_variant: Option, deferrable: Option, @@ -121,12 +40,11 @@ impl Transaction { #[allow(clippy::too_many_arguments)] #[must_use] pub fn new( - db_client: Arc, + db_client: Arc, pg_config: Arc, is_started: bool, is_done: bool, isolation_level: Option, - synchronous_commit: Option, read_variant: Option, deferrable: Option, savepoints_map: HashSet, @@ -137,7 +55,6 @@ impl Transaction { is_started, is_done, isolation_level, - synchronous_commit, read_variant, deferrable, savepoints_map, @@ -243,26 +160,18 @@ impl Transaction { } async fn __aenter__<'a>(self_: Py) -> PSQLPyResult> { - let ( - is_started, - is_done, - isolation_level, - synchronous_commit, - read_variant, - deferrable, - db_client, - ) = pyo3::Python::with_gil(|gil| { - let self_ = self_.borrow(gil); - ( - self_.is_started, - self_.is_done, - self_.isolation_level, - self_.synchronous_commit, - self_.read_variant, - self_.deferrable, - self_.db_client.clone(), - ) - }); + let (is_started, is_done, isolation_level, read_variant, deferrable, db_client) = + pyo3::Python::with_gil(|gil| { + let self_ = self_.borrow(gil); + ( + self_.is_started, + self_.is_done, + self_.isolation_level, + self_.read_variant, + self_.deferrable, + self_.db_client.clone(), + ) + }); if is_started { return Err(RustPSQLDriverError::TransactionBeginError( @@ -278,12 +187,7 @@ impl Transaction { if let Some(db_client) = db_client { db_client - .start_transaction( - isolation_level, - read_variant, - deferrable, - synchronous_commit, - ) + .start(isolation_level, read_variant, deferrable) .await?; Python::with_gil(|gil| { @@ -565,26 +469,18 @@ impl Transaction { /// 2) Transaction is done. /// 3) Cannot execute `BEGIN` command. 
pub async fn begin(self_: Py) -> PSQLPyResult<()> { - let ( - is_started, - is_done, - isolation_level, - synchronous_commit, - read_variant, - deferrable, - db_client, - ) = pyo3::Python::with_gil(|gil| { - let self_ = self_.borrow(gil); - ( - self_.is_started, - self_.is_done, - self_.isolation_level, - self_.synchronous_commit, - self_.read_variant, - self_.deferrable, - self_.db_client.clone(), - ) - }); + let (is_started, is_done, isolation_level, read_variant, deferrable, db_client) = + pyo3::Python::with_gil(|gil| { + let self_ = self_.borrow(gil); + ( + self_.is_started, + self_.is_done, + self_.isolation_level, + self_.read_variant, + self_.deferrable, + self_.db_client.clone(), + ) + }); if let Some(db_client) = db_client { if is_started { @@ -599,12 +495,7 @@ impl Transaction { )); } db_client - .start_transaction( - isolation_level, - read_variant, - deferrable, - synchronous_commit, - ) + .start(isolation_level, read_variant, deferrable) .await?; pyo3::Python::with_gil(|gil| { diff --git a/src/driver/transaction_options.rs b/src/driver/transaction_options.rs deleted file mode 100644 index 281b9a71..00000000 --- a/src/driver/transaction_options.rs +++ /dev/null @@ -1,81 +0,0 @@ -use pyo3::pyclass; - -#[pyclass(eq, eq_int)] -#[derive(Clone, Copy, PartialEq)] -pub enum IsolationLevel { - ReadUncommitted, - ReadCommitted, - RepeatableRead, - Serializable, -} - -impl IsolationLevel { - /// Return isolation level as String literal. - #[must_use] - pub fn to_str_level(&self) -> String { - match self { - IsolationLevel::ReadUncommitted => "READ UNCOMMITTED".into(), - IsolationLevel::ReadCommitted => "READ COMMITTED".into(), - IsolationLevel::RepeatableRead => "REPEATABLE READ".into(), - IsolationLevel::Serializable => "SERIALIZABLE".into(), - } - } -} - -#[pyclass(eq, eq_int)] -#[derive(Clone, Copy, PartialEq)] -pub enum ReadVariant { - ReadOnly, - ReadWrite, -} - -#[pyclass(eq, eq_int)] -#[derive(Clone, Copy, PartialEq)] -pub enum SynchronousCommit { - /// As the name indicates, the commit acknowledgment can come before - /// flushing the records to disk. - /// This is generally called as an asynchronous commit. - /// If the PostgreSQL instance crashes, - /// the last few asynchronous commits might be lost. - Off, - /// WAL records are written and flushed to local disks. - /// In this case, the commit will be acknowledged after the - /// local WAL Write and WAL flush completes. - Local, - /// WAL records are successfully handed over to - /// remote instances which acknowledged back - /// about the write (not flush). - RemoteWrite, - /// The meaning may change based on whether you have - /// a synchronous standby or not. - /// If there is a synchronous standby, - /// setting the value to on will result in waiting till “remote flush”. - On, - /// This will result in commits waiting until replies from the - /// current synchronous standby(s) indicate they have received - /// the commit record of the transaction and applied it so - /// that it has become visible to queries on the standby(s). - RemoteApply, -} - -impl SynchronousCommit { - /// Return isolation level as String literal. 
- #[must_use] - pub fn to_str_level(&self) -> String { - match self { - SynchronousCommit::Off => "off".into(), - SynchronousCommit::Local => "local".into(), - SynchronousCommit::RemoteWrite => "remote_write".into(), - SynchronousCommit::On => "on".into(), - SynchronousCommit::RemoteApply => "remote_apply".into(), - } - } -} - -#[derive(Clone, Copy, PartialEq)] -pub struct ListenerTransactionConfig { - isolation_level: Option, - read_variant: Option, - deferrable: Option, - synchronous_commit: Option, -} diff --git a/src/lib.rs b/src/lib.rs index d6ae473a..33ec678c 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,8 +1,10 @@ pub mod common; +pub mod connection; pub mod driver; pub mod exceptions; pub mod extra_types; pub mod format_helpers; +pub mod options; pub mod query_result; pub mod row_factories; pub mod runtime; @@ -35,9 +37,8 @@ fn psqlpy(py: Python<'_>, pymod: &Bound<'_, PyModule>) -> PyResult<()> { pymod.add_class::()?; pymod.add_class::()?; pymod.add_class::()?; - pymod.add_class::()?; - pymod.add_class::()?; - pymod.add_class::()?; + pymod.add_class::()?; + pymod.add_class::()?; pymod.add_class::()?; pymod.add_class::()?; pymod.add_class::()?; diff --git a/src/options.rs b/src/options.rs new file mode 100644 index 00000000..bd8ad511 --- /dev/null +++ b/src/options.rs @@ -0,0 +1,221 @@ +use std::time::Duration; + +use deadpool_postgres::RecyclingMethod; +use pyo3::{pyclass, pymethods}; + +#[pyclass(eq, eq_int)] +#[derive(Clone, Copy, PartialEq)] +pub enum ConnRecyclingMethod { + Fast, + Verified, + Clean, +} + +impl ConnRecyclingMethod { + #[must_use] + pub fn to_internal(&self) -> RecyclingMethod { + match self { + ConnRecyclingMethod::Fast => RecyclingMethod::Fast, + ConnRecyclingMethod::Verified => RecyclingMethod::Verified, + ConnRecyclingMethod::Clean => RecyclingMethod::Clean, + } + } +} + +#[pyclass(eq, eq_int)] +#[derive(Clone, Copy, PartialEq)] +pub enum LoadBalanceHosts { + /// Make connection attempts to hosts in the order provided. + Disable, + /// Make connection attempts to hosts in a random order. + Random, +} + +impl LoadBalanceHosts { + #[must_use] + pub fn to_internal(&self) -> tokio_postgres::config::LoadBalanceHosts { + match self { + LoadBalanceHosts::Disable => tokio_postgres::config::LoadBalanceHosts::Disable, + LoadBalanceHosts::Random => tokio_postgres::config::LoadBalanceHosts::Random, + } + } +} + +#[pyclass(eq, eq_int)] +#[derive(Clone, Copy, PartialEq)] +pub enum TargetSessionAttrs { + /// No special properties are required. + Any, + /// The session must allow writes. + ReadWrite, + /// The session allow only reads. + ReadOnly, +} + +impl TargetSessionAttrs { + #[must_use] + pub fn to_internal(&self) -> tokio_postgres::config::TargetSessionAttrs { + match self { + TargetSessionAttrs::Any => tokio_postgres::config::TargetSessionAttrs::Any, + TargetSessionAttrs::ReadWrite => tokio_postgres::config::TargetSessionAttrs::ReadWrite, + TargetSessionAttrs::ReadOnly => tokio_postgres::config::TargetSessionAttrs::ReadOnly, + } + } +} + +#[pyclass(eq, eq_int)] +#[derive(Clone, Copy, PartialEq, Debug)] +pub enum SslMode { + /// Do not use TLS. + Disable, + /// Pay the overhead of encryption if the server insists on it. + Allow, + /// Attempt to connect with TLS but allow sessions without. + Prefer, + /// Require the use of TLS. + Require, + /// I want my data encrypted, + /// and I accept the overhead. + /// I want to be sure that I connect to a server that I trust. + VerifyCa, + /// I want my data encrypted, + /// and I accept the overhead. 
+ /// I want to be sure that I connect to a server I trust, + /// and that it's the one I specify. + VerifyFull, +} + +impl SslMode { + #[must_use] + pub fn to_internal(&self) -> tokio_postgres::config::SslMode { + match self { + SslMode::Disable => tokio_postgres::config::SslMode::Disable, + SslMode::Allow => tokio_postgres::config::SslMode::Allow, + SslMode::Prefer => tokio_postgres::config::SslMode::Prefer, + SslMode::Require => tokio_postgres::config::SslMode::Require, + SslMode::VerifyCa => tokio_postgres::config::SslMode::VerifyCa, + SslMode::VerifyFull => tokio_postgres::config::SslMode::VerifyFull, + } + } +} + +#[pyclass] +#[derive(Clone, Copy)] +pub struct KeepaliveConfig { + pub idle: Duration, + pub interval: Option, + pub retries: Option, +} + +#[pymethods] +impl KeepaliveConfig { + #[new] + #[pyo3(signature = (idle, interval=None, retries=None))] + fn build_config(idle: u64, interval: Option, retries: Option) -> Self { + let interval_internal = interval.map(Duration::from_secs); + KeepaliveConfig { + idle: Duration::from_secs(idle), + interval: interval_internal, + retries, + } + } +} + +#[pyclass(eq, eq_int)] +#[derive(Clone, Copy, PartialEq)] +pub enum CopyCommandFormat { + TEXT, + CSV, + BINARY, +} + +impl CopyCommandFormat { + #[must_use] + pub fn to_internal(&self) -> String { + match self { + CopyCommandFormat::TEXT => "text".into(), + CopyCommandFormat::CSV => "csv".into(), + CopyCommandFormat::BINARY => "binary".into(), + } + } +} + +#[pyclass(eq, eq_int)] +#[derive(Clone, Copy, PartialEq)] +pub enum IsolationLevel { + ReadUncommitted, + ReadCommitted, + RepeatableRead, + Serializable, +} + +impl IsolationLevel { + /// Return isolation level as String literal. + #[must_use] + pub fn to_str_level(&self) -> String { + match self { + IsolationLevel::ReadUncommitted => "READ UNCOMMITTED".into(), + IsolationLevel::ReadCommitted => "READ COMMITTED".into(), + IsolationLevel::RepeatableRead => "REPEATABLE READ".into(), + IsolationLevel::Serializable => "SERIALIZABLE".into(), + } + } +} + +#[pyclass(eq, eq_int)] +#[derive(Clone, Copy, PartialEq)] +pub enum ReadVariant { + ReadOnly, + ReadWrite, +} + +#[pyclass(eq, eq_int)] +#[derive(Clone, Copy, PartialEq)] +pub enum SynchronousCommit { + /// As the name indicates, the commit acknowledgment can come before + /// flushing the records to disk. + /// This is generally called as an asynchronous commit. + /// If the PostgreSQL instance crashes, + /// the last few asynchronous commits might be lost. + Off, + /// WAL records are written and flushed to local disks. + /// In this case, the commit will be acknowledged after the + /// local WAL Write and WAL flush completes. + Local, + /// WAL records are successfully handed over to + /// remote instances which acknowledged back + /// about the write (not flush). + RemoteWrite, + /// The meaning may change based on whether you have + /// a synchronous standby or not. + /// If there is a synchronous standby, + /// setting the value to on will result in waiting till “remote flush”. + On, + /// This will result in commits waiting until replies from the + /// current synchronous standby(s) indicate they have received + /// the commit record of the transaction and applied it so + /// that it has become visible to queries on the standby(s). + RemoteApply, +} + +impl SynchronousCommit { + /// Return isolation level as String literal. 
+ #[must_use] + pub fn to_str_level(&self) -> String { + match self { + SynchronousCommit::Off => "off".into(), + SynchronousCommit::Local => "local".into(), + SynchronousCommit::RemoteWrite => "remote_write".into(), + SynchronousCommit::On => "on".into(), + SynchronousCommit::RemoteApply => "remote_apply".into(), + } + } +} + +#[derive(Clone, Copy, PartialEq)] +pub struct ListenerTransactionConfig { + isolation_level: Option, + read_variant: Option, + deferrable: Option, + synchronous_commit: Option, +} diff --git a/src/statement/statement_builder.rs b/src/statement/statement_builder.rs index 5954f88c..c909f68d 100644 --- a/src/statement/statement_builder.rs +++ b/src/statement/statement_builder.rs @@ -2,7 +2,10 @@ use pyo3::PyObject; use tokio::sync::RwLockWriteGuard; use tokio_postgres::Statement; -use crate::{driver::inner_connection::PsqlpyConnection, exceptions::rust_errors::PSQLPyResult}; +use crate::{ + connection::{structs::PSQLPyConnection, traits::Connection}, + exceptions::rust_errors::PSQLPyResult, +}; use super::{ cache::{StatementCacheInfo, StatementsCache, STMTS_CACHE}, @@ -14,7 +17,7 @@ use super::{ pub struct StatementBuilder<'a> { querystring: String, parameters: Option, - inner_conn: &'a PsqlpyConnection, + inner_conn: &'a PSQLPyConnection, prepared: bool, } @@ -22,7 +25,7 @@ impl<'a> StatementBuilder<'a> { pub fn new( querystring: String, parameters: Option, - inner_conn: &'a PsqlpyConnection, + inner_conn: &'a PSQLPyConnection, prepared: Option, ) -> Self { Self { From 414f2ac2dd9f5a7b14b2208afdd90fe18accd4fe Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Fri, 16 May 2025 02:00:50 +0200 Subject: [PATCH 55/65] Making PSQLPy SQLAlchemy compatible --- Cargo.lock | 16 + Cargo.toml | 2 +- python/tests/test_transaction.py | 18 +- src/connection/impls.rs | 274 +++++---- src/connection/structs.rs | 31 +- src/connection/traits.rs | 60 +- src/driver/common.rs | 94 ++++ src/driver/common_options.rs | 141 ----- src/driver/connection.rs | 146 ++--- src/driver/connection_pool.rs | 19 +- src/driver/connection_pool_builder.rs | 19 +- src/driver/cursor.rs | 773 +++++++------------------- src/driver/listener/core.rs | 13 +- src/driver/mod.rs | 2 +- src/driver/transaction.rs | 760 +++++++------------------ src/driver/utils.rs | 9 +- src/lib.rs | 10 +- 17 files changed, 845 insertions(+), 1542 deletions(-) create mode 100644 src/driver/common.rs delete mode 100644 src/driver/common_options.rs diff --git a/Cargo.lock b/Cargo.lock index fc0b9c9b..076ac4c3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -572,6 +572,15 @@ version = "2.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b248f5224d1d606005e02c97f5aa4e88eeb230488bcc03bc9ca4d7991399f2b5" +[[package]] +name = "inventory" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b31349d02fe60f80bbbab1a9402364cad7460626d6030494b08ac4a2075bf81" +dependencies = [ + "rustversion", +] + [[package]] name = "itertools" version = "0.12.1" @@ -1061,6 +1070,7 @@ dependencies = [ "cfg-if", "chrono", "indoc", + "inventory", "libc", "memoffset", "once_cell", @@ -1291,6 +1301,12 @@ version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" +[[package]] +name = "rustversion" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eded382c5f5f786b989652c49544c4877d9f015cc22e145a5ea8ea66c2921cd2" + [[package]] 
name = "ryu" version = "1.0.18" diff --git a/Cargo.toml b/Cargo.toml index b33f08c2..68160a6a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -10,7 +10,7 @@ crate-type = ["cdylib"] [dependencies] deadpool-postgres = { git = "https://github.com/chandr-andr/deadpool.git", branch = "psqlpy" } -pyo3 = { version = "0.23.4", features = ["chrono", "experimental-async", "rust_decimal", "py-clone", "macros"] } +pyo3 = { version = "0.23.4", features = ["chrono", "experimental-async", "rust_decimal", "py-clone", "macros", "multiple-pymethods"] } pyo3-async-runtimes = { git = "https://github.com/chandr-andr/pyo3-async-runtimes.git", branch = "psqlpy", features = [ "tokio-runtime", ] } diff --git a/python/tests/test_transaction.py b/python/tests/test_transaction.py index a6dfd191..a186084a 100644 --- a/python/tests/test_transaction.py +++ b/python/tests/test_transaction.py @@ -9,11 +9,10 @@ IsolationLevel, ReadVariant, ) +from psqlpy._internal.exceptions import TransactionClosedError from psqlpy.exceptions import ( InterfaceError, - TransactionBeginError, TransactionExecuteError, - TransactionSavepointError, ) from tests.helpers import count_rows_in_test_table @@ -64,10 +63,10 @@ async def test_transaction_begin( connection = await psql_pool.connection() transaction = connection.transaction() - with pytest.raises(expected_exception=TransactionBeginError): - await transaction.execute( - f"SELECT * FROM {table_name}", - ) + # with pytest.raises(expected_exception=TransactionBeginError): + await transaction.execute( + f"SELECT * FROM {table_name}", + ) await transaction.begin() @@ -170,7 +169,7 @@ async def test_transaction_rollback( await transaction.rollback() - with pytest.raises(expected_exception=TransactionBeginError): + with pytest.raises(expected_exception=TransactionClosedError): await transaction.execute( f"SELECT * FROM {table_name} WHERE name = $1", parameters=[test_name], @@ -198,9 +197,8 @@ async def test_transaction_release_savepoint( sp_name_2 = "sp2" await transaction.create_savepoint(sp_name_1) - - with pytest.raises(expected_exception=TransactionSavepointError): - await transaction.create_savepoint(sp_name_1) + # There is no problem in creating the same sp_name + await transaction.create_savepoint(sp_name_1) await transaction.create_savepoint(sp_name_2) diff --git a/src/connection/impls.rs b/src/connection/impls.rs index 50b195a0..ee6bab4b 100644 --- a/src/connection/impls.rs +++ b/src/connection/impls.rs @@ -12,15 +12,15 @@ use crate::{ use super::{ structs::{PSQLPyConnection, PoolConnection, SingleConnection}, - traits::{Connection, Transaction}, + traits::{CloseTransaction, Connection, Cursor, StartTransaction, Transaction}, }; impl Transaction for T where T: Connection, { - async fn start( - &self, + async fn _start_transaction( + &mut self, isolation_level: Option, read_variant: Option, deferrable: Option, @@ -35,7 +35,7 @@ where Ok(()) } - async fn commit(&self) -> PSQLPyResult<()> { + async fn _commit(&self) -> PSQLPyResult<()> { self.batch_execute("COMMIT;").await.map_err(|err| { RustPSQLDriverError::TransactionCommitError(format!( "Cannot execute COMMIT statement, error - {err}" @@ -44,7 +44,7 @@ where Ok(()) } - async fn rollback(&self) -> PSQLPyResult<()> { + async fn _rollback(&self) -> PSQLPyResult<()> { self.batch_execute("ROLLBACK;").await.map_err(|err| { RustPSQLDriverError::TransactionRollbackError(format!( "Cannot execute ROLLBACK statement, error - {err}" @@ -105,41 +105,37 @@ impl Connection for SingleConnection { } } -// impl Transaction for SingleConnection { -// async 
fn start( -// &self, -// isolation_level: Option, -// read_variant: Option, -// deferrable: Option, -// ) -> PSQLPyResult<()> { -// let start_qs = self.build_start_qs(isolation_level, read_variant, deferrable); -// self.batch_execute(start_qs.as_str()).await.map_err(|err| { -// RustPSQLDriverError::TransactionBeginError( -// format!("Cannot start transaction due to - {err}").into(), -// ) -// })?; - -// Ok(()) -// } - -// async fn commit(&self) -> PSQLPyResult<()> { -// self.batch_execute("COMMIT;").await.map_err(|err| { -// RustPSQLDriverError::TransactionCommitError(format!( -// "Cannot execute COMMIT statement, error - {err}" -// )) -// })?; -// Ok(()) -// } - -// async fn rollback(&self) -> PSQLPyResult<()> { -// self.batch_execute("ROLLBACK;").await.map_err(|err| { -// RustPSQLDriverError::TransactionRollbackError(format!( -// "Cannot execute ROLLBACK statement, error - {err}" -// )) -// })?; -// Ok(()) -// } -// } +impl StartTransaction for SingleConnection { + async fn start_transaction( + &mut self, + isolation_level: Option, + read_variant: Option, + deferrable: Option, + ) -> PSQLPyResult<()> { + let res = self + ._start_transaction(isolation_level, read_variant, deferrable) + .await?; + self.in_transaction = true; + + Ok(res) + } +} + +impl CloseTransaction for SingleConnection { + async fn commit(&mut self) -> PSQLPyResult<()> { + let res = self._commit().await?; + self.in_transaction = false; + + Ok(res) + } + + async fn rollback(&mut self) -> PSQLPyResult<()> { + let res = self._rollback().await?; + self.in_transaction = false; + + Ok(res) + } +} impl Connection for PoolConnection { async fn prepare(&self, query: &str, prepared: bool) -> PSQLPyResult { @@ -193,41 +189,34 @@ impl Connection for PoolConnection { } } -// impl Transaction for PoolConnection { -// async fn start( -// &self, -// isolation_level: Option, -// read_variant: Option, -// deferrable: Option, -// ) -> PSQLPyResult<()> { -// let start_qs = self.build_start_qs(isolation_level, read_variant, deferrable); -// self.batch_execute(start_qs.as_str()).await.map_err(|err| { -// RustPSQLDriverError::TransactionBeginError( -// format!("Cannot start transaction due to - {err}").into(), -// ) -// })?; - -// Ok(()) -// } - -// async fn commit(&self) -> PSQLPyResult<()> { -// self.batch_execute("COMMIT;").await.map_err(|err| { -// RustPSQLDriverError::TransactionCommitError(format!( -// "Cannot execute COMMIT statement, error - {err}" -// )) -// })?; -// Ok(()) -// } - -// async fn rollback(&self) -> PSQLPyResult<()> { -// self.batch_execute("ROLLBACK;").await.map_err(|err| { -// RustPSQLDriverError::TransactionRollbackError(format!( -// "Cannot execute ROLLBACK statement, error - {err}" -// )) -// })?; -// Ok(()) -// } -// } +impl StartTransaction for PoolConnection { + async fn start_transaction( + &mut self, + isolation_level: Option, + read_variant: Option, + deferrable: Option, + ) -> PSQLPyResult<()> { + self.in_transaction = true; + self._start_transaction(isolation_level, read_variant, deferrable) + .await + } +} + +impl CloseTransaction for PoolConnection { + async fn commit(&mut self) -> PSQLPyResult<()> { + let res = self._commit().await?; + self.in_transaction = false; + + Ok(res) + } + + async fn rollback(&mut self) -> PSQLPyResult<()> { + let res = self._rollback().await?; + self.in_transaction = false; + + Ok(res) + } +} impl Connection for PSQLPyConnection { async fn prepare(&self, query: &str, prepared: bool) -> PSQLPyResult { @@ -293,37 +282,81 @@ impl Connection for PSQLPyConnection { } } -// impl 
Transaction for PSQLPyConnection { -// async fn start( -// &self, -// isolation_level: Option, -// read_variant: Option, -// deferrable: Option, -// ) -> PSQLPyResult<()> { -// match self { -// PSQLPyConnection::PoolConn(p_conn) => p_conn.start(isolation_level, read_variant, deferrable).await, -// PSQLPyConnection::SingleConnection(s_conn) => s_conn.start(isolation_level, read_variant, deferrable).await, -// } -// } - -// async fn commit(&self) -> PSQLPyResult<()> { -// self.batch_execute("COMMIT;").await.map_err(|err| { -// RustPSQLDriverError::TransactionCommitError(format!( -// "Cannot execute COMMIT statement, error - {err}" -// )) -// })?; -// Ok(()) -// } - -// async fn rollback(&self) -> PSQLPyResult<()> { -// self.batch_execute("ROLLBACK;").await.map_err(|err| { -// RustPSQLDriverError::TransactionRollbackError(format!( -// "Cannot execute ROLLBACK statement, error - {err}" -// )) -// })?; -// Ok(()) -// } -// } +impl StartTransaction for PSQLPyConnection { + async fn start_transaction( + &mut self, + isolation_level: Option, + read_variant: Option, + deferrable: Option, + ) -> PSQLPyResult<()> { + match self { + PSQLPyConnection::PoolConn(p_conn) => { + p_conn + .start_transaction(isolation_level, read_variant, deferrable) + .await + } + PSQLPyConnection::SingleConnection(s_conn) => { + s_conn + .start_transaction(isolation_level, read_variant, deferrable) + .await + } + } + } +} + +impl CloseTransaction for PSQLPyConnection { + async fn commit(&mut self) -> PSQLPyResult<()> { + match self { + PSQLPyConnection::PoolConn(p_conn) => p_conn.commit().await, + PSQLPyConnection::SingleConnection(s_conn) => s_conn.commit().await, + } + } + + async fn rollback(&mut self) -> PSQLPyResult<()> { + match self { + PSQLPyConnection::PoolConn(p_conn) => p_conn.rollback().await, + PSQLPyConnection::SingleConnection(s_conn) => s_conn.rollback().await, + } + } +} + +impl Cursor for PSQLPyConnection { + async fn start_cursor( + &mut self, + cursor_name: &str, + scroll: &Option, + querystring: String, + prepared: &Option, + parameters: Option>, + ) -> PSQLPyResult<()> { + let cursor_qs = self.build_cursor_start_qs(cursor_name, scroll, &querystring); + self.execute(cursor_qs, parameters, *prepared) + .await + .map_err(|err| { + RustPSQLDriverError::CursorStartError(format!("Cannot start cursor due to {err}")) + })?; + match self { + PSQLPyConnection::PoolConn(conn) => conn.in_cursor = true, + PSQLPyConnection::SingleConnection(conn) => conn.in_cursor = true, + } + Ok(()) + } + + async fn close_cursor(&mut self, cursor_name: &str) -> PSQLPyResult<()> { + self.execute( + format!("CLOSE {cursor_name}"), + Option::default(), + Some(false), + ) + .await?; + + match self { + PSQLPyConnection::PoolConn(conn) => conn.in_cursor = false, + PSQLPyConnection::SingleConnection(conn) => conn.in_cursor = false, + } + Ok(()) + } +} impl PSQLPyConnection { pub async fn execute( @@ -337,23 +370,24 @@ impl PSQLPyConnection { .await?; let prepared = prepared.unwrap_or(true); - let result = match prepared { - true => self - .query(statement.statement_query()?, &statement.params()) - .await - .map_err(|err| { - RustPSQLDriverError::ConnectionExecuteError(format!( - "Cannot prepare statement, error - {err}" - )) - })?, - false => self - .query_typed(statement.raw_query(), &statement.params_typed()) - .await - .map_err(|err| RustPSQLDriverError::ConnectionExecuteError(format!("{err}")))?, + true => { + self.query(statement.statement_query()?, &statement.params()) + .await + } + false => { + 
self.query_typed(statement.raw_query(), &statement.params_typed()) + .await + } }; - Ok(PSQLDriverPyQueryResult::new(result)) + let return_result = result.map_err(|err| { + RustPSQLDriverError::ConnectionExecuteError(format!( + "Cannot execute query, error - {err}" + )) + })?; + + Ok(PSQLDriverPyQueryResult::new(return_result)) } pub async fn execute_many( diff --git a/src/connection/structs.rs b/src/connection/structs.rs index 9e713bfd..ccfd101d 100644 --- a/src/connection/structs.rs +++ b/src/connection/structs.rs @@ -1,12 +1,41 @@ +use std::sync::Arc; + use deadpool_postgres::Object; -use tokio_postgres::Client; +use tokio_postgres::{Client, Config}; pub struct PoolConnection { pub connection: Object, + pub in_transaction: bool, + pub in_cursor: bool, + pub pg_config: Arc, } +impl PoolConnection { + pub fn new(connection: Object, pg_config: Arc) -> Self { + Self { + connection, + in_transaction: false, + in_cursor: false, + pg_config, + } + } +} pub struct SingleConnection { pub connection: Client, + pub in_transaction: bool, + pub in_cursor: bool, + pub pg_config: Arc, +} + +impl SingleConnection { + pub fn new(connection: Client, pg_config: Arc) -> Self { + Self { + connection, + in_transaction: false, + in_cursor: false, + pg_config, + } + } } pub enum PSQLPyConnection { diff --git a/src/connection/traits.rs b/src/connection/traits.rs index 9428eb70..8e868a06 100644 --- a/src/connection/traits.rs +++ b/src/connection/traits.rs @@ -1,4 +1,5 @@ use postgres_types::{ToSql, Type}; +use pyo3::PyAny; use tokio_postgres::{Row, Statement, ToStatement}; use crate::exceptions::rust_errors::PSQLPyResult; @@ -74,14 +75,65 @@ pub trait Transaction { querystring } - fn start( - &self, + fn _start_transaction( + &mut self, + isolation_level: Option, + read_variant: Option, + deferrable: Option, + ) -> impl std::future::Future>; + + fn _commit(&self) -> impl std::future::Future>; + + fn _rollback(&self) -> impl std::future::Future>; +} + +pub trait StartTransaction: Transaction { + fn start_transaction( + &mut self, isolation_level: Option, read_variant: Option, deferrable: Option, ) -> impl std::future::Future>; +} - fn commit(&self) -> impl std::future::Future>; +pub trait CloseTransaction: StartTransaction { + fn commit(&mut self) -> impl std::future::Future>; - fn rollback(&self) -> impl std::future::Future>; + fn rollback(&mut self) -> impl std::future::Future>; +} + +pub trait Cursor { + fn build_cursor_start_qs( + &self, + cursor_name: &str, + scroll: &Option, + querystring: &str, + ) -> String { + let mut cursor_init_query = format!("DECLARE {cursor_name}"); + if let Some(scroll) = scroll { + if *scroll { + cursor_init_query.push_str(" SCROLL"); + } else { + cursor_init_query.push_str(" NO SCROLL"); + } + } + + cursor_init_query.push_str(format!(" CURSOR FOR {querystring}").as_str()); + + cursor_init_query + } + + fn start_cursor( + &mut self, + cursor_name: &str, + scroll: &Option, + querystring: String, + prepared: &Option, + parameters: Option>, + ) -> impl std::future::Future>; + + fn close_cursor( + &mut self, + cursor_name: &str, + ) -> impl std::future::Future>; } diff --git a/src/driver/common.rs b/src/driver/common.rs new file mode 100644 index 00000000..528fed84 --- /dev/null +++ b/src/driver/common.rs @@ -0,0 +1,94 @@ +use pyo3::prelude::*; +use tokio_postgres::config::Host; + +use std::net::IpAddr; + +use super::{connection::Connection, cursor::Cursor, transaction::Transaction}; + +macro_rules! 
impl_config_py_methods { + ($name:ident) => { + #[pymethods] + impl $name { + #[getter] + fn conn_dbname(&self) -> Option<&str> { + self.pg_config.get_dbname() + } + + #[getter] + fn user(&self) -> Option<&str> { + self.pg_config.get_user() + } + + #[getter] + fn host_addrs(&self) -> Vec { + let mut host_addrs_vec = vec![]; + + let host_addrs = self.pg_config.get_hostaddrs(); + for ip_addr in host_addrs { + match ip_addr { + IpAddr::V4(ipv4) => { + host_addrs_vec.push(ipv4.to_string()); + } + IpAddr::V6(ipv6) => { + host_addrs_vec.push(ipv6.to_string()); + } + } + } + + host_addrs_vec + } + + #[cfg(unix)] + #[getter] + fn hosts(&self) -> Vec { + let mut hosts_vec = vec![]; + + let hosts = self.pg_config.get_hosts(); + for host in hosts { + match host { + Host::Tcp(host) => { + hosts_vec.push(host.to_string()); + } + Host::Unix(host) => { + hosts_vec.push(host.display().to_string()); + } + } + } + + hosts_vec + } + + #[cfg(not(unix))] + #[getter] + fn hosts(&self) -> Vec { + let mut hosts_vec = vec![]; + + let hosts = self.pg_config.get_hosts(); + for host in hosts { + match host { + Host::Tcp(host) => { + hosts_vec.push(host.to_string()); + } + _ => unreachable!(), + } + } + + hosts_vec + } + + #[getter] + fn ports(&self) -> Vec<&u16> { + return self.pg_config.get_ports().iter().collect::>(); + } + + #[getter] + fn options(&self) -> Option<&str> { + return self.pg_config.get_options(); + } + } + }; +} + +impl_config_py_methods!(Transaction); +impl_config_py_methods!(Connection); +impl_config_py_methods!(Cursor); diff --git a/src/driver/common_options.rs b/src/driver/common_options.rs deleted file mode 100644 index a76d37dd..00000000 --- a/src/driver/common_options.rs +++ /dev/null @@ -1,141 +0,0 @@ -use std::time::Duration; - -use deadpool_postgres::RecyclingMethod; -use pyo3::{pyclass, pymethods}; - -#[pyclass(eq, eq_int)] -#[derive(Clone, Copy, PartialEq)] -pub enum ConnRecyclingMethod { - Fast, - Verified, - Clean, -} - -impl ConnRecyclingMethod { - #[must_use] - pub fn to_internal(&self) -> RecyclingMethod { - match self { - ConnRecyclingMethod::Fast => RecyclingMethod::Fast, - ConnRecyclingMethod::Verified => RecyclingMethod::Verified, - ConnRecyclingMethod::Clean => RecyclingMethod::Clean, - } - } -} - -#[pyclass(eq, eq_int)] -#[derive(Clone, Copy, PartialEq)] -pub enum LoadBalanceHosts { - /// Make connection attempts to hosts in the order provided. - Disable, - /// Make connection attempts to hosts in a random order. - Random, -} - -impl LoadBalanceHosts { - #[must_use] - pub fn to_internal(&self) -> tokio_postgres::config::LoadBalanceHosts { - match self { - LoadBalanceHosts::Disable => tokio_postgres::config::LoadBalanceHosts::Disable, - LoadBalanceHosts::Random => tokio_postgres::config::LoadBalanceHosts::Random, - } - } -} - -#[pyclass(eq, eq_int)] -#[derive(Clone, Copy, PartialEq)] -pub enum TargetSessionAttrs { - /// No special properties are required. - Any, - /// The session must allow writes. - ReadWrite, - /// The session allow only reads. - ReadOnly, -} - -impl TargetSessionAttrs { - #[must_use] - pub fn to_internal(&self) -> tokio_postgres::config::TargetSessionAttrs { - match self { - TargetSessionAttrs::Any => tokio_postgres::config::TargetSessionAttrs::Any, - TargetSessionAttrs::ReadWrite => tokio_postgres::config::TargetSessionAttrs::ReadWrite, - TargetSessionAttrs::ReadOnly => tokio_postgres::config::TargetSessionAttrs::ReadOnly, - } - } -} - -#[pyclass(eq, eq_int)] -#[derive(Clone, Copy, PartialEq, Debug)] -pub enum SslMode { - /// Do not use TLS. 
- Disable, - /// Pay the overhead of encryption if the server insists on it. - Allow, - /// Attempt to connect with TLS but allow sessions without. - Prefer, - /// Require the use of TLS. - Require, - /// I want my data encrypted, - /// and I accept the overhead. - /// I want to be sure that I connect to a server that I trust. - VerifyCa, - /// I want my data encrypted, - /// and I accept the overhead. - /// I want to be sure that I connect to a server I trust, - /// and that it's the one I specify. - VerifyFull, -} - -impl SslMode { - #[must_use] - pub fn to_internal(&self) -> tokio_postgres::config::SslMode { - match self { - SslMode::Disable => tokio_postgres::config::SslMode::Disable, - SslMode::Allow => tokio_postgres::config::SslMode::Allow, - SslMode::Prefer => tokio_postgres::config::SslMode::Prefer, - SslMode::Require => tokio_postgres::config::SslMode::Require, - SslMode::VerifyCa => tokio_postgres::config::SslMode::VerifyCa, - SslMode::VerifyFull => tokio_postgres::config::SslMode::VerifyFull, - } - } -} - -#[pyclass] -#[derive(Clone, Copy)] -pub struct KeepaliveConfig { - pub idle: Duration, - pub interval: Option, - pub retries: Option, -} - -#[pymethods] -impl KeepaliveConfig { - #[new] - #[pyo3(signature = (idle, interval=None, retries=None))] - fn build_config(idle: u64, interval: Option, retries: Option) -> Self { - let interval_internal = interval.map(Duration::from_secs); - KeepaliveConfig { - idle: Duration::from_secs(idle), - interval: interval_internal, - retries, - } - } -} - -#[pyclass(eq, eq_int)] -#[derive(Clone, Copy, PartialEq)] -pub enum CopyCommandFormat { - TEXT, - CSV, - BINARY, -} - -impl CopyCommandFormat { - #[must_use] - pub fn to_internal(&self) -> String { - match self { - CopyCommandFormat::TEXT => "text".into(), - CopyCommandFormat::CSV => "csv".into(), - CopyCommandFormat::BINARY => "binary".into(), - } - } -} diff --git a/src/driver/connection.rs b/src/driver/connection.rs index 3b5a4ab5..9635a836 100644 --- a/src/driver/connection.rs +++ b/src/driver/connection.rs @@ -2,8 +2,9 @@ use bytes::BytesMut; use deadpool_postgres::Pool; use futures_util::pin_mut; use pyo3::{buffer::PyBuffer, pyclass, pyfunction, pymethods, Py, PyAny, PyErr, Python}; -use std::{collections::HashSet, net::IpAddr, sync::Arc}; -use tokio_postgres::{binary_copy::BinaryCopyInWriter, config::Host, Config}; +use std::sync::Arc; +use tokio::sync::RwLock; +use tokio_postgres::{binary_copy::BinaryCopyInWriter, Config}; use crate::{ connection::{ @@ -12,17 +13,12 @@ use crate::{ }, exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, format_helpers::quote_ident, - options::{IsolationLevel, ReadVariant}, + options::{IsolationLevel, LoadBalanceHosts, ReadVariant, SslMode, TargetSessionAttrs}, query_result::{PSQLDriverPyQueryResult, PSQLDriverSinglePyQueryResult}, runtime::tokio_runtime, }; -use super::{ - common_options::{LoadBalanceHosts, SslMode, TargetSessionAttrs}, - connection_pool::connect_pool, - cursor::Cursor, - transaction::Transaction, -}; +use super::{connection_pool::connect_pool, cursor::Cursor, transaction::Transaction}; /// Make new connection pool. 
/// @@ -121,15 +117,15 @@ pub async fn connect( #[pyclass(subclass)] #[derive(Clone)] pub struct Connection { - db_client: Option>, + db_client: Option>>, db_pool: Option, - pg_config: Arc, + pub pg_config: Arc, } impl Connection { #[must_use] pub fn new( - db_client: Option>, + db_client: Option>>, db_pool: Option, pg_config: Arc, ) -> Self { @@ -141,7 +137,7 @@ impl Connection { } #[must_use] - pub fn db_client(&self) -> Option> { + pub fn db_client(&self) -> Option>> { self.db_client.clone() } @@ -159,87 +155,14 @@ impl Default for Connection { #[pymethods] impl Connection { - #[getter] - fn conn_dbname(&self) -> Option<&str> { - self.pg_config.get_dbname() - } - - #[getter] - fn user(&self) -> Option<&str> { - self.pg_config.get_user() - } - - #[getter] - fn host_addrs(&self) -> Vec { - let mut host_addrs_vec = vec![]; - - let host_addrs = self.pg_config.get_hostaddrs(); - for ip_addr in host_addrs { - match ip_addr { - IpAddr::V4(ipv4) => { - host_addrs_vec.push(ipv4.to_string()); - } - IpAddr::V6(ipv6) => { - host_addrs_vec.push(ipv6.to_string()); - } - } - } - - host_addrs_vec - } - - #[cfg(unix)] - #[getter] - fn hosts(&self) -> Vec { - let mut hosts_vec = vec![]; - - let hosts = self.pg_config.get_hosts(); - for host in hosts { - match host { - Host::Tcp(host) => { - hosts_vec.push(host.to_string()); - } - Host::Unix(host) => { - hosts_vec.push(host.display().to_string()); - } - } - } - - hosts_vec - } - - #[cfg(not(unix))] - #[getter] - fn hosts(&self) -> Vec { - let mut hosts_vec = vec![]; - - let hosts = self.pg_config.get_hosts(); - for host in hosts { - match host { - Host::Tcp(host) => { - hosts_vec.push(host.to_string()); - } - _ => unreachable!(), - } - } - - hosts_vec - } - - #[getter] - fn ports(&self) -> Vec<&u16> { - return self.pg_config.get_ports().iter().collect::>(); - } - - #[getter] - fn options(&self) -> Option<&str> { - return self.pg_config.get_options(); - } - async fn __aenter__<'a>(self_: Py) -> PSQLPyResult> { - let (db_client, db_pool) = pyo3::Python::with_gil(|gil| { + let (db_client, db_pool, pg_config) = pyo3::Python::with_gil(|gil| { let self_ = self_.borrow(gil); - (self_.db_client.clone(), self_.db_pool.clone()) + ( + self_.db_client.clone(), + self_.db_pool.clone(), + self_.pg_config.clone(), + ) }); if db_client.is_some() { @@ -247,16 +170,16 @@ impl Connection { } if let Some(db_pool) = db_pool { - let db_connection = tokio_runtime() + let connection = tokio_runtime() .spawn(async move { Ok::(db_pool.get().await?) 
}) .await??; pyo3::Python::with_gil(|gil| { let mut self_ = self_.borrow_mut(gil); - self_.db_client = Some(Arc::new(PSQLPyConnection::PoolConn(PoolConnection { - connection: db_connection, - }))); + self_.db_client = Some(Arc::new(RwLock::new(PSQLPyConnection::PoolConn( + PoolConnection::new(connection, pg_config), + )))); }); return Ok(self_); } @@ -310,7 +233,8 @@ impl Connection { let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).db_client.clone()); if let Some(db_client) = db_client { - let res = db_client.execute(querystring, parameters, prepared).await; + let read_conn_g = db_client.read().await; + let res = read_conn_g.execute(querystring, parameters, prepared).await; return res; } @@ -333,7 +257,8 @@ impl Connection { let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).db_client.clone()); if let Some(db_client) = db_client { - return db_client.batch_execute(&querystring).await; + let read_conn_g = db_client.read().await; + return read_conn_g.batch_execute(&querystring).await; } Err(RustPSQLDriverError::ConnectionClosedError) @@ -359,7 +284,8 @@ impl Connection { let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).db_client.clone()); if let Some(db_client) = db_client { - return db_client + let read_conn_g = db_client.read().await; + return read_conn_g .execute_many(querystring, parameters, prepared) .await; } @@ -385,7 +311,8 @@ impl Connection { let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).db_client.clone()); if let Some(db_client) = db_client { - return db_client.execute(querystring, parameters, prepared).await; + let read_conn_g = db_client.read().await; + return read_conn_g.execute(querystring, parameters, prepared).await; } Err(RustPSQLDriverError::ConnectionClosedError) @@ -415,7 +342,10 @@ impl Connection { let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).db_client.clone()); if let Some(db_client) = db_client { - return db_client.fetch_row(querystring, parameters, prepared).await; + let read_conn_g = db_client.read().await; + return read_conn_g + .fetch_row(querystring, parameters, prepared) + .await; } Err(RustPSQLDriverError::ConnectionClosedError) @@ -442,7 +372,10 @@ impl Connection { let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).db_client.clone()); if let Some(db_client) = db_client { - return db_client.fetch_val(querystring, parameters, prepared).await; + let read_conn_g = db_client.read().await; + return read_conn_g + .fetch_val(querystring, parameters, prepared) + .await; } Err(RustPSQLDriverError::ConnectionClosedError) @@ -465,14 +398,11 @@ impl Connection { ) -> PSQLPyResult { if let Some(db_client) = &self.db_client { return Ok(Transaction::new( - db_client.clone(), + Some(db_client.clone()), self.pg_config.clone(), - false, - false, isolation_level, read_variant, deferrable, - HashSet::new(), )); } @@ -504,7 +434,6 @@ impl Connection { self.pg_config.clone(), querystring, parameters, - "cur_name".into(), fetch_number.unwrap_or(10), scroll, prepared, @@ -576,7 +505,8 @@ impl Connection { )) })?; - let sink = db_client.copy_in(©_qs).await?; + let read_conn_g = db_client.read().await; + let sink = read_conn_g.copy_in(©_qs).await?; let writer = BinaryCopyInWriter::new_empty_buffer(sink, &[]); pin_mut!(writer); writer.as_mut().write_raw_bytes(&mut psql_bytes).await?; diff --git a/src/driver/connection_pool.rs b/src/driver/connection_pool.rs index c66be3da..16e1fe90 100644 --- a/src/driver/connection_pool.rs +++ b/src/driver/connection_pool.rs @@ -6,12 +6,15 @@ use 
deadpool_postgres::{Manager, ManagerConfig, Pool, RecyclingMethod}; use postgres_types::Type; use pyo3::{pyclass, pyfunction, pymethods, Py, PyAny}; use std::sync::Arc; +use tokio::sync::RwLock; use tokio_postgres::Config; -use crate::exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}; +use crate::{ + exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, + options::{ConnRecyclingMethod, LoadBalanceHosts, SslMode, TargetSessionAttrs}, +}; use super::{ - common_options::{ConnRecyclingMethod, LoadBalanceHosts, SslMode, TargetSessionAttrs}, connection::Connection, listener::core::Listener, utils::{build_connection_config, build_manager, build_tls}, @@ -245,9 +248,9 @@ impl ConnectionPool { let connection = self.pool.get().await?; Ok(Connection::new( - Some(Arc::new(PSQLPyConnection::PoolConn(PoolConnection { - connection, - }))), + Some(Arc::new(RwLock::new(PSQLPyConnection::PoolConn( + PoolConnection::new(connection, self.pg_config.clone()), + )))), None, self.pg_config.clone(), )) @@ -422,9 +425,9 @@ impl ConnectionPool { .await??; Ok(Connection::new( - Some(Arc::new(PSQLPyConnection::PoolConn(PoolConnection { - connection, - }))), + Some(Arc::new(RwLock::new(PSQLPyConnection::PoolConn( + PoolConnection::new(connection, pg_config.clone()), + )))), None, pg_config, )) diff --git a/src/driver/connection_pool_builder.rs b/src/driver/connection_pool_builder.rs index 0cd7432b..dcecd761 100644 --- a/src/driver/connection_pool_builder.rs +++ b/src/driver/connection_pool_builder.rs @@ -3,10 +3,12 @@ use std::{net::IpAddr, time::Duration}; use deadpool_postgres::{Manager, ManagerConfig, Pool, RecyclingMethod}; use pyo3::{pyclass, pymethods, Py, Python}; -use crate::exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}; +use crate::{ + exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, + options::{ConnRecyclingMethod, LoadBalanceHosts, SslMode, TargetSessionAttrs}, +}; use super::{ - common_options, connection_pool::ConnectionPool, utils::{build_manager, build_tls}, }; @@ -17,7 +19,7 @@ pub struct ConnectionPoolBuilder { max_db_pool_size: Option, conn_recycling_method: Option, ca_file: Option, - ssl_mode: Option, + ssl_mode: Option, prepare: Option, } @@ -104,7 +106,7 @@ impl ConnectionPoolBuilder { /// Set connection recycling method. fn conn_recycling_method( self_: Py, - conn_recycling_method: super::common_options::ConnRecyclingMethod, + conn_recycling_method: ConnRecyclingMethod, ) -> Py { Python::with_gil(|gil| { let mut self_ = self_.borrow_mut(gil); @@ -171,7 +173,7 @@ impl ConnectionPoolBuilder { /// /// Defaults to `prefer`. #[must_use] - pub fn ssl_mode(self_: Py, ssl_mode: crate::driver::common_options::SslMode) -> Py { + pub fn ssl_mode(self_: Py, ssl_mode: SslMode) -> Py { Python::with_gil(|gil| { let mut self_ = self_.borrow_mut(gil); self_.ssl_mode = Some(ssl_mode); @@ -259,7 +261,7 @@ impl ConnectionPoolBuilder { #[must_use] pub fn target_session_attrs( self_: Py, - target_session_attrs: super::common_options::TargetSessionAttrs, + target_session_attrs: TargetSessionAttrs, ) -> Py { Python::with_gil(|gil| { let mut self_ = self_.borrow_mut(gil); @@ -274,10 +276,7 @@ impl ConnectionPoolBuilder { /// /// Defaults to `disable`. 
#[must_use] - pub fn load_balance_hosts( - self_: Py, - load_balance_hosts: super::common_options::LoadBalanceHosts, - ) -> Py { + pub fn load_balance_hosts(self_: Py, load_balance_hosts: LoadBalanceHosts) -> Py { Python::with_gil(|gil| { let mut self_ = self_.borrow_mut(gil); self_ diff --git a/src/driver/cursor.rs b/src/driver/cursor.rs index 7229c6ee..a12d2bfa 100644 --- a/src/driver/cursor.rs +++ b/src/driver/cursor.rs @@ -1,209 +1,84 @@ -use std::{net::IpAddr, sync::Arc}; +use std::sync::{ + atomic::{AtomicUsize, Ordering}, + Arc, +}; use pyo3::{ exceptions::PyStopAsyncIteration, pyclass, pymethods, Py, PyAny, PyErr, PyObject, Python, }; -use tokio_postgres::{config::Host, Config}; +use tokio::sync::RwLock; +use tokio_postgres::Config; use crate::{ - connection::structs::PSQLPyConnection, + connection::{structs::PSQLPyConnection, traits::Cursor as _}, exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, query_result::PSQLDriverPyQueryResult, runtime::rustdriver_future, }; -/// Additional implementation for the `Object` type. -#[allow(clippy::ref_option)] -trait CursorObjectTrait { - async fn cursor_start( - &self, - cursor_name: &str, - scroll: &Option, - querystring: &str, - prepared: &Option, - parameters: &Option>, - ) -> PSQLPyResult<()>; - - async fn cursor_close(&self, closed: &bool, cursor_name: &str) -> PSQLPyResult<()>; -} - -impl CursorObjectTrait for PSQLPyConnection { - /// Start the cursor. - /// - /// Execute `DECLARE` command with parameters. - /// - /// # Errors - /// May return Err Result if cannot execute querystring. - #[allow(clippy::ref_option)] - async fn cursor_start( - &self, - cursor_name: &str, - scroll: &Option, - querystring: &str, - prepared: &Option, - parameters: &Option>, - ) -> PSQLPyResult<()> { - let mut cursor_init_query = format!("DECLARE {cursor_name}"); - if let Some(scroll) = scroll { - if *scroll { - cursor_init_query.push_str(" SCROLL"); - } else { - cursor_init_query.push_str(" NO SCROLL"); - } - } - - cursor_init_query.push_str(format!(" CURSOR FOR {querystring}").as_str()); - - self.execute(cursor_init_query, parameters.clone(), *prepared) - .await - .map_err(|err| { - RustPSQLDriverError::CursorStartError(format!("Cannot start cursor, error - {err}")) - })?; - - Ok(()) - } - - /// Close the cursor. - /// - /// Execute `CLOSE` command. - /// - /// # Errors - /// May return Err Result if cannot execute querystring. 
- async fn cursor_close(&self, closed: &bool, cursor_name: &str) -> PSQLPyResult<()> { - if *closed { - return Err(RustPSQLDriverError::CursorCloseError( - "Cursor is already closed".into(), - )); - } - - self.execute( - format!("CLOSE {cursor_name}"), - Option::default(), - Some(false), - ) - .await?; +static NEXT_CUR_ID: AtomicUsize = AtomicUsize::new(0); - Ok(()) - } +fn next_cursor_name() -> String { + format!("cur{}", NEXT_CUR_ID.fetch_add(1, Ordering::SeqCst),) } #[pyclass(subclass)] pub struct Cursor { - db_transaction: Option>, - pg_config: Arc, + conn: Option>>, + pub pg_config: Arc, querystring: String, parameters: Option>, - cursor_name: String, + cursor_name: Option, fetch_number: usize, scroll: Option, prepared: Option, - is_started: bool, - closed: bool, } impl Cursor { - #[must_use] pub fn new( - db_transaction: Arc, + conn: Arc>, pg_config: Arc, querystring: String, parameters: Option>, - cursor_name: String, fetch_number: usize, scroll: Option, prepared: Option, ) -> Self { Cursor { - db_transaction: Some(db_transaction), + conn: Some(conn), pg_config, querystring, parameters, - cursor_name, + cursor_name: None, fetch_number, scroll, prepared, - is_started: false, - closed: false, } } -} -#[pymethods] -impl Cursor { - #[getter] - fn conn_dbname(&self) -> Option<&str> { - self.pg_config.get_dbname() - } - - #[getter] - fn user(&self) -> Option<&str> { - self.pg_config.get_user() - } + async fn execute(&self, querystring: &str) -> PSQLPyResult { + let Some(conn) = &self.conn else { + return Err(RustPSQLDriverError::CursorClosedError); + }; + let read_conn_g = conn.read().await; - #[getter] - fn host_addrs(&self) -> Vec { - let mut host_addrs_vec = vec![]; - - let host_addrs = self.pg_config.get_hostaddrs(); - for ip_addr in host_addrs { - match ip_addr { - IpAddr::V4(ipv4) => { - host_addrs_vec.push(ipv4.to_string()); - } - IpAddr::V6(ipv6) => { - host_addrs_vec.push(ipv6.to_string()); - } - } - } - - host_addrs_vec - } - - #[cfg(unix)] - #[getter] - fn hosts(&self) -> Vec { - let mut hosts_vec = vec![]; - - let hosts = self.pg_config.get_hosts(); - for host in hosts { - match host { - Host::Tcp(host) => { - hosts_vec.push(host.to_string()); - } - Host::Unix(host) => { - hosts_vec.push(host.display().to_string()); - } - } - } - - hosts_vec - } - - #[cfg(not(unix))] - #[getter] - fn hosts(&self) -> Vec { - let mut hosts_vec = vec![]; - - let hosts = self.pg_config.get_hosts(); - for host in hosts { - match host { - Host::Tcp(host) => { - hosts_vec.push(host.to_string()); - } - _ => unreachable!(), - } - } - - hosts_vec - } + let result = read_conn_g + .execute(querystring.to_string(), None, Some(false)) + .await + .map_err(|err| { + RustPSQLDriverError::CursorFetchError(format!( + "Cannot fetch data from cursor, error - {err}" + )) + })?; - #[getter] - fn ports(&self) -> Vec<&u16> { - return self.pg_config.get_ports().iter().collect::>(); + Ok(result) } +} +#[pymethods] +impl Cursor { #[getter] - fn cursor_name(&self) -> String { + fn cursor_name(&self) -> Option { return self.cursor_name.clone(); } @@ -232,471 +107,237 @@ impl Cursor { } async fn __aenter__<'a>(slf: Py) -> PSQLPyResult> { - let (db_transaction, cursor_name, scroll, querystring, prepared, parameters) = - Python::with_gil(|gil| { - let self_ = slf.borrow(gil); - ( - self_.db_transaction.clone(), - self_.cursor_name.clone(), - self_.scroll, - self_.querystring.clone(), - self_.prepared, - self_.parameters.clone(), - ) - }); - - if let Some(db_transaction) = db_transaction { - db_transaction - 
.cursor_start(&cursor_name, &scroll, &querystring, &prepared, ¶meters) - .await?; - Python::with_gil(|gil| { - let mut self_ = slf.borrow_mut(gil); - self_.is_started = true; - }); - return Ok(slf); - } - Err(RustPSQLDriverError::CursorClosedError) + let cursor_name = next_cursor_name(); + + let (conn, scroll, querystring, prepared, parameters) = Python::with_gil(|gil| { + let mut self_ = slf.borrow_mut(gil); + self_.cursor_name = Some(cursor_name.clone()); + ( + self_.conn.clone(), + self_.scroll, + self_.querystring.clone(), + self_.prepared, + self_.parameters.clone(), + ) + }); + + let Some(conn) = conn else { + return Err(RustPSQLDriverError::CursorClosedError); + }; + let mut write_conn_g = conn.write().await; + + write_conn_g + .start_cursor( + &cursor_name, + &scroll, + querystring.clone(), + &prepared, + parameters.clone(), + ) + .await?; + + Ok(slf) } #[allow(clippy::needless_pass_by_value)] async fn __aexit__<'a>( - slf: Py, + &mut self, _exception_type: Py, exception: Py, _traceback: Py, ) -> PSQLPyResult<()> { - let (db_transaction, closed, cursor_name, is_exception_none, py_err) = - pyo3::Python::with_gil(|gil| { - let self_ = slf.borrow(gil); - ( - self_.db_transaction.clone(), - self_.closed, - self_.cursor_name.clone(), - exception.is_none(gil), - PyErr::from_value(exception.into_bound(gil)), - ) - }); - - if let Some(db_transaction) = db_transaction { - db_transaction - .cursor_close(&closed, &cursor_name) - .await - .map_err(|err| { - RustPSQLDriverError::CursorCloseError(format!( - "Cannot close the cursor, error - {err}" - )) - })?; - pyo3::Python::with_gil(|gil| { - let mut self_ = slf.borrow_mut(gil); - std::mem::take(&mut self_.db_transaction); - }); - if !is_exception_none { - return Err(RustPSQLDriverError::RustPyError(py_err)); - } - return Ok(()); + self.close().await?; + + let (is_exc_none, py_err) = pyo3::Python::with_gil(|gil| { + ( + exception.is_none(gil), + PyErr::from_value(exception.into_bound(gil)), + ) + }); + + if !is_exc_none { + return Err(RustPSQLDriverError::RustPyError(py_err)); } - Err(RustPSQLDriverError::CursorClosedError) + Ok(()) } - /// Return next result from the SQL statement. - /// - /// Execute FETCH FROM - /// - /// # Errors - /// May return Err Result if can't execute querystring. 
fn __anext__(&self) -> PSQLPyResult> { - let db_transaction = self.db_transaction.clone(); + let conn = self.conn.clone(); let fetch_number = self.fetch_number; - let cursor_name = self.cursor_name.clone(); + let Some(cursor_name) = self.cursor_name.clone() else { + return Err(RustPSQLDriverError::CursorClosedError); + }; + let py_future = Python::with_gil(move |gil| { rustdriver_future(gil, async move { - if let Some(db_transaction) = db_transaction { - let result = db_transaction - .execute( - format!("FETCH {fetch_number} FROM {cursor_name}"), - None, - Some(false), - ) - .await?; - - if result.is_empty() { - return Err(PyStopAsyncIteration::new_err( - "Iteration is over, no more results in cursor", - ) - .into()); - }; - - return Ok(result); - } - Err(RustPSQLDriverError::CursorClosedError) + let Some(conn) = conn else { + return Err(RustPSQLDriverError::CursorClosedError); + }; + + let read_conn_g = conn.read().await; + let result = read_conn_g + .execute( + format!("FETCH {fetch_number} FROM {cursor_name}"), + None, + Some(false), + ) + .await?; + + if result.is_empty() { + return Err(PyStopAsyncIteration::new_err( + "Iteration is over, no more results in cursor", + ) + .into()); + }; + Ok(result) }) }); Ok(Some(py_future?)) } - /// Start the cursor - /// - /// # Errors - /// May return Err Result - /// if cannot execute querystring for cursor declaration. pub async fn start(&mut self) -> PSQLPyResult<()> { - let db_transaction_arc = self.db_transaction.clone(); - - if let Some(db_transaction) = db_transaction_arc { - db_transaction - .cursor_start( - &self.cursor_name, - &self.scroll, - &self.querystring, - &self.prepared, - &self.parameters, - ) - .await?; - - self.is_started = true; + if self.cursor_name.is_some() { return Ok(()); } - Err(RustPSQLDriverError::CursorClosedError) - } + let Some(conn) = &self.conn else { + return Err(RustPSQLDriverError::CursorClosedError); + }; + let mut write_conn_g = conn.write().await; - /// Close the cursor. - /// - /// It executes CLOSE command to close cursor in the transaction. - /// - /// # Errors - /// May return Err Result if cannot execute query. - pub async fn close(&mut self) -> PSQLPyResult<()> { - let db_transaction_arc = self.db_transaction.clone(); + let cursor_name = next_cursor_name(); - if let Some(db_transaction) = db_transaction_arc { - db_transaction - .cursor_close(&self.closed, &self.cursor_name) - .await?; + write_conn_g + .start_cursor( + &cursor_name, + &self.scroll, + self.querystring.clone(), + &self.prepared, + self.parameters.clone(), + ) + .await?; - self.closed = true; - std::mem::take(&mut self.db_transaction); - return Ok(()); - } + self.cursor_name = Some(cursor_name); - Err(RustPSQLDriverError::CursorClosedError) + Ok(()) } - /// Fetch data from cursor. - /// - /// It's possible to specify fetch number. - /// - /// # Errors - /// May return Err Result if cannot execute query. 
- #[pyo3(signature = (fetch_number=None))] - pub async fn fetch<'a>( - slf: Py, - fetch_number: Option, - ) -> PSQLPyResult { - let (db_transaction, inner_fetch_number, cursor_name) = Python::with_gil(|gil| { - let self_ = slf.borrow(gil); - ( - self_.db_transaction.clone(), - self_.fetch_number, - self_.cursor_name.clone(), - ) - }); - - if let Some(db_transaction) = db_transaction { - let fetch_number = match fetch_number { - Some(usize) => usize, - None => inner_fetch_number, + pub async fn close(&mut self) -> PSQLPyResult<()> { + if let Some(cursor_name) = &self.cursor_name { + let Some(conn) = &self.conn else { + return Err(RustPSQLDriverError::CursorClosedError); }; + let mut write_conn_g = conn.write().await; + write_conn_g.close_cursor(&cursor_name).await?; + self.cursor_name = None; + }; - let result = db_transaction - .execute( - format!("FETCH {fetch_number} FROM {cursor_name}"), - None, - Some(false), - ) - .await - .map_err(|err| { - RustPSQLDriverError::CursorFetchError(format!( - "Cannot fetch data from cursor, error - {err}" - )) - })?; - - return Ok(result); - } + self.conn = None; - Err(RustPSQLDriverError::CursorClosedError) + Ok(()) } - /// Fetch row from cursor. - /// - /// Execute FETCH NEXT. - /// - /// # Errors - /// May return Err Result if cannot execute query. - pub async fn fetch_next<'a>(slf: Py) -> PSQLPyResult { - let (db_transaction, cursor_name) = Python::with_gil(|gil| { - let self_ = slf.borrow(gil); - (self_.db_transaction.clone(), self_.cursor_name.clone()) - }); - - if let Some(db_transaction) = db_transaction { - let result = db_transaction - .execute(format!("FETCH NEXT FROM {cursor_name}"), None, Some(false)) - .await - .map_err(|err| { - RustPSQLDriverError::CursorFetchError(format!( - "Cannot fetch data from cursor, error - {err}" - )) - })?; - return Ok(result); - } - - Err(RustPSQLDriverError::CursorClosedError) + #[pyo3(signature = (fetch_number=None))] + pub async fn fetch( + &self, + fetch_number: Option, + ) -> PSQLPyResult { + let Some(cursor_name) = &self.cursor_name else { + return Err(RustPSQLDriverError::CursorClosedError); + }; + self.execute(&format!( + "FETCH {} FROM {}", + fetch_number.unwrap_or(self.fetch_number), + cursor_name, + )) + .await } - /// Fetch previous from cursor. - /// - /// Execute FETCH PRIOR. - /// - /// # Errors - /// May return Err Result if cannot execute query. - pub async fn fetch_prior<'a>(slf: Py) -> PSQLPyResult { - let (db_transaction, cursor_name) = Python::with_gil(|gil| { - let self_ = slf.borrow(gil); - (self_.db_transaction.clone(), self_.cursor_name.clone()) - }); - - if let Some(db_transaction) = db_transaction { - let result = db_transaction - .execute(format!("FETCH PRIOR FROM {cursor_name}"), None, Some(false)) - .await - .map_err(|err| { - RustPSQLDriverError::CursorFetchError(format!( - "Cannot fetch data from cursor, error - {err}" - )) - })?; - return Ok(result); - } - - Err(RustPSQLDriverError::CursorClosedError) + pub async fn fetch_next(&self) -> PSQLPyResult { + let Some(cursor_name) = &self.cursor_name else { + return Err(RustPSQLDriverError::CursorClosedError); + }; + self.execute(&format!("FETCH NEXT FROM {cursor_name}")) + .await } - /// Fetch first row from cursor. - /// - /// Execute FETCH FIRST (same as ABSOLUTE 1) - /// - /// # Errors - /// May return Err Result if cannot execute query. 
- pub async fn fetch_first<'a>(slf: Py) -> PSQLPyResult { - let (db_transaction, cursor_name) = Python::with_gil(|gil| { - let self_ = slf.borrow(gil); - (self_.db_transaction.clone(), self_.cursor_name.clone()) - }); - - if let Some(db_transaction) = db_transaction { - let result = db_transaction - .execute(format!("FETCH FIRST FROM {cursor_name}"), None, Some(false)) - .await - .map_err(|err| { - RustPSQLDriverError::CursorFetchError(format!( - "Cannot fetch data from cursor, error - {err}" - )) - })?; - return Ok(result); - } - - Err(RustPSQLDriverError::CursorClosedError) + pub async fn fetch_prior(&self) -> PSQLPyResult { + let Some(cursor_name) = &self.cursor_name else { + return Err(RustPSQLDriverError::CursorClosedError); + }; + self.execute(&format!("FETCH PRIOR FROM {cursor_name}")) + .await } - /// Fetch last row from cursor. - /// - /// Execute FETCH LAST (same as ABSOLUTE -1) - /// - /// # Errors - /// May return Err Result if cannot execute query. - pub async fn fetch_last<'a>(slf: Py) -> PSQLPyResult { - let (db_transaction, cursor_name) = Python::with_gil(|gil| { - let self_ = slf.borrow(gil); - (self_.db_transaction.clone(), self_.cursor_name.clone()) - }); - - if let Some(db_transaction) = db_transaction { - let result = db_transaction - .execute(format!("FETCH LAST FROM {cursor_name}"), None, Some(false)) - .await - .map_err(|err| { - RustPSQLDriverError::CursorFetchError(format!( - "Cannot fetch data from cursor, error - {err}" - )) - })?; - return Ok(result); - } + pub async fn fetch_first(&self) -> PSQLPyResult { + let Some(cursor_name) = &self.cursor_name else { + return Err(RustPSQLDriverError::CursorClosedError); + }; + self.execute(&format!("FETCH FIRST FROM {cursor_name}")) + .await + } - Err(RustPSQLDriverError::CursorClosedError) + pub async fn fetch_last(&self) -> PSQLPyResult { + let Some(cursor_name) = &self.cursor_name else { + return Err(RustPSQLDriverError::CursorClosedError); + }; + self.execute(&format!("FETCH LAST FROM {cursor_name}")) + .await } - /// Fetch absolute row from cursor. - /// - /// Execute FETCH ABSOLUTE. - /// - /// # Errors - /// May return Err Result if cannot execute query. - pub async fn fetch_absolute<'a>( - slf: Py, + pub async fn fetch_absolute( + &self, absolute_number: i64, ) -> PSQLPyResult { - let (db_transaction, cursor_name) = Python::with_gil(|gil| { - let self_ = slf.borrow(gil); - (self_.db_transaction.clone(), self_.cursor_name.clone()) - }); - - if let Some(db_transaction) = db_transaction { - let result = db_transaction - .execute( - format!("FETCH ABSOLUTE {absolute_number} FROM {cursor_name}"), - None, - Some(false), - ) - .await - .map_err(|err| { - RustPSQLDriverError::CursorFetchError(format!( - "Cannot fetch data from cursor, error - {err}" - )) - })?; - return Ok(result); - } - - Err(RustPSQLDriverError::CursorClosedError) + let Some(cursor_name) = &self.cursor_name else { + return Err(RustPSQLDriverError::CursorClosedError); + }; + self.execute(&format!( + "FETCH ABSOLUTE {absolute_number} FROM {cursor_name}" + )) + .await } - /// Fetch absolute row from cursor. - /// - /// Execute FETCH ABSOLUTE. - /// - /// # Errors - /// May return Err Result if cannot execute query. 
- pub async fn fetch_relative<'a>( - slf: Py, + pub async fn fetch_relative( + &self, relative_number: i64, ) -> PSQLPyResult { - let (db_transaction, cursor_name) = Python::with_gil(|gil| { - let self_ = slf.borrow(gil); - (self_.db_transaction.clone(), self_.cursor_name.clone()) - }); - - if let Some(db_transaction) = db_transaction { - let result = db_transaction - .execute( - format!("FETCH RELATIVE {relative_number} FROM {cursor_name}"), - None, - Some(false), - ) - .await - .map_err(|err| { - RustPSQLDriverError::CursorFetchError(format!( - "Cannot fetch data from cursor, error - {err}" - )) - })?; - return Ok(result); - } - - Err(RustPSQLDriverError::CursorClosedError) + let Some(cursor_name) = &self.cursor_name else { + return Err(RustPSQLDriverError::CursorClosedError); + }; + self.execute(&format!( + "FETCH RELATIVE {relative_number} FROM {cursor_name}" + )) + .await } - /// Fetch forward all from cursor. - /// - /// Execute FORWARD ALL. - /// - /// # Errors - /// May return Err Result if cannot execute query. - pub async fn fetch_forward_all<'a>(slf: Py) -> PSQLPyResult { - let (db_transaction, cursor_name) = Python::with_gil(|gil| { - let self_ = slf.borrow(gil); - (self_.db_transaction.clone(), self_.cursor_name.clone()) - }); - - if let Some(db_transaction) = db_transaction { - let result = db_transaction - .execute( - format!("FETCH FORWARD ALL FROM {cursor_name}"), - None, - Some(false), - ) - .await - .map_err(|err| { - RustPSQLDriverError::CursorFetchError(format!( - "Cannot fetch data from cursor, error - {err}" - )) - })?; - return Ok(result); - } - - Err(RustPSQLDriverError::CursorClosedError) + pub async fn fetch_forward_all(&self) -> PSQLPyResult { + let Some(cursor_name) = &self.cursor_name else { + return Err(RustPSQLDriverError::CursorClosedError); + }; + self.execute(&format!("FETCH FORWARD ALL FROM {cursor_name}")) + .await } - /// Fetch backward from cursor. - /// - /// Execute BACKWARD . - /// - /// # Errors - /// May return Err Result if cannot execute query. - pub async fn fetch_backward<'a>( - slf: Py, + pub async fn fetch_backward( + &self, backward_count: i64, ) -> PSQLPyResult { - let (db_transaction, cursor_name) = Python::with_gil(|gil| { - let self_ = slf.borrow(gil); - (self_.db_transaction.clone(), self_.cursor_name.clone()) - }); - - if let Some(db_transaction) = db_transaction { - let result = db_transaction - .execute( - format!("FETCH BACKWARD {backward_count} FROM {cursor_name}",), - None, - Some(false), - ) - .await - .map_err(|err| { - RustPSQLDriverError::CursorFetchError(format!( - "Cannot fetch data from cursor, error - {err}" - )) - })?; - return Ok(result); - } - - Err(RustPSQLDriverError::CursorClosedError) + let Some(cursor_name) = &self.cursor_name else { + return Err(RustPSQLDriverError::CursorClosedError); + }; + self.execute(&format!( + "FETCH BACKWARD {backward_count} FROM {cursor_name}" + )) + .await } - /// Fetch backward from cursor. - /// - /// Execute BACKWARD . - /// - /// # Errors - /// May return Err Result if cannot execute query. 
- pub async fn fetch_backward_all<'a>(slf: Py) -> PSQLPyResult { - let (db_transaction, cursor_name) = Python::with_gil(|gil| { - let self_ = slf.borrow(gil); - (self_.db_transaction.clone(), self_.cursor_name.clone()) - }); - - if let Some(db_transaction) = db_transaction { - let result = db_transaction - .execute( - format!("FETCH BACKWARD ALL FROM {cursor_name}"), - None, - Some(false), - ) - .await - .map_err(|err| { - RustPSQLDriverError::CursorFetchError(format!( - "Cannot fetch data from cursor, error - {err}" - )) - })?; - return Ok(result); - } - - Err(RustPSQLDriverError::CursorClosedError) + pub async fn fetch_backward_all(&self) -> PSQLPyResult { + let Some(cursor_name) = &self.cursor_name else { + return Err(RustPSQLDriverError::CursorClosedError); + }; + self.execute(&format!("FETCH BACKWARD ALL FROM {cursor_name}")) + .await } } diff --git a/src/driver/listener/core.rs b/src/driver/listener/core.rs index 7837478f..8ae57d22 100644 --- a/src/driver/listener/core.rs +++ b/src/driver/listener/core.rs @@ -17,11 +17,11 @@ use crate::{ traits::Connection as _, }, driver::{ - common_options::SslMode, connection::Connection, utils::{build_tls, is_coroutine_function, ConfiguredTLS}, }, exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, + options::SslMode, runtime::{rustdriver_future, tokio_runtime}, }; @@ -222,9 +222,9 @@ impl Listener { self.receiver = Some(Arc::new(RwLock::new(receiver))); self.connection = Connection::new( - Some(Arc::new(PSQLPyConnection::SingleConnection( - SingleConnection { connection: client }, - ))), + Some(Arc::new(RwLock::new(PSQLPyConnection::SingleConnection( + SingleConnection::new(client, self.pg_config.clone()), + )))), None, self.pg_config.clone(), ); @@ -355,8 +355,9 @@ async fn dispatch_callback( async fn execute_listen( is_listened: &Arc>, listen_query: &Arc>, - client: &Arc, + client: &Arc>, ) -> PSQLPyResult<()> { + let read_conn_g = client.read().await; let mut write_is_listened = is_listened.write().await; if !write_is_listened.eq(&true) { @@ -365,7 +366,7 @@ async fn execute_listen( String::from(read_listen_query.as_str()) }; - client.batch_execute(listen_q.as_str()).await?; + read_conn_g.batch_execute(listen_q.as_str()).await?; } *write_is_listened = true; diff --git a/src/driver/mod.rs b/src/driver/mod.rs index 1cff9f57..ab1b149c 100644 --- a/src/driver/mod.rs +++ b/src/driver/mod.rs @@ -1,4 +1,4 @@ -pub mod common_options; +pub mod common; pub mod connection; pub mod connection_pool; pub mod connection_pool_builder; diff --git a/src/driver/transaction.rs b/src/driver/transaction.rs index 50bbfb74..c779837f 100644 --- a/src/driver/transaction.rs +++ b/src/driver/transaction.rs @@ -1,17 +1,20 @@ +use std::sync::Arc; + use bytes::BytesMut; -use futures_util::{future, pin_mut}; +use futures::{future, pin_mut}; use pyo3::{ buffer::PyBuffer, - prelude::*, - pyclass, - types::{PyList, PyTuple}, + pyclass, pymethods, + types::{PyAnyMethods, PyList, PyTuple}, + Py, PyAny, PyErr, PyResult, }; -use tokio_postgres::{binary_copy::BinaryCopyInWriter, config::Host, Config}; +use tokio::sync::RwLock; +use tokio_postgres::{binary_copy::BinaryCopyInWriter, Config}; use crate::{ connection::{ structs::PSQLPyConnection, - traits::{Connection as _, Transaction as _}, + traits::{CloseTransaction, Connection, StartTransaction as _}, }, exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, format_helpers::quote_ident, @@ -20,136 +23,37 @@ use crate::{ }; use super::cursor::Cursor; -use std::{collections::HashSet, net::IpAddr, sync::Arc}; 
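For orientation, below is a minimal Python-side sketch of the behaviour this `Transaction` rewrite targets, pieced together from the updated `test_transaction.py` earlier in this patch and the `__aenter__`/`__aexit__` implementations that follow. It is not part of the patch itself: the DSN is a placeholder and the flow is illustrative, assuming the public `ConnectionPool`/`Connection`/`Transaction` API shown in the tests.

```python
import asyncio

from psqlpy import ConnectionPool


async def main() -> None:
    # Placeholder DSN, adjust for your environment.
    db_pool = ConnectionPool(dsn="postgres://postgres:postgres@localhost:5432/postgres")
    connection = await db_pool.connection()

    # Explicit control: begin()/commit() now delegate to the connection-level
    # StartTransaction/CloseTransaction traits instead of tracking
    # is_started/is_done flags on the Transaction object itself.
    transaction = connection.transaction()
    await transaction.begin()
    await transaction.execute("SELECT 1")
    await transaction.commit()  # the transaction object is closed after this point

    # Context-manager form: __aenter__ starts the transaction,
    # __aexit__ commits on a clean exit and rolls back (then re-raises)
    # if the block raised an exception.
    async with connection.transaction() as tx:
        await tx.execute("SELECT 1")

    db_pool.close()


asyncio.run(main())
```

Per the test changes above, calling `execute()` before `begin()` no longer raises `TransactionBeginError`, while using the object after `rollback()` now raises `TransactionClosedError`.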
#[pyclass(subclass)] pub struct Transaction { - pub db_client: Option>, - pg_config: Arc, - is_started: bool, - is_done: bool, + pub conn: Option>>, + pub pg_config: Arc, isolation_level: Option, read_variant: Option, deferrable: Option, - - savepoints_map: HashSet, } impl Transaction { - #[allow(clippy::too_many_arguments)] - #[must_use] pub fn new( - db_client: Arc, + conn: Option>>, pg_config: Arc, - is_started: bool, - is_done: bool, isolation_level: Option, read_variant: Option, deferrable: Option, - savepoints_map: HashSet, ) -> Self { Self { - db_client: Some(db_client), + conn, pg_config, - is_started, - is_done, isolation_level, read_variant, deferrable, - savepoints_map, } } - - fn check_is_transaction_ready(&self) -> PSQLPyResult<()> { - if !self.is_started { - return Err(RustPSQLDriverError::TransactionBeginError( - "Transaction is not started, please call begin() on transaction".into(), - )); - } - if self.is_done { - return Err(RustPSQLDriverError::TransactionBeginError( - "Transaction is already committed or rolled back".into(), - )); - } - Ok(()) - } } #[pymethods] impl Transaction { - #[getter] - fn conn_dbname(&self) -> Option<&str> { - self.pg_config.get_dbname() - } - - #[getter] - fn user(&self) -> Option<&str> { - self.pg_config.get_user() - } - - #[getter] - fn host_addrs(&self) -> Vec { - let mut host_addrs_vec = vec![]; - - let host_addrs = self.pg_config.get_hostaddrs(); - for ip_addr in host_addrs { - match ip_addr { - IpAddr::V4(ipv4) => { - host_addrs_vec.push(ipv4.to_string()); - } - IpAddr::V6(ipv6) => { - host_addrs_vec.push(ipv6.to_string()); - } - } - } - - host_addrs_vec - } - - #[cfg(unix)] - #[getter] - fn hosts(&self) -> Vec { - let mut hosts_vec = vec![]; - - let hosts = self.pg_config.get_hosts(); - for host in hosts { - match host { - Host::Tcp(host) => { - hosts_vec.push(host.to_string()); - } - Host::Unix(host) => { - hosts_vec.push(host.display().to_string()); - } - } - } - - hosts_vec - } - - #[cfg(not(unix))] - #[getter] - fn hosts(&self) -> Vec { - let mut hosts_vec = vec![]; - - let hosts = self.pg_config.get_hosts(); - for host in hosts { - match host { - Host::Tcp(host) => { - hosts_vec.push(host.to_string()); - } - _ => unreachable!(), - } - } - - hosts_vec - } - - #[getter] - fn ports(&self) -> Vec<&u16> { - return self.pg_config.get_ports().iter().collect::>(); - } - #[must_use] pub fn __aiter__(self_: Py) -> Py { self_ @@ -160,44 +64,25 @@ impl Transaction { } async fn __aenter__<'a>(self_: Py) -> PSQLPyResult> { - let (is_started, is_done, isolation_level, read_variant, deferrable, db_client) = - pyo3::Python::with_gil(|gil| { - let self_ = self_.borrow(gil); - ( - self_.is_started, - self_.is_done, - self_.isolation_level, - self_.read_variant, - self_.deferrable, - self_.db_client.clone(), - ) - }); - - if is_started { - return Err(RustPSQLDriverError::TransactionBeginError( - "Transaction is already started".into(), - )); - } - - if is_done { - return Err(RustPSQLDriverError::TransactionBeginError( - "Transaction is already committed or rolled back".into(), - )); - } - - if let Some(db_client) = db_client { - db_client - .start(isolation_level, read_variant, deferrable) - .await?; + let (isolation_level, read_variant, deferrable, conn) = pyo3::Python::with_gil(|gil| { + let self_ = self_.borrow(gil); + ( + self_.isolation_level, + self_.read_variant, + self_.deferrable, + self_.conn.clone(), + ) + }); - Python::with_gil(|gil| { - let mut self_ = self_.borrow_mut(gil); - self_.is_started = true; - }); - return Ok(self_); - } + let 
Some(conn) = conn else { + return Err(RustPSQLDriverError::TransactionClosedError); + }; + let mut write_conn_g = conn.write().await; + write_conn_g + .start_transaction(isolation_level, read_variant, deferrable) + .await?; - Err(RustPSQLDriverError::TransactionClosedError) + return Ok(self_); } #[allow(clippy::needless_pass_by_value)] @@ -207,456 +92,251 @@ impl Transaction { exception: Py, _traceback: Py, ) -> PSQLPyResult<()> { - let (is_transaction_ready, is_exception_none, py_err, db_client) = + let (conn, is_exception_none, py_err) = pyo3::Python::with_gil(|gil| { + let self_ = self_.borrow(gil); + ( + self_.conn.clone(), + exception.is_none(gil), + PyErr::from_value(exception.into_bound(gil)), + ) + }); + + let Some(conn) = conn else { + return Err(RustPSQLDriverError::TransactionClosedError); + }; + let mut write_conn_g = conn.write().await; + if is_exception_none { + write_conn_g.commit().await?; pyo3::Python::with_gil(|gil| { - let self_ = self_.borrow(gil); - ( - self_.check_is_transaction_ready(), - exception.is_none(gil), - PyErr::from_value(exception.into_bound(gil)), - self_.db_client.clone(), - ) + let mut self_ = self_.borrow_mut(gil); + self_.conn = None; }); - is_transaction_ready?; - - if let Some(db_client) = db_client { - let exit_result = if is_exception_none { - db_client.commit().await?; - Ok(()) - } else { - db_client.rollback().await?; - Err(RustPSQLDriverError::RustPyError(py_err)) - }; - + Ok(()) + } else { + write_conn_g.rollback().await?; pyo3::Python::with_gil(|gil| { let mut self_ = self_.borrow_mut(gil); - self_.is_done = true; - std::mem::take(&mut self_.db_client); + self_.conn = None; }); - return exit_result; + return Err(RustPSQLDriverError::RustPyError(py_err)); } + } - Err(RustPSQLDriverError::TransactionClosedError) + pub async fn begin(&mut self) -> PSQLPyResult<()> { + let conn = &self.conn; + let Some(conn) = conn else { + return Err(RustPSQLDriverError::TransactionClosedError); + }; + let mut write_conn_g = conn.write().await; + write_conn_g + .start_transaction(self.isolation_level, self.read_variant, self.deferrable) + .await?; + + Ok(()) } - /// Commit the transaction. - /// - /// Execute `COMMIT` command and mark transaction as `done`. - /// - /// # Errors - /// - /// May return Err Result if: - /// 1) Transaction is not started - /// 2) Transaction is done - /// 3) Cannot execute `COMMIT` command pub async fn commit(&mut self) -> PSQLPyResult<()> { - self.check_is_transaction_ready()?; - if let Some(db_client) = &self.db_client { - db_client.commit().await?; - self.is_done = true; - std::mem::take(&mut self.db_client); - return Ok(()); - } + let conn = self.conn.clone(); + let Some(conn) = conn else { + return Err(RustPSQLDriverError::TransactionClosedError); + }; + let mut write_conn_g = conn.write().await; + write_conn_g.commit().await?; - Err(RustPSQLDriverError::TransactionClosedError) + self.conn = None; + + Ok(()) } - /// Execute ROLLBACK command. - /// - /// Run ROLLBACK command and mark the transaction as done. 
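With the struct defined above, `__aenter__` starts the transaction on the shared connection and `__aexit__` either commits (clean exit) or rolls back and re-raises (an exception escaped the block), dropping the connection reference in both cases. From Python that lifecycle looks like the following sketch; the pool, table and values are illustrative:

```python
from psqlpy import ConnectionPool


async def insert_user(db_pool: ConnectionPool) -> None:
    connection = await db_pool.connection()
    async with connection.transaction() as transaction:
        # Committed automatically when the block exits without an error,
        # rolled back (and the exception re-raised) otherwise.
        await transaction.execute(
            "INSERT INTO users (id, username) VALUES ($1, $2)",
            [100, "alice"],
        )
```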
- /// - /// # Errors - /// May return Err Result if: - /// 1) Transaction is not started - /// 2) Transaction is done - /// 3) Can not execute ROLLBACK command pub async fn rollback(&mut self) -> PSQLPyResult<()> { - self.check_is_transaction_ready()?; - if let Some(db_client) = &self.db_client { - db_client.rollback().await?; - self.is_done = true; - std::mem::take(&mut self.db_client); - return Ok(()); - } + let conn = self.conn.clone(); + let Some(conn) = conn else { + return Err(RustPSQLDriverError::TransactionClosedError); + }; + let mut write_conn_g = conn.write().await; + write_conn_g.rollback().await?; - Err(RustPSQLDriverError::TransactionClosedError) + self.conn = None; + + Ok(()) } - /// Execute querystring with parameters. - /// - /// It converts incoming parameters to rust readable - /// and then execute the query with them. - /// - /// # Errors - /// - /// May return Err Result if: - /// 1) Cannot convert python parameters - /// 2) Cannot execute querystring. #[pyo3(signature = (querystring, parameters=None, prepared=None))] pub async fn execute( - self_: Py, + &self, querystring: String, parameters: Option>, prepared: Option, ) -> PSQLPyResult { - let (is_transaction_ready, db_client) = pyo3::Python::with_gil(|gil| { - let self_ = self_.borrow(gil); - (self_.check_is_transaction_ready(), self_.db_client.clone()) - }); - is_transaction_ready?; - if let Some(db_client) = db_client { - return db_client.execute(querystring, parameters, prepared).await; - } + let Some(conn) = &self.conn else { + return Err(RustPSQLDriverError::TransactionClosedError); + }; - Err(RustPSQLDriverError::TransactionClosedError) + let read_conn_g = conn.read().await; + read_conn_g.execute(querystring, parameters, prepared).await } - /// Executes a sequence of SQL statements using the simple query protocol. - /// - /// Statements should be separated by semicolons. - /// If an error occurs, execution of the sequence will stop at that point. - /// This is intended for use when, for example, - /// initializing a database schema. - /// - /// # Errors - /// - /// May return Err Result if: - /// 1) Transaction is closed. - /// 2) Cannot execute querystring. - pub async fn execute_batch(self_: Py, querystring: String) -> PSQLPyResult<()> { - let (is_transaction_ready, db_client) = pyo3::Python::with_gil(|gil| { - let self_ = self_.borrow(gil); - (self_.check_is_transaction_ready(), self_.db_client.clone()) - }); - is_transaction_ready?; - if let Some(db_client) = db_client { - return db_client.batch_execute(&querystring).await; - } - - Err(RustPSQLDriverError::TransactionClosedError) - } - - /// Fetch result from the database. - /// - /// It converts incoming parameters to rust readable - /// and then execute the query with them. - /// - /// # Errors - /// - /// May return Err Result if: - /// 1) Cannot convert python parameters - /// 2) Cannot execute querystring. 
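`commit()` and `rollback()` above clear `self.conn`, so a finished transaction object cannot be reused. Outside the context manager the same lifecycle is driven explicitly with `begin()`; a sketch under the same assumptions as the previous example:

```python
from psqlpy import ConnectionPool


async def rename_user(db_pool: ConnectionPool) -> None:
    connection = await db_pool.connection()
    transaction = connection.transaction()
    await transaction.begin()
    try:
        await transaction.execute(
            "UPDATE users SET username = $1 WHERE id = $2",
            ["bob", 100],
        )
    except Exception:
        await transaction.rollback()
        raise
    else:
        await transaction.commit()
```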
#[pyo3(signature = (querystring, parameters=None, prepared=None))] pub async fn fetch( - self_: Py, + &self, querystring: String, parameters: Option>, prepared: Option, ) -> PSQLPyResult { - let (is_transaction_ready, db_client) = pyo3::Python::with_gil(|gil| { - let self_ = self_.borrow(gil); - (self_.check_is_transaction_ready(), self_.db_client.clone()) - }); - is_transaction_ready?; - if let Some(db_client) = db_client { - return db_client.execute(querystring, parameters, prepared).await; - } + let Some(conn) = &self.conn else { + return Err(RustPSQLDriverError::TransactionClosedError); + }; - Err(RustPSQLDriverError::TransactionClosedError) + let read_conn_g = conn.read().await; + read_conn_g.execute(querystring, parameters, prepared).await } - /// Fetch exaclty single row from query. - /// - /// Method doesn't acquire lock on any structure fields. - /// It prepares and caches querystring in the inner Object object. - /// - /// Then execute the query. - /// - /// # Errors - /// May return Err Result if: - /// 1) Transaction is not started - /// 2) Transaction is done already - /// 3) Can not create/retrieve prepared statement - /// 4) Can not execute statement - /// 5) Query returns more than one row - #[pyo3(signature = (querystring, parameters=None, prepared=None))] - pub async fn fetch_row( - self_: Py, - querystring: String, - parameters: Option>, - prepared: Option, - ) -> PSQLPyResult { - let (is_transaction_ready, db_client) = pyo3::Python::with_gil(|gil| { - let self_ = self_.borrow(gil); - (self_.check_is_transaction_ready(), self_.db_client.clone()) - }); - is_transaction_ready?; - - if let Some(db_client) = db_client { - return db_client.fetch_row(querystring, parameters, prepared).await; - } - - Err(RustPSQLDriverError::TransactionClosedError) - } - /// Execute querystring with parameters and return first value in the first row. - /// - /// It converts incoming parameters to rust readable, - /// executes query with them and returns first row of response. - /// - /// # Errors - /// - /// May return Err Result if: - /// 1) Cannot convert python parameters - /// 2) Cannot execute querystring. - /// 3) Query returns more than one row #[pyo3(signature = (querystring, parameters=None, prepared=None))] pub async fn fetch_val( - self_: Py, + &self, querystring: String, parameters: Option>, prepared: Option, ) -> PSQLPyResult> { - let (is_transaction_ready, db_client) = pyo3::Python::with_gil(|gil| { - let self_ = self_.borrow(gil); - (self_.check_is_transaction_ready(), self_.db_client.clone()) - }); - is_transaction_ready?; - if let Some(db_client) = db_client { - return db_client.fetch_val(querystring, parameters, prepared).await; - } + let Some(conn) = &self.conn else { + return Err(RustPSQLDriverError::TransactionClosedError); + }; + + let read_conn_g = conn.read().await; + read_conn_g + .fetch_val(querystring, parameters, prepared) + .await + } - Err(RustPSQLDriverError::TransactionClosedError) + pub async fn execute_batch(&self, querystring: String) -> PSQLPyResult<()> { + let Some(conn) = &self.conn else { + return Err(RustPSQLDriverError::TransactionClosedError); + }; + + let read_conn_g = conn.read().await; + read_conn_g.batch_execute(&querystring).await } - /// Execute querystring with parameters. - /// - /// It converts incoming parameters to rust readable - /// and then execute the query with them. - /// - /// # Errors - /// - /// May return Err Result if: - /// 1) Cannot convert python parameters - /// 2) Cannot execute querystring. 
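`fetch` mirrors `execute` and returns a full `QueryResult`, `fetch_val` returns just the first value of the first row, and `execute_batch` pushes semicolon-separated statements through the simple query protocol, which is handy for schema setup. A usage sketch; the schema is illustrative:

```python
from psqlpy import ConnectionPool


async def read_and_bootstrap(db_pool: ConnectionPool) -> None:
    connection = await db_pool.connection()
    async with connection.transaction() as transaction:
        rows = await transaction.fetch(
            "SELECT id, username FROM users WHERE id > $1",
            [10],
        )
        user_count = await transaction.fetch_val("SELECT COUNT(*) FROM users")
        await transaction.execute_batch(
            "CREATE TABLE IF NOT EXISTS audit_a (id INT);"
            "CREATE TABLE IF NOT EXISTS audit_b (id INT);",
        )
        print(len(rows.result()), user_count)
```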
+ #[pyo3(signature = (querystring, parameters=None, prepared=None))] pub async fn execute_many( - self_: Py, + &self, querystring: String, parameters: Option>>, prepared: Option, ) -> PSQLPyResult<()> { - let (is_transaction_ready, db_client) = pyo3::Python::with_gil(|gil| { - let self_ = self_.borrow(gil); - (self_.check_is_transaction_ready(), self_.db_client.clone()) - }); - - is_transaction_ready?; - if let Some(db_client) = db_client { - return db_client - .execute_many(querystring, parameters, prepared) - .await; - } - - Err(RustPSQLDriverError::TransactionClosedError) + let Some(conn) = &self.conn else { + return Err(RustPSQLDriverError::TransactionClosedError); + }; + + let read_conn_g = conn.read().await; + read_conn_g + .execute_many(querystring, parameters, prepared) + .await } - /// Start the transaction. - /// - /// Execute `BEGIN` commands and mark transaction as `started`. - /// - /// # Errors - /// - /// May return Err Result if: - /// 1) Transaction is already started. - /// 2) Transaction is done. - /// 3) Cannot execute `BEGIN` command. - pub async fn begin(self_: Py) -> PSQLPyResult<()> { - let (is_started, is_done, isolation_level, read_variant, deferrable, db_client) = - pyo3::Python::with_gil(|gil| { - let self_ = self_.borrow(gil); - ( - self_.is_started, - self_.is_done, - self_.isolation_level, - self_.read_variant, - self_.deferrable, - self_.db_client.clone(), - ) - }); - - if let Some(db_client) = db_client { - if is_started { - return Err(RustPSQLDriverError::TransactionBeginError( - "Transaction is already started".into(), - )); - } - - if is_done { - return Err(RustPSQLDriverError::TransactionBeginError( - "Transaction is already committed or rolled back".into(), - )); - } - db_client - .start(isolation_level, read_variant, deferrable) - .await?; - pyo3::Python::with_gil(|gil| { - let mut self_ = self_.borrow_mut(gil); - self_.is_started = true; - }); - - return Ok(()); - } - - Err(RustPSQLDriverError::TransactionClosedError) + #[pyo3(signature = (querystring, parameters=None, prepared=None))] + pub async fn fetch_row( + &self, + querystring: String, + parameters: Option>, + prepared: Option, + ) -> PSQLPyResult { + let Some(conn) = &self.conn else { + return Err(RustPSQLDriverError::TransactionClosedError); + }; + + let read_conn_g = conn.read().await; + read_conn_g + .fetch_row(querystring, parameters, prepared) + .await } - /// Create new SAVEPOINT. 
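`execute_many` runs the same query once per parameter set and returns nothing, while `fetch_row` expects exactly one row back. A short sketch with illustrative data:

```python
from psqlpy import ConnectionPool


async def bulk_insert(db_pool: ConnectionPool) -> None:
    connection = await db_pool.connection()
    async with connection.transaction() as transaction:
        await transaction.execute_many(
            "INSERT INTO users (id, username) VALUES ($1, $2)",
            [[1, "alice"], [2, "bob"], [3, "carol"]],
        )
        single = await transaction.fetch_row(
            "SELECT username FROM users WHERE id = $1",
            [1],
        )
```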
- /// - /// Execute SAVEPOINT and - /// add it to the transaction `rollback_savepoint` `HashSet` - /// - /// # Errors - /// May return Err Result if: - /// 1) Transaction is not started - /// 2) Transaction is done - /// 3) Specified savepoint name is exists - /// 4) Can not execute SAVEPOINT command - pub async fn create_savepoint(self_: Py, savepoint_name: String) -> PSQLPyResult<()> { - let (is_transaction_ready, is_savepoint_name_exists, db_client) = - pyo3::Python::with_gil(|gil| { - let self_ = self_.borrow(gil); - ( - self_.check_is_transaction_ready(), - self_.savepoints_map.contains(&savepoint_name), - self_.db_client.clone(), - ) - }); - - if let Some(db_client) = db_client { - is_transaction_ready?; - - if is_savepoint_name_exists { - return Err(RustPSQLDriverError::TransactionSavepointError(format!( - "SAVEPOINT name {savepoint_name} is already taken by this transaction", - ))); - } - db_client - .batch_execute(format!("SAVEPOINT {savepoint_name}").as_str()) - .await?; + pub async fn create_savepoint(&mut self, savepoint_name: String) -> PSQLPyResult<()> { + let Some(conn) = &self.conn else { + return Err(RustPSQLDriverError::TransactionClosedError); + }; - pyo3::Python::with_gil(|gil| { - self_.borrow_mut(gil).savepoints_map.insert(savepoint_name); - }); - return Ok(()); - } + let read_conn_g = conn.read().await; + read_conn_g + .batch_execute(format!("SAVEPOINT {savepoint_name}").as_str()) + .await?; - Err(RustPSQLDriverError::TransactionClosedError) + Ok(()) } - /// Execute RELEASE SAVEPOINT. - /// - /// Run RELEASE SAVEPOINT command. - /// - /// # Errors - /// May return Err Result if: - /// 1) Transaction is not started - /// 2) Transaction is done - /// 3) Specified savepoint name doesn't exists - /// 4) Can not execute RELEASE SAVEPOINT command - pub async fn release_savepoint(self_: Py, savepoint_name: String) -> PSQLPyResult<()> { - let (is_transaction_ready, is_savepoint_name_exists, db_client) = - pyo3::Python::with_gil(|gil| { - let self_ = self_.borrow(gil); - ( - self_.check_is_transaction_ready(), - self_.savepoints_map.contains(&savepoint_name), - self_.db_client.clone(), - ) - }); - - if let Some(db_client) = db_client { - is_transaction_ready?; - if !is_savepoint_name_exists { - return Err(RustPSQLDriverError::TransactionSavepointError( - "Don't have rollback with this name".into(), - )); - } - db_client - .batch_execute(format!("RELEASE SAVEPOINT {savepoint_name}").as_str()) - .await?; + pub async fn release_savepoint(&mut self, savepoint_name: String) -> PSQLPyResult<()> { + let Some(conn) = &self.conn else { + return Err(RustPSQLDriverError::TransactionClosedError); + }; - pyo3::Python::with_gil(|gil| { - self_.borrow_mut(gil).savepoints_map.remove(&savepoint_name); - }); - return Ok(()); - } + let read_conn_g = conn.read().await; + read_conn_g + .batch_execute(format!("RELEASE SAVEPOINT {savepoint_name}").as_str()) + .await?; - Err(RustPSQLDriverError::TransactionClosedError) + Ok(()) } - /// ROLLBACK to the specified savepoint - /// - /// Execute ROLLBACK TO SAVEPOINT . 
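The rewritten savepoint helpers no longer track names in a driver-side `HashSet`: re-using a name is no longer rejected by the driver, and releasing or rolling back to an unknown savepoint now surfaces as a PostgreSQL error instead of a `TransactionSavepointError`. Typical usage is unchanged; a sketch with an illustrative savepoint name (`rollback_savepoint` appears just below):

```python
from psqlpy import ConnectionPool


async def partial_rollback(db_pool: ConnectionPool) -> None:
    connection = await db_pool.connection()
    async with connection.transaction() as transaction:
        await transaction.create_savepoint("before_update")
        try:
            await transaction.execute(
                "UPDATE users SET username = $1 WHERE id = $2",
                ["renamed", 1],
            )
        except Exception:
            # Undo only the work done since the savepoint; the outer
            # transaction stays usable.
            await transaction.rollback_savepoint("before_update")
        else:
            await transaction.release_savepoint("before_update")
```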
- /// - /// # Errors - /// May return Err Result if: - /// 1) Transaction is not started - /// 2) Transaction is done - /// 3) Specified savepoint name doesn't exist - /// 4) Can not execute ROLLBACK TO SAVEPOINT command - pub async fn rollback_savepoint(self_: Py, savepoint_name: String) -> PSQLPyResult<()> { - let (is_transaction_ready, is_savepoint_name_exists, db_client) = - pyo3::Python::with_gil(|gil| { - let self_ = self_.borrow(gil); - ( - self_.check_is_transaction_ready(), - self_.savepoints_map.contains(&savepoint_name), - self_.db_client.clone(), - ) - }); - - if let Some(db_client) = db_client { - is_transaction_ready?; - if !is_savepoint_name_exists { - return Err(RustPSQLDriverError::TransactionSavepointError( - "Don't have rollback with this name".into(), - )); - } - db_client - .batch_execute(format!("ROLLBACK TO SAVEPOINT {savepoint_name}").as_str()) - .await?; + pub async fn rollback_savepoint(&mut self, savepoint_name: String) -> PSQLPyResult<()> { + let Some(conn) = &self.conn else { + return Err(RustPSQLDriverError::TransactionClosedError); + }; - pyo3::Python::with_gil(|gil| { - self_.borrow_mut(gil).savepoints_map.remove(&savepoint_name); - }); - return Ok(()); - } + let read_conn_g = conn.read().await; + read_conn_g + .batch_execute(format!("ROLLBACK TO SAVEPOINT {savepoint_name}").as_str()) + .await?; - Err(RustPSQLDriverError::TransactionClosedError) + Ok(()) } - /// Execute querystrings with parameters and return all results. - /// - /// Create pipeline of queries. + + /// Create new cursor object. /// /// # Errors - /// - /// May return Err Result if: - /// 1) Cannot convert python parameters - /// 2) Cannot execute any of querystring. + /// May return Err Result if db_client is None + #[pyo3(signature = ( + querystring, + parameters=None, + fetch_number=None, + scroll=None, + prepared=None, + ))] + pub fn cursor( + &self, + querystring: String, + parameters: Option>, + fetch_number: Option, + scroll: Option, + prepared: Option, + ) -> PSQLPyResult { + let Some(conn) = &self.conn else { + return Err(RustPSQLDriverError::TransactionClosedError); + }; + Ok(Cursor::new( + conn.clone(), + self.pg_config.clone(), + querystring, + parameters, + fetch_number.unwrap_or(10), + scroll, + prepared, + )) + } + #[pyo3(signature = (queries=None, prepared=None))] pub async fn pipeline<'py>( self_: Py, queries: Option>, prepared: Option, ) -> PSQLPyResult> { - let (is_transaction_ready, db_client) = pyo3::Python::with_gil(|gil| { + let db_client = pyo3::Python::with_gil(|gil| { let self_ = self_.borrow(gil); - (self_.check_is_transaction_ready(), self_.db_client.clone()) + self_.conn.clone() }); - is_transaction_ready?; - if let Some(db_client) = db_client { + let conn_read_g = db_client.read().await; let mut futures = vec![]; if let Some(queries) = queries { let gil_result = pyo3::Python::with_gil(|gil| -> PyResult<()> { @@ -672,7 +352,7 @@ impl Transaction { Ok(param) => Some(param.into()), Err(_) => None, }; - futures.push(db_client.execute(querystring, params, prepared)); + futures.push(conn_read_g.execute(querystring, params, prepared)); } Ok(()) }); @@ -691,41 +371,6 @@ impl Transaction { Err(RustPSQLDriverError::TransactionClosedError) } - /// Create new cursor object. 
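`pipeline()` above pushes every query onto the same transaction connection as concurrently polled futures and collects the results into a list, one `QueryResult` per query. Each entry is a `(querystring, parameters)` pair; a sketch with illustrative queries:

```python
from psqlpy import ConnectionPool


async def run_pipeline(db_pool: ConnectionPool) -> None:
    connection = await db_pool.connection()
    async with connection.transaction() as transaction:
        results = await transaction.pipeline(
            queries=[
                ("SELECT username FROM users WHERE id = $1", [100]),
                ("SELECT now()", []),
                ("INSERT INTO users (id, username) VALUES ($1, $2)", [101, "eve"]),
            ],
            prepared=True,
        )
        for query_result in results:
            print(query_result.result())
```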
- /// - /// # Errors - /// May return Err Result if db_client is None - #[pyo3(signature = ( - querystring, - parameters=None, - fetch_number=None, - scroll=None, - prepared=None, - ))] - pub fn cursor( - &self, - querystring: String, - parameters: Option>, - fetch_number: Option, - scroll: Option, - prepared: Option, - ) -> PSQLPyResult { - if let Some(db_client) = &self.db_client { - return Ok(Cursor::new( - db_client.clone(), - self.pg_config.clone(), - querystring, - parameters, - "cur_name".into(), - fetch_number.unwrap_or(10), - scroll, - prepared, - )); - } - - Err(RustPSQLDriverError::TransactionClosedError) - } - /// Perform binary copy to postgres table. /// /// # Errors @@ -740,7 +385,7 @@ impl Transaction { columns: Option>, schema_name: Option, ) -> PSQLPyResult { - let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).db_client.clone()); + let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).conn.clone()); let mut table_name = quote_ident(&table_name); if let Some(schema_name) = schema_name { table_name = format!("{}.{}", quote_ident(&schema_name), table_name); @@ -754,7 +399,7 @@ impl Transaction { let copy_qs = format!("COPY {table_name}{formated_columns} FROM STDIN (FORMAT binary)"); if let Some(db_client) = db_client { - let mut psql_bytes: BytesMut = Python::with_gil(|gil| { + let mut psql_bytes: BytesMut = pyo3::Python::with_gil(|gil| { let possible_py_buffer: Result, PyErr> = source.extract::>(gil); if let Ok(py_buffer) = possible_py_buffer { @@ -773,7 +418,8 @@ impl Transaction { )) })?; - let sink = db_client.copy_in(©_qs).await?; + let read_conn_g = db_client.read().await; + let sink = read_conn_g.copy_in(©_qs).await?; let writer = BinaryCopyInWriter::new_empty_buffer(sink, &[]); pin_mut!(writer); writer.as_mut().write_raw_bytes(&mut psql_bytes).await?; diff --git a/src/driver/utils.rs b/src/driver/utils.rs index 15ca4123..e3c0a1f9 100644 --- a/src/driver/utils.rs +++ b/src/driver/utils.rs @@ -6,9 +6,10 @@ use postgres_openssl::MakeTlsConnector; use pyo3::{types::PyAnyMethods, Py, PyAny, Python}; use tokio_postgres::{Config, NoTls}; -use crate::exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}; - -use super::common_options::{self, LoadBalanceHosts, SslMode, TargetSessionAttrs}; +use crate::{ + exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, + options::{LoadBalanceHosts, SslMode, TargetSessionAttrs}, +}; /// Create new config. 
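`binary_copy_to_table` above wraps `COPY ... FROM STDIN (FORMAT binary)`: it quotes the table and schema identifiers, accepts either raw `bytes` or an object exposing a buffer/`getvalue()` interface such as `io.BytesIO`, and streams the payload through `BinaryCopyInWriter`, returning the number of rows written. A sketch, assuming `binary_payload` already holds a dump produced by `COPY ... TO STDOUT (FORMAT binary)` for a matching table:

```python
from io import BytesIO

from psqlpy import ConnectionPool


async def restore_users(db_pool: ConnectionPool, binary_payload: bytes) -> None:
    connection = await db_pool.connection()
    async with connection.transaction() as transaction:
        rows_copied = await transaction.binary_copy_to_table(
            source=BytesIO(binary_payload),
            table_name="users",
            columns=["id", "username"],
            schema_name="public",
        )
        print(f"rows copied: {rows_copied}")
```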
/// @@ -190,7 +191,7 @@ pub fn build_tls( builder.build(), ))); } else if let Some(ssl_mode) = ssl_mode { - if *ssl_mode == common_options::SslMode::Require { + if *ssl_mode == SslMode::Require { let mut builder = SslConnector::builder(SslMethod::tls())?; builder.set_verify(SslVerifyMode::NONE); return Ok(ConfiguredTLS::TlsConnector(MakeTlsConnector::new( diff --git a/src/lib.rs b/src/lib.rs index 33ec678c..0eaac910 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -39,11 +39,11 @@ fn psqlpy(py: Python<'_>, pymod: &Bound<'_, PyModule>) -> PyResult<()> { pymod.add_class::()?; pymod.add_class::()?; pymod.add_class::()?; - pymod.add_class::()?; - pymod.add_class::()?; - pymod.add_class::()?; - pymod.add_class::()?; - pymod.add_class::()?; + pymod.add_class::()?; + pymod.add_class::()?; + pymod.add_class::()?; + pymod.add_class::()?; + pymod.add_class::()?; pymod.add_class::()?; pymod.add_class::()?; add_module(py, pymod, "extra_types", extra_types_module)?; From e60aad94d8efb4d43522ebb5ac2531ce3b8aa513 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Sat, 17 May 2025 02:58:52 +0200 Subject: [PATCH 56/65] Added python Portal class and logic --- src/connection/impls.rs | 47 ++++++- src/driver/connection.rs | 69 +++++---- src/driver/portal.rs | 282 +++++++++++++++++++++++++------------ src/lib.rs | 2 + src/statement/statement.rs | 6 +- src/transaction/impls.rs | 35 +++++ src/transaction/mod.rs | 2 + src/transaction/structs.rs | 7 + 8 files changed, 337 insertions(+), 113 deletions(-) create mode 100644 src/transaction/impls.rs create mode 100644 src/transaction/mod.rs create mode 100644 src/transaction/structs.rs diff --git a/src/connection/impls.rs b/src/connection/impls.rs index ee6bab4b..84683edb 100644 --- a/src/connection/impls.rs +++ b/src/connection/impls.rs @@ -1,15 +1,22 @@ +use std::sync::{Arc, RwLock}; + use bytes::Buf; use pyo3::{PyAny, Python}; -use tokio_postgres::{CopyInSink, Row, Statement, ToStatement}; +use tokio_postgres::{CopyInSink, Portal as tp_Portal, Row, Statement, ToStatement}; use crate::{ + driver::portal::Portal, exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, options::{IsolationLevel, ReadVariant}, query_result::{PSQLDriverPyQueryResult, PSQLDriverSinglePyQueryResult}, statement::{statement::PsqlpyStatement, statement_builder::StatementBuilder}, + transaction::structs::PSQLPyTransaction, value_converter::to_python::postgres_to_py, }; +use deadpool_postgres::Transaction as dp_Transaction; +use tokio_postgres::Transaction as tp_Transaction; + use super::{ structs::{PSQLPyConnection, PoolConnection, SingleConnection}, traits::{CloseTransaction, Connection, Cursor, StartTransaction, Transaction}, @@ -516,4 +523,42 @@ impl PSQLPyConnection { } } } + + pub async fn transaction(&mut self) -> PSQLPyResult { + match self { + PSQLPyConnection::PoolConn(conn) => { + let transaction = unsafe { + std::mem::transmute::, dp_Transaction<'static>>( + conn.connection.transaction().await?, + ) + }; + Ok(PSQLPyTransaction::PoolTransaction(transaction)) + } + PSQLPyConnection::SingleConnection(conn) => { + let transaction = unsafe { + std::mem::transmute::, tp_Transaction<'static>>( + conn.connection.transaction().await?, + ) + }; + Ok(PSQLPyTransaction::SingleTransaction(transaction)) + } + } + } + + pub async fn portal( + &mut self, + querystring: String, + parameters: Option>, + ) -> PSQLPyResult<(PSQLPyTransaction, tp_Portal)> { + let statement = StatementBuilder::new(querystring, parameters, self, Some(false)) + .build() + .await?; + + let transaction 
= self.transaction().await?; + let inner_portal = transaction + .portal(statement.raw_query(), &statement.params()) + .await?; + + Ok((transaction, inner_portal)) + } } diff --git a/src/driver/connection.rs b/src/driver/connection.rs index 9635a836..fa480386 100644 --- a/src/driver/connection.rs +++ b/src/driver/connection.rs @@ -18,7 +18,9 @@ use crate::{ runtime::tokio_runtime, }; -use super::{connection_pool::connect_pool, cursor::Cursor, transaction::Transaction}; +use super::{ + connection_pool::connect_pool, cursor::Cursor, portal::Portal, transaction::Transaction, +}; /// Make new connection pool. /// @@ -396,17 +398,16 @@ impl Connection { read_variant: Option, deferrable: Option, ) -> PSQLPyResult { - if let Some(db_client) = &self.db_client { - return Ok(Transaction::new( - Some(db_client.clone()), - self.pg_config.clone(), - isolation_level, - read_variant, - deferrable, - )); - } - - Err(RustPSQLDriverError::ConnectionClosedError) + let Some(conn) = &self.db_client else { + return Err(RustPSQLDriverError::ConnectionClosedError); + }; + Ok(Transaction::new( + Some(conn.clone()), + self.pg_config.clone(), + isolation_level, + read_variant, + deferrable, + )) } /// Create new cursor object. @@ -428,19 +429,39 @@ impl Connection { scroll: Option, prepared: Option, ) -> PSQLPyResult { - if let Some(db_client) = &self.db_client { - return Ok(Cursor::new( - db_client.clone(), - self.pg_config.clone(), - querystring, - parameters, - fetch_number.unwrap_or(10), - scroll, - prepared, - )); - } + let Some(conn) = &self.db_client else { + return Err(RustPSQLDriverError::ConnectionClosedError); + }; + + Ok(Cursor::new( + conn.clone(), + self.pg_config.clone(), + querystring, + parameters, + fetch_number.unwrap_or(10), + scroll, + prepared, + )) + } - Err(RustPSQLDriverError::ConnectionClosedError) + #[pyo3(signature = ( + querystring, + parameters=None, + fetch_number=None, + ))] + pub fn portal( + &self, + querystring: String, + parameters: Option>, + fetch_number: Option, + ) -> PSQLPyResult { + println!("{:?}", fetch_number); + Ok(Portal::new( + self.db_client.clone(), + querystring, + parameters, + fetch_number, + )) } #[allow(clippy::needless_pass_by_value)] diff --git a/src/driver/portal.rs b/src/driver/portal.rs index 0c280637..f6b4d755 100644 --- a/src/driver/portal.rs +++ b/src/driver/portal.rs @@ -1,87 +1,195 @@ -// use std::sync::Arc; - -// use pyo3::{pyclass, pymethods, Py, PyObject, Python}; -// use tokio_postgres::Portal as tp_Portal; - -// use crate::{ -// exceptions::rust_errors::PSQLPyResult, query_result::PSQLDriverPyQueryResult, -// runtime::rustdriver_future, -// }; - -// use super::inner_transaction::PsqlpyTransaction; - -// #[pyclass] -// pub struct Portal { -// transaction: Arc, -// inner: tp_Portal, -// array_size: i32, -// } - -// impl Portal { -// pub fn new(transaction: Arc, inner: tp_Portal, array_size: i32) -> Self { -// Self { -// transaction, -// inner, -// array_size, -// } -// } - -// async fn query_portal(&self, size: i32) -> PSQLPyResult { -// let result = self.transaction.query_portal(&self.inner, size).await?; -// Ok(PSQLDriverPyQueryResult::new(result)) -// } -// } - -// #[pymethods] -// impl Portal { -// #[getter] -// fn get_array_size(&self) -> i32 { -// self.array_size -// } - -// #[setter] -// fn set_array_size(&mut self, value: i32) { -// self.array_size = value; -// } - -// fn __aiter__(slf: Py) -> Py { -// slf -// } - -// fn __await__(slf: Py) -> Py { -// slf -// } - -// fn __anext__(&self) -> PSQLPyResult> { -// let transaction = 
self.transaction.clone(); -// let portal = self.inner.clone(); -// let size = self.array_size.clone(); - -// let py_future = Python::with_gil(move |gil| { -// rustdriver_future(gil, async move { -// let result = transaction.query_portal(&portal, size).await?; - -// Ok(PSQLDriverPyQueryResult::new(result)) -// }) -// }); - -// Ok(Some(py_future?)) -// } - -// async fn fetch_one(&self) -> PSQLPyResult { -// self.query_portal(1).await -// } - -// #[pyo3(signature = (size=None))] -// async fn fetch_many(&self, size: Option) -> PSQLPyResult { -// self.query_portal(size.unwrap_or(self.array_size)).await -// } - -// async fn fetch_all(&self) -> PSQLPyResult { -// self.query_portal(-1).await -// } - -// async fn close(&mut self) { -// let _ = Arc::downgrade(&self.transaction); -// } -// } +use std::sync::Arc; + +use pyo3::{ + exceptions::PyStopAsyncIteration, pyclass, pymethods, Py, PyAny, PyErr, PyObject, Python, +}; +use tokio::sync::RwLock; +use tokio_postgres::Portal as tp_Portal; + +use crate::{ + exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, + query_result::PSQLDriverPyQueryResult, + runtime::rustdriver_future, + transaction::structs::PSQLPyTransaction, +}; + +use crate::connection::structs::PSQLPyConnection; + +#[pyclass] +pub struct Portal { + conn: Option>>, + querystring: String, + parameters: Option>, + array_size: i32, + + transaction: Option>, + inner: Option, +} + +impl Portal { + pub fn new( + conn: Option>>, + querystring: String, + parameters: Option>, + array_size: Option, + ) -> Self { + Self { + conn, + transaction: None, + inner: None, + querystring, + parameters, + array_size: array_size.unwrap_or(1), + } + } + + async fn query_portal(&self, size: i32) -> PSQLPyResult { + let Some(transaction) = &self.transaction else { + return Err(RustPSQLDriverError::TransactionClosedError); + }; + let Some(portal) = &self.inner else { + return Err(RustPSQLDriverError::TransactionClosedError); + }; + transaction.query_portal(&portal, size).await + } +} + +impl Drop for Portal { + fn drop(&mut self) { + self.transaction = None; + self.conn = None; + } +} + +#[pymethods] +impl Portal { + #[getter] + fn get_array_size(&self) -> i32 { + self.array_size + } + + #[setter] + fn set_array_size(&mut self, value: i32) { + self.array_size = value; + } + + fn __aiter__(slf: Py) -> Py { + slf + } + + fn __await__(slf: Py) -> Py { + slf + } + + async fn __aenter__<'a>(slf: Py) -> PSQLPyResult> { + let (conn, querystring, parameters) = Python::with_gil(|gil| { + let self_ = slf.borrow(gil); + ( + self_.conn.clone(), + self_.querystring.clone(), + self_.parameters.clone(), + ) + }); + + let Some(conn) = conn else { + return Err(RustPSQLDriverError::CursorClosedError); + }; + let mut write_conn_g = conn.write().await; + + let (txid, inner_portal) = write_conn_g.portal(querystring, parameters).await?; + + Python::with_gil(|gil| { + let mut self_ = slf.borrow_mut(gil); + + self_.transaction = Some(Arc::new(txid)); + self_.inner = Some(inner_portal); + }); + + Ok(slf) + } + + #[allow(clippy::needless_pass_by_value)] + async fn __aexit__<'a>( + &mut self, + _exception_type: Py, + exception: Py, + _traceback: Py, + ) -> PSQLPyResult<()> { + self.close(); + + let (is_exc_none, py_err) = pyo3::Python::with_gil(|gil| { + ( + exception.is_none(gil), + PyErr::from_value(exception.into_bound(gil)), + ) + }); + + if !is_exc_none { + return Err(RustPSQLDriverError::RustPyError(py_err)); + } + Ok(()) + } + + fn __anext__(&self) -> PSQLPyResult> { + let txid = self.transaction.clone(); + let portal = 
self.inner.clone(); + let size = self.array_size.clone(); + + let py_future = Python::with_gil(move |gil| { + rustdriver_future(gil, async move { + let Some(txid) = &txid else { + return Err(RustPSQLDriverError::TransactionClosedError); + }; + let Some(portal) = &portal else { + return Err(RustPSQLDriverError::TransactionClosedError); + }; + let result = txid.query_portal(&portal, size).await?; + + if result.is_empty() { + return Err(PyStopAsyncIteration::new_err( + "Iteration is over, no more results in portal", + ) + .into()); + }; + + Ok(result) + }) + }); + + Ok(Some(py_future?)) + } + + async fn start(&mut self) -> PSQLPyResult<()> { + let Some(conn) = &self.conn else { + return Err(RustPSQLDriverError::ConnectionClosedError); + }; + let mut write_conn_g = conn.write().await; + + let (txid, inner_portal) = write_conn_g + .portal(self.querystring.clone(), self.parameters.clone()) + .await?; + + self.transaction = Some(Arc::new(txid)); + self.inner = Some(inner_portal); + + Ok(()) + } + + async fn fetch_one(&self) -> PSQLPyResult { + self.query_portal(1).await + } + + #[pyo3(signature = (size=None))] + async fn fetch_many(&self, size: Option) -> PSQLPyResult { + self.query_portal(size.unwrap_or(self.array_size)).await + } + + async fn fetch_all(&self) -> PSQLPyResult { + self.query_portal(-1).await + } + + fn close(&mut self) { + self.transaction = None; + self.conn = None; + } +} diff --git a/src/lib.rs b/src/lib.rs index 0eaac910..3229e675 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -9,6 +9,7 @@ pub mod query_result; pub mod row_factories; pub mod runtime; pub mod statement; +pub mod transaction; pub mod value_converter; use common::add_module; @@ -35,6 +36,7 @@ fn psqlpy(py: Python<'_>, pymod: &Bound<'_, PyModule>) -> PyResult<()> { pymod.add_function(wrap_pyfunction!(driver::connection::connect, pymod)?)?; pymod.add_class::()?; pymod.add_class::()?; + pymod.add_class::()?; pymod.add_class::()?; pymod.add_class::()?; pymod.add_class::()?; diff --git a/src/statement/statement.rs b/src/statement/statement.rs index addaae89..fd77eb55 100644 --- a/src/statement/statement.rs +++ b/src/statement/statement.rs @@ -32,7 +32,11 @@ impl PsqlpyStatement { pub fn statement_query(&self) -> PSQLPyResult<&Statement> { match &self.prepared_statement { Some(prepared_stmt) => return Ok(prepared_stmt), - None => return Err(RustPSQLDriverError::ConnectionExecuteError("No".into())), + None => { + return Err(RustPSQLDriverError::ConnectionExecuteError( + "No prepared parameters".into(), + )) + } } } diff --git a/src/transaction/impls.rs b/src/transaction/impls.rs new file mode 100644 index 00000000..a2a7c147 --- /dev/null +++ b/src/transaction/impls.rs @@ -0,0 +1,35 @@ +use crate::{exceptions::rust_errors::PSQLPyResult, query_result::PSQLDriverPyQueryResult}; + +use super::structs::PSQLPyTransaction; +use tokio_postgres::{Portal as tp_Portal, ToStatement}; + +impl PSQLPyTransaction { + pub async fn query_portal( + &self, + portal: &tp_Portal, + size: i32, + ) -> PSQLPyResult { + let portal_res = match self { + PSQLPyTransaction::PoolTransaction(txid) => txid.query_portal(portal, size).await?, + PSQLPyTransaction::SingleTransaction(txid) => txid.query_portal(portal, size).await?, + }; + + Ok(PSQLDriverPyQueryResult::new(portal_res)) + } + + pub async fn portal( + &self, + querystring: &T, + params: &[&(dyn postgres_types::ToSql + Sync)], + ) -> PSQLPyResult + where + T: ?Sized + ToStatement, + { + let portal: tp_Portal = match self { + PSQLPyTransaction::PoolTransaction(conn) => conn.bind(querystring, 
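If the new `Portal` class is exported to Python exactly as registered here (the type stubs are not updated in this patch, so the surface shown below is an assumption), it can be driven either with an explicit lifecycle or as an async context manager and iterator: `Connection.portal()` builds it, `start()` or `__aenter__` binds it inside its own transaction, and batches of `array_size` rows come back as `QueryResult` until the portal is exhausted. A heavily hedged sketch with an illustrative query:

```python
from psqlpy import ConnectionPool


async def stream_rows(db_pool: ConnectionPool) -> None:
    connection = await db_pool.connection()

    # Explicit lifecycle.
    portal = connection.portal("SELECT * FROM users", fetch_number=50)
    await portal.start()
    first = await portal.fetch_one()
    rest = await portal.fetch_all()
    portal.close()

    # Context-manager form; iteration stops once a fetch returns no rows.
    async with connection.portal("SELECT * FROM users", fetch_number=50) as portal:
        async for batch in portal:
            print(len(batch.result()))
```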
params).await?, + PSQLPyTransaction::SingleTransaction(conn) => conn.bind(querystring, params).await?, + }; + + Ok(portal) + } +} diff --git a/src/transaction/mod.rs b/src/transaction/mod.rs new file mode 100644 index 00000000..4bc01193 --- /dev/null +++ b/src/transaction/mod.rs @@ -0,0 +1,2 @@ +pub mod impls; +pub mod structs; diff --git a/src/transaction/structs.rs b/src/transaction/structs.rs new file mode 100644 index 00000000..0f8946cd --- /dev/null +++ b/src/transaction/structs.rs @@ -0,0 +1,7 @@ +use deadpool_postgres::Transaction as dp_Transaction; +use tokio_postgres::Transaction as tp_Transaction; + +pub enum PSQLPyTransaction { + PoolTransaction(dp_Transaction<'static>), + SingleTransaction(tp_Transaction<'static>), +} From 359736d569fa668c0ab3a3fb15bfc9098cc3db15 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Mon, 19 May 2025 21:23:27 +0200 Subject: [PATCH 57/65] Changes to connection, transaction and cursor management --- docs/components/connection.md | 4 +- pyproject.toml | 3 + python/psqlpy/__init__.py | 4 + python/psqlpy/_internal/__init__.pyi | 124 ++-------- python/tests/test_connection.py | 4 +- python/tests/test_cursor.py | 159 ++---------- python/tests/test_listener.py | 6 +- python/tests/test_transaction.py | 4 +- src/connection/impls.rs | 87 +++---- src/connection/structs.rs | 4 + src/connection/traits.rs | 70 +++--- src/driver/common.rs | 165 ++++++++++++- src/driver/connection.rs | 177 ++++---------- src/driver/cursor.rs | 348 ++++++++++----------------- src/driver/mod.rs | 2 +- src/driver/portal.rs | 195 --------------- src/driver/prepared_statement.rs | 63 +++++ src/driver/transaction.rs | 154 ++---------- src/exceptions/rust_errors.rs | 6 +- src/lib.rs | 4 +- src/options.rs | 4 +- src/query_result.rs | 7 +- src/statement/cache.rs | 10 +- src/statement/parameters.rs | 88 ++++++- src/statement/statement.rs | 9 +- src/statement/statement_builder.rs | 26 +- src/value_converter/to_python.rs | 11 + 27 files changed, 692 insertions(+), 1046 deletions(-) delete mode 100644 src/driver/portal.rs create mode 100644 src/driver/prepared_statement.rs diff --git a/docs/components/connection.md b/docs/components/connection.md index 7a470b06..1e82d99a 100644 --- a/docs/components/connection.md +++ b/docs/components/connection.md @@ -200,7 +200,7 @@ async def main() -> None: ) ``` -### Back To Pool +### Close Returns connection to the pool. It's crucial to commit all transactions and close all cursor which are made from the connection. Otherwise, this method won't do anything useful. @@ -213,5 +213,5 @@ There is no need in this method if you use async context manager. async def main() -> None: ... 
connection = await db_pool.connection() - connection.back_to_pool() + connection.close() ``` diff --git a/pyproject.toml b/pyproject.toml index cd2b2f42..c6f3b2e2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -106,3 +106,6 @@ ignore = [ [tool.ruff.pydocstyle] convention = "pep257" ignore-decorators = ["typing.overload"] + +[project.entry-points."sqlalchemy.dialects"] +psqlpy = "psqlpy_sqlalchemy.dialect:PSQLPyAsyncDialect" diff --git a/python/psqlpy/__init__.py b/python/psqlpy/__init__.py index fbaf123d..7c76be33 100644 --- a/python/psqlpy/__init__.py +++ b/python/psqlpy/__init__.py @@ -18,6 +18,9 @@ connect, connect_pool, ) +from psqlpy.exceptions import ( + Error, +) __all__ = [ "ConnRecyclingMethod", @@ -25,6 +28,7 @@ "ConnectionPool", "ConnectionPoolBuilder", "Cursor", + "Error", "IsolationLevel", "KeepaliveConfig", "Listener", diff --git a/python/psqlpy/_internal/__init__.pyi b/python/psqlpy/_internal/__init__.pyi index d900d228..10ac499e 100644 --- a/python/psqlpy/_internal/__init__.pyi +++ b/python/psqlpy/_internal/__init__.pyi @@ -253,11 +253,12 @@ class KeepaliveConfig: """Initialize new config.""" class Cursor: - """Represent opened cursor in a transaction. + """Represent binary cursor in a transaction. It can be used as an asynchronous iterator. """ + array_size: int cursor_name: str querystring: str parameters: ParamsT = None @@ -282,118 +283,27 @@ class Cursor: Execute DECLARE command for the cursor. """ - async def close(self: Self) -> None: + def close(self: Self) -> None: """Close the cursor. Execute CLOSE command for the cursor. """ - async def fetch( - self: Self, - fetch_number: int | None = None, - ) -> QueryResult: - """Fetch next rows. - - By default fetches 10 next rows. - - ### Parameters: - - `fetch_number`: how many rows need to fetch. - - ### Returns: - result as `QueryResult`. - """ - async def fetch_next( - self: Self, - ) -> QueryResult: - """Fetch next row. - - Execute FETCH NEXT - - ### Returns: - result as `QueryResult`. - """ - async def fetch_prior( - self: Self, - ) -> QueryResult: - """Fetch previous row. - - Execute FETCH PRIOR - - ### Returns: - result as `QueryResult`. - """ - async def fetch_first( - self: Self, - ) -> QueryResult: - """Fetch first row. - - Execute FETCH FIRST - - ### Returns: - result as `QueryResult`. - """ - async def fetch_last( - self: Self, - ) -> QueryResult: - """Fetch last row. - - Execute FETCH LAST - - ### Returns: - result as `QueryResult`. - """ - async def fetch_absolute( - self: Self, - absolute_number: int, - ) -> QueryResult: - """Fetch absolute rows. - - Execute FETCH ABSOLUTE . - - ### Returns: - result as `QueryResult`. - """ - async def fetch_relative( - self: Self, - relative_number: int, - ) -> QueryResult: - """Fetch absolute rows. - - Execute FETCH RELATIVE . - - ### Returns: - result as `QueryResult`. - """ - async def fetch_forward_all( - self: Self, - ) -> QueryResult: - """Fetch forward all rows. - - Execute FETCH FORWARD ALL. - - ### Returns: - result as `QueryResult`. - """ - async def fetch_backward( - self: Self, - backward_count: int, - ) -> QueryResult: - """Fetch backward rows. - - Execute FETCH BACKWARD . - - ### Returns: - result as `QueryResult`. - """ - async def fetch_backward_all( + async def execute( self: Self, + querystring: str, + parameters: ParamsT = None, ) -> QueryResult: - """Fetch backward all rows. - - Execute FETCH BACKWARD ALL. + """Start cursor with querystring and parameters. - ### Returns: - result as `QueryResult`. 
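The documentation change above renames `back_to_pool()` to `close()`; the method still only returns the connection to its pool, and it has no useful effect until transactions and cursors created from that connection are finished. A minimal sketch for code that does not use the async context manager:

```python
from psqlpy import ConnectionPool


async def ping(db_pool: ConnectionPool) -> None:
    connection = await db_pool.connection()
    try:
        await connection.execute("SELECT 1")
    finally:
        # Previously connection.back_to_pool().
        connection.close()
```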
+ Method should be used instead of context manager + and `start` method. """ + async def fetchone(self: Self) -> QueryResult: + """Return next one row from the cursor.""" + async def fetchmany(self: Self, size: int | None = None) -> QueryResult: + """Return rows from the cursor.""" + async def fetchall(self: Self, size: int | None = None) -> QueryResult: + """Return all remaining rows from the cursor.""" class Transaction: """Single connection for executing queries. @@ -1098,8 +1008,6 @@ class Connection: querystring: str, parameters: ParamsT = None, fetch_number: int | None = None, - scroll: bool | None = None, - prepared: bool = True, ) -> Cursor: """Create new cursor object. @@ -1136,7 +1044,7 @@ class Connection: ... # do something with this result. ``` """ - def back_to_pool(self: Self) -> None: + def close(self: Self) -> None: """Return connection back to the pool. It necessary to commit all transactions and close all cursor diff --git a/python/tests/test_connection.py b/python/tests/test_connection.py index 7af208f2..4643e0c6 100644 --- a/python/tests/test_connection.py +++ b/python/tests/test_connection.py @@ -145,7 +145,7 @@ async def test_connection_cursor( await transaction.begin() cursor = connection.cursor(querystring=f"SELECT * FROM {table_name}") await cursor.start() - await cursor.close() + cursor.close() await transaction.commit() @@ -172,7 +172,7 @@ async def test_closed_connection_error( ) -> None: """Test exception when connection is closed.""" connection = await psql_pool.connection() - connection.back_to_pool() + connection.close() with pytest.raises(expected_exception=ConnectionClosedError): await connection.execute("SELECT 1") diff --git a/python/tests/test_cursor.py b/python/tests/test_cursor.py index 07fca375..fdd53ba7 100644 --- a/python/tests/test_cursor.py +++ b/python/tests/test_cursor.py @@ -1,177 +1,68 @@ from __future__ import annotations -import math from typing import TYPE_CHECKING import pytest if TYPE_CHECKING: - from psqlpy import ConnectionPool, Cursor, QueryResult, Transaction + from psqlpy import ConnectionPool, Cursor pytestmark = pytest.mark.anyio -async def test_cursor_fetch( +async def test_cursor_fetchmany( number_database_records: int, test_cursor: Cursor, ) -> None: """Test cursor fetch with custom number of fetch.""" - result = await test_cursor.fetch(fetch_number=number_database_records // 2) + result = await test_cursor.fetchmany(size=number_database_records // 2) assert len(result.result()) == number_database_records // 2 -async def test_cursor_fetch_next( +async def test_cursor_fetchone( test_cursor: Cursor, ) -> None: - """Test cursor fetch next.""" - result = await test_cursor.fetch_next() + result = await test_cursor.fetchone() assert len(result.result()) == 1 -async def test_cursor_fetch_prior( - test_cursor: Cursor, -) -> None: - """Test cursor fetch prior.""" - result = await test_cursor.fetch_prior() - assert len(result.result()) == 0 - - await test_cursor.fetch(fetch_number=2) - result = await test_cursor.fetch_prior() - assert len(result.result()) == 1 - - -async def test_cursor_fetch_first( - test_cursor: Cursor, -) -> None: - """Test cursor fetch first.""" - fetch_first = await test_cursor.fetch(fetch_number=1) - - await test_cursor.fetch(fetch_number=3) - - first = await test_cursor.fetch_first() - - assert fetch_first.result() == first.result() - - -async def test_cursor_fetch_last( - test_cursor: Cursor, +async def test_cursor_fetchall( number_database_records: int, -) -> None: - """Test cursor fetch last.""" - all_res = 
await test_cursor.fetch( - fetch_number=number_database_records, - ) - - last_res = await test_cursor.fetch_last() - - assert all_res.result()[-1] == last_res.result()[0] - - -async def test_cursor_fetch_absolute( - test_cursor: Cursor, - number_database_records: int, -) -> None: - """Test cursor fetch Absolute.""" - all_res = await test_cursor.fetch( - fetch_number=number_database_records, - ) - - first_record = await test_cursor.fetch_absolute( - absolute_number=1, - ) - last_record = await test_cursor.fetch_absolute( - absolute_number=-1, - ) - - assert all_res.result()[0] == first_record.result()[0] - assert all_res.result()[-1] == last_record.result()[0] - - -async def test_cursor_fetch_relative( test_cursor: Cursor, - number_database_records: int, ) -> None: - """Test cursor fetch Relative.""" - first_absolute = await test_cursor.fetch_relative( - relative_number=1, - ) - - assert first_absolute.result() - - await test_cursor.fetch( - fetch_number=number_database_records, - ) - records = await test_cursor.fetch_relative( - relative_number=1, - ) + result = await test_cursor.fetchall() + assert len(result.result()) == number_database_records - assert not (records.result()) - -async def test_cursor_fetch_forward_all( - test_cursor: Cursor, +async def test_cursor_start( + psql_pool: ConnectionPool, + table_name: str, number_database_records: int, ) -> None: - """Test that cursor execute FETCH FORWARD ALL correctly.""" - default_fetch_number = 2 - await test_cursor.fetch(fetch_number=default_fetch_number) - - rest_results = await test_cursor.fetch_forward_all() - - assert len(rest_results.result()) == number_database_records - default_fetch_number - - -async def test_cursor_fetch_backward( - test_cursor: Cursor, -) -> None: - """Test cursor backward fetch.""" - must_be_empty = await test_cursor.fetch_backward(backward_count=10) - assert not (must_be_empty.result()) - - default_fetch_number = 5 - await test_cursor.fetch(fetch_number=default_fetch_number) - - expected_number_of_results = 3 - must_not_be_empty = await test_cursor.fetch_backward( - backward_count=expected_number_of_results, + connection = await psql_pool.connection() + cursor = connection.cursor( + querystring=f"SELECT * FROM {table_name}", ) - assert len(must_not_be_empty.result()) == expected_number_of_results - - -async def test_cursor_fetch_backward_all( - test_cursor: Cursor, -) -> None: - """Test cursor `fetch_backward_all`.""" - must_be_empty = await test_cursor.fetch_backward_all() - assert not (must_be_empty.result()) + await cursor.start() + results = await cursor.fetchall() - default_fetch_number = 5 - await test_cursor.fetch(fetch_number=default_fetch_number) + assert len(results.result()) == number_database_records - must_not_be_empty = await test_cursor.fetch_backward_all() - assert len(must_not_be_empty.result()) == default_fetch_number - 1 + cursor.close() -async def test_cursor_as_async_manager( +async def test_cursor_as_async_context_manager( psql_pool: ConnectionPool, table_name: str, number_database_records: int, ) -> None: - """Test cursor async manager and async iterator.""" connection = await psql_pool.connection() - transaction: Transaction - cursor: Cursor - all_results: list[QueryResult] = [] - expected_num_results = math.ceil(number_database_records / 3) - fetch_number = 3 - async with connection.transaction() as transaction, transaction.cursor( + async with connection.cursor( querystring=f"SELECT * FROM {table_name}", - fetch_number=fetch_number, ) as cursor: - async for result in cursor: - 
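The rewritten tests exercise the reduced cursor surface: `connection.cursor()` builds the cursor, `start()` (or the async context manager) opens it, rows are pulled with `fetchone()` / `fetchmany(size=...)` / `fetchall()`, and `close()` is now synchronous. Putting the pieces together, with an illustrative table name:

```python
from psqlpy import ConnectionPool


async def read_in_batches(db_pool: ConnectionPool) -> None:
    connection = await db_pool.connection()

    # Explicit lifecycle, as in test_cursor_start.
    cursor = connection.cursor(querystring="SELECT * FROM users", fetch_number=10)
    await cursor.start()
    first_row = await cursor.fetchone()
    next_batch = await cursor.fetchmany(size=10)
    the_rest = await cursor.fetchall()
    cursor.close()

    # Context-manager form, as in test_cursor_as_async_context_manager.
    async with connection.cursor(querystring="SELECT * FROM users") as cursor:
        everything = await cursor.fetchall()
```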
all_results.append(result) # noqa: PERF401 + results = await cursor.fetchall() - assert len(all_results) == expected_num_results + assert len(results.result()) == number_database_records async def test_cursor_send_underlying_connection_to_pool( @@ -184,7 +75,7 @@ async def test_cursor_send_underlying_connection_to_pool( async with transaction.cursor( querystring=f"SELECT * FROM {table_name}", ) as cursor: - await cursor.fetch(10) + await cursor.fetchmany(10) assert not psql_pool.status().available assert not psql_pool.status().available assert not psql_pool.status().available @@ -200,9 +91,9 @@ async def test_cursor_send_underlying_connection_to_pool_manually( async with connection.transaction() as transaction: cursor = transaction.cursor(querystring=f"SELECT * FROM {table_name}") await cursor.start() - await cursor.fetch(10) + await cursor.fetchmany(10) assert not psql_pool.status().available - await cursor.close() + cursor.close() assert not psql_pool.status().available assert not psql_pool.status().available assert psql_pool.status().available == 1 diff --git a/python/tests/test_listener.py b/python/tests/test_listener.py index a1ff0742..8db12ae9 100644 --- a/python/tests/test_listener.py +++ b/python/tests/test_listener.py @@ -64,7 +64,7 @@ async def notify( connection = await psql_pool.connection() await connection.execute(f"NOTIFY {channel}, '{TEST_PAYLOAD}'") - connection.back_to_pool() + connection.close() async def check_insert_callback( @@ -91,7 +91,7 @@ async def check_insert_callback( assert data_record["payload"] == TEST_PAYLOAD assert data_record["channel"] == TEST_CHANNEL - connection.back_to_pool() + connection.close() async def clear_test_table( @@ -102,7 +102,7 @@ async def clear_test_table( await connection.execute( f"DELETE FROM {listener_table_name}", ) - connection.back_to_pool() + connection.close() @pytest.mark.usefixtures("create_table_for_listener_tests") diff --git a/python/tests/test_transaction.py b/python/tests/test_transaction.py index a186084a..343bb868 100644 --- a/python/tests/test_transaction.py +++ b/python/tests/test_transaction.py @@ -180,7 +180,7 @@ async def test_transaction_rollback( f"INSERT INTO {table_name} VALUES ($1, $2)", parameters=[100, test_name], ) - connection.back_to_pool() + connection.close() assert not (result_from_conn.result()) @@ -358,4 +358,4 @@ async def test_execute_batch_method(psql_pool: ConnectionPool) -> None: await transaction.execute(querystring="SELECT * FROM execute_batch") await transaction.execute(querystring="SELECT * FROM execute_batch2") - connection.back_to_pool() + connection.close() diff --git a/src/connection/impls.rs b/src/connection/impls.rs index 84683edb..931770aa 100644 --- a/src/connection/impls.rs +++ b/src/connection/impls.rs @@ -1,11 +1,8 @@ -use std::sync::{Arc, RwLock}; - use bytes::Buf; use pyo3::{PyAny, Python}; use tokio_postgres::{CopyInSink, Portal as tp_Portal, Row, Statement, ToStatement}; use crate::{ - driver::portal::Portal, exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, options::{IsolationLevel, ReadVariant}, query_result::{PSQLDriverPyQueryResult, PSQLDriverSinglePyQueryResult}, @@ -19,7 +16,7 @@ use tokio_postgres::Transaction as tp_Transaction; use super::{ structs::{PSQLPyConnection, PoolConnection, SingleConnection}, - traits::{CloseTransaction, Connection, Cursor, StartTransaction, Transaction}, + traits::{CloseTransaction, Connection, StartTransaction, Transaction}, }; impl Transaction for T @@ -327,52 +324,42 @@ impl CloseTransaction for PSQLPyConnection { } } -impl 
Cursor for PSQLPyConnection { - async fn start_cursor( - &mut self, - cursor_name: &str, - scroll: &Option, +impl PSQLPyConnection { + pub fn in_transaction(&self) -> bool { + match self { + PSQLPyConnection::PoolConn(conn) => conn.in_transaction, + PSQLPyConnection::SingleConnection(conn) => conn.in_transaction, + } + } + + pub async fn prepare_statement( + &self, querystring: String, - prepared: &Option, parameters: Option>, - ) -> PSQLPyResult<()> { - let cursor_qs = self.build_cursor_start_qs(cursor_name, scroll, &querystring); - self.execute(cursor_qs, parameters, *prepared) + ) -> PSQLPyResult { + StatementBuilder::new(&querystring, ¶meters, self, Some(true)) + .build() .await - .map_err(|err| { - RustPSQLDriverError::CursorStartError(format!("Cannot start cursor due to {err}")) - })?; - match self { - PSQLPyConnection::PoolConn(conn) => conn.in_cursor = true, - PSQLPyConnection::SingleConnection(conn) => conn.in_cursor = true, - } - Ok(()) } - async fn close_cursor(&mut self, cursor_name: &str) -> PSQLPyResult<()> { - self.execute( - format!("CLOSE {cursor_name}"), - Option::default(), - Some(false), - ) - .await?; + pub async fn execute_statement( + &self, + statement: &PsqlpyStatement, + ) -> PSQLPyResult { + let result = self + .query(statement.statement_query()?, &statement.params()) + .await?; - match self { - PSQLPyConnection::PoolConn(conn) => conn.in_cursor = false, - PSQLPyConnection::SingleConnection(conn) => conn.in_cursor = false, - } - Ok(()) + Ok(PSQLDriverPyQueryResult::new(result)) } -} -impl PSQLPyConnection { pub async fn execute( &self, querystring: String, parameters: Option>, prepared: Option, ) -> PSQLPyResult { - let statement = StatementBuilder::new(querystring, parameters, self, prepared) + let statement = StatementBuilder::new(&querystring, ¶meters, self, prepared) .build() .await?; @@ -408,7 +395,7 @@ impl PSQLPyConnection { for vec_of_py_any in parameters { // TODO: Fix multiple qs creation let statement = - StatementBuilder::new(querystring.clone(), Some(vec_of_py_any), self, prepared) + StatementBuilder::new(&querystring, &Some(vec_of_py_any), self, prepared) .build() .await?; @@ -451,7 +438,7 @@ impl PSQLPyConnection { parameters: Option>, prepared: Option, ) -> PSQLPyResult { - let statement = StatementBuilder::new(querystring, parameters, self, prepared) + let statement = StatementBuilder::new(&querystring, ¶meters, self, prepared) .build() .await?; @@ -547,12 +534,26 @@ impl PSQLPyConnection { pub async fn portal( &mut self, - querystring: String, - parameters: Option>, + querystring: Option<&String>, + parameters: &Option>, + statement: Option<&PsqlpyStatement>, ) -> PSQLPyResult<(PSQLPyTransaction, tp_Portal)> { - let statement = StatementBuilder::new(querystring, parameters, self, Some(false)) - .build() - .await?; + let statement = { + match statement { + Some(stmt) => stmt, + None => { + let Some(querystring) = querystring else { + return Err(RustPSQLDriverError::ConnectionExecuteError( + "Can't create cursor without querystring".into(), + )); + }; + + &StatementBuilder::new(querystring, parameters, self, Some(false)) + .build() + .await? 
+ } + } + }; let transaction = self.transaction().await?; let inner_portal = transaction diff --git a/src/connection/structs.rs b/src/connection/structs.rs index ccfd101d..a50d3d69 100644 --- a/src/connection/structs.rs +++ b/src/connection/structs.rs @@ -3,6 +3,7 @@ use std::sync::Arc; use deadpool_postgres::Object; use tokio_postgres::{Client, Config}; +#[derive(Debug)] pub struct PoolConnection { pub connection: Object, pub in_transaction: bool, @@ -20,6 +21,8 @@ impl PoolConnection { } } } + +#[derive(Debug)] pub struct SingleConnection { pub connection: Client, pub in_transaction: bool, @@ -38,6 +41,7 @@ impl SingleConnection { } } +#[derive(Debug)] pub enum PSQLPyConnection { PoolConn(PoolConnection), SingleConnection(SingleConnection), diff --git a/src/connection/traits.rs b/src/connection/traits.rs index 8e868a06..5d8d49ae 100644 --- a/src/connection/traits.rs +++ b/src/connection/traits.rs @@ -102,38 +102,38 @@ pub trait CloseTransaction: StartTransaction { fn rollback(&mut self) -> impl std::future::Future>; } -pub trait Cursor { - fn build_cursor_start_qs( - &self, - cursor_name: &str, - scroll: &Option, - querystring: &str, - ) -> String { - let mut cursor_init_query = format!("DECLARE {cursor_name}"); - if let Some(scroll) = scroll { - if *scroll { - cursor_init_query.push_str(" SCROLL"); - } else { - cursor_init_query.push_str(" NO SCROLL"); - } - } - - cursor_init_query.push_str(format!(" CURSOR FOR {querystring}").as_str()); - - cursor_init_query - } - - fn start_cursor( - &mut self, - cursor_name: &str, - scroll: &Option, - querystring: String, - prepared: &Option, - parameters: Option>, - ) -> impl std::future::Future>; - - fn close_cursor( - &mut self, - cursor_name: &str, - ) -> impl std::future::Future>; -} +// pub trait Cursor { +// fn build_cursor_start_qs( +// &self, +// cursor_name: &str, +// scroll: &Option, +// querystring: &str, +// ) -> String { +// let mut cursor_init_query = format!("DECLARE {cursor_name}"); +// if let Some(scroll) = scroll { +// if *scroll { +// cursor_init_query.push_str(" SCROLL"); +// } else { +// cursor_init_query.push_str(" NO SCROLL"); +// } +// } + +// cursor_init_query.push_str(format!(" CURSOR FOR {querystring}").as_str()); + +// cursor_init_query +// } + +// fn start_cursor( +// &mut self, +// cursor_name: &str, +// scroll: &Option, +// querystring: String, +// prepared: &Option, +// parameters: Option>, +// ) -> impl std::future::Future>; + +// fn close_cursor( +// &mut self, +// cursor_name: &str, +// ) -> impl std::future::Future>; +// } diff --git a/src/driver/common.rs b/src/driver/common.rs index 528fed84..ac447184 100644 --- a/src/driver/common.rs +++ b/src/driver/common.rs @@ -1,10 +1,23 @@ -use pyo3::prelude::*; use tokio_postgres::config::Host; use std::net::IpAddr; use super::{connection::Connection, cursor::Cursor, transaction::Transaction}; +use pyo3::{pymethods, Py, PyAny}; + +use crate::{ + connection::traits::CloseTransaction, + exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, +}; + +use bytes::BytesMut; +use futures_util::pin_mut; +use pyo3::{buffer::PyBuffer, PyErr, Python}; +use tokio_postgres::binary_copy::BinaryCopyInWriter; + +use crate::format_helpers::quote_ident; + macro_rules! impl_config_py_methods { ($name:ident) => { #[pymethods] @@ -92,3 +105,153 @@ macro_rules! impl_config_py_methods { impl_config_py_methods!(Transaction); impl_config_py_methods!(Connection); impl_config_py_methods!(Cursor); +// impl_config_py_methods!(Portal); + +macro_rules! 
impl_is_closed_method {
+    ($name:ident) => {
+        #[pymethods]
+        impl $name {
+            fn is_closed(&self) -> bool {
+                if self.conn.is_some() {
+                    return false;
+                }
+                true
+            }
+        }
+    };
+}
+
+impl_is_closed_method!(Transaction);
+impl_is_closed_method!(Connection);
+impl_is_closed_method!(Cursor);
+
+macro_rules! impl_portal_method {
+    ($name:ident) => {
+        #[pymethods]
+        impl $name {
+            #[pyo3(signature = (querystring, parameters=None, fetch_number=None))]
+            pub fn cursor(
+                &self,
+                querystring: Option<String>,
+                parameters: Option<Py<PyAny>>,
+                fetch_number: Option<i32>,
+            ) -> PSQLPyResult<Cursor> {
+                Ok(Cursor::new(
+                    self.conn.clone(),
+                    querystring,
+                    parameters,
+                    fetch_number,
+                    self.pg_config.clone(),
+                    None,
+                ))
+            }
+        }
+    };
+}
+
+impl_portal_method!(Transaction);
+impl_portal_method!(Connection);
+
+macro_rules! impl_transaction_methods {
+    ($name:ident, $val:expr $(,)?) => {
+        #[pymethods]
+        impl $name {
+            pub async fn commit(&mut self) -> PSQLPyResult<()> {
+                let conn = self.conn.clone();
+                let Some(conn) = conn else {
+                    return Err(RustPSQLDriverError::TransactionClosedError("1".into()));
+                };
+                let mut write_conn_g = conn.write().await;
+                write_conn_g.commit().await?;
+
+                if $val {
+                    self.conn = None;
+                }
+
+                Ok(())
+            }
+
+            pub async fn rollback(&mut self) -> PSQLPyResult<()> {
+                let conn = self.conn.clone();
+                let Some(conn) = conn else {
+                    return Err(RustPSQLDriverError::TransactionClosedError("2".into()));
+                };
+                let mut write_conn_g = conn.write().await;
+                write_conn_g.rollback().await?;
+
+                if $val {
+                    self.conn = None;
+                }
+
+                Ok(())
+            }
+        }
+    };
+}
+
+impl_transaction_methods!(Connection, false);
+impl_transaction_methods!(Transaction, true);
+
+macro_rules! impl_binary_copy_method {
+    ($name:ident) => {
+        #[pymethods]
+        impl $name {
+            #[pyo3(signature = (source, table_name, columns=None, schema_name=None))]
+            pub async fn binary_copy_to_table(
+                self_: pyo3::Py<Self>,
+                source: Py<PyAny>,
+                table_name: String,
+                columns: Option<Vec<String>>,
+                schema_name: Option<String>,
+            ) -> PSQLPyResult<u64> {
+                let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).conn.clone());
+                let mut table_name = quote_ident(&table_name);
+                if let Some(schema_name) = schema_name {
+                    table_name = format!("{}.{}", quote_ident(&schema_name), table_name);
+                }
+
+                let mut formated_columns = String::default();
+                if let Some(columns) = columns {
+                    formated_columns = format!("({})", columns.join(", "));
+                }
+
+                let copy_qs =
+                    format!("COPY {table_name}{formated_columns} FROM STDIN (FORMAT binary)");
+
+                if let Some(db_client) = db_client {
+                    let mut psql_bytes: BytesMut = Python::with_gil(|gil| {
+                        let possible_py_buffer: Result<PyBuffer<u8>, PyErr> =
+                            source.extract::<PyBuffer<u8>>(gil);
+                        if let Ok(py_buffer) = possible_py_buffer {
+                            let vec_buf = py_buffer.to_vec(gil)?;
+                            return Ok(BytesMut::from(vec_buf.as_slice()));
+                        }
+
+                        if let Ok(py_bytes) = source.call_method0(gil, "getvalue") {
+                            if let Ok(bytes) = py_bytes.extract::<Vec<u8>>(gil) {
+                                return Ok(BytesMut::from(bytes.as_slice()));
+                            }
+                        }
+
+                        Err(RustPSQLDriverError::PyToRustValueConversionError(
+                            "source must be bytes or support Buffer protocol".into(),
+                        ))
+                    })?;
+
+                    let read_conn_g = db_client.read().await;
+                    let sink = read_conn_g.copy_in(&copy_qs).await?;
+                    let writer = BinaryCopyInWriter::new_empty_buffer(sink, &[]);
+                    pin_mut!(writer);
+                    writer.as_mut().write_raw_bytes(&mut psql_bytes).await?;
+                    let rows_created = writer.as_mut().finish_empty().await?;
+                    return Ok(rows_created);
+                }
+
+                Ok(0)
+            }
+        }
+    };
+}
+
+impl_binary_copy_method!(Connection);
+impl_binary_copy_method!(Transaction);
diff --git a/src/driver/connection.rs 
b/src/driver/connection.rs index fa480386..3486141d 100644 --- a/src/driver/connection.rs +++ b/src/driver/connection.rs @@ -1,10 +1,8 @@ -use bytes::BytesMut; use deadpool_postgres::Pool; -use futures_util::pin_mut; -use pyo3::{buffer::PyBuffer, pyclass, pyfunction, pymethods, Py, PyAny, PyErr, Python}; +use pyo3::{ffi::PyObject, pyclass, pyfunction, pymethods, Py, PyAny, PyErr}; use std::sync::Arc; use tokio::sync::RwLock; -use tokio_postgres::{binary_copy::BinaryCopyInWriter, Config}; +use tokio_postgres::Config; use crate::{ connection::{ @@ -12,14 +10,13 @@ use crate::{ traits::Connection as _, }, exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, - format_helpers::quote_ident, options::{IsolationLevel, LoadBalanceHosts, ReadVariant, SslMode, TargetSessionAttrs}, query_result::{PSQLDriverPyQueryResult, PSQLDriverSinglePyQueryResult}, runtime::tokio_runtime, }; use super::{ - connection_pool::connect_pool, cursor::Cursor, portal::Portal, transaction::Transaction, + connection_pool::connect_pool, prepared_statement::PreparedStatement, transaction::Transaction, }; /// Make new connection pool. @@ -117,9 +114,9 @@ pub async fn connect( } #[pyclass(subclass)] -#[derive(Clone)] +#[derive(Clone, Debug)] pub struct Connection { - db_client: Option>>, + pub conn: Option>>, db_pool: Option, pub pg_config: Arc, } @@ -127,12 +124,12 @@ pub struct Connection { impl Connection { #[must_use] pub fn new( - db_client: Option>>, + conn: Option>>, db_pool: Option, pg_config: Arc, ) -> Self { Connection { - db_client, + conn, db_pool, pg_config, } @@ -140,7 +137,7 @@ impl Connection { #[must_use] pub fn db_client(&self) -> Option>> { - self.db_client.clone() + self.conn.clone() } #[must_use] @@ -157,11 +154,17 @@ impl Default for Connection { #[pymethods] impl Connection { + async fn in_transaction(&self) -> bool { + let Some(conn) = &self.conn else { return false }; + let read_conn_g = conn.read().await; + read_conn_g.in_transaction() + } + async fn __aenter__<'a>(self_: Py) -> PSQLPyResult> { let (db_client, db_pool, pg_config) = pyo3::Python::with_gil(|gil| { let self_ = self_.borrow(gil); ( - self_.db_client.clone(), + self_.conn.clone(), self_.db_pool.clone(), self_.pg_config.clone(), ) @@ -179,7 +182,7 @@ impl Connection { .await??; pyo3::Python::with_gil(|gil| { let mut self_ = self_.borrow_mut(gil); - self_.db_client = Some(Arc::new(RwLock::new(PSQLPyConnection::PoolConn( + self_.conn = Some(Arc::new(RwLock::new(PSQLPyConnection::PoolConn( PoolConnection::new(connection, pg_config), )))); }); @@ -206,7 +209,7 @@ impl Connection { pyo3::Python::with_gil(|gil| { let mut self_ = self_.borrow_mut(gil); - std::mem::take(&mut self_.db_client); + std::mem::take(&mut self_.conn); std::mem::take(&mut self_.db_pool); if is_exception_none { @@ -232,7 +235,7 @@ impl Connection { parameters: Option>, prepared: Option, ) -> PSQLPyResult { - let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).db_client.clone()); + let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).conn.clone()); if let Some(db_client) = db_client { let read_conn_g = db_client.read().await; @@ -256,7 +259,7 @@ impl Connection { /// 1) Connection is closed. /// 2) Cannot execute querystring. 
pub async fn execute_batch(self_: pyo3::Py, querystring: String) -> PSQLPyResult<()> { - let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).db_client.clone()); + let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).conn.clone()); if let Some(db_client) = db_client { let read_conn_g = db_client.read().await; @@ -282,14 +285,17 @@ impl Connection { querystring: String, parameters: Option>>, prepared: Option, - ) -> PSQLPyResult<()> { - let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).db_client.clone()); + ) -> PSQLPyResult> { + let (db_client, py_none) = + pyo3::Python::with_gil(|gil| (self_.borrow(gil).conn.clone(), gil.None().into_any())); if let Some(db_client) = db_client { let read_conn_g = db_client.read().await; - return read_conn_g + read_conn_g .execute_many(querystring, parameters, prepared) - .await; + .await?; + + return Ok(py_none); } Err(RustPSQLDriverError::ConnectionClosedError) @@ -310,7 +316,7 @@ impl Connection { parameters: Option>, prepared: Option, ) -> PSQLPyResult { - let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).db_client.clone()); + let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).conn.clone()); if let Some(db_client) = db_client { let read_conn_g = db_client.read().await; @@ -341,7 +347,7 @@ impl Connection { parameters: Option>, prepared: Option, ) -> PSQLPyResult { - let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).db_client.clone()); + let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).conn.clone()); if let Some(db_client) = db_client { let read_conn_g = db_client.read().await; @@ -371,7 +377,7 @@ impl Connection { parameters: Option>, prepared: Option, ) -> PSQLPyResult> { - let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).db_client.clone()); + let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).conn.clone()); if let Some(db_client) = db_client { let read_conn_g = db_client.read().await; @@ -398,7 +404,7 @@ impl Connection { read_variant: Option, deferrable: Option, ) -> PSQLPyResult { - let Some(conn) = &self.db_client else { + let Some(conn) = &self.conn else { return Err(RustPSQLDriverError::ConnectionClosedError); }; Ok(Transaction::new( @@ -410,131 +416,38 @@ impl Connection { )) } - /// Create new cursor object. - /// - /// # Errors - /// May return Err Result if db_client is None. 
#[pyo3(signature = ( querystring, parameters=None, - fetch_number=None, - scroll=None, - prepared=None, ))] - pub fn cursor( + pub async fn prepare( &self, querystring: String, - parameters: Option>, - fetch_number: Option, - scroll: Option, - prepared: Option, - ) -> PSQLPyResult { - let Some(conn) = &self.db_client else { + parameters: Option>, + ) -> PSQLPyResult { + let Some(conn) = &self.conn else { return Err(RustPSQLDriverError::ConnectionClosedError); }; - Ok(Cursor::new( - conn.clone(), - self.pg_config.clone(), - querystring, - parameters, - fetch_number.unwrap_or(10), - scroll, - prepared, - )) - } + let read_conn_g = conn.read().await; + let prep_stmt = read_conn_g + .prepare_statement(querystring, parameters) + .await?; - #[pyo3(signature = ( - querystring, - parameters=None, - fetch_number=None, - ))] - pub fn portal( - &self, - querystring: String, - parameters: Option>, - fetch_number: Option, - ) -> PSQLPyResult { - println!("{:?}", fetch_number); - Ok(Portal::new( - self.db_client.clone(), - querystring, - parameters, - fetch_number, + Ok(PreparedStatement::new( + self.conn.clone(), + self.pg_config.clone(), + prep_stmt, )) } #[allow(clippy::needless_pass_by_value)] - pub fn back_to_pool(self_: pyo3::Py) { + pub fn close(self_: pyo3::Py) { pyo3::Python::with_gil(|gil| { let mut connection = self_.borrow_mut(gil); - if connection.db_client.is_some() { - std::mem::take(&mut connection.db_client); + if connection.conn.is_some() { + std::mem::take(&mut connection.conn); } }); } - - /// Perform binary copy to postgres table. - /// - /// # Errors - /// May return Err Result if cannot get bytes, - /// cannot perform request to the database, - /// cannot write bytes to the database. - #[pyo3(signature = ( - source, - table_name, - columns=None, - schema_name=None, - ))] - pub async fn binary_copy_to_table( - self_: pyo3::Py, - source: Py, - table_name: String, - columns: Option>, - schema_name: Option, - ) -> PSQLPyResult { - let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).db_client.clone()); - let mut table_name = quote_ident(&table_name); - if let Some(schema_name) = schema_name { - table_name = format!("{}.{}", quote_ident(&schema_name), table_name); - } - - let mut formated_columns = String::default(); - if let Some(columns) = columns { - formated_columns = format!("({})", columns.join(", ")); - } - - let copy_qs = format!("COPY {table_name}{formated_columns} FROM STDIN (FORMAT binary)"); - - if let Some(db_client) = db_client { - let mut psql_bytes: BytesMut = Python::with_gil(|gil| { - let possible_py_buffer: Result, PyErr> = - source.extract::>(gil); - if let Ok(py_buffer) = possible_py_buffer { - let vec_buf = py_buffer.to_vec(gil)?; - return Ok(BytesMut::from(vec_buf.as_slice())); - } - - if let Ok(py_bytes) = source.call_method0(gil, "getvalue") { - if let Ok(bytes) = py_bytes.extract::>(gil) { - return Ok(BytesMut::from(bytes.as_slice())); - } - } - - Err(RustPSQLDriverError::PyToRustValueConversionError( - "source must be bytes or support Buffer protocol".into(), - )) - })?; - - let read_conn_g = db_client.read().await; - let sink = read_conn_g.copy_in(©_qs).await?; - let writer = BinaryCopyInWriter::new_empty_buffer(sink, &[]); - pin_mut!(writer); - writer.as_mut().write_raw_bytes(&mut psql_bytes).await?; - let rows_created = writer.as_mut().finish_empty().await?; - return Ok(rows_created); - } - - Ok(0) - } } diff --git a/src/driver/cursor.rs b/src/driver/cursor.rs index a12d2bfa..ab40526a 100644 --- a/src/driver/cursor.rs +++ b/src/driver/cursor.rs 
@@ -1,103 +1,88 @@ -use std::sync::{ - atomic::{AtomicUsize, Ordering}, - Arc, -}; +use std::sync::Arc; use pyo3::{ - exceptions::PyStopAsyncIteration, pyclass, pymethods, Py, PyAny, PyErr, PyObject, Python, + exceptions::PyStopAsyncIteration, pyclass, pymethods, types::PyNone, Py, PyAny, PyErr, + PyObject, Python, }; use tokio::sync::RwLock; -use tokio_postgres::Config; +use tokio_postgres::{Config, Portal as tp_Portal}; use crate::{ - connection::{structs::PSQLPyConnection, traits::Cursor as _}, exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, query_result::PSQLDriverPyQueryResult, runtime::rustdriver_future, + statement::statement::PsqlpyStatement, + transaction::structs::PSQLPyTransaction, }; -static NEXT_CUR_ID: AtomicUsize = AtomicUsize::new(0); - -fn next_cursor_name() -> String { - format!("cur{}", NEXT_CUR_ID.fetch_add(1, Ordering::SeqCst),) -} +use crate::connection::structs::PSQLPyConnection; -#[pyclass(subclass)] +#[pyclass] pub struct Cursor { - conn: Option>>, - pub pg_config: Arc, - querystring: String, + pub conn: Option>>, + querystring: Option, parameters: Option>, - cursor_name: Option, - fetch_number: usize, - scroll: Option, - prepared: Option, + array_size: i32, + + statement: Option, + + transaction: Option>, + inner: Option, + + pub pg_config: Arc, } impl Cursor { pub fn new( - conn: Arc>, - pg_config: Arc, - querystring: String, + conn: Option>>, + querystring: Option, parameters: Option>, - fetch_number: usize, - scroll: Option, - prepared: Option, + array_size: Option, + pg_config: Arc, + statement: Option, ) -> Self { - Cursor { - conn: Some(conn), - pg_config, + Self { + conn, + transaction: None, + inner: None, querystring, parameters, - cursor_name: None, - fetch_number, - scroll, - prepared, + array_size: array_size.unwrap_or(1), + pg_config, + statement, } } - async fn execute(&self, querystring: &str) -> PSQLPyResult { - let Some(conn) = &self.conn else { - return Err(RustPSQLDriverError::CursorClosedError); + async fn query_portal(&self, size: i32) -> PSQLPyResult { + let Some(transaction) = &self.transaction else { + return Err(RustPSQLDriverError::TransactionClosedError("3".into())); + }; + let Some(portal) = &self.inner else { + return Err(RustPSQLDriverError::TransactionClosedError("4".into())); }; - let read_conn_g = conn.read().await; - - let result = read_conn_g - .execute(querystring.to_string(), None, Some(false)) - .await - .map_err(|err| { - RustPSQLDriverError::CursorFetchError(format!( - "Cannot fetch data from cursor, error - {err}" - )) - })?; - - Ok(result) + transaction.query_portal(&portal, size).await } } -#[pymethods] -impl Cursor { - #[getter] - fn cursor_name(&self) -> Option { - return self.cursor_name.clone(); - } - - #[getter] - fn querystring(&self) -> String { - return self.querystring.clone(); +impl Drop for Cursor { + fn drop(&mut self) { + self.transaction = None; + self.conn = None; } +} +#[pymethods] +impl Cursor { #[getter] - fn parameters(&self) -> Option> { - return self.parameters.clone(); + fn get_array_size(&self) -> i32 { + self.array_size } - #[getter] - fn prepared(&self) -> Option { - return self.prepared.clone(); + #[setter] + fn set_array_size(&mut self, value: i32) { + self.array_size = value; } - #[must_use] fn __aiter__(slf: Py) -> Py { slf } @@ -107,17 +92,13 @@ impl Cursor { } async fn __aenter__<'a>(slf: Py) -> PSQLPyResult> { - let cursor_name = next_cursor_name(); - - let (conn, scroll, querystring, prepared, parameters) = Python::with_gil(|gil| { - let mut self_ = slf.borrow_mut(gil); - 
self_.cursor_name = Some(cursor_name.clone()); + let (conn, querystring, parameters, statement) = Python::with_gil(|gil| { + let self_ = slf.borrow(gil); ( self_.conn.clone(), - self_.scroll, self_.querystring.clone(), - self_.prepared, self_.parameters.clone(), + self_.statement.clone(), ) }); @@ -126,15 +107,28 @@ impl Cursor { }; let mut write_conn_g = conn.write().await; - write_conn_g - .start_cursor( - &cursor_name, - &scroll, - querystring.clone(), - &prepared, - parameters.clone(), - ) - .await?; + let (txid, inner_portal) = match querystring { + Some(querystring) => { + write_conn_g + .portal(Some(&querystring), ¶meters, None) + .await? + } + None => { + let Some(statement) = statement else { + return Err(RustPSQLDriverError::CursorStartError( + "Cannot start cursor".into(), + )); + }; + write_conn_g.portal(None, &None, Some(&statement)).await? + } + }; + + Python::with_gil(|gil| { + let mut self_ = slf.borrow_mut(gil); + + self_.transaction = Some(Arc::new(txid)); + self_.inner = Some(inner_portal); + }); Ok(slf) } @@ -146,7 +140,7 @@ impl Cursor { exception: Py, _traceback: Py, ) -> PSQLPyResult<()> { - self.close().await?; + self.close(); let (is_exc_none, py_err) = pyo3::Python::with_gil(|gil| { ( @@ -162,33 +156,27 @@ impl Cursor { } fn __anext__(&self) -> PSQLPyResult> { - let conn = self.conn.clone(); - let fetch_number = self.fetch_number; - let Some(cursor_name) = self.cursor_name.clone() else { - return Err(RustPSQLDriverError::CursorClosedError); - }; + let txid = self.transaction.clone(); + let portal = self.inner.clone(); + let size = self.array_size.clone(); let py_future = Python::with_gil(move |gil| { rustdriver_future(gil, async move { - let Some(conn) = conn else { - return Err(RustPSQLDriverError::CursorClosedError); + let Some(txid) = &txid else { + return Err(RustPSQLDriverError::TransactionClosedError("5".into())); }; - - let read_conn_g = conn.read().await; - let result = read_conn_g - .execute( - format!("FETCH {fetch_number} FROM {cursor_name}"), - None, - Some(false), - ) - .await?; + let Some(portal) = &portal else { + return Err(RustPSQLDriverError::TransactionClosedError("6".into())); + }; + let result = txid.query_portal(&portal, size).await?; if result.is_empty() { return Err(PyStopAsyncIteration::new_err( - "Iteration is over, no more results in cursor", + "Iteration is over, no more results in portal", ) .into()); }; + Ok(result) }) }); @@ -196,148 +184,66 @@ impl Cursor { Ok(Some(py_future?)) } - pub async fn start(&mut self) -> PSQLPyResult<()> { - if self.cursor_name.is_some() { - return Ok(()); - } - + async fn start(&mut self) -> PSQLPyResult<()> { let Some(conn) = &self.conn else { - return Err(RustPSQLDriverError::CursorClosedError); + return Err(RustPSQLDriverError::ConnectionClosedError); }; let mut write_conn_g = conn.write().await; - let cursor_name = next_cursor_name(); - - write_conn_g - .start_cursor( - &cursor_name, - &self.scroll, - self.querystring.clone(), - &self.prepared, - self.parameters.clone(), - ) - .await?; - - self.cursor_name = Some(cursor_name); - - Ok(()) - } - - pub async fn close(&mut self) -> PSQLPyResult<()> { - if let Some(cursor_name) = &self.cursor_name { - let Some(conn) = &self.conn else { - return Err(RustPSQLDriverError::CursorClosedError); - }; - let mut write_conn_g = conn.write().await; - write_conn_g.close_cursor(&cursor_name).await?; - self.cursor_name = None; + let (txid, inner_portal) = match &self.querystring { + Some(querystring) => { + write_conn_g + .portal(Some(&querystring), &self.parameters, 
None) + .await? + } + None => { + let Some(statement) = &self.statement else { + return Err(RustPSQLDriverError::CursorStartError( + "Cannot start cursor".into(), + )); + }; + write_conn_g.portal(None, &None, Some(&statement)).await? + } }; - self.conn = None; + self.transaction = Some(Arc::new(txid)); + self.inner = Some(inner_portal); Ok(()) } - #[pyo3(signature = (fetch_number=None))] - pub async fn fetch( - &self, - fetch_number: Option, - ) -> PSQLPyResult { - let Some(cursor_name) = &self.cursor_name else { - return Err(RustPSQLDriverError::CursorClosedError); - }; - self.execute(&format!( - "FETCH {} FROM {}", - fetch_number.unwrap_or(self.fetch_number), - cursor_name, - )) - .await - } - - pub async fn fetch_next(&self) -> PSQLPyResult { - let Some(cursor_name) = &self.cursor_name else { - return Err(RustPSQLDriverError::CursorClosedError); - }; - self.execute(&format!("FETCH NEXT FROM {cursor_name}")) - .await - } - - pub async fn fetch_prior(&self) -> PSQLPyResult { - let Some(cursor_name) = &self.cursor_name else { - return Err(RustPSQLDriverError::CursorClosedError); - }; - self.execute(&format!("FETCH PRIOR FROM {cursor_name}")) - .await - } - - pub async fn fetch_first(&self) -> PSQLPyResult { - let Some(cursor_name) = &self.cursor_name else { - return Err(RustPSQLDriverError::CursorClosedError); - }; - self.execute(&format!("FETCH FIRST FROM {cursor_name}")) - .await + fn close(&mut self) { + self.transaction = None; + self.conn = None; } - pub async fn fetch_last(&self) -> PSQLPyResult { - let Some(cursor_name) = &self.cursor_name else { - return Err(RustPSQLDriverError::CursorClosedError); - }; - self.execute(&format!("FETCH LAST FROM {cursor_name}")) - .await - } + #[pyo3(signature = ( + querystring, + parameters=None, + ))] + async fn execute( + &mut self, + querystring: String, + parameters: Option>, + ) -> PSQLPyResult<()> { + self.querystring = Some(querystring); + self.parameters = parameters; - pub async fn fetch_absolute( - &self, - absolute_number: i64, - ) -> PSQLPyResult { - let Some(cursor_name) = &self.cursor_name else { - return Err(RustPSQLDriverError::CursorClosedError); - }; - self.execute(&format!( - "FETCH ABSOLUTE {absolute_number} FROM {cursor_name}" - )) - .await - } + self.start().await?; - pub async fn fetch_relative( - &self, - relative_number: i64, - ) -> PSQLPyResult { - let Some(cursor_name) = &self.cursor_name else { - return Err(RustPSQLDriverError::CursorClosedError); - }; - self.execute(&format!( - "FETCH RELATIVE {relative_number} FROM {cursor_name}" - )) - .await + Ok(()) } - pub async fn fetch_forward_all(&self) -> PSQLPyResult { - let Some(cursor_name) = &self.cursor_name else { - return Err(RustPSQLDriverError::CursorClosedError); - }; - self.execute(&format!("FETCH FORWARD ALL FROM {cursor_name}")) - .await + async fn fetchone(&self) -> PSQLPyResult { + self.query_portal(1).await } - pub async fn fetch_backward( - &self, - backward_count: i64, - ) -> PSQLPyResult { - let Some(cursor_name) = &self.cursor_name else { - return Err(RustPSQLDriverError::CursorClosedError); - }; - self.execute(&format!( - "FETCH BACKWARD {backward_count} FROM {cursor_name}" - )) - .await + #[pyo3(signature = (size=None))] + async fn fetchmany(&self, size: Option) -> PSQLPyResult { + self.query_portal(size.unwrap_or(self.array_size)).await } - pub async fn fetch_backward_all(&self) -> PSQLPyResult { - let Some(cursor_name) = &self.cursor_name else { - return Err(RustPSQLDriverError::CursorClosedError); - }; - self.execute(&format!("FETCH BACKWARD ALL FROM 
{cursor_name}")) - .await + async fn fetchall(&self) -> PSQLPyResult { + self.query_portal(-1).await } } diff --git a/src/driver/mod.rs b/src/driver/mod.rs index ab1b149c..30fec7c7 100644 --- a/src/driver/mod.rs +++ b/src/driver/mod.rs @@ -4,6 +4,6 @@ pub mod connection_pool; pub mod connection_pool_builder; pub mod cursor; pub mod listener; -pub mod portal; +pub mod prepared_statement; pub mod transaction; pub mod utils; diff --git a/src/driver/portal.rs b/src/driver/portal.rs deleted file mode 100644 index f6b4d755..00000000 --- a/src/driver/portal.rs +++ /dev/null @@ -1,195 +0,0 @@ -use std::sync::Arc; - -use pyo3::{ - exceptions::PyStopAsyncIteration, pyclass, pymethods, Py, PyAny, PyErr, PyObject, Python, -}; -use tokio::sync::RwLock; -use tokio_postgres::Portal as tp_Portal; - -use crate::{ - exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, - query_result::PSQLDriverPyQueryResult, - runtime::rustdriver_future, - transaction::structs::PSQLPyTransaction, -}; - -use crate::connection::structs::PSQLPyConnection; - -#[pyclass] -pub struct Portal { - conn: Option>>, - querystring: String, - parameters: Option>, - array_size: i32, - - transaction: Option>, - inner: Option, -} - -impl Portal { - pub fn new( - conn: Option>>, - querystring: String, - parameters: Option>, - array_size: Option, - ) -> Self { - Self { - conn, - transaction: None, - inner: None, - querystring, - parameters, - array_size: array_size.unwrap_or(1), - } - } - - async fn query_portal(&self, size: i32) -> PSQLPyResult { - let Some(transaction) = &self.transaction else { - return Err(RustPSQLDriverError::TransactionClosedError); - }; - let Some(portal) = &self.inner else { - return Err(RustPSQLDriverError::TransactionClosedError); - }; - transaction.query_portal(&portal, size).await - } -} - -impl Drop for Portal { - fn drop(&mut self) { - self.transaction = None; - self.conn = None; - } -} - -#[pymethods] -impl Portal { - #[getter] - fn get_array_size(&self) -> i32 { - self.array_size - } - - #[setter] - fn set_array_size(&mut self, value: i32) { - self.array_size = value; - } - - fn __aiter__(slf: Py) -> Py { - slf - } - - fn __await__(slf: Py) -> Py { - slf - } - - async fn __aenter__<'a>(slf: Py) -> PSQLPyResult> { - let (conn, querystring, parameters) = Python::with_gil(|gil| { - let self_ = slf.borrow(gil); - ( - self_.conn.clone(), - self_.querystring.clone(), - self_.parameters.clone(), - ) - }); - - let Some(conn) = conn else { - return Err(RustPSQLDriverError::CursorClosedError); - }; - let mut write_conn_g = conn.write().await; - - let (txid, inner_portal) = write_conn_g.portal(querystring, parameters).await?; - - Python::with_gil(|gil| { - let mut self_ = slf.borrow_mut(gil); - - self_.transaction = Some(Arc::new(txid)); - self_.inner = Some(inner_portal); - }); - - Ok(slf) - } - - #[allow(clippy::needless_pass_by_value)] - async fn __aexit__<'a>( - &mut self, - _exception_type: Py, - exception: Py, - _traceback: Py, - ) -> PSQLPyResult<()> { - self.close(); - - let (is_exc_none, py_err) = pyo3::Python::with_gil(|gil| { - ( - exception.is_none(gil), - PyErr::from_value(exception.into_bound(gil)), - ) - }); - - if !is_exc_none { - return Err(RustPSQLDriverError::RustPyError(py_err)); - } - Ok(()) - } - - fn __anext__(&self) -> PSQLPyResult> { - let txid = self.transaction.clone(); - let portal = self.inner.clone(); - let size = self.array_size.clone(); - - let py_future = Python::with_gil(move |gil| { - rustdriver_future(gil, async move { - let Some(txid) = &txid else { - return 
Err(RustPSQLDriverError::TransactionClosedError); - }; - let Some(portal) = &portal else { - return Err(RustPSQLDriverError::TransactionClosedError); - }; - let result = txid.query_portal(&portal, size).await?; - - if result.is_empty() { - return Err(PyStopAsyncIteration::new_err( - "Iteration is over, no more results in portal", - ) - .into()); - }; - - Ok(result) - }) - }); - - Ok(Some(py_future?)) - } - - async fn start(&mut self) -> PSQLPyResult<()> { - let Some(conn) = &self.conn else { - return Err(RustPSQLDriverError::ConnectionClosedError); - }; - let mut write_conn_g = conn.write().await; - - let (txid, inner_portal) = write_conn_g - .portal(self.querystring.clone(), self.parameters.clone()) - .await?; - - self.transaction = Some(Arc::new(txid)); - self.inner = Some(inner_portal); - - Ok(()) - } - - async fn fetch_one(&self) -> PSQLPyResult { - self.query_portal(1).await - } - - #[pyo3(signature = (size=None))] - async fn fetch_many(&self, size: Option) -> PSQLPyResult { - self.query_portal(size.unwrap_or(self.array_size)).await - } - - async fn fetch_all(&self) -> PSQLPyResult { - self.query_portal(-1).await - } - - fn close(&mut self) { - self.transaction = None; - self.conn = None; - } -} diff --git a/src/driver/prepared_statement.rs b/src/driver/prepared_statement.rs new file mode 100644 index 00000000..1880449c --- /dev/null +++ b/src/driver/prepared_statement.rs @@ -0,0 +1,63 @@ +use std::sync::Arc; + +use pyo3::{pyclass, pymethods}; +use tokio::sync::RwLock; +use tokio_postgres::Config; + +use crate::{ + connection::structs::PSQLPyConnection, + exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, + query_result::PSQLDriverPyQueryResult, + statement::{parameters::Column, statement::PsqlpyStatement}, +}; + +use super::cursor::Cursor; + +#[pyclass(subclass)] +#[derive(Debug)] +pub struct PreparedStatement { + pub conn: Option>>, + pub pg_config: Arc, + statement: PsqlpyStatement, +} + +impl PreparedStatement { + pub fn new( + conn: Option>>, + pg_config: Arc, + statement: PsqlpyStatement, + ) -> Self { + Self { + conn, + pg_config, + statement, + } + } +} + +#[pymethods] +impl PreparedStatement { + async fn execute(&self) -> PSQLPyResult { + let Some(conn) = &self.conn else { + return Err(RustPSQLDriverError::TransactionClosedError("12".into())); + }; + + let read_conn_g = conn.read().await; + read_conn_g.execute_statement(&self.statement).await + } + + fn cursor(&self) -> PSQLPyResult { + Ok(Cursor::new( + self.conn.clone(), + None, + None, + None, + self.pg_config.clone(), + Some(self.statement.clone()), + )) + } + + fn columns(&self) -> Vec { + self.statement.columns().clone() + } +} diff --git a/src/driver/transaction.rs b/src/driver/transaction.rs index c779837f..81845d40 100644 --- a/src/driver/transaction.rs +++ b/src/driver/transaction.rs @@ -1,15 +1,13 @@ use std::sync::Arc; -use bytes::BytesMut; -use futures::{future, pin_mut}; +use futures::future; use pyo3::{ - buffer::PyBuffer, pyclass, pymethods, types::{PyAnyMethods, PyList, PyTuple}, Py, PyAny, PyErr, PyResult, }; use tokio::sync::RwLock; -use tokio_postgres::{binary_copy::BinaryCopyInWriter, Config}; +use tokio_postgres::Config; use crate::{ connection::{ @@ -17,14 +15,12 @@ use crate::{ traits::{CloseTransaction, Connection, StartTransaction as _}, }, exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}, - format_helpers::quote_ident, options::{IsolationLevel, ReadVariant}, query_result::{PSQLDriverPyQueryResult, PSQLDriverSinglePyQueryResult}, }; -use super::cursor::Cursor; - 
#[pyclass(subclass)] +#[derive(Debug)] pub struct Transaction { pub conn: Option>>, pub pg_config: Arc, @@ -75,7 +71,7 @@ impl Transaction { }); let Some(conn) = conn else { - return Err(RustPSQLDriverError::TransactionClosedError); + return Err(RustPSQLDriverError::TransactionClosedError("7".into())); }; let mut write_conn_g = conn.write().await; write_conn_g @@ -102,7 +98,7 @@ impl Transaction { }); let Some(conn) = conn else { - return Err(RustPSQLDriverError::TransactionClosedError); + return Err(RustPSQLDriverError::TransactionClosedError("8".into())); }; let mut write_conn_g = conn.write().await; if is_exception_none { @@ -125,7 +121,7 @@ impl Transaction { pub async fn begin(&mut self) -> PSQLPyResult<()> { let conn = &self.conn; let Some(conn) = conn else { - return Err(RustPSQLDriverError::TransactionClosedError); + return Err(RustPSQLDriverError::TransactionClosedError("9".into())); }; let mut write_conn_g = conn.write().await; write_conn_g @@ -135,32 +131,6 @@ impl Transaction { Ok(()) } - pub async fn commit(&mut self) -> PSQLPyResult<()> { - let conn = self.conn.clone(); - let Some(conn) = conn else { - return Err(RustPSQLDriverError::TransactionClosedError); - }; - let mut write_conn_g = conn.write().await; - write_conn_g.commit().await?; - - self.conn = None; - - Ok(()) - } - - pub async fn rollback(&mut self) -> PSQLPyResult<()> { - let conn = self.conn.clone(); - let Some(conn) = conn else { - return Err(RustPSQLDriverError::TransactionClosedError); - }; - let mut write_conn_g = conn.write().await; - write_conn_g.rollback().await?; - - self.conn = None; - - Ok(()) - } - #[pyo3(signature = (querystring, parameters=None, prepared=None))] pub async fn execute( &self, @@ -169,7 +139,7 @@ impl Transaction { prepared: Option, ) -> PSQLPyResult { let Some(conn) = &self.conn else { - return Err(RustPSQLDriverError::TransactionClosedError); + return Err(RustPSQLDriverError::TransactionClosedError("10".into())); }; let read_conn_g = conn.read().await; @@ -184,7 +154,7 @@ impl Transaction { prepared: Option, ) -> PSQLPyResult { let Some(conn) = &self.conn else { - return Err(RustPSQLDriverError::TransactionClosedError); + return Err(RustPSQLDriverError::TransactionClosedError("11".into())); }; let read_conn_g = conn.read().await; @@ -199,7 +169,7 @@ impl Transaction { prepared: Option, ) -> PSQLPyResult> { let Some(conn) = &self.conn else { - return Err(RustPSQLDriverError::TransactionClosedError); + return Err(RustPSQLDriverError::TransactionClosedError("12".into())); }; let read_conn_g = conn.read().await; @@ -210,7 +180,7 @@ impl Transaction { pub async fn execute_batch(&self, querystring: String) -> PSQLPyResult<()> { let Some(conn) = &self.conn else { - return Err(RustPSQLDriverError::TransactionClosedError); + return Err(RustPSQLDriverError::TransactionClosedError("13".into())); }; let read_conn_g = conn.read().await; @@ -225,7 +195,7 @@ impl Transaction { prepared: Option, ) -> PSQLPyResult<()> { let Some(conn) = &self.conn else { - return Err(RustPSQLDriverError::TransactionClosedError); + return Err(RustPSQLDriverError::TransactionClosedError("14".into())); }; let read_conn_g = conn.read().await; @@ -242,7 +212,7 @@ impl Transaction { prepared: Option, ) -> PSQLPyResult { let Some(conn) = &self.conn else { - return Err(RustPSQLDriverError::TransactionClosedError); + return Err(RustPSQLDriverError::TransactionClosedError("15".into())); }; let read_conn_g = conn.read().await; @@ -253,7 +223,7 @@ impl Transaction { pub async fn create_savepoint(&mut self, savepoint_name: 
String) -> PSQLPyResult<()> { let Some(conn) = &self.conn else { - return Err(RustPSQLDriverError::TransactionClosedError); + return Err(RustPSQLDriverError::TransactionClosedError("16".into())); }; let read_conn_g = conn.read().await; @@ -266,7 +236,7 @@ impl Transaction { pub async fn release_savepoint(&mut self, savepoint_name: String) -> PSQLPyResult<()> { let Some(conn) = &self.conn else { - return Err(RustPSQLDriverError::TransactionClosedError); + return Err(RustPSQLDriverError::TransactionClosedError("17".into())); }; let read_conn_g = conn.read().await; @@ -279,7 +249,7 @@ impl Transaction { pub async fn rollback_savepoint(&mut self, savepoint_name: String) -> PSQLPyResult<()> { let Some(conn) = &self.conn else { - return Err(RustPSQLDriverError::TransactionClosedError); + return Err(RustPSQLDriverError::TransactionClosedError("18".into())); }; let read_conn_g = conn.read().await; @@ -290,39 +260,6 @@ impl Transaction { Ok(()) } - /// Create new cursor object. - /// - /// # Errors - /// May return Err Result if db_client is None - #[pyo3(signature = ( - querystring, - parameters=None, - fetch_number=None, - scroll=None, - prepared=None, - ))] - pub fn cursor( - &self, - querystring: String, - parameters: Option>, - fetch_number: Option, - scroll: Option, - prepared: Option, - ) -> PSQLPyResult { - let Some(conn) = &self.conn else { - return Err(RustPSQLDriverError::TransactionClosedError); - }; - Ok(Cursor::new( - conn.clone(), - self.pg_config.clone(), - querystring, - parameters, - fetch_number.unwrap_or(10), - scroll, - prepared, - )) - } - #[pyo3(signature = (queries=None, prepared=None))] pub async fn pipeline<'py>( self_: Py, @@ -368,65 +305,6 @@ impl Transaction { return future::try_join_all(futures).await; } - Err(RustPSQLDriverError::TransactionClosedError) - } - - /// Perform binary copy to postgres table. - /// - /// # Errors - /// May return Err Result if cannot get bytes, - /// cannot perform request to the database, - /// cannot write bytes to the database. 
- #[pyo3(signature = (source, table_name, columns=None, schema_name=None))] - pub async fn binary_copy_to_table( - self_: pyo3::Py, - source: Py, - table_name: String, - columns: Option>, - schema_name: Option, - ) -> PSQLPyResult { - let db_client = pyo3::Python::with_gil(|gil| self_.borrow(gil).conn.clone()); - let mut table_name = quote_ident(&table_name); - if let Some(schema_name) = schema_name { - table_name = format!("{}.{}", quote_ident(&schema_name), table_name); - } - - let mut formated_columns = String::default(); - if let Some(columns) = columns { - formated_columns = format!("({})", columns.join(", ")); - } - - let copy_qs = format!("COPY {table_name}{formated_columns} FROM STDIN (FORMAT binary)"); - - if let Some(db_client) = db_client { - let mut psql_bytes: BytesMut = pyo3::Python::with_gil(|gil| { - let possible_py_buffer: Result, PyErr> = - source.extract::>(gil); - if let Ok(py_buffer) = possible_py_buffer { - let vec_buf = py_buffer.to_vec(gil)?; - return Ok(BytesMut::from(vec_buf.as_slice())); - } - - if let Ok(py_bytes) = source.call_method0(gil, "getvalue") { - if let Ok(bytes) = py_bytes.extract::>(gil) { - return Ok(BytesMut::from(bytes.as_slice())); - } - } - - Err(RustPSQLDriverError::PyToRustValueConversionError( - "source must be bytes or support Buffer protocol".into(), - )) - })?; - - let read_conn_g = db_client.read().await; - let sink = read_conn_g.copy_in(©_qs).await?; - let writer = BinaryCopyInWriter::new_empty_buffer(sink, &[]); - pin_mut!(writer); - writer.as_mut().write_raw_bytes(&mut psql_bytes).await?; - let rows_created = writer.as_mut().finish_empty().await?; - return Ok(rows_created); - } - - Ok(0) + Err(RustPSQLDriverError::TransactionClosedError("19".into())) } } diff --git a/src/exceptions/rust_errors.rs b/src/exceptions/rust_errors.rs index f133321b..9062a37e 100644 --- a/src/exceptions/rust_errors.rs +++ b/src/exceptions/rust_errors.rs @@ -49,8 +49,8 @@ pub enum RustPSQLDriverError { TransactionSavepointError(String), #[error("Transaction execute error: {0}")] TransactionExecuteError(String), - #[error("Underlying connection is returned to the pool")] - TransactionClosedError, + #[error("Underlying connection is returned to the pool: {0}")] + TransactionClosedError(String), // Cursor Errors #[error("Cursor error: {0}")] @@ -162,7 +162,7 @@ impl From for pyo3::PyErr { RustPSQLDriverError::TransactionExecuteError(_) => { TransactionExecuteError::new_err((error_desc,)) } - RustPSQLDriverError::TransactionClosedError => { + RustPSQLDriverError::TransactionClosedError(_) => { TransactionClosedError::new_err((error_desc,)) } RustPSQLDriverError::BaseCursorError(_) => BaseCursorError::new_err((error_desc,)), diff --git a/src/lib.rs b/src/lib.rs index 3229e675..a20c1ce4 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -35,8 +35,10 @@ fn psqlpy(py: Python<'_>, pymod: &Bound<'_, PyModule>) -> PyResult<()> { pymod.add_class::()?; pymod.add_function(wrap_pyfunction!(driver::connection::connect, pymod)?)?; pymod.add_class::()?; + // pymod.add_class::()?; + pymod.add_class::()?; + pymod.add_class::()?; pymod.add_class::()?; - pymod.add_class::()?; pymod.add_class::()?; pymod.add_class::()?; pymod.add_class::()?; diff --git a/src/options.rs b/src/options.rs index bd8ad511..f6e4152f 100644 --- a/src/options.rs +++ b/src/options.rs @@ -141,7 +141,7 @@ impl CopyCommandFormat { } #[pyclass(eq, eq_int)] -#[derive(Clone, Copy, PartialEq)] +#[derive(Clone, Copy, PartialEq, Debug)] pub enum IsolationLevel { ReadUncommitted, ReadCommitted, @@ -163,7 +163,7 @@ impl 
IsolationLevel { } #[pyclass(eq, eq_int)] -#[derive(Clone, Copy, PartialEq)] +#[derive(Clone, Copy, PartialEq, Debug)] pub enum ReadVariant { ReadOnly, ReadWrite, diff --git a/src/query_result.rs b/src/query_result.rs index cda02a8b..d9dd8848 100644 --- a/src/query_result.rs +++ b/src/query_result.rs @@ -1,4 +1,9 @@ -use pyo3::{prelude::*, pyclass, pymethods, types::PyDict, Py, PyAny, Python, ToPyObject}; +use pyo3::{ + prelude::*, + pyclass, pymethods, + types::{PyDict, PyTuple}, + Py, PyAny, Python, ToPyObject, +}; use tokio_postgres::Row; use crate::{exceptions::rust_errors::PSQLPyResult, value_converter::to_python::postgres_to_py}; diff --git a/src/statement/cache.rs b/src/statement/cache.rs index 7d78898d..7c07da40 100644 --- a/src/statement/cache.rs +++ b/src/statement/cache.rs @@ -5,7 +5,7 @@ use postgres_types::Type; use tokio::sync::RwLock; use tokio_postgres::Statement; -use super::{query::QueryString, utils::hash_str}; +use super::{parameters::Column, query::QueryString, utils::hash_str}; #[derive(Default)] pub(crate) struct StatementsCache(HashMap); @@ -44,6 +44,14 @@ impl StatementCacheInfo { pub(crate) fn types(&self) -> Vec { self.inner_stmt.params().to_vec() } + + pub(crate) fn columns(&self) -> Vec { + self.inner_stmt + .columns() + .iter() + .map(|column| Column::new(column.name().to_string(), column.table_oid().clone())) + .collect::>() + } } pub(crate) static STMTS_CACHE: Lazy> = diff --git a/src/statement/parameters.rs b/src/statement/parameters.rs index 09e0cbef..3aa12160 100644 --- a/src/statement/parameters.rs +++ b/src/statement/parameters.rs @@ -3,6 +3,7 @@ use std::iter::zip; use postgres_types::{ToSql, Type}; use pyo3::{ conversion::FromPyObjectBound, + pyclass, pymethods, types::{PyAnyMethods, PyMapping}, Py, PyObject, PyTypeCheck, Python, }; @@ -17,16 +18,48 @@ use crate::{ pub type QueryParameter = (dyn ToSql + Sync); +#[pyclass] +#[derive(Default, Clone, Debug)] +pub struct Column { + name: String, + table_oid: Option, +} + +impl Column { + pub fn new(name: String, table_oid: Option) -> Self { + Self { name, table_oid } + } +} + +#[pymethods] +impl Column { + #[getter] + fn get_name(&self) -> String { + self.name.clone() + } + + #[getter] + fn get_table_oid(&self) -> Option { + self.table_oid.clone() + } +} + pub(crate) struct ParametersBuilder { parameters: Option, types: Option>, + columns: Vec, } impl ParametersBuilder { - pub fn new(parameters: &Option, types: Option>) -> Self { + pub fn new( + parameters: &Option, + types: Option>, + columns: Vec, + ) -> Self { Self { parameters: parameters.clone(), types, + columns, } } @@ -55,13 +88,15 @@ impl ParametersBuilder { match (sequence_typed, mapping_typed) { (Some(sequence), None) => { - prepared_parameters = - Some(SequenceParametersBuilder::new(sequence, self.types).prepare(gil)?); + prepared_parameters = Some( + SequenceParametersBuilder::new(sequence, self.types, self.columns) + .prepare(gil)?, + ); } (None, Some(mapping)) => { if let Some(parameters_names) = parameters_names { prepared_parameters = Some( - MappingParametersBuilder::new(mapping, self.types) + MappingParametersBuilder::new(mapping, self.types, self.columns) .prepare(gil, parameters_names)?, ) } @@ -110,13 +145,15 @@ impl ParametersBuilder { pub(crate) struct MappingParametersBuilder { map_parameters: Py, types: Option>, + columns: Vec, } impl MappingParametersBuilder { - fn new(map_parameters: Py, types: Option>) -> Self { + fn new(map_parameters: Py, types: Option>, columns: Vec) -> Self { Self { map_parameters, types, + columns, } } @@ 
-143,7 +180,11 @@ impl MappingParametersBuilder { .map(|(parameter, type_)| from_python_typed(parameter.bind(gil), &type_)) .collect::>>()?; - Ok(PreparedParameters::new(converted_parameters, types)) + Ok(PreparedParameters::new( + converted_parameters, + types, + self.columns, + )) } fn prepare_not_typed( @@ -157,7 +198,11 @@ impl MappingParametersBuilder { .map(|parameter| from_python_untyped(parameter.bind(gil))) .collect::>>()?; - Ok(PreparedParameters::new(converted_parameters, vec![])) + Ok(PreparedParameters::new( + converted_parameters, + vec![], + self.columns, + )) } fn extract_parameters( @@ -185,13 +230,15 @@ impl MappingParametersBuilder { pub(crate) struct SequenceParametersBuilder { seq_parameters: Vec, types: Option>, + columns: Vec, } impl SequenceParametersBuilder { - fn new(seq_parameters: Vec, types: Option>) -> Self { + fn new(seq_parameters: Vec, types: Option>, columns: Vec) -> Self { Self { seq_parameters: seq_parameters, types, + columns, } } @@ -208,7 +255,11 @@ impl SequenceParametersBuilder { .map(|(parameter, type_)| from_python_typed(parameter.bind(gil), &type_)) .collect::>>()?; - Ok(PreparedParameters::new(converted_parameters, types)) + Ok(PreparedParameters::new( + converted_parameters, + types, + self.columns, + )) } fn prepare_not_typed(self, gil: Python<'_>) -> PSQLPyResult { @@ -218,7 +269,11 @@ impl SequenceParametersBuilder { .map(|parameter| from_python_untyped(parameter.bind(gil))) .collect::>>()?; - Ok(PreparedParameters::new(converted_parameters, vec![])) + Ok(PreparedParameters::new( + converted_parameters, + vec![], + self.columns, + )) } } @@ -226,11 +281,16 @@ impl SequenceParametersBuilder { pub struct PreparedParameters { parameters: Vec, types: Vec, + columns: Vec, } impl PreparedParameters { - pub fn new(parameters: Vec, types: Vec) -> Self { - Self { parameters, types } + pub fn new(parameters: Vec, types: Vec, columns: Vec) -> Self { + Self { + parameters, + types, + columns, + } } pub fn params(&self) -> Box<[&(dyn ToSql + Sync)]> { @@ -251,4 +311,8 @@ impl PreparedParameters { .collect::>() .into_boxed_slice() } + + pub fn columns(&self) -> &Vec { + &self.columns + } } diff --git a/src/statement/statement.rs b/src/statement/statement.rs index fd77eb55..fc45b3eb 100644 --- a/src/statement/statement.rs +++ b/src/statement/statement.rs @@ -3,7 +3,10 @@ use tokio_postgres::Statement; use crate::exceptions::rust_errors::{PSQLPyResult, RustPSQLDriverError}; -use super::{parameters::PreparedParameters, query::QueryString}; +use super::{ + parameters::{Column, PreparedParameters}, + query::QueryString, +}; #[derive(Clone, Debug)] pub struct PsqlpyStatement { @@ -47,4 +50,8 @@ impl PsqlpyStatement { pub fn params_typed(&self) -> Box<[(&(dyn ToSql + Sync), Type)]> { self.prepared_parameters.params_typed() } + + pub fn columns(&self) -> &Vec { + &self.prepared_parameters.columns() + } } diff --git a/src/statement/statement_builder.rs b/src/statement/statement_builder.rs index c909f68d..054352a3 100644 --- a/src/statement/statement_builder.rs +++ b/src/statement/statement_builder.rs @@ -9,22 +9,22 @@ use crate::{ use super::{ cache::{StatementCacheInfo, StatementsCache, STMTS_CACHE}, - parameters::ParametersBuilder, + parameters::{Column, ParametersBuilder}, query::QueryString, statement::PsqlpyStatement, }; pub struct StatementBuilder<'a> { - querystring: String, - parameters: Option, + querystring: &'a String, + parameters: &'a Option, inner_conn: &'a PSQLPyConnection, prepared: bool, } impl<'a> StatementBuilder<'a> { pub fn new( - querystring: 
String, - parameters: Option, + querystring: &'a String, + parameters: &'a Option, inner_conn: &'a PSQLPyConnection, prepared: Option, ) -> Self { @@ -51,7 +51,8 @@ impl<'a> StatementBuilder<'a> { } fn build_with_cached(self, cached: StatementCacheInfo) -> PSQLPyResult { - let raw_parameters = ParametersBuilder::new(&self.parameters, Some(cached.types())); + let raw_parameters = + ParametersBuilder::new(&self.parameters, Some(cached.types()), cached.columns()); let parameters_names = if let Some(converted_qs) = &cached.query.converted_qs { Some(converted_qs.params_names().clone()) @@ -76,8 +77,17 @@ impl<'a> StatementBuilder<'a> { querystring.process_qs(); let prepared_stmt = self.prepare_query(&querystring, self.prepared).await?; - let parameters_builder = - ParametersBuilder::new(&self.parameters, Some(prepared_stmt.params().to_vec())); + + let columns = prepared_stmt + .columns() + .iter() + .map(|column| Column::new(column.name().to_string(), column.table_oid().clone())) + .collect::>(); + let parameters_builder = ParametersBuilder::new( + &self.parameters, + Some(prepared_stmt.params().to_vec()), + columns, + ); let parameters_names = if let Some(converted_qs) = &querystring.converted_qs { Some(converted_qs.params_names().clone()) diff --git a/src/value_converter/to_python.rs b/src/value_converter/to_python.rs index c0801bac..6ee761bc 100644 --- a/src/value_converter/to_python.rs +++ b/src/value_converter/to_python.rs @@ -172,6 +172,12 @@ fn postgres_bytes_to_py( } Ok(py.None()) } + Type::OID => { + Ok(composite_field_postgres_to_py::>(type_, buf, is_simple)?.to_object(py)) + } + Type::NAME => Ok( + composite_field_postgres_to_py::>(type_, buf, is_simple)?.to_object(py), + ), // // ---------- String Types ---------- // // Convert TEXT and VARCHAR type into String, then into str Type::TEXT | Type::VARCHAR | Type::XML => Ok(composite_field_postgres_to_py::< @@ -342,6 +348,11 @@ fn postgres_bytes_to_py( composite_field_postgres_to_py::>>(type_, buf, is_simple)?, ) .to_object(py)), + Type::OID_ARRAY => Ok(postgres_array_to_py( + py, + composite_field_postgres_to_py::>>(type_, buf, is_simple)?, + ) + .to_object(py)), // Convert ARRAY of TEXT or VARCHAR into Vec, then into list[str] Type::TEXT_ARRAY | Type::VARCHAR_ARRAY | Type::XML_ARRAY => Ok(postgres_array_to_py( py, From d84fd550aedfff3508c3296b6c757a9673e0df7a Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Mon, 26 May 2025 23:56:33 +0200 Subject: [PATCH 58/65] Updated docs --- .github/workflows/deploy-docs.yml | 50 + .gitignore | 6 +- docs/.vuepress/config.ts | 21 +- docs/.vuepress/sidebar.ts | 1 + docs/.vuepress/styles/config.scss | 3 + docs/.vuepress/styles/index.scss | 19 + docs/.vuepress/styles/palette.scss | 1 + docs/.vuepress/theme.ts | 173 - docs/benchmarks.md | 24 +- docs/components/components_overview.md | 6 +- docs/components/connection.md | 88 +- docs/components/cursor.md | 93 +- docs/components/exceptions.md | 139 +- docs/components/prepared_statement.md | 78 + package.json | 35 +- pnpm-lock.yaml | 4285 ++++++++++++++++-------- python/psqlpy/_internal/__init__.pyi | 80 +- src/driver/common.rs | 46 +- src/driver/connection.rs | 29 +- tsconfig.json | 14 + 20 files changed, 3193 insertions(+), 1998 deletions(-) create mode 100644 .github/workflows/deploy-docs.yml create mode 100644 docs/.vuepress/styles/config.scss create mode 100644 docs/.vuepress/styles/index.scss create mode 100644 docs/.vuepress/styles/palette.scss delete mode 100644 docs/.vuepress/theme.ts create mode 100644 
docs/components/prepared_statement.md create mode 100644 tsconfig.json diff --git a/.github/workflows/deploy-docs.yml b/.github/workflows/deploy-docs.yml new file mode 100644 index 00000000..62be81ec --- /dev/null +++ b/.github/workflows/deploy-docs.yml @@ -0,0 +1,50 @@ + +name: Deploy Docs + +on: + push: + branches: + - support_dbapi + +permissions: + contents: write + +jobs: + deploy-gh-pages: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + # if your docs needs submodules, uncomment the following line + # submodules: true + + - name: Setup pnpm + uses: pnpm/action-setup@v4 + + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: 22 + cache: pnpm + + - name: Install Deps + run: | + corepack enable + pnpm install --frozen-lockfile + + - name: Build Docs + env: + NODE_OPTIONS: --max_old_space_size=8192 + run: |- + pnpm run docs:build + > docs/.vuepress/dist/.nojekyll + + - name: Deploy Docs + uses: JamesIves/github-pages-deploy-action@v4 + with: + # Deploy Docs + branch: gh-pages + folder: docs/.vuepress/dist diff --git a/.gitignore b/.gitignore index b41bbce9..5fd6dfec 100644 --- a/.gitignore +++ b/.gitignore @@ -77,4 +77,8 @@ _load_test # JS node_modules -.temp \ No newline at end of file +.temp +node_modules/ +docs/.vuepress/.cache/ +docs/.vuepress/.temp/ +docs/.vuepress/dist/ diff --git a/docs/.vuepress/config.ts b/docs/.vuepress/config.ts index c98c5b05..7fbde9c7 100644 --- a/docs/.vuepress/config.ts +++ b/docs/.vuepress/config.ts @@ -1,15 +1,14 @@ import { defineUserConfig } from "vuepress"; import { hopeTheme } from "vuepress-theme-hope"; import sidebar from "./sidebar.js"; - -import { viteBundler } from '@vuepress/bundler-vite' +import { markdownTabPlugin } from '@vuepress/plugin-markdown-tab' export default defineUserConfig({ lang: "en-US", title: "PSQLPy", description: "PSQLPy Documentation", - bundler: viteBundler(), + // bundler: viteBundler(), theme: hopeTheme({ repo: "psqlpy-python/psqlpy", @@ -22,22 +21,20 @@ export default defineUserConfig({ hostname: "https://psqlpy-python.github.io/", + markdown: { + tabs: true, + mermaid: true, + chartjs: true, + }, + plugins: { readingTime: false, - copyCode: { showInMobile: true, }, - searchPro: { + slimsearch: { indexContent: true, - autoSuggestions: false, - }, - - mdEnhance: { - tabs: true, - mermaid: true, - chart: true, }, sitemap: { diff --git a/docs/.vuepress/sidebar.ts b/docs/.vuepress/sidebar.ts index 1a3c1efd..9fc80456 100644 --- a/docs/.vuepress/sidebar.ts +++ b/docs/.vuepress/sidebar.ts @@ -23,6 +23,7 @@ export default sidebar({ "connection", "transaction", "cursor", + "prepared_statement", "listener", "results", "exceptions", diff --git a/docs/.vuepress/styles/config.scss b/docs/.vuepress/styles/config.scss new file mode 100644 index 00000000..613ae09e --- /dev/null +++ b/docs/.vuepress/styles/config.scss @@ -0,0 +1,3 @@ +$code-dark-theme: dracula; +$code-light-theme: one-light; +$theme-color: #264934; \ No newline at end of file diff --git a/docs/.vuepress/styles/index.scss b/docs/.vuepress/styles/index.scss new file mode 100644 index 00000000..8fe3ba67 --- /dev/null +++ b/docs/.vuepress/styles/index.scss @@ -0,0 +1,19 @@ +// place your custom styles here +.vp-site-name { + visibility: hidden; +} + +.vp-hero-info { + img { + max-width: 60% !important; + padding: 1rem; + } +} + +.vp-hero-title { + font-size: 3rem; +} + +.vp-actions { + align-items: flex-end; +} diff --git a/docs/.vuepress/styles/palette.scss 
b/docs/.vuepress/styles/palette.scss new file mode 100644 index 00000000..d271cb05 --- /dev/null +++ b/docs/.vuepress/styles/palette.scss @@ -0,0 +1 @@ +// you can change colors here diff --git a/docs/.vuepress/theme.ts b/docs/.vuepress/theme.ts deleted file mode 100644 index 5686d1af..00000000 --- a/docs/.vuepress/theme.ts +++ /dev/null @@ -1,173 +0,0 @@ -import { hopeTheme } from "vuepress-theme-hope"; -import sidebar from "./sidebar.js"; - -export default hopeTheme({ - hostname: "https://github.com/psqlpy-python/psqlpy", - - iconAssets: "fontawesome-with-brands", - - logo: "./logo.png", - - repo: "psqlpy-python/psqlpy", - - docsDir: "src", - - // sidebar - sidebar, - - footer: "MIT Licensed | Copyright© 2024", - - displayFooter: true, - - encrypt: { - config: { - "/demo/encrypt.html": ["1234"], - }, - }, - - metaLocales: { - editLink: "Edit this page on GitHub", - }, - - plugins: { - // You should generate and use your own comment service - // comment: { - // provider: "Giscus", - // repo: "vuepress-theme-hope/giscus-discussions", - // repoId: "R_kgDOG_Pt2A", - // category: "Announcements", - // categoryId: "DIC_kwDOG_Pt2M4COD69", - // }, - - components: { - components: ["Badge", "VPCard"], - }, - - // All features are enabled for demo, only preserve features you need here - mdEnhance: { - align: true, - attrs: true, - codetabs: true, - component: true, - demo: true, - figure: true, - imgLazyload: true, - imgSize: true, - include: true, - mark: true, - chart: true, - stylize: [ - { - matcher: "Recommended", - replacer: ({ tag }) => { - if (tag === "em") - return { - tag: "Badge", - attrs: { type: "tip" }, - content: "Recommended", - }; - }, - }, - ], - sub: true, - sup: true, - tabs: true, - vPre: true, - - // install chart.js before enabling it - // chart: true, - - // insert component easily - - // install echarts before enabling it - // echarts: true, - - // install flowchart.ts before enabling it - // flowchart: true, - - // gfm requires mathjax-full to provide tex support - // gfm: true, - - // install katex before enabling it - // katex: true, - - // install mathjax-full before enabling it - // mathjax: true, - - // install mermaid before enabling it - mermaid: true, - - // playground: { - // presets: ["ts", "vue"], - // }, - - // install reveal.js before enabling it - // revealJs: { - // plugins: ["highlight", "math", "search", "notes", "zoom"], - // }, - - // install @vue/repl before enabling it - // vuePlayground: true, - - // install sandpack-vue3 before enabling it - // sandpack: true, - }, - - // install @vuepress/plugin-pwa and uncomment these if you want a PWA - // pwa: { - // favicon: "/favicon.ico", - // cacheHTML: true, - // cachePic: true, - // appendBase: true, - // apple: { - // icon: "/assets/icon/apple-icon-152.png", - // statusBarColor: "black", - // }, - // msTile: { - // image: "/assets/icon/ms-icon-144.png", - // color: "#ffffff", - // }, - // manifest: { - // icons: [ - // { - // src: "/assets/icon/chrome-mask-512.png", - // sizes: "512x512", - // purpose: "maskable", - // type: "image/png", - // }, - // { - // src: "/assets/icon/chrome-mask-192.png", - // sizes: "192x192", - // purpose: "maskable", - // type: "image/png", - // }, - // { - // src: "/assets/icon/chrome-512.png", - // sizes: "512x512", - // type: "image/png", - // }, - // { - // src: "/assets/icon/chrome-192.png", - // sizes: "192x192", - // type: "image/png", - // }, - // ], - // shortcuts: [ - // { - // name: "Demo", - // short_name: "Demo", - // url: "/demo/", - // icons: [ - // { - // src: 
"/assets/icon/guide-maskable.png", - // sizes: "192x192", - // purpose: "maskable", - // type: "image/png", - // }, - // ], - // }, - // ], - // }, - // }, - }, -}); diff --git a/docs/benchmarks.md b/docs/benchmarks.md index b850e67d..061f09ad 100644 --- a/docs/benchmarks.md +++ b/docs/benchmarks.md @@ -27,7 +27,7 @@ However, if you have application and database located on different machines, you ## Local Database ::: tabs @tab Simple Connection Select -::: chart Simple Connection Select +::: chartjs Simple Connection Select ```json { @@ -80,7 +80,7 @@ However, if you have application and database located on different machines, you } ``` @tab Hard Connection Select -::: chart Hard Connection Select +::: chartjs Hard Connection Select ```json { @@ -133,7 +133,7 @@ However, if you have application and database located on different machines, you } ``` @tab Combined Connection Query -::: chart Combined Connection Query +::: chartjs Combined Connection Query ```json { @@ -186,7 +186,7 @@ However, if you have application and database located on different machines, you } ``` @tab Simple Transaction Select -::: chart Simple Transaction Select +::: chartjs Simple Transaction Select ```json { @@ -239,7 +239,7 @@ However, if you have application and database located on different machines, you } ``` @tab Hard Transaction Select -::: chart Hard Transaction Select +::: chartjs Hard Transaction Select ```json { @@ -292,7 +292,7 @@ However, if you have application and database located on different machines, you } ``` @tab Combined Transaction Query -::: chart Combined Transaction Query +::: chartjs Combined Transaction Query ```json { @@ -349,7 +349,7 @@ However, if you have application and database located on different machines, you ## External Database ::: tabs @tab Simple Connection Select -::: chart Simple Connection Select +::: chartjs Simple Connection Select ```json { @@ -403,7 +403,7 @@ However, if you have application and database located on different machines, you ``` @tab Hard Connection Select -::: chart Hard Connection Select +::: chartjs Hard Connection Select ```json { @@ -457,7 +457,7 @@ However, if you have application and database located on different machines, you ``` @tab Combined Connection Query -::: chart Combined Connection Query +::: chartjs Combined Connection Query ```json { @@ -511,7 +511,7 @@ However, if you have application and database located on different machines, you ``` @tab Simple Transaction Select -::: chart Simple Transaction Select +::: chartjs Simple Transaction Select ```json { @@ -565,7 +565,7 @@ However, if you have application and database located on different machines, you ``` @tab Hard Transaction Select -::: chart Hard Transaction Select +::: chartjs Hard Transaction Select ```json { @@ -619,7 +619,7 @@ However, if you have application and database located on different machines, you ``` @tab Combined Transaction Query -::: chart Combined Transaction Query +::: chartjs Combined Transaction Query ```json { diff --git a/docs/components/components_overview.md b/docs/components/components_overview.md index 90b05b70..14a5d5d4 100644 --- a/docs/components/components_overview.md +++ b/docs/components/components_overview.md @@ -5,11 +5,11 @@ title: Components ## Components - `ConnectionPool`: holds connections in itself and give them when requested. - `ConnectionPoolBuilder`: Chainable builder for `ConnectionPool`, for people who prefer it over big initialization. -- `Connection`: represents single database connection, can be retrieved from `ConnectionPool`. 
+- `Connection`: represents a single database connection; it can be retrieved from `ConnectionPool` or created with the `connect` method.
 - `Transaction`: represents database transaction, can be made from `Connection`.
-- `Cursor`: represents database cursor, can be made from `Transaction`.
+- `Cursor`: represents a database cursor; it can be made from `Connection`, `Transaction`, and `PreparedStatement`.
+- `PreparedStatement`: represents a PostgreSQL prepared statement.
 - `Listener`: object to work with [LISTEN](https://www.postgresql.org/docs/current/sql-listen.html)/[NOTIFY](https://www.postgresql.org/docs/current/sql-notify.html) functionality, can be mode from `ConnectionPool`.
 - `QueryResult`: represents list of results from database.
 - `SingleQueryResult`: represents single result from the database.
 - `Exceptions`: we have some custom exceptions.
-
diff --git a/docs/components/connection.md b/docs/components/connection.md
index 1e82d99a..bd73d933 100644
--- a/docs/components/connection.md
+++ b/docs/components/connection.md
@@ -20,6 +20,18 @@ async def main() -> None:
     connection = await db_pool.connection()
 ```
 
+@tab single connection
+```python
+from psqlpy import connect
+
+db_connection: Final = connect(
+    dsn="postgres://postgres:postgres@localhost:5432/postgres",
+)
+
+async def main() -> None:
+    await db_connection.execute(...)
+```
+
 @tab async context manager
 ```python
 from psqlpy import ConnectionPool
@@ -83,48 +95,48 @@ async def main() -> None:
     )
 ```
 
-### Fetch
+### Execute Many
 
 #### Parameters:
 
 - `querystring`: Statement string.
-- `parameters`: List of parameters for the statement string.
+- `parameters`: List of lists of parameters for the statement string.
 - `prepared`: Prepare statement before execution or not.
 
-The same as the `execute` method, for some people this naming is preferable.
+This method supports parameters; each parameter must be marked as `$<number>` in the querystring (numbering starts at 1).
+Atomicity is provided, so you don't need to worry about an unsuccessful result: a transaction is used internally.
+This method returns nothing.
 
 ```python
 async def main() -> None:
     ...
     connection = await db_pool.connection()
-    await connection.execute_many(
-        "INSERT INTO users (name, age) VALUES ($1, $2)",
-        [["boba", 10], ["boba", 20]],
+    results: QueryResult = await connection.fetch(
+        "SELECT * FROM users WHERE id = $1 and username = $2",
+        [100, "Alex"],
     )
+
+    dict_results: list[dict[str, Any]] = results.result()
 ```
 
 ### Fetch Row
@@ -177,17 +189,14 @@ async def main() -> None:
 
 ### Transaction
 
-`Connection` is the only object that can be used to build `Transaction` object.
-
 #### Parameters:
 
 - `isolation_level`: level of isolation. Default how it is in PostgreSQL.
 - `read_variant`: configure read variant of the transaction. Default how it is in PostgreSQL.
 - `deferrable`: configure deferrable of the transaction. Default how it is in PostgreSQL.
-- `synchronous_commit`: configure [synchronous_commit](https://postgresqlco.nf/doc/en/param/synchronous_commit/) option for transaction. Default how it is in PostgreSQL.
 
 ```python
-from psqlpy import IsolationLevel, ReadVariant, SynchronousCommit
+from psqlpy import IsolationLevel, ReadVariant
 
 async def main() -> None:
     ...
@@ -196,7 +205,44 @@ async def main() -> None:
         isolation_level=IsolationLevel.Serializable,
         read_variant=ReadVariant.ReadWrite,
         deferrable=True,
-        synchronous_commit=SynchronousCommit.On,
+    )
+```
+
+### Cursor
+Create a new server-side cursor.
+
+#### Parameters:
+- `querystring`: querystring for cursor.
+- `parameters`: parameters for querystring.
+- `fetch_number`: default value for fetch number, can be changed.
+
+```python
+async def main() -> None:
+    ...
+    connection = await db_pool.connection()
+    cursor = connection.cursor(
+        querystring="SELECT * FROM users WHERE id > $1",
+        parameters=[100],
+        fetch_number=5,
+    )
+```
+
+### Prepare
+Prepare a statement and return a new `PreparedStatement` instance.
+
+#### Parameters:
+- `querystring`: querystring for statement.
+- `parameters`: parameters for querystring.
+
+```python
+from psqlpy import PreparedStatement
+
+async def main() -> None:
+    ...
+    connection = await db_pool.connection()
+    prepared_stmt: PreparedStatement = await connection.prepare(
+        querystring="SELECT * FROM users WHERE id > $1",
+        parameters=[100],
     )
 ```
diff --git a/docs/components/cursor.md b/docs/components/cursor.md
index 58d4e470..ca931a58 100644
--- a/docs/components/cursor.md
+++ b/docs/components/cursor.md
@@ -2,8 +2,7 @@
 title: Cursor
 ---
 
-`Cursor` objects represents real `Cursor` in the `PostgreSQL`. [PostgreSQL docs](https://www.postgresql.org/docs/current/plpgsql-cursors.html)
-It can be built only from `Transaction`.
+`Cursor` objects represent a server-side `Cursor` in `PostgreSQL`. [PostgreSQL docs](https://www.postgresql.org/docs/current/plpgsql-cursors.html).
 
 ## Cursor Parameters
 
@@ -70,93 +69,3 @@ async def main() -> None:
         fetch_number=100,
     )
 ```
-
-### Fetch Next
-
-Just fetch next record from the `Cursor`.
-
-```python
-async def main() -> None:
-    result: QueryResult = await cursor.fetch_next()
-```
-
-### Fetch Prior
-
-Just fetch previous record.
-
-```python
-async def main() -> None:
-    result: QueryResult = await cursor.fetch_prior()
-```
-
-### Fetch First
-
-Just fetch the first record.
-
-```python
-async def main() -> None:
-    result: QueryResult = await cursor.fetch_first()
-```
-
-### Fetch Last
-
-Just fetch the last record.
-
-```python
-async def main() -> None:
-    result: QueryResult = await cursor.fetch_last()
-```
-
-### Fetch Absolute
-
-Just fetch absolute records.
-It has `absolute_number` parameter, you must specify it.
- -```python -async def main() -> None: - result: QueryResult = await cursor.fetch_absolute( - absolute_number=10, - ) -``` - -### Fetch Relative - -Just fetch absolute records. -It has `relative_number` parameter, you must specify it. - -```python -async def main() -> None: - result: QueryResult = await cursor.fetch_relative( - relative_number=10, - ) -``` - -### Fetch Forward All - -Fetch forward all records in the cursor. - -```python -async def main() -> None: - result: QueryResult = await cursor.fetch_forward_all() -``` - -### Fetch Backward - -Just backward records. -It has `backward_count` parameter, you must specify it. - -```python -async def main() -> None: - result: QueryResult = await cursor.fetch_backward( - backward_count=10, - ) -``` - -### Fetch Backward All - -Fetch backward all records in the cursor. - -```python -async def main() -> None: - result: QueryResult = await cursor.fetch_backward_all() -``` diff --git a/docs/components/exceptions.md b/docs/components/exceptions.md index fd2d86fb..1347b43f 100644 --- a/docs/components/exceptions.md +++ b/docs/components/exceptions.md @@ -7,141 +7,4 @@ They are split into different subclasses for `ConnectionPool`, `Connection`, `Tr You can use them to define code behavior based on exceptions. -## Exceptions dependency: -```mermaid - -stateDiagram-v2 - RustPSQLDriverPyBaseError --> BaseConnectionPoolError - RustPSQLDriverPyBaseError --> BaseConnectionError - RustPSQLDriverPyBaseError --> BaseTransactionError - RustPSQLDriverPyBaseError --> BaseCursorError - RustPSQLDriverPyBaseError --> BaseListenerError - RustPSQLDriverPyBaseError --> RustException - RustPSQLDriverPyBaseError --> RustToPyValueMappingError - RustPSQLDriverPyBaseError --> PyToRustValueMappingError - RustPSQLDriverPyBaseError --> UUIDValueConvertError - RustPSQLDriverPyBaseError --> MacAddrConversionError - - state BaseConnectionPoolError { - [*] --> ConnectionPoolBuildError - [*] --> ConnectionPoolConfigurationError - [*] --> ConnectionPoolExecuteError - } - state BaseConnectionError { - [*] --> ConnectionExecuteError - [*] --> ConnectionClosedError - } - state BaseTransactionError { - [*] --> TransactionBeginError - [*] --> TransactionCommitError - [*] --> TransactionRollbackError - [*] --> TransactionSavepointError - [*] --> TransactionExecuteError - [*] --> TransactionClosedError - } - state BaseCursorError { - [*] --> CursorStartError - [*] --> CursorCloseError - [*] --> CursorFetchError - [*] --> CursorClosedError - } - state BaseListenerError { - [*] --> ListenerStartError - [*] --> ListenerClosedError - [*] --> ListenerCallbackError - } - state RustException { - [*] --> DriverError - [*] --> MacAddrParseError - [*] --> RuntimeJoinError - } -``` - -## Exceptions: -### RustPSQLDriverPyBaseError -Main bas exception for all errors. - -### RustToPyValueMappingError -Error if it is not possible to covert rust type to python. - -You can get it if you database contains data type that it not supported by this library. - -### PyToRustValueMappingError -Error if it is not possible to covert python type to rust. - -You can get this exception when executing queries with parameters. So, if there are no parameters for the query, don't handle this error. - -### UUIDValueConvertError -Error if it's impossible to convert py string UUID into rust UUID. - -### MacAddrConversionError -Error if cannot convert MacAddr string value to rust type. - -### BaseConnectionPoolError -Base error for all Connection Pool errors. 
- -#### ConnectionPoolBuildError -Error for errors in building connection pool. - -#### ConnectionPoolConfigurationError -Error in connection pool configuration. - -#### ConnectionPoolExecuteError -Error in connection pool execution. - -### BaseConnectionError -Base error for Connection errors. - -#### ConnectionExecuteError -Error in connection execution. - -#### ConnectionClosedError -Error if underlying connection is closed. - -### BaseTransactionError -Base error for all transaction errors. - -#### TransactionBeginError -Error in transaction begin. - -#### TransactionCommitError -Error in transaction commit. - -#### TransactionRollbackError -Error in transaction rollback. - -#### TransactionSavepointError -Error in transaction savepoint. - -#### TransactionExecuteError -Error in transaction execution. - -#### TransactionClosedError -Error if underlying connection is closed. - -### BaseCursorError -Base error for Cursor errors. - -#### CursorStartError -Error in cursor declare. - -#### CursorCloseError -Error in cursor close. - -#### CursorFetchError -Error in cursor fetch (any fetch). - -#### CursorClosedError -Error if underlying connection is closed. - -### BaseListenerError -Base error for all Listener errors. - -#### ListenerStartError -Error if listener start failed. - -#### ListenerClosedError -Error if listener manipulated but it's closed - -#### ListenerCallbackError -Error if callback passed to listener isn't a coroutine +All exceptions described [here](https://github.com/psqlpy-python/psqlpy/blob/main/python/psqlpy/_internal/exceptions.pyi) diff --git a/docs/components/prepared_statement.md b/docs/components/prepared_statement.md new file mode 100644 index 00000000..ae38f682 --- /dev/null +++ b/docs/components/prepared_statement.md @@ -0,0 +1,78 @@ +--- +title: Prepared Statement +--- + +Representation of PostgreSQL PreparedStatement. + +## Usage + +::: tabs + +@tab Execute +```python +from psqlpy import ConnectionPool, QueryResult + +db_pool: Final = ConnectionPool( + dsn="postgres://postgres:postgres@localhost:5432/postgres", +) + +async def main() -> None: + connection = await db_pool.connection() + prepared_stmt = await connection.prepare( + querystring="SELECT * FROM users WHERE id > $1", + parameters=[100], + ) + + result: QueryResult = await prepared_stmt.execute() +``` + +@tab Cursor +```python +from psqlpy import ConnectionPool, Cursor, PreparedStatement + +db_pool: Final = ConnectionPool( + dsn="postgres://postgres:postgres@localhost:5432/postgres", +) + +async def main() -> None: + connection = await db_pool.connection() + prepared_stmt: PreparedStatement = await connection.prepare( + querystring="SELECT * FROM users WHERE id > $1", + parameters=[100], + ) + + cursor: Cursor = prepared_stmt.cursor() +``` +::: + +## PreparedStatement methods + +### Execute + +Just execute prepared statement. + +```python +async def main() -> None: + connection = await db_pool.connection() + prepared_stmt = await connection.prepare( + querystring="SELECT * FROM users WHERE id > $1", + parameters=[100], + ) + + result: QueryResult = await prepared_stmt.execute() +``` + +### Cursor + +Create new Cursor instance from the PreparedStatement. 
+
+```python
+async def main() -> None:
+    connection = await db_pool.connection()
+    prepared_stmt = await connection.prepare(
+        querystring="SELECT * FROM users WHERE id > $1",
+        parameters=[100],
+    )
+
+    cursor: Cursor = prepared_stmt.cursor()
+```
\ No newline at end of file
diff --git a/package.json b/package.json
index 0af7b856..14193ae6 100644
--- a/package.json
+++ b/package.json
@@ -5,21 +5,32 @@
   "license": "MIT",
   "private": false,
   "scripts": {
-    "docs:dev": "vuepress dev docs --clean-cache",
-    "docs:build": "vuepress build docs",
-    "docs:serve": "vuepress serve docs"
+    "docs:dev": "vuepress-vite dev docs",
+    "docs:build": "vuepress-vite build docs",
+    "docs:serve": "vuepress serve docs",
+    "docs:clean-dev": "vuepress-vite dev docs --clean-cache",
+    "docs:update-package": "pnpm dlx vp-update"
   },
   "packageManager": "pnpm@7.22.0",
   "devDependencies": {
-    "@vuepress/bundler-vite": "2.0.0-rc.9",
-    "mermaid": "^10.8.0",
-    "sass-loader": "^14.1.0",
-    "vue": "^3.4.15",
-    "vuepress": "2.0.0-rc.9",
-    "vuepress-plugin-search-pro": "2.0.0-rc.36",
-    "vuepress-theme-hope": "2.0.0-rc.36"
+    "@vuepress/bundler-vite": "2.0.0-rc.23",
+    "@vuepress/plugin-markdown-tab": "2.0.0-rc.47",
+    "@vuepress/plugin-slimsearch": "2.0.0-rc.106",
+    "mermaid": "^11.6.0",
+    "sass": "^1.89.0",
+    "sass-embedded": "^1.88.0",
+    "sass-loader": "^16.0.5",
+    "vue": "^3.5.13",
+    "vuepress": "2.0.0-rc.23",
+    "vuepress-plugin-md-enhance": "2.0.0-rc.88",
+    "vuepress-theme-hope": "2.0.0-rc.88"
   },
   "dependencies": {
-    "chart.js": "^4.4.3"
+    "chart.js": "^4.4.9"
+  },
+  "pnpm": {
+    "onlyBuiltDependencies": [
+      "esbuild"
+    ]
   }
-}
\ No newline at end of file
+}
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 80c92f84..1f1b31e4 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -4,529 +4,625 @@ settings:
   autoInstallPeers: true
   excludeLinksFromLockfile: false
 
+onlyBuiltDependencies:
+  - esbuild
+
 dependencies:
   chart.js:
-    specifier: ^4.4.3
-    version: 4.4.3
+    specifier: ^4.4.9
+    version: 4.4.9
 
 devDependencies:
   '@vuepress/bundler-vite':
-    specifier: 2.0.0-rc.9
-    version: 2.0.0-rc.9
+    specifier: 2.0.0-rc.23
+    version: 2.0.0-rc.23(sass-embedded@1.89.0)(sass@1.89.0)
+  '@vuepress/plugin-markdown-tab':
+    specifier: 2.0.0-rc.47
+    version: 2.0.0-rc.47(markdown-it@14.1.0)(vuepress@2.0.0-rc.23)
+  '@vuepress/plugin-slimsearch':
+    specifier: 2.0.0-rc.106
+    version: 2.0.0-rc.106(vuepress@2.0.0-rc.23)
   mermaid:
-    specifier: ^10.8.0
-    version: 10.9.1
+    specifier: ^11.6.0
+    version: 11.6.0
+  sass:
+    specifier: ^1.89.0
+    version: 1.89.0
+  sass-embedded:
+    specifier: ^1.88.0
+    version: 1.89.0
   sass-loader:
-    specifier: ^14.1.0
-    version: 14.2.1
+    specifier: ^16.0.5
+    version: 16.0.5(sass-embedded@1.89.0)(sass@1.89.0)
   vue:
-    specifier: ^3.4.15
-    version: 3.4.30
+    specifier: ^3.5.13
+    version: 3.5.15
   vuepress:
-    specifier: 2.0.0-rc.9
-    version: 2.0.0-rc.9(@vuepress/bundler-vite@2.0.0-rc.9)(vue@3.4.30)
-  vuepress-plugin-search-pro:
-    specifier: 2.0.0-rc.36
-    version: 2.0.0-rc.36(sass-loader@14.2.1)(vuepress@2.0.0-rc.9)
+    specifier: 2.0.0-rc.23
+    version: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15)
+  vuepress-plugin-md-enhance:
+    specifier: 2.0.0-rc.88
+    version: 2.0.0-rc.88(chart.js@4.4.9)(markdown-it@14.1.0)(mermaid@11.6.0)(sass-embedded@1.89.0)(sass-loader@16.0.5)(sass@1.89.0)(vuepress@2.0.0-rc.23)
   vuepress-theme-hope:
-    specifier: 2.0.0-rc.36
-    version: 2.0.0-rc.36(chart.js@4.4.3)(markdown-it@14.1.0)(mermaid@10.9.1)(sass-loader@14.2.1)(vuepress-plugin-search-pro@2.0.0-rc.36)(vuepress@2.0.0-rc.9)
+    specifier: 2.0.0-rc.88
+
version: 2.0.0-rc.88(@vuepress/plugin-slimsearch@2.0.0-rc.106)(chart.js@4.4.9)(markdown-it@14.1.0)(mermaid@11.6.0)(sass-embedded@1.89.0)(sass-loader@16.0.5)(sass@1.89.0)(vuepress@2.0.0-rc.23) packages: - /@babel/helper-string-parser@7.24.7: - resolution: {integrity: sha512-7MbVt6xrwFQbunH2DNQsAP5sTGxfqQtErvBIvIMi6EQnbgUOuVYanvREcmFrOPhoXBrTtjhhP+lW+o5UfK+tDg==} + /@antfu/install-pkg@1.1.0: + resolution: {integrity: sha512-MGQsmw10ZyI+EJo45CdSER4zEb+p31LpDAFp2Z3gkSd1yqVZGi0Ebx++YTEMonJy4oChEMLsxZ64j8FH6sSqtQ==} + dependencies: + package-manager-detector: 1.3.0 + tinyexec: 1.0.1 + dev: true + + /@antfu/utils@8.1.1: + resolution: {integrity: sha512-Mex9nXf9vR6AhcXmMrlz/HVgYYZpVGJ6YlPgwl7UnaFpnshXs6EK/oa5Gpf3CzENMjkvEx2tQtntGnb7UtSTOQ==} + dev: true + + /@babel/helper-string-parser@7.27.1: + resolution: {integrity: sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==} engines: {node: '>=6.9.0'} dev: true - /@babel/helper-validator-identifier@7.24.7: - resolution: {integrity: sha512-rR+PBcQ1SMQDDyF6X0wxtG8QyLCgUB0eRAGguqRLfkCA87l7yAP7ehq8SNj96OOGTO8OBV70KhuFYcIkHXOg0w==} + /@babel/helper-validator-identifier@7.27.1: + resolution: {integrity: sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==} engines: {node: '>=6.9.0'} dev: true - /@babel/parser@7.24.7: - resolution: {integrity: sha512-9uUYRm6OqQrCqQdG1iCBwBPZgN8ciDBro2nIOFaiRz1/BCxaI7CNvQbDHvsArAC7Tw9Hda/B3U+6ui9u4HWXPw==} + /@babel/parser@7.27.2: + resolution: {integrity: sha512-QYLs8299NA7WM/bZAdp+CviYYkVoYXlDW2rzliy3chxd1PQjej7JORuMJDJXJUb9g0TT+B99EwaVLKmX+sPXWw==} engines: {node: '>=6.0.0'} hasBin: true dependencies: - '@babel/types': 7.24.7 + '@babel/types': 7.27.1 dev: true - /@babel/types@7.24.7: - resolution: {integrity: sha512-XEFXSlxiG5td2EJRe8vOmRbaXVgfcBlszKujvVmWIK/UpywWljQCfzAv3RQCGujWQ1RD4YYWEAqDXfuJiy8f5Q==} + /@babel/types@7.27.1: + resolution: {integrity: sha512-+EzkxvLNfiUeKMgy/3luqfsCWFRXLb7U6wNQTk60tovuckwB15B191tJWvpp4HjiQWdJkCxO3Wbvc6jlk3Xb2Q==} engines: {node: '>=6.9.0'} dependencies: - '@babel/helper-string-parser': 7.24.7 - '@babel/helper-validator-identifier': 7.24.7 - to-fast-properties: 2.0.0 + '@babel/helper-string-parser': 7.27.1 + '@babel/helper-validator-identifier': 7.27.1 dev: true - /@braintree/sanitize-url@6.0.4: - resolution: {integrity: sha512-s3jaWicZd0pkP0jf5ysyHUI/RE7MHos6qlToFcGWXVp+ykHOy77OUMrfbgJ9it2C5bow7OIQwYYaHjk9XlBQ2A==} + /@braintree/sanitize-url@7.1.1: + resolution: {integrity: sha512-i1L7noDNxtFyL5DmZafWy1wRVhGehQmzZaz1HiN5e7iylJMSZR7ekOV7NsIqa5qBldlLrsKv4HbgFUVlQrz8Mw==} dev: true - /@esbuild/aix-ppc64@0.20.2: - resolution: {integrity: sha512-D+EBOJHXdNZcLJRBkhENNG8Wji2kgc9AZ9KiPr1JuZjsNtyHzrsfLRrY0tk2H2aoFu6RANO1y1iPPUCDYWkb5g==} - engines: {node: '>=12'} + /@bufbuild/protobuf@2.5.0: + resolution: {integrity: sha512-nniMblXT+dNyubek2OLKAYJnG/in4tmfS2c5CDnIvqfF9kFlERSG3FCBvmdqerpkWuPv0qhdGKReQ2OqKPG20w==} + dev: true + + /@chevrotain/cst-dts-gen@11.0.3: + resolution: {integrity: sha512-BvIKpRLeS/8UbfxXxgC33xOumsacaeCKAjAeLyOn7Pcp95HiRbrpl14S+9vaZLolnbssPIUuiUd8IvgkRyt6NQ==} + dependencies: + '@chevrotain/gast': 11.0.3 + '@chevrotain/types': 11.0.3 + lodash-es: 4.17.21 + dev: true + + /@chevrotain/gast@11.0.3: + resolution: {integrity: sha512-+qNfcoNk70PyS/uxmj3li5NiECO+2YKZZQMbmjTqRI3Qchu8Hig/Q9vgkHpI3alNjr7M+a2St5pw5w5F6NL5/Q==} + dependencies: + '@chevrotain/types': 11.0.3 + lodash-es: 4.17.21 + dev: true + + /@chevrotain/regexp-to-ast@11.0.3: + resolution: {integrity: 
sha512-1fMHaBZxLFvWI067AVbGJav1eRY7N8DDvYCTwGBiE/ytKBgP8azTdgyrKyWZ9Mfh09eHWb5PgTSO8wi7U824RA==} + dev: true + + /@chevrotain/types@11.0.3: + resolution: {integrity: sha512-gsiM3G8b58kZC2HaWR50gu6Y1440cHiJ+i3JUvcp/35JchYejb2+5MVeJK0iKThYpAa/P2PYFV4hoi44HD+aHQ==} + dev: true + + /@chevrotain/utils@11.0.3: + resolution: {integrity: sha512-YslZMgtJUyuMbZ+aKvfF3x1f5liK4mWNxghFRv7jqRR9C3R3fAOGTTKvxXDa2Y1s9zSbcpuO0cAxDYsc9SrXoQ==} + dev: true + + /@esbuild/aix-ppc64@0.25.4: + resolution: {integrity: sha512-1VCICWypeQKhVbE9oW/sJaAmjLxhVqacdkvPLEjwlttjfwENRSClS8EjBz0KzRyFSCPDIkuXW34Je/vk7zdB7Q==} + engines: {node: '>=18'} cpu: [ppc64] os: [aix] - requiresBuild: true dev: true optional: true - /@esbuild/android-arm64@0.20.2: - resolution: {integrity: sha512-mRzjLacRtl/tWU0SvD8lUEwb61yP9cqQo6noDZP/O8VkwafSYwZ4yWy24kan8jE/IMERpYncRt2dw438LP3Xmg==} - engines: {node: '>=12'} + /@esbuild/android-arm64@0.25.4: + resolution: {integrity: sha512-bBy69pgfhMGtCnwpC/x5QhfxAz/cBgQ9enbtwjf6V9lnPI/hMyT9iWpR1arm0l3kttTr4L0KSLpKmLp/ilKS9A==} + engines: {node: '>=18'} cpu: [arm64] os: [android] - requiresBuild: true dev: true optional: true - /@esbuild/android-arm@0.20.2: - resolution: {integrity: sha512-t98Ra6pw2VaDhqNWO2Oph2LXbz/EJcnLmKLGBJwEwXX/JAN83Fym1rU8l0JUWK6HkIbWONCSSatf4sf2NBRx/w==} - engines: {node: '>=12'} + /@esbuild/android-arm@0.25.4: + resolution: {integrity: sha512-QNdQEps7DfFwE3hXiU4BZeOV68HHzYwGd0Nthhd3uCkkEKK7/R6MTgM0P7H7FAs5pU/DIWsviMmEGxEoxIZ+ZQ==} + engines: {node: '>=18'} cpu: [arm] os: [android] - requiresBuild: true dev: true optional: true - /@esbuild/android-x64@0.20.2: - resolution: {integrity: sha512-btzExgV+/lMGDDa194CcUQm53ncxzeBrWJcncOBxuC6ndBkKxnHdFJn86mCIgTELsooUmwUm9FkhSp5HYu00Rg==} - engines: {node: '>=12'} + /@esbuild/android-x64@0.25.4: + resolution: {integrity: sha512-TVhdVtQIFuVpIIR282btcGC2oGQoSfZfmBdTip2anCaVYcqWlZXGcdcKIUklfX2wj0JklNYgz39OBqh2cqXvcQ==} + engines: {node: '>=18'} cpu: [x64] os: [android] - requiresBuild: true dev: true optional: true - /@esbuild/darwin-arm64@0.20.2: - resolution: {integrity: sha512-4J6IRT+10J3aJH3l1yzEg9y3wkTDgDk7TSDFX+wKFiWjqWp/iCfLIYzGyasx9l0SAFPT1HwSCR+0w/h1ES/MjA==} - engines: {node: '>=12'} + /@esbuild/darwin-arm64@0.25.4: + resolution: {integrity: sha512-Y1giCfM4nlHDWEfSckMzeWNdQS31BQGs9/rouw6Ub91tkK79aIMTH3q9xHvzH8d0wDru5Ci0kWB8b3up/nl16g==} + engines: {node: '>=18'} cpu: [arm64] os: [darwin] - requiresBuild: true dev: true optional: true - /@esbuild/darwin-x64@0.20.2: - resolution: {integrity: sha512-tBcXp9KNphnNH0dfhv8KYkZhjc+H3XBkF5DKtswJblV7KlT9EI2+jeA8DgBjp908WEuYll6pF+UStUCfEpdysA==} - engines: {node: '>=12'} + /@esbuild/darwin-x64@0.25.4: + resolution: {integrity: sha512-CJsry8ZGM5VFVeyUYB3cdKpd/H69PYez4eJh1W/t38vzutdjEjtP7hB6eLKBoOdxcAlCtEYHzQ/PJ/oU9I4u0A==} + engines: {node: '>=18'} cpu: [x64] os: [darwin] - requiresBuild: true dev: true optional: true - /@esbuild/freebsd-arm64@0.20.2: - resolution: {integrity: sha512-d3qI41G4SuLiCGCFGUrKsSeTXyWG6yem1KcGZVS+3FYlYhtNoNgYrWcvkOoaqMhwXSMrZRl69ArHsGJ9mYdbbw==} - engines: {node: '>=12'} + /@esbuild/freebsd-arm64@0.25.4: + resolution: {integrity: sha512-yYq+39NlTRzU2XmoPW4l5Ifpl9fqSk0nAJYM/V/WUGPEFfek1epLHJIkTQM6bBs1swApjO5nWgvr843g6TjxuQ==} + engines: {node: '>=18'} cpu: [arm64] os: [freebsd] - requiresBuild: true dev: true optional: true - /@esbuild/freebsd-x64@0.20.2: - resolution: {integrity: sha512-d+DipyvHRuqEeM5zDivKV1KuXn9WeRX6vqSqIDgwIfPQtwMP4jaDsQsDncjTDDsExT4lR/91OLjRo8bmC1e+Cw==} - engines: {node: '>=12'} + /@esbuild/freebsd-x64@0.25.4: + resolution: {integrity: 
sha512-0FgvOJ6UUMflsHSPLzdfDnnBBVoCDtBTVyn/MrWloUNvq/5SFmh13l3dvgRPkDihRxb77Y17MbqbCAa2strMQQ==} + engines: {node: '>=18'} cpu: [x64] os: [freebsd] - requiresBuild: true dev: true optional: true - /@esbuild/linux-arm64@0.20.2: - resolution: {integrity: sha512-9pb6rBjGvTFNira2FLIWqDk/uaf42sSyLE8j1rnUpuzsODBq7FvpwHYZxQ/It/8b+QOS1RYfqgGFNLRI+qlq2A==} - engines: {node: '>=12'} + /@esbuild/linux-arm64@0.25.4: + resolution: {integrity: sha512-+89UsQTfXdmjIvZS6nUnOOLoXnkUTB9hR5QAeLrQdzOSWZvNSAXAtcRDHWtqAUtAmv7ZM1WPOOeSxDzzzMogiQ==} + engines: {node: '>=18'} cpu: [arm64] os: [linux] - requiresBuild: true dev: true optional: true - /@esbuild/linux-arm@0.20.2: - resolution: {integrity: sha512-VhLPeR8HTMPccbuWWcEUD1Az68TqaTYyj6nfE4QByZIQEQVWBB8vup8PpR7y1QHL3CpcF6xd5WVBU/+SBEvGTg==} - engines: {node: '>=12'} + /@esbuild/linux-arm@0.25.4: + resolution: {integrity: sha512-kro4c0P85GMfFYqW4TWOpvmF8rFShbWGnrLqlzp4X1TNWjRY3JMYUfDCtOxPKOIY8B0WC8HN51hGP4I4hz4AaQ==} + engines: {node: '>=18'} cpu: [arm] os: [linux] - requiresBuild: true dev: true optional: true - /@esbuild/linux-ia32@0.20.2: - resolution: {integrity: sha512-o10utieEkNPFDZFQm9CoP7Tvb33UutoJqg3qKf1PWVeeJhJw0Q347PxMvBgVVFgouYLGIhFYG0UGdBumROyiig==} - engines: {node: '>=12'} + /@esbuild/linux-ia32@0.25.4: + resolution: {integrity: sha512-yTEjoapy8UP3rv8dB0ip3AfMpRbyhSN3+hY8mo/i4QXFeDxmiYbEKp3ZRjBKcOP862Ua4b1PDfwlvbuwY7hIGQ==} + engines: {node: '>=18'} cpu: [ia32] os: [linux] - requiresBuild: true dev: true optional: true - /@esbuild/linux-loong64@0.20.2: - resolution: {integrity: sha512-PR7sp6R/UC4CFVomVINKJ80pMFlfDfMQMYynX7t1tNTeivQ6XdX5r2XovMmha/VjR1YN/HgHWsVcTRIMkymrgQ==} - engines: {node: '>=12'} + /@esbuild/linux-loong64@0.25.4: + resolution: {integrity: sha512-NeqqYkrcGzFwi6CGRGNMOjWGGSYOpqwCjS9fvaUlX5s3zwOtn1qwg1s2iE2svBe4Q/YOG1q6875lcAoQK/F4VA==} + engines: {node: '>=18'} cpu: [loong64] os: [linux] - requiresBuild: true dev: true optional: true - /@esbuild/linux-mips64el@0.20.2: - resolution: {integrity: sha512-4BlTqeutE/KnOiTG5Y6Sb/Hw6hsBOZapOVF6njAESHInhlQAghVVZL1ZpIctBOoTFbQyGW+LsVYZ8lSSB3wkjA==} - engines: {node: '>=12'} + /@esbuild/linux-mips64el@0.25.4: + resolution: {integrity: sha512-IcvTlF9dtLrfL/M8WgNI/qJYBENP3ekgsHbYUIzEzq5XJzzVEV/fXY9WFPfEEXmu3ck2qJP8LG/p3Q8f7Zc2Xg==} + engines: {node: '>=18'} cpu: [mips64el] os: [linux] - requiresBuild: true dev: true optional: true - /@esbuild/linux-ppc64@0.20.2: - resolution: {integrity: sha512-rD3KsaDprDcfajSKdn25ooz5J5/fWBylaaXkuotBDGnMnDP1Uv5DLAN/45qfnf3JDYyJv/ytGHQaziHUdyzaAg==} - engines: {node: '>=12'} + /@esbuild/linux-ppc64@0.25.4: + resolution: {integrity: sha512-HOy0aLTJTVtoTeGZh4HSXaO6M95qu4k5lJcH4gxv56iaycfz1S8GO/5Jh6X4Y1YiI0h7cRyLi+HixMR+88swag==} + engines: {node: '>=18'} cpu: [ppc64] os: [linux] - requiresBuild: true dev: true optional: true - /@esbuild/linux-riscv64@0.20.2: - resolution: {integrity: sha512-snwmBKacKmwTMmhLlz/3aH1Q9T8v45bKYGE3j26TsaOVtjIag4wLfWSiZykXzXuE1kbCE+zJRmwp+ZbIHinnVg==} - engines: {node: '>=12'} + /@esbuild/linux-riscv64@0.25.4: + resolution: {integrity: sha512-i8JUDAufpz9jOzo4yIShCTcXzS07vEgWzyX3NH2G7LEFVgrLEhjwL3ajFE4fZI3I4ZgiM7JH3GQ7ReObROvSUA==} + engines: {node: '>=18'} cpu: [riscv64] os: [linux] - requiresBuild: true dev: true optional: true - /@esbuild/linux-s390x@0.20.2: - resolution: {integrity: sha512-wcWISOobRWNm3cezm5HOZcYz1sKoHLd8VL1dl309DiixxVFoFe/o8HnwuIwn6sXre88Nwj+VwZUvJf4AFxkyrQ==} - engines: {node: '>=12'} + /@esbuild/linux-s390x@0.25.4: + resolution: {integrity: 
sha512-jFnu+6UbLlzIjPQpWCNh5QtrcNfMLjgIavnwPQAfoGx4q17ocOU9MsQ2QVvFxwQoWpZT8DvTLooTvmOQXkO51g==} + engines: {node: '>=18'} cpu: [s390x] os: [linux] - requiresBuild: true dev: true optional: true - /@esbuild/linux-x64@0.20.2: - resolution: {integrity: sha512-1MdwI6OOTsfQfek8sLwgyjOXAu+wKhLEoaOLTjbijk6E2WONYpH9ZU2mNtR+lZ2B4uwr+usqGuVfFT9tMtGvGw==} - engines: {node: '>=12'} + /@esbuild/linux-x64@0.25.4: + resolution: {integrity: sha512-6e0cvXwzOnVWJHq+mskP8DNSrKBr1bULBvnFLpc1KY+d+irZSgZ02TGse5FsafKS5jg2e4pbvK6TPXaF/A6+CA==} + engines: {node: '>=18'} cpu: [x64] os: [linux] - requiresBuild: true dev: true optional: true - /@esbuild/netbsd-x64@0.20.2: - resolution: {integrity: sha512-K8/DhBxcVQkzYc43yJXDSyjlFeHQJBiowJ0uVL6Tor3jGQfSGHNNJcWxNbOI8v5k82prYqzPuwkzHt3J1T1iZQ==} - engines: {node: '>=12'} + /@esbuild/netbsd-arm64@0.25.4: + resolution: {integrity: sha512-vUnkBYxZW4hL/ie91hSqaSNjulOnYXE1VSLusnvHg2u3jewJBz3YzB9+oCw8DABeVqZGg94t9tyZFoHma8gWZQ==} + engines: {node: '>=18'} + cpu: [arm64] + os: [netbsd] + dev: true + optional: true + + /@esbuild/netbsd-x64@0.25.4: + resolution: {integrity: sha512-XAg8pIQn5CzhOB8odIcAm42QsOfa98SBeKUdo4xa8OvX8LbMZqEtgeWE9P/Wxt7MlG2QqvjGths+nq48TrUiKw==} + engines: {node: '>=18'} cpu: [x64] os: [netbsd] - requiresBuild: true dev: true optional: true - /@esbuild/openbsd-x64@0.20.2: - resolution: {integrity: sha512-eMpKlV0SThJmmJgiVyN9jTPJ2VBPquf6Kt/nAoo6DgHAoN57K15ZghiHaMvqjCye/uU4X5u3YSMgVBI1h3vKrQ==} - engines: {node: '>=12'} + /@esbuild/openbsd-arm64@0.25.4: + resolution: {integrity: sha512-Ct2WcFEANlFDtp1nVAXSNBPDxyU+j7+tId//iHXU2f/lN5AmO4zLyhDcpR5Cz1r08mVxzt3Jpyt4PmXQ1O6+7A==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openbsd] + dev: true + optional: true + + /@esbuild/openbsd-x64@0.25.4: + resolution: {integrity: sha512-xAGGhyOQ9Otm1Xu8NT1ifGLnA6M3sJxZ6ixylb+vIUVzvvd6GOALpwQrYrtlPouMqd/vSbgehz6HaVk4+7Afhw==} + engines: {node: '>=18'} cpu: [x64] os: [openbsd] - requiresBuild: true dev: true optional: true - /@esbuild/sunos-x64@0.20.2: - resolution: {integrity: sha512-2UyFtRC6cXLyejf/YEld4Hajo7UHILetzE1vsRcGL3earZEW77JxrFjH4Ez2qaTiEfMgAXxfAZCm1fvM/G/o8w==} - engines: {node: '>=12'} + /@esbuild/sunos-x64@0.25.4: + resolution: {integrity: sha512-Mw+tzy4pp6wZEK0+Lwr76pWLjrtjmJyUB23tHKqEDP74R3q95luY/bXqXZeYl4NYlvwOqoRKlInQialgCKy67Q==} + engines: {node: '>=18'} cpu: [x64] os: [sunos] - requiresBuild: true dev: true optional: true - /@esbuild/win32-arm64@0.20.2: - resolution: {integrity: sha512-GRibxoawM9ZCnDxnP3usoUDO9vUkpAxIIZ6GQI+IlVmr5kP3zUq+l17xELTHMWTWzjxa2guPNyrpq1GWmPvcGQ==} - engines: {node: '>=12'} + /@esbuild/win32-arm64@0.25.4: + resolution: {integrity: sha512-AVUP428VQTSddguz9dO9ngb+E5aScyg7nOeJDrF1HPYu555gmza3bDGMPhmVXL8svDSoqPCsCPjb265yG/kLKQ==} + engines: {node: '>=18'} cpu: [arm64] os: [win32] - requiresBuild: true dev: true optional: true - /@esbuild/win32-ia32@0.20.2: - resolution: {integrity: sha512-HfLOfn9YWmkSKRQqovpnITazdtquEW8/SoHW7pWpuEeguaZI4QnCRW6b+oZTztdBnZOS2hqJ6im/D5cPzBTTlQ==} - engines: {node: '>=12'} + /@esbuild/win32-ia32@0.25.4: + resolution: {integrity: sha512-i1sW+1i+oWvQzSgfRcxxG2k4I9n3O9NRqy8U+uugaT2Dy7kLO9Y7wI72haOahxceMX8hZAzgGou1FhndRldxRg==} + engines: {node: '>=18'} cpu: [ia32] os: [win32] - requiresBuild: true dev: true optional: true - /@esbuild/win32-x64@0.20.2: - resolution: {integrity: sha512-N49X4lJX27+l9jbLKSqZ6bKNjzQvHaT8IIFUy+YIqmXQdjYCToGWwOItDrfby14c78aDd5NHQl29xingXfCdLQ==} - engines: {node: '>=12'} + /@esbuild/win32-x64@0.25.4: + resolution: {integrity: 
sha512-nOT2vZNw6hJ+z43oP1SPea/G/6AbN6X+bGNhNuq8NtRHy4wsMhw765IKLNmnjek7GvjWBYQ8Q5VBoYTFg9y1UQ==} + engines: {node: '>=18'} cpu: [x64] os: [win32] - requiresBuild: true dev: true optional: true - /@jridgewell/sourcemap-codec@1.4.15: - resolution: {integrity: sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==} + /@iconify/types@2.0.0: + resolution: {integrity: sha512-+wluvCrRhXrhyOmRDJ3q8mux9JkKy5SJ/v8ol2tu4FVjyYvtEzkc/3pK15ET6RKg4b4w4BmTk1+gsCUhf21Ykg==} + dev: true + + /@iconify/utils@2.3.0: + resolution: {integrity: sha512-GmQ78prtwYW6EtzXRU1rY+KwOKfz32PD7iJh6Iyqw68GiKuoZ2A6pRtzWONz5VQJbp50mEjXh/7NkumtrAgRKA==} + dependencies: + '@antfu/install-pkg': 1.1.0 + '@antfu/utils': 8.1.1 + '@iconify/types': 2.0.0 + debug: 4.4.1 + globals: 15.15.0 + kolorist: 1.8.0 + local-pkg: 1.1.1 + mlly: 1.7.4 + transitivePeerDependencies: + - supports-color + dev: true + + /@jridgewell/sourcemap-codec@1.5.0: + resolution: {integrity: sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==} dev: true - /@kurkle/color@0.3.2: - resolution: {integrity: sha512-fuscdXJ9G1qb7W8VdHi+IwRqij3lBkosAm4ydQtEmbY58OzHXqQhvlxqEkoz0yssNVn38bcpRWgA9PP+OGoisw==} + /@kurkle/color@0.3.4: + resolution: {integrity: sha512-M5UknZPHRu3DEDWoipU6sE8PdkZ6Z/S+v4dD+Ke8IaNlpdSQah50lz1KtcFBa2vsdOnwbbnxJwVM4wty6udA5w==} - /@lit-labs/ssr-dom-shim@1.2.0: - resolution: {integrity: sha512-yWJKmpGE6lUURKAaIltoPIE/wrbY3TEkqQt+X0m+7fQNnAv0keydnYvbiJFP1PnMhizmIWRWOG5KLhYyc/xl+g==} + /@lit-labs/ssr-dom-shim@1.3.0: + resolution: {integrity: sha512-nQIWonJ6eFAvUUrSlwyHDm/aE8PBDu5kRpL0vHMg6K8fK3Diq1xdPjTnsJSwxABhaZ+5eBi1btQB5ShUTKo4nQ==} dev: true - /@lit/reactive-element@2.0.4: - resolution: {integrity: sha512-GFn91inaUa2oHLak8awSIigYz0cU0Payr1rcFsrkf5OJ5eSPxElyZfKh0f2p9FsTiZWXQdWGJeXZICEfXXYSXQ==} + /@lit/reactive-element@2.1.0: + resolution: {integrity: sha512-L2qyoZSQClcBmq0qajBVbhYEcG6iK0XfLn66ifLe/RfC0/ihpc+pl0Wdn8bJ8o+hj38cG0fGXRgSS20MuXn7qA==} dependencies: - '@lit-labs/ssr-dom-shim': 1.2.0 + '@lit-labs/ssr-dom-shim': 1.3.0 dev: true - /@mdit-vue/plugin-component@2.1.3: - resolution: {integrity: sha512-9AG17beCgpEw/4ldo/M6Y/1Rh4E1bqMmr/rCkWKmCAxy9tJz3lzY7HQJanyHMJufwsb3WL5Lp7Om/aPcQTZ9SA==} + /@mdit-vue/plugin-component@2.1.4: + resolution: {integrity: sha512-fiLbwcaE6gZE4c8Mkdkc4X38ltXh/EdnuPE1hepFT2dLiW6I4X8ho2Wq7nhYuT8RmV4OKlCFENwCuXlKcpV/sw==} dependencies: - '@types/markdown-it': 14.1.1 + '@types/markdown-it': 14.1.2 markdown-it: 14.1.0 dev: true - /@mdit-vue/plugin-frontmatter@2.1.3: - resolution: {integrity: sha512-KxsSCUVBEmn6sJcchSTiI5v9bWaoRxe68RBYRDGcSEY1GTnfQ5gQPMIsM48P4q1luLEIWurVGGrRu7u93//LDQ==} + /@mdit-vue/plugin-frontmatter@2.1.4: + resolution: {integrity: sha512-mOlavV176njnozIf0UZGFYymmQ2LK5S1rjrbJ1uGz4Df59tu0DQntdE7YZXqmJJA9MiSx7ViCTUQCNPKg7R8Ow==} dependencies: - '@mdit-vue/types': 2.1.0 - '@types/markdown-it': 14.1.1 + '@mdit-vue/types': 2.1.4 + '@types/markdown-it': 14.1.2 gray-matter: 4.0.3 markdown-it: 14.1.0 dev: true - /@mdit-vue/plugin-headers@2.1.3: - resolution: {integrity: sha512-AcL7a7LHQR3ISINhfjGJNE/bHyM0dcl6MYm1Sr//zF7ZgokPGwD/HhD7TzwmrKA9YNYCcO9P3QmF/RN9XyA6CA==} + /@mdit-vue/plugin-headers@2.1.4: + resolution: {integrity: sha512-tyZwGZu2mYkNSqigFP1CK3aZYxuYwrqcrIh8ljd8tfD1UDPJkAbQeayq62U572po2IuWVB1BqIG8JIXp5POOTA==} dependencies: - '@mdit-vue/shared': 2.1.3 - '@mdit-vue/types': 2.1.0 - '@types/markdown-it': 14.1.1 + '@mdit-vue/shared': 2.1.4 + '@mdit-vue/types': 2.1.4 + '@types/markdown-it': 14.1.2 markdown-it: 14.1.0 dev: 
true - /@mdit-vue/plugin-sfc@2.1.3: - resolution: {integrity: sha512-Ezl0dNvQNS639Yl4siXm+cnWtQvlqHrg+u+lnau/OHpj9Xh3LVap/BSQVugKIV37eR13jXXYf3VaAOP1fXPN+w==} + /@mdit-vue/plugin-sfc@2.1.4: + resolution: {integrity: sha512-oqAlMulkz280xUJIkormzp6Ps0x5WULZrwRivylWJWDEyVAFCj5VgR3Dx6CP2jdgyuPXwW3+gh2Kzw+Xe+kEIQ==} dependencies: - '@mdit-vue/types': 2.1.0 - '@types/markdown-it': 14.1.1 + '@mdit-vue/types': 2.1.4 + '@types/markdown-it': 14.1.2 markdown-it: 14.1.0 dev: true - /@mdit-vue/plugin-title@2.1.3: - resolution: {integrity: sha512-XWVOQoZqczoN97xCDrnQicmXKoqwOjIymIm9HQnRXhHnYKOgJPW1CxSGhkcOGzvDU1v0mD/adojVyyj/s6ggWw==} + /@mdit-vue/plugin-title@2.1.4: + resolution: {integrity: sha512-uuF24gJvvLVIWG/VBtCDRqMndfd5JzOXoBoHPdKKLk3PA4P84dsB0u0NnnBUEl/YBOumdCotasn7OfFMmco9uQ==} dependencies: - '@mdit-vue/shared': 2.1.3 - '@mdit-vue/types': 2.1.0 - '@types/markdown-it': 14.1.1 + '@mdit-vue/shared': 2.1.4 + '@mdit-vue/types': 2.1.4 + '@types/markdown-it': 14.1.2 markdown-it: 14.1.0 dev: true - /@mdit-vue/plugin-toc@2.1.3: - resolution: {integrity: sha512-41Q+iXpLHZt0zJdApVwoVt7WF6za/xUjtjEPf90Z3KLzQO01TXsv48Xp9BsrFHPcPcm8tiZ0+O1/ICJO80V/MQ==} + /@mdit-vue/plugin-toc@2.1.4: + resolution: {integrity: sha512-vvOU7u6aNmvPwKXzmoHion1sv4zChBp20LDpSHlRlXc3btLwdYIA0DR+UiO5YeyLUAO0XSHQKBpsIWi57K9/3w==} dependencies: - '@mdit-vue/shared': 2.1.3 - '@mdit-vue/types': 2.1.0 - '@types/markdown-it': 14.1.1 + '@mdit-vue/shared': 2.1.4 + '@mdit-vue/types': 2.1.4 + '@types/markdown-it': 14.1.2 markdown-it: 14.1.0 dev: true - /@mdit-vue/shared@2.1.3: - resolution: {integrity: sha512-27YI8b0VVZsAlNwaWoaOCWbr4eL8B04HxiYk/y2ktblO/nMcOEOLt4p0RjuobvdyUyjHvGOS09RKhq7qHm1CHQ==} + /@mdit-vue/shared@2.1.4: + resolution: {integrity: sha512-Axd8g2iKQTMuHcPXZH5JY3hbSMeLyoeu0ftdgMrjuPzHpJnWiPSAnA0dAx5NQFQqZkXHhyIrAssLSrOWjFmPKg==} dependencies: - '@mdit-vue/types': 2.1.0 - '@types/markdown-it': 14.1.1 + '@mdit-vue/types': 2.1.4 + '@types/markdown-it': 14.1.2 markdown-it: 14.1.0 dev: true - /@mdit-vue/types@2.1.0: - resolution: {integrity: sha512-TMBB/BQWVvwtpBdWD75rkZx4ZphQ6MN0O4QB2Bc0oI5PC2uE57QerhNxdRZ7cvBHE2iY2C+BUNUziCfJbjIRRA==} + /@mdit-vue/types@2.1.4: + resolution: {integrity: sha512-QiGNZslz+zXUs2X8D11UQhB4KAMZ0DZghvYxa7+1B+VMLcDtz//XHpWbcuexjzE3kBXSxIUTPH3eSQCa0puZHA==} + dev: true + + /@mdit/helper@0.18.0(markdown-it@14.1.0): + resolution: {integrity: sha512-/4w+hKHmJUutRhmwX8w7dpYW4lgaNXW055m/x+apvemLGlDoRd3VZbAR5Gt0zWdkE0l4b5FWqbydiig9Sgj5gQ==} + engines: {node: '>= 18'} + peerDependencies: + markdown-it: ^14.1.0 + peerDependenciesMeta: + markdown-it: + optional: true + dependencies: + '@types/markdown-it': 14.1.2 + markdown-it: 14.1.0 dev: true - /@mdit/plugin-alert@0.8.0(markdown-it@14.1.0): - resolution: {integrity: sha512-mxA/lhOyDDR6/qSAegGG/XZRjUbr1wjwdULudbpkA/CCQi6piW9D0Z8crDQGYz4KPQM9Bgx4Ac81QFSzHOV66Q==} + /@mdit/plugin-alert@0.18.0(markdown-it@14.1.0): + resolution: {integrity: sha512-nm6BJPZG6ux6hTUGstKEDL14AWwMTxTU7mxZFKUVqC/qDgCgmzeoFINE4N+4mrDKAnAF5uF5APfIZCh481PnaQ==} peerDependencies: - markdown-it: ^14.0.0 + markdown-it: ^14.1.0 peerDependenciesMeta: markdown-it: optional: true dependencies: - '@types/markdown-it': 13.0.8 + '@types/markdown-it': 14.1.2 markdown-it: 14.1.0 dev: true - /@mdit/plugin-align@0.8.0(markdown-it@14.1.0): - resolution: {integrity: sha512-OJPYzSdmT0UZj/QTvnKYE4GelAL0OD8bNIPxpidXbFd3IqYv/8+xMjT6XeR+R3oZEvtbYSc2e1MmO5fo3DopJA==} + /@mdit/plugin-align@0.18.0(markdown-it@14.1.0): + resolution: {integrity: 
sha512-93znJvVPOx1NY88Q1GLDSsPa272CPlXwrHTLiuWgBfKwbFQ016KKtrI5AyUgpA//BGv9QiTgYZI+mUR6MKdBgg==} engines: {node: '>= 18'} peerDependencies: - markdown-it: ^14.0.0 + markdown-it: ^14.1.0 peerDependenciesMeta: markdown-it: optional: true dependencies: - '@mdit/plugin-container': 0.8.0(markdown-it@14.1.0) - '@types/markdown-it': 13.0.8 + '@mdit/plugin-container': 0.18.0(markdown-it@14.1.0) + '@types/markdown-it': 14.1.2 markdown-it: 14.1.0 dev: true - /@mdit/plugin-attrs@0.8.0(markdown-it@14.1.0): - resolution: {integrity: sha512-ewmx5i+b3M4CRJNDpDNBA0YTHa1snn+adDsDDpDtPPSzCH1NhtWXdzwI0TrcCQUnueeSEEWX/wY4ESo+NRkBNQ==} + /@mdit/plugin-attrs@0.18.0(markdown-it@14.1.0): + resolution: {integrity: sha512-WyjzLxKYuJYgcw9ydhVRzbcGLQ7h6CFZmoXZvou/K7fIJVF0XcAJ03uKPIRqp9F44qr+n3mzj0hUc4CnUMZtsQ==} engines: {node: '>= 18'} peerDependencies: - markdown-it: ^14.0.0 + markdown-it: ^14.1.0 peerDependenciesMeta: markdown-it: optional: true dependencies: - '@types/markdown-it': 13.0.8 + '@mdit/helper': 0.18.0(markdown-it@14.1.0) + '@types/markdown-it': 14.1.2 markdown-it: 14.1.0 dev: true - /@mdit/plugin-container@0.8.0(markdown-it@14.1.0): - resolution: {integrity: sha512-uWK3t0CWssintcmT5PTJVhAwbstcD+SrtijQKs6BhLRtGGgHJ9mOf0ybGjlJhn4077yFFTHmaCIT3K+n5ZVjPg==} + /@mdit/plugin-container@0.18.0(markdown-it@14.1.0): + resolution: {integrity: sha512-lNXFxhgPU44UmrElp5oRUGUYx4q0Nkta6BYDC7tYIzqk3BBJLccBMv2iI0Hejz+LFTRytyMUBAuxh/F+i1DsGw==} engines: {node: '>= 18'} peerDependencies: - markdown-it: ^14.0.0 + markdown-it: ^14.1.0 peerDependenciesMeta: markdown-it: optional: true dependencies: - '@types/markdown-it': 13.0.8 + '@types/markdown-it': 14.1.2 markdown-it: 14.1.0 dev: true - /@mdit/plugin-demo@0.8.0(markdown-it@14.1.0): - resolution: {integrity: sha512-yFRXnp3Lj0g4H9ImzHKQwwgtSykrL/BDNEQzql9fdA9FbSygfu0CIxfm+A8lsVos8cAvdsgxy3gILySxpfR89g==} + /@mdit/plugin-demo@0.18.0(markdown-it@14.1.0): + resolution: {integrity: sha512-oWrADUZmkFm0YTFKg8NT/YRDekL5I+xreryiwyFldVo7WyXDUxLJ7ae4TLHwU4KV2heCm3A1jTtPNORzQewUmA==} peerDependencies: - markdown-it: ^14.0.0 + markdown-it: ^14.1.0 peerDependenciesMeta: markdown-it: optional: true dependencies: - '@types/markdown-it': 13.0.8 + '@types/markdown-it': 14.1.2 markdown-it: 14.1.0 dev: true - /@mdit/plugin-figure@0.8.0(markdown-it@14.1.0): - resolution: {integrity: sha512-/o4RoKjnkdWc+K7m6mR7BAu2J79yYE38s8HUc8iKk9v+e9j1E+6LeXcpx1LoPnHzUhT4EO2QmUsv+kAaPFfZYw==} + /@mdit/plugin-figure@0.18.0(markdown-it@14.1.0): + resolution: {integrity: sha512-LRSNtPDZJbCPtiMOEcSS2pPL9rN2s//MWlL5lvc/EF1XlB/ZMO7Huj9dqQZ/I34vNR+TlZtwt1WifAh2GSKZBw==} engines: {node: '>= 18'} peerDependencies: - markdown-it: ^14.0.0 + markdown-it: ^14.1.0 peerDependenciesMeta: markdown-it: optional: true dependencies: - '@types/markdown-it': 13.0.8 + '@types/markdown-it': 14.1.2 markdown-it: 14.1.0 dev: true - /@mdit/plugin-footnote@0.8.0(markdown-it@14.1.0): - resolution: {integrity: sha512-AaX1rfkJwq9vLX+H/a+XQ3ZxahOXrnMLr5dVZfNdazjqdDEJ7Cc/A7UFtLfOM19F2w3EgvcHR1gbINxIVDn/eg==} + /@mdit/plugin-footnote@0.18.0(markdown-it@14.1.0): + resolution: {integrity: sha512-Qr99rLRBEAmcFo2rPtmdpZNrTK4cVH4cwSutLu2v1fWxsXP6Hfks1O8cmkRGKbcsBE2mqh1r3yo0w178zDO/cQ==} engines: {node: '>= 18'} peerDependencies: - markdown-it: ^14.0.0 + markdown-it: ^14.1.0 + dependencies: + '@types/markdown-it': 14.1.2 + markdown-it: 14.1.0 + dev: true + + /@mdit/plugin-icon@0.18.0(markdown-it@14.1.0): + resolution: {integrity: sha512-w7RA1vqP/ZumkKLSRGDm5D7bXFJJkD+VBtAurgR4J/slATcZWAmAGBUC/dq2tp1UueNGg0kJfrKvcqQMdxXqTA==} + peerDependencies: 
+ markdown-it: ^14.1.0 + peerDependenciesMeta: + markdown-it: + optional: true dependencies: - '@types/markdown-it': 13.0.8 + '@mdit/helper': 0.18.0(markdown-it@14.1.0) + '@types/markdown-it': 14.1.2 markdown-it: 14.1.0 dev: true - /@mdit/plugin-img-lazyload@0.8.0(markdown-it@14.1.0): - resolution: {integrity: sha512-Rrlf2FzOxxyszbv3DpkIwEgmYKmtwHdxIO+Whkn0a9QckxnEKkaGl5KARCnM7LqX2fhEyFLgnfkr3onVOJG54g==} + /@mdit/plugin-img-lazyload@0.18.0(markdown-it@14.1.0): + resolution: {integrity: sha512-X2gbtWTHfW7NdPW2ceQrCnLZ9D/xvY1TTSPa3GBa2+7vR02bL36kRBIXTEyS0anC08R+7FHBN2uJljk8qQ2AJw==} engines: {node: '>= 18'} peerDependencies: - markdown-it: ^14.0.0 + markdown-it: ^14.1.0 peerDependenciesMeta: markdown-it: optional: true dependencies: - '@types/markdown-it': 13.0.8 + '@types/markdown-it': 14.1.2 markdown-it: 14.1.0 dev: true - /@mdit/plugin-img-mark@0.8.0(markdown-it@14.1.0): - resolution: {integrity: sha512-4P6z2QOfLHLMSXUP4mB/2Rnd6KeHmJBkUXJWJhybcXoIG5S5FDTFHJxOycSP4eGzfdOYAWSlkx6XwXEUGGZz5w==} + /@mdit/plugin-img-mark@0.18.0(markdown-it@14.1.0): + resolution: {integrity: sha512-dLPOuUUcP/tlOVRTWJYeIhcFLE4DQgFovDE0ojlGZS/nS5YYjZGELujADHL2zUruCe99uJqAbHzTPeQvUsWckw==} engines: {node: '>= 18'} peerDependencies: - markdown-it: ^14.0.0 + markdown-it: ^14.1.0 peerDependenciesMeta: markdown-it: optional: true dependencies: - '@types/markdown-it': 13.0.8 + '@types/markdown-it': 14.1.2 markdown-it: 14.1.0 dev: true - /@mdit/plugin-img-size@0.8.0(markdown-it@14.1.0): - resolution: {integrity: sha512-r+LbAizP/hw5SisY44VbHEnR7XUKpcHM2k2fwu5wb1+V1crxeigG4sa8rzrJEddU+k6uCl27yL5FTGbHjAl82Q==} + /@mdit/plugin-img-size@0.18.1(markdown-it@14.1.0): + resolution: {integrity: sha512-jomprQOuv2xEe4CDbDA+KmRlxmUhOF5psXvl+sjgNmxR2+So6q7hbPLJVTVV3Z2jK5sXmogOPQz0Wf7h72LNHA==} engines: {node: '>= 18'} peerDependencies: - markdown-it: ^14.0.0 + markdown-it: ^14.1.0 peerDependenciesMeta: markdown-it: optional: true dependencies: - '@types/markdown-it': 13.0.8 + '@types/markdown-it': 14.1.2 markdown-it: 14.1.0 dev: true - /@mdit/plugin-include@0.8.0(markdown-it@14.1.0): - resolution: {integrity: sha512-e8Z8q5VkJ6UX04tTgELraupB/MdHio7hkdYT71wBJ6UQuhSmFv/xMOxFfTcGKH5yzsbEM45BtAFHzSXIi3dMCw==} + /@mdit/plugin-include@0.18.0(markdown-it@14.1.0): + resolution: {integrity: sha512-OsxTbORV3ZcmqIebAxV6Rxm9FFykGY1UePLJIffI46JYVZf6Nv6E1ArtHbz5ntr0sEKPifDXQtkmksl2SX9UKA==} peerDependencies: - markdown-it: ^14.0.0 + markdown-it: ^14.1.0 peerDependenciesMeta: markdown-it: optional: true dependencies: - '@types/markdown-it': 13.0.8 + '@mdit/helper': 0.18.0(markdown-it@14.1.0) + '@types/markdown-it': 14.1.2 markdown-it: 14.1.0 upath: 2.0.1 dev: true - /@mdit/plugin-katex@0.8.0(markdown-it@14.1.0): - resolution: {integrity: sha512-u7CX3Xv5nuc2bu2sHrk1nil83/9ETKTBMmy0icbW8zlqBC0ykLo1xTCEBXmdhXtnJtPi9f/wUZVs6iMZrJzbNg==} + /@mdit/plugin-katex-slim@0.18.0(markdown-it@14.1.0): + resolution: {integrity: sha512-VQE4UkTIV2Uv7Jzm2EnrvJamjkUXKnzHgpYpdVuLk954kwPTmSkBqGstly9G7C6CL34tqulpyBA2Pq90zHu79Q==} engines: {node: '>= 18'} peerDependencies: katex: ^0.16.9 - markdown-it: ^14.0.0 + markdown-it: ^14.1.0 peerDependenciesMeta: katex: optional: true markdown-it: optional: true dependencies: - '@mdit/plugin-tex': 0.8.0(markdown-it@14.1.0) - '@types/katex': 0.16.7 - '@types/markdown-it': 13.0.8 + '@mdit/helper': 0.18.0(markdown-it@14.1.0) + '@mdit/plugin-tex': 0.18.0(markdown-it@14.1.0) + '@types/markdown-it': 14.1.2 markdown-it: 14.1.0 dev: true - /@mdit/plugin-mark@0.8.0(markdown-it@14.1.0): - resolution: {integrity: 
sha512-1hImu8FskIZ9dumWD2VIyB5USyVGwGY2IuaPxYO25tFvMZkhu4rYBjkSK8x+vXExwp94OLzFUlGgVl94S+nw9w==} + /@mdit/plugin-mark@0.18.0(markdown-it@14.1.0): + resolution: {integrity: sha512-mFEUrNV63z6Os5r5cZzjjdVaU5pz6YdwON8jM71h6HTgaM5EjsXm2c4zyfnUdN24lWz+O6gaYw67inhAFxZGwg==} engines: {node: '>= 18'} peerDependencies: - markdown-it: ^14.0.0 + markdown-it: ^14.1.0 peerDependenciesMeta: markdown-it: optional: true dependencies: - '@types/markdown-it': 13.0.8 + '@types/markdown-it': 14.1.2 markdown-it: 14.1.0 dev: true - /@mdit/plugin-mathjax@0.8.0(markdown-it@14.1.0): - resolution: {integrity: sha512-y016KQHa3PoXDUIcQseISMAz5q2mZJ/qocEs2EABT4PjquXPEh/4rw7Ql7KX9gf/SQIUyzj8hYs4bHyRZc6x4w==} + /@mdit/plugin-mathjax-slim@0.18.0(markdown-it@14.1.0): + resolution: {integrity: sha512-U1LU/moBIda1auMPP657w6nL7zNcnw6c+LnuD8Kf/HgfGEBx5mEBO7ReEmrpXu9i6urP78/C0/oVGPrWZDzB8w==} engines: {node: '>= 18'} peerDependencies: - markdown-it: ^14.0.0 + markdown-it: ^14.1.0 mathjax-full: ^3.2.2 peerDependenciesMeta: markdown-it: @@ -534,102 +630,150 @@ packages: mathjax-full: optional: true dependencies: - '@mdit/plugin-tex': 0.8.0(markdown-it@14.1.0) - '@types/markdown-it': 13.0.8 + '@mdit/plugin-tex': 0.18.0(markdown-it@14.1.0) + '@types/markdown-it': 14.1.2 markdown-it: 14.1.0 upath: 2.0.1 dev: true - /@mdit/plugin-stylize@0.8.0(markdown-it@14.1.0): - resolution: {integrity: sha512-oNFI3Z7UTxP8CKxS3CIuawLmsyrc0n9jIw9mPzUcPNp+LtYmLktfZc3FIRlqpUUq34YwHTH3yihayBRdSkVV6A==} + /@mdit/plugin-plantuml@0.18.0(markdown-it@14.1.0): + resolution: {integrity: sha512-JZCv706q5yLiGHRD0s/R65hCKBaHOrOgdyQM1d6kkcdakWwyeLJUeBSb+v6fFnbjM2GgGpoeioUMQHlSK/uiTQ==} + peerDependencies: + markdown-it: ^14.1.0 + peerDependenciesMeta: + markdown-it: + optional: true + dependencies: + '@mdit/plugin-uml': 0.18.0(markdown-it@14.1.0) + '@types/markdown-it': 14.1.2 + markdown-it: 14.1.0 + dev: true + + /@mdit/plugin-spoiler@0.18.0(markdown-it@14.1.0): + resolution: {integrity: sha512-YIJMUBkBme/DuIgH7ZwbpF4/jIRmzaQqdGNowmC5PVMd5OryBulVwDQ4lYKi4qeaS/xxIFe7ssgMpB1XNjA7JA==} + engines: {node: '>= 18'} + peerDependencies: + markdown-it: ^14.1.0 + peerDependenciesMeta: + markdown-it: + optional: true + dependencies: + '@types/markdown-it': 14.1.2 + markdown-it: 14.1.0 + dev: true + + /@mdit/plugin-stylize@0.18.0(markdown-it@14.1.0): + resolution: {integrity: sha512-6pJkvsB+HwN9up9lQo1tXRfoK5nj8u9z+Q7HM9AvWWc42JnsZ4iHgz2LMu5AqjaqfwAXfOLwJ20FZy6OSeHfcQ==} engines: {node: '>= 18'} peerDependencies: - markdown-it: ^14.0.0 + markdown-it: ^14.1.0 peerDependenciesMeta: markdown-it: optional: true dependencies: - '@types/markdown-it': 13.0.8 + '@types/markdown-it': 14.1.2 markdown-it: 14.1.0 dev: true - /@mdit/plugin-sub@0.8.0(markdown-it@14.1.0): - resolution: {integrity: sha512-oqCcmJVJykESgNJ4fFmDKKxRRQddwkXWIT4PjF83XSeXHxTOz8gMfke/V1mE7BAfKKCLP4io8HbrYfvIiOTZ4A==} + /@mdit/plugin-sub@0.18.0(markdown-it@14.1.0): + resolution: {integrity: sha512-VqNwTeUjbLP2AZv9WGbGJPY7q32nf24fkFbuXlalzJk5FNArOo4qHCqWmswjBmfGWbtG5ZkrRM0v+f4adRELrQ==} engines: {node: '>= 18'} peerDependencies: - markdown-it: ^14.0.0 + markdown-it: ^14.1.0 peerDependenciesMeta: markdown-it: optional: true dependencies: - '@types/markdown-it': 13.0.8 + '@mdit/helper': 0.18.0(markdown-it@14.1.0) + '@types/markdown-it': 14.1.2 markdown-it: 14.1.0 dev: true - /@mdit/plugin-sup@0.8.0(markdown-it@14.1.0): - resolution: {integrity: sha512-5/uE2lONNjCgGDXC8jZ265tzefjUNQNakmK4PSCI4D5jD80xFrxc6MKh70VLCOL8Xk6COK/K9f0SAU2lwa97Tg==} + /@mdit/plugin-sup@0.18.0(markdown-it@14.1.0): + resolution: {integrity: 
sha512-2l5BcRrZfivZi2vwrC/an4K/wZYsgA1k8m4VGgOcDllTKUW8IQHAmievYo7IuAwvY3CwBdqEtc5s0DHlHRFdnQ==} engines: {node: '>= 18'} peerDependencies: - markdown-it: ^14.0.0 + markdown-it: ^14.1.0 peerDependenciesMeta: markdown-it: optional: true dependencies: - '@types/markdown-it': 13.0.8 + '@mdit/helper': 0.18.0(markdown-it@14.1.0) + '@types/markdown-it': 14.1.2 markdown-it: 14.1.0 dev: true - /@mdit/plugin-tab@0.8.0(markdown-it@14.1.0): - resolution: {integrity: sha512-SNa1S14Buuy564egiUTkU9HTTNFrEURJZLqA1+jr/2xYCdICPym0FWcB0cLtBl3lrQZkFtbxhzC6ws5JBt/ERQ==} + /@mdit/plugin-tab@0.13.2(markdown-it@14.1.0): + resolution: {integrity: sha512-evpIXvo6vXRWhgNE6vu4ok1I2dVOzrBYmBUGc1gW8nT9MvkW9litu7RbJ6CafscqaiiYRIM5Oib1ahS0lwte6g==} peerDependencies: - markdown-it: ^14.0.0 + markdown-it: ^14.1.0 peerDependenciesMeta: markdown-it: optional: true dependencies: - '@types/markdown-it': 13.0.8 + '@types/markdown-it': 14.1.2 markdown-it: 14.1.0 dev: true - /@mdit/plugin-tasklist@0.8.0(markdown-it@14.1.0): - resolution: {integrity: sha512-vfOTZdXIL/jk/ConUqCODI5WuqgB9qiBGc+wIa7UMhe73KcpwFeGFJVQZm9AvjhXDDYqznJxSMVRP/TN7TxVVw==} + /@mdit/plugin-tab@0.18.0(markdown-it@14.1.0): + resolution: {integrity: sha512-nM/cqa8q7x2L6bXkrmePk9IEaSONhxIkgTVsmM4b6PQ3zoXq83SxGR+8vC7AFhiRYAjmtV8psBjy1pyUtY4Syw==} + peerDependencies: + markdown-it: ^14.1.0 + peerDependenciesMeta: + markdown-it: + optional: true + dependencies: + '@mdit/helper': 0.18.0(markdown-it@14.1.0) + '@types/markdown-it': 14.1.2 + markdown-it: 14.1.0 + dev: true + + /@mdit/plugin-tasklist@0.18.0(markdown-it@14.1.0): + resolution: {integrity: sha512-b1Fwx19Jaugvfp4i/bJQRmoAupNhG8ZGjn+sSAPtji9VyiYaqrfB19cPTImZA2/Z4LFPxMH4ZzwC2kOl/qDy/g==} engines: {node: '>= 18'} peerDependencies: - markdown-it: ^14.0.0 + markdown-it: ^14.1.0 peerDependenciesMeta: markdown-it: optional: true dependencies: - '@types/markdown-it': 13.0.8 + '@types/markdown-it': 14.1.2 markdown-it: 14.1.0 dev: true - /@mdit/plugin-tex@0.8.0(markdown-it@14.1.0): - resolution: {integrity: sha512-uh4kOhwBVEESz6dMmHk4Hn/AVfVtUhMA1UKpwMc1EL9qelodJ0YzSYfNXp6d/PS+E1l53yp8nMZK90DUO+3vpA==} + /@mdit/plugin-tex@0.18.0(markdown-it@14.1.0): + resolution: {integrity: sha512-k4xSML1N6gFkCm/zsiRO1eI0IFpi+Nr6WcYs1Y8NojyuIgDUZSrKbIJTwuqIo/TrCUfgUQANpibYskRxQpgZLg==} engines: {node: '>= 18'} peerDependencies: - markdown-it: ^14.0.0 + markdown-it: ^14.1.0 peerDependenciesMeta: markdown-it: optional: true dependencies: - '@types/markdown-it': 13.0.8 + '@types/markdown-it': 14.1.2 markdown-it: 14.1.0 dev: true - /@mdit/plugin-uml@0.8.0(markdown-it@14.1.0): - resolution: {integrity: sha512-6TOVxLhmdzV7bzjlJCRP5uCFq62Xwk2ZAeYUK3RLx9lgM3s2Mww5ENhdysnQMd7VQlUHsPmp4XIMBZZjPddg3g==} + /@mdit/plugin-uml@0.18.0(markdown-it@14.1.0): + resolution: {integrity: sha512-zT08h34NF40LQcXSDuVvW0hANte6zYotSMMxW/288Ux5BSeY55yHfLrBFs4uRLRa989d0Ib7PcXrRkPN+kxHcA==} engines: {node: '>= 18'} peerDependencies: - markdown-it: ^14.0.0 + markdown-it: ^14.1.0 peerDependenciesMeta: markdown-it: optional: true dependencies: - '@types/markdown-it': 13.0.8 + '@mdit/helper': 0.18.0(markdown-it@14.1.0) + '@types/markdown-it': 14.1.2 markdown-it: 14.1.0 dev: true + /@mermaid-js/parser@0.4.0: + resolution: {integrity: sha512-wla8XOWvQAwuqy+gxiZqY+c7FokraOTHRWMsbB4AgRx9Sy7zKslNyejy7E+a77qHfey5GXw/ik3IXv/NHMJgaA==} + dependencies: + langium: 3.3.1 + dev: true + /@nodelib/fs.scandir@2.1.5: resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} engines: {node: '>= 8'} @@ -648,245 +792,601 @@ 
packages: engines: {node: '>= 8'} dependencies: '@nodelib/fs.scandir': 2.1.5 - fastq: 1.17.1 - dev: true - - /@rollup/rollup-android-arm-eabi@4.18.0: - resolution: {integrity: sha512-Tya6xypR10giZV1XzxmH5wr25VcZSncG0pZIjfePT0OVBvqNEurzValetGNarVrGiq66EBVAFn15iYX4w6FKgQ==} - cpu: [arm] - os: [android] - requiresBuild: true + fastq: 1.19.1 dev: true - optional: true - /@rollup/rollup-android-arm64@4.18.0: - resolution: {integrity: sha512-avCea0RAP03lTsDhEyfy+hpfr85KfyTctMADqHVhLAF3MlIkq83CP8UfAHUssgXTYd+6er6PaAhx/QGv4L1EiA==} + /@parcel/watcher-android-arm64@2.5.1: + resolution: {integrity: sha512-KF8+j9nNbUN8vzOFDpRMsaKBHZ/mcjEjMToVMJOhTozkDonQFFrRcfdLWn6yWKCmJKmdVxSgHiYvTCef4/qcBA==} + engines: {node: '>= 10.0.0'} cpu: [arm64] os: [android] - requiresBuild: true dev: true optional: true - /@rollup/rollup-darwin-arm64@4.18.0: - resolution: {integrity: sha512-IWfdwU7KDSm07Ty0PuA/W2JYoZ4iTj3TUQjkVsO/6U+4I1jN5lcR71ZEvRh52sDOERdnNhhHU57UITXz5jC1/w==} + /@parcel/watcher-darwin-arm64@2.5.1: + resolution: {integrity: sha512-eAzPv5osDmZyBhou8PoF4i6RQXAfeKL9tjb3QzYuccXFMQU0ruIc/POh30ePnaOyD1UXdlKguHBmsTs53tVoPw==} + engines: {node: '>= 10.0.0'} cpu: [arm64] os: [darwin] - requiresBuild: true dev: true optional: true - /@rollup/rollup-darwin-x64@4.18.0: - resolution: {integrity: sha512-n2LMsUz7Ynu7DoQrSQkBf8iNrjOGyPLrdSg802vk6XT3FtsgX6JbE8IHRvposskFm9SNxzkLYGSq9QdpLYpRNA==} + /@parcel/watcher-darwin-x64@2.5.1: + resolution: {integrity: sha512-1ZXDthrnNmwv10A0/3AJNZ9JGlzrF82i3gNQcWOzd7nJ8aj+ILyW1MTxVk35Db0u91oD5Nlk9MBiujMlwmeXZg==} + engines: {node: '>= 10.0.0'} cpu: [x64] os: [darwin] - requiresBuild: true dev: true optional: true - /@rollup/rollup-linux-arm-gnueabihf@4.18.0: - resolution: {integrity: sha512-C/zbRYRXFjWvz9Z4haRxcTdnkPt1BtCkz+7RtBSuNmKzMzp3ZxdM28Mpccn6pt28/UWUCTXa+b0Mx1k3g6NOMA==} - cpu: [arm] - os: [linux] - requiresBuild: true + /@parcel/watcher-freebsd-x64@2.5.1: + resolution: {integrity: sha512-SI4eljM7Flp9yPuKi8W0ird8TI/JK6CSxju3NojVI6BjHsTyK7zxA9urjVjEKJ5MBYC+bLmMcbAWlZ+rFkLpJQ==} + engines: {node: '>= 10.0.0'} + cpu: [x64] + os: [freebsd] dev: true optional: true - /@rollup/rollup-linux-arm-musleabihf@4.18.0: - resolution: {integrity: sha512-l3m9ewPgjQSXrUMHg93vt0hYCGnrMOcUpTz6FLtbwljo2HluS4zTXFy2571YQbisTnfTKPZ01u/ukJdQTLGh9A==} + /@parcel/watcher-linux-arm-glibc@2.5.1: + resolution: {integrity: sha512-RCdZlEyTs8geyBkkcnPWvtXLY44BCeZKmGYRtSgtwwnHR4dxfHRG3gR99XdMEdQ7KeiDdasJwwvNSF5jKtDwdA==} + engines: {node: '>= 10.0.0'} cpu: [arm] os: [linux] - requiresBuild: true dev: true optional: true - /@rollup/rollup-linux-arm64-gnu@4.18.0: - resolution: {integrity: sha512-rJ5D47d8WD7J+7STKdCUAgmQk49xuFrRi9pZkWoRD1UeSMakbcepWXPF8ycChBoAqs1pb2wzvbY6Q33WmN2ftw==} - cpu: [arm64] + /@parcel/watcher-linux-arm-musl@2.5.1: + resolution: {integrity: sha512-6E+m/Mm1t1yhB8X412stiKFG3XykmgdIOqhjWj+VL8oHkKABfu/gjFj8DvLrYVHSBNC+/u5PeNrujiSQ1zwd1Q==} + engines: {node: '>= 10.0.0'} + cpu: [arm] os: [linux] - requiresBuild: true dev: true optional: true - /@rollup/rollup-linux-arm64-musl@4.18.0: - resolution: {integrity: sha512-be6Yx37b24ZwxQ+wOQXXLZqpq4jTckJhtGlWGZs68TgdKXJgw54lUUoFYrg6Zs/kjzAQwEwYbp8JxZVzZLRepQ==} + /@parcel/watcher-linux-arm64-glibc@2.5.1: + resolution: {integrity: sha512-LrGp+f02yU3BN9A+DGuY3v3bmnFUggAITBGriZHUREfNEzZh/GO06FF5u2kx8x+GBEUYfyTGamol4j3m9ANe8w==} + engines: {node: '>= 10.0.0'} cpu: [arm64] os: [linux] - requiresBuild: true - dev: true - optional: true - - /@rollup/rollup-linux-powerpc64le-gnu@4.18.0: - resolution: {integrity: 
sha512-hNVMQK+qrA9Todu9+wqrXOHxFiD5YmdEi3paj6vP02Kx1hjd2LLYR2eaN7DsEshg09+9uzWi2W18MJDlG0cxJA==} - cpu: [ppc64] - os: [linux] - requiresBuild: true - dev: true - optional: true - - /@rollup/rollup-linux-riscv64-gnu@4.18.0: - resolution: {integrity: sha512-ROCM7i+m1NfdrsmvwSzoxp9HFtmKGHEqu5NNDiZWQtXLA8S5HBCkVvKAxJ8U+CVctHwV2Gb5VUaK7UAkzhDjlg==} - cpu: [riscv64] - os: [linux] - requiresBuild: true dev: true optional: true - /@rollup/rollup-linux-s390x-gnu@4.18.0: - resolution: {integrity: sha512-0UyyRHyDN42QL+NbqevXIIUnKA47A+45WyasO+y2bGJ1mhQrfrtXUpTxCOrfxCR4esV3/RLYyucGVPiUsO8xjg==} - cpu: [s390x] + /@parcel/watcher-linux-arm64-musl@2.5.1: + resolution: {integrity: sha512-cFOjABi92pMYRXS7AcQv9/M1YuKRw8SZniCDw0ssQb/noPkRzA+HBDkwmyOJYp5wXcsTrhxO0zq1U11cK9jsFg==} + engines: {node: '>= 10.0.0'} + cpu: [arm64] os: [linux] - requiresBuild: true dev: true optional: true - /@rollup/rollup-linux-x64-gnu@4.18.0: - resolution: {integrity: sha512-xuglR2rBVHA5UsI8h8UbX4VJ470PtGCf5Vpswh7p2ukaqBGFTnsfzxUBetoWBWymHMxbIG0Cmx7Y9qDZzr648w==} + /@parcel/watcher-linux-x64-glibc@2.5.1: + resolution: {integrity: sha512-GcESn8NZySmfwlTsIur+49yDqSny2IhPeZfXunQi48DMugKeZ7uy1FX83pO0X22sHntJ4Ub+9k34XQCX+oHt2A==} + engines: {node: '>= 10.0.0'} cpu: [x64] os: [linux] - requiresBuild: true dev: true optional: true - /@rollup/rollup-linux-x64-musl@4.18.0: - resolution: {integrity: sha512-LKaqQL9osY/ir2geuLVvRRs+utWUNilzdE90TpyoX0eNqPzWjRm14oMEE+YLve4k/NAqCdPkGYDaDF5Sw+xBfg==} + /@parcel/watcher-linux-x64-musl@2.5.1: + resolution: {integrity: sha512-n0E2EQbatQ3bXhcH2D1XIAANAcTZkQICBPVaxMeaCVBtOpBZpWJuf7LwyWPSBDITb7In8mqQgJ7gH8CILCURXg==} + engines: {node: '>= 10.0.0'} cpu: [x64] os: [linux] - requiresBuild: true dev: true optional: true - /@rollup/rollup-win32-arm64-msvc@4.18.0: - resolution: {integrity: sha512-7J6TkZQFGo9qBKH0pk2cEVSRhJbL6MtfWxth7Y5YmZs57Pi+4x6c2dStAUvaQkHQLnEQv1jzBUW43GvZW8OFqA==} + /@parcel/watcher-win32-arm64@2.5.1: + resolution: {integrity: sha512-RFzklRvmc3PkjKjry3hLF9wD7ppR4AKcWNzH7kXR7GUe0Igb3Nz8fyPwtZCSquGrhU5HhUNDr/mKBqj7tqA2Vw==} + engines: {node: '>= 10.0.0'} cpu: [arm64] os: [win32] - requiresBuild: true dev: true optional: true - /@rollup/rollup-win32-ia32-msvc@4.18.0: - resolution: {integrity: sha512-Txjh+IxBPbkUB9+SXZMpv+b/vnTEtFyfWZgJ6iyCmt2tdx0OF5WhFowLmnh8ENGNpfUlUZkdI//4IEmhwPieNg==} + /@parcel/watcher-win32-ia32@2.5.1: + resolution: {integrity: sha512-c2KkcVN+NJmuA7CGlaGD1qJh1cLfDnQsHjE89E60vUEMlqduHGCdCLJCID5geFVM0dOtA3ZiIO8BoEQmzQVfpQ==} + engines: {node: '>= 10.0.0'} cpu: [ia32] os: [win32] - requiresBuild: true dev: true optional: true - /@rollup/rollup-win32-x64-msvc@4.18.0: - resolution: {integrity: sha512-UOo5FdvOL0+eIVTgS4tIdbW+TtnBLWg1YBCcU2KWM7nuNwRz9bksDX1bekJJCpu25N1DVWaCwnT39dVQxzqS8g==} + /@parcel/watcher-win32-x64@2.5.1: + resolution: {integrity: sha512-9lHBdJITeNR++EvSQVUcaZoWupyHfXe1jZvGZ06O/5MflPcuPLtEphScIBL+AiCWBO46tDSHzWyD0uDmmZqsgA==} + engines: {node: '>= 10.0.0'} cpu: [x64] os: [win32] - requiresBuild: true dev: true optional: true - /@sindresorhus/merge-streams@2.3.0: - resolution: {integrity: sha512-LtoMMhxAlorcGhmFYI+LhPgbPZCkgP6ra1YL604EeF6U98pLlQ3iWIGMdWSC+vWmPBWBNgmDBAhnAobLROJmwg==} - engines: {node: '>=18'} + /@parcel/watcher@2.5.1: + resolution: {integrity: sha512-dfUnCxiN9H4ap84DvD2ubjw+3vUNpstxa0TneY/Paat8a3R4uQZDLSvWjmznAY/DoahqTHl9V46HF/Zs3F29pg==} + engines: {node: '>= 10.0.0'} + dependencies: + detect-libc: 1.0.3 + is-glob: 4.0.3 + micromatch: 4.0.8 + node-addon-api: 7.1.1 + optionalDependencies: + '@parcel/watcher-android-arm64': 2.5.1 + 
'@parcel/watcher-darwin-arm64': 2.5.1 + '@parcel/watcher-darwin-x64': 2.5.1 + '@parcel/watcher-freebsd-x64': 2.5.1 + '@parcel/watcher-linux-arm-glibc': 2.5.1 + '@parcel/watcher-linux-arm-musl': 2.5.1 + '@parcel/watcher-linux-arm64-glibc': 2.5.1 + '@parcel/watcher-linux-arm64-musl': 2.5.1 + '@parcel/watcher-linux-x64-glibc': 2.5.1 + '@parcel/watcher-linux-x64-musl': 2.5.1 + '@parcel/watcher-win32-arm64': 2.5.1 + '@parcel/watcher-win32-ia32': 2.5.1 + '@parcel/watcher-win32-x64': 2.5.1 dev: true + optional: true - /@stackblitz/sdk@1.10.0: - resolution: {integrity: sha512-IcvE9Xifo2c4/f+yNqjFM/OW5VTBPLed3TxsQ+n8n81Py358IqD5w0IYfFgV5gbDjp2g5H5YK2/Shls/kQNTWQ==} + /@pkgr/core@0.2.4: + resolution: {integrity: sha512-ROFF39F6ZrnzSUEmQQZUar0Jt4xVoP9WnDRdWwF4NNcXs3xBTLgBUDoOwW141y1jP+S8nahIbdxbFC7IShw9Iw==} + engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} dev: true - /@types/d3-scale-chromatic@3.0.3: - resolution: {integrity: sha512-laXM4+1o5ImZv3RpFAsTRn3TEkzqkytiOY0Dz0sq5cnd1dtNlk6sHLon4OvqaiJb28T0S/TdsBI3Sjsy+keJrw==} + /@rollup/rollup-android-arm-eabi@4.41.1: + resolution: {integrity: sha512-NELNvyEWZ6R9QMkiytB4/L4zSEaBC03KIXEghptLGLZWJ6VPrL63ooZQCOnlx36aQPGhzuOMwDerC1Eb2VmrLw==} + cpu: [arm] + os: [android] dev: true + optional: true - /@types/d3-scale@4.0.8: - resolution: {integrity: sha512-gkK1VVTr5iNiYJ7vWDI+yUFFlszhNMtVeneJ6lUTKPjprsvLLI9/tgEGiXJOnlINJA8FyA88gfnQsHbybVZrYQ==} - dependencies: - '@types/d3-time': 3.0.3 + /@rollup/rollup-android-arm64@4.41.1: + resolution: {integrity: sha512-DXdQe1BJ6TK47ukAoZLehRHhfKnKg9BjnQYUu9gzhI8Mwa1d2fzxA1aw2JixHVl403bwp1+/o/NhhHtxWJBgEA==} + cpu: [arm64] + os: [android] dev: true + optional: true - /@types/d3-time@3.0.3: - resolution: {integrity: sha512-2p6olUZ4w3s+07q3Tm2dbiMZy5pCDfYwtLXXHUnVzXgQlZ/OyPtUz6OL382BkOuGlLXqfT+wqv8Fw2v8/0geBw==} + /@rollup/rollup-darwin-arm64@4.41.1: + resolution: {integrity: sha512-5afxvwszzdulsU2w8JKWwY8/sJOLPzf0e1bFuvcW5h9zsEg+RQAojdW0ux2zyYAz7R8HvvzKCjLNJhVq965U7w==} + cpu: [arm64] + os: [darwin] dev: true + optional: true - /@types/debug@4.1.12: - resolution: {integrity: sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==} - dependencies: - '@types/ms': 0.7.34 + /@rollup/rollup-darwin-x64@4.41.1: + resolution: {integrity: sha512-egpJACny8QOdHNNMZKf8xY0Is6gIMz+tuqXlusxquWu3F833DcMwmGM7WlvCO9sB3OsPjdC4U0wHw5FabzCGZg==} + cpu: [x64] + os: [darwin] dev: true + optional: true - /@types/estree@1.0.5: - resolution: {integrity: sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==} + /@rollup/rollup-freebsd-arm64@4.41.1: + resolution: {integrity: sha512-DBVMZH5vbjgRk3r0OzgjS38z+atlupJ7xfKIDJdZZL6sM6wjfDNo64aowcLPKIx7LMQi8vybB56uh1Ftck/Atg==} + cpu: [arm64] + os: [freebsd] dev: true + optional: true - /@types/fs-extra@11.0.4: - resolution: {integrity: sha512-yTbItCNreRooED33qjunPthRcSjERP1r4MqCZc7wv0u2sUkzTFp45tgUfS5+r7FrZPdmCCNflLhVSP/o+SemsQ==} - dependencies: - '@types/jsonfile': 6.1.4 - '@types/node': 20.14.9 + /@rollup/rollup-freebsd-x64@4.41.1: + resolution: {integrity: sha512-3FkydeohozEskBxNWEIbPfOE0aqQgB6ttTkJ159uWOFn42VLyfAiyD9UK5mhu+ItWzft60DycIN1Xdgiy8o/SA==} + cpu: [x64] + os: [freebsd] dev: true + optional: true - /@types/hash-sum@1.0.2: - resolution: {integrity: sha512-UP28RddqY8xcU0SCEp9YKutQICXpaAq9N8U2klqF5hegGha7KzTOL8EdhIIV3bOSGBzjEpN9bU/d+nNZBdJYVw==} + /@rollup/rollup-linux-arm-gnueabihf@4.41.1: + resolution: {integrity: sha512-wC53ZNDgt0pqx5xCAgNunkTzFE8GTgdZ9EwYGVcg+jEjJdZGtq9xPjDnFgfFozQI/Xm1mh+D9YlYtl+ueswNEg==} + 
cpu: [arm] + os: [linux] dev: true + optional: true - /@types/jsonfile@6.1.4: - resolution: {integrity: sha512-D5qGUYwjvnNNextdU59/+fI+spnwtTFmyQP0h+PfIOSkNfpU6AOICUOkm4i0OnSk+NyjdPJrxCDro0sJsWlRpQ==} - dependencies: - '@types/node': 20.14.9 + /@rollup/rollup-linux-arm-musleabihf@4.41.1: + resolution: {integrity: sha512-jwKCca1gbZkZLhLRtsrka5N8sFAaxrGz/7wRJ8Wwvq3jug7toO21vWlViihG85ei7uJTpzbXZRcORotE+xyrLA==} + cpu: [arm] + os: [linux] dev: true + optional: true - /@types/katex@0.16.7: - resolution: {integrity: sha512-HMwFiRujE5PjrgwHQ25+bsLJgowjGjm5Z8FVSf0N6PwgJrwxH0QxzHYDcKsTfV3wva0vzrpqMTJS2jXPr5BMEQ==} + /@rollup/rollup-linux-arm64-gnu@4.41.1: + resolution: {integrity: sha512-g0UBcNknsmmNQ8V2d/zD2P7WWfJKU0F1nu0k5pW4rvdb+BIqMm8ToluW/eeRmxCared5dD76lS04uL4UaNgpNA==} + cpu: [arm64] + os: [linux] dev: true + optional: true - /@types/linkify-it@3.0.5: - resolution: {integrity: sha512-yg6E+u0/+Zjva+buc3EIb+29XEg4wltq7cSmd4Uc2EE/1nUVmxyzpX6gUXD0V8jIrG0r7YeOGVIbYRkxeooCtw==} - dev: true + /@rollup/rollup-linux-arm64-musl@4.41.1: + resolution: {integrity: sha512-XZpeGB5TKEZWzIrj7sXr+BEaSgo/ma/kCgrZgL0oo5qdB1JlTzIYQKel/RmhT6vMAvOdM2teYlAaOGJpJ9lahg==} + cpu: [arm64] + os: [linux] + dev: true + optional: true - /@types/linkify-it@5.0.0: - resolution: {integrity: sha512-sVDA58zAw4eWAffKOaQH5/5j3XeayukzDk+ewSsnv3p4yJEZHCCzMDiZM8e0OUrRvmpGZ85jf4yDHkHsgBNr9Q==} + /@rollup/rollup-linux-loongarch64-gnu@4.41.1: + resolution: {integrity: sha512-bkCfDJ4qzWfFRCNt5RVV4DOw6KEgFTUZi2r2RuYhGWC8WhCA8lCAJhDeAmrM/fdiAH54m0mA0Vk2FGRPyzI+tw==} + cpu: [loong64] + os: [linux] + dev: true + optional: true + + /@rollup/rollup-linux-powerpc64le-gnu@4.41.1: + resolution: {integrity: sha512-3mr3Xm+gvMX+/8EKogIZSIEF0WUu0HL9di+YWlJpO8CQBnoLAEL/roTCxuLncEdgcfJcvA4UMOf+2dnjl4Ut1A==} + cpu: [ppc64] + os: [linux] + dev: true + optional: true + + /@rollup/rollup-linux-riscv64-gnu@4.41.1: + resolution: {integrity: sha512-3rwCIh6MQ1LGrvKJitQjZFuQnT2wxfU+ivhNBzmxXTXPllewOF7JR1s2vMX/tWtUYFgphygxjqMl76q4aMotGw==} + cpu: [riscv64] + os: [linux] dev: true + optional: true - /@types/markdown-it-emoji@2.0.5: - resolution: {integrity: sha512-iJLsmCNpSWKtV6Ia3mLSjcXJPEt7ubGG342z+hGvYx++TpM19oTUrJcI7XjbOqRQ+W2UQ323E7B0eCLwlgT/9g==} + /@rollup/rollup-linux-riscv64-musl@4.41.1: + resolution: {integrity: sha512-LdIUOb3gvfmpkgFZuccNa2uYiqtgZAz3PTzjuM5bH3nvuy9ty6RGc/Q0+HDFrHrizJGVpjnTZ1yS5TNNjFlklw==} + cpu: [riscv64] + os: [linux] + dev: true + optional: true + + /@rollup/rollup-linux-s390x-gnu@4.41.1: + resolution: {integrity: sha512-oIE6M8WC9ma6xYqjvPhzZYk6NbobIURvP/lEbh7FWplcMO6gn7MM2yHKA1eC/GvYwzNKK/1LYgqzdkZ8YFxR8g==} + cpu: [s390x] + os: [linux] + dev: true + optional: true + + /@rollup/rollup-linux-x64-gnu@4.41.1: + resolution: {integrity: sha512-cWBOvayNvA+SyeQMp79BHPK8ws6sHSsYnK5zDcsC3Hsxr1dgTABKjMnMslPq1DvZIp6uO7kIWhiGwaTdR4Og9A==} + cpu: [x64] + os: [linux] + dev: true + optional: true + + /@rollup/rollup-linux-x64-musl@4.41.1: + resolution: {integrity: sha512-y5CbN44M+pUCdGDlZFzGGBSKCA4A/J2ZH4edTYSSxFg7ce1Xt3GtydbVKWLlzL+INfFIZAEg1ZV6hh9+QQf9YQ==} + cpu: [x64] + os: [linux] + dev: true + optional: true + + /@rollup/rollup-win32-arm64-msvc@4.41.1: + resolution: {integrity: sha512-lZkCxIrjlJlMt1dLO/FbpZbzt6J/A8p4DnqzSa4PWqPEUUUnzXLeki/iyPLfV0BmHItlYgHUqJe+3KiyydmiNQ==} + cpu: [arm64] + os: [win32] + dev: true + optional: true + + /@rollup/rollup-win32-ia32-msvc@4.41.1: + resolution: {integrity: sha512-+psFT9+pIh2iuGsxFYYa/LhS5MFKmuivRsx9iPJWNSGbh2XVEjk90fmpUEjCnILPEPJnikAU6SFDiEUyOv90Pg==} + cpu: [ia32] + os: [win32] + dev: true + 
optional: true + + /@rollup/rollup-win32-x64-msvc@4.41.1: + resolution: {integrity: sha512-Wq2zpapRYLfi4aKxf2Xff0tN+7slj2d4R87WEzqw7ZLsVvO5zwYCIuEGSZYiK41+GlwUo1HiR+GdkLEJnCKTCw==} + cpu: [x64] + os: [win32] + dev: true + optional: true + + /@shikijs/core@3.4.2: + resolution: {integrity: sha512-AG8vnSi1W2pbgR2B911EfGqtLE9c4hQBYkv/x7Z+Kt0VxhgQKcW7UNDVYsu9YxwV6u+OJrvdJrMq6DNWoBjihQ==} dependencies: - '@types/markdown-it': 13.0.8 + '@shikijs/types': 3.4.2 + '@shikijs/vscode-textmate': 10.0.2 + '@types/hast': 3.0.4 + hast-util-to-html: 9.0.5 dev: true - /@types/markdown-it@13.0.8: - resolution: {integrity: sha512-V+KmpgiipS+zoypeUSS9ojesWtY/0k4XfqcK2fnVrX/qInJhX7rsCxZ/rygiPH2zxlPPrhfuW0I6ddMcWTKLsg==} + /@shikijs/engine-javascript@3.4.2: + resolution: {integrity: sha512-1/adJbSMBOkpScCE/SB6XkjJU17ANln3Wky7lOmrnpl+zBdQ1qXUJg2GXTYVHRq+2j3hd1DesmElTXYDgtfSOQ==} dependencies: - '@types/linkify-it': 3.0.5 - '@types/mdurl': 1.0.5 + '@shikijs/types': 3.4.2 + '@shikijs/vscode-textmate': 10.0.2 + oniguruma-to-es: 4.3.3 dev: true - /@types/markdown-it@14.1.1: - resolution: {integrity: sha512-4NpsnpYl2Gt1ljyBGrKMxFYAYvpqbnnkgP/i/g+NLpjEUa3obn1XJCur9YbEXKDAkaXqsR1LbDnGEJ0MmKFxfg==} + /@shikijs/engine-oniguruma@3.4.2: + resolution: {integrity: sha512-zcZKMnNndgRa3ORja6Iemsr3DrLtkX3cAF7lTJkdMB6v9alhlBsX9uNiCpqofNrXOvpA3h6lHcLJxgCIhVOU5Q==} dependencies: - '@types/linkify-it': 5.0.0 - '@types/mdurl': 2.0.0 + '@shikijs/types': 3.4.2 + '@shikijs/vscode-textmate': 10.0.2 + dev: true + + /@shikijs/langs@3.4.2: + resolution: {integrity: sha512-H6azIAM+OXD98yztIfs/KH5H4PU39t+SREhmM8LaNXyUrqj2mx+zVkr8MWYqjceSjDw9I1jawm1WdFqU806rMA==} + dependencies: + '@shikijs/types': 3.4.2 dev: true - /@types/mdast@3.0.15: - resolution: {integrity: sha512-LnwD+mUEfxWMa1QpDraczIn6k0Ee3SMicuYSSzS6ZYl2gKS09EClnJYGd8Du6rfc5r/GZEk5o1mRb8TaTj03sQ==} + /@shikijs/themes@3.4.2: + resolution: {integrity: sha512-qAEuAQh+brd8Jyej2UDDf+b4V2g1Rm8aBIdvt32XhDPrHvDkEnpb7Kzc9hSuHUxz0Iuflmq7elaDuQAP9bHIhg==} dependencies: - '@types/unist': 2.0.10 + '@shikijs/types': 3.4.2 dev: true - /@types/mdurl@1.0.5: - resolution: {integrity: sha512-6L6VymKTzYSrEf4Nev4Xa1LCHKrlTlYCBMTlQKFuddo1CvQcE52I0mwfOJayueUC7MJuXOeHTcIU683lzd0cUA==} + /@shikijs/transformers@3.4.2: + resolution: {integrity: sha512-I5baLVi/ynLEOZoWSAMlACHNnG+yw5HDmse0oe+GW6U1u+ULdEB3UHiVWaHoJSSONV7tlcVxuaMy74sREDkSvg==} + dependencies: + '@shikijs/core': 3.4.2 + '@shikijs/types': 3.4.2 + dev: true + + /@shikijs/types@3.4.2: + resolution: {integrity: sha512-zHC1l7L+eQlDXLnxvM9R91Efh2V4+rN3oMVS2swCBssbj2U/FBwybD1eeLaq8yl/iwT+zih8iUbTBCgGZOYlVg==} + dependencies: + '@shikijs/vscode-textmate': 10.0.2 + '@types/hast': 3.0.4 + dev: true + + /@shikijs/vscode-textmate@10.0.2: + resolution: {integrity: sha512-83yeghZ2xxin3Nj8z1NMd/NCuca+gsYXswywDy5bHvwlWL8tpTQmzGeUuHd9FC3E/SBEMvzJRwWEOz5gGes9Qg==} + dev: true + + /@sindresorhus/merge-streams@2.3.0: + resolution: {integrity: sha512-LtoMMhxAlorcGhmFYI+LhPgbPZCkgP6ra1YL604EeF6U98pLlQ3iWIGMdWSC+vWmPBWBNgmDBAhnAobLROJmwg==} + engines: {node: '>=18'} + dev: true + + /@stackblitz/sdk@1.11.0: + resolution: {integrity: sha512-DFQGANNkEZRzFk1/rDP6TcFdM82ycHE+zfl9C/M/jXlH68jiqHWHFMQURLELoD8koxvu/eW5uhg94NSAZlYrUQ==} + dev: true + + /@types/d3-array@3.2.1: + resolution: {integrity: sha512-Y2Jn2idRrLzUfAKV2LyRImR+y4oa2AntrgID95SHJxuMUrkNXmanDSed71sRNZysveJVt1hLLemQZIady0FpEg==} + dev: true + + /@types/d3-axis@3.0.6: + resolution: {integrity: sha512-pYeijfZuBd87T0hGn0FO1vQ/cgLk6E1ALJjfkC0oJ8cbwkZl3TpgS8bVBLZN+2jjGgg38epgxb2zmoGtSfvgMw==} + dependencies: + 
'@types/d3-selection': 3.0.11 + dev: true + + /@types/d3-brush@3.0.6: + resolution: {integrity: sha512-nH60IZNNxEcrh6L1ZSMNA28rj27ut/2ZmI3r96Zd+1jrZD++zD3LsMIjWlvg4AYrHn/Pqz4CF3veCxGjtbqt7A==} + dependencies: + '@types/d3-selection': 3.0.11 + dev: true + + /@types/d3-chord@3.0.6: + resolution: {integrity: sha512-LFYWWd8nwfwEmTZG9PfQxd17HbNPksHBiJHaKuY1XeqscXacsS2tyoo6OdRsjf+NQYeB6XrNL3a25E3gH69lcg==} + dev: true + + /@types/d3-color@3.1.3: + resolution: {integrity: sha512-iO90scth9WAbmgv7ogoq57O9YpKmFBbmoEoCHDB2xMBY0+/KVrqAaCDyCE16dUspeOvIxFFRI+0sEtqDqy2b4A==} + dev: true + + /@types/d3-contour@3.0.6: + resolution: {integrity: sha512-BjzLgXGnCWjUSYGfH1cpdo41/hgdWETu4YxpezoztawmqsvCeep+8QGfiY6YbDvfgHz/DkjeIkkZVJavB4a3rg==} + dependencies: + '@types/d3-array': 3.2.1 + '@types/geojson': 7946.0.16 + dev: true + + /@types/d3-delaunay@6.0.4: + resolution: {integrity: sha512-ZMaSKu4THYCU6sV64Lhg6qjf1orxBthaC161plr5KuPHo3CNm8DTHiLw/5Eq2b6TsNP0W0iJrUOFscY6Q450Hw==} + dev: true + + /@types/d3-dispatch@3.0.6: + resolution: {integrity: sha512-4fvZhzMeeuBJYZXRXrRIQnvUYfyXwYmLsdiN7XXmVNQKKw1cM8a5WdID0g1hVFZDqT9ZqZEY5pD44p24VS7iZQ==} + dev: true + + /@types/d3-drag@3.0.7: + resolution: {integrity: sha512-HE3jVKlzU9AaMazNufooRJ5ZpWmLIoc90A37WU2JMmeq28w1FQqCZswHZ3xR+SuxYftzHq6WU6KJHvqxKzTxxQ==} + dependencies: + '@types/d3-selection': 3.0.11 + dev: true + + /@types/d3-dsv@3.0.7: + resolution: {integrity: sha512-n6QBF9/+XASqcKK6waudgL0pf/S5XHPPI8APyMLLUHd8NqouBGLsU8MgtO7NINGtPBtk9Kko/W4ea0oAspwh9g==} + dev: true + + /@types/d3-ease@3.0.2: + resolution: {integrity: sha512-NcV1JjO5oDzoK26oMzbILE6HW7uVXOHLQvHshBUW4UMdZGfiY6v5BeQwh9a9tCzv+CeefZQHJt5SRgK154RtiA==} + dev: true + + /@types/d3-fetch@3.0.7: + resolution: {integrity: sha512-fTAfNmxSb9SOWNB9IoG5c8Hg6R+AzUHDRlsXsDZsNp6sxAEOP0tkP3gKkNSO/qmHPoBFTxNrjDprVHDQDvo5aA==} + dependencies: + '@types/d3-dsv': 3.0.7 + dev: true + + /@types/d3-force@3.0.10: + resolution: {integrity: sha512-ZYeSaCF3p73RdOKcjj+swRlZfnYpK1EbaDiYICEEp5Q6sUiqFaFQ9qgoshp5CzIyyb/yD09kD9o2zEltCexlgw==} + dev: true + + /@types/d3-format@3.0.4: + resolution: {integrity: sha512-fALi2aI6shfg7vM5KiR1wNJnZ7r6UuggVqtDA+xiEdPZQwy/trcQaHnwShLuLdta2rTymCNpxYTiMZX/e09F4g==} + dev: true + + /@types/d3-geo@3.1.0: + resolution: {integrity: sha512-856sckF0oP/diXtS4jNsiQw/UuK5fQG8l/a9VVLeSouf1/PPbBE1i1W852zVwKwYCBkFJJB7nCFTbk6UMEXBOQ==} + dependencies: + '@types/geojson': 7946.0.16 + dev: true + + /@types/d3-hierarchy@3.1.7: + resolution: {integrity: sha512-tJFtNoYBtRtkNysX1Xq4sxtjK8YgoWUNpIiUee0/jHGRwqvzYxkq0hGVbbOGSz+JgFxxRu4K8nb3YpG3CMARtg==} + dev: true + + /@types/d3-interpolate@3.0.4: + resolution: {integrity: sha512-mgLPETlrpVV1YRJIglr4Ez47g7Yxjl1lj7YKsiMCb27VJH9W8NVM6Bb9d8kkpG/uAQS5AmbA48q2IAolKKo1MA==} + dependencies: + '@types/d3-color': 3.1.3 + dev: true + + /@types/d3-path@3.1.1: + resolution: {integrity: sha512-VMZBYyQvbGmWyWVea0EHs/BwLgxc+MKi1zLDCONksozI4YJMcTt8ZEuIR4Sb1MMTE8MMW49v0IwI5+b7RmfWlg==} + dev: true + + /@types/d3-polygon@3.0.2: + resolution: {integrity: sha512-ZuWOtMaHCkN9xoeEMr1ubW2nGWsp4nIql+OPQRstu4ypeZ+zk3YKqQT0CXVe/PYqrKpZAi+J9mTs05TKwjXSRA==} + dev: true + + /@types/d3-quadtree@3.0.6: + resolution: {integrity: sha512-oUzyO1/Zm6rsxKRHA1vH0NEDG58HrT5icx/azi9MF1TWdtttWl0UIUsjEQBBh+SIkrpd21ZjEv7ptxWys1ncsg==} + dev: true + + /@types/d3-random@3.0.3: + resolution: {integrity: sha512-Imagg1vJ3y76Y2ea0871wpabqp613+8/r0mCLEBfdtqC7xMSfj9idOnmBYyMoULfHePJyxMAw3nWhJxzc+LFwQ==} + dev: true + + /@types/d3-scale-chromatic@3.1.0: + resolution: {integrity: 
sha512-iWMJgwkK7yTRmWqRB5plb1kadXyQ5Sj8V/zYlFGMUBbIPKQScw+Dku9cAAMgJG+z5GYDoMjWGLVOvjghDEFnKQ==} + dev: true + + /@types/d3-scale@4.0.9: + resolution: {integrity: sha512-dLmtwB8zkAeO/juAMfnV+sItKjlsw2lKdZVVy6LRr0cBmegxSABiLEpGVmSJJ8O08i4+sGR6qQtb6WtuwJdvVw==} + dependencies: + '@types/d3-time': 3.0.4 + dev: true + + /@types/d3-selection@3.0.11: + resolution: {integrity: sha512-bhAXu23DJWsrI45xafYpkQ4NtcKMwWnAC/vKrd2l+nxMFuvOT3XMYTIj2opv8vq8AO5Yh7Qac/nSeP/3zjTK0w==} + dev: true + + /@types/d3-shape@3.1.7: + resolution: {integrity: sha512-VLvUQ33C+3J+8p+Daf+nYSOsjB4GXp19/S/aGo60m9h1v6XaxjiT82lKVWJCfzhtuZ3yD7i/TPeC/fuKLLOSmg==} + dependencies: + '@types/d3-path': 3.1.1 + dev: true + + /@types/d3-time-format@4.0.3: + resolution: {integrity: sha512-5xg9rC+wWL8kdDj153qZcsJ0FWiFt0J5RB6LYUNZjwSnesfblqrI/bJ1wBdJ8OQfncgbJG5+2F+qfqnqyzYxyg==} + dev: true + + /@types/d3-time@3.0.4: + resolution: {integrity: sha512-yuzZug1nkAAaBlBBikKZTgzCeA+k1uy4ZFwWANOfKw5z5LRhV0gNA7gNkKm7HoK+HRN0wX3EkxGk0fpbWhmB7g==} + dev: true + + /@types/d3-timer@3.0.2: + resolution: {integrity: sha512-Ps3T8E8dZDam6fUyNiMkekK3XUsaUEik+idO9/YjPtfj2qruF8tFBXS7XhtE4iIXBLxhmLjP3SXpLhVf21I9Lw==} + dev: true + + /@types/d3-transition@3.0.9: + resolution: {integrity: sha512-uZS5shfxzO3rGlu0cC3bjmMFKsXv+SmZZcgp0KD22ts4uGXp5EVYGzu/0YdwZeKmddhcAccYtREJKkPfXkZuCg==} + dependencies: + '@types/d3-selection': 3.0.11 + dev: true + + /@types/d3-zoom@3.0.8: + resolution: {integrity: sha512-iqMC4/YlFCSlO8+2Ii1GGGliCAY4XdeG748w5vQUbevlbDu0zSjH/+jojorQVBK/se0j6DUFNPBGSqD3YWYnDw==} + dependencies: + '@types/d3-interpolate': 3.0.4 + '@types/d3-selection': 3.0.11 + dev: true + + /@types/d3@7.4.3: + resolution: {integrity: sha512-lZXZ9ckh5R8uiFVt8ogUNf+pIrK4EsWrx2Np75WvF/eTpJ0FMHNhjXk8CKEx/+gpHbNQyJWehbFaTvqmHWB3ww==} + dependencies: + '@types/d3-array': 3.2.1 + '@types/d3-axis': 3.0.6 + '@types/d3-brush': 3.0.6 + '@types/d3-chord': 3.0.6 + '@types/d3-color': 3.1.3 + '@types/d3-contour': 3.0.6 + '@types/d3-delaunay': 6.0.4 + '@types/d3-dispatch': 3.0.6 + '@types/d3-drag': 3.0.7 + '@types/d3-dsv': 3.0.7 + '@types/d3-ease': 3.0.2 + '@types/d3-fetch': 3.0.7 + '@types/d3-force': 3.0.10 + '@types/d3-format': 3.0.4 + '@types/d3-geo': 3.1.0 + '@types/d3-hierarchy': 3.1.7 + '@types/d3-interpolate': 3.0.4 + '@types/d3-path': 3.1.1 + '@types/d3-polygon': 3.0.2 + '@types/d3-quadtree': 3.0.6 + '@types/d3-random': 3.0.3 + '@types/d3-scale': 4.0.9 + '@types/d3-scale-chromatic': 3.1.0 + '@types/d3-selection': 3.0.11 + '@types/d3-shape': 3.1.7 + '@types/d3-time': 3.0.4 + '@types/d3-time-format': 4.0.3 + '@types/d3-timer': 3.0.2 + '@types/d3-transition': 3.0.9 + '@types/d3-zoom': 3.0.8 + dev: true + + /@types/debug@4.1.12: + resolution: {integrity: sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==} + dependencies: + '@types/ms': 2.1.0 + dev: true + + /@types/estree@1.0.7: + resolution: {integrity: sha512-w28IoSUCJpidD/TGviZwwMJckNESJZXFu7NBZ5YJ4mEUnNraUn9Pm8HSZm/jDF1pDWYKspWE7oVphigUPRakIQ==} + dev: true + + /@types/fs-extra@11.0.4: + resolution: {integrity: sha512-yTbItCNreRooED33qjunPthRcSjERP1r4MqCZc7wv0u2sUkzTFp45tgUfS5+r7FrZPdmCCNflLhVSP/o+SemsQ==} + dependencies: + '@types/jsonfile': 6.1.4 + '@types/node': 22.15.21 + dev: true + + /@types/geojson@7946.0.16: + resolution: {integrity: sha512-6C8nqWur3j98U6+lXDfTUWIfgvZU+EumvpHKcYjujKH7woYyLj2sUmff0tRhrqM7BohUw7Pz3ZB1jj2gW9Fvmg==} + dev: true + + /@types/hash-sum@1.0.2: + resolution: {integrity: 
sha512-UP28RddqY8xcU0SCEp9YKutQICXpaAq9N8U2klqF5hegGha7KzTOL8EdhIIV3bOSGBzjEpN9bU/d+nNZBdJYVw==} + dev: true + + /@types/hast@3.0.4: + resolution: {integrity: sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==} + dependencies: + '@types/unist': 3.0.3 + dev: true + + /@types/jsonfile@6.1.4: + resolution: {integrity: sha512-D5qGUYwjvnNNextdU59/+fI+spnwtTFmyQP0h+PfIOSkNfpU6AOICUOkm4i0OnSk+NyjdPJrxCDro0sJsWlRpQ==} + dependencies: + '@types/node': 22.15.21 + dev: true + + /@types/linkify-it@5.0.0: + resolution: {integrity: sha512-sVDA58zAw4eWAffKOaQH5/5j3XeayukzDk+ewSsnv3p4yJEZHCCzMDiZM8e0OUrRvmpGZ85jf4yDHkHsgBNr9Q==} + dev: true + + /@types/markdown-it-emoji@3.0.1: + resolution: {integrity: sha512-cz1j8R35XivBqq9mwnsrP2fsz2yicLhB8+PDtuVkKOExwEdsVBNI+ROL3sbhtR5occRZ66vT0QnwFZCqdjf3pA==} + dependencies: + '@types/markdown-it': 14.1.2 + dev: true + + /@types/markdown-it@14.1.2: + resolution: {integrity: sha512-promo4eFwuiW+TfGxhi+0x3czqTYJkG8qB17ZUJiVF10Xm7NLVRSLUsfRTU/6h1e24VvRnXCx+hG7li58lkzog==} + dependencies: + '@types/linkify-it': 5.0.0 + '@types/mdurl': 2.0.0 + dev: true + + /@types/mdast@4.0.4: + resolution: {integrity: sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==} + dependencies: + '@types/unist': 3.0.3 dev: true /@types/mdurl@2.0.0: resolution: {integrity: sha512-RGdgjQUZba5p6QEFAVx2OGb8rQDL/cPRG7GiedRzMcJ1tYnUANBncjbSB1NRGwbvjcPeikRABz2nshyPk1bhWg==} dev: true - /@types/ms@0.7.34: - resolution: {integrity: sha512-nG96G3Wp6acyAgJqGasjODb+acrI7KltPiRxzHPXnP3NgI28bpQDRv53olbqGXbfcgF5aiiHmO3xpwEpS5Ld9g==} + /@types/ms@2.1.0: + resolution: {integrity: sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==} dev: true /@types/node@17.0.45: resolution: {integrity: sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw==} dev: true - /@types/node@20.14.9: - resolution: {integrity: sha512-06OCtnTXtWOZBJlRApleWndH4JsRVs1pDCc8dLSQp+7PpUpX3ePdHyeNSFTeSe7FtKyQkrlPvHwJOW3SLd8Oyg==} + /@types/node@22.15.21: + resolution: {integrity: sha512-EV/37Td6c+MgKAbkcLG6vqZ2zEYHD7bvSrzqqs2RIhbA6w3x+Dqz8MZM3sP6kGTeLrdoOgKZe+Xja7tUB2DNkQ==} dependencies: - undici-types: 5.26.5 + undici-types: 6.21.0 dev: true /@types/sax@1.2.7: @@ -899,498 +1399,854 @@ packages: resolution: {integrity: sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==} dev: true - /@types/unist@2.0.10: - resolution: {integrity: sha512-IfYcSBWE3hLpBg8+X2SEa8LVkJdJEkT2Ese2aaLs3ptGdVtABxndrMaxuFlQ1qdFf9Q5rDvDpxI3WwgvKFAsQA==} + /@types/unist@3.0.3: + resolution: {integrity: sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==} dev: true /@types/web-bluetooth@0.0.20: resolution: {integrity: sha512-g9gZnnXVq7gM7v3tJCWV/qw7w+KeOlSHAhgF9RytFyifW6AF61hdT2ucrYhPq9hLs5JIryeupHV3qGk95dH9ow==} dev: true - /@vitejs/plugin-vue@5.0.5(vite@5.2.13)(vue@3.4.30): - resolution: {integrity: sha512-LOjm7XeIimLBZyzinBQ6OSm3UBCNVCpLkxGC0oWmm2YPzVZoxMsdvNVimLTBzpAnR9hl/yn1SHGuRfe6/Td9rQ==} + /@types/web-bluetooth@0.0.21: + resolution: {integrity: sha512-oIQLCGWtcFZy2JW77j9k8nHzAOpqMHLQejDA48XXMWH6tjCQHz5RCFz1bzsmROyL6PUm+LLnUiI4BCn221inxA==} + dev: true + + /@ungap/structured-clone@1.3.0: + resolution: {integrity: sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==} + dev: true + + /@vitejs/plugin-vue@5.2.4(vite@6.3.5)(vue@3.5.15): + resolution: {integrity: 
sha512-7Yx/SXSOcQq5HiiV3orevHUFn+pmMB4cgbEkDYgnkUWb0WfeQ/wa2yFv6D5ICiCQOVpjA7vYDXrC7AGO8yjDHA==} engines: {node: ^18.0.0 || >=20.0.0} peerDependencies: - vite: ^5.0.0 + vite: ^5.0.0 || ^6.0.0 vue: ^3.2.25 dependencies: - vite: 5.2.13 - vue: 3.4.30 + vite: 6.3.5(sass-embedded@1.89.0)(sass@1.89.0) + vue: 3.5.15 dev: true - /@vue/compiler-core@3.4.30: - resolution: {integrity: sha512-ZL8y4Xxdh8O6PSwfdZ1IpQ24PjTAieOz3jXb/MDTfDtANcKBMxg1KLm6OX2jofsaQGYfIVzd3BAG22i56/cF1w==} + /@vue/compiler-core@3.5.15: + resolution: {integrity: sha512-nGRc6YJg/kxNqbv/7Tg4juirPnjHvuVdhcmDvQWVZXlLHjouq7VsKmV1hIxM/8yKM0VUfwT/Uzc0lO510ltZqw==} dependencies: - '@babel/parser': 7.24.7 - '@vue/shared': 3.4.30 + '@babel/parser': 7.27.2 + '@vue/shared': 3.5.15 entities: 4.5.0 estree-walker: 2.0.2 - source-map-js: 1.2.0 + source-map-js: 1.2.1 dev: true - /@vue/compiler-dom@3.4.30: - resolution: {integrity: sha512-+16Sd8lYr5j/owCbr9dowcNfrHd+pz+w2/b5Lt26Oz/kB90C9yNbxQ3bYOvt7rI2bxk0nqda39hVcwDFw85c2Q==} + /@vue/compiler-dom@3.5.15: + resolution: {integrity: sha512-ZelQd9n+O/UCBdL00rlwCrsArSak+YLZpBVuNDio1hN3+wrCshYZEDUO3khSLAzPbF1oQS2duEoMDUHScUlYjA==} dependencies: - '@vue/compiler-core': 3.4.30 - '@vue/shared': 3.4.30 + '@vue/compiler-core': 3.5.15 + '@vue/shared': 3.5.15 dev: true - /@vue/compiler-sfc@3.4.30: - resolution: {integrity: sha512-8vElKklHn/UY8+FgUFlQrYAPbtiSB2zcgeRKW7HkpSRn/JjMRmZvuOtwDx036D1aqKNSTtXkWRfqx53Qb+HmMg==} + /@vue/compiler-sfc@3.5.15: + resolution: {integrity: sha512-3zndKbxMsOU6afQWer75Zot/aydjtxNj0T2KLg033rAFaQUn2PGuE32ZRe4iMhflbTcAxL0yEYsRWFxtPro8RQ==} dependencies: - '@babel/parser': 7.24.7 - '@vue/compiler-core': 3.4.30 - '@vue/compiler-dom': 3.4.30 - '@vue/compiler-ssr': 3.4.30 - '@vue/shared': 3.4.30 + '@babel/parser': 7.27.2 + '@vue/compiler-core': 3.5.15 + '@vue/compiler-dom': 3.5.15 + '@vue/compiler-ssr': 3.5.15 + '@vue/shared': 3.5.15 estree-walker: 2.0.2 - magic-string: 0.30.10 - postcss: 8.4.38 - source-map-js: 1.2.0 + magic-string: 0.30.17 + postcss: 8.5.3 + source-map-js: 1.2.1 dev: true - /@vue/compiler-ssr@3.4.30: - resolution: {integrity: sha512-ZJ56YZGXJDd6jky4mmM0rNaNP6kIbQu9LTKZDhcpddGe/3QIalB1WHHmZ6iZfFNyj5mSypTa4+qDJa5VIuxMSg==} + /@vue/compiler-ssr@3.5.15: + resolution: {integrity: sha512-gShn8zRREZbrXqTtmLSCffgZXDWv8nHc/GhsW+mbwBfNZL5pI96e7IWcIq8XGQe1TLtVbu7EV9gFIVSmfyarPg==} dependencies: - '@vue/compiler-dom': 3.4.30 - '@vue/shared': 3.4.30 + '@vue/compiler-dom': 3.5.15 + '@vue/shared': 3.5.15 dev: true - /@vue/devtools-api@6.6.3: - resolution: {integrity: sha512-0MiMsFma/HqA6g3KLKn+AGpL1kgKhFWszC9U29NfpWK5LE7bjeXxySWJrOJ77hBz+TBrBQ7o4QJqbPbqbs8rJw==} + /@vue/devtools-api@6.6.4: + resolution: {integrity: sha512-sGhTPMuXqZ1rVOk32RylztWkfXTRhuS7vgAKv0zjqk8gbsHkJ7xfFf+jbySxt7tWObEJwyKaHMikV/WGDiQm8g==} dev: true - /@vue/reactivity@3.4.30: - resolution: {integrity: sha512-bVJurnCe3LS0JII8PPoAA63Zd2MBzcKrEzwdQl92eHCcxtIbxD2fhNwJpa+KkM3Y/A4T5FUnmdhgKwOf6BfbcA==} + /@vue/devtools-api@7.7.6: + resolution: {integrity: sha512-b2Xx0KvXZObePpXPYHvBRRJLDQn5nhKjXh7vUhMEtWxz1AYNFOVIsh5+HLP8xDGL7sy+Q7hXeUxPHB/KgbtsPw==} dependencies: - '@vue/shared': 3.4.30 + '@vue/devtools-kit': 7.7.6 dev: true - /@vue/runtime-core@3.4.30: - resolution: {integrity: sha512-qaFEbnNpGz+tlnkaualomogzN8vBLkgzK55uuWjYXbYn039eOBZrWxyXWq/7qh9Bz2FPifZqGjVDl/FXiq9L2g==} + /@vue/devtools-kit@7.7.6: + resolution: {integrity: sha512-geu7ds7tem2Y7Wz+WgbnbZ6T5eadOvozHZ23Atk/8tksHMFOFylKi1xgGlQlVn0wlkEf4hu+vd5ctj1G4kFtwA==} dependencies: - '@vue/reactivity': 3.4.30 - '@vue/shared': 3.4.30 + '@vue/devtools-shared': 
7.7.6 + birpc: 2.3.0 + hookable: 5.5.3 + mitt: 3.0.1 + perfect-debounce: 1.0.0 + speakingurl: 14.0.1 + superjson: 2.2.2 dev: true - /@vue/runtime-dom@3.4.30: - resolution: {integrity: sha512-tV6B4YiZRj5QsaJgw2THCy5C1H+2UeywO9tqgWEc21tn85qHEERndHN/CxlyXvSBFrpmlexCIdnqPuR9RM9thw==} + /@vue/devtools-shared@7.7.6: + resolution: {integrity: sha512-yFEgJZ/WblEsojQQceuyK6FzpFDx4kqrz2ohInxNj5/DnhoX023upTv4OD6lNPLAA5LLkbwPVb10o/7b+Y4FVA==} dependencies: - '@vue/reactivity': 3.4.30 - '@vue/runtime-core': 3.4.30 - '@vue/shared': 3.4.30 + rfdc: 1.4.1 + dev: true + + /@vue/reactivity@3.5.15: + resolution: {integrity: sha512-GaA5VUm30YWobCwpvcs9nvFKf27EdSLKDo2jA0IXzGS344oNpFNbEQ9z+Pp5ESDaxyS8FcH0vFN/XSe95BZtHQ==} + dependencies: + '@vue/shared': 3.5.15 + dev: true + + /@vue/runtime-core@3.5.15: + resolution: {integrity: sha512-CZAlIOQ93nj0OPpWWOx4+QDLCMzBNY85IQR4Voe6vIID149yF8g9WQaWnw042f/6JfvLttK7dnyWlC1EVCRK8Q==} + dependencies: + '@vue/reactivity': 3.5.15 + '@vue/shared': 3.5.15 + dev: true + + /@vue/runtime-dom@3.5.15: + resolution: {integrity: sha512-wFplHKzKO/v998up2iCW3RN9TNUeDMhdBcNYZgs5LOokHntrB48dyuZHspcahKZczKKh3v6i164gapMPxBTKNw==} + dependencies: + '@vue/reactivity': 3.5.15 + '@vue/runtime-core': 3.5.15 + '@vue/shared': 3.5.15 csstype: 3.1.3 dev: true - /@vue/server-renderer@3.4.30(vue@3.4.30): - resolution: {integrity: sha512-TBD3eqR1DeDc0cMrXS/vEs/PWzq1uXxnvjoqQuDGFIEHFIwuDTX/KWAQKIBjyMWLFHEeTDGYVsYci85z2UbTDg==} + /@vue/server-renderer@3.5.15(vue@3.5.15): + resolution: {integrity: sha512-Gehc693kVTYkLt6QSYEjGvqvdK2zZ/gf/D5zkgmvBdeB30dNnVZS8yY7+IlBmHRd1rR/zwaqeu06Ij04ZxBscg==} peerDependencies: - vue: 3.4.30 + vue: 3.5.15 dependencies: - '@vue/compiler-ssr': 3.4.30 - '@vue/shared': 3.4.30 - vue: 3.4.30 + '@vue/compiler-ssr': 3.5.15 + '@vue/shared': 3.5.15 + vue: 3.5.15 dev: true - /@vue/shared@3.4.30: - resolution: {integrity: sha512-CLg+f8RQCHQnKvuHY9adMsMaQOcqclh6Z5V9TaoMgy0ut0tz848joZ7/CYFFyF/yZ5i2yaw7Fn498C+CNZVHIg==} + /@vue/shared@3.5.15: + resolution: {integrity: sha512-bKvgFJJL1ZX9KxMCTQY6xD9Dhe3nusd1OhyOb1cJYGqvAr0Vg8FIjHPMOEVbJ9GDT9HG+Bjdn4oS8ohKP8EvoA==} dev: true - /@vuepress/bundler-vite@2.0.0-rc.9: - resolution: {integrity: sha512-GcM2eSqW2mPY5xXX4i5kuZujvwUeiTpsLX5kgau9LzPox+FdA3SMUkppCY3hsou2o2RxXPTfjocE7OlYQrUqvA==} + /@vuepress/bundler-vite@2.0.0-rc.23(sass-embedded@1.89.0)(sass@1.89.0): + resolution: {integrity: sha512-59oBof+QaCyrZVOussrmv3bHxpwFPsLlI9yQbq2ubR+dFNzgfAtb8Dpm2z9iB/duZnx6PgmWPke4qGl9wOjEKw==} dependencies: - '@vitejs/plugin-vue': 5.0.5(vite@5.2.13)(vue@3.4.30) - '@vuepress/client': 2.0.0-rc.9 - '@vuepress/core': 2.0.0-rc.9 - '@vuepress/shared': 2.0.0-rc.9 - '@vuepress/utils': 2.0.0-rc.9 - autoprefixer: 10.4.19(postcss@8.4.38) + '@vitejs/plugin-vue': 5.2.4(vite@6.3.5)(vue@3.5.15) + '@vuepress/bundlerutils': 2.0.0-rc.23 + '@vuepress/client': 2.0.0-rc.23 + '@vuepress/core': 2.0.0-rc.23 + '@vuepress/shared': 2.0.0-rc.23 + '@vuepress/utils': 2.0.0-rc.23 + autoprefixer: 10.4.21(postcss@8.5.3) connect-history-api-fallback: 2.0.0 - postcss: 8.4.38 - postcss-load-config: 5.1.0(postcss@8.4.38) - rollup: 4.18.0 - vite: 5.2.13 - vue: 3.4.30 - vue-router: 4.4.0(vue@3.4.30) + postcss: 8.5.3 + postcss-load-config: 6.0.1(postcss@8.5.3) + rollup: 4.41.1 + vite: 6.3.5(sass-embedded@1.89.0)(sass@1.89.0) + vue: 3.5.15 + vue-router: 4.5.1(vue@3.5.15) transitivePeerDependencies: - '@types/node' - jiti - less - lightningcss - sass + - sass-embedded - stylus - sugarss - supports-color - terser - tsx - typescript + - yaml + dev: true + + /@vuepress/bundlerutils@2.0.0-rc.23: + 
resolution: {integrity: sha512-XgDbIT10xI7m8Pto+N8Mi+o+s1oAg+Mo65WLeHkaCexSRrF9Fa9WRun28EtB5PnyVhaZvnXh5XXuthXZl206JA==} + dependencies: + '@vuepress/client': 2.0.0-rc.23 + '@vuepress/core': 2.0.0-rc.23 + '@vuepress/shared': 2.0.0-rc.23 + '@vuepress/utils': 2.0.0-rc.23 + vue: 3.5.15 + vue-router: 4.5.1(vue@3.5.15) + transitivePeerDependencies: + - supports-color + - typescript dev: true - /@vuepress/cli@2.0.0-rc.9: - resolution: {integrity: sha512-uv7Xmv3QmPpzCaUAq0oKEwp2tY64AO+7mxamgr7tr+t6FEnCYqr+X0nLlH17UtMkmGWIsbHLIlMjteprxGxIMg==} + /@vuepress/cli@2.0.0-rc.23: + resolution: {integrity: sha512-lNAvRf4zyfnl8pgUA/uj2yCgsroJJzUm2dEwmudOIvfSV+N5jMUQuomdE5gZemDDk2oE2gqyRPBOZ12LP2EEIg==} hasBin: true dependencies: - '@vuepress/core': 2.0.0-rc.9 - '@vuepress/shared': 2.0.0-rc.9 - '@vuepress/utils': 2.0.0-rc.9 + '@vuepress/core': 2.0.0-rc.23 + '@vuepress/shared': 2.0.0-rc.23 + '@vuepress/utils': 2.0.0-rc.23 cac: 6.7.14 chokidar: 3.6.0 - envinfo: 7.13.0 - esbuild: 0.20.2 + envinfo: 7.14.0 + esbuild: 0.25.4 transitivePeerDependencies: - supports-color - typescript dev: true - /@vuepress/client@2.0.0-rc.9: - resolution: {integrity: sha512-V5jA6L1nHQ8tXBshRHBJKei7HPFonGxFzmVK5yjj2Ho/Xtp/SD9rBS6dyYd5CSkKRGQDgy19Z+BUUPXtdI1qzg==} + /@vuepress/client@2.0.0-rc.23: + resolution: {integrity: sha512-/2sdQTOELCUgoEjy2XGqcDMHSAz1kdaMYBr+8zv5et2aYzpn9rYdW0SzXTprhc354ccN65xNHarr6uIbVJ1m0g==} dependencies: - '@vue/devtools-api': 6.6.3 - '@vuepress/shared': 2.0.0-rc.9 - vue: 3.4.30 - vue-router: 4.4.0(vue@3.4.30) + '@vue/devtools-api': 7.7.6 + '@vue/devtools-kit': 7.7.6 + '@vuepress/shared': 2.0.0-rc.23 + vue: 3.5.15 + vue-router: 4.5.1(vue@3.5.15) transitivePeerDependencies: - typescript dev: true - /@vuepress/core@2.0.0-rc.9: - resolution: {integrity: sha512-uvMkIqYJ7vjfYEC91rMmT8YJt8xXnob5YYY3TzlwWUSEv4yoV3nlVu0l6Zfhenx/7FwKaxRJ/ePlUGIgUHBcBw==} + /@vuepress/core@2.0.0-rc.23: + resolution: {integrity: sha512-CkXDOCKJATxFciEuLCDtAzdCkGyNfCcmBYyhsvYLSJU8oiXgt27EjmXNKTpN+MNXSl934/353UERExGafhsTfg==} dependencies: - '@vuepress/client': 2.0.0-rc.9 - '@vuepress/markdown': 2.0.0-rc.9 - '@vuepress/shared': 2.0.0-rc.9 - '@vuepress/utils': 2.0.0-rc.9 - vue: 3.4.30 + '@vuepress/client': 2.0.0-rc.23 + '@vuepress/markdown': 2.0.0-rc.23 + '@vuepress/shared': 2.0.0-rc.23 + '@vuepress/utils': 2.0.0-rc.23 + vue: 3.5.15 transitivePeerDependencies: - supports-color - typescript dev: true - /@vuepress/helper@2.0.0-rc.24(vuepress@2.0.0-rc.9): - resolution: {integrity: sha512-qXC+tXTKfZ7eJ+h3wYC/7Q903Tbqcz9Vqxku63R6pmcpbsRtt3l8XQRdJ/LMT5yX0wZln4Qzx1NY6S4psr0lzw==} + /@vuepress/helper@2.0.0-rc.104(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-GmsFstdmryNLjCDF+wVTP6wBmHYAenAbtd04TG4se/ZB+pfhCNT5Zq6dEO3TG35JLcdUm/bI4uE3BE4WVBkSgw==} peerDependencies: - vuepress: 2.0.0-rc.9 + vuepress: 2.0.0-rc.23 dependencies: - '@vue/shared': 3.4.30 - cheerio: 1.0.0-rc.12 + '@vue/shared': 3.5.15 + '@vueuse/core': 13.2.0(vue@3.5.15) + cheerio: 1.0.0 fflate: 0.8.2 gray-matter: 4.0.3 - vue: 3.4.30 - vuepress: 2.0.0-rc.9(@vuepress/bundler-vite@2.0.0-rc.9)(vue@3.4.30) + vue: 3.5.15 + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) transitivePeerDependencies: - typescript dev: true - /@vuepress/markdown@2.0.0-rc.9: - resolution: {integrity: sha512-e7as2ar3RQp0bUyMiwBPi7L/G2fzscb3s0BywNcAwubFR22o0/dBEYRYdrN0clPQ2FXpPxF6AFj4aD7O1heCbw==} - dependencies: - '@mdit-vue/plugin-component': 2.1.3 - '@mdit-vue/plugin-frontmatter': 2.1.3 - '@mdit-vue/plugin-headers': 2.1.3 - '@mdit-vue/plugin-sfc': 2.1.3 - '@mdit-vue/plugin-title': 
2.1.3 - '@mdit-vue/plugin-toc': 2.1.3 - '@mdit-vue/shared': 2.1.3 - '@mdit-vue/types': 2.1.0 - '@types/markdown-it': 13.0.8 - '@types/markdown-it-emoji': 2.0.5 - '@vuepress/shared': 2.0.0-rc.9 - '@vuepress/utils': 2.0.0-rc.9 + /@vuepress/helper@2.0.0-rc.106(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-z55+VY6jh6TBnluXH5DralRDvLEiaGRn53iqi6BrWD+f8Hef+Jus1ivOnjM5awitXaBYu9e4rrqC2IMtuSyWkA==} + peerDependencies: + vuepress: 2.0.0-rc.23 + dependencies: + '@vue/shared': 3.5.15 + '@vueuse/core': 13.2.0(vue@3.5.15) + cheerio: 1.0.0 + fflate: 0.8.2 + gray-matter: 4.0.3 + vue: 3.5.15 + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) + transitivePeerDependencies: + - typescript + dev: true + + /@vuepress/helper@2.0.0-rc.47(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-XlfrkRjxv7Id3sc8Wjh7pHs2eFhOE/HAA+u9AMLZHcfxZUBkIftBfrSqg9ZBGybVkm4aGT+K/sC2IZxFhqukiA==} + peerDependencies: + vuepress: 2.0.0-rc.15 + dependencies: + '@vue/shared': 3.5.15 + '@vueuse/core': 11.3.0(vue@3.5.15) + cheerio: 1.0.0 + fflate: 0.8.2 + gray-matter: 4.0.3 + vue: 3.5.15 + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) + transitivePeerDependencies: + - '@vue/composition-api' + - typescript + dev: true + + /@vuepress/highlighter-helper@2.0.0-rc.103(@vueuse/core@13.2.0)(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-gYOF+5Q4ilo3Km5KSZfie8w1Fs2Nit/YnvWaIenWelSfp3DHweLNiwOhVjp8e/s8bmCEozP3CtOxmWXEZNrHng==} + peerDependencies: + '@vueuse/core': ^13.1.0 + vuepress: 2.0.0-rc.23 + peerDependenciesMeta: + '@vueuse/core': + optional: true + dependencies: + '@vueuse/core': 13.2.0(vue@3.5.15) + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) + dev: true + + /@vuepress/markdown@2.0.0-rc.23: + resolution: {integrity: sha512-KDC5xtd6GQBKsKkOKchJ5yxof/JES6StsBAmm5+S6WVJGOFRCVw5tpicFO9tgm1alwWFbX0WD5oloPq/ZOJtfA==} + dependencies: + '@mdit-vue/plugin-component': 2.1.4 + '@mdit-vue/plugin-frontmatter': 2.1.4 + '@mdit-vue/plugin-headers': 2.1.4 + '@mdit-vue/plugin-sfc': 2.1.4 + '@mdit-vue/plugin-title': 2.1.4 + '@mdit-vue/plugin-toc': 2.1.4 + '@mdit-vue/shared': 2.1.4 + '@mdit-vue/types': 2.1.4 + '@types/markdown-it': 14.1.2 + '@types/markdown-it-emoji': 3.0.1 + '@vuepress/shared': 2.0.0-rc.23 + '@vuepress/utils': 2.0.0-rc.23 markdown-it: 14.1.0 - markdown-it-anchor: 8.6.7(@types/markdown-it@13.0.8)(markdown-it@14.1.0) + markdown-it-anchor: 9.2.0(@types/markdown-it@14.1.2)(markdown-it@14.1.0) markdown-it-emoji: 3.0.0 mdurl: 2.0.0 transitivePeerDependencies: - supports-color dev: true - /@vuepress/plugin-active-header-links@2.0.0-rc.21(vuepress@2.0.0-rc.9): - resolution: {integrity: sha512-6i9TfGDV1zfszQ5aw6bV+/UvPdBWt3VxN2WB4Dg5o1g8Qn4z5CI6AW6VfLKRyaKUD+Rzj6W+Ikgx4xnF5RZAdA==} + /@vuepress/plugin-active-header-links@2.0.0-rc.103(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-NStHWt6pYpkytULiAN2HWExsETbJKo8iCRGVbKkm6rn4NFM5v5zODv/0Mw7aRZ35X8b6H75BYVY3zKe8ahUkDQ==} peerDependencies: - vuepress: 2.0.0-rc.9 + vuepress: 2.0.0-rc.23 dependencies: - '@vueuse/core': 10.11.0(vue@3.4.30) - vue: 3.4.30 - vuepress: 2.0.0-rc.9(@vuepress/bundler-vite@2.0.0-rc.9)(vue@3.4.30) + '@vueuse/core': 13.2.0(vue@3.5.15) + vue: 3.5.15 + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) transitivePeerDependencies: - - '@vue/composition-api' - typescript dev: true - /@vuepress/plugin-back-to-top@2.0.0-rc.24(vuepress@2.0.0-rc.9): - resolution: {integrity: sha512-cU5KtsuqUBcDiNlAD+I2NaaEd7ZRDldWPggJMgE7VvhEQ8uJMOq4ogh2IabeqGZ26XiUYuGnNrp4JK5mDkQlvw==} 
+ /@vuepress/plugin-back-to-top@2.0.0-rc.104(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-8pdtoK1LlH+VVV+tAZuv5J6jyeILVtip51TkzfeenHtI6NSba3SxPq9qaUhX2GPHTcUZV62higM4rwG2zAEz5A==} peerDependencies: - vuepress: 2.0.0-rc.9 + vuepress: 2.0.0-rc.23 dependencies: - '@vuepress/helper': 2.0.0-rc.24(vuepress@2.0.0-rc.9) - '@vueuse/core': 10.11.0(vue@3.4.30) - vue: 3.4.30 - vuepress: 2.0.0-rc.9(@vuepress/bundler-vite@2.0.0-rc.9)(vue@3.4.30) + '@vuepress/helper': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + '@vueuse/core': 13.2.0(vue@3.5.15) + vue: 3.5.15 + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) transitivePeerDependencies: - - '@vue/composition-api' - typescript dev: true - /@vuepress/plugin-blog@2.0.0-rc.24(vuepress@2.0.0-rc.9): - resolution: {integrity: sha512-EEpJcTHhlB6/LWXWdhBN3f9dFRrkOJSWw9KyD/7/GBImqbPKrdWh2y6VZejUvZBK+1Onv0/KEXMgE3zI3LAB/g==} + /@vuepress/plugin-blog@2.0.0-rc.104(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-WFr4yx5hgYE8RVOqE/sXqovAujd2HgAQEIFgZwKU1u1ncmtlbwBwdKWOU8I2mv0iEhYWu53TUyP+uZvNf/kVLg==} peerDependencies: - vuepress: 2.0.0-rc.9 + vuepress: 2.0.0-rc.23 dependencies: - '@vuepress/helper': 2.0.0-rc.24(vuepress@2.0.0-rc.9) + '@vuepress/helper': 2.0.0-rc.104(vuepress@2.0.0-rc.23) chokidar: 3.6.0 - vue: 3.4.30 - vuepress: 2.0.0-rc.9(@vuepress/bundler-vite@2.0.0-rc.9)(vue@3.4.30) + vue: 3.5.15 + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) transitivePeerDependencies: - typescript dev: true - /@vuepress/plugin-catalog@2.0.0-rc.24(vuepress@2.0.0-rc.9): - resolution: {integrity: sha512-MkJ14qOd0KoKb8cmFqT0tPNK9REJNP8bm1dZBdYOrqX8mDgt4nq2EyVOZTBZWqaYyXekJZyNfXkN4i556/8x+w==} + /@vuepress/plugin-catalog@2.0.0-rc.104(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-HQSmDhHpx+1cPL0jWmKlLUzc0e0XwoqFbX5X5MX5wDVohqjx3j04iEKWEeYhE7NbirbxlA9dXc3/ssb/WoYY+A==} peerDependencies: - vuepress: 2.0.0-rc.9 + vuepress: 2.0.0-rc.23 dependencies: - '@vuepress/helper': 2.0.0-rc.24(vuepress@2.0.0-rc.9) - vue: 3.4.30 - vuepress: 2.0.0-rc.9(@vuepress/bundler-vite@2.0.0-rc.9)(vue@3.4.30) + '@vuepress/helper': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + vue: 3.5.15 + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) transitivePeerDependencies: - typescript dev: true - /@vuepress/plugin-comment@2.0.0-rc.24(sass-loader@14.2.1)(vuepress@2.0.0-rc.9): - resolution: {integrity: sha512-Kl5LHCbyoTIXFZwMmJa4f8neMbebC4ZhASf8cnfdNTBf6XRVbSH1fGJKGFK1lUm3EcjjBHIAuZIrlMWPmepUGQ==} + /@vuepress/plugin-comment@2.0.0-rc.104(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-I0OENWem7z7g+OXOU9L9RGj2hfBZ3MRl8Wd3+1yXE3t8PUimUKrUUj3x0eVQPhAwupmzV1ncTsm72M2IwIVRmQ==} peerDependencies: - '@waline/client': ^3.1.0 - artalk: ^2.7.3 - sass-loader: ^14.0.0 - twikoo: ^1.5.0 - vuepress: 2.0.0-rc.9 + '@waline/client': ^3.5.5 + artalk: ^2.9.1 + twikoo: ^1.6.41 + vuepress: 2.0.0-rc.23 peerDependenciesMeta: '@waline/client': optional: true artalk: optional: true - sass-loader: - optional: true twikoo: optional: true dependencies: - '@vuepress/helper': 2.0.0-rc.24(vuepress@2.0.0-rc.9) - giscus: 1.5.0 - sass-loader: 14.2.1 - vue: 3.4.30 - vuepress: 2.0.0-rc.9(@vuepress/bundler-vite@2.0.0-rc.9)(vue@3.4.30) + '@vuepress/helper': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + '@vueuse/core': 13.2.0(vue@3.5.15) + giscus: 1.6.0 + vue: 3.5.15 + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) transitivePeerDependencies: - typescript dev: true - /@vuepress/plugin-copy-code@2.0.0-rc.24(vuepress@2.0.0-rc.9): - resolution: {integrity: 
sha512-anLxeQqTiU+LNdEBK7EUbeIcU0YEh6x9Bs9SQV4wrdFKjYc5x2U4Gf9l9NagOSf1e3c0QZRYL3wLQ5q3XZnGjA==} + /@vuepress/plugin-copy-code@2.0.0-rc.104(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-WROusuYp+EWCZcfAu1MX/DtvWbfmDYipDmCDAdwA5C78qbjWJbEDeMOJKvyO4AhfjxrdS6wmOjw61M9t2ZJUIQ==} peerDependencies: - vuepress: 2.0.0-rc.9 + vuepress: 2.0.0-rc.23 dependencies: - '@vuepress/helper': 2.0.0-rc.24(vuepress@2.0.0-rc.9) - '@vueuse/core': 10.11.0(vue@3.4.30) - vue: 3.4.30 - vuepress: 2.0.0-rc.9(@vuepress/bundler-vite@2.0.0-rc.9)(vue@3.4.30) + '@vuepress/helper': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + '@vueuse/core': 13.2.0(vue@3.5.15) + vue: 3.5.15 + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) transitivePeerDependencies: - - '@vue/composition-api' - typescript dev: true - /@vuepress/plugin-copyright@2.0.0-rc.24(vuepress@2.0.0-rc.9): - resolution: {integrity: sha512-sposicjAxALPhXb6TBVq5x6dE2/87OvwyB4RFs0kCeUjE4Tg7WKj2E28vdRQtNGE0P8MC0D0qJbi/ORbg9UObw==} + /@vuepress/plugin-copyright@2.0.0-rc.104(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-bURyvHxFWdwL+fz3OGjlEgmQiZTdu+WhaJixrRyRi05L7r4sBnV1MIbTH29757HerdFu5THona0mF15xgj0B2A==} peerDependencies: - vuepress: 2.0.0-rc.9 + vuepress: 2.0.0-rc.23 dependencies: - '@vuepress/helper': 2.0.0-rc.24(vuepress@2.0.0-rc.9) - '@vueuse/core': 10.11.0(vue@3.4.30) - vue: 3.4.30 - vuepress: 2.0.0-rc.9(@vuepress/bundler-vite@2.0.0-rc.9)(vue@3.4.30) + '@vuepress/helper': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + '@vueuse/core': 13.2.0(vue@3.5.15) + vue: 3.5.15 + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) transitivePeerDependencies: - - '@vue/composition-api' - typescript dev: true - /@vuepress/plugin-external-link-icon@2.0.0-rc.24(vuepress@2.0.0-rc.9): - resolution: {integrity: sha512-kry1EFkv6WaGOCzk9vRGHGcAuQHNVQ/jDEgtagUFaRk5+HtCQB60VzhmFdwM08DC2XAmDieBLm1MMR0T2DdHSw==} + /@vuepress/plugin-git@2.0.0-rc.104(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-Gw9aLcrMKJ+ThLCFSeYMZRXkW2tak3OhuwyObchij63SiR8G8I6EZWzGaZAT8ad3BUUcst6qLX6phXVkShtQWQ==} peerDependencies: - vuepress: 2.0.0-rc.9 + vuepress: 2.0.0-rc.23 + dependencies: + '@vuepress/helper': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + '@vueuse/core': 13.2.0(vue@3.5.15) + rehype-parse: 9.0.1 + rehype-sanitize: 6.0.0 + rehype-stringify: 10.0.1 + unified: 11.0.5 + vue: 3.5.15 + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) + transitivePeerDependencies: + - typescript + dev: true + + /@vuepress/plugin-icon@2.0.0-rc.104(markdown-it@14.1.0)(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-UNopisXX8hWPRSyw2tgbz6u7Mtj/sm6moUbTCPD6YcJqJAyp9SnkQDzBMUtmvPnWrKUwbWdJvCvAdMIMKa/mSw==} + peerDependencies: + vuepress: 2.0.0-rc.23 dependencies: - vue: 3.4.30 - vuepress: 2.0.0-rc.9(@vuepress/bundler-vite@2.0.0-rc.9)(vue@3.4.30) + '@mdit/plugin-icon': 0.18.0(markdown-it@14.1.0) + '@vuepress/helper': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + '@vueuse/core': 13.2.0(vue@3.5.15) + vue: 3.5.15 + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) transitivePeerDependencies: + - markdown-it - typescript dev: true - /@vuepress/plugin-git@2.0.0-rc.22(vuepress@2.0.0-rc.9): - resolution: {integrity: sha512-+T50AdCZ68Pkld4r8IEHTXLugfNVCxxPp2G1hlI/lpQ6IZcpLbswMI6l9xbbo15RrOBg/V0jkim/B/jaaVIM6A==} + /@vuepress/plugin-links-check@2.0.0-rc.104(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-duE6CUoc2eorW4sYw8tMFuPEStm35dmnk0XhxQQqkeIg2gpWDMkrb4sYqfU8twLU3Z+Tcq9FWYwyQ2QjyT8TYQ==} peerDependencies: - vuepress: 2.0.0-rc.9 + vuepress: 
2.0.0-rc.23 dependencies: - execa: 8.0.1 - vuepress: 2.0.0-rc.9(@vuepress/bundler-vite@2.0.0-rc.9)(vue@3.4.30) + '@vuepress/helper': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) + transitivePeerDependencies: + - typescript dev: true - /@vuepress/plugin-links-check@2.0.0-rc.24(vuepress@2.0.0-rc.9): - resolution: {integrity: sha512-+HPIutNZhMP2eSf1Gb217WLCQlQhsMkebTfuZYyDSGGvY5TQmXOAuu/X7Xwh1lJlml9asPUXTcFe2cZcEtHHIA==} + /@vuepress/plugin-markdown-ext@2.0.0-rc.104(markdown-it@14.1.0)(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-W3vWwFsoAVgEWrNa8u+/FgGrRjIM7XYJYXhJzfJJdh1wOKJUOavBGdn20JGRnD3b3KAw0q4UzSeDeJPLA+vTKA==} peerDependencies: - vuepress: 2.0.0-rc.9 + vuepress: 2.0.0-rc.23 dependencies: - '@vuepress/helper': 2.0.0-rc.24(vuepress@2.0.0-rc.9) - vuepress: 2.0.0-rc.9(@vuepress/bundler-vite@2.0.0-rc.9)(vue@3.4.30) + '@mdit/plugin-container': 0.18.0(markdown-it@14.1.0) + '@mdit/plugin-footnote': 0.18.0(markdown-it@14.1.0) + '@mdit/plugin-tasklist': 0.18.0(markdown-it@14.1.0) + '@types/markdown-it': 14.1.2 + '@vuepress/helper': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + js-yaml: 4.1.0 + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) + transitivePeerDependencies: + - markdown-it + - typescript + dev: true + + /@vuepress/plugin-markdown-hint@2.0.0-rc.104(markdown-it@14.1.0)(vue@3.5.15)(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-5ozO5Q9gTh7x7ZPAvAAXULdD6S0A1KrsunMtjG3TPREL99ExILwMnnRtgnDSDA11gH7SBMrKuS18E77ccHLaeA==} + peerDependencies: + vuepress: 2.0.0-rc.23 + dependencies: + '@mdit/plugin-alert': 0.18.0(markdown-it@14.1.0) + '@mdit/plugin-container': 0.18.0(markdown-it@14.1.0) + '@types/markdown-it': 14.1.2 + '@vuepress/helper': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + '@vueuse/core': 13.2.0(vue@3.5.15) + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) + transitivePeerDependencies: + - markdown-it + - typescript + - vue + dev: true + + /@vuepress/plugin-markdown-image@2.0.0-rc.104(markdown-it@14.1.0)(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-UV/nRsvt7/aTfRFrdVD1n/M70IdTT988pGMdzAOexlcs0xdStLHmo9w9d9CwXOh5rrIWMM4JLzvJnpYY3Njluw==} + peerDependencies: + vuepress: 2.0.0-rc.23 + dependencies: + '@mdit/plugin-figure': 0.18.0(markdown-it@14.1.0) + '@mdit/plugin-img-lazyload': 0.18.0(markdown-it@14.1.0) + '@mdit/plugin-img-mark': 0.18.0(markdown-it@14.1.0) + '@mdit/plugin-img-size': 0.18.1(markdown-it@14.1.0) + '@types/markdown-it': 14.1.2 + '@vuepress/helper': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) transitivePeerDependencies: + - markdown-it - typescript dev: true - /@vuepress/plugin-nprogress@2.0.0-rc.21(vuepress@2.0.0-rc.9): - resolution: {integrity: sha512-qpGA76195SyfpuQC1Pb9LwgCYIp/zg+BBDnexukJMdLjP1KnaU7HLhS5NnRNIWv8E+IC61zLvlh/wRox17QE+w==} + /@vuepress/plugin-markdown-include@2.0.0-rc.104(markdown-it@14.1.0)(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-MiuwUQZB4dz9Zx0o5wzP/GX23dzPLaEPATqhgN6+roO75tHaGUUpt0752b5HrUtny8R1evFHz/kk3mDjQfIN7g==} peerDependencies: - vuepress: 2.0.0-rc.9 + vuepress: 2.0.0-rc.23 dependencies: - vue: 3.4.30 - vuepress: 2.0.0-rc.9(@vuepress/bundler-vite@2.0.0-rc.9)(vue@3.4.30) + '@mdit/plugin-include': 0.18.0(markdown-it@14.1.0) + '@types/markdown-it': 14.1.2 + '@vuepress/helper': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) transitivePeerDependencies: + - markdown-it - typescript dev: true - 
/@vuepress/plugin-photo-swipe@2.0.0-rc.24(vuepress@2.0.0-rc.9): - resolution: {integrity: sha512-2Rvi8ODFJgIDDfrXzt7ynY3nizCiEte2Cna4W73bH1+s9PMiOoa5rQ54/r+jbLe4Nw5Iw4x+PXcRN8fDQPllKg==} + /@vuepress/plugin-markdown-math@2.0.0-rc.104(markdown-it@14.1.0)(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-w+Dn425WKY3oMf5jl1hjoDFsoENvEhIANMsLCDPDa9GRxTdxd1LZdRJ6UMfsolC2nPoa+diZG9/Z+Nk9jAIz1Q==} peerDependencies: - vuepress: 2.0.0-rc.9 + katex: ^0.16.21 + mathjax-full: ^3.2.2 + vuepress: 2.0.0-rc.23 + peerDependenciesMeta: + katex: + optional: true + mathjax-full: + optional: true dependencies: - '@vuepress/helper': 2.0.0-rc.24(vuepress@2.0.0-rc.9) - '@vueuse/core': 10.11.0(vue@3.4.30) - photoswipe: 5.4.4 - vue: 3.4.30 - vuepress: 2.0.0-rc.9(@vuepress/bundler-vite@2.0.0-rc.9)(vue@3.4.30) + '@mdit/plugin-katex-slim': 0.18.0(markdown-it@14.1.0) + '@mdit/plugin-mathjax-slim': 0.18.0(markdown-it@14.1.0) + '@types/markdown-it': 14.1.2 + '@vuepress/helper': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + vue: 3.5.15 + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) + transitivePeerDependencies: + - markdown-it + - typescript + dev: true + + /@vuepress/plugin-markdown-stylize@2.0.0-rc.104(markdown-it@14.1.0)(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-jSIpE+p6/aPXZx4ipU3YVnqB9wPFwBRmbDhsw0xVg7x+9TBLHQGmp/fUg6bYWjgU/TSnrdzri6B1sgkzuoWZKg==} + peerDependencies: + vuepress: 2.0.0-rc.23 + dependencies: + '@mdit/plugin-align': 0.18.0(markdown-it@14.1.0) + '@mdit/plugin-attrs': 0.18.0(markdown-it@14.1.0) + '@mdit/plugin-mark': 0.18.0(markdown-it@14.1.0) + '@mdit/plugin-spoiler': 0.18.0(markdown-it@14.1.0) + '@mdit/plugin-stylize': 0.18.0(markdown-it@14.1.0) + '@mdit/plugin-sub': 0.18.0(markdown-it@14.1.0) + '@mdit/plugin-sup': 0.18.0(markdown-it@14.1.0) + '@types/markdown-it': 14.1.2 + '@vuepress/helper': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) + transitivePeerDependencies: + - markdown-it + - typescript + dev: true + + /@vuepress/plugin-markdown-tab@2.0.0-rc.104(markdown-it@14.1.0)(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-M/c08lAfSg2fJq8KqOoX/kdFWlFjt+rT+0v8JzKtDjqNcK8GzhCsaLNw+9X09FLBqDOolrXYnjvGvGPSNh2JxA==} + peerDependencies: + vuepress: 2.0.0-rc.23 + dependencies: + '@mdit/plugin-tab': 0.18.0(markdown-it@14.1.0) + '@types/markdown-it': 14.1.2 + '@vuepress/helper': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + '@vueuse/core': 13.2.0(vue@3.5.15) + vue: 3.5.15 + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) + transitivePeerDependencies: + - markdown-it + - typescript + dev: true + + /@vuepress/plugin-markdown-tab@2.0.0-rc.47(markdown-it@14.1.0)(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-oB3/slwgCSOxPejsi8nuzVCFRpaVFqQXUwzyH5DJYofTiwl51ELT2Jhbiz3fqBljTCQk0Ts2e+H10jUgo8Yg+w==} + peerDependencies: + vuepress: 2.0.0-rc.15 + dependencies: + '@mdit/plugin-tab': 0.13.2(markdown-it@14.1.0) + '@types/markdown-it': 14.1.2 + '@vuepress/helper': 2.0.0-rc.47(vuepress@2.0.0-rc.23) + '@vueuse/core': 11.3.0(vue@3.5.15) + vue: 3.5.15 + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) transitivePeerDependencies: - '@vue/composition-api' + - markdown-it - typescript dev: true - /@vuepress/plugin-prismjs@2.0.0-rc.21(vuepress@2.0.0-rc.9): - resolution: {integrity: sha512-dMTCu/TZ1QCmTHXL4THVeh9gWzuqkJV8qhck5U77OP1qmgyf+r529A+MTOgp3ddcph1Yzb/FRb2orlefHk+yNQ==} + /@vuepress/plugin-notice@2.0.0-rc.104(vuepress@2.0.0-rc.23): + resolution: {integrity: 
sha512-L5QQcCX5pQiPmI6g7j062m+DlSoiCpQClhq4sv+2vkpPefct4hpdH2h3L3WBwiijUUH2fZ/aZ8SPNI4aeqm4Nw==} peerDependencies: - vuepress: 2.0.0-rc.9 + vuepress: 2.0.0-rc.23 dependencies: - prismjs: 1.29.0 - vuepress: 2.0.0-rc.9(@vuepress/bundler-vite@2.0.0-rc.9)(vue@3.4.30) + '@vuepress/helper': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + '@vueuse/core': 13.2.0(vue@3.5.15) + chokidar: 3.6.0 + vue: 3.5.15 + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) + transitivePeerDependencies: + - typescript dev: true - /@vuepress/plugin-reading-time@2.0.0-rc.24(vuepress@2.0.0-rc.9): - resolution: {integrity: sha512-La6dgul551Xp2Iacs1URZnLX5YdakfJWFfE9vIhhX/Q1+slUGRVftFLh/nb0oVUrsXNeRlqCUncTyilg51Q1fA==} + /@vuepress/plugin-nprogress@2.0.0-rc.104(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-ZjdWXpoTY/+Aa+24mNLwMwFjx4qSn/jeKTAqtJTK697Tro7BCUa/KdKtDlXfdUVsm1O9Ewc/Nh0T9eMjEaWAfA==} peerDependencies: - vuepress: 2.0.0-rc.9 + vuepress: 2.0.0-rc.23 dependencies: - '@vuepress/helper': 2.0.0-rc.24(vuepress@2.0.0-rc.9) - vue: 3.4.30 - vuepress: 2.0.0-rc.9(@vuepress/bundler-vite@2.0.0-rc.9)(vue@3.4.30) + '@vuepress/helper': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + vue: 3.5.15 + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) transitivePeerDependencies: - typescript dev: true - /@vuepress/plugin-rtl@2.0.0-rc.21(vuepress@2.0.0-rc.9): - resolution: {integrity: sha512-r+4aP898KsFbF6m1J0e+776ZlSE9yaHr9zsMlib1GEUDcqP/OykMYaNKwRsOMB1eFXNmymgHlXFvswBGEHxS7w==} + /@vuepress/plugin-photo-swipe@2.0.0-rc.104(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-/NiQ2n3wvhQ3vq9jwI98p9ecdfh6Q7bXfFZbkfVoYD3fQMgfg1pkRfO2VnR6uJUQkKOE7x/c1REB5ulm/lZQ1A==} peerDependencies: - vuepress: 2.0.0-rc.9 + vuepress: 2.0.0-rc.23 dependencies: - vue: 3.4.30 - vuepress: 2.0.0-rc.9(@vuepress/bundler-vite@2.0.0-rc.9)(vue@3.4.30) + '@vuepress/helper': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + '@vueuse/core': 13.2.0(vue@3.5.15) + photoswipe: 5.4.4 + vue: 3.5.15 + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) transitivePeerDependencies: - typescript dev: true - /@vuepress/plugin-seo@2.0.0-rc.24(vuepress@2.0.0-rc.9): - resolution: {integrity: sha512-E0dRCNqV6RLoVV4j8xchmlsnlR7OyPQxWmWrk20mBiyluRI05OXdb20ZQbYJe3PfK8f8DnyETzob943HBg3sVA==} + /@vuepress/plugin-reading-time@2.0.0-rc.104(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-5podLkwt9uJZy4rikwc2FP6R2ObWS9t73sUi0XBTUlAmv0fh55kXDY0bbyiOv0odlWwrmq48O2si3hggFZVdGA==} peerDependencies: - vuepress: 2.0.0-rc.9 + vuepress: 2.0.0-rc.23 dependencies: - '@vuepress/helper': 2.0.0-rc.24(vuepress@2.0.0-rc.9) - vuepress: 2.0.0-rc.9(@vuepress/bundler-vite@2.0.0-rc.9)(vue@3.4.30) + '@vuepress/helper': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + vue: 3.5.15 + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) transitivePeerDependencies: - typescript dev: true - /@vuepress/plugin-sitemap@2.0.0-rc.24(vuepress@2.0.0-rc.9): - resolution: {integrity: sha512-su5ZD8vGuNpbqmb+uCOzWXCZ0eii8wnkdhn4V1xtmmXsrmYDr0FFHp61Ebb6EYAquB3HH1v3hWdfLRMU9DM6VQ==} + /@vuepress/plugin-redirect@2.0.0-rc.104(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-/71G5+8NzhOFOxgrju3ocNJ0NocvRLBWIEXIDKscBqMMD5gD12WdkSS6TsFKDNLRWEaTgTsljD75yYn9WvNNNQ==} + hasBin: true peerDependencies: - vuepress: 2.0.0-rc.9 + vuepress: 2.0.0-rc.23 dependencies: - '@vuepress/helper': 2.0.0-rc.24(vuepress@2.0.0-rc.9) - sitemap: 7.1.2 - vuepress: 2.0.0-rc.9(@vuepress/bundler-vite@2.0.0-rc.9)(vue@3.4.30) + '@vuepress/helper': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + '@vueuse/core': 
13.2.0(vue@3.5.15) + commander: 13.1.0 + vue: 3.5.15 + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) transitivePeerDependencies: - typescript dev: true - /@vuepress/plugin-theme-data@2.0.0-rc.21(vuepress@2.0.0-rc.9): - resolution: {integrity: sha512-vLXvTKx4gWXY6oVaJ9Z2ECnojnKQuXBIe1ZGIAwJdxCYfr6aaqggrVvmphB8BwTURh0XAuis/l6YTcMrs0bX8Q==} + /@vuepress/plugin-rtl@2.0.0-rc.104(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-nz4TkvdyWPoBeCB19UW0FThH/WqG0eH/KVr95bU9hSAQ3NCBxcSxT3RMBN+7gG0Mryfj5niSVb0HXMSCPf7RBw==} peerDependencies: - vuepress: 2.0.0-rc.9 + vuepress: 2.0.0-rc.23 dependencies: - '@vue/devtools-api': 6.6.3 - vue: 3.4.30 - vuepress: 2.0.0-rc.9(@vuepress/bundler-vite@2.0.0-rc.9)(vue@3.4.30) + '@vuepress/helper': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + '@vueuse/core': 13.2.0(vue@3.5.15) + vue: 3.5.15 + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) transitivePeerDependencies: - typescript dev: true - /@vuepress/shared@2.0.0-rc.9: - resolution: {integrity: sha512-XfI6CWNv4/Vp9Iew6GJil9RUSy1rM7zGdjwikr0j3Rkh55q3f00w1wud47wE9kxRqsZ0PIvsMget5CxEn5rA/w==} + /@vuepress/plugin-sass-palette@2.0.0-rc.104(sass-embedded@1.89.0)(sass-loader@16.0.5)(sass@1.89.0)(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-IwYUTRSPU4yMikfg8tfdFwWLx1Msv1y/KbMUrpn2nlVzEqpmYCDorvskM45jlTv05PIrcerrVbEfYLyJTIVmAQ==} + peerDependencies: + sass: ^1.86.3 + sass-embedded: ^1.86.3 + sass-loader: ^16.0.5 + vuepress: 2.0.0-rc.23 + peerDependenciesMeta: + sass: + optional: true + sass-embedded: + optional: true + sass-loader: + optional: true + dependencies: + '@vuepress/helper': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + chokidar: 4.0.3 + sass: 1.89.0 + sass-embedded: 1.89.0 + sass-loader: 16.0.5(sass-embedded@1.89.0)(sass@1.89.0) + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) + transitivePeerDependencies: + - typescript + dev: true + + /@vuepress/plugin-seo@2.0.0-rc.104(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-/+ssrAl8z5lT+Z6/dzTxlOEbqspJiCN/w5NpeAu46Tvrf9XqqBO47vQURAdRxJeUGi7ddsWt61ctthQnayOOmg==} + peerDependencies: + vuepress: 2.0.0-rc.23 + dependencies: + '@vuepress/helper': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) + transitivePeerDependencies: + - typescript + dev: true + + /@vuepress/plugin-shiki@2.0.0-rc.104(@vueuse/core@13.2.0)(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-FDkdUsXdBcxU2yya7/69qCogxjoRf+GCoKMyJ2GOyZ71hX48h4e9EngF0Tds81FI0yofDiA2XAN396IxiYTV+A==} + peerDependencies: + '@vuepress/shiki-twoslash': 2.0.0-rc.104 + vuepress: 2.0.0-rc.23 + peerDependenciesMeta: + '@vuepress/shiki-twoslash': + optional: true + dependencies: + '@shikijs/transformers': 3.4.2 + '@vuepress/helper': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + '@vuepress/highlighter-helper': 2.0.0-rc.103(@vueuse/core@13.2.0)(vuepress@2.0.0-rc.23) + nanoid: 5.1.5 + shiki: 3.4.2 + synckit: 0.11.6 + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) + transitivePeerDependencies: + - '@vueuse/core' + - typescript + dev: true + + /@vuepress/plugin-sitemap@2.0.0-rc.104(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-pisuHpJoM/b0jhP/LgnLeqbiCIyPBIzzN5SpZa6ivB+gt3hKqHxki/gE4GwgrlY/VrUkq2gOss9+LwHFyY7iGw==} + peerDependencies: + vuepress: 2.0.0-rc.23 + dependencies: + '@vuepress/helper': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + sitemap: 8.0.0 + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) + transitivePeerDependencies: + - typescript + dev: true + 
+ /@vuepress/plugin-slimsearch@2.0.0-rc.106(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-bybriFMkazqK3gPBoEO2w2jNLO5ko+78kpIJ//BrfEXQ3VfSnLv5buEjQLAxwm9JboBteNYD+vCTn7iEdm6ZCw==} + peerDependencies: + vuepress: 2.0.0-rc.23 dependencies: - '@mdit-vue/types': 2.1.0 + '@vuepress/helper': 2.0.0-rc.106(vuepress@2.0.0-rc.23) + '@vueuse/core': 13.2.0(vue@3.5.15) + cheerio: 1.0.0 + chokidar: 3.6.0 + slimsearch: 2.2.2 + vue: 3.5.15 + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) + transitivePeerDependencies: + - typescript dev: true - /@vuepress/utils@2.0.0-rc.9: - resolution: {integrity: sha512-qk6Pel4JVKYKxp3bWxyvnwchvx3QaCWc7SqUw7L6qUo/um+0U2U45L0anWoAfckw12RXYhoIEbJ9UZpueiKOPg==} + /@vuepress/plugin-theme-data@2.0.0-rc.103(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-pHkCf7VZJDlVVE+LkDTmHMTdDxYkaUTNbglgTI1QoTq8cMpOb/M8BaBDtb8//DC2gbNTgx/x6wsPogwm0K45+w==} + peerDependencies: + vuepress: 2.0.0-rc.23 + dependencies: + '@vue/devtools-api': 7.7.6 + vue: 3.5.15 + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) + transitivePeerDependencies: + - typescript + dev: true + + /@vuepress/shared@2.0.0-rc.23: + resolution: {integrity: sha512-keUT4ZXVN0LvNWRxDOSjvyePZHoAmedVQvFqFWfH/3JjzLU1nrhn+WXucNtlJh6OqZZD5sdzCxnrotkb7MEnVw==} + dependencies: + '@mdit-vue/types': 2.1.4 + dev: true + + /@vuepress/utils@2.0.0-rc.23: + resolution: {integrity: sha512-nuert5yo58GS5g9UVGNPY3xCLuob1jg7p5t9gYThUIjWp4treFJZDgV8YGbrhmNxrvrS5pWyC9HYMTWRDdO98A==} dependencies: '@types/debug': 4.1.12 '@types/fs-extra': 11.0.4 '@types/hash-sum': 1.0.2 - '@vuepress/shared': 2.0.0-rc.9 - debug: 4.3.5 - fs-extra: 11.2.0 - globby: 14.0.1 + '@vuepress/shared': 2.0.0-rc.23 + debug: 4.4.1 + fs-extra: 11.3.0 + globby: 14.1.0 hash-sum: 2.0.0 - ora: 8.0.1 - picocolors: 1.0.1 + ora: 8.2.0 + picocolors: 1.1.1 upath: 2.0.1 transitivePeerDependencies: - supports-color dev: true - /@vueuse/core@10.11.0(vue@3.4.30): - resolution: {integrity: sha512-x3sD4Mkm7PJ+pcq3HX8PLPBadXCAlSDR/waK87dz0gQE+qJnaaFhc/dZVfJz+IUYzTMVGum2QlR7ImiJQN4s6g==} + /@vueuse/core@11.3.0(vue@3.5.15): + resolution: {integrity: sha512-7OC4Rl1f9G8IT6rUfi9JrKiXy4bfmHhZ5x2Ceojy0jnd3mHNEvV4JaRygH362ror6/NZ+Nl+n13LPzGiPN8cKA==} dependencies: '@types/web-bluetooth': 0.0.20 - '@vueuse/metadata': 10.11.0 - '@vueuse/shared': 10.11.0(vue@3.4.30) - vue-demi: 0.14.8(vue@3.4.30) + '@vueuse/metadata': 11.3.0 + '@vueuse/shared': 11.3.0(vue@3.5.15) + vue-demi: 0.14.10(vue@3.5.15) transitivePeerDependencies: - '@vue/composition-api' - vue dev: true - /@vueuse/metadata@10.11.0: - resolution: {integrity: sha512-kQX7l6l8dVWNqlqyN3ePW3KmjCQO3ZMgXuBMddIu83CmucrsBfXlH+JoviYyRBws/yLTQO8g3Pbw+bdIoVm4oQ==} + /@vueuse/core@13.2.0(vue@3.5.15): + resolution: {integrity: sha512-n5TZoIAxbWAQ3PqdVPDzLgIRQOujFfMlatdI+f7ditSmoEeNpPBvp7h2zamzikCmrhFIePAwdEQB6ENccHr7Rg==} + peerDependencies: + vue: ^3.5.0 + dependencies: + '@types/web-bluetooth': 0.0.21 + '@vueuse/metadata': 13.2.0 + '@vueuse/shared': 13.2.0(vue@3.5.15) + vue: 3.5.15 dev: true - /@vueuse/shared@10.11.0(vue@3.4.30): - resolution: {integrity: sha512-fyNoIXEq3PfX1L3NkNhtVQUSRtqYwJtJg+Bp9rIzculIZWHTkKSysujrOk2J+NrRulLTQH9+3gGSfYLWSEWU1A==} + /@vueuse/metadata@11.3.0: + resolution: {integrity: sha512-pwDnDspTqtTo2HwfLw4Rp6yywuuBdYnPYDq+mO38ZYKGebCUQC/nVj/PXSiK9HX5otxLz8Fn7ECPbjiRz2CC3g==} + dev: true + + /@vueuse/metadata@13.2.0: + resolution: {integrity: sha512-kPpzuQCU0+D8DZCzK0iPpIcXI+6ufWSgwnjJ6//GNpEn+SHViaCtR+XurzORChSgvpHO9YC8gGM97Y1kB+UabA==} + dev: true + + 
/@vueuse/shared@11.3.0(vue@3.5.15): + resolution: {integrity: sha512-P8gSSWQeucH5821ek2mn/ciCk+MS/zoRKqdQIM3bHq6p7GXDAJLmnRRKmF5F65sAVJIfzQlwR3aDzwCn10s8hA==} dependencies: - vue-demi: 0.14.8(vue@3.4.30) + vue-demi: 0.14.10(vue@3.5.15) transitivePeerDependencies: - '@vue/composition-api' - vue dev: true + /@vueuse/shared@13.2.0(vue@3.5.15): + resolution: {integrity: sha512-vx9ZPDF5HcU9up3Jgt3G62dMUfZEdk6tLyBAHYAG4F4n73vpaA7J5hdncDI/lS9Vm7GA/FPlbOmh9TrDZROTpg==} + peerDependencies: + vue: ^3.5.0 + dependencies: + vue: 3.5.15 + dev: true + + /acorn@8.14.1: + resolution: {integrity: sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg==} + engines: {node: '>=0.4.0'} + hasBin: true + dev: true + /ansi-regex@5.0.1: resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} engines: {node: '>=8'} dev: true - /ansi-regex@6.0.1: - resolution: {integrity: sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==} + /ansi-regex@6.1.0: + resolution: {integrity: sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==} engines: {node: '>=12'} dev: true @@ -1423,29 +2279,33 @@ packages: resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} dev: true - /autoprefixer@10.4.19(postcss@8.4.38): - resolution: {integrity: sha512-BaENR2+zBZ8xXhM4pUaKUxlVdxZ0EZhjvbopwnXmxRUfqDmwSpC2lAi/QXvx7NRdPCo1WKEcEF6mV64si1z4Ew==} + /autoprefixer@10.4.21(postcss@8.5.3): + resolution: {integrity: sha512-O+A6LWV5LDHSJD3LjHYoNi4VLsj/Whi7k6zG12xTYaU4cQ8oxQGckXNX8cRHK5yOZ/ppVHe0ZBXGzSV9jXdVbQ==} engines: {node: ^10 || ^12 || >=14} hasBin: true peerDependencies: postcss: ^8.1.0 dependencies: - browserslist: 4.23.1 - caniuse-lite: 1.0.30001638 + browserslist: 4.24.5 + caniuse-lite: 1.0.30001718 fraction.js: 4.3.7 normalize-range: 0.1.2 - picocolors: 1.0.1 - postcss: 8.4.38 + picocolors: 1.1.1 + postcss: 8.5.3 postcss-value-parser: 4.2.0 dev: true + /bail@2.0.2: + resolution: {integrity: sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw==} + dev: true + /balloon-css@1.2.0: resolution: {integrity: sha512-urXwkHgwp6GsXVF+it01485Z2Cj4pnW02ICnM0TemOlkKmCNnDLmyy+ZZiRXBpwldUXO+aRNr7Hdia4CBvXJ5A==} dev: true - /bcrypt-ts@5.0.2: - resolution: {integrity: sha512-gDwQ5784AkkfhHACh3jGcg1hUubyZyeq9AtVd5gXkcyHGVOC+mORjRIHSj+fHfqwY5vxwyBLXQpcfk8MpK0ROg==} - engines: {node: '>=18'} + /bcrypt-ts@7.0.0: + resolution: {integrity: sha512-JMr30sbKPwF+2TccaNOYJuDx+nCmnTvHGB2rwkj+To/xZhBTX9f8zpTqGy3kpkS26KWOEYPsQlOJ5MVD00RHQQ==} + engines: {node: '>=20'} dev: true /binary-extensions@2.3.0: @@ -1453,6 +2313,10 @@ packages: engines: {node: '>=8'} dev: true + /birpc@2.3.0: + resolution: {integrity: sha512-ijbtkn/F3Pvzb6jHypHRyve2QApOCZDR25D/VnkY2G/lBNcXCTsnsCxgY4k4PkVB7zfwzYbY3O9Lcqe3xufS5g==} + dev: true + /boolbase@1.0.0: resolution: {integrity: sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==} dev: true @@ -1464,15 +2328,19 @@ packages: fill-range: 7.1.1 dev: true - /browserslist@4.23.1: - resolution: {integrity: sha512-TUfofFo/KsK/bWZ9TWQ5O26tsWW4Uhmt8IYklbnUa70udB6P2wA7w7o4PY4muaEPBQaAX+CEnmmIA41NVHtPVw==} + /browserslist@4.24.5: + resolution: {integrity: sha512-FDToo4Wo82hIdgc1CQ+NQD0hEhmpPjrZ3hiUgwgOG6IuTdlpr8jdjyG24P6cNP1yJpTLzS5OcGgSw0xmDU1/Tw==} engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} 
hasBin: true dependencies: - caniuse-lite: 1.0.30001638 - electron-to-chromium: 1.4.812 - node-releases: 2.0.14 - update-browserslist-db: 1.0.16(browserslist@4.23.1) + caniuse-lite: 1.0.30001718 + electron-to-chromium: 1.5.157 + node-releases: 2.0.19 + update-browserslist-db: 1.1.3(browserslist@4.24.5) + dev: true + + /buffer-builder@0.2.0: + resolution: {integrity: sha512-7VPMEPuYznPSoR21NE1zvd2Xna6c/CloiZCfcMXR1Jny6PjX0N4Nsa38zcBFo/FMK+BlA+FLKbJCQ0i2yxp+Xg==} dev: true /cac@6.7.14: @@ -1485,24 +2353,32 @@ packages: engines: {node: '>=6'} dev: true - /caniuse-lite@1.0.30001638: - resolution: {integrity: sha512-5SuJUJ7cZnhPpeLHaH0c/HPAnAHZvS6ElWyHK9GSIbVOQABLzowiI2pjmpvZ1WEbkyz46iFd4UXlOHR5SqgfMQ==} + /caniuse-lite@1.0.30001718: + resolution: {integrity: sha512-AflseV1ahcSunK53NfEs9gFWgOEmzr0f+kaMFA4xiLZlr9Hzt7HxcSpIFcnNCUkz6R6dWKa54rUz3HUmI3nVcw==} + dev: true + + /ccount@2.0.1: + resolution: {integrity: sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==} dev: true - /chalk@5.3.0: - resolution: {integrity: sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==} + /chalk@5.4.1: + resolution: {integrity: sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w==} engines: {node: ^12.17.0 || ^14.13 || >=16.0.0} dev: true - /character-entities@2.0.2: - resolution: {integrity: sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ==} + /character-entities-html4@2.1.0: + resolution: {integrity: sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==} + dev: true + + /character-entities-legacy@3.0.0: + resolution: {integrity: sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==} dev: true - /chart.js@4.4.3: - resolution: {integrity: sha512-qK1gkGSRYcJzqrrzdR6a+I0vQ4/R+SoODXyAjscQ/4mzuNzySaMCd+hyVxitSY1+L2fjPD1Gbn+ibNqRmwQeLw==} + /chart.js@4.4.9: + resolution: {integrity: sha512-EyZ9wWKgpAU0fLJ43YAEIF8sr5F2W3LqbS40ZJyHIner2lY14ufqv2VMp69MAiZ2rpwxEUxEhIH/0U3xyRynxg==} engines: {pnpm: '>=8'} dependencies: - '@kurkle/color': 0.3.2 + '@kurkle/color': 0.3.4 /cheerio-select@2.1.0: resolution: {integrity: sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g==} @@ -1512,20 +2388,44 @@ packages: css-what: 6.1.0 domelementtype: 2.3.0 domhandler: 5.0.3 - domutils: 3.1.0 + domutils: 3.2.2 dev: true - /cheerio@1.0.0-rc.12: - resolution: {integrity: sha512-VqR8m68vM46BNnuZ5NtnGBKIE/DfN0cRIzg9n40EIq9NOv90ayxLBXA8fXC5gquFRGJSTRqBq25Jt2ECLR431Q==} - engines: {node: '>= 6'} + /cheerio@1.0.0: + resolution: {integrity: sha512-quS9HgjQpdaXOvsZz82Oz7uxtXiy6UIsIQcpBj7HRw2M63Skasm9qlDocAM7jNuaxdhpPU7c4kJN+gA5MCu4ww==} + engines: {node: '>=18.17'} dependencies: cheerio-select: 2.1.0 dom-serializer: 2.0.0 domhandler: 5.0.3 - domutils: 3.1.0 - htmlparser2: 8.0.2 - parse5: 7.1.2 - parse5-htmlparser2-tree-adapter: 7.0.0 + domutils: 3.2.2 + encoding-sniffer: 0.2.0 + htmlparser2: 9.1.0 + parse5: 7.3.0 + parse5-htmlparser2-tree-adapter: 7.1.0 + parse5-parser-stream: 7.1.2 + undici: 6.21.3 + whatwg-mimetype: 4.0.0 + dev: true + + /chevrotain-allstar@0.3.1(chevrotain@11.0.3): + resolution: {integrity: sha512-b7g+y9A0v4mxCW1qUhf3BSVPg+/NvGErk/dOkrDaHA0nQIQGAtrOjlX//9OQtRlSCy+x9rfB5N8yC71lH1nvMw==} + peerDependencies: + chevrotain: ^11.0.0 + dependencies: + chevrotain: 11.0.3 + lodash-es: 4.17.21 + dev: true + + /chevrotain@11.0.3: + resolution: 
{integrity: sha512-ci2iJH6LeIkvP9eJW6gpueU8cnZhv85ELY8w8WiFtNjMHA5ad6pQLaJo9mEly/9qUyCpvqX8/POVUTf18/HFdw==} + dependencies: + '@chevrotain/cst-dts-gen': 11.0.3 + '@chevrotain/gast': 11.0.3 + '@chevrotain/regexp-to-ast': 11.0.3 + '@chevrotain/types': 11.0.3 + '@chevrotain/utils': 11.0.3 + lodash-es: 4.17.21 dev: true /chokidar@3.6.0: @@ -1543,11 +2443,18 @@ packages: fsevents: 2.3.3 dev: true - /cli-cursor@4.0.0: - resolution: {integrity: sha512-VGtlMu3x/4DOtIUwEkRezxUZ2lBacNJCHash0N0WeZDBS+7Ux1dm3XWAgWYxLJFMMdOeXMHXorshEFhbMSGelg==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + /chokidar@4.0.3: + resolution: {integrity: sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==} + engines: {node: '>= 14.16.0'} dependencies: - restore-cursor: 4.0.0 + readdirp: 4.1.2 + dev: true + + /cli-cursor@5.0.0: + resolution: {integrity: sha512-aCj4O5wKyszjMmDT4tZj93kxyydN/K5zPWSCe6/0AV/AA1pqe5ZBIw0a2ZfPQV7lL5/yb5HsUreJ6UFAF1tEQw==} + engines: {node: '>=18'} + dependencies: + restore-cursor: 5.1.0 dev: true /cli-spinners@2.9.2: @@ -1574,6 +2481,19 @@ packages: resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} dev: true + /colorjs.io@0.5.2: + resolution: {integrity: sha512-twmVoizEW7ylZSN32OgKdXRmo1qg+wT5/6C3xu5b9QsWzSFAhHLn2xd8ro0diCsKfCj1RdaTP/nrcW+vAoQPIw==} + dev: true + + /comma-separated-tokens@2.0.3: + resolution: {integrity: sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==} + dev: true + + /commander@13.1.0: + resolution: {integrity: sha512-/rFeCpNJQbhSZjGVwO9RFV3xPqbnERS8MmIQzCtD/zl6gpJuV/bMLuN92oG3F7d8oDEHHRrujSXNUr8fpjntKw==} + engines: {node: '>=18'} + dev: true + /commander@7.2.0: resolution: {integrity: sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==} engines: {node: '>= 10'} @@ -1584,28 +2504,41 @@ packages: engines: {node: '>= 12'} dev: true + /confbox@0.1.8: + resolution: {integrity: sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w==} + dev: true + + /confbox@0.2.2: + resolution: {integrity: sha512-1NB+BKqhtNipMsov4xI/NnhCKp9XG9NamYp5PVm9klAT0fsrNPjaFICsCFhNhwZJKNh7zB/3q8qXz0E9oaMNtQ==} + dev: true + /connect-history-api-fallback@2.0.0: resolution: {integrity: sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA==} engines: {node: '>=0.8'} dev: true + /copy-anything@3.0.5: + resolution: {integrity: sha512-yCEafptTtb4bk7GLEQoM8KVJpxAfdBJYaXyzQEgQQQgYrZiDp8SJmGKlYza6CYjEDNstAdNdKA3UuoULlEbS6w==} + engines: {node: '>=12.13'} + dependencies: + is-what: 4.1.16 + dev: true + /cose-base@1.0.3: resolution: {integrity: sha512-s9whTXInMSgAp/NVXVNuVxVKzGH2qck3aQlVHxDCdAEPgtMKwc4Wq6/QKhgdEdgbLSi9rBTAcPoRa6JpiG4ksg==} dependencies: layout-base: 1.0.2 dev: true - /create-codepen@1.0.1: - resolution: {integrity: sha512-XzSWwGCFNeOnNGp3KdCDGaKq4Cp1SvjzpPGQqO0tj1HT3BhksLdl/xQ2ZEY4+0MQ3m1I/K1Fvpm4GGMthtamyA==} + /cose-base@2.2.0: + resolution: {integrity: sha512-AzlgcsCbUMymkADOJtQm3wO9S3ltPfYOFD5033keQn9NJzIbtnZj+UdBJe7DYml/8TdbtHJW3j58SOnKhWY/5g==} + dependencies: + layout-base: 2.0.1 dev: true - /cross-spawn@7.0.3: - resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==} - engines: {node: '>= 8'} - dependencies: - path-key: 3.1.1 - shebang-command: 2.0.0 - which: 2.0.2 + /create-codepen@2.0.0: + resolution: {integrity: 
sha512-ehJ0Zw5RSV2G4+/azUb7vEZWRSA/K9cW7HDock1Y9ViDexkgSJUZJRcObdw/YAWeXKjreEQV9l/igNSsJ1yw5A==} + engines: {node: '>=18'} dev: true /css-select@5.1.0: @@ -1614,7 +2547,7 @@ packages: boolbase: 1.0.0 css-what: 6.1.0 domhandler: 5.0.3 - domutils: 3.1.0 + domutils: 3.2.2 nth-check: 2.1.1 dev: true @@ -1627,17 +2560,26 @@ packages: resolution: {integrity: sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==} dev: true - /cytoscape-cose-bilkent@4.1.0(cytoscape@3.30.0): + /cytoscape-cose-bilkent@4.1.0(cytoscape@3.32.0): resolution: {integrity: sha512-wgQlVIUJF13Quxiv5e1gstZ08rnZj2XaLHGoFMYXz7SkNfCDOOteKBE6SYRfA9WxxI/iBc3ajfDoc6hb/MRAHQ==} peerDependencies: cytoscape: ^3.2.0 dependencies: cose-base: 1.0.3 - cytoscape: 3.30.0 + cytoscape: 3.32.0 dev: true - /cytoscape@3.30.0: - resolution: {integrity: sha512-l590mjTHT6/Cbxp13dGPC2Y7VXdgc+rUeF8AnF/JPzhjNevbDJfObnJgaSjlldOgBQZbue+X6IUZ7r5GAgvauQ==} + /cytoscape-fcose@2.2.0(cytoscape@3.32.0): + resolution: {integrity: sha512-ki1/VuRIHFCzxWNrsshHYPs6L7TvLu3DL+TyIGEsRcvVERmxokbf5Gdk7mFxZnTdiGtnA4cfSmjZJMviqSuZrQ==} + peerDependencies: + cytoscape: ^3.2.0 + dependencies: + cose-base: 2.2.0 + cytoscape: 3.32.0 + dev: true + + /cytoscape@3.32.0: + resolution: {integrity: sha512-5JHBC9n75kz5851jeklCPmZWcg3hUe6sjqJvyk3+hVqFaKcHwHgxsjeN1yLmggoUc6STbtm9/NQyabQehfjvWQ==} engines: {node: '>=0.10'} dev: true @@ -1912,19 +2854,19 @@ packages: d3-zoom: 3.0.0 dev: true - /dagre-d3-es@7.0.10: - resolution: {integrity: sha512-qTCQmEhcynucuaZgY5/+ti3X/rnszKZhEQH/ZdWdtP1tA/y3VoHJzcVrO9pjjJCNpigfscAtoUB5ONcd2wNn0A==} + /dagre-d3-es@7.0.11: + resolution: {integrity: sha512-tvlJLyQf834SylNKax8Wkzco/1ias1OPw8DcUMDE7oUIoSEW25riQVuiu/0OWEFqT0cxHT3Pa9/D82Jr47IONw==} dependencies: d3: 7.9.0 lodash-es: 4.17.21 dev: true - /dayjs@1.11.11: - resolution: {integrity: sha512-okzr3f11N6WuqYtZSvm+F776mB41wRZMhKP+hc34YdW+KmtYYK9iqvHSwo2k9FEH3fhGXvOPV6yz2IcSrfRUDg==} + /dayjs@1.11.13: + resolution: {integrity: sha512-oaMBel6gjolK862uaPQOVTA7q3TZhuSvuMQAAglQDOWYO9A91IrAOUJEyKVlqJlHE0vq5p5UXxzdPfMH/x6xNg==} dev: true - /debug@4.3.5: - resolution: {integrity: sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg==} + /debug@4.4.1: + resolution: {integrity: sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==} engines: {node: '>=6.0'} peerDependencies: supports-color: '*' @@ -1932,7 +2874,7 @@ packages: supports-color: optional: true dependencies: - ms: 2.1.2 + ms: 2.1.3 dev: true /decamelize@1.2.0: @@ -1940,12 +2882,6 @@ packages: engines: {node: '>=0.10.0'} dev: true - /decode-named-character-reference@1.0.2: - resolution: {integrity: sha512-O8x12RzrUF8xyVcY0KJowWsmaJxQbmy0/EtnNtHRpsOcT7dFk5W598coHqBVpmWo1oQQfsCqfCmkZN5DJrZVdg==} - dependencies: - character-entities: 2.0.2 - dev: true - /delaunator@5.0.1: resolution: {integrity: sha512-8nvh+XBe96aCESrGOqMp/84b13H9cdKbG5P2ejQCh4d4sK9RL4371qou9drQjMhvnPmhWl5hnmqbEE0fXr9Xnw==} dependencies: @@ -1957,9 +2893,17 @@ packages: engines: {node: '>=6'} dev: true - /diff@5.2.0: - resolution: {integrity: sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A==} - engines: {node: '>=0.3.1'} + /detect-libc@1.0.3: + resolution: {integrity: sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg==} + engines: {node: '>=0.10'} + hasBin: true + dev: true + optional: true + + /devlop@1.1.0: + resolution: {integrity: 
sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==} + dependencies: + dequal: 2.0.3 dev: true /dijkstrajs@1.0.3: @@ -1985,36 +2929,37 @@ packages: domelementtype: 2.3.0 dev: true - /dompurify@3.1.5: - resolution: {integrity: sha512-lwG+n5h8QNpxtyrJW/gJWckL+1/DQiYMX8f7t8Z2AZTPw1esVrqjI63i7Zc2Gz0aKzLVMYC1V1PL/ky+aY/NgA==} + /dompurify@3.2.6: + resolution: {integrity: sha512-/2GogDQlohXPZe6D6NOgQvXLPSYBqIWMnZ8zzOhn09REE4eyAzb+Hed3jhoM9OkuaJ8P6ZGTTVWQKAi8ieIzfQ==} + optionalDependencies: + '@types/trusted-types': 2.0.7 dev: true - /domutils@3.1.0: - resolution: {integrity: sha512-H78uMmQtI2AhgDJjWeQmHwJJ2bLPD3GMmO7Zja/ZZh84wkm+4ut+IUnUdRa8uCGX88DiVx1j6FRe1XfxEgjEZA==} + /domutils@3.2.2: + resolution: {integrity: sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw==} dependencies: dom-serializer: 2.0.0 domelementtype: 2.3.0 domhandler: 5.0.3 dev: true - /electron-to-chromium@1.4.812: - resolution: {integrity: sha512-7L8fC2Ey/b6SePDFKR2zHAy4mbdp1/38Yk5TsARO66W3hC5KEaeKMMHoxwtuH+jcu2AYLSn9QX04i95t6Fl1Hg==} - dev: true - - /elkjs@0.9.3: - resolution: {integrity: sha512-f/ZeWvW/BCXbhGEf1Ujp29EASo/lk1FDnETgNKwJrsVvGZhUWCZyg3xLJjAsxfOmt8KjswHmI5EwCQcPMpOYhQ==} + /electron-to-chromium@1.5.157: + resolution: {integrity: sha512-/0ybgsQd1muo8QlnuTpKwtl0oX5YMlUGbm8xyqgDU00motRkKFFbUJySAQBWcY79rVqNLWIWa87BGVGClwAB2w==} dev: true - /emoji-regex@10.3.0: - resolution: {integrity: sha512-QpLs9D9v9kArv4lfDEgg1X/gN5XLnf/A6l9cs8SPZLRZR3ZkY9+kwIQTxm+fsSej5UMYGE8fdoaZVIBlqG0XTw==} + /emoji-regex@10.4.0: + resolution: {integrity: sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw==} dev: true /emoji-regex@8.0.0: resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} dev: true - /encode-utf8@1.0.3: - resolution: {integrity: sha512-ucAnuBEhUK4boH2HjVYG5Q2mQyPorvv0u/ocS+zhdw0S8AlHYY+GOFhP1Gio5z4icpP2ivFSvhtFjQi8+T9ppw==} + /encoding-sniffer@0.2.0: + resolution: {integrity: sha512-ju7Wq1kg04I3HtiYIOrUrdfdDvkyO9s5XM8QAj/bN61Yo/Vb4vgJxy5vi4Yxk01gWHbrofpPtpxM8bKger9jhg==} + dependencies: + iconv-lite: 0.6.3 + whatwg-encoding: 3.1.1 dev: true /entities@4.5.0: @@ -2022,45 +2967,52 @@ packages: engines: {node: '>=0.12'} dev: true - /envinfo@7.13.0: - resolution: {integrity: sha512-cvcaMr7KqXVh4nyzGTVqTum+gAiL265x5jUWQIDLq//zOGbW+gSW/C+OWLleY/rs9Qole6AZLMXPbtIFQbqu+Q==} + /entities@6.0.0: + resolution: {integrity: sha512-aKstq2TDOndCn4diEyp9Uq/Flu2i1GlLkc6XIDQSDMuaFE3OPW5OphLCyQ5SpSJZTb4reN+kTcYru5yIfXoRPw==} + engines: {node: '>=0.12'} + dev: true + + /envinfo@7.14.0: + resolution: {integrity: sha512-CO40UI41xDQzhLB1hWyqUKgFhs250pNcGbyGKe1l/e4FSaI/+YE4IMG76GDt0In67WLPACIITC+sOi08x4wIvg==} engines: {node: '>=4'} hasBin: true dev: true - /esbuild@0.20.2: - resolution: {integrity: sha512-WdOOppmUNU+IbZ0PaDiTst80zjnrOkyJNHoKupIcVyU8Lvla3Ugx94VzkQ32Ijqd7UhHJy75gNWDMUekcrSJ6g==} - engines: {node: '>=12'} + /esbuild@0.25.4: + resolution: {integrity: sha512-8pgjLUcUjcgDg+2Q4NYXnPbo/vncAY4UmyaCm0jZevERqCHZIaWwdJHkf8XQtu4AxSKCdvrUbT0XUr1IdZzI8Q==} + engines: {node: '>=18'} hasBin: true requiresBuild: true optionalDependencies: - '@esbuild/aix-ppc64': 0.20.2 - '@esbuild/android-arm': 0.20.2 - '@esbuild/android-arm64': 0.20.2 - '@esbuild/android-x64': 0.20.2 - '@esbuild/darwin-arm64': 0.20.2 - '@esbuild/darwin-x64': 0.20.2 - '@esbuild/freebsd-arm64': 0.20.2 - '@esbuild/freebsd-x64': 0.20.2 - '@esbuild/linux-arm': 0.20.2 - '@esbuild/linux-arm64': 
0.20.2 - '@esbuild/linux-ia32': 0.20.2 - '@esbuild/linux-loong64': 0.20.2 - '@esbuild/linux-mips64el': 0.20.2 - '@esbuild/linux-ppc64': 0.20.2 - '@esbuild/linux-riscv64': 0.20.2 - '@esbuild/linux-s390x': 0.20.2 - '@esbuild/linux-x64': 0.20.2 - '@esbuild/netbsd-x64': 0.20.2 - '@esbuild/openbsd-x64': 0.20.2 - '@esbuild/sunos-x64': 0.20.2 - '@esbuild/win32-arm64': 0.20.2 - '@esbuild/win32-ia32': 0.20.2 - '@esbuild/win32-x64': 0.20.2 - dev: true - - /escalade@3.1.2: - resolution: {integrity: sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA==} + '@esbuild/aix-ppc64': 0.25.4 + '@esbuild/android-arm': 0.25.4 + '@esbuild/android-arm64': 0.25.4 + '@esbuild/android-x64': 0.25.4 + '@esbuild/darwin-arm64': 0.25.4 + '@esbuild/darwin-x64': 0.25.4 + '@esbuild/freebsd-arm64': 0.25.4 + '@esbuild/freebsd-x64': 0.25.4 + '@esbuild/linux-arm': 0.25.4 + '@esbuild/linux-arm64': 0.25.4 + '@esbuild/linux-ia32': 0.25.4 + '@esbuild/linux-loong64': 0.25.4 + '@esbuild/linux-mips64el': 0.25.4 + '@esbuild/linux-ppc64': 0.25.4 + '@esbuild/linux-riscv64': 0.25.4 + '@esbuild/linux-s390x': 0.25.4 + '@esbuild/linux-x64': 0.25.4 + '@esbuild/netbsd-arm64': 0.25.4 + '@esbuild/netbsd-x64': 0.25.4 + '@esbuild/openbsd-arm64': 0.25.4 + '@esbuild/openbsd-x64': 0.25.4 + '@esbuild/sunos-x64': 0.25.4 + '@esbuild/win32-arm64': 0.25.4 + '@esbuild/win32-ia32': 0.25.4 + '@esbuild/win32-x64': 0.25.4 + dev: true + + /escalade@3.2.0: + resolution: {integrity: sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==} engines: {node: '>=6'} dev: true @@ -2074,19 +3026,8 @@ packages: resolution: {integrity: sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==} dev: true - /execa@8.0.1: - resolution: {integrity: sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==} - engines: {node: '>=16.17'} - dependencies: - cross-spawn: 7.0.3 - get-stream: 8.0.1 - human-signals: 5.0.0 - is-stream: 3.0.0 - merge-stream: 2.0.0 - npm-run-path: 5.3.0 - onetime: 6.0.0 - signal-exit: 4.1.0 - strip-final-newline: 3.0.0 + /exsolve@1.0.5: + resolution: {integrity: sha512-pz5dvkYYKQ1AHVrgOzBKWeP4u4FRb3a6DNK2ucr0OoNwYIU4QWsJ+NM36LLzORT+z845MzKHHhpXiUF5nvQoJg==} dev: true /extend-shallow@2.0.1: @@ -2096,21 +3037,36 @@ packages: is-extendable: 0.1.1 dev: true - /fast-glob@3.3.2: - resolution: {integrity: sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==} + /extend@3.0.2: + resolution: {integrity: sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==} + dev: true + + /fast-glob@3.3.3: + resolution: {integrity: sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==} engines: {node: '>=8.6.0'} dependencies: '@nodelib/fs.stat': 2.0.5 '@nodelib/fs.walk': 1.2.8 glob-parent: 5.1.2 merge2: 1.4.1 - micromatch: 4.0.7 + micromatch: 4.0.8 dev: true - /fastq@1.17.1: - resolution: {integrity: sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==} + /fastq@1.19.1: + resolution: {integrity: sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==} dependencies: - reusify: 1.0.4 + reusify: 1.1.0 + dev: true + + /fdir@6.4.4(picomatch@4.0.2): + resolution: {integrity: sha512-1NZP+GK4GfuAv3PqKvxQRDMjdSRZjnkq7KfhlNrCNNlZ0ygQFpebfrnfnq/W7fpUnAv9aGWmY1zKx7FYL3gwhg==} + peerDependencies: + picomatch: ^3 || ^4 + 
peerDependenciesMeta: + picomatch: + optional: true + dependencies: + picomatch: 4.0.2 dev: true /fflate@0.8.2: @@ -2136,8 +3092,8 @@ packages: resolution: {integrity: sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==} dev: true - /fs-extra@11.2.0: - resolution: {integrity: sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw==} + /fs-extra@11.3.0: + resolution: {integrity: sha512-Z4XaCL6dUDHfP/jT25jJKMmtxvuwbkrD1vNSMFlo9lNLY2c5FHYSQgHPRZUjAB26TpDEoW9HCOgplrdbaPV/ew==} engines: {node: '>=14.14'} dependencies: graceful-fs: 4.2.11 @@ -2149,7 +3105,6 @@ packages: resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} os: [darwin] - requiresBuild: true dev: true optional: true @@ -2158,20 +3113,15 @@ packages: engines: {node: 6.* || 8.* || >= 10.*} dev: true - /get-east-asian-width@1.2.0: - resolution: {integrity: sha512-2nk+7SIVb14QrgXFHcm84tD4bKQz0RxPuMT8Ag5KPOq7J5fEmAg0UbXdTOSHqNuHSU28k55qnceesxXRZGzKWA==} + /get-east-asian-width@1.3.0: + resolution: {integrity: sha512-vpeMIQKxczTD/0s2CdEWHcb0eeJe6TFjxb+J5xgX7hScxqrGuyjmv4c1D4A/gelKfyox0gJJwIHF+fLjeaM8kQ==} engines: {node: '>=18'} dev: true - /get-stream@8.0.1: - resolution: {integrity: sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==} - engines: {node: '>=16'} - dev: true - - /giscus@1.5.0: - resolution: {integrity: sha512-t3LL0qbSO3JXq3uyQeKpF5CegstGfKX/0gI6eDe1cmnI7D56R7j52yLdzw4pdKrg3VnufwCgCM3FDz7G1Qr6lg==} + /giscus@1.6.0: + resolution: {integrity: sha512-Zrsi8r4t1LVW950keaWcsURuZUQwUaMKjvJgTCY125vkW6OiEBkatE7ScJDbpqKHdZwb///7FVC21SE3iFK3PQ==} dependencies: - lit: 3.1.4 + lit: 3.3.0 dev: true /glob-parent@5.1.2: @@ -2181,50 +3131,137 @@ packages: is-glob: 4.0.3 dev: true - /globby@14.0.1: - resolution: {integrity: sha512-jOMLD2Z7MAhyG8aJpNOpmziMOP4rPLcc95oQPKXBazW82z+CEgPFBQvEpRUa1KeIMUJo4Wsm+q6uzO/Q/4BksQ==} + /globals@15.15.0: + resolution: {integrity: sha512-7ACyT3wmyp3I61S4fG682L0VA2RGD9otkqGJIwNUMF1SWUombIIk+af1unuDYgMm082aHYwD+mzJvv9Iu8dsgg==} + engines: {node: '>=18'} + dev: true + + /globby@14.1.0: + resolution: {integrity: sha512-0Ia46fDOaT7k4og1PDW4YbodWWr3scS2vAr2lTbsplOt2WkKp0vQbkI9wKis/T5LV/dqPjO3bpS/z6GTJB82LA==} engines: {node: '>=18'} dependencies: '@sindresorhus/merge-streams': 2.3.0 - fast-glob: 3.3.2 - ignore: 5.3.1 - path-type: 5.0.0 + fast-glob: 3.3.3 + ignore: 7.0.4 + path-type: 6.0.0 slash: 5.1.0 - unicorn-magic: 0.1.0 + unicorn-magic: 0.3.0 + dev: true + + /graceful-fs@4.2.11: + resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} + dev: true + + /gray-matter@4.0.3: + resolution: {integrity: sha512-5v6yZd4JK3eMI3FqqCouswVqwugaA9r4dNZB1wwcmrD02QkV5H0y7XBQW8QwQqEaZY1pM9aqORSORhJRdNK44Q==} + engines: {node: '>=6.0'} + dependencies: + js-yaml: 3.14.1 + kind-of: 6.0.3 + section-matter: 1.0.0 + strip-bom-string: 1.0.0 + dev: true + + /hachure-fill@0.5.2: + resolution: {integrity: sha512-3GKBOn+m2LX9iq+JC1064cSFprJY4jL1jCXTcpnfER5HYE2l/4EfWSGzkPa/ZDBmYI0ZOEj5VHV/eKnPGkHuOg==} + dev: true + + /has-flag@4.0.0: + resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==} + engines: {node: '>=8'} + dev: true + + /hash-sum@2.0.0: + resolution: {integrity: sha512-WdZTbAByD+pHfl/g9QSsBIIwy8IT+EsPiKDs0KNX+zSHhdDLFKdZu0BQHljvO+0QI/BasbMSUa8wYNCZTvhslg==} + 
dev: true + + /hast-util-from-html@2.0.3: + resolution: {integrity: sha512-CUSRHXyKjzHov8yKsQjGOElXy/3EKpyX56ELnkHH34vDVw1N1XSQ1ZcAvTyAPtGqLTuKP/uxM+aLkSPqF/EtMw==} + dependencies: + '@types/hast': 3.0.4 + devlop: 1.1.0 + hast-util-from-parse5: 8.0.3 + parse5: 7.3.0 + vfile: 6.0.3 + vfile-message: 4.0.2 + dev: true + + /hast-util-from-parse5@8.0.3: + resolution: {integrity: sha512-3kxEVkEKt0zvcZ3hCRYI8rqrgwtlIOFMWkbclACvjlDw8Li9S2hk/d51OI0nr/gIpdMHNepwgOKqZ/sy0Clpyg==} + dependencies: + '@types/hast': 3.0.4 + '@types/unist': 3.0.3 + devlop: 1.1.0 + hastscript: 9.0.1 + property-information: 7.1.0 + vfile: 6.0.3 + vfile-location: 5.0.3 + web-namespaces: 2.0.1 + dev: true + + /hast-util-parse-selector@4.0.0: + resolution: {integrity: sha512-wkQCkSYoOGCRKERFWcxMVMOcYE2K1AaNLU8DXS9arxnLOUEWbOXKXiJUNzEpqZ3JOKpnha3jkFrumEjVliDe7A==} + dependencies: + '@types/hast': 3.0.4 + dev: true + + /hast-util-sanitize@5.0.2: + resolution: {integrity: sha512-3yTWghByc50aGS7JlGhk61SPenfE/p1oaFeNwkOOyrscaOkMGrcW9+Cy/QAIOBpZxP1yqDIzFMR0+Np0i0+usg==} + dependencies: + '@types/hast': 3.0.4 + '@ungap/structured-clone': 1.3.0 + unist-util-position: 5.0.0 + dev: true + + /hast-util-to-html@9.0.5: + resolution: {integrity: sha512-OguPdidb+fbHQSU4Q4ZiLKnzWo8Wwsf5bZfbvu7//a9oTYoqD/fWpe96NuHkoS9h0ccGOTe0C4NGXdtS0iObOw==} + dependencies: + '@types/hast': 3.0.4 + '@types/unist': 3.0.3 + ccount: 2.0.1 + comma-separated-tokens: 2.0.3 + hast-util-whitespace: 3.0.0 + html-void-elements: 3.0.0 + mdast-util-to-hast: 13.2.0 + property-information: 7.1.0 + space-separated-tokens: 2.0.2 + stringify-entities: 4.0.4 + zwitch: 2.0.4 dev: true - /graceful-fs@4.2.11: - resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} + /hast-util-whitespace@3.0.0: + resolution: {integrity: sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw==} + dependencies: + '@types/hast': 3.0.4 dev: true - /gray-matter@4.0.3: - resolution: {integrity: sha512-5v6yZd4JK3eMI3FqqCouswVqwugaA9r4dNZB1wwcmrD02QkV5H0y7XBQW8QwQqEaZY1pM9aqORSORhJRdNK44Q==} - engines: {node: '>=6.0'} + /hastscript@9.0.1: + resolution: {integrity: sha512-g7df9rMFX/SPi34tyGCyUBREQoKkapwdY/T04Qn9TDWfHhAYt4/I0gMVirzK5wEzeUqIjEB+LXC/ypb7Aqno5w==} dependencies: - js-yaml: 3.14.1 - kind-of: 6.0.3 - section-matter: 1.0.0 - strip-bom-string: 1.0.0 + '@types/hast': 3.0.4 + comma-separated-tokens: 2.0.3 + hast-util-parse-selector: 4.0.0 + property-information: 7.1.0 + space-separated-tokens: 2.0.2 dev: true - /hash-sum@2.0.0: - resolution: {integrity: sha512-WdZTbAByD+pHfl/g9QSsBIIwy8IT+EsPiKDs0KNX+zSHhdDLFKdZu0BQHljvO+0QI/BasbMSUa8wYNCZTvhslg==} + /hookable@5.5.3: + resolution: {integrity: sha512-Yc+BQe8SvoXH1643Qez1zqLRmbA5rCL+sSmk6TVos0LWVfNIB7PGncdlId77WzLGSIB5KaWgTaNTs2lNVEI6VQ==} + dev: true + + /html-void-elements@3.0.0: + resolution: {integrity: sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg==} dev: true - /htmlparser2@8.0.2: - resolution: {integrity: sha512-GYdjWKDkbRLkZ5geuHs5NY1puJ+PXwP7+fHPRz06Eirsb9ugf6d8kkXav6ADhcODhFFPMIXyxkxSuMf3D6NCFA==} + /htmlparser2@9.1.0: + resolution: {integrity: sha512-5zfg6mHUoaer/97TxnGpxmbR7zJtPwIYFMZ/H5ucTlPZhKvtum05yiPK3Mgai3a0DyVxv7qYqoweaEd2nrYQzQ==} dependencies: domelementtype: 2.3.0 domhandler: 5.0.3 - domutils: 3.1.0 + domutils: 3.2.2 entities: 4.5.0 dev: true - /human-signals@5.0.0: - resolution: {integrity: 
sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==} - engines: {node: '>=16.17.0'} - dev: true - /iconv-lite@0.6.3: resolution: {integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==} engines: {node: '>=0.10.0'} @@ -2232,13 +3269,13 @@ packages: safer-buffer: 2.1.2 dev: true - /ignore@5.3.1: - resolution: {integrity: sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw==} + /ignore@7.0.4: + resolution: {integrity: sha512-gJzzk+PQNznz8ysRrC0aOkBNVRBDtE1n53IqyqEf3PXrYwomFs5q4pGMizBMJF+ykh03insJ27hB8gSrD2Hn8A==} engines: {node: '>= 4'} dev: true - /immutable@4.3.6: - resolution: {integrity: sha512-Ju0+lEMyzMVZarkTn/gqRpdqd5dOPaz1mCZ0SH3JV6iFw81PldE/PEB1hWVEA288HPt4WXW8O7AWxB10M+03QQ==} + /immutable@5.1.2: + resolution: {integrity: sha512-qHKXW1q6liAk1Oys6umoaZbDRqjcjgSrbnrifHsfsttza7zcvRAsL7mMV6xWcyhwQy7Xj5v4hhbr6b+iDYwlmQ==} dev: true /internmap@1.0.1: @@ -2289,9 +3326,9 @@ packages: engines: {node: '>=0.12.0'} dev: true - /is-stream@3.0.0: - resolution: {integrity: sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + /is-plain-obj@4.1.0: + resolution: {integrity: sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==} + engines: {node: '>=12'} dev: true /is-unicode-supported@1.3.0: @@ -2299,13 +3336,14 @@ packages: engines: {node: '>=12'} dev: true - /is-unicode-supported@2.0.0: - resolution: {integrity: sha512-FRdAyx5lusK1iHG0TWpVtk9+1i+GjrzRffhDg4ovQ7mcidMQ6mj+MhKPmvh7Xwyv5gIS06ns49CA7Sqg7lC22Q==} + /is-unicode-supported@2.1.0: + resolution: {integrity: sha512-mE00Gnza5EEB3Ds0HfMyllZzbBrmLOX3vfWoj9A9PEnTfratQ/BcaJOuMhnkhjXvb2+FkY3VuHqtAGpTPmglFQ==} engines: {node: '>=18'} dev: true - /isexe@2.0.0: - resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} + /is-what@4.1.16: + resolution: {integrity: sha512-ZhMwEosbFJkA0YhFnNDgTM4ZxDRsS6HqTo7qsZM08fehyRYIYa0yHu5R6mgo1n/8MgaPBXiPimPD77baVFYg+A==} + engines: {node: '>=12.13'} dev: true /js-yaml@3.14.1: @@ -2331,8 +3369,8 @@ packages: graceful-fs: 4.2.11 dev: true - /katex@0.16.10: - resolution: {integrity: sha512-ZiqaC04tp2O5utMsl2TEZTXxa6WSC4yo0fv5ML++D3QZv/vx2Mct0mTlRx3O+uUkjfuAgOkzsCmq5MiUEsDDdA==} + /katex@0.16.22: + resolution: {integrity: sha512-XCHRdUw4lf3SKBaJe4EvgqIuWwkPSo9XoeO8GjQW94Bp7TWv9hNhzZjZ+OH9yf1UmLygb7DIT5GSFQiyt16zYg==} hasBin: true dependencies: commander: 8.3.0 @@ -2347,17 +3385,31 @@ packages: engines: {node: '>=0.10.0'} dev: true - /kleur@4.1.5: - resolution: {integrity: sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==} - engines: {node: '>=6'} + /kolorist@1.8.0: + resolution: {integrity: sha512-Y+60/zizpJ3HRH8DCss+q95yr6145JXZo46OTpFvDZWLfRCE4qChOyk1b26nMaNpfHHgxagk9dXT5OP0Tfe+dQ==} + dev: true + + /langium@3.3.1: + resolution: {integrity: sha512-QJv/h939gDpvT+9SiLVlY7tZC3xB2qK57v0J04Sh9wpMb6MP1q8gB21L3WIo8T5P1MSMg3Ep14L7KkDCFG3y4w==} + engines: {node: '>=16.0.0'} + dependencies: + chevrotain: 11.0.3 + chevrotain-allstar: 0.3.1(chevrotain@11.0.3) + vscode-languageserver: 9.0.1 + vscode-languageserver-textdocument: 1.0.12 + vscode-uri: 3.0.8 dev: true /layout-base@1.0.2: resolution: {integrity: sha512-8h2oVEZNktL4BH2JCOI90iD1yXwL6iNW7KcCKT2QZgQJR2vbqDsldCTPRU9NifTCqHZci57XvQQ15YTu+sTYPg==} dev: true - /lilconfig@3.1.2: - resolution: 
{integrity: sha512-eop+wDAvpItUys0FWkHIKeC9ybYrTGbU41U5K7+bttZZeohvnY7M9dZ5kB21GNWiFT2q1OoPTvncPCgSOVO5ow==} + /layout-base@2.0.1: + resolution: {integrity: sha512-dp3s92+uNI1hWIpPGH3jK2kxE2lMjdXdr+DH8ynZHpd6PUlH6x6cbuXnoMmiNumznqaNO31xu9e79F0uuZ0JFg==} + dev: true + + /lilconfig@3.1.3: + resolution: {integrity: sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw==} engines: {node: '>=14'} dev: true @@ -2367,26 +3419,35 @@ packages: uc.micro: 2.1.0 dev: true - /lit-element@4.0.6: - resolution: {integrity: sha512-U4sdJ3CSQip7sLGZ/uJskO5hGiqtlpxndsLr6mt3IQIjheg93UKYeGQjWMRql1s/cXNOaRrCzC2FQwjIwSUqkg==} + /lit-element@4.2.0: + resolution: {integrity: sha512-MGrXJVAI5x+Bfth/pU9Kst1iWID6GHDLEzFEnyULB/sFiRLgkd8NPK/PeeXxktA3T6EIIaq8U3KcbTU5XFcP2Q==} dependencies: - '@lit-labs/ssr-dom-shim': 1.2.0 - '@lit/reactive-element': 2.0.4 - lit-html: 3.1.4 + '@lit-labs/ssr-dom-shim': 1.3.0 + '@lit/reactive-element': 2.1.0 + lit-html: 3.3.0 dev: true - /lit-html@3.1.4: - resolution: {integrity: sha512-yKKO2uVv7zYFHlWMfZmqc+4hkmSbFp8jgjdZY9vvR9jr4J8fH6FUMXhr+ljfELgmjpvlF7Z1SJ5n5/Jeqtc9YA==} + /lit-html@3.3.0: + resolution: {integrity: sha512-RHoswrFAxY2d8Cf2mm4OZ1DgzCoBKUKSPvA1fhtSELxUERq2aQQ2h05pO9j81gS1o7RIRJ+CePLogfyahwmynw==} dependencies: '@types/trusted-types': 2.0.7 dev: true - /lit@3.1.4: - resolution: {integrity: sha512-q6qKnKXHy2g1kjBaNfcoLlgbI3+aSOZ9Q4tiGa9bGYXq5RBXxkVTqTIVmP2VWMp29L4GyvCFm8ZQ2o56eUAMyA==} + /lit@3.3.0: + resolution: {integrity: sha512-DGVsqsOIHBww2DqnuZzW7QsuCdahp50ojuDaBPC7jUDRpYoH0z7kHBBYZewRzer75FwtrkmkKk7iOAwSaWdBmw==} dependencies: - '@lit/reactive-element': 2.0.4 - lit-element: 4.0.6 - lit-html: 3.1.4 + '@lit/reactive-element': 2.1.0 + lit-element: 4.2.0 + lit-html: 3.3.0 + dev: true + + /local-pkg@1.1.1: + resolution: {integrity: sha512-WunYko2W1NcdfAFpuLUoucsgULmgDBRkdxHxWQ7mK0cQqwPiy8E1enjuRBrhLtZkB5iScJ1XIPdhVEFK8aOLSg==} + engines: {node: '>=14'} + dependencies: + mlly: 1.7.4 + pkg-types: 2.1.0 + quansync: 0.2.10 dev: true /locate-path@5.0.0: @@ -2404,23 +3465,23 @@ packages: resolution: {integrity: sha512-i24m8rpwhmPIS4zscNzK6MSEhk0DUWa/8iYQWxhffV8jkI4Phvs3F+quL5xvS0gdQR0FyTCMMH33Y78dDTzzIw==} engines: {node: '>=18'} dependencies: - chalk: 5.3.0 + chalk: 5.4.1 is-unicode-supported: 1.3.0 dev: true - /magic-string@0.30.10: - resolution: {integrity: sha512-iIRwTIf0QKV3UAnYK4PU8uiEc4SRh5jX0mwpIwETPpHdhVM4f53RSwS/vXvN1JhGX+Cs7B8qIq3d6AH49O5fAQ==} + /magic-string@0.30.17: + resolution: {integrity: sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA==} dependencies: - '@jridgewell/sourcemap-codec': 1.4.15 + '@jridgewell/sourcemap-codec': 1.5.0 dev: true - /markdown-it-anchor@8.6.7(@types/markdown-it@13.0.8)(markdown-it@14.1.0): - resolution: {integrity: sha512-FlCHFwNnutLgVTflOYHPW2pPcl2AACqVzExlkGQNsi4CJgqOHN7YTgDd4LuhgN1BFO3TS0vLAruV1Td6dwWPJA==} + /markdown-it-anchor@9.2.0(@types/markdown-it@14.1.2)(markdown-it@14.1.0): + resolution: {integrity: sha512-sa2ErMQ6kKOA4l31gLGYliFQrMKkqSO0ZJgGhDHKijPf0pNFM9vghjAh3gn26pS4JDRs7Iwa9S36gxm3vgZTzg==} peerDependencies: '@types/markdown-it': '*' markdown-it: '*' dependencies: - '@types/markdown-it': 13.0.8 + '@types/markdown-it': 14.1.2 markdown-it: 14.1.0 dev: true @@ -2440,289 +3501,142 @@ packages: uc.micro: 2.1.0 dev: true - /mdast-util-from-markdown@1.3.1: - resolution: {integrity: sha512-4xTO/M8c82qBcnQc1tgpNtubGUW/Y1tBQ1B0i5CtSoelOLKFYlElIr3bvgREYYO5iRqbMY1YuqZng0GVOI8Qww==} - dependencies: - '@types/mdast': 3.0.15 - '@types/unist': 2.0.10 - 
decode-named-character-reference: 1.0.2 - mdast-util-to-string: 3.2.0 - micromark: 3.2.0 - micromark-util-decode-numeric-character-reference: 1.1.0 - micromark-util-decode-string: 1.1.0 - micromark-util-normalize-identifier: 1.1.0 - micromark-util-symbol: 1.1.0 - micromark-util-types: 1.1.0 - unist-util-stringify-position: 3.0.3 - uvu: 0.5.6 - transitivePeerDependencies: - - supports-color + /marked@15.0.12: + resolution: {integrity: sha512-8dD6FusOQSrpv9Z1rdNMdlSgQOIP880DHqnohobOmYLElGEqAL/JvxvuxZO16r4HtjTlfPRDC1hbvxC9dPN2nA==} + engines: {node: '>= 18'} + hasBin: true dev: true - /mdast-util-to-string@3.2.0: - resolution: {integrity: sha512-V4Zn/ncyN1QNSqSBxTrMOLpjr+IKdHl2v3KVLoWmDPscP4r9GcCi71gjgvUV1SFSKh92AjAG4peFuBl2/YgCJg==} + /mdast-util-to-hast@13.2.0: + resolution: {integrity: sha512-QGYKEuUsYT9ykKBCMOEDLsU5JRObWQusAolFMeko/tYPufNkRffBAQjIE+99jbA87xv6FgmjLtwjh9wBWajwAA==} dependencies: - '@types/mdast': 3.0.15 + '@types/hast': 3.0.4 + '@types/mdast': 4.0.4 + '@ungap/structured-clone': 1.3.0 + devlop: 1.1.0 + micromark-util-sanitize-uri: 2.0.1 + trim-lines: 3.0.1 + unist-util-position: 5.0.0 + unist-util-visit: 5.0.0 + vfile: 6.0.3 dev: true /mdurl@2.0.0: resolution: {integrity: sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==} dev: true - /merge-stream@2.0.0: - resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==} - dev: true - /merge2@1.4.1: resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} engines: {node: '>= 8'} dev: true - /mermaid@10.9.1: - resolution: {integrity: sha512-Mx45Obds5W1UkW1nv/7dHRsbfMM1aOKA2+Pxs/IGHNonygDHwmng8xTHyS9z4KWVi0rbko8gjiBmuwwXQ7tiNA==} + /mermaid@11.6.0: + resolution: {integrity: sha512-PE8hGUy1LDlWIHWBP05SFdqUHGmRcCcK4IzpOKPE35eOw+G9zZgcnMpyunJVUEOgb//KBORPjysKndw8bFLuRg==} dependencies: - '@braintree/sanitize-url': 6.0.4 - '@types/d3-scale': 4.0.8 - '@types/d3-scale-chromatic': 3.0.3 - cytoscape: 3.30.0 - cytoscape-cose-bilkent: 4.1.0(cytoscape@3.30.0) + '@braintree/sanitize-url': 7.1.1 + '@iconify/utils': 2.3.0 + '@mermaid-js/parser': 0.4.0 + '@types/d3': 7.4.3 + cytoscape: 3.32.0 + cytoscape-cose-bilkent: 4.1.0(cytoscape@3.32.0) + cytoscape-fcose: 2.2.0(cytoscape@3.32.0) d3: 7.9.0 d3-sankey: 0.12.3 - dagre-d3-es: 7.0.10 - dayjs: 1.11.11 - dompurify: 3.1.5 - elkjs: 0.9.3 - katex: 0.16.10 + dagre-d3-es: 7.0.11 + dayjs: 1.11.13 + dompurify: 3.2.6 + katex: 0.16.22 khroma: 2.1.0 lodash-es: 4.17.21 - mdast-util-from-markdown: 1.3.1 - non-layered-tidy-tree-layout: 2.0.2 - stylis: 4.3.2 + marked: 15.0.12 + roughjs: 4.6.6 + stylis: 4.3.6 ts-dedent: 2.2.0 - uuid: 9.0.1 - web-worker: 1.3.0 + uuid: 11.1.0 transitivePeerDependencies: - supports-color dev: true - /micromark-core-commonmark@1.1.0: - resolution: {integrity: sha512-BgHO1aRbolh2hcrzL2d1La37V0Aoz73ymF8rAcKnohLy93titmv62E0gP8Hrx9PKcKrqCZ1BbLGbP3bEhoXYlw==} - dependencies: - decode-named-character-reference: 1.0.2 - micromark-factory-destination: 1.1.0 - micromark-factory-label: 1.1.0 - micromark-factory-space: 1.1.0 - micromark-factory-title: 1.1.0 - micromark-factory-whitespace: 1.1.0 - micromark-util-character: 1.2.0 - micromark-util-chunked: 1.1.0 - micromark-util-classify-character: 1.1.0 - micromark-util-html-tag-name: 1.2.0 - micromark-util-normalize-identifier: 1.1.0 - micromark-util-resolve-all: 1.1.0 - micromark-util-subtokenize: 1.1.0 - micromark-util-symbol: 1.1.0 - micromark-util-types: 1.1.0 - uvu: 0.5.6 - 
dev: true - - /micromark-factory-destination@1.1.0: - resolution: {integrity: sha512-XaNDROBgx9SgSChd69pjiGKbV+nfHGDPVYFs5dOoDd7ZnMAE+Cuu91BCpsY8RT2NP9vo/B8pds2VQNCLiu0zhg==} - dependencies: - micromark-util-character: 1.2.0 - micromark-util-symbol: 1.1.0 - micromark-util-types: 1.1.0 - dev: true - - /micromark-factory-label@1.1.0: - resolution: {integrity: sha512-OLtyez4vZo/1NjxGhcpDSbHQ+m0IIGnT8BoPamh+7jVlzLJBH98zzuCoUeMxvM6WsNeh8wx8cKvqLiPHEACn0w==} - dependencies: - micromark-util-character: 1.2.0 - micromark-util-symbol: 1.1.0 - micromark-util-types: 1.1.0 - uvu: 0.5.6 - dev: true - - /micromark-factory-space@1.1.0: - resolution: {integrity: sha512-cRzEj7c0OL4Mw2v6nwzttyOZe8XY/Z8G0rzmWQZTBi/jjwyw/U4uqKtUORXQrR5bAZZnbTI/feRV/R7hc4jQYQ==} - dependencies: - micromark-util-character: 1.2.0 - micromark-util-types: 1.1.0 - dev: true - - /micromark-factory-title@1.1.0: - resolution: {integrity: sha512-J7n9R3vMmgjDOCY8NPw55jiyaQnH5kBdV2/UXCtZIpnHH3P6nHUKaH7XXEYuWwx/xUJcawa8plLBEjMPU24HzQ==} - dependencies: - micromark-factory-space: 1.1.0 - micromark-util-character: 1.2.0 - micromark-util-symbol: 1.1.0 - micromark-util-types: 1.1.0 - dev: true - - /micromark-factory-whitespace@1.1.0: - resolution: {integrity: sha512-v2WlmiymVSp5oMg+1Q0N1Lxmt6pMhIHD457whWM7/GUlEks1hI9xj5w3zbc4uuMKXGisksZk8DzP2UyGbGqNsQ==} - dependencies: - micromark-factory-space: 1.1.0 - micromark-util-character: 1.2.0 - micromark-util-symbol: 1.1.0 - micromark-util-types: 1.1.0 - dev: true - - /micromark-util-character@1.2.0: - resolution: {integrity: sha512-lXraTwcX3yH/vMDaFWCQJP1uIszLVebzUa3ZHdrgxr7KEU/9mL4mVgCpGbyhvNLNlauROiNUq7WN5u7ndbY6xg==} - dependencies: - micromark-util-symbol: 1.1.0 - micromark-util-types: 1.1.0 - dev: true - - /micromark-util-chunked@1.1.0: - resolution: {integrity: sha512-Ye01HXpkZPNcV6FiyoW2fGZDUw4Yc7vT0E9Sad83+bEDiCJ1uXu0S3mr8WLpsz3HaG3x2q0HM6CTuPdcZcluFQ==} - dependencies: - micromark-util-symbol: 1.1.0 - dev: true - - /micromark-util-classify-character@1.1.0: - resolution: {integrity: sha512-SL0wLxtKSnklKSUplok1WQFoGhUdWYKggKUiqhX+Swala+BtptGCu5iPRc+xvzJ4PXE/hwM3FNXsfEVgoZsWbw==} - dependencies: - micromark-util-character: 1.2.0 - micromark-util-symbol: 1.1.0 - micromark-util-types: 1.1.0 - dev: true - - /micromark-util-combine-extensions@1.1.0: - resolution: {integrity: sha512-Q20sp4mfNf9yEqDL50WwuWZHUrCO4fEyeDCnMGmG5Pr0Cz15Uo7KBs6jq+dq0EgX4DPwwrh9m0X+zPV1ypFvUA==} - dependencies: - micromark-util-chunked: 1.1.0 - micromark-util-types: 1.1.0 - dev: true - - /micromark-util-decode-numeric-character-reference@1.1.0: - resolution: {integrity: sha512-m9V0ExGv0jB1OT21mrWcuf4QhP46pH1KkfWy9ZEezqHKAxkj4mPCy3nIH1rkbdMlChLHX531eOrymlwyZIf2iw==} + /micromark-util-character@2.1.1: + resolution: {integrity: sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==} dependencies: - micromark-util-symbol: 1.1.0 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 dev: true - /micromark-util-decode-string@1.1.0: - resolution: {integrity: sha512-YphLGCK8gM1tG1bd54azwyrQRjCFcmgj2S2GoJDNnh4vYtnL38JS8M4gpxzOPNyHdNEpheyWXCTnnTDY3N+NVQ==} - dependencies: - decode-named-character-reference: 1.0.2 - micromark-util-character: 1.2.0 - micromark-util-decode-numeric-character-reference: 1.1.0 - micromark-util-symbol: 1.1.0 - dev: true - - /micromark-util-encode@1.1.0: - resolution: {integrity: sha512-EuEzTWSTAj9PA5GOAs992GzNh2dGQO52UvAbtSOMvXTxv3Criqb6IOzJUBCmEqrrXSblJIJBbFFv6zPxpreiJw==} - dev: true - - /micromark-util-html-tag-name@1.2.0: - resolution: {integrity: 
sha512-VTQzcuQgFUD7yYztuQFKXT49KghjtETQ+Wv/zUjGSGBioZnkA4P1XXZPT1FHeJA6RwRXSF47yvJ1tsJdoxwO+Q==} - dev: true - - /micromark-util-normalize-identifier@1.1.0: - resolution: {integrity: sha512-N+w5vhqrBihhjdpM8+5Xsxy71QWqGn7HYNUvch71iV2PM7+E3uWGox1Qp90loa1ephtCxG2ftRV/Conitc6P2Q==} - dependencies: - micromark-util-symbol: 1.1.0 + /micromark-util-encode@2.0.1: + resolution: {integrity: sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw==} dev: true - /micromark-util-resolve-all@1.1.0: - resolution: {integrity: sha512-b/G6BTMSg+bX+xVCshPTPyAu2tmA0E4X98NSR7eIbeC6ycCqCeE7wjfDIgzEbkzdEVJXRtOG4FbEm/uGbCRouA==} + /micromark-util-sanitize-uri@2.0.1: + resolution: {integrity: sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ==} dependencies: - micromark-util-types: 1.1.0 - dev: true - - /micromark-util-sanitize-uri@1.2.0: - resolution: {integrity: sha512-QO4GXv0XZfWey4pYFndLUKEAktKkG5kZTdUNaTAkzbuJxn2tNBOr+QtxR2XpWaMhbImT2dPzyLrPXLlPhph34A==} - dependencies: - micromark-util-character: 1.2.0 - micromark-util-encode: 1.1.0 - micromark-util-symbol: 1.1.0 - dev: true - - /micromark-util-subtokenize@1.1.0: - resolution: {integrity: sha512-kUQHyzRoxvZO2PuLzMt2P/dwVsTiivCK8icYTeR+3WgbuPqfHgPPy7nFKbeqRivBvn/3N3GBiNC+JRTMSxEC7A==} - dependencies: - micromark-util-chunked: 1.1.0 - micromark-util-symbol: 1.1.0 - micromark-util-types: 1.1.0 - uvu: 0.5.6 - dev: true - - /micromark-util-symbol@1.1.0: - resolution: {integrity: sha512-uEjpEYY6KMs1g7QfJ2eX1SQEV+ZT4rUD3UcF6l57acZvLNK7PBZL+ty82Z1qhK1/yXIY4bdx04FKMgR0g4IAag==} + micromark-util-character: 2.1.1 + micromark-util-encode: 2.0.1 + micromark-util-symbol: 2.0.1 dev: true - /micromark-util-types@1.1.0: - resolution: {integrity: sha512-ukRBgie8TIAcacscVHSiddHjO4k/q3pnedmzMQ4iwDcK0FtFCohKOlFbaOL/mPgfnPsL3C1ZyxJa4sbWrBl3jg==} + /micromark-util-symbol@2.0.1: + resolution: {integrity: sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==} dev: true - /micromark@3.2.0: - resolution: {integrity: sha512-uD66tJj54JLYq0De10AhWycZWGQNUvDI55xPgk2sQM5kn1JYlhbCMTtEeT27+vAhW2FBQxLlOmS3pmA7/2z4aA==} - dependencies: - '@types/debug': 4.1.12 - debug: 4.3.5 - decode-named-character-reference: 1.0.2 - micromark-core-commonmark: 1.1.0 - micromark-factory-space: 1.1.0 - micromark-util-character: 1.2.0 - micromark-util-chunked: 1.1.0 - micromark-util-combine-extensions: 1.1.0 - micromark-util-decode-numeric-character-reference: 1.1.0 - micromark-util-encode: 1.1.0 - micromark-util-normalize-identifier: 1.1.0 - micromark-util-resolve-all: 1.1.0 - micromark-util-sanitize-uri: 1.2.0 - micromark-util-subtokenize: 1.1.0 - micromark-util-symbol: 1.1.0 - micromark-util-types: 1.1.0 - uvu: 0.5.6 - transitivePeerDependencies: - - supports-color + /micromark-util-types@2.0.2: + resolution: {integrity: sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==} dev: true - /micromatch@4.0.7: - resolution: {integrity: sha512-LPP/3KorzCwBxfeUuZmaR6bG2kdeHSbe0P2tY3FLRU4vYrjYz5hI4QZwV0njUx3jeuKe67YukQ1LSPZBKDqO/Q==} + /micromatch@4.0.8: + resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==} engines: {node: '>=8.6'} dependencies: braces: 3.0.3 picomatch: 2.3.1 dev: true - /mimic-fn@2.1.0: - resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} - engines: {node: '>=6'} + /mimic-function@5.0.1: + 
resolution: {integrity: sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA==} + engines: {node: '>=18'} dev: true - /mimic-fn@4.0.0: - resolution: {integrity: sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==} - engines: {node: '>=12'} + /mitt@3.0.1: + resolution: {integrity: sha512-vKivATfr97l2/QBCYAkXYDbrIWPM2IIKEl7YPhjCvKlG3kE2gm+uBo6nEXK3M5/Ffh/FLpKExzOQ3JJoJGFKBw==} dev: true - /mri@1.2.0: - resolution: {integrity: sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==} - engines: {node: '>=4'} + /mlly@1.7.4: + resolution: {integrity: sha512-qmdSIPC4bDJXgZTCR7XosJiNKySV7O215tsPtDN9iEO/7q/76b/ijtgRu/+epFXSJhijtTCCGp3DWS549P3xKw==} + dependencies: + acorn: 8.14.1 + pathe: 2.0.3 + pkg-types: 1.3.1 + ufo: 1.6.1 dev: true - /ms@2.1.2: - resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} + /ms@2.1.3: + resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} dev: true - /nanoid@3.3.7: - resolution: {integrity: sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==} + /nanoid@3.3.11: + resolution: {integrity: sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==} engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} hasBin: true dev: true + /nanoid@5.1.5: + resolution: {integrity: sha512-Ir/+ZpE9fDsNH0hQ3C68uyThDXzYcim2EqcZ8zn8Chtt1iylPT9xXJB0kPCnqzgcEGikO9RxSrh63MsmVCU7Fw==} + engines: {node: ^18 || >=20} + hasBin: true + dev: true + /neo-async@2.6.2: resolution: {integrity: sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==} dev: true - /node-releases@2.0.14: - resolution: {integrity: sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw==} + /node-addon-api@7.1.1: + resolution: {integrity: sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ==} dev: true + optional: true - /non-layered-tidy-tree-layout@2.0.2: - resolution: {integrity: sha512-gkXMxRzUH+PB0ax9dUN0yYF0S25BqeAYqhgMaLUFmpXLEk7Fcu8f4emJuOAY0V8kjDICxROIKsTAKsV/v355xw==} + /node-releases@2.0.19: + resolution: {integrity: sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==} dev: true /normalize-path@3.0.0: @@ -2735,45 +3649,43 @@ packages: engines: {node: '>=0.10.0'} dev: true - /npm-run-path@5.3.0: - resolution: {integrity: sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - dependencies: - path-key: 4.0.0 - dev: true - /nth-check@2.1.1: resolution: {integrity: sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==} dependencies: boolbase: 1.0.0 dev: true - /onetime@5.1.2: - resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==} - engines: {node: '>=6'} + /onetime@7.0.0: + resolution: {integrity: sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ==} + engines: {node: '>=18'} dependencies: - mimic-fn: 2.1.0 + mimic-function: 5.0.1 dev: true - /onetime@6.0.0: - resolution: {integrity: 
sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==} - engines: {node: '>=12'} + /oniguruma-parser@0.12.1: + resolution: {integrity: sha512-8Unqkvk1RYc6yq2WBYRj4hdnsAxVze8i7iPfQr8e4uSP3tRv0rpZcbGUDvxfQQcdwHt/e9PrMvGCsa8OqG9X3w==} + dev: true + + /oniguruma-to-es@4.3.3: + resolution: {integrity: sha512-rPiZhzC3wXwE59YQMRDodUwwT9FZ9nNBwQQfsd1wfdtlKEyCdRV0avrTcSZ5xlIvGRVPd/cx6ZN45ECmS39xvg==} dependencies: - mimic-fn: 4.0.0 + oniguruma-parser: 0.12.1 + regex: 6.0.1 + regex-recursion: 6.0.2 dev: true - /ora@8.0.1: - resolution: {integrity: sha512-ANIvzobt1rls2BDny5fWZ3ZVKyD6nscLvfFRpQgfWsythlcsVUC9kL0zq6j2Z5z9wwp1kd7wpsD/T9qNPVLCaQ==} + /ora@8.2.0: + resolution: {integrity: sha512-weP+BZ8MVNnlCm8c0Qdc1WSWq4Qn7I+9CJGm7Qali6g44e/PUzbjNqJX5NJ9ljlNMosfJvg1fKEGILklK9cwnw==} engines: {node: '>=18'} dependencies: - chalk: 5.3.0 - cli-cursor: 4.0.0 + chalk: 5.4.1 + cli-cursor: 5.0.0 cli-spinners: 2.9.2 is-interactive: 2.0.0 - is-unicode-supported: 2.0.0 + is-unicode-supported: 2.1.0 log-symbols: 6.0.0 stdin-discarder: 0.2.2 - string-width: 7.1.0 + string-width: 7.2.0 strip-ansi: 7.1.0 dev: true @@ -2796,17 +3708,31 @@ packages: engines: {node: '>=6'} dev: true - /parse5-htmlparser2-tree-adapter@7.0.0: - resolution: {integrity: sha512-B77tOZrqqfUfnVcOrUvfdLbz4pu4RopLD/4vmu3HUPswwTA8OH0EMW9BlWR2B0RCoiZRAHEUu7IxeP1Pd1UU+g==} + /package-manager-detector@1.3.0: + resolution: {integrity: sha512-ZsEbbZORsyHuO00lY1kV3/t72yp6Ysay6Pd17ZAlNGuGwmWDLCJxFpRs0IzfXfj1o4icJOkUEioexFHzyPurSQ==} + dev: true + + /parse5-htmlparser2-tree-adapter@7.1.0: + resolution: {integrity: sha512-ruw5xyKs6lrpo9x9rCZqZZnIUntICjQAd0Wsmp396Ul9lN/h+ifgVV1x1gZHi8euej6wTfpqX8j+BFQxF0NS/g==} dependencies: domhandler: 5.0.3 - parse5: 7.1.2 + parse5: 7.3.0 dev: true - /parse5@7.1.2: - resolution: {integrity: sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==} + /parse5-parser-stream@7.1.2: + resolution: {integrity: sha512-JyeQc9iwFLn5TbvvqACIF/VXG6abODeB3Fwmv/TGdLk2LfbWkaySGY72at4+Ty7EkPZj854u4CrICqNk2qIbow==} dependencies: - entities: 4.5.0 + parse5: 7.3.0 + dev: true + + /parse5@7.3.0: + resolution: {integrity: sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw==} + dependencies: + entities: 6.0.0 + dev: true + + /path-data-parser@0.1.0: + resolution: {integrity: sha512-NOnmBpt5Y2RWbuv0LMzsayp3lVylAHLPUTut412ZA3l+C4uw4ZVkQbjShYCQ8TCpUMdPapr4YjUqLYD6v68j+w==} dev: true /path-exists@4.0.0: @@ -2814,19 +3740,17 @@ packages: engines: {node: '>=8'} dev: true - /path-key@3.1.1: - resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} - engines: {node: '>=8'} + /path-type@6.0.0: + resolution: {integrity: sha512-Vj7sf++t5pBD637NSfkxpHSMfWaeig5+DKWLhcqIYx6mWQz5hdJTGDVMQiJcw1ZYkhs7AazKDGpRVji1LJCZUQ==} + engines: {node: '>=18'} dev: true - /path-key@4.0.0: - resolution: {integrity: sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==} - engines: {node: '>=12'} + /pathe@2.0.3: + resolution: {integrity: sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==} dev: true - /path-type@5.0.0: - resolution: {integrity: sha512-5HviZNaZcfqP95rwpv+1HDgUamezbqdSYTyzjTvwtJSnIH+3vnbmWsItli8OFEndS984VT55M3jduxZbX351gg==} - engines: {node: '>=12'} + /perfect-debounce@1.0.0: + resolution: {integrity: 
sha512-xCy9V055GLEqoFaHoC1SoLIaLmWctgCUaBaWxDZ7/Zx4CTyX7cJQLJOok/orfjZAh9kEYpjJa4d0KcJmCbctZA==} dev: true /photoswipe@5.4.4: @@ -2834,8 +3758,8 @@ packages: engines: {node: '>= 0.12.0'} dev: true - /picocolors@1.0.1: - resolution: {integrity: sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==} + /picocolors@1.1.1: + resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==} dev: true /picomatch@2.3.1: @@ -2843,18 +3767,51 @@ packages: engines: {node: '>=8.6'} dev: true + /picomatch@4.0.2: + resolution: {integrity: sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==} + engines: {node: '>=12'} + dev: true + + /pkg-types@1.3.1: + resolution: {integrity: sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ==} + dependencies: + confbox: 0.1.8 + mlly: 1.7.4 + pathe: 2.0.3 + dev: true + + /pkg-types@2.1.0: + resolution: {integrity: sha512-wmJwA+8ihJixSoHKxZJRBQG1oY8Yr9pGLzRmSsNms0iNWyHHAlZCa7mmKiFR10YPZuz/2k169JiS/inOjBCZ2A==} + dependencies: + confbox: 0.2.2 + exsolve: 1.0.5 + pathe: 2.0.3 + dev: true + /pngjs@5.0.0: resolution: {integrity: sha512-40QW5YalBNfQo5yRYmiw7Yz6TKKVr3h6970B2YE+3fQpsWcrbj1PzJgxeJ19DRQjhMbKPIuMY8rFaXc8moolVw==} engines: {node: '>=10.13.0'} dev: true - /postcss-load-config@5.1.0(postcss@8.4.38): - resolution: {integrity: sha512-G5AJ+IX0aD0dygOE0yFZQ/huFFMSNneyfp0e3/bT05a8OfPC5FUoZRPfGijUdGOJNMewJiwzcHJXFafFzeKFVA==} + /points-on-curve@0.2.0: + resolution: {integrity: sha512-0mYKnYYe9ZcqMCWhUjItv/oHjvgEsfKvnUTg8sAtnHr3GVy7rGkXCb6d5cSyqrWqL4k81b9CPg3urd+T7aop3A==} + dev: true + + /points-on-path@0.2.1: + resolution: {integrity: sha512-25ClnWWuw7JbWZcgqY/gJ4FQWadKxGWk+3kR/7kD0tCaDtPPMj7oHu2ToLaVhfpnHrZzYby2w6tUA0eOIuUg8g==} + dependencies: + path-data-parser: 0.1.0 + points-on-curve: 0.2.0 + dev: true + + /postcss-load-config@6.0.1(postcss@8.5.3): + resolution: {integrity: sha512-oPtTM4oerL+UXmx+93ytZVN82RrlY/wPUV8IeDxFrzIjXOLF1pN+EmKPLbubvKHT2HC20xXsCAH2Z+CKV6Oz/g==} engines: {node: '>= 18'} peerDependencies: jiti: '>=1.21.0' postcss: '>=8.0.9' tsx: ^4.8.1 + yaml: ^2.4.2 peerDependenciesMeta: jiti: optional: true @@ -2862,28 +3819,28 @@ packages: optional: true tsx: optional: true + yaml: + optional: true dependencies: - lilconfig: 3.1.2 - postcss: 8.4.38 - yaml: 2.4.5 + lilconfig: 3.1.3 + postcss: 8.5.3 dev: true /postcss-value-parser@4.2.0: resolution: {integrity: sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==} dev: true - /postcss@8.4.38: - resolution: {integrity: sha512-Wglpdk03BSfXkHoQa3b/oulrotAkwrlLDRSOb9D0bN86FdRyE9lppSp33aHNPgBa0JKCoB+drFLZkQoRRYae5A==} + /postcss@8.5.3: + resolution: {integrity: sha512-dle9A3yYxlBSrt8Fu+IpjGT8SY8hN0mlaA6GY8t0P5PjIOZemULz/E2Bnm/2dcUOena75OTNkHI76uZBNUUq3A==} engines: {node: ^10 || ^12 || >=14} dependencies: - nanoid: 3.3.7 - picocolors: 1.0.1 - source-map-js: 1.2.0 + nanoid: 3.3.11 + picocolors: 1.1.1 + source-map-js: 1.2.1 dev: true - /prismjs@1.29.0: - resolution: {integrity: sha512-Kx/1w86q/epKcmte75LNrEoT+lX8pBpavuAbvJWRXar7Hz8jrtF+e3vY751p0R8H9HdArwaCTNDDzHg/ScJK1Q==} - engines: {node: '>=6'} + /property-information@7.1.0: + resolution: {integrity: sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ==} dev: true /punycode.js@2.3.1: @@ -2891,103 +3848,362 @@ packages: engines: {node: '>=6'} dev: true - /qrcode@1.5.3: - resolution: {integrity: 
sha512-puyri6ApkEHYiVl4CFzo1tDkAZ+ATcnbJrJ6RiBM1Fhctdn/ix9MTE3hRph33omisEbC/2fcfemsseiKgBPKZg==} + /qrcode@1.5.4: + resolution: {integrity: sha512-1ca71Zgiu6ORjHqFBDpnSMTR2ReToX4l1Au1VFLyVeBTFavzQnv5JxMFr3ukHVKpSrSA2MCk0lNJSykjUfz7Zg==} engines: {node: '>=10.13.0'} hasBin: true dependencies: dijkstrajs: 1.0.3 - encode-utf8: 1.0.3 pngjs: 5.0.0 yargs: 15.4.1 dev: true + /quansync@0.2.10: + resolution: {integrity: sha512-t41VRkMYbkHyCYmOvx/6URnN80H7k4X0lLdBMGsz+maAwrJQYB1djpV6vHrQIBE0WBSGqhtEHrK9U3DWWH8v7A==} + dev: true + /queue-microtask@1.2.3: resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} dev: true - /readdirp@3.6.0: - resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==} - engines: {node: '>=8.10.0'} - dependencies: - picomatch: 2.3.1 + /readdirp@3.6.0: + resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==} + engines: {node: '>=8.10.0'} + dependencies: + picomatch: 2.3.1 + dev: true + + /readdirp@4.1.2: + resolution: {integrity: sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==} + engines: {node: '>= 14.18.0'} + dev: true + + /regex-recursion@6.0.2: + resolution: {integrity: sha512-0YCaSCq2VRIebiaUviZNs0cBz1kg5kVS2UKUfNIx8YVs1cN3AV7NTctO5FOKBA+UT2BPJIWZauYHPqJODG50cg==} + dependencies: + regex-utilities: 2.3.0 + dev: true + + /regex-utilities@2.3.0: + resolution: {integrity: sha512-8VhliFJAWRaUiVvREIiW2NXXTmHs4vMNnSzuJVhscgmGav3g9VDxLrQndI3dZZVVdp0ZO/5v0xmX516/7M9cng==} + dev: true + + /regex@6.0.1: + resolution: {integrity: sha512-uorlqlzAKjKQZ5P+kTJr3eeJGSVroLKoHmquUj4zHWuR+hEyNqlXsSKlYYF5F4NI6nl7tWCs0apKJ0lmfsXAPA==} + dependencies: + regex-utilities: 2.3.0 + dev: true + + /rehype-parse@9.0.1: + resolution: {integrity: sha512-ksCzCD0Fgfh7trPDxr2rSylbwq9iYDkSn8TCDmEJ49ljEUBxDVCzCHv7QNzZOfODanX4+bWQ4WZqLCRWYLfhag==} + dependencies: + '@types/hast': 3.0.4 + hast-util-from-html: 2.0.3 + unified: 11.0.5 + dev: true + + /rehype-sanitize@6.0.0: + resolution: {integrity: sha512-CsnhKNsyI8Tub6L4sm5ZFsme4puGfc6pYylvXo1AeqaGbjOYyzNv3qZPwvs0oMJ39eryyeOdmxwUIo94IpEhqg==} + dependencies: + '@types/hast': 3.0.4 + hast-util-sanitize: 5.0.2 + dev: true + + /rehype-stringify@10.0.1: + resolution: {integrity: sha512-k9ecfXHmIPuFVI61B9DeLPN0qFHfawM6RsuX48hoqlaKSF61RskNjSm1lI8PhBEM0MRdLxVVm4WmTqJQccH9mA==} + dependencies: + '@types/hast': 3.0.4 + hast-util-to-html: 9.0.5 + unified: 11.0.5 + dev: true + + /require-directory@2.1.1: + resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==} + engines: {node: '>=0.10.0'} + dev: true + + /require-main-filename@2.0.0: + resolution: {integrity: sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==} + dev: true + + /restore-cursor@5.1.0: + resolution: {integrity: sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA==} + engines: {node: '>=18'} + dependencies: + onetime: 7.0.0 + signal-exit: 4.1.0 + dev: true + + /reusify@1.1.0: + resolution: {integrity: sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==} + engines: {iojs: '>=1.0.0', node: '>=0.10.0'} + dev: true + + /rfdc@1.4.1: + resolution: {integrity: sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==} + dev: true + + 
/robust-predicates@3.0.2: + resolution: {integrity: sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg==} + dev: true + + /rollup@4.41.1: + resolution: {integrity: sha512-cPmwD3FnFv8rKMBc1MxWCwVQFxwf1JEmSX3iQXrRVVG15zerAIXRjMFVWnd5Q5QvgKF7Aj+5ykXFhUl+QGnyOw==} + engines: {node: '>=18.0.0', npm: '>=8.0.0'} + hasBin: true + dependencies: + '@types/estree': 1.0.7 + optionalDependencies: + '@rollup/rollup-android-arm-eabi': 4.41.1 + '@rollup/rollup-android-arm64': 4.41.1 + '@rollup/rollup-darwin-arm64': 4.41.1 + '@rollup/rollup-darwin-x64': 4.41.1 + '@rollup/rollup-freebsd-arm64': 4.41.1 + '@rollup/rollup-freebsd-x64': 4.41.1 + '@rollup/rollup-linux-arm-gnueabihf': 4.41.1 + '@rollup/rollup-linux-arm-musleabihf': 4.41.1 + '@rollup/rollup-linux-arm64-gnu': 4.41.1 + '@rollup/rollup-linux-arm64-musl': 4.41.1 + '@rollup/rollup-linux-loongarch64-gnu': 4.41.1 + '@rollup/rollup-linux-powerpc64le-gnu': 4.41.1 + '@rollup/rollup-linux-riscv64-gnu': 4.41.1 + '@rollup/rollup-linux-riscv64-musl': 4.41.1 + '@rollup/rollup-linux-s390x-gnu': 4.41.1 + '@rollup/rollup-linux-x64-gnu': 4.41.1 + '@rollup/rollup-linux-x64-musl': 4.41.1 + '@rollup/rollup-win32-arm64-msvc': 4.41.1 + '@rollup/rollup-win32-ia32-msvc': 4.41.1 + '@rollup/rollup-win32-x64-msvc': 4.41.1 + fsevents: 2.3.3 + dev: true + + /roughjs@4.6.6: + resolution: {integrity: sha512-ZUz/69+SYpFN/g/lUlo2FXcIjRkSu3nDarreVdGGndHEBJ6cXPdKguS8JGxwj5HA5xIbVKSmLgr5b3AWxtRfvQ==} + dependencies: + hachure-fill: 0.5.2 + path-data-parser: 0.1.0 + points-on-curve: 0.2.0 + points-on-path: 0.2.1 + dev: true + + /run-parallel@1.2.0: + resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} + dependencies: + queue-microtask: 1.2.3 + dev: true + + /rw@1.3.3: + resolution: {integrity: sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ==} + dev: true + + /rxjs@7.8.2: + resolution: {integrity: sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA==} + dependencies: + tslib: 2.8.1 + dev: true + + /safer-buffer@2.1.2: + resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==} + dev: true + + /sass-embedded-android-arm64@1.89.0: + resolution: {integrity: sha512-pr4R3p5R+Ul9ZA5nzYbBJQFJXW6dMGzgpNBhmaToYDgDhmNX5kg0mZAUlGLHvisLdTiR6oEfDDr9QI6tnD2nqA==} + engines: {node: '>=14.0.0'} + cpu: [arm64] + os: [android] + dev: true + optional: true + + /sass-embedded-android-arm@1.89.0: + resolution: {integrity: sha512-s6jxkEZQQrtyIGZX6Sbcu7tEixFG2VkqFgrX11flm/jZex7KaxnZtFace+wnYAgHqzzYpx0kNzJUpT+GXxm8CA==} + engines: {node: '>=14.0.0'} + cpu: [arm] + os: [android] + dev: true + optional: true + + /sass-embedded-android-ia32@1.89.0: + resolution: {integrity: sha512-GoNnNGYmp1F0ZMHqQbAurlQsjBMZKtDd5H60Ruq86uQFdnuNqQ9wHKJsJABxMnjfAn60IjefytM5PYTMcAmbfA==} + engines: {node: '>=14.0.0'} + cpu: [ia32] + os: [android] + dev: true + optional: true + + /sass-embedded-android-riscv64@1.89.0: + resolution: {integrity: sha512-di+i4KkKAWTNksaQYTqBEERv46qV/tvv14TPswEfak7vcTQ2pj2mvV4KGjLYfU2LqRkX/NTXix9KFthrzFN51Q==} + engines: {node: '>=14.0.0'} + cpu: [riscv64] + os: [android] + dev: true + optional: true + + /sass-embedded-android-x64@1.89.0: + resolution: {integrity: sha512-1cRRDAnmAS1wLaxfFf6PCHu9sKW8FNxdM7ZkanwxO9mztrCu/uvfqTmaurY9+RaKvPus7sGYFp46/TNtl/wRjg==} + engines: {node: '>=14.0.0'} + cpu: [x64] + os: [android] + dev: true + optional: true 
+ + /sass-embedded-darwin-arm64@1.89.0: + resolution: {integrity: sha512-EUNUzI0UkbQ6dASPyf09S3x7fNT54PjyD594ZGTY14Yh4qTuacIj27ckLmreAJNNu5QxlbhyYuOtz+XN5bMMxA==} + engines: {node: '>=14.0.0'} + cpu: [arm64] + os: [darwin] + dev: true + optional: true + + /sass-embedded-darwin-x64@1.89.0: + resolution: {integrity: sha512-23R8zSuB31Fq/MYpmQ38UR2C26BsYb66VVpJgWmWl/N+sgv/+l9ECuSPMbYNgM3vb9TP9wk9dgL6KkiCS5tAyg==} + engines: {node: '>=14.0.0'} + cpu: [x64] + os: [darwin] + dev: true + optional: true + + /sass-embedded-linux-arm64@1.89.0: + resolution: {integrity: sha512-g9Lp57qyx51ttKj0AN/edV43Hu1fBObvD7LpYwVfs6u3I95r0Adi90KujzNrUqXxJVmsfUwseY8kA8zvcRjhYA==} + engines: {node: '>=14.0.0'} + cpu: [arm64] + os: [linux] + dev: true + optional: true + + /sass-embedded-linux-arm@1.89.0: + resolution: {integrity: sha512-KAzA1XD74d8/fiJXxVnLfFwfpmD2XqUJZz+DL6ZAPNLH1sb+yCP7brktaOyClDc/MBu61JERdHaJjIZhfX0Yqw==} + engines: {node: '>=14.0.0'} + cpu: [arm] + os: [linux] + dev: true + optional: true + + /sass-embedded-linux-ia32@1.89.0: + resolution: {integrity: sha512-5fxBeXyvBr3pb+vyrx9V6yd7QDRXkAPbwmFVVhjqshBABOXelLysEFea7xokh/tM8JAAQ4O8Ls3eW3Eojb477g==} + engines: {node: '>=14.0.0'} + cpu: [ia32] + os: [linux] dev: true + optional: true - /require-directory@2.1.1: - resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==} - engines: {node: '>=0.10.0'} + /sass-embedded-linux-musl-arm64@1.89.0: + resolution: {integrity: sha512-50oelrOtN64u15vJN9uJryIuT0+UPjyeoq0zdWbY8F7LM9294Wf+Idea+nqDUWDCj1MHndyPFmR1mjeuRouJhw==} + engines: {node: '>=14.0.0'} + cpu: [arm64] + os: [linux] dev: true + optional: true - /require-main-filename@2.0.0: - resolution: {integrity: sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==} + /sass-embedded-linux-musl-arm@1.89.0: + resolution: {integrity: sha512-0Q1JeEU4/tzH7fwAwarfIh+Swn3aXG/jPhVsZpbR1c1VzkeaPngmXdmLJcVXsdb35tjk84DuYcFtJlE1HYGw4Q==} + engines: {node: '>=14.0.0'} + cpu: [arm] + os: [linux] dev: true + optional: true - /restore-cursor@4.0.0: - resolution: {integrity: sha512-I9fPXU9geO9bHOt9pHHOhOkYerIMsmVaWB0rA2AI9ERh/+x/i7MV5HKBNrg+ljO5eoPVgCcnFuRjJ9uH6I/3eg==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - dependencies: - onetime: 5.1.2 - signal-exit: 3.0.7 + /sass-embedded-linux-musl-ia32@1.89.0: + resolution: {integrity: sha512-ILWqpTd+0RdsSw977iVAJf4CLetIbcQgLQf17ycS1N4StZKVRZs1bBfZhg/f/HU/4p5HondPAwepgJepZZdnFA==} + engines: {node: '>=14.0.0'} + cpu: [ia32] + os: [linux] dev: true + optional: true - /reusify@1.0.4: - resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==} - engines: {iojs: '>=1.0.0', node: '>=0.10.0'} + /sass-embedded-linux-musl-riscv64@1.89.0: + resolution: {integrity: sha512-n2V+Tdjj7SAuiuElJYhWiHjjB1YU0cuFvL1/m5K+ecdNStfHFWIzvBT6/vzQnBOWjI4eZECNVuQ8GwGWCufZew==} + engines: {node: '>=14.0.0'} + cpu: [riscv64] + os: [linux] dev: true + optional: true - /robust-predicates@3.0.2: - resolution: {integrity: sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg==} + /sass-embedded-linux-musl-x64@1.89.0: + resolution: {integrity: sha512-KOHJdouBK3SLJKZLnFYzuxs3dn+6jaeO3p4p1JUYAcVfndcvh13Sg2sLGfOfpg7Og6ws2Nnqnx0CyL26jPJ7ag==} + engines: {node: '>=14.0.0'} + cpu: [x64] + os: [linux] dev: true + optional: true - /rollup@4.18.0: - resolution: {integrity: 
sha512-QmJz14PX3rzbJCN1SG4Xe/bAAX2a6NpCP8ab2vfu2GiUr8AQcr2nCV/oEO3yneFarB67zk8ShlIyWb2LGTb3Sg==} - engines: {node: '>=18.0.0', npm: '>=8.0.0'} - hasBin: true - dependencies: - '@types/estree': 1.0.5 - optionalDependencies: - '@rollup/rollup-android-arm-eabi': 4.18.0 - '@rollup/rollup-android-arm64': 4.18.0 - '@rollup/rollup-darwin-arm64': 4.18.0 - '@rollup/rollup-darwin-x64': 4.18.0 - '@rollup/rollup-linux-arm-gnueabihf': 4.18.0 - '@rollup/rollup-linux-arm-musleabihf': 4.18.0 - '@rollup/rollup-linux-arm64-gnu': 4.18.0 - '@rollup/rollup-linux-arm64-musl': 4.18.0 - '@rollup/rollup-linux-powerpc64le-gnu': 4.18.0 - '@rollup/rollup-linux-riscv64-gnu': 4.18.0 - '@rollup/rollup-linux-s390x-gnu': 4.18.0 - '@rollup/rollup-linux-x64-gnu': 4.18.0 - '@rollup/rollup-linux-x64-musl': 4.18.0 - '@rollup/rollup-win32-arm64-msvc': 4.18.0 - '@rollup/rollup-win32-ia32-msvc': 4.18.0 - '@rollup/rollup-win32-x64-msvc': 4.18.0 - fsevents: 2.3.3 + /sass-embedded-linux-riscv64@1.89.0: + resolution: {integrity: sha512-0A/UWeKX6MYhVLWLkdX3NPKHO+mvIwzaf6TxGCy3vS3TODWaeDUeBhHShAr7YlOKv5xRGxf7Gx7FXCPV0mUyMA==} + engines: {node: '>=14.0.0'} + cpu: [riscv64] + os: [linux] dev: true + optional: true - /run-parallel@1.2.0: - resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} - dependencies: - queue-microtask: 1.2.3 + /sass-embedded-linux-x64@1.89.0: + resolution: {integrity: sha512-dRBoOFPDWctHPYK3hTk3YzyX/icVrXiw7oOjbtpaDr6JooqIWBe16FslkWyvQzdmfOFy80raKVjgoqT7DsznkQ==} + engines: {node: '>=14.0.0'} + cpu: [x64] + os: [linux] dev: true + optional: true - /rw@1.3.3: - resolution: {integrity: sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ==} + /sass-embedded-win32-arm64@1.89.0: + resolution: {integrity: sha512-RnlVZ14hC/W7ubzvhqnbGfjU5PFNoFP/y5qycgCy+Mezb0IKbWvZ2Lyzux8TbL3OIjOikkNpfXoNQrX706WLAA==} + engines: {node: '>=14.0.0'} + cpu: [arm64] + os: [win32] dev: true + optional: true - /sade@1.8.1: - resolution: {integrity: sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A==} - engines: {node: '>=6'} - dependencies: - mri: 1.2.0 + /sass-embedded-win32-ia32@1.89.0: + resolution: {integrity: sha512-eFe9VMNG+90nuoE3eXDy+38+uEHGf7xcqalq5+0PVZfR+H9RlaEbvIUNflZV94+LOH8Jb4lrfuekhHgWDJLfSg==} + engines: {node: '>=14.0.0'} + cpu: [ia32] + os: [win32] dev: true + optional: true - /safer-buffer@2.1.2: - resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==} + /sass-embedded-win32-x64@1.89.0: + resolution: {integrity: sha512-AaGpr5R6MLCuSvkvDdRq49ebifwLcuGPk0/10hbYw9nh3jpy2/CylYubQpIpR4yPcuD1wFwFqufTXC3HJYGb0g==} + engines: {node: '>=14.0.0'} + cpu: [x64] + os: [win32] dev: true + optional: true - /sass-loader@14.2.1: - resolution: {integrity: sha512-G0VcnMYU18a4N7VoNDegg2OuMjYtxnqzQWARVWCIVSZwJeiL9kg8QMsuIZOplsJgTzZLF6jGxI3AClj8I9nRdQ==} + /sass-embedded@1.89.0: + resolution: {integrity: sha512-EDrK1el9zdgJFpocCGlxatDWaP18tJBWoM1hxzo2KJBvjdmBichXI6O6KlQrigvQPO3uJ8DfmFmAAx7s7CG6uw==} + engines: {node: '>=16.0.0'} + hasBin: true + dependencies: + '@bufbuild/protobuf': 2.5.0 + buffer-builder: 0.2.0 + colorjs.io: 0.5.2 + immutable: 5.1.2 + rxjs: 7.8.2 + supports-color: 8.1.1 + sync-child-process: 1.0.2 + varint: 6.0.0 + optionalDependencies: + sass-embedded-android-arm: 1.89.0 + sass-embedded-android-arm64: 1.89.0 + sass-embedded-android-ia32: 1.89.0 + sass-embedded-android-riscv64: 1.89.0 + sass-embedded-android-x64: 
1.89.0 + sass-embedded-darwin-arm64: 1.89.0 + sass-embedded-darwin-x64: 1.89.0 + sass-embedded-linux-arm: 1.89.0 + sass-embedded-linux-arm64: 1.89.0 + sass-embedded-linux-ia32: 1.89.0 + sass-embedded-linux-musl-arm: 1.89.0 + sass-embedded-linux-musl-arm64: 1.89.0 + sass-embedded-linux-musl-ia32: 1.89.0 + sass-embedded-linux-musl-riscv64: 1.89.0 + sass-embedded-linux-musl-x64: 1.89.0 + sass-embedded-linux-riscv64: 1.89.0 + sass-embedded-linux-x64: 1.89.0 + sass-embedded-win32-arm64: 1.89.0 + sass-embedded-win32-ia32: 1.89.0 + sass-embedded-win32-x64: 1.89.0 + dev: true + + /sass-loader@16.0.5(sass-embedded@1.89.0)(sass@1.89.0): + resolution: {integrity: sha512-oL+CMBXrj6BZ/zOq4os+UECPL+bWqt6OAC6DWS8Ln8GZRcMDjlJ4JC3FBDuHJdYaFWIdKNIBYmtZtK2MaMkNIw==} engines: {node: '>= 18.12.0'} peerDependencies: '@rspack/core': 0.x || 1.x @@ -3008,16 +4224,20 @@ packages: optional: true dependencies: neo-async: 2.6.2 + sass: 1.89.0 + sass-embedded: 1.89.0 dev: true - /sass@1.77.6: - resolution: {integrity: sha512-ByXE1oLD79GVq9Ht1PeHWCPMPB8XHpBuz1r85oByKHjZY6qV6rWnQovQzXJXuQ/XyE1Oj3iPk3lo28uzaRA2/Q==} + /sass@1.89.0: + resolution: {integrity: sha512-ld+kQU8YTdGNjOLfRWBzewJpU5cwEv/h5yyqlSeJcj6Yh8U4TDA9UA5FPicqDz/xgRPWRSYIQNiFks21TbA9KQ==} engines: {node: '>=14.0.0'} hasBin: true dependencies: - chokidar: 3.6.0 - immutable: 4.3.6 - source-map-js: 1.2.0 + chokidar: 4.0.3 + immutable: 5.1.2 + source-map-js: 1.2.1 + optionalDependencies: + '@parcel/watcher': 2.5.1 dev: true /sax@1.4.1: @@ -3032,30 +4252,21 @@ packages: kind-of: 6.0.3 dev: true - /semver@7.6.2: - resolution: {integrity: sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==} - engines: {node: '>=10'} - hasBin: true - dev: true - /set-blocking@2.0.0: resolution: {integrity: sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==} dev: true - /shebang-command@2.0.0: - resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} - engines: {node: '>=8'} + /shiki@3.4.2: + resolution: {integrity: sha512-wuxzZzQG8kvZndD7nustrNFIKYJ1jJoWIPaBpVe2+KHSvtzMi4SBjOxrigs8qeqce/l3U0cwiC+VAkLKSunHQQ==} dependencies: - shebang-regex: 3.0.0 - dev: true - - /shebang-regex@3.0.0: - resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} - engines: {node: '>=8'} - dev: true - - /signal-exit@3.0.7: - resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==} + '@shikijs/core': 3.4.2 + '@shikijs/engine-javascript': 3.4.2 + '@shikijs/engine-oniguruma': 3.4.2 + '@shikijs/langs': 3.4.2 + '@shikijs/themes': 3.4.2 + '@shikijs/types': 3.4.2 + '@shikijs/vscode-textmate': 10.0.2 + '@types/hast': 3.0.4 dev: true /signal-exit@4.1.0: @@ -3063,9 +4274,9 @@ packages: engines: {node: '>=14'} dev: true - /sitemap@7.1.2: - resolution: {integrity: sha512-ARCqzHJ0p4gWt+j7NlU5eDlIO9+Rkr/JhPFZKKQ1l5GCus7rJH4UdrlVAh0xC/gDS/Qir2UMxqYNHtsKr2rpCw==} - engines: {node: '>=12.0.0', npm: '>=5.6.0'} + /sitemap@8.0.0: + resolution: {integrity: sha512-+AbdxhM9kJsHtruUF39bwS/B0Fytw6Fr1o4ZAIAEqA6cke2xcoO2GleBw9Zw7nRzILVEgz7zBM5GiTJjie1G9A==} + engines: {node: '>=14.0.0', npm: '>=6.0.0'} hasBin: true dependencies: '@types/node': 17.0.45 @@ -3079,13 +4290,22 @@ packages: engines: {node: '>=14.16'} dev: true - /slimsearch@2.1.1: - resolution: {integrity: 
sha512-l1utJWal8F/RIheYk88DE2+enI12nIrn5SHt4ih/CNAH81PzkTv2GVBODlLynDJb7xan5hjd8XTL5f0L4cxLQA==} + /slimsearch@2.2.2: + resolution: {integrity: sha512-C+E3y4sKKzPzcOyty0G9CjXdLvY0ZWPSCQCDNqSv/P6+rvoL6RiHvuwr0wnxf0QgdbdyNiJQ0w7OdudHoabpCg==} engines: {node: '>=18.18.0'} dev: true - /source-map-js@1.2.0: - resolution: {integrity: sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==} + /source-map-js@1.2.1: + resolution: {integrity: sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==} + engines: {node: '>=0.10.0'} + dev: true + + /space-separated-tokens@2.0.2: + resolution: {integrity: sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==} + dev: true + + /speakingurl@14.0.1: + resolution: {integrity: sha512-1POYv7uv2gXoyGFpBCmpDVSNV74IfsWlDW216UPjbWufNf+bSU6GdbDsxdcxtfwb4xlI3yxzOTKClUosxARYrQ==} engines: {node: '>=0.10.0'} dev: true @@ -3107,15 +4327,22 @@ packages: strip-ansi: 6.0.1 dev: true - /string-width@7.1.0: - resolution: {integrity: sha512-SEIJCWiX7Kg4c129n48aDRwLbFb2LJmXXFrWBG4NGaRtMQ3myKPKbwrD1BKqQn74oCoNMBVrfDEr5M9YxCsrkw==} + /string-width@7.2.0: + resolution: {integrity: sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==} engines: {node: '>=18'} dependencies: - emoji-regex: 10.3.0 - get-east-asian-width: 1.2.0 + emoji-regex: 10.4.0 + get-east-asian-width: 1.3.0 strip-ansi: 7.1.0 dev: true + /stringify-entities@4.0.4: + resolution: {integrity: sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg==} + dependencies: + character-entities-html4: 2.1.0 + character-entities-legacy: 3.0.0 + dev: true + /strip-ansi@6.0.1: resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} engines: {node: '>=8'} @@ -3127,7 +4354,7 @@ packages: resolution: {integrity: sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==} engines: {node: '>=12'} dependencies: - ansi-regex: 6.0.1 + ansi-regex: 6.1.0 dev: true /strip-bom-string@1.0.0: @@ -3135,18 +4362,53 @@ packages: engines: {node: '>=0.10.0'} dev: true - /strip-final-newline@3.0.0: - resolution: {integrity: sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==} - engines: {node: '>=12'} + /stylis@4.3.6: + resolution: {integrity: sha512-yQ3rwFWRfwNUY7H5vpU0wfdkNSnvnJinhF9830Swlaxl03zsOjCfmX0ugac+3LtK0lYSgwL/KXc8oYL3mG4YFQ==} + dev: true + + /superjson@2.2.2: + resolution: {integrity: sha512-5JRxVqC8I8NuOUjzBbvVJAKNM8qoVuH0O77h4WInc/qC2q5IreqKxYwgkga3PfA22OayK2ikceb/B26dztPl+Q==} + engines: {node: '>=16'} + dependencies: + copy-anything: 3.0.5 + dev: true + + /supports-color@8.1.1: + resolution: {integrity: sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==} + engines: {node: '>=10'} + dependencies: + has-flag: 4.0.0 dev: true - /stylis@4.3.2: - resolution: {integrity: sha512-bhtUjWd/z6ltJiQwg0dUfxEJ+W+jdqQd8TbWLWyeIJHlnsqmGLRFFd8e5mA0AZi/zx90smXRlN66YMTcaSFifg==} + /sync-child-process@1.0.2: + resolution: {integrity: sha512-8lD+t2KrrScJ/7KXCSyfhT3/hRq78rC0wBFqNJXv3mZyn6hW2ypM05JmlSvtqRbeq6jqA94oHbxAr2vYsJ8vDA==} + engines: {node: '>=16.0.0'} + dependencies: + sync-message-port: 1.1.3 dev: true - /to-fast-properties@2.0.0: - resolution: {integrity: sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==} - 
engines: {node: '>=4'} + /sync-message-port@1.1.3: + resolution: {integrity: sha512-GTt8rSKje5FilG+wEdfCkOcLL7LWqpMlr2c3LRuKt/YXxcJ52aGSbGBAdI4L3aaqfrBt6y711El53ItyH1NWzg==} + engines: {node: '>=16.0.0'} + dev: true + + /synckit@0.11.6: + resolution: {integrity: sha512-2pR2ubZSV64f/vqm9eLPz/KOvR9Dm+Co/5ChLgeHl0yEDRc6h5hXHoxEQH8Y5Ljycozd3p1k5TTSVdzYGkPvLw==} + engines: {node: ^14.18.0 || >=16.0.0} + dependencies: + '@pkgr/core': 0.2.4 + dev: true + + /tinyexec@1.0.1: + resolution: {integrity: sha512-5uC6DDlmeqiOwCPmK9jMSdOuZTh8bU39Ys6yidB+UTt5hfZUPGAypSgFRiEp+jbi9qH40BLDvy85jIU88wKSqw==} + dev: true + + /tinyglobby@0.2.14: + resolution: {integrity: sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ==} + engines: {node: '>=12.0.0'} + dependencies: + fdir: 6.4.4(picomatch@4.0.2) + picomatch: 4.0.2 dev: true /to-regex-range@5.0.1: @@ -3156,28 +4418,88 @@ packages: is-number: 7.0.0 dev: true + /trim-lines@3.0.1: + resolution: {integrity: sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==} + dev: true + + /trough@2.2.0: + resolution: {integrity: sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw==} + dev: true + /ts-dedent@2.2.0: resolution: {integrity: sha512-q5W7tVM71e2xjHZTlgfTDoPF/SmqKG5hddq9SzR49CH2hayqRKJtQ4mtRlSxKaJlR/+9rEM+mnBHf7I2/BQcpQ==} engines: {node: '>=6.10'} dev: true + /tslib@2.8.1: + resolution: {integrity: sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==} + dev: true + /uc.micro@2.1.0: resolution: {integrity: sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==} dev: true - /undici-types@5.26.5: - resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==} + /ufo@1.6.1: + resolution: {integrity: sha512-9a4/uxlTWJ4+a5i0ooc1rU7C7YOw3wT+UGqdeNNHWnOF9qcMBgLRS+4IYUqbczewFx4mLEig6gawh7X6mFlEkA==} + dev: true + + /undici-types@6.21.0: + resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==} dev: true - /unicorn-magic@0.1.0: - resolution: {integrity: sha512-lRfVq8fE8gz6QMBuDM6a+LO3IAzTi05H6gCVaUpir2E1Rwpo4ZUog45KpNXKC/Mn3Yb9UDuHumeFTo9iV/D9FQ==} + /undici@6.21.3: + resolution: {integrity: sha512-gBLkYIlEnSp8pFbT64yFgGE6UIB9tAkhukC23PmMDCe5Nd+cRqKxSjw5y54MK2AZMgZfJWMaNE4nYUHgi1XEOw==} + engines: {node: '>=18.17'} + dev: true + + /unicorn-magic@0.3.0: + resolution: {integrity: sha512-+QBBXBCvifc56fsbuxZQ6Sic3wqqc3WWaqxs58gvJrcOuN83HGTCwz3oS5phzU9LthRNE9VrJCFCLUgHeeFnfA==} engines: {node: '>=18'} dev: true - /unist-util-stringify-position@3.0.3: - resolution: {integrity: sha512-k5GzIBZ/QatR8N5X2y+drfpWG8IDBzdnVj6OInRNWm1oXrzydiaAT2OQiA8DPRRZyAKb9b6I2a6PxYklZD0gKg==} + /unified@11.0.5: + resolution: {integrity: sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==} + dependencies: + '@types/unist': 3.0.3 + bail: 2.0.2 + devlop: 1.1.0 + extend: 3.0.2 + is-plain-obj: 4.1.0 + trough: 2.2.0 + vfile: 6.0.3 + dev: true + + /unist-util-is@6.0.0: + resolution: {integrity: sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==} + dependencies: + '@types/unist': 3.0.3 + dev: true + + /unist-util-position@5.0.0: + resolution: {integrity: sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA==} + dependencies: + '@types/unist': 3.0.3 + 
dev: true + + /unist-util-stringify-position@4.0.0: + resolution: {integrity: sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==} + dependencies: + '@types/unist': 3.0.3 + dev: true + + /unist-util-visit-parents@6.0.1: + resolution: {integrity: sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==} + dependencies: + '@types/unist': 3.0.3 + unist-util-is: 6.0.0 + dev: true + + /unist-util-visit@5.0.0: + resolution: {integrity: sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==} dependencies: - '@types/unist': 2.0.10 + '@types/unist': 3.0.3 + unist-util-is: 6.0.0 + unist-util-visit-parents: 6.0.1 dev: true /universalify@2.0.1: @@ -3190,73 +4512,134 @@ packages: engines: {node: '>=4'} dev: true - /update-browserslist-db@1.0.16(browserslist@4.23.1): - resolution: {integrity: sha512-KVbTxlBYlckhF5wgfyZXTWnMn7MMZjMu9XG8bPlliUOP9ThaF4QnhP8qrjrH7DRzHfSk0oQv1wToW+iA5GajEQ==} + /update-browserslist-db@1.1.3(browserslist@4.24.5): + resolution: {integrity: sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==} hasBin: true peerDependencies: browserslist: '>= 4.21.0' dependencies: - browserslist: 4.23.1 - escalade: 3.1.2 - picocolors: 1.0.1 + browserslist: 4.24.5 + escalade: 3.2.0 + picocolors: 1.1.1 dev: true - /uuid@9.0.1: - resolution: {integrity: sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==} + /uuid@11.1.0: + resolution: {integrity: sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A==} hasBin: true dev: true - /uvu@0.5.6: - resolution: {integrity: sha512-+g8ENReyr8YsOc6fv/NVJs2vFdHBnBNdfE49rshrTzDWOlUx4Gq7KOS2GD8eqhy2j+Ejq29+SbKH8yjkAqXqoA==} - engines: {node: '>=8'} - hasBin: true + /varint@6.0.0: + resolution: {integrity: sha512-cXEIW6cfr15lFv563k4GuVuW/fiwjknytD37jIOLSdSWuOI6WnO/oKwmP2FQTU2l01LP8/M5TSAJpzUaGe3uWg==} + dev: true + + /vfile-location@5.0.3: + resolution: {integrity: sha512-5yXvWDEgqeiYiBe1lbxYF7UMAIm/IcopxMHrMQDq3nvKcjPKIhZklUKL+AE7J7uApI4kwe2snsK+eI6UTj9EHg==} dependencies: - dequal: 2.0.3 - diff: 5.2.0 - kleur: 4.1.5 - sade: 1.8.1 + '@types/unist': 3.0.3 + vfile: 6.0.3 dev: true - /vite@5.2.13: - resolution: {integrity: sha512-SSq1noJfY9pR3I1TUENL3rQYDQCFqgD+lM6fTRAM8Nv6Lsg5hDLaXkjETVeBt+7vZBCMoibD+6IWnT2mJ+Zb/A==} - engines: {node: ^18.0.0 || >=20.0.0} + /vfile-message@4.0.2: + resolution: {integrity: sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw==} + dependencies: + '@types/unist': 3.0.3 + unist-util-stringify-position: 4.0.0 + dev: true + + /vfile@6.0.3: + resolution: {integrity: sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==} + dependencies: + '@types/unist': 3.0.3 + vfile-message: 4.0.2 + dev: true + + /vite@6.3.5(sass-embedded@1.89.0)(sass@1.89.0): + resolution: {integrity: sha512-cZn6NDFE7wdTpINgs++ZJ4N49W2vRp8LCKrn3Ob1kYNtOo21vfDoaV5GzBfLU4MovSAB8uNRm4jgzVQZ+mBzPQ==} + engines: {node: ^18.0.0 || ^20.0.0 || >=22.0.0} hasBin: true peerDependencies: - '@types/node': ^18.0.0 || >=20.0.0 + '@types/node': ^18.0.0 || ^20.0.0 || >=22.0.0 + jiti: '>=1.21.0' less: '*' lightningcss: ^1.21.0 sass: '*' + sass-embedded: '*' stylus: '*' sugarss: '*' - terser: ^5.4.0 + terser: ^5.16.0 + tsx: ^4.8.1 + yaml: ^2.4.2 peerDependenciesMeta: '@types/node': optional: true + jiti: + optional: true less: optional: true lightningcss: 
optional: true sass: optional: true + sass-embedded: + optional: true stylus: optional: true sugarss: optional: true terser: optional: true + tsx: + optional: true + yaml: + optional: true dependencies: - esbuild: 0.20.2 - postcss: 8.4.38 - rollup: 4.18.0 + esbuild: 0.25.4 + fdir: 6.4.4(picomatch@4.0.2) + picomatch: 4.0.2 + postcss: 8.5.3 + rollup: 4.41.1 + sass: 1.89.0 + sass-embedded: 1.89.0 + tinyglobby: 0.2.14 optionalDependencies: fsevents: 2.3.3 dev: true - /vue-demi@0.14.8(vue@3.4.30): - resolution: {integrity: sha512-Uuqnk9YE9SsWeReYqK2alDI5YzciATE0r2SkA6iMAtuXvNTMNACJLJEXNXaEy94ECuBe4Sk6RzRU80kjdbIo1Q==} + /vscode-jsonrpc@8.2.0: + resolution: {integrity: sha512-C+r0eKJUIfiDIfwJhria30+TYWPtuHJXHtI7J0YlOmKAo7ogxP20T0zxB7HZQIFhIyvoBPwWskjxrvAtfjyZfA==} + engines: {node: '>=14.0.0'} + dev: true + + /vscode-languageserver-protocol@3.17.5: + resolution: {integrity: sha512-mb1bvRJN8SVznADSGWM9u/b07H7Ecg0I3OgXDuLdn307rl/J3A9YD6/eYOssqhecL27hK1IPZAsaqh00i/Jljg==} + dependencies: + vscode-jsonrpc: 8.2.0 + vscode-languageserver-types: 3.17.5 + dev: true + + /vscode-languageserver-textdocument@1.0.12: + resolution: {integrity: sha512-cxWNPesCnQCcMPeenjKKsOCKQZ/L6Tv19DTRIGuLWe32lyzWhihGVJ/rcckZXJxfdKCFvRLS3fpBIsV/ZGX4zA==} + dev: true + + /vscode-languageserver-types@3.17.5: + resolution: {integrity: sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg==} + dev: true + + /vscode-languageserver@9.0.1: + resolution: {integrity: sha512-woByF3PDpkHFUreUa7Hos7+pUWdeWMXRd26+ZX2A8cFx6v/JPTtd4/uN0/jB6XQHYaOlHbio03NTHCqrgG5n7g==} + hasBin: true + dependencies: + vscode-languageserver-protocol: 3.17.5 + dev: true + + /vscode-uri@3.0.8: + resolution: {integrity: sha512-AyFQ0EVmsOZOlAnxoFOGOq1SQDWAB7C6aqMGS23svWAllfOaxbuFvcT8D1i8z3Gyn8fraVeZNNmN6e9bxxXkKw==} + dev: true + + /vue-demi@0.14.10(vue@3.5.15): + resolution: {integrity: sha512-nMZBOwuzabUO0nLgIcc6rycZEebF6eeUfaiQx9+WSk8e29IbLvPU9feI6tqW4kTo3hvoYAJkMh8n8D0fuISphg==} engines: {node: '>=12'} hasBin: true - requiresBuild: true peerDependencies: '@vue/composition-api': ^1.0.0-rc.1 vue: ^3.0.0-0 || ^2.6.0 @@ -3264,44 +4647,46 @@ packages: '@vue/composition-api': optional: true dependencies: - vue: 3.4.30 + vue: 3.5.15 dev: true - /vue-router@4.4.0(vue@3.4.30): - resolution: {integrity: sha512-HB+t2p611aIZraV2aPSRNXf0Z/oLZFrlygJm+sZbdJaW6lcFqEDQwnzUBXn+DApw+/QzDU/I9TeWx9izEjTmsA==} + /vue-router@4.5.1(vue@3.5.15): + resolution: {integrity: sha512-ogAF3P97NPm8fJsE4by9dwSYtDwXIY1nFY9T6DyQnGHd1E2Da94w9JIolpe42LJGIl0DwOHBi8TcRPlPGwbTtw==} peerDependencies: vue: ^3.2.0 dependencies: - '@vue/devtools-api': 6.6.3 - vue: 3.4.30 + '@vue/devtools-api': 6.6.4 + vue: 3.5.15 dev: true - /vue@3.4.30: - resolution: {integrity: sha512-NcxtKCwkdf1zPsr7Y8+QlDBCGqxvjLXF2EX+yi76rV5rrz90Y6gK1cq0olIhdWGgrlhs9ElHuhi9t3+W5sG5Xw==} + /vue@3.5.15: + resolution: {integrity: sha512-aD9zK4rB43JAMK/5BmS4LdPiEp8Fdh8P1Ve/XNuMF5YRf78fCyPE6FUbQwcaWQ5oZ1R2CD9NKE0FFOVpMR7gEQ==} peerDependencies: typescript: '*' peerDependenciesMeta: typescript: optional: true dependencies: - '@vue/compiler-dom': 3.4.30 - '@vue/compiler-sfc': 3.4.30 - '@vue/runtime-dom': 3.4.30 - '@vue/server-renderer': 3.4.30(vue@3.4.30) - '@vue/shared': 3.4.30 + '@vue/compiler-dom': 3.5.15 + '@vue/compiler-sfc': 3.5.15 + '@vue/runtime-dom': 3.5.15 + '@vue/server-renderer': 3.5.15(vue@3.5.15) + '@vue/shared': 3.5.15 dev: true - /vuepress-plugin-components@2.0.0-rc.36(sass-loader@14.2.1)(vuepress@2.0.0-rc.9): - resolution: {integrity: 
sha512-JMIj+1VA+euB4TvmeUt0Fdm4inpAndzLgasadUd/C8j4Jj/99PtoTzBLR7Gxk6LaIYaR9IUsOTKPi3wnS0EOdg==} - engines: {node: '>=18.16.0', npm: '>=8', pnpm: '>=7', yarn: '>=2'} + /vuepress-plugin-components@2.0.0-rc.88(sass-embedded@1.89.0)(sass-loader@16.0.5)(sass@1.89.0)(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-TwcHHiCdx9g8VlUhRTcMvvPAw1dmtmKNnfs9XgK3ejLMzVkmxT5uEH9xqmkbDdIFo4oy/CYBBX9qSCXH5/Qx0A==} + engines: {node: '>= 20.6.0', npm: '>=8', pnpm: '>=7', yarn: '>=2'} peerDependencies: artplayer: ^5.0.0 dashjs: 4.7.4 hls.js: ^1.4.12 mpegts.js: ^1.7.3 - sass-loader: ^14.0.0 - vidstack: ^1.11.11 - vuepress: 2.0.0-rc.9 + sass: ^1.88.0 + sass-embedded: ^1.88.0 + sass-loader: ^16.0.5 + vidstack: ^1.12.9 + vuepress: 2.0.0-rc.23 peerDependenciesMeta: artplayer: optional: true @@ -3311,50 +4696,51 @@ packages: optional: true mpegts.js: optional: true + sass: + optional: true + sass-embedded: + optional: true sass-loader: optional: true vidstack: optional: true dependencies: - '@stackblitz/sdk': 1.10.0 - '@vuepress/helper': 2.0.0-rc.24(vuepress@2.0.0-rc.9) - '@vueuse/core': 10.11.0(vue@3.4.30) + '@stackblitz/sdk': 1.11.0 + '@vuepress/helper': 2.0.0-rc.106(vuepress@2.0.0-rc.23) + '@vuepress/plugin-sass-palette': 2.0.0-rc.104(sass-embedded@1.89.0)(sass-loader@16.0.5)(sass@1.89.0)(vuepress@2.0.0-rc.23) + '@vueuse/core': 13.2.0(vue@3.5.15) balloon-css: 1.2.0 - create-codepen: 1.0.1 - qrcode: 1.5.3 - sass-loader: 14.2.1 - vue: 3.4.30 - vuepress: 2.0.0-rc.9(@vuepress/bundler-vite@2.0.0-rc.9)(vue@3.4.30) - vuepress-plugin-sass-palette: 2.0.0-rc.36(sass-loader@14.2.1)(vuepress@2.0.0-rc.9) - vuepress-shared: 2.0.0-rc.36(vuepress@2.0.0-rc.9) + create-codepen: 2.0.0 + qrcode: 1.5.4 + sass: 1.89.0 + sass-embedded: 1.89.0 + sass-loader: 16.0.5(sass-embedded@1.89.0)(sass@1.89.0) + vue: 3.5.15 + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) + vuepress-shared: 2.0.0-rc.88(vuepress@2.0.0-rc.23) transitivePeerDependencies: - - '@vue/composition-api' - typescript dev: true - /vuepress-plugin-md-enhance@2.0.0-rc.36(chart.js@4.4.3)(markdown-it@14.1.0)(mermaid@10.9.1)(sass-loader@14.2.1)(vuepress@2.0.0-rc.9): - resolution: {integrity: sha512-FvQ4foaqsE13WEXN2TaBqtSEN3tKnkkuDX7daLbNPQ1z0ghaMgcCDJ+ehEwwcNgXSs1vlbTRYL/Tf4M4RC1nfA==} - engines: {node: '>=18.16.0', npm: '>=8', pnpm: '>=7', yarn: '>=2'} + /vuepress-plugin-md-enhance@2.0.0-rc.88(chart.js@4.4.9)(markdown-it@14.1.0)(mermaid@11.6.0)(sass-embedded@1.89.0)(sass-loader@16.0.5)(sass@1.89.0)(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-829rH7XUkRD9yYdnJxDmUleRx+mCXxO7A7xtQAXBlZbqX1dvB540iB1Uh8DYJCf8F/LmAmg7R2ftZjUqfhUy3Q==} + engines: {node: '>= 20.6.0', npm: '>=8', pnpm: '>=7', yarn: '>=2'} peerDependencies: - '@types/reveal.js': ^5.0.0 '@vue/repl': ^4.1.1 chart.js: ^4.0.0 echarts: ^5.0.0 - flowchart.ts: ^2.0.0 || ^3.0.0 - katex: ^0.16.0 + flowchart.ts: ^3.0.0 kotlin-playground: ^1.23.0 - markmap-lib: ^0.15.5 || ^0.16.0 - markmap-toolbar: ^0.15.5 || ^0.16.0 - markmap-view: ^0.15.5 || ^0.16.0 - mathjax-full: ^3.2.2 - mermaid: ^10.8.0 - reveal.js: ^5.0.0 + markmap-lib: ^0.18.5 + markmap-toolbar: ^0.18.5 + markmap-view: ^0.18.5 + mermaid: ^11.6.0 sandpack-vue3: ^3.0.0 - sass-loader: ^14.0.0 - vuepress: 2.0.0-rc.9 + sass: ^1.88.0 + sass-embedded: ^1.88.0 + sass-loader: ^16.0.5 + vuepress: 2.0.0-rc.23 peerDependenciesMeta: - '@types/reveal.js': - optional: true '@vue/repl': optional: true chart.js: @@ -3363,8 +4749,6 @@ packages: optional: true flowchart.ts: optional: true - katex: - optional: true kotlin-playground: optional: true 
markmap-lib: @@ -3373,190 +4757,146 @@ packages: optional: true markmap-view: optional: true - mathjax-full: - optional: true mermaid: optional: true - reveal.js: - optional: true sandpack-vue3: optional: true + sass: + optional: true + sass-embedded: + optional: true sass-loader: optional: true dependencies: - '@mdit/plugin-alert': 0.8.0(markdown-it@14.1.0) - '@mdit/plugin-align': 0.8.0(markdown-it@14.1.0) - '@mdit/plugin-attrs': 0.8.0(markdown-it@14.1.0) - '@mdit/plugin-container': 0.8.0(markdown-it@14.1.0) - '@mdit/plugin-demo': 0.8.0(markdown-it@14.1.0) - '@mdit/plugin-figure': 0.8.0(markdown-it@14.1.0) - '@mdit/plugin-footnote': 0.8.0(markdown-it@14.1.0) - '@mdit/plugin-img-lazyload': 0.8.0(markdown-it@14.1.0) - '@mdit/plugin-img-mark': 0.8.0(markdown-it@14.1.0) - '@mdit/plugin-img-size': 0.8.0(markdown-it@14.1.0) - '@mdit/plugin-include': 0.8.0(markdown-it@14.1.0) - '@mdit/plugin-katex': 0.8.0(markdown-it@14.1.0) - '@mdit/plugin-mark': 0.8.0(markdown-it@14.1.0) - '@mdit/plugin-mathjax': 0.8.0(markdown-it@14.1.0) - '@mdit/plugin-stylize': 0.8.0(markdown-it@14.1.0) - '@mdit/plugin-sub': 0.8.0(markdown-it@14.1.0) - '@mdit/plugin-sup': 0.8.0(markdown-it@14.1.0) - '@mdit/plugin-tab': 0.8.0(markdown-it@14.1.0) - '@mdit/plugin-tasklist': 0.8.0(markdown-it@14.1.0) - '@mdit/plugin-tex': 0.8.0(markdown-it@14.1.0) - '@mdit/plugin-uml': 0.8.0(markdown-it@14.1.0) - '@types/markdown-it': 14.1.1 - '@vuepress/helper': 2.0.0-rc.24(vuepress@2.0.0-rc.9) - '@vueuse/core': 10.11.0(vue@3.4.30) + '@mdit/plugin-container': 0.18.0(markdown-it@14.1.0) + '@mdit/plugin-demo': 0.18.0(markdown-it@14.1.0) + '@mdit/plugin-plantuml': 0.18.0(markdown-it@14.1.0) + '@mdit/plugin-uml': 0.18.0(markdown-it@14.1.0) + '@types/markdown-it': 14.1.2 + '@vuepress/helper': 2.0.0-rc.106(vuepress@2.0.0-rc.23) + '@vuepress/plugin-sass-palette': 2.0.0-rc.104(sass-embedded@1.89.0)(sass-loader@16.0.5)(sass@1.89.0)(vuepress@2.0.0-rc.23) + '@vueuse/core': 13.2.0(vue@3.5.15) balloon-css: 1.2.0 - chart.js: 4.4.3 + chart.js: 4.4.9 js-yaml: 4.1.0 - mermaid: 10.9.1 - sass-loader: 14.2.1 - vue: 3.4.30 - vuepress: 2.0.0-rc.9(@vuepress/bundler-vite@2.0.0-rc.9)(vue@3.4.30) - vuepress-plugin-sass-palette: 2.0.0-rc.36(sass-loader@14.2.1)(vuepress@2.0.0-rc.9) - vuepress-shared: 2.0.0-rc.36(vuepress@2.0.0-rc.9) + mermaid: 11.6.0 + sass: 1.89.0 + sass-embedded: 1.89.0 + sass-loader: 16.0.5(sass-embedded@1.89.0)(sass@1.89.0) + vue: 3.5.15 + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) + vuepress-shared: 2.0.0-rc.88(vuepress@2.0.0-rc.23) transitivePeerDependencies: - - '@vue/composition-api' - markdown-it - typescript dev: true - /vuepress-plugin-sass-palette@2.0.0-rc.36(sass-loader@14.2.1)(vuepress@2.0.0-rc.9): - resolution: {integrity: sha512-kOfZHGZxfplVq/z7QtHmsrKfOlR6/s37QA/DilIYGFzj8XVL8h1eJ0ty7J1ySTZFVvDkK7r3TVVQZ2sPIEjbYQ==} - engines: {node: '>=18.16.0', npm: '>=8', pnpm: '>=7', yarn: '>=2'} - peerDependencies: - sass-loader: ^14.0.0 - vuepress: 2.0.0-rc.9 - peerDependenciesMeta: - sass-loader: - optional: true - dependencies: - '@vuepress/helper': 2.0.0-rc.24(vuepress@2.0.0-rc.9) - chokidar: 3.6.0 - sass: 1.77.6 - sass-loader: 14.2.1 - vuepress: 2.0.0-rc.9(@vuepress/bundler-vite@2.0.0-rc.9)(vue@3.4.30) - vuepress-shared: 2.0.0-rc.36(vuepress@2.0.0-rc.9) - transitivePeerDependencies: - - '@vue/composition-api' - - typescript - dev: true - - /vuepress-plugin-search-pro@2.0.0-rc.36(sass-loader@14.2.1)(vuepress@2.0.0-rc.9): - resolution: {integrity: 
sha512-xumvvm0/V7WrWJXMs6ZVqFjjN6h4oIEm6YrBpeteBpbG2iUkuF9E2xhVPwhrV/dYzCSkKpR7bA31qp1CngWknA==} - engines: {node: '>=18.16.0', npm: '>=8', pnpm: '>=7', yarn: '>=2'} - peerDependencies: - sass-loader: ^14.0.0 - vuepress: 2.0.0-rc.9 - peerDependenciesMeta: - sass-loader: - optional: true - dependencies: - '@vuepress/helper': 2.0.0-rc.24(vuepress@2.0.0-rc.9) - '@vueuse/core': 10.11.0(vue@3.4.30) - cheerio: 1.0.0-rc.12 - chokidar: 3.6.0 - sass-loader: 14.2.1 - slimsearch: 2.1.1 - vue: 3.4.30 - vuepress: 2.0.0-rc.9(@vuepress/bundler-vite@2.0.0-rc.9)(vue@3.4.30) - vuepress-plugin-sass-palette: 2.0.0-rc.36(sass-loader@14.2.1)(vuepress@2.0.0-rc.9) - vuepress-shared: 2.0.0-rc.36(vuepress@2.0.0-rc.9) - transitivePeerDependencies: - - '@vue/composition-api' - - typescript - dev: true - - /vuepress-shared@2.0.0-rc.36(vuepress@2.0.0-rc.9): - resolution: {integrity: sha512-a4XLodJk5U8qeon7jNqsyLGNUgOAdVr8YBZD7E9BGiu84+S3P6e5SCJLCLQ17v37jwdLoYwT2Fv67k586GAx7w==} - engines: {node: '>=18.16.0', npm: '>=8', pnpm: '>=7', yarn: '>=2'} + /vuepress-shared@2.0.0-rc.88(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-BbNs0idGkZIOuuRtH111Y4qDX0Rah4LBCyMGlMj5rPHtnSmrMXHzjZlevkzwpSsPTLVoOuYvreFuocI1bLl6qA==} + engines: {node: '>= 20.6.0', npm: '>=8', pnpm: '>=7', yarn: '>=2'} peerDependencies: - vuepress: 2.0.0-rc.9 + vuepress: 2.0.0-rc.23 dependencies: - '@vuepress/helper': 2.0.0-rc.24(vuepress@2.0.0-rc.9) - '@vueuse/core': 10.11.0(vue@3.4.30) - cheerio: 1.0.0-rc.12 - dayjs: 1.11.11 - execa: 8.0.1 - fflate: 0.8.2 - gray-matter: 4.0.3 - semver: 7.6.2 - vue: 3.4.30 - vuepress: 2.0.0-rc.9(@vuepress/bundler-vite@2.0.0-rc.9)(vue@3.4.30) + '@vuepress/helper': 2.0.0-rc.106(vuepress@2.0.0-rc.23) + '@vueuse/core': 13.2.0(vue@3.5.15) + vue: 3.5.15 + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) transitivePeerDependencies: - - '@vue/composition-api' - typescript dev: true - /vuepress-theme-hope@2.0.0-rc.36(chart.js@4.4.3)(markdown-it@14.1.0)(mermaid@10.9.1)(sass-loader@14.2.1)(vuepress-plugin-search-pro@2.0.0-rc.36)(vuepress@2.0.0-rc.9): - resolution: {integrity: sha512-kMCmlwPw4S0P0LYnAulaT98L3Z2jD+A4bgQ93rfuCVxmfCJEUs99V6ms9iu/GZ8CqJC10anDqwfJ+SjHCs0V8g==} - engines: {node: '>=18.16.0', npm: '>=8', pnpm: '>=7', yarn: '>=2'} + /vuepress-theme-hope@2.0.0-rc.88(@vuepress/plugin-slimsearch@2.0.0-rc.106)(chart.js@4.4.9)(markdown-it@14.1.0)(mermaid@11.6.0)(sass-embedded@1.89.0)(sass-loader@16.0.5)(sass@1.89.0)(vuepress@2.0.0-rc.23): + resolution: {integrity: sha512-XV33tgx84TuowHqxnYgG+rusqpdqbznPqDGbHrzOzJ+DMqGDHkVa6pejnUshpNryPSsHlNXtGLoqKLNQf/YVNw==} + engines: {node: '>= 20.6.0', npm: '>=8', pnpm: '>=7', yarn: '>=2'} peerDependencies: - '@vuepress/plugin-docsearch': 2.0.0-rc.24 - '@vuepress/plugin-feed': 2.0.0-rc.24 - '@vuepress/plugin-pwa': 2.0.0-rc.24 - '@vuepress/plugin-redirect': 2.0.0-rc.24 - '@vuepress/plugin-search': 2.0.0-rc.24 - nodejs-jieba: ^0.1.2 - sass-loader: ^14.0.0 - vuepress: 2.0.0-rc.9 - vuepress-plugin-search-pro: 2.0.0-rc.36 + '@vuepress/plugin-docsearch': 2.0.0-rc.104 + '@vuepress/plugin-feed': 2.0.0-rc.104 + '@vuepress/plugin-meilisearch': 2.0.0-rc.104 + '@vuepress/plugin-prismjs': 2.0.0-rc.104 + '@vuepress/plugin-pwa': 2.0.0-rc.104 + '@vuepress/plugin-revealjs': 2.0.0-rc.104 + '@vuepress/plugin-search': 2.0.0-rc.104 + '@vuepress/plugin-slimsearch': 2.0.0-rc.104 + '@vuepress/plugin-watermark': 2.0.0-rc.104 + '@vuepress/shiki-twoslash': 2.0.0-rc.104 + nodejs-jieba: ^0.2.1 || ^0.3.0 + sass: ^1.88.0 + sass-embedded: ^1.88.0 + sass-loader: ^16.0.5 + vuepress: 2.0.0-rc.23 
peerDependenciesMeta: '@vuepress/plugin-docsearch': optional: true '@vuepress/plugin-feed': optional: true + '@vuepress/plugin-meilisearch': + optional: true + '@vuepress/plugin-prismjs': + optional: true '@vuepress/plugin-pwa': optional: true - '@vuepress/plugin-redirect': + '@vuepress/plugin-revealjs': optional: true '@vuepress/plugin-search': optional: true + '@vuepress/plugin-slimsearch': + optional: true + '@vuepress/plugin-watermark': + optional: true + '@vuepress/shiki-twoslash': + optional: true nodejs-jieba: optional: true + sass: + optional: true + sass-embedded: + optional: true sass-loader: optional: true - vuepress-plugin-search-pro: - optional: true - dependencies: - '@vuepress/helper': 2.0.0-rc.24(vuepress@2.0.0-rc.9) - '@vuepress/plugin-active-header-links': 2.0.0-rc.21(vuepress@2.0.0-rc.9) - '@vuepress/plugin-back-to-top': 2.0.0-rc.24(vuepress@2.0.0-rc.9) - '@vuepress/plugin-blog': 2.0.0-rc.24(vuepress@2.0.0-rc.9) - '@vuepress/plugin-catalog': 2.0.0-rc.24(vuepress@2.0.0-rc.9) - '@vuepress/plugin-comment': 2.0.0-rc.24(sass-loader@14.2.1)(vuepress@2.0.0-rc.9) - '@vuepress/plugin-copy-code': 2.0.0-rc.24(vuepress@2.0.0-rc.9) - '@vuepress/plugin-copyright': 2.0.0-rc.24(vuepress@2.0.0-rc.9) - '@vuepress/plugin-external-link-icon': 2.0.0-rc.24(vuepress@2.0.0-rc.9) - '@vuepress/plugin-git': 2.0.0-rc.22(vuepress@2.0.0-rc.9) - '@vuepress/plugin-links-check': 2.0.0-rc.24(vuepress@2.0.0-rc.9) - '@vuepress/plugin-nprogress': 2.0.0-rc.21(vuepress@2.0.0-rc.9) - '@vuepress/plugin-photo-swipe': 2.0.0-rc.24(vuepress@2.0.0-rc.9) - '@vuepress/plugin-prismjs': 2.0.0-rc.21(vuepress@2.0.0-rc.9) - '@vuepress/plugin-reading-time': 2.0.0-rc.24(vuepress@2.0.0-rc.9) - '@vuepress/plugin-rtl': 2.0.0-rc.21(vuepress@2.0.0-rc.9) - '@vuepress/plugin-seo': 2.0.0-rc.24(vuepress@2.0.0-rc.9) - '@vuepress/plugin-sitemap': 2.0.0-rc.24(vuepress@2.0.0-rc.9) - '@vuepress/plugin-theme-data': 2.0.0-rc.21(vuepress@2.0.0-rc.9) - '@vueuse/core': 10.11.0(vue@3.4.30) + dependencies: + '@vuepress/helper': 2.0.0-rc.106(vuepress@2.0.0-rc.23) + '@vuepress/plugin-active-header-links': 2.0.0-rc.103(vuepress@2.0.0-rc.23) + '@vuepress/plugin-back-to-top': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + '@vuepress/plugin-blog': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + '@vuepress/plugin-catalog': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + '@vuepress/plugin-comment': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + '@vuepress/plugin-copy-code': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + '@vuepress/plugin-copyright': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + '@vuepress/plugin-git': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + '@vuepress/plugin-icon': 2.0.0-rc.104(markdown-it@14.1.0)(vuepress@2.0.0-rc.23) + '@vuepress/plugin-links-check': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + '@vuepress/plugin-markdown-ext': 2.0.0-rc.104(markdown-it@14.1.0)(vuepress@2.0.0-rc.23) + '@vuepress/plugin-markdown-hint': 2.0.0-rc.104(markdown-it@14.1.0)(vue@3.5.15)(vuepress@2.0.0-rc.23) + '@vuepress/plugin-markdown-image': 2.0.0-rc.104(markdown-it@14.1.0)(vuepress@2.0.0-rc.23) + '@vuepress/plugin-markdown-include': 2.0.0-rc.104(markdown-it@14.1.0)(vuepress@2.0.0-rc.23) + '@vuepress/plugin-markdown-math': 2.0.0-rc.104(markdown-it@14.1.0)(vuepress@2.0.0-rc.23) + '@vuepress/plugin-markdown-stylize': 2.0.0-rc.104(markdown-it@14.1.0)(vuepress@2.0.0-rc.23) + '@vuepress/plugin-markdown-tab': 2.0.0-rc.104(markdown-it@14.1.0)(vuepress@2.0.0-rc.23) + '@vuepress/plugin-notice': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + '@vuepress/plugin-nprogress': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + '@vuepress/plugin-photo-swipe': 
2.0.0-rc.104(vuepress@2.0.0-rc.23) + '@vuepress/plugin-reading-time': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + '@vuepress/plugin-redirect': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + '@vuepress/plugin-rtl': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + '@vuepress/plugin-sass-palette': 2.0.0-rc.104(sass-embedded@1.89.0)(sass-loader@16.0.5)(sass@1.89.0)(vuepress@2.0.0-rc.23) + '@vuepress/plugin-seo': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + '@vuepress/plugin-shiki': 2.0.0-rc.104(@vueuse/core@13.2.0)(vuepress@2.0.0-rc.23) + '@vuepress/plugin-sitemap': 2.0.0-rc.104(vuepress@2.0.0-rc.23) + '@vuepress/plugin-slimsearch': 2.0.0-rc.106(vuepress@2.0.0-rc.23) + '@vuepress/plugin-theme-data': 2.0.0-rc.103(vuepress@2.0.0-rc.23) + '@vueuse/core': 13.2.0(vue@3.5.15) balloon-css: 1.2.0 - bcrypt-ts: 5.0.2 - cheerio: 1.0.0-rc.12 + bcrypt-ts: 7.0.0 chokidar: 3.6.0 - gray-matter: 4.0.3 - sass-loader: 14.2.1 - vue: 3.4.30 - vuepress: 2.0.0-rc.9(@vuepress/bundler-vite@2.0.0-rc.9)(vue@3.4.30) - vuepress-plugin-components: 2.0.0-rc.36(sass-loader@14.2.1)(vuepress@2.0.0-rc.9) - vuepress-plugin-md-enhance: 2.0.0-rc.36(chart.js@4.4.3)(markdown-it@14.1.0)(mermaid@10.9.1)(sass-loader@14.2.1)(vuepress@2.0.0-rc.9) - vuepress-plugin-sass-palette: 2.0.0-rc.36(sass-loader@14.2.1)(vuepress@2.0.0-rc.9) - vuepress-plugin-search-pro: 2.0.0-rc.36(sass-loader@14.2.1)(vuepress@2.0.0-rc.9) - vuepress-shared: 2.0.0-rc.36(vuepress@2.0.0-rc.9) + sass: 1.89.0 + sass-embedded: 1.89.0 + sass-loader: 16.0.5(sass-embedded@1.89.0)(sass@1.89.0) + vue: 3.5.15 + vuepress: 2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15) + vuepress-plugin-components: 2.0.0-rc.88(sass-embedded@1.89.0)(sass-loader@16.0.5)(sass@1.89.0)(vuepress@2.0.0-rc.23) + vuepress-plugin-md-enhance: 2.0.0-rc.88(chart.js@4.4.9)(markdown-it@14.1.0)(mermaid@11.6.0)(sass-embedded@1.89.0)(sass-loader@16.0.5)(sass@1.89.0)(vuepress@2.0.0-rc.23) + vuepress-shared: 2.0.0-rc.88(vuepress@2.0.0-rc.23) transitivePeerDependencies: - - '@types/reveal.js' - - '@vue/composition-api' - '@vue/repl' - '@waline/client' - artalk @@ -3575,54 +4915,57 @@ packages: - mathjax-full - mermaid - mpegts.js - - reveal.js - sandpack-vue3 - twikoo - typescript - vidstack dev: true - /vuepress@2.0.0-rc.9(@vuepress/bundler-vite@2.0.0-rc.9)(vue@3.4.30): - resolution: {integrity: sha512-jT1ln2lawdph+vVI6n2JfEUhQIcyc1RQWDdQu9DffhJGywJunFcumnUJudpqd1SNIES2Fz1hVCD6gdrE/rVKOQ==} - engines: {node: '>=18.16.0'} + /vuepress@2.0.0-rc.23(@vuepress/bundler-vite@2.0.0-rc.23)(vue@3.5.15): + resolution: {integrity: sha512-XID/zr7qDGLg7oYGwDTZpWRNXCVQcI1wQTfkN0spyumV2EpHe7XBsmnwICd+dTqRNZuD+JHyJsYLEqDEszFObw==} + engines: {node: ^18.19.0 || >=20.4.0} hasBin: true peerDependencies: - '@vuepress/bundler-vite': 2.0.0-rc.9 - '@vuepress/bundler-webpack': 2.0.0-rc.9 - vue: ^3.4.0 + '@vuepress/bundler-vite': 2.0.0-rc.23 + '@vuepress/bundler-webpack': 2.0.0-rc.23 + vue: ^3.5.13 peerDependenciesMeta: '@vuepress/bundler-vite': optional: true '@vuepress/bundler-webpack': optional: true dependencies: - '@vuepress/bundler-vite': 2.0.0-rc.9 - '@vuepress/cli': 2.0.0-rc.9 - '@vuepress/client': 2.0.0-rc.9 - '@vuepress/core': 2.0.0-rc.9 - '@vuepress/markdown': 2.0.0-rc.9 - '@vuepress/shared': 2.0.0-rc.9 - '@vuepress/utils': 2.0.0-rc.9 - vue: 3.4.30 + '@vuepress/bundler-vite': 2.0.0-rc.23(sass-embedded@1.89.0)(sass@1.89.0) + '@vuepress/cli': 2.0.0-rc.23 + '@vuepress/client': 2.0.0-rc.23 + '@vuepress/core': 2.0.0-rc.23 + '@vuepress/markdown': 2.0.0-rc.23 + '@vuepress/shared': 2.0.0-rc.23 + '@vuepress/utils': 2.0.0-rc.23 + vue: 3.5.15 
transitivePeerDependencies: - supports-color - typescript dev: true - /web-worker@1.3.0: - resolution: {integrity: sha512-BSR9wyRsy/KOValMgd5kMyr3JzpdeoR9KVId8u5GVlTTAtNChlsE4yTxeY7zMdNSyOmoKBv8NH2qeRY9Tg+IaA==} + /web-namespaces@2.0.1: + resolution: {integrity: sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ==} dev: true - /which-module@2.0.1: - resolution: {integrity: sha512-iBdZ57RDvnOR9AGBhML2vFZf7h8vmBjhoaZqODJBFWHVtKkDmKuHai3cx5PgVMrX5YDNp27AofYbAwctSS+vhQ==} + /whatwg-encoding@3.1.1: + resolution: {integrity: sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ==} + engines: {node: '>=18'} + dependencies: + iconv-lite: 0.6.3 dev: true - /which@2.0.2: - resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} - engines: {node: '>= 8'} - hasBin: true - dependencies: - isexe: 2.0.0 + /whatwg-mimetype@4.0.0: + resolution: {integrity: sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg==} + engines: {node: '>=18'} + dev: true + + /which-module@2.0.1: + resolution: {integrity: sha512-iBdZ57RDvnOR9AGBhML2vFZf7h8vmBjhoaZqODJBFWHVtKkDmKuHai3cx5PgVMrX5YDNp27AofYbAwctSS+vhQ==} dev: true /wrap-ansi@6.2.0: @@ -3638,12 +4981,6 @@ packages: resolution: {integrity: sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==} dev: true - /yaml@2.4.5: - resolution: {integrity: sha512-aBx2bnqDzVOyNKfsysjA2ms5ZlnjSAW2eG3/L5G/CSujfjLJTJsEw1bGw8kCf04KodQWk1pxlGnZ56CRxiawmg==} - engines: {node: '>= 14'} - hasBin: true - dev: true - /yargs-parser@18.1.3: resolution: {integrity: sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==} engines: {node: '>=6'} @@ -3668,3 +5005,7 @@ packages: y18n: 4.0.3 yargs-parser: 18.1.3 dev: true + + /zwitch@2.0.4: + resolution: {integrity: sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==} + dev: true diff --git a/python/psqlpy/_internal/__init__.pyi b/python/psqlpy/_internal/__init__.pyi index 10ac499e..73011b45 100644 --- a/python/psqlpy/_internal/__init__.pyi +++ b/python/psqlpy/_internal/__init__.pyi @@ -341,6 +341,26 @@ class Transaction: `commit()` can be called only once per transaction. """ + async def rollback(self: Self) -> None: + """Rollback all queries in the transaction. + + It can be done only one, after execution transaction marked + as `done`. + + ### Example: + ```python + import asyncio + + from psqlpy import PSQLPool, QueryResult + + async def main() -> None: + db_pool = PSQLPool() + connection = await db_pool.connection() + transaction = connection.transaction() + await transaction.execute(...) + await transaction.rollback() + ``` + """ async def execute( self: Self, querystring: str, @@ -622,26 +642,6 @@ class Transaction: await transaction.rollback_savepoint("my_savepoint") ``` """ - async def rollback(self: Self) -> None: - """Rollback all queries in the transaction. - - It can be done only one, after execution transaction marked - as `done`. - - ### Example: - ```python - import asyncio - - from psqlpy import PSQLPool, QueryResult - - async def main() -> None: - db_pool = PSQLPool() - connection = await db_pool.connection() - transaction = connection.transaction() - await transaction.execute(...) 
- await transaction.rollback() - ``` - """ async def rollback_savepoint(self: Self, savepoint_name: str) -> None: """ROLLBACK to the specified `savepoint_name`. @@ -696,8 +696,6 @@ class Transaction: querystring: str, parameters: ParamsT = None, fetch_number: int | None = None, - scroll: bool | None = None, - prepared: bool = True, ) -> Cursor: """Create new cursor object. @@ -707,9 +705,6 @@ class Transaction: - `querystring`: querystring to execute. - `parameters`: list of parameters to pass in the query. - `fetch_number`: how many rows need to fetch. - - `scroll`: SCROLL or NO SCROLL cursor. - - `prepared`: should the querystring be prepared before the request. - By default any querystring will be prepared. ### Returns: new initialized cursor. @@ -811,6 +806,25 @@ class Connection: exception: BaseException | None, traceback: types.TracebackType | None, ) -> None: ... + async def prepare( + self, + querystring: str, + parameters: ParamsT = None, + ) -> PreparedStatement: + """Prepare statement. + + Return representation of prepared statement. + """ + async def commit(self: Self) -> None: + """Commit the transaction. + + Do nothing if there is no active transaction. + """ + async def rollback(self: Self) -> None: + """Rollback the transaction. + + Do nothing if there is no active transaction. + """ async def execute( self: Self, querystring: str, @@ -1017,9 +1031,6 @@ class Connection: - `querystring`: querystring to execute. - `parameters`: list of parameters to pass in the query. - `fetch_number`: how many rows need to fetch. - - `scroll`: SCROLL or NO SCROLL cursor. - - `prepared`: should the querystring be prepared before the request. - By default any querystring will be prepared. ### Returns: new initialized cursor. @@ -1050,6 +1061,7 @@ class Connection: It necessary to commit all transactions and close all cursor made by this connection. Otherwise, it won't have any practical usage. """ + async def binary_copy_to_table( self: Self, source: bytes | bytearray | Buffer | BytesIO, @@ -1759,3 +1771,15 @@ class ListenerNotificationMsg: channel: str payload: str connection: Connection + +class Column: + name: str + table_oid: int | None + +class PreparedStatement: + async def execute(self: Self) -> QueryResult: + """Execute prepared statement.""" + def cursor(self: Self) -> Cursor: + """Create new server-side cursor based on prepared statement.""" + def columns(self: Self) -> list[Column]: + """Return information about statement columns.""" diff --git a/src/driver/common.rs b/src/driver/common.rs index ac447184..bab84049 100644 --- a/src/driver/common.rs +++ b/src/driver/common.rs @@ -2,7 +2,10 @@ use tokio_postgres::config::Host; use std::net::IpAddr; -use super::{connection::Connection, cursor::Cursor, transaction::Transaction}; +use super::{ + connection::Connection, cursor::Cursor, prepared_statement::PreparedStatement, + transaction::Transaction, +}; use pyo3::{pymethods, Py, PyAny}; @@ -125,11 +128,11 @@ impl_is_closed_method!(Transaction); impl_is_closed_method!(Connection); impl_is_closed_method!(Cursor); -macro_rules! impl_portal_method { +macro_rules! impl_cursor_method { ($name:ident) => { #[pymethods] impl $name { - #[pyo3(signature = (querystring, parameters=None, fetch_number=None))] + #[pyo3(signature = (querystring=None, parameters=None, fetch_number=None))] pub fn cursor( &self, querystring: Option, @@ -149,8 +152,40 @@ macro_rules! 
impl_portal_method { }; } -impl_portal_method!(Transaction); -impl_portal_method!(Connection); +impl_cursor_method!(Transaction); +impl_cursor_method!(Connection); + +macro_rules! impl_prepare_method { + ($name:ident) => { + #[pymethods] + impl $name { + #[pyo3(signature = (querystring, parameters=None))] + pub async fn prepare( + &self, + querystring: String, + parameters: Option>, + ) -> PSQLPyResult { + let Some(conn) = &self.conn else { + return Err(RustPSQLDriverError::ConnectionClosedError); + }; + + let read_conn_g = conn.read().await; + let prep_stmt = read_conn_g + .prepare_statement(querystring, parameters) + .await?; + + Ok(PreparedStatement::new( + self.conn.clone(), + self.pg_config.clone(), + prep_stmt, + )) + } + } + }; +} + +impl_prepare_method!(Transaction); +impl_prepare_method!(Connection); macro_rules! impl_transaction_methods { ($name:ident, $val:expr $(,)?) => { @@ -189,7 +224,6 @@ macro_rules! impl_transaction_methods { }; } -impl_transaction_methods!(Connection, false); impl_transaction_methods!(Transaction, true); macro_rules! impl_binary_copy_method { diff --git a/src/driver/connection.rs b/src/driver/connection.rs index 3486141d..e9972e58 100644 --- a/src/driver/connection.rs +++ b/src/driver/connection.rs @@ -15,9 +15,7 @@ use crate::{ runtime::tokio_runtime, }; -use super::{ - connection_pool::connect_pool, prepared_statement::PreparedStatement, transaction::Transaction, -}; +use super::{connection_pool::connect_pool, transaction::Transaction}; /// Make new connection pool. /// @@ -416,31 +414,6 @@ impl Connection { )) } - #[pyo3(signature = ( - querystring, - parameters=None, - ))] - pub async fn prepare( - &self, - querystring: String, - parameters: Option>, - ) -> PSQLPyResult { - let Some(conn) = &self.conn else { - return Err(RustPSQLDriverError::ConnectionClosedError); - }; - - let read_conn_g = conn.read().await; - let prep_stmt = read_conn_g - .prepare_statement(querystring, parameters) - .await?; - - Ok(PreparedStatement::new( - self.conn.clone(), - self.pg_config.clone(), - prep_stmt, - )) - } - #[allow(clippy::needless_pass_by_value)] pub fn close(self_: pyo3::Py) { pyo3::Python::with_gil(|gil| { diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 00000000..6cf86fe1 --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,14 @@ +{ + "compilerOptions": { + "module": "NodeNext", + "moduleResolution": "NodeNext", + "target": "ES2022" + }, + "include": [ + "docs/.vuepress/**/*.ts", + "docs/.vuepress/**/*.vue" + ], + "exclude": [ + "node_modules" + ] +} From 1619e6e3f9cec337b3d10e2b4258372298fe031c Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Tue, 27 May 2025 00:04:35 +0200 Subject: [PATCH 59/65] Updated docs --- .github/workflows/deploy-docs.yml | 50 ------------------------------- 1 file changed, 50 deletions(-) delete mode 100644 .github/workflows/deploy-docs.yml diff --git a/.github/workflows/deploy-docs.yml b/.github/workflows/deploy-docs.yml deleted file mode 100644 index 62be81ec..00000000 --- a/.github/workflows/deploy-docs.yml +++ /dev/null @@ -1,50 +0,0 @@ - -name: Deploy Docs - -on: - push: - branches: - - support_dbapi - -permissions: - contents: write - -jobs: - deploy-gh-pages: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - # if your docs needs submodules, uncomment the following line - # submodules: true - - - name: Setup pnpm - uses: pnpm/action-setup@v4 - - - - name: Setup Node.js - uses: actions/setup-node@v4 - with: - node-version: 22 - cache: 
pnpm - - - name: Install Deps - run: | - corepack enable - pnpm install --frozen-lockfile - - - name: Build Docs - env: - NODE_OPTIONS: --max_old_space_size=8192 - run: |- - pnpm run docs:build - > docs/.vuepress/dist/.nojekyll - - - name: Deploy Docs - uses: JamesIves/github-pages-deploy-action@v4 - with: - # Deploy Docs - branch: gh-pages - folder: docs/.vuepress/dist From c682a02e8fd199d230dbc4d037ebcc865319de2d Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Tue, 27 May 2025 20:46:35 +0200 Subject: [PATCH 60/65] Added more documentation --- Cargo.toml | 9 ++- docs/.vuepress/sidebar.ts | 18 ++--- docs/README.md | 8 +-- docs/components/cursor.md | 112 ++++++++++++++++++++++++------ docs/components/transaction.md | 2 - docs/introduction/introduction.md | 10 +-- src/connection/traits.rs | 36 ---------- 7 files changed, 115 insertions(+), 80 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 68160a6a..e6dc160d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -10,7 +10,14 @@ crate-type = ["cdylib"] [dependencies] deadpool-postgres = { git = "https://github.com/chandr-andr/deadpool.git", branch = "psqlpy" } -pyo3 = { version = "0.23.4", features = ["chrono", "experimental-async", "rust_decimal", "py-clone", "macros", "multiple-pymethods"] } +pyo3 = { version = "0.23.4", features = [ + "chrono", + "experimental-async", + "rust_decimal", + "py-clone", + "macros", + "multiple-pymethods", +] } pyo3-async-runtimes = { git = "https://github.com/chandr-andr/pyo3-async-runtimes.git", branch = "psqlpy", features = [ "tokio-runtime", ] } diff --git a/docs/.vuepress/sidebar.ts b/docs/.vuepress/sidebar.ts index 9fc80456..7a03377d 100644 --- a/docs/.vuepress/sidebar.ts +++ b/docs/.vuepress/sidebar.ts @@ -46,6 +46,15 @@ export default sidebar({ "advanced_type_usage", ] }, + { + text: "Row Factories Usage", + prefix: "row_factories/", + collapsible: true, + children: [ + "row_factories", + "predefined_row_factories", + ] + }, { text: "Frameworks Usage", prefix: "frameworks/", @@ -58,15 +67,6 @@ export default sidebar({ "robyn", ] }, - { - text: "Row Factories Usage", - prefix: "row_factories/", - collapsible: true, - children: [ - "row_factories", - "predefined_row_factories", - ] - }, ], }, { diff --git a/docs/README.md b/docs/README.md index d2dece04..00c1b642 100644 --- a/docs/README.md +++ b/docs/README.md @@ -28,10 +28,9 @@ highlights: details: PSQLPy is under active development. --- ## What is PSQLPy -`PSQLPy` is a new Python driver for `PostgreSQL` fully written in Rust. It was inspired by `Psycopg3` and `AsyncPG`. +`PSQLPy` is a Python driver for `PostgreSQL` fully written in Rust. It was inspired by `Psycopg3` and `AsyncPG`. This project has two main goals: -Make a interaction with the database as fast as possible and now `PSQLPy` shows itself to be times faster than the above drivers. -Don't make useless abstractions and make it like a mirror to `PostgreSQL`. +We found that communication with the database can be faster and safer, so we tried to implement a new PostgreSQL driver in Rust for Python. It has all necessary components to create high-load and fault tolerance applications. @@ -59,4 +58,5 @@ pip install git+https://github.com/psqlpy-python/psqlpy ## Join community! 
You can get support from the creators and users of `PSQLPy` in some social media: -- [Telegram](https://t.me/+f3Y8mYKgXxhmYThi) \ No newline at end of file +- [Telegram](https://t.me/+f3Y8mYKgXxhmYThi) +- [Discord](https://discord.gg/ugNhzmhZ) diff --git a/docs/components/cursor.md b/docs/components/cursor.md index ca931a58..eed81560 100644 --- a/docs/components/cursor.md +++ b/docs/components/cursor.md @@ -3,44 +3,71 @@ title: Cursor --- `Cursor` objects represents server-side `Cursor` in the `PostgreSQL`. [PostgreSQL docs](https://www.postgresql.org/docs/current/plpgsql-cursors.html). +::: important +Cursor always lives inside a transaction. If you don't begin transaction explicitly, it will be opened anyway. +::: ## Cursor Parameters - `querystring`: specify query for cursor. - `parameters`: parameters for the querystring. Default `None` -- `fetch_number`: default fetch number. It is used in `fetch()` method and in async iterator. Default 10 -- `scroll`: is cursor scrollable or not. Default as in `PostgreSQL`. +- `fetch_number`: default fetch number. It is used in `fetch()` method and in async iterator. -## Cursor as async iterator +## Usage -The most common situation is using `Cursor` as async iterator. +Cursor can be used in different ways. +::: tabs +@tab Pre-Initialization ```python from psqlpy import ConnectionPool, QueryResult - async def main() -> None: db_pool = ConnectionPool() + connection = await db_pool.connection() + cursor = connection.cursor( + querystring="SELECT * FROM users WHERE id > $1", + parameters=[100], + fetch_number=10, + ) + await cursor.start() +``` +@tab Post-Initialization +```python +from psqlpy import ConnectionPool, QueryResult + +async def main() -> None: + db_pool = ConnectionPool() connection = await db_pool.connection() - transaction = await connection.transaction() - - # Here we fetch 5 results in each iteration. - async with cursor in transaction.cursor( - querystring="SELECT * FROM users WHERE username = $1", - parameters=["Some_Username"], - fetch_number=5, - ): - async for fetched_result in cursor: - dict_result: List[Dict[Any, Any]] = fetched_result.result() - ... # do something with this result. + + cursor = connection.cursor() + await cursor.execute( + querystring="SELECT * FROM users WHERE id > $1", + parameters=[100], + ) + result: QueryResult = await cursor.fetchone() ``` -## Cursor methods +@tab Async Context Manager +```python +from psqlpy import ConnectionPool, QueryResult -There are a lot of methods to work with cursor. +async def main() -> None: + db_pool = ConnectionPool() + connection = await db_pool.connection() + + async with connection.cursor( + querystring="SELECT * FROM users WHERE id > $1", + parameters=[100], + fetch_number=10, + ) as cursor: + result: QueryResult = await cursor.fetchone() +``` +::: +## Cursor methods ### Start Declare (create) cursor. @@ -58,14 +85,53 @@ async def main() -> None: await cursor.close() ``` -### Fetch +### Execute + +Initialize cursor and make it ready for fetching. -You can fetch next `N` records from the cursor. -It's possible to specify `N` fetch record with parameter `fetch_number`, otherwise will be used `fetch_number` from the `Cursor` initialization. +::: important +If you initialized cursor with `start` method or via context manager, you don't have to use this method. +::: + +#### Parameters: +- `querystring`: specify query for cursor. +- `parameters`: parameters for the querystring. 
Default `None` ```python async def main() -> None: - result: QueryResult = await cursor.fetch( - fetch_number=100, + await cursor.execute( + querystring="SELECT * FROM users WHERE id > $1", + parameters=[100], ) + result: QueryResult = await cursor.fetchone() +``` + +### Fetchone + +Fetch one result from the cursor. + +```python +async def main() -> None: + result: QueryResult = await cursor.fetchone() +``` + +### Fetchmany + +Fetch N results from the cursor. + +#### Parameters: +- `size`: number of records to fetch. + +```python +async def main() -> None: + result: QueryResult = await cursor.fetchmany(size=10) +``` + +### Fetchall + +Fetch all results from the cursor. + +```python +async def main() -> None: + result: QueryResult = await cursor.fetchall() ``` diff --git a/docs/components/transaction.md b/docs/components/transaction.md index 1bed17e4..8f18a097 100644 --- a/docs/components/transaction.md +++ b/docs/components/transaction.md @@ -407,8 +407,6 @@ async def main() -> None: - `querystring`: Statement string. - `parameters`: List of list of parameters for the statement string. - `fetch_number`: rewrite default fetch_number. Default is 10. -- `scroll`: make cursor scrollable or not. Default is like in `PostgreSQL`. -- `prepared`: prepare querystring or not. From `Transaction` you can create new `Cursor` object which represents cursor in the `PostgreSQL`. [PostgreSQL Docs](https://www.postgresql.org/docs/current/plpgsql-cursors.html) diff --git a/docs/introduction/introduction.md b/docs/introduction/introduction.md index 545eb700..27f142d9 100644 --- a/docs/introduction/introduction.md +++ b/docs/introduction/introduction.md @@ -5,15 +5,14 @@ title: What is PSQLPy? `PSQLPy` is a new Python driver for PostgreSQL fully written in Rust. It was inspired by `Psycopg3` and `AsyncPG`. With `PSQLPy` you can: -- Make an interaction with the PostgeSQL in your application much faster (2-3 times). +- Make an interaction with the PostgeSQL in your application faster. - Be sure that there won't be any unexpected errors. -- Don't usually go to the documentation to search every question - we have awesome docstrings for every component. +- Don't usually go to the documentation to search every question - we have docstrings for every component. - Use `MyPy` (or any other Python type checker) with confidence that exactly the types specified in the typing will be returned. - Concentrate on writing your code, not understanding new abstractions in this library, we only have classes which represents PostgreSQL object (transaction, cursor, etc). ::: info -It is extremely important to understand that the library will provide a noticeable acceleration in working with the database only if your queries are optimized. -Otherwise, there will be acceleration, but not so significant +The library will provide a noticeable acceleration in working with the database only if your queries are optimized. ::: ## Important notes @@ -22,4 +21,5 @@ But in some situations this behavior can break you application. As an example, i ## Join community! 
You can get support from the creators of `PSQLPy` in some social media: -- [Telegram](https://t.me/+f3Y8mYKgXxhmYThi) \ No newline at end of file +- [Telegram](https://t.me/+f3Y8mYKgXxhmYThi) +- [Discord](https://discord.gg/ugNhzmhZ) diff --git a/src/connection/traits.rs b/src/connection/traits.rs index 5d8d49ae..ccf8f467 100644 --- a/src/connection/traits.rs +++ b/src/connection/traits.rs @@ -101,39 +101,3 @@ pub trait CloseTransaction: StartTransaction { fn rollback(&mut self) -> impl std::future::Future>; } - -// pub trait Cursor { -// fn build_cursor_start_qs( -// &self, -// cursor_name: &str, -// scroll: &Option, -// querystring: &str, -// ) -> String { -// let mut cursor_init_query = format!("DECLARE {cursor_name}"); -// if let Some(scroll) = scroll { -// if *scroll { -// cursor_init_query.push_str(" SCROLL"); -// } else { -// cursor_init_query.push_str(" NO SCROLL"); -// } -// } - -// cursor_init_query.push_str(format!(" CURSOR FOR {querystring}").as_str()); - -// cursor_init_query -// } - -// fn start_cursor( -// &mut self, -// cursor_name: &str, -// scroll: &Option, -// querystring: String, -// prepared: &Option, -// parameters: Option>, -// ) -> impl std::future::Future>; - -// fn close_cursor( -// &mut self, -// cursor_name: &str, -// ) -> impl std::future::Future>; -// } From 5bcf10face60311667bef74c2b998bfd4977f18d Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Tue, 27 May 2025 20:57:34 +0200 Subject: [PATCH 61/65] Added more documentation and added one test --- docs/components/connection.md | 7 +++---- docs/components/cursor.md | 26 ++++++++++++++++++++++++-- python/tests/test_cursor.py | 16 ++++++++++++++++ src/driver/common.rs | 6 +++--- src/driver/connection.rs | 2 +- src/driver/cursor.rs | 3 +-- 6 files changed, 48 insertions(+), 12 deletions(-) diff --git a/docs/components/connection.md b/docs/components/connection.md index bd73d933..180777d2 100644 --- a/docs/components/connection.md +++ b/docs/components/connection.md @@ -24,11 +24,10 @@ async def main() -> None: ```python from psqlpy import connect -db_connection: Final = connect( - dsn="postgres://postgres:postgres@localhost:5432/postgres", -) - async def main() -> None: + db_connection: Final = await connect( + dsn="postgres://postgres:postgres@localhost:5432/postgres", + ) await db_connection.execute(...) ``` diff --git a/docs/components/cursor.md b/docs/components/cursor.md index eed81560..f8c2fbd3 100644 --- a/docs/components/cursor.md +++ b/docs/components/cursor.md @@ -61,12 +61,34 @@ async def main() -> None: async with connection.cursor( querystring="SELECT * FROM users WHERE id > $1", parameters=[100], - fetch_number=10, + array_size=10, ) as cursor: result: QueryResult = await cursor.fetchone() ``` + +@tab Async Iterator +```python +from psqlpy import ConnectionPool, QueryResult + +async def main() -> None: + db_pool = ConnectionPool() + connection = await db_pool.connection() + + cursor = connection.cursor( + querystring="SELECT * FROM users WHERE id > $1", + parameters=[100], + fetch_number=10, + ) + await cursor.start() + + async for result in cursor: + print(result) +``` ::: +## Cursor attributes +- `array_size`: get and set attribute. Used in async iterator and `fetch_many` method. + ## Cursor methods ### Start Declare (create) cursor. @@ -117,7 +139,7 @@ async def main() -> None: ### Fetchmany -Fetch N results from the cursor. +Fetch N results from the cursor. Default is `array_size`. #### Parameters: - `size`: number of records to fetch. 
diff --git a/python/tests/test_cursor.py b/python/tests/test_cursor.py index fdd53ba7..bd423a9b 100644 --- a/python/tests/test_cursor.py +++ b/python/tests/test_cursor.py @@ -65,6 +65,22 @@ async def test_cursor_as_async_context_manager( assert len(results.result()) == number_database_records +async def test_cursor_as_async_iterator( + psql_pool: ConnectionPool, + table_name: str, + number_database_records: int, +) -> None: + connection = await psql_pool.connection() + all_results = [] + async with connection.cursor( + querystring=f"SELECT * FROM {table_name}", + ) as cursor: + async for results in cursor: + all_results.extend(results.result()) + + assert len(all_results) == number_database_records + + async def test_cursor_send_underlying_connection_to_pool( psql_pool: ConnectionPool, table_name: str, diff --git a/src/driver/common.rs b/src/driver/common.rs index bab84049..d2f6aec8 100644 --- a/src/driver/common.rs +++ b/src/driver/common.rs @@ -132,18 +132,18 @@ macro_rules! impl_cursor_method { ($name:ident) => { #[pymethods] impl $name { - #[pyo3(signature = (querystring=None, parameters=None, fetch_number=None))] + #[pyo3(signature = (querystring=None, parameters=None, array_size=None))] pub fn cursor( &self, querystring: Option, parameters: Option>, - fetch_number: Option, + array_size: Option, ) -> PSQLPyResult { Ok(Cursor::new( self.conn.clone(), querystring, parameters, - fetch_number, + array_size, self.pg_config.clone(), None, )) diff --git a/src/driver/connection.rs b/src/driver/connection.rs index e9972e58..0d562b00 100644 --- a/src/driver/connection.rs +++ b/src/driver/connection.rs @@ -1,5 +1,5 @@ use deadpool_postgres::Pool; -use pyo3::{ffi::PyObject, pyclass, pyfunction, pymethods, Py, PyAny, PyErr}; +use pyo3::{pyclass, pyfunction, pymethods, Py, PyAny, PyErr}; use std::sync::Arc; use tokio::sync::RwLock; use tokio_postgres::Config; diff --git a/src/driver/cursor.rs b/src/driver/cursor.rs index ab40526a..248bd08f 100644 --- a/src/driver/cursor.rs +++ b/src/driver/cursor.rs @@ -1,8 +1,7 @@ use std::sync::Arc; use pyo3::{ - exceptions::PyStopAsyncIteration, pyclass, pymethods, types::PyNone, Py, PyAny, PyErr, - PyObject, Python, + exceptions::PyStopAsyncIteration, pyclass, pymethods, Py, PyAny, PyErr, PyObject, Python, }; use tokio::sync::RwLock; use tokio_postgres::{Config, Portal as tp_Portal}; From 9d896e3b4c091c3f3ad5a9ce14d8b368f4764276 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Tue, 27 May 2025 21:05:30 +0200 Subject: [PATCH 62/65] Bumped version to 0.11.0 --- Cargo.lock | 2 +- Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 076ac4c3..5fdbf0c0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1006,7 +1006,7 @@ dependencies = [ [[package]] name = "psqlpy" -version = "0.10.1" +version = "0.11.0" dependencies = [ "byteorder", "bytes", diff --git a/Cargo.toml b/Cargo.toml index e6dc160d..5697df3c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "psqlpy" -version = "0.10.1" +version = "0.11.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html From 4f8337f1a6fb30bfd03834df418b779f29068a58 Mon Sep 17 00:00:00 2001 From: Aleksandr Kiselev <62915291+chandr-andr@users.noreply.github.com> Date: Wed, 28 May 2025 11:11:26 +0200 Subject: [PATCH 63/65] Update README.md --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index 1272db9c..57f7509b 100644 --- a/README.md +++ 
b/README.md @@ -1,5 +1,7 @@ [![PyPI - Python Version](https://img.shields.io/badge/PYTHON-3.9%20%7C%203.10%20%7C%203.11%20%7C%203.12%20%7C%203.13-blue?style=for-the-badge )](https://pypi.org/project/psqlpy/) +[![PyPI - Python Version](https://img.shields.io/badge/Tested%20On%20PostgreSQL-14%20%7C%2015%20%7C%2016%20%7C17-2be28a?style=for-the-badge +)](https://pypi.org/project/psqlpy/) [![PyPI](https://img.shields.io/pypi/v/psqlpy?style=for-the-badge)](https://pypi.org/project/psqlpy/) [![PyPI - Downloads](https://img.shields.io/pypi/dm/psqlpy?style=for-the-badge)](https://pypistats.org/packages/psqlpy) From f6c9f17a5d117e8fcbea06e8a00417f4bd3b0061 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Wed, 28 May 2025 14:58:16 +0200 Subject: [PATCH 64/65] Removed mistake in errors --- README.md | 2 ++ src/driver/common.rs | 4 ++-- src/driver/cursor.rs | 8 ++++---- src/driver/prepared_statement.rs | 2 +- src/driver/transaction.rs | 26 +++++++++++++------------- src/exceptions/rust_errors.rs | 6 +++--- 6 files changed, 25 insertions(+), 23 deletions(-) diff --git a/README.md b/README.md index 1272db9c..57f7509b 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,7 @@ [![PyPI - Python Version](https://img.shields.io/badge/PYTHON-3.9%20%7C%203.10%20%7C%203.11%20%7C%203.12%20%7C%203.13-blue?style=for-the-badge )](https://pypi.org/project/psqlpy/) +[![PyPI - Python Version](https://img.shields.io/badge/Tested%20On%20PostgreSQL-14%20%7C%2015%20%7C%2016%20%7C17-2be28a?style=for-the-badge +)](https://pypi.org/project/psqlpy/) [![PyPI](https://img.shields.io/pypi/v/psqlpy?style=for-the-badge)](https://pypi.org/project/psqlpy/) [![PyPI - Downloads](https://img.shields.io/pypi/dm/psqlpy?style=for-the-badge)](https://pypistats.org/packages/psqlpy) diff --git a/src/driver/common.rs b/src/driver/common.rs index d2f6aec8..dc92a28f 100644 --- a/src/driver/common.rs +++ b/src/driver/common.rs @@ -194,7 +194,7 @@ macro_rules! impl_transaction_methods { pub async fn commit(&mut self) -> PSQLPyResult<()> { let conn = self.conn.clone(); let Some(conn) = conn else { - return Err(RustPSQLDriverError::TransactionClosedError("1".into())); + return Err(RustPSQLDriverError::TransactionClosedError); }; let mut write_conn_g = conn.write().await; write_conn_g.commit().await?; @@ -209,7 +209,7 @@ macro_rules! 
impl_transaction_methods { pub async fn rollback(&mut self) -> PSQLPyResult<()> { let conn = self.conn.clone(); let Some(conn) = conn else { - return Err(RustPSQLDriverError::TransactionClosedError("2".into())); + return Err(RustPSQLDriverError::TransactionClosedError); }; let mut write_conn_g = conn.write().await; write_conn_g.rollback().await?; diff --git a/src/driver/cursor.rs b/src/driver/cursor.rs index 248bd08f..3a8abe59 100644 --- a/src/driver/cursor.rs +++ b/src/driver/cursor.rs @@ -54,10 +54,10 @@ impl Cursor { async fn query_portal(&self, size: i32) -> PSQLPyResult { let Some(transaction) = &self.transaction else { - return Err(RustPSQLDriverError::TransactionClosedError("3".into())); + return Err(RustPSQLDriverError::TransactionClosedError); }; let Some(portal) = &self.inner else { - return Err(RustPSQLDriverError::TransactionClosedError("4".into())); + return Err(RustPSQLDriverError::TransactionClosedError); }; transaction.query_portal(&portal, size).await } @@ -162,10 +162,10 @@ impl Cursor { let py_future = Python::with_gil(move |gil| { rustdriver_future(gil, async move { let Some(txid) = &txid else { - return Err(RustPSQLDriverError::TransactionClosedError("5".into())); + return Err(RustPSQLDriverError::TransactionClosedError); }; let Some(portal) = &portal else { - return Err(RustPSQLDriverError::TransactionClosedError("6".into())); + return Err(RustPSQLDriverError::TransactionClosedError); }; let result = txid.query_portal(&portal, size).await?; diff --git a/src/driver/prepared_statement.rs b/src/driver/prepared_statement.rs index 1880449c..03b6c27d 100644 --- a/src/driver/prepared_statement.rs +++ b/src/driver/prepared_statement.rs @@ -39,7 +39,7 @@ impl PreparedStatement { impl PreparedStatement { async fn execute(&self) -> PSQLPyResult { let Some(conn) = &self.conn else { - return Err(RustPSQLDriverError::TransactionClosedError("12".into())); + return Err(RustPSQLDriverError::TransactionClosedError); }; let read_conn_g = conn.read().await; diff --git a/src/driver/transaction.rs b/src/driver/transaction.rs index 81845d40..87f1a282 100644 --- a/src/driver/transaction.rs +++ b/src/driver/transaction.rs @@ -71,7 +71,7 @@ impl Transaction { }); let Some(conn) = conn else { - return Err(RustPSQLDriverError::TransactionClosedError("7".into())); + return Err(RustPSQLDriverError::TransactionClosedError); }; let mut write_conn_g = conn.write().await; write_conn_g @@ -98,7 +98,7 @@ impl Transaction { }); let Some(conn) = conn else { - return Err(RustPSQLDriverError::TransactionClosedError("8".into())); + return Err(RustPSQLDriverError::TransactionClosedError); }; let mut write_conn_g = conn.write().await; if is_exception_none { @@ -121,7 +121,7 @@ impl Transaction { pub async fn begin(&mut self) -> PSQLPyResult<()> { let conn = &self.conn; let Some(conn) = conn else { - return Err(RustPSQLDriverError::TransactionClosedError("9".into())); + return Err(RustPSQLDriverError::TransactionClosedError); }; let mut write_conn_g = conn.write().await; write_conn_g @@ -139,7 +139,7 @@ impl Transaction { prepared: Option, ) -> PSQLPyResult { let Some(conn) = &self.conn else { - return Err(RustPSQLDriverError::TransactionClosedError("10".into())); + return Err(RustPSQLDriverError::TransactionClosedError); }; let read_conn_g = conn.read().await; @@ -154,7 +154,7 @@ impl Transaction { prepared: Option, ) -> PSQLPyResult { let Some(conn) = &self.conn else { - return Err(RustPSQLDriverError::TransactionClosedError("11".into())); + return Err(RustPSQLDriverError::TransactionClosedError); }; let 
read_conn_g = conn.read().await; @@ -169,7 +169,7 @@ impl Transaction { prepared: Option, ) -> PSQLPyResult> { let Some(conn) = &self.conn else { - return Err(RustPSQLDriverError::TransactionClosedError("12".into())); + return Err(RustPSQLDriverError::TransactionClosedError); }; let read_conn_g = conn.read().await; @@ -180,7 +180,7 @@ impl Transaction { pub async fn execute_batch(&self, querystring: String) -> PSQLPyResult<()> { let Some(conn) = &self.conn else { - return Err(RustPSQLDriverError::TransactionClosedError("13".into())); + return Err(RustPSQLDriverError::TransactionClosedError); }; let read_conn_g = conn.read().await; @@ -195,7 +195,7 @@ impl Transaction { prepared: Option, ) -> PSQLPyResult<()> { let Some(conn) = &self.conn else { - return Err(RustPSQLDriverError::TransactionClosedError("14".into())); + return Err(RustPSQLDriverError::TransactionClosedError); }; let read_conn_g = conn.read().await; @@ -212,7 +212,7 @@ impl Transaction { prepared: Option, ) -> PSQLPyResult { let Some(conn) = &self.conn else { - return Err(RustPSQLDriverError::TransactionClosedError("15".into())); + return Err(RustPSQLDriverError::TransactionClosedError); }; let read_conn_g = conn.read().await; @@ -223,7 +223,7 @@ impl Transaction { pub async fn create_savepoint(&mut self, savepoint_name: String) -> PSQLPyResult<()> { let Some(conn) = &self.conn else { - return Err(RustPSQLDriverError::TransactionClosedError("16".into())); + return Err(RustPSQLDriverError::TransactionClosedError); }; let read_conn_g = conn.read().await; @@ -236,7 +236,7 @@ impl Transaction { pub async fn release_savepoint(&mut self, savepoint_name: String) -> PSQLPyResult<()> { let Some(conn) = &self.conn else { - return Err(RustPSQLDriverError::TransactionClosedError("17".into())); + return Err(RustPSQLDriverError::TransactionClosedError); }; let read_conn_g = conn.read().await; @@ -249,7 +249,7 @@ impl Transaction { pub async fn rollback_savepoint(&mut self, savepoint_name: String) -> PSQLPyResult<()> { let Some(conn) = &self.conn else { - return Err(RustPSQLDriverError::TransactionClosedError("18".into())); + return Err(RustPSQLDriverError::TransactionClosedError); }; let read_conn_g = conn.read().await; @@ -305,6 +305,6 @@ impl Transaction { return future::try_join_all(futures).await; } - Err(RustPSQLDriverError::TransactionClosedError("19".into())) + Err(RustPSQLDriverError::TransactionClosedError) } } diff --git a/src/exceptions/rust_errors.rs b/src/exceptions/rust_errors.rs index 9062a37e..f133321b 100644 --- a/src/exceptions/rust_errors.rs +++ b/src/exceptions/rust_errors.rs @@ -49,8 +49,8 @@ pub enum RustPSQLDriverError { TransactionSavepointError(String), #[error("Transaction execute error: {0}")] TransactionExecuteError(String), - #[error("Underlying connection is returned to the pool: {0}")] - TransactionClosedError(String), + #[error("Underlying connection is returned to the pool")] + TransactionClosedError, // Cursor Errors #[error("Cursor error: {0}")] @@ -162,7 +162,7 @@ impl From for pyo3::PyErr { RustPSQLDriverError::TransactionExecuteError(_) => { TransactionExecuteError::new_err((error_desc,)) } - RustPSQLDriverError::TransactionClosedError(_) => { + RustPSQLDriverError::TransactionClosedError => { TransactionClosedError::new_err((error_desc,)) } RustPSQLDriverError::BaseCursorError(_) => BaseCursorError::new_err((error_desc,)), From d504d801dc6cb5eb6cbe64c67ff371ad45976037 Mon Sep 17 00:00:00 2001 From: "chandr-andr (Kiselev Aleksandr)" Date: Wed, 28 May 2025 15:02:04 +0200 Subject: [PATCH 65/65] Bumped 
version to 0.11.1 --- Cargo.lock | 2 +- Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5fdbf0c0..e7e6678a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1006,7 +1006,7 @@ dependencies = [ [[package]] name = "psqlpy" -version = "0.11.0" +version = "0.11.1" dependencies = [ "byteorder", "bytes", diff --git a/Cargo.toml b/Cargo.toml index 5697df3c..d4c171a7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "psqlpy" -version = "0.11.0" +version = "0.11.1" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
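
Taken together, the docstring and documentation hunks in this series describe a small user-facing surface: `Connection.prepare()` returning a `PreparedStatement` with `execute()`/`cursor()`/`columns()`, plus cursors exposing `fetchone`/`fetchmany`/`fetchall` and an `array_size`. The sketch below is a minimal end-to-end illustration of that surface as documented above, not a verified program against a released build; the DSN, the `users` table, and the fetch sizes are placeholders introduced for the example only.

```python
import asyncio
from typing import Final

from psqlpy import ConnectionPool, QueryResult


async def main() -> None:
    # Placeholder DSN; replace with a real connection string.
    db_pool: Final = ConnectionPool(
        dsn="postgres://postgres:postgres@localhost:5432/postgres",
    )
    connection = await db_pool.connection()

    # Prepared statement: prepare once, then execute it.
    prepared = await connection.prepare(
        "SELECT * FROM users WHERE id > $1",  # hypothetical table/query
        parameters=[100],
    )
    result: QueryResult = await prepared.execute()
    print(result.result())

    # Server-side cursor used as an async context manager; fetchmany()
    # defaults to the cursor's array_size when no size is passed.
    async with connection.cursor(
        querystring="SELECT * FROM users WHERE id > $1",
        parameters=[100],
        array_size=10,
    ) as cursor:
        batch: QueryResult = await cursor.fetchmany(size=5)
        print(batch.result())

    db_pool.close()


if __name__ == "__main__":
    asyncio.run(main())
```

Per the cursor documentation added in this series, a cursor always lives inside a transaction and one is opened implicitly when the cursor is used as a context manager, so the batched reads above need no explicit `transaction()` call.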