diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
index 0f80778..efad52c 100644
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -86,6 +86,31 @@ jobs:
name: wheels-macos-arm64
path: dist
+ windows-x86_64:
+ runs-on: windows-latest
+ strategy:
+ matrix:
+ target: [x86_64]
+ steps:
+ - uses: actions/checkout@v3
+ - uses: actions/setup-python@v4
+ with:
+ python-version: '3.10'
+ - name: Build wheels
+ uses: PyO3/maturin-action@v1
+ env:
+ CXX: clang++
+ CC: clang
+ with:
+ target: ${{ matrix.target }}
+ args: --release --out dist --find-interpreter
+ sccache: 'true'
+ - name: Upload wheels
+ uses: actions/upload-artifact@v4
+ with:
+ name: wheels-windows-x86_64
+ path: dist
+
sdist:
runs-on: ubuntu-latest
steps:
@@ -105,7 +130,7 @@ jobs:
name: Release
runs-on: ubuntu-latest
if: "startsWith(github.ref, 'refs/tags/')"
- needs: [linux, macos-arm64, macos-x86_64, sdist]
+ needs: [linux, macos-arm64, macos-x86_64, windows-x86_64, sdist]
steps:
- uses: actions/download-artifact@v4
with:
@@ -122,5 +147,5 @@ jobs:
MATURIN_PYPI_TOKEN: ${{ secrets.PYPI_API_TOKEN }}
with:
command: upload
- args: --skip-existing *
+ args: --skip-existing dist/*
diff --git a/Cargo.lock b/Cargo.lock
index dd4bd57..b5b1fee 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1,6 +1,6 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
-version = 3
+version = 4
[[package]]
name = "addr2line"
@@ -95,7 +95,7 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.72",
+ "syn",
]
[[package]]
@@ -106,7 +106,7 @@ checksum = "6e0c28dcc82d7c8ead5cb13beb15405b57b8546e93215673ff8ca0349a028107"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.72",
+ "syn",
]
[[package]]
@@ -215,7 +215,7 @@ dependencies = [
"regex",
"rustc-hash",
"shlex",
- "syn 2.0.72",
+ "syn",
"which",
]
@@ -326,6 +326,15 @@ dependencies = [
"libloading",
]
+[[package]]
+name = "cmake"
+version = "0.1.54"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e7caa3f9de89ddbe2c607f4101924c5abec803763ae9534e4f4d7d8f84aa81f0"
+dependencies = [
+ "cc",
+]
+
[[package]]
name = "core-foundation"
version = "0.9.4"
@@ -389,7 +398,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba"
dependencies = [
"libc",
- "windows-sys",
+ "windows-sys 0.52.0",
]
[[package]]
@@ -472,7 +481,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.72",
+ "syn",
]
[[package]]
@@ -582,6 +591,12 @@ dependencies = [
"hashbrown 0.14.5",
]
+[[package]]
+name = "heck"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
+
[[package]]
name = "hermit-abi"
version = "0.3.9"
@@ -594,7 +609,7 @@ version = "0.5.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5"
dependencies = [
- "windows-sys",
+ "windows-sys 0.52.0",
]
[[package]]
@@ -654,7 +669,7 @@ dependencies = [
"httpdate",
"itoa",
"pin-project-lite",
- "socket2",
+ "socket2 0.5.7",
"tokio",
"tower-service",
"tracing",
@@ -737,9 +752,9 @@ dependencies = [
[[package]]
name = "indoc"
-version = "1.0.9"
+version = "2.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bfa799dd5ed20a7e349f3b4639aa80d74549c81716d9ec4f994c9b5815598306"
+checksum = "f4c7245a08504955605670dbf141fceab975f15ca21570696aebe9d2e71576bd"
[[package]]
name = "inout"
@@ -751,6 +766,17 @@ dependencies = [
"generic-array",
]
+[[package]]
+name = "io-uring"
+version = "0.7.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d93587f37623a1a17d94ef2bc9ada592f5465fe7732084ab7beefabe5c77c0c4"
+dependencies = [
+ "bitflags 2.6.0",
+ "cfg-if",
+ "libc",
+]
+
[[package]]
name = "itertools"
version = "0.12.1"
@@ -790,9 +816,9 @@ checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"
[[package]]
name = "libc"
-version = "0.2.155"
+version = "0.2.174"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c"
+checksum = "1171693293099992e19cddea4e8b849964e9846f4acee11b3948bcc337be8776"
[[package]]
name = "libloading"
@@ -806,9 +832,9 @@ dependencies = [
[[package]]
name = "libsql"
-version = "0.9.1"
+version = "0.9.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1f99d7048440c5f443f7e121b27159d4d3c66d0a75e20754fd6cbb00f3cc8c18"
+checksum = "c92f460194a673c29e82520a061a82f83892faca9ce6881db93d591cd38cb3dc"
dependencies = [
"anyhow",
"async-stream",
@@ -846,20 +872,21 @@ dependencies = [
[[package]]
name = "libsql-ffi"
-version = "0.9.1"
+version = "0.9.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c0e12afae859e7dc096ebc7a7cee806efa948e5b78ff9d0b4573ca7815480ab0"
+checksum = "64691b229b9d5c0754115f59a0e5c0d1bcc102bfe402b96f2bbf9d5150a4ab3c"
dependencies = [
"bindgen",
"cc",
+ "cmake",
"glob",
]
[[package]]
name = "libsql-hrana"
-version = "0.9.1"
+version = "0.9.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "71f16d7adbad5bee861c955011c155893937bd4c8711c91d949060363f24e0f6"
+checksum = "c18b78daf4af8603c048f584faada18c8970f4f3af16df0c3319a92c90d0d696"
dependencies = [
"base64 0.21.7",
"bytes",
@@ -867,23 +894,11 @@ dependencies = [
"serde",
]
-[[package]]
-name = "libsql-python"
-version = "0.0.41"
-dependencies = [
- "libsql",
- "pyo3",
- "pyo3-build-config",
- "tokio",
- "tracing-subscriber",
- "version_check",
-]
-
[[package]]
name = "libsql-rusqlite"
-version = "0.9.1"
+version = "0.9.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "96c4bd62bf66a14e46643bd44adbb843c791e7c3f835a1424ffa629df8a7a9f4"
+checksum = "c502ebc6fd1a7ba1b686c47226016fb8e913080c4021491bda33ca33c6fa1ab4"
dependencies = [
"bitflags 2.6.0",
"fallible-iterator 0.2.0",
@@ -913,9 +928,9 @@ dependencies = [
[[package]]
name = "libsql-sys"
-version = "0.9.1"
+version = "0.9.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "659587f9ebebdb2ff1112e8af7f30fc212f8d5457a3a9a549eda48544e942a7f"
+checksum = "9e7b9ab82fa6122efe2b2fe066043d6330aa4a7aa656b4ec0df9768ffd1ea6a0"
dependencies = [
"bytes",
"libsql-ffi",
@@ -927,9 +942,9 @@ dependencies = [
[[package]]
name = "libsql_replication"
-version = "0.9.1"
+version = "0.9.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0666c23acca99c2ebc317071f87814ad006243fa6e15c02a83e40ff8e6948832"
+checksum = "aa911faee7c1e039a44aeced5e61b5106fb714782e26d89216c87c83eb1ce410"
dependencies = [
"aes",
"async-stream",
@@ -1024,7 +1039,7 @@ dependencies = [
"hermit-abi",
"libc",
"wasi",
- "windows-sys",
+ "windows-sys 0.52.0",
]
[[package]]
@@ -1174,7 +1189,7 @@ checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.72",
+ "syn",
]
[[package]]
@@ -1189,6 +1204,12 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
+[[package]]
+name = "portable-atomic"
+version = "1.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483"
+
[[package]]
name = "ppv-lite86"
version = "0.2.17"
@@ -1202,7 +1223,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f12335488a2f3b0a83b14edad48dca9879ce89b2edd10e80237e4e852dd645e"
dependencies = [
"proc-macro2",
- "syn 2.0.72",
+ "syn",
]
[[package]]
@@ -1234,20 +1255,32 @@ dependencies = [
"itertools",
"proc-macro2",
"quote",
- "syn 2.0.72",
+ "syn",
+]
+
+[[package]]
+name = "pylibsql"
+version = "0.1.7"
+dependencies = [
+ "libsql",
+ "pyo3",
+ "pyo3-build-config",
+ "tokio",
+ "tracing-subscriber",
+ "version_check",
]
[[package]]
name = "pyo3"
-version = "0.19.2"
+version = "0.25.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e681a6cfdc4adcc93b4d3cf993749a4552018ee0a9b65fc0ccfad74352c72a38"
+checksum = "8970a78afe0628a3e3430376fc5fd76b6b45c4d43360ffd6cdd40bdde72b682a"
dependencies = [
- "cfg-if",
"indoc",
"libc",
"memoffset",
- "parking_lot",
+ "once_cell",
+ "portable-atomic",
"pyo3-build-config",
"pyo3-ffi",
"pyo3-macros",
@@ -1256,9 +1289,9 @@ dependencies = [
[[package]]
name = "pyo3-build-config"
-version = "0.19.2"
+version = "0.25.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "076c73d0bc438f7a4ef6fdd0c3bb4732149136abd952b110ac93e4edb13a6ba5"
+checksum = "458eb0c55e7ece017adeba38f2248ff3ac615e53660d7c71a238d7d2a01c7598"
dependencies = [
"once_cell",
"target-lexicon",
@@ -1266,9 +1299,9 @@ dependencies = [
[[package]]
name = "pyo3-ffi"
-version = "0.19.2"
+version = "0.25.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e53cee42e77ebe256066ba8aa77eff722b3bb91f3419177cf4cd0f304d3284d9"
+checksum = "7114fe5457c61b276ab77c5055f206295b812608083644a5c5b2640c3102565c"
dependencies = [
"libc",
"pyo3-build-config",
@@ -1276,25 +1309,27 @@ dependencies = [
[[package]]
name = "pyo3-macros"
-version = "0.19.2"
+version = "0.25.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dfeb4c99597e136528c6dd7d5e3de5434d1ceaf487436a3f03b2d56b6fc9efd1"
+checksum = "a8725c0a622b374d6cb051d11a0983786448f7785336139c3c94f5aa6bef7e50"
dependencies = [
"proc-macro2",
"pyo3-macros-backend",
"quote",
- "syn 1.0.109",
+ "syn",
]
[[package]]
name = "pyo3-macros-backend"
-version = "0.19.2"
+version = "0.25.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "947dc12175c254889edc0c02e399476c2f652b4b9ebd123aa655c224de259536"
+checksum = "4109984c22491085343c05b0dbc54ddc405c3cf7b4374fc533f5c3313a572ccc"
dependencies = [
+ "heck",
"proc-macro2",
+ "pyo3-build-config",
"quote",
- "syn 1.0.109",
+ "syn",
]
[[package]]
@@ -1386,7 +1421,7 @@ dependencies = [
"libc",
"spin",
"untrusted",
- "windows-sys",
+ "windows-sys 0.52.0",
]
[[package]]
@@ -1411,7 +1446,7 @@ dependencies = [
"errno",
"libc",
"linux-raw-sys",
- "windows-sys",
+ "windows-sys 0.52.0",
]
[[package]]
@@ -1486,7 +1521,7 @@ version = "0.1.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fbc91545643bcf3a0bbb6569265615222618bdf33ce4ffbbd13c4bbd4c093534"
dependencies = [
- "windows-sys",
+ "windows-sys 0.52.0",
]
[[package]]
@@ -1535,7 +1570,7 @@ checksum = "e0cd7e117be63d3c3678776753929474f3b04a43a080c744d6b0ae2a8c28e222"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.72",
+ "syn",
]
[[package]]
@@ -1601,7 +1636,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c"
dependencies = [
"libc",
- "windows-sys",
+ "windows-sys 0.52.0",
+]
+
+[[package]]
+name = "socket2"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "233504af464074f9d066d7b5416c5f9b894a5862a6506e306f7b816cdd6f1807"
+dependencies = [
+ "libc",
+ "windows-sys 0.59.0",
]
[[package]]
@@ -1616,17 +1661,6 @@ version = "2.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
-[[package]]
-name = "syn"
-version = "1.0.109"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
-dependencies = [
- "proc-macro2",
- "quote",
- "unicode-ident",
-]
-
[[package]]
name = "syn"
version = "2.0.72"
@@ -1646,9 +1680,9 @@ checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160"
[[package]]
name = "target-lexicon"
-version = "0.12.15"
+version = "0.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4873307b7c257eddcb50c9bedf158eb669578359fb28428bef438fec8e6ba7c2"
+checksum = "e502f78cdbb8ba4718f566c418c52bc729126ffd16baee5baa718cf25dd5a69a"
[[package]]
name = "thiserror"
@@ -1667,7 +1701,7 @@ checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.72",
+ "syn",
]
[[package]]
@@ -1682,20 +1716,22 @@ dependencies = [
[[package]]
name = "tokio"
-version = "1.39.1"
+version = "1.47.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d040ac2b29ab03b09d4129c2f5bbd012a3ac2f79d38ff506a4bf8dd34b0eac8a"
+checksum = "43864ed400b6043a4757a25c7a64a8efde741aed79a056a2fb348a406701bb35"
dependencies = [
"backtrace",
"bytes",
+ "io-uring",
"libc",
"mio",
"parking_lot",
"pin-project-lite",
"signal-hook-registry",
- "socket2",
+ "slab",
+ "socket2 0.6.0",
"tokio-macros",
- "windows-sys",
+ "windows-sys 0.59.0",
]
[[package]]
@@ -1710,13 +1746,13 @@ dependencies = [
[[package]]
name = "tokio-macros"
-version = "2.4.0"
+version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752"
+checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.72",
+ "syn",
]
[[package]]
@@ -1873,7 +1909,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.72",
+ "syn",
]
[[package]]
@@ -1899,9 +1935,9 @@ dependencies = [
[[package]]
name = "tracing-subscriber"
-version = "0.3.18"
+version = "0.3.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b"
+checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008"
dependencies = [
"nu-ansi-term",
"sharded-slab",
@@ -1940,9 +1976,9 @@ checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
[[package]]
name = "unindent"
-version = "0.1.11"
+version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e1766d682d402817b5ac4490b3c3002d91dfa0d22812f341609f97b08757359c"
+checksum = "7264e107f553ccae879d21fbea1d6724ac785e8c3bfc762137959b5802826ef3"
[[package]]
name = "untrusted"
@@ -2009,7 +2045,7 @@ dependencies = [
"log",
"proc-macro2",
"quote",
- "syn 2.0.72",
+ "syn",
"wasm-bindgen-shared",
]
@@ -2031,7 +2067,7 @@ checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.72",
+ "syn",
"wasm-bindgen-backend",
"wasm-bindgen-shared",
]
@@ -2112,6 +2148,15 @@ dependencies = [
"windows-targets",
]
+[[package]]
+name = "windows-sys"
+version = "0.59.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
+dependencies = [
+ "windows-targets",
+]
+
[[package]]
name = "windows-targets"
version = "0.52.6"
@@ -2194,7 +2239,7 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e"
dependencies = [
"proc-macro2",
"quote",
- "syn 2.0.72",
+ "syn",
]
[[package]]
diff --git a/Cargo.toml b/Cargo.toml
index fd5cdc8..73f1b13 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,19 +1,18 @@
[package]
-name = "libsql-python"
-version = "0.0.41"
-edition = "2021"
+name = "pylibsql"
+version = "0.1.7"
+edition = "2024"
[lib]
-name = "libsql_experimental"
crate-type = ["cdylib"]
[dependencies]
-pyo3 = "0.19.0"
-libsql = { version = "0.9.1", features = ["encryption"] }
-tokio = { version = "1.29.1", features = [ "rt-multi-thread" ] }
-tracing-subscriber = "0.3"
+pyo3 = "0.25.1"
+libsql = { version = "0.9.19", features = ["encryption"] }
+tokio = { version = "1.47.0", features = [ "rt-multi-thread" ] }
+tracing-subscriber = "0.3.19"
[build-dependencies]
version_check = "0.9.5"
# used where logic has to be version/distribution specific, e.g. pypy
-pyo3-build-config = { version = "0.19.0" }
+pyo3-build-config = { version = "0.25.1" }
diff --git a/README.md b/README.md
index 68b5eb1..c1e2ffe 100644
--- a/README.md
+++ b/README.md
@@ -1,12 +1,10 @@
-
-
- Turso + Python (experimental)
+
+
+ Turso + Python
-This package is experimental, which means it is not consider to be production grade. Furthermore, the package currently only supports Linux and macOS.
-
SQLite for Production. Powered by libSQL .
@@ -21,8 +19,8 @@
-
-
+
+
@@ -45,7 +43,5 @@ Learn more about what you can do with Turso:
- [Embedded Replicas](https://docs.turso.tech/features/embedded-replicas)
- [Platform API](https://docs.turso.tech/features/platform-api)
-- [Data Edge](https://docs.turso.tech/features/data-edge)
- [Branching](https://docs.turso.tech/features/branching)
- [Point-in-Time Recovery](https://docs.turso.tech/features/point-in-time-recovery)
-- [Scale to Zero](https://docs.turso.tech/features/scale-to-zero)
diff --git a/docs/api.md b/docs/api.md
index f63d4f0..08d0b15 100644
--- a/docs/api.md
+++ b/docs/api.md
@@ -32,6 +32,16 @@ Rolls back the current transaction and starts a new one.
Closes the database connection.
+### `with` statement
+
+Connection objects can be used as context managers to ensure that transactions are properly committed or rolled back. When entering the context, the connection object is returned. When exiting:
+- Without exception: automatically commits the transaction
+- With exception: automatically rolls back the transaction
+
+This behavior is compatible with Python's `sqlite3` module. Context managers work correctly in both transactional and autocommit modes.
+
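+For example, a minimal sketch (the file name `example.db` is just a placeholder):
+
+```python
+import libsql
+
+conn = libsql.connect("example.db")
+with conn:
+    conn.execute("CREATE TABLE IF NOT EXISTS users (id INTEGER, email TEXT)")
+    conn.execute("INSERT INTO users VALUES (?, ?)", (1, "alice@example.com"))
+# Committed on clean exit; an exception inside the block would roll back instead.
+```
+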
+When mixing manual transaction control with context managers, the context manager's commit/rollback applies to whatever transaction is active at the time of exit. Manual calls to `commit()` or `rollback()` within the context are allowed; statements executed after them implicitly begin a new transaction, which the context manager then commits or rolls back on exit.
+
### execute(sql, parameters=())
Create a new cursor object and executes the SQL statement.
diff --git a/example.py b/example.py
index b69aabf..af49bce 100644
--- a/example.py
+++ b/example.py
@@ -1,6 +1,6 @@
-import libsql_experimental
+import libsql
-con = libsql_experimental.connect("hello.db", sync_url="http://localhost:8080",
+con = libsql.connect("hello.db", sync_url="http://localhost:8080",
auth_token="")
con.sync()
diff --git a/examples/batch/README.md b/examples/batch/README.md
index 1f499bd..092bba6 100644
--- a/examples/batch/README.md
+++ b/examples/batch/README.md
@@ -5,7 +5,7 @@ This example demonstrates how to use libSQL to execute a batch of SQL statements
## Install Dependencies
```bash
-pip install libsql-experimental
+pip install libsql
```
## Running
diff --git a/examples/batch/main.py b/examples/batch/main.py
index eb81b31..11a6047 100644
--- a/examples/batch/main.py
+++ b/examples/batch/main.py
@@ -1,4 +1,4 @@
-import libsql_experimental as libsql
+import libsql
conn = libsql.connect("local.db")
cur = conn.cursor()
diff --git a/examples/encryption/README.md b/examples/encryption/README.md
index 0cd65af..9b1acc1 100644
--- a/examples/encryption/README.md
+++ b/examples/encryption/README.md
@@ -5,7 +5,7 @@ This example demonstrates how to create and use an encrypted SQLite database wit
## Install Dependencies
```bash
-pip install libsql-experimental
+pip install libsql
```
## Running
diff --git a/examples/encryption/main.py b/examples/encryption/main.py
index 5016f16..ce10b56 100644
--- a/examples/encryption/main.py
+++ b/examples/encryption/main.py
@@ -1,4 +1,4 @@
-import libsql_experimental as libsql
+import libsql
# You should set the ENCRYPTION_KEY in a environment variable
# For demo purposes, we're using a fixed key
diff --git a/examples/execute_script.py b/examples/execute_script.py
index 3ac3be2..ab22f45 100644
--- a/examples/execute_script.py
+++ b/examples/execute_script.py
@@ -3,7 +3,7 @@
"""
import os
-import libsql_experimental as libsql
+import libsql
def execute_script(conn, file_path: os.PathLike):
with open(file_path, 'r') as file:
diff --git a/examples/local/README.md b/examples/local/README.md
index 0a8aefe..22ef15d 100644
--- a/examples/local/README.md
+++ b/examples/local/README.md
@@ -5,7 +5,7 @@ This example demonstrates how to use libSQL with a local SQLite file.
## Install Dependencies
```bash
-pip install libsql-experimental
+pip install libsql
```
## Running
diff --git a/examples/local/main.py b/examples/local/main.py
index 2b685bf..5efa6fb 100644
--- a/examples/local/main.py
+++ b/examples/local/main.py
@@ -1,4 +1,4 @@
-import libsql_experimental as libsql
+import libsql
conn = libsql.connect("local.db")
cur = conn.cursor()
diff --git a/examples/memory/README.md b/examples/memory/README.md
index cca5ee0..e5e072f 100644
--- a/examples/memory/README.md
+++ b/examples/memory/README.md
@@ -5,7 +5,7 @@ This example demonstrates how to use libSQL with an in-memory SQLite database.
## Install Dependencies
```bash
-pip install libsql-experimental
+pip install libsql
```
## Running
diff --git a/examples/memory/main.py b/examples/memory/main.py
index 1e66b46..8084f6c 100644
--- a/examples/memory/main.py
+++ b/examples/memory/main.py
@@ -1,4 +1,4 @@
-import libsql_experimental as libsql
+import libsql
conn = libsql.connect(":memory:")
cur = conn.cursor()
diff --git a/examples/remote/README.md b/examples/remote/README.md
index 61919f4..99996a7 100644
--- a/examples/remote/README.md
+++ b/examples/remote/README.md
@@ -5,7 +5,7 @@ This example demonstrates how to use libSQL with a remote database.
## Install Dependencies
```bash
-pip install libsql-experimental
+pip install libsql
```
## Running
diff --git a/examples/remote/main.py b/examples/remote/main.py
index 8c68671..f884518 100644
--- a/examples/remote/main.py
+++ b/examples/remote/main.py
@@ -1,4 +1,4 @@
-import libsql_experimental as libsql
+import libsql
import os
url = os.getenv("TURSO_DATABASE_URL")
diff --git a/examples/remote_connect.py b/examples/remote_connect.py
index 056fd8e..801d545 100644
--- a/examples/remote_connect.py
+++ b/examples/remote_connect.py
@@ -5,7 +5,7 @@
"""
import os
-import libsql_experimental as libsql
+import libsql
print(F"connecting to {os.getenv('LIBSQL_URL')}")
conn = libsql.connect(database=os.getenv('LIBSQL_URL'),
diff --git a/examples/sqlalchemy/dialect.py b/examples/sqlalchemy/dialect.py
index 1bd396d..0e45cf9 100644
--- a/examples/sqlalchemy/dialect.py
+++ b/examples/sqlalchemy/dialect.py
@@ -53,12 +53,12 @@ class SQLiteDialect_libsql(SQLiteDialect_pysqlite):
@classmethod
def import_dbapi(cls):
- import libsql_experimental as libsql
+ import libsql
return libsql
def on_connect(self):
- import libsql_experimental as libsql
+ import libsql
sqlite3_connect = super().on_connect()
diff --git a/examples/sync/README.md b/examples/sync/README.md
index 36c97dc..dcd5a7c 100644
--- a/examples/sync/README.md
+++ b/examples/sync/README.md
@@ -5,7 +5,7 @@ This example demonstrates how to use libSQL with a synced database (local file s
## Install Dependencies
```bash
-pip install libsql-experimental
+pip install libsql
```
## Running
diff --git a/examples/sync/main.py b/examples/sync/main.py
index 0b1a524..b937016 100644
--- a/examples/sync/main.py
+++ b/examples/sync/main.py
@@ -1,4 +1,4 @@
-import libsql_experimental as libsql
+import libsql
import os
url = os.getenv("TURSO_DATABASE_URL")
diff --git a/examples/sync_write.py b/examples/sync_write.py
index 853aa83..9fd93b1 100644
--- a/examples/sync_write.py
+++ b/examples/sync_write.py
@@ -5,7 +5,7 @@
"""
import os
-import libsql_experimental as libsql
+import libsql
print(F"syncing with {os.getenv('LIBSQL_URL')}")
conn = libsql.connect("hello.db", sync_url=os.getenv("LIBSQL_URL"),
diff --git a/examples/transaction/README.md b/examples/transaction/README.md
index 54bb441..965a6d5 100644
--- a/examples/transaction/README.md
+++ b/examples/transaction/README.md
@@ -5,7 +5,7 @@ This example demonstrates how to create and use an encrypted SQLite database wit
## Install Dependencies
```bash
-pip install libsql-experimental
+pip install libsql
```
## Running
diff --git a/examples/transaction/main.py b/examples/transaction/main.py
index 4148cbc..c0f1810 100644
--- a/examples/transaction/main.py
+++ b/examples/transaction/main.py
@@ -1,4 +1,4 @@
-import libsql_experimental as libsql
+import libsql
conn = libsql.connect("local.db")
cur = conn.cursor()
diff --git a/examples/vector.py b/examples/vector.py
index 12a1ccf..1a99e02 100644
--- a/examples/vector.py
+++ b/examples/vector.py
@@ -1,4 +1,4 @@
-import libsql_experimental as libsql
+import libsql
conn = libsql.connect("vector.db")
diff --git a/examples/vector/README.md b/examples/vector/README.md
index db8a345..5e9ecff 100644
--- a/examples/vector/README.md
+++ b/examples/vector/README.md
@@ -5,7 +5,7 @@ This example demonstrates how to use libSQL vector search with a local database.
## Install Dependencies
```bash
-pip install libsql-experimental
+pip install libsql
```
## Running
diff --git a/examples/vector/main.py b/examples/vector/main.py
index b365b5f..2cd3d4f 100644
--- a/examples/vector/main.py
+++ b/examples/vector/main.py
@@ -1,4 +1,4 @@
-import libsql_experimental as libsql
+import libsql
conn = libsql.connect("local.db")
diff --git a/perf-libsql.py b/perf-libsql.py
index bbe06fe..744a233 100755
--- a/perf-libsql.py
+++ b/perf-libsql.py
@@ -1,9 +1,9 @@
#!/usr/bin/env python3
-import libsql_experimental
+import libsql
import pyperf
import time
-con = libsql_experimental.connect(":memory:")
+con = libsql.connect(":memory:")
cur = con.cursor()
def func():
diff --git a/pyproject.toml b/pyproject.toml
index ee603c7..5d95e0e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -3,8 +3,8 @@ requires = ["maturin>=1.1,<2.0"]
build-backend = "maturin"
[project]
-name = "libsql-experimental"
-version = "0.0.41"
+name = "libsql"
+version = "0.1.7"
requires-python = ">=3.7"
classifiers = [
"Programming Language :: Rust",
@@ -15,3 +15,4 @@ classifiers = [
[tool.maturin]
features = ["pyo3/extension-module"]
+module-name = "libsql"
diff --git a/src/lib.rs b/src/lib.rs
index 9a3e030..8cd8863 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -2,13 +2,25 @@ use ::libsql as libsql_core;
use pyo3::create_exception;
use pyo3::exceptions::PyValueError;
use pyo3::prelude::*;
-use pyo3::types::{PyList, PyTuple};
-use std::cell::{OnceCell, RefCell};
+use pyo3::types::{PyList, PyModule, PyTuple};
+use std::cell::RefCell;
use std::sync::{Arc, OnceLock};
+use std::time::Duration;
use tokio::runtime::{Handle, Runtime};
const LEGACY_TRANSACTION_CONTROL: i32 = -1;
+#[derive(Clone)]
+enum ListOrTuple<'py> {
+ List(Bound<'py, PyList>),
+ Tuple(Bound<'py, PyTuple>),
+}
+
+struct ListOrTupleIterator<'py> {
+ index: usize,
+ inner: ListOrTuple<'py>,
+}
+
fn rt() -> Handle {
static RT: OnceLock<Runtime> = OnceLock::new();
@@ -25,7 +37,7 @@ fn rt() -> Handle {
fn to_py_err(error: libsql_core::errors::Error) -> PyErr {
let msg = match error {
- libsql::Error::SqliteFailure(_, err) => err,
+ libsql_core::Error::SqliteFailure(_, err) => err,
_ => error.to_string(),
};
PyValueError::new_err(msg)
@@ -37,13 +49,14 @@ fn is_remote_path(path: &str) -> bool {
#[pyfunction]
#[cfg(not(Py_3_12))]
-#[pyo3(signature = (database, isolation_level="DEFERRED".to_string(), check_same_thread=true, uri=false, sync_url=None, sync_interval=None, auth_token="", encryption_key=None))]
+#[pyo3(signature = (database, timeout=5.0, isolation_level="DEFERRED".to_string(), _check_same_thread=true, _uri=false, sync_url=None, sync_interval=None, auth_token="", encryption_key=None))]
fn connect(
py: Python<'_>,
database: String,
+ timeout: f64,
isolation_level: Option<String>,
- check_same_thread: bool,
- uri: bool,
+ _check_same_thread: bool,
+ _uri: bool,
sync_url: Option<String>,
sync_interval: Option<f64>,
auth_token: &str,
@@ -52,9 +65,10 @@ fn connect(
let conn = _connect_core(
py,
database,
+ timeout,
isolation_level,
- check_same_thread,
- uri,
+ _check_same_thread,
+ _uri,
sync_url,
sync_interval,
auth_token,
@@ -65,13 +79,14 @@ fn connect(
#[pyfunction]
#[cfg(Py_3_12)]
-#[pyo3(signature = (database, isolation_level="DEFERRED".to_string(), check_same_thread=true, uri=false, sync_url=None, sync_interval=None, auth_token="", encryption_key=None, autocommit = LEGACY_TRANSACTION_CONTROL))]
+#[pyo3(signature = (database, timeout=5.0, isolation_level="DEFERRED".to_string(), _check_same_thread=true, _uri=false, sync_url=None, sync_interval=None, auth_token="", encryption_key=None, autocommit = LEGACY_TRANSACTION_CONTROL))]
fn connect(
py: Python<'_>,
database: String,
+ timeout: f64,
isolation_level: Option<String>,
- check_same_thread: bool,
- uri: bool,
+ _check_same_thread: bool,
+ _uri: bool,
sync_url: Option<String>,
sync_interval: Option<f64>,
auth_token: &str,
@@ -81,9 +96,10 @@ fn connect(
let mut conn = _connect_core(
py,
database,
+ timeout,
isolation_level.clone(),
- check_same_thread,
- uri,
+ _check_same_thread,
+ _uri,
sync_url,
sync_interval,
auth_token,
@@ -104,9 +120,10 @@ fn connect(
fn _connect_core(
py: Python<'_>,
database: String,
+ timeout: f64,
isolation_level: Option<String>,
- check_same_thread: bool,
- uri: bool,
+ _check_same_thread: bool,
+ _uri: bool,
sync_url: Option<String>,
sync_interval: Option<f64>,
auth_token: &str,
@@ -117,34 +134,37 @@ fn _connect_core(
let rt = rt();
let encryption_config = match encryption_key {
Some(key) => {
- let cipher = libsql::Cipher::default();
- let encryption_config = libsql::EncryptionConfig::new(cipher, key.into());
+ let cipher = libsql_core::Cipher::default();
+ let encryption_config = libsql_core::EncryptionConfig::new(cipher, key.into());
Some(encryption_config)
}
None => None,
};
let db = if is_remote_path(&database) {
- let result = libsql::Database::open_remote_internal(database.clone(), auth_token, ver);
+ let result = libsql_core::Database::open_remote_internal(database.clone(), auth_token, ver);
result.map_err(to_py_err)?
} else {
match sync_url {
Some(sync_url) => {
let sync_interval = sync_interval.map(|i| std::time::Duration::from_secs_f64(i));
- let fut = libsql::Database::open_with_remote_sync_internal(
+ let mut builder = libsql_core::Builder::new_remote_replica(
database,
sync_url,
- auth_token,
- Some(ver),
- true,
- encryption_config,
- sync_interval,
+ auth_token.to_string(),
);
+ if let Some(encryption_config) = encryption_config {
+ builder = builder.encryption_config(encryption_config);
+ }
+ if let Some(sync_interval) = sync_interval {
+ builder = builder.sync_interval(sync_interval);
+ }
+ let fut = builder.build();
tokio::pin!(fut);
let result = rt.block_on(check_signals(py, fut));
result.map_err(to_py_err)?
}
None => {
- let mut builder = libsql::Builder::new_local(database);
+ let mut builder = libsql_core::Builder::new_local(database);
if let Some(config) = encryption_config {
builder = builder.encryption_config(config);
}
@@ -158,12 +178,14 @@ fn _connect_core(
let autocommit = isolation_level.is_none() as i32;
let conn = db.connect().map_err(to_py_err)?;
+ let timeout = Duration::from_secs_f64(timeout);
+ conn.busy_timeout(timeout).map_err(to_py_err)?;
Ok(Connection {
db,
- conn: Arc::new(ConnectionGuard {
+ conn: RefCell::new(Some(Arc::new(ConnectionGuard {
conn: Some(conn),
handle: rt.clone(),
- }),
+ }))),
isolation_level,
autocommit,
})
@@ -199,20 +221,26 @@ impl Drop for ConnectionGuard {
#[pyclass]
pub struct Connection {
db: libsql_core::Database,
- conn: Arc<ConnectionGuard>,
+ conn: RefCell<Option<Arc<ConnectionGuard>>>,
isolation_level: Option<String>,
autocommit: i32,
}
// SAFETY: The libsql crate guarantees that `Connection` is thread-safe.
unsafe impl Send for Connection {}
+unsafe impl Sync for Connection {}
#[pymethods]
impl Connection {
+ fn close(self_: PyRef<'_, Self>, _py: Python<'_>) -> PyResult<()> {
+ self_.conn.replace(None);
+ Ok(())
+ }
+
fn cursor(&self) -> PyResult<Cursor> {
Ok(Cursor {
arraysize: 1,
- conn: self.conn.clone(),
+ conn: RefCell::new(Some(self.conn.borrow().as_ref().unwrap().clone())),
stmt: RefCell::new(None),
rows: RefCell::new(None),
rowcount: RefCell::new(0),
@@ -235,40 +263,58 @@ impl Connection {
fn commit(self_: PyRef<'_, Self>) -> PyResult<()> {
// TODO: Switch to libSQL transaction API
- if !self_.conn.is_autocommit() {
- rt().block_on(async { self_.conn.execute("COMMIT", ()).await })
- .map_err(to_py_err)?;
+ if !self_.conn.borrow().as_ref().unwrap().is_autocommit() {
+ rt().block_on(async {
+ self_
+ .conn
+ .borrow()
+ .as_ref()
+ .unwrap()
+ .execute("COMMIT", ())
+ .await
+ })
+ .map_err(to_py_err)?;
}
Ok(())
}
fn rollback(self_: PyRef<'_, Self>) -> PyResult<()> {
// TODO: Switch to libSQL transaction API
- if !self_.conn.is_autocommit() {
- rt().block_on(async { self_.conn.execute("ROLLBACK", ()).await })
- .map_err(to_py_err)?;
+ if !self_.conn.borrow().as_ref().unwrap().is_autocommit() {
+ rt().block_on(async {
+ self_
+ .conn
+ .borrow()
+ .as_ref()
+ .unwrap()
+ .execute("ROLLBACK", ())
+ .await
+ })
+ .map_err(to_py_err)?;
}
Ok(())
}
+ #[pyo3(signature = (sql, parameters=None))]
fn execute(
self_: PyRef<'_, Self>,
sql: String,
- parameters: Option<&PyTuple>,
+ parameters: Option<ListOrTuple<'_>>,
) -> PyResult<Cursor> {
let cursor = Connection::cursor(&self_)?;
rt().block_on(async { execute(&cursor, sql, parameters).await })?;
Ok(cursor)
}
+ #[pyo3(signature = (sql, parameters=None))]
fn executemany(
self_: PyRef<'_, Self>,
sql: String,
- parameters: Option<&PyList>,
+ parameters: Option<&Bound<'_, PyList>>,
) -> PyResult<Cursor> {
let cursor = Connection::cursor(&self_)?;
for parameters in parameters.unwrap().iter() {
- let parameters = parameters.extract::<&PyTuple>()?;
+ let parameters = parameters.extract::<ListOrTuple>()?;
rt().block_on(async { execute(&cursor, sql.clone(), Some(parameters)).await })?;
}
Ok(cursor)
@@ -276,7 +322,15 @@ impl Connection {
fn executescript(self_: PyRef<'_, Self>, script: String) -> PyResult<()> {
let _ = rt()
- .block_on(async { self_.conn.execute_batch(&script).await })
+ .block_on(async {
+ self_
+ .conn
+ .borrow()
+ .as_ref()
+ .unwrap()
+ .execute_batch(&script)
+ .await
+ })
.map_err(to_py_err);
Ok(())
}
@@ -290,9 +344,12 @@ impl Connection {
fn in_transaction(self_: PyRef<'_, Self>) -> PyResult<bool> {
#[cfg(Py_3_12)]
{
- return Ok(!self_.conn.is_autocommit() || self_.autocommit == 0);
+ Ok(!self_.conn.borrow().as_ref().unwrap().is_autocommit() || self_.autocommit == 0)
+ }
+ #[cfg(not(Py_3_12))]
+ {
+ Ok(!self_.conn.borrow().as_ref().unwrap().is_autocommit())
}
- Ok(!self_.conn.is_autocommit())
}
#[getter]
@@ -312,13 +369,34 @@ impl Connection {
self_.autocommit = autocommit;
Ok(())
}
+
+ fn __enter__(slf: PyRef<'_, Self>) -> PyResult<PyRef<'_, Self>> {
+ Ok(slf)
+ }
+
+ #[pyo3(signature = (exc_type=None, _exc_val=None, _exc_tb=None))]
+ fn __exit__(
+ self_: PyRef<'_, Self>,
+ exc_type: Option<&Bound<'_, PyAny>>,
+ _exc_val: Option<&Bound<'_, PyAny>>,
+ _exc_tb: Option<&Bound<'_, PyAny>>,
+ ) -> PyResult<bool> {
+ if exc_type.is_none() {
+ // Commit on clean exit
+ Connection::commit(self_)?;
+ } else {
+ // Rollback on error
+ Connection::rollback(self_)?;
+ }
+ Ok(false) // Always propagate exceptions
+ }
}
#[pyclass]
pub struct Cursor {
#[pyo3(get, set)]
arraysize: usize,
- conn: Arc<ConnectionGuard>,
+ conn: RefCell<Option<Arc<ConnectionGuard>>>,
stmt: RefCell<Option<libsql_core::Statement>>,
rows: RefCell<Option<libsql_core::Rows>>,
rowcount: RefCell<i64>,
@@ -329,25 +407,47 @@ pub struct Cursor {
// SAFETY: The libsql crate guarantees that `Connection` is thread-safe.
unsafe impl Send for Cursor {}
+unsafe impl Sync for Cursor {}
+
+impl Drop for Cursor {
+ fn drop(&mut self) {
+ let _enter = rt().enter();
+ self.conn.replace(None);
+ self.stmt.replace(None);
+ self.rows.replace(None);
+ }
+}
#[pymethods]
impl Cursor {
+ fn close(self_: PyRef<'_, Self>) -> PyResult<()> {
+ rt().block_on(async {
+ let cursor: &Cursor = &self_;
+ cursor.conn.replace(None);
+ cursor.stmt.replace(None);
+ cursor.rows.replace(None);
+ });
+ Ok(())
+ }
+
+ #[pyo3(signature = (sql, parameters=None))]
fn execute<'a>(
self_: PyRef<'a, Self>,
sql: String,
- parameters: Option<&PyTuple>,
+ parameters: Option<ListOrTuple<'_>>,
) -> PyResult<PyRef<'a, Self>> {
rt().block_on(async { execute(&self_, sql, parameters).await })?;
Ok(self_)
}
+ #[pyo3(signature = (sql, parameters=None))]
fn executemany<'a>(
self_: PyRef<'a, Self>,
sql: String,
- parameters: Option<&PyList>,
+ parameters: Option<&Bound<'_, PyList>>,
) -> PyResult<PyRef<'a, Self>> {
for parameters in parameters.unwrap().iter() {
- let parameters = parameters.extract::<&PyTuple>()?;
+ let parameters = parameters.extract::<ListOrTuple>()?;
rt().block_on(async { execute(&self_, sql.clone(), Some(parameters)).await })?;
}
Ok(self_)
@@ -357,13 +457,21 @@ impl Cursor {
self_: PyRef<'a, Self>,
script: String,
) -> PyResult<PyRef<'a, Self>> {
- rt().block_on(async { self_.conn.execute_batch(&script).await })
- .map_err(to_py_err)?;
+ rt().block_on(async {
+ self_
+ .conn
+ .borrow()
+ .as_ref()
+ .unwrap()
+ .execute_batch(&script)
+ .await
+ })
+ .map_err(to_py_err)?;
Ok(self_)
}
#[getter]
- fn description(self_: PyRef<'_, Self>) -> PyResult<Option<&PyTuple>> {
+ fn description(self_: PyRef<'_, Self>) -> PyResult<Option<Bound<'_, PyTuple>>> {
let stmt = self_.stmt.borrow();
let mut elements: Vec<Py<PyAny>> = vec![];
match stmt.as_ref() {
@@ -379,17 +487,18 @@ impl Cursor {
self_.py().None(),
self_.py().None(),
)
- .to_object(self_.py());
- elements.push(element);
+ .into_pyobject(self_.py())
+ .unwrap();
+ elements.push(element.into());
}
- let elements = PyTuple::new(self_.py(), elements);
+ let elements = PyTuple::new(self_.py(), elements)?;
Ok(Some(elements))
}
None => Ok(None),
}
}
- fn fetchone(self_: PyRef<'_, Self>) -> PyResult<Option<&PyTuple>> {
+ fn fetchone(self_: PyRef<'_, Self>) -> PyResult<Option<Bound<'_, PyTuple>>> {
let mut rows = self_.rows.borrow_mut();
match rows.as_mut() {
Some(rows) => {
@@ -406,7 +515,8 @@ impl Cursor {
}
}
- fn fetchmany(self_: PyRef<'_, Self>, size: Option<i64>) -> PyResult<Option<&PyList>> {
+ #[pyo3(signature = (size=None))]
+ fn fetchmany(self_: PyRef<'_, Self>, size: Option<i64>) -> PyResult<Option<Bound<'_, PyList>>> {
let mut rows = self_.rows.borrow_mut();
match rows.as_mut() {
Some(rows) => {
@@ -432,13 +542,13 @@ impl Cursor {
}
}
}
- Ok(Some(PyList::new(self_.py(), elements)))
+ Ok(Some(PyList::new(self_.py(), elements)?))
}
None => Ok(None),
}
}
- fn fetchall(self_: PyRef<'_, Self>) -> PyResult<Option<&PyList>> {
+ fn fetchall(self_: PyRef<'_, Self>) -> PyResult<Option<Bound<'_, PyList>>> {
let mut rows = self_.rows.borrow_mut();
match rows.as_mut() {
Some(rows) => {
@@ -455,7 +565,7 @@ impl Cursor {
None => break,
}
}
- Ok(Some(PyList::new(self_.py(), elements)))
+ Ok(Some(PyList::new(self_.py(), elements)?))
}
None => Ok(None),
}
@@ -465,7 +575,9 @@ impl Cursor {
fn lastrowid(self_: PyRef<'_, Self>) -> PyResult<Option<i64>> {
let stmt = self_.stmt.borrow();
match stmt.as_ref() {
- Some(_) => Ok(Some(self_.conn.last_insert_rowid())),
+ Some(_) => Ok(Some(
+ self_.conn.borrow().as_ref().unwrap().last_insert_rowid(),
+ )),
None => Ok(None),
}
}
@@ -474,11 +586,6 @@ impl Cursor {
fn rowcount(self_: PyRef<'_, Self>) -> PyResult<i64> {
Ok(*self_.rowcount.borrow())
}
-
- fn close(_self: PyRef<'_, Self>) -> PyResult<()> {
- // TODO
- Ok(())
- }
}
async fn begin_transaction(conn: &libsql_core::Connection) -> PyResult<()> {
@@ -486,11 +593,18 @@ async fn begin_transaction(conn: &libsql_core::Connection) -> PyResult<()> {
Ok(())
}
-async fn execute(cursor: &Cursor, sql: String, parameters: Option<&PyTuple>) -> PyResult<()> {
+async fn execute<'py>(
+ cursor: &Cursor,
+ sql: String,
+ parameters: Option<ListOrTuple<'py>>,
+) -> PyResult<()> {
+ if cursor.conn.borrow().as_ref().is_none() {
+ return Err(PyValueError::new_err("Connection already closed"));
+ }
let stmt_is_dml = stmt_is_dml(&sql);
let autocommit = determine_autocommit(cursor);
- if !autocommit && stmt_is_dml && cursor.conn.is_autocommit() {
- begin_transaction(&cursor.conn).await?;
+ if !autocommit && stmt_is_dml && cursor.conn.borrow().as_ref().unwrap().is_autocommit() {
+ begin_transaction(&cursor.conn.borrow().as_ref().unwrap()).await?;
}
let params = match parameters {
Some(parameters) => {
@@ -507,7 +621,10 @@ async fn execute(cursor: &Cursor, sql: String, parameters: Option<&PyTuple>) ->
} else if let Ok(value) = param.extract::<&[u8]>() {
libsql_core::Value::Blob(value.to_vec())
} else {
- return Err(PyValueError::new_err("Unsupported parameter type"));
+ return Err(PyValueError::new_err(format!(
+ "Unsupported parameter type {}",
+ param.to_string()
+ )));
};
params.push(param);
}
@@ -515,16 +632,27 @@ async fn execute(cursor: &Cursor, sql: String, parameters: Option<&PyTuple>) ->
}
None => libsql_core::params::Params::None,
};
- let mut stmt = cursor.conn.prepare(&sql).await.map_err(to_py_err)?;
- let rows = stmt.query(params).await.map_err(to_py_err)?;
- if stmt_is_dml {
- let mut rowcount = cursor.rowcount.borrow_mut();
- *rowcount += cursor.conn.changes() as i64;
+ let mut stmt = cursor
+ .conn
+ .borrow()
+ .as_ref()
+ .unwrap()
+ .prepare(&sql)
+ .await
+ .map_err(to_py_err)?;
+
+ if stmt.columns().iter().len() > 0 {
+ let rows = stmt.query(params).await.map_err(to_py_err)?;
+ cursor.rows.replace(Some(rows));
} else {
- cursor.rowcount.replace(-1);
+ stmt.execute(params).await.map_err(to_py_err)?;
+ cursor.rows.replace(None);
}
+
+ let mut rowcount = cursor.rowcount.borrow_mut();
+ *rowcount += cursor.conn.borrow().as_ref().unwrap().changes() as i64;
+
cursor.stmt.replace(Some(stmt));
- cursor.rows.replace(Some(rows));
Ok(())
}
@@ -549,32 +677,74 @@ fn stmt_is_dml(sql: &str) -> bool {
sql.starts_with("INSERT") || sql.starts_with("UPDATE") || sql.starts_with("DELETE")
}
-fn convert_row(py: Python, row: libsql_core::Row, column_count: i32) -> PyResult<&PyTuple> {
+fn convert_row(
+ py: Python,
+ row: libsql_core::Row,
+ column_count: i32,
+) -> PyResult<Bound<'_, PyTuple>> {
let mut elements: Vec<Py<PyAny>> = vec![];
for col_idx in 0..column_count {
let libsql_value = row.get_value(col_idx).map_err(to_py_err)?;
let value = match libsql_value {
libsql_core::Value::Integer(v) => {
let value = v as i64;
- value.into_py(py)
+ value.into_pyobject(py).unwrap().into()
}
- libsql_core::Value::Real(v) => v.into_py(py),
- libsql_core::Value::Text(v) => v.into_py(py),
+ libsql_core::Value::Real(v) => v.into_pyobject(py).unwrap().into(),
+ libsql_core::Value::Text(v) => v.into_pyobject(py).unwrap().into(),
libsql_core::Value::Blob(v) => {
let value = v.as_slice();
- value.into_py(py)
+ value.into_pyobject(py).unwrap().into()
}
libsql_core::Value::Null => py.None(),
};
elements.push(value);
}
- Ok(PyTuple::new(py, elements))
+ Ok(PyTuple::new(py, elements)?)
}
-create_exception!(libsql_experimental, Error, pyo3::exceptions::PyException);
+create_exception!(libsql, Error, pyo3::exceptions::PyException);
+impl<'py> FromPyObject<'py> for ListOrTuple<'py> {
+ fn extract_bound(ob: &Bound<'py, PyAny>) -> PyResult<Self> {
+ if let Ok(list) = ob.downcast::<PyList>() {
+ Ok(ListOrTuple::List(list.clone()))
+ } else if let Ok(tuple) = ob.downcast::<PyTuple>() {
+ Ok(ListOrTuple::Tuple(tuple.clone()))
+ } else {
+ Err(PyValueError::new_err(
+ "Expected a list or tuple for parameters",
+ ))
+ }
+ }
+}
+
+impl<'py> ListOrTuple<'py> {
+ pub fn iter(&self) -> ListOrTupleIterator<'py> {
+ ListOrTupleIterator {
+ index: 0,
+ inner: self.clone(),
+ }
+ }
+}
+
+impl<'py> Iterator for ListOrTupleIterator<'py> {
+ type Item = Bound<'py, PyAny>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ let rv = match &self.inner {
+ ListOrTuple::List(list) => list.get_item(self.index),
+ ListOrTuple::Tuple(tuple) => tuple.get_item(self.index),
+ };
+
+ rv.ok().map(|item| {
+ self.index += 1;
+ item
+ })
+ }
+}
#[pymodule]
-fn libsql_experimental(py: Python, m: &PyModule) -> PyResult<()> {
+fn libsql(py: Python<'_>, m: &Bound<'_, PyModule>) -> PyResult<()> {
let _ = tracing_subscriber::fmt::try_init();
m.add("LEGACY_TRANSACTION_CONTROL", LEGACY_TRANSACTION_CONTROL)?;
m.add("paramstyle", "qmark")?;
diff --git a/tests/test_suite.py b/tests/test_suite.py
index eda5203..fb83a63 100644
--- a/tests/test_suite.py
+++ b/tests/test_suite.py
@@ -2,8 +2,21 @@
import sqlite3
import sys
-import libsql_experimental
+import libsql
import pytest
+import tempfile
+
+
+@pytest.mark.parametrize("provider", ["libsql", "sqlite"])
+def test_connection_timeout(provider):
+ conn = connect(provider, ":memory:", timeout=1.0)
+ conn.close()
+
+
+@pytest.mark.parametrize("provider", ["libsql", "sqlite"])
+def test_connection_close(provider):
+ conn = connect(provider, ":memory:")
+ conn.close()
@pytest.mark.parametrize("provider", ["libsql", "sqlite"])
@@ -13,6 +26,9 @@ def test_execute(provider):
conn.execute("INSERT INTO users VALUES (1, 'alice@example.com')")
res = conn.execute("SELECT * FROM users")
assert (1, "alice@example.com") == res.fetchone()
+ # allow lists for parameters as well
+ res = conn.execute("SELECT * FROM users WHERE id = ?", [1])
+ assert (1, "alice@example.com") == res.fetchone()
@pytest.mark.parametrize("provider", ["libsql", "sqlite"])
@@ -25,6 +41,20 @@ def test_cursor_execute(provider):
assert (1, "alice@example.com") == res.fetchone()
+@pytest.mark.parametrize("provider", ["libsql", "sqlite"])
+def test_cursor_close(provider):
+ conn = connect(provider, ":memory:")
+ cur = conn.cursor()
+ cur.execute("CREATE TABLE users (id INTEGER, email TEXT)")
+ cur.execute("INSERT INTO users VALUES (1, 'alice@example.com')")
+ cur.execute("INSERT INTO users VALUES (2, 'bob@example.com')")
+ res = cur.execute("SELECT * FROM users")
+ assert [(1, "alice@example.com"), (2, "bob@example.com")] == res.fetchall()
+ cur.close()
+ with pytest.raises(Exception):
+ cur.execute("SELECT * FROM users")
+
+
@pytest.mark.parametrize("provider", ["libsql", "sqlite"])
def test_executemany(provider):
conn = connect(provider, ":memory:")
@@ -146,7 +176,7 @@ def test_commit_and_rollback(provider):
@pytest.mark.parametrize("provider", ["libsql", "sqlite"])
def test_autocommit(provider):
- conn = connect(provider, ":memory:", None)
+ conn = connect(provider, ":memory:", timeout=4, isolation_level=None)
assert conn.isolation_level == None
assert conn.in_transaction == False
cur = conn.cursor()
@@ -165,7 +195,7 @@ def test_autocommit(provider):
@pytest.mark.skipif(sys.version_info < (3, 12), reason="requires python3.12 or higher")
def test_connection_autocommit(provider):
# Test LEGACY_TRANSACTION_CONTROL (-1)
- conn = connect(provider, ":memory:", None, autocommit=-1)
+ conn = connect(provider, ":memory:", timeout=5, isolation_level=None, autocommit=-1)
assert conn.isolation_level is None
assert conn.autocommit == -1
cur = conn.cursor()
@@ -176,7 +206,9 @@ def test_connection_autocommit(provider):
res = cur.execute("SELECT * FROM users")
assert (1, "alice@example.com") == res.fetchone()
- conn = connect(provider, ":memory:", isolation_level="DEFERRED", autocommit=-1)
+ conn = connect(
+ provider, ":memory:", timeout=5, isolation_level="DEFERRED", autocommit=-1
+ )
assert conn.isolation_level == "DEFERRED"
assert conn.autocommit == -1
cur = conn.cursor()
@@ -188,7 +220,9 @@ def test_connection_autocommit(provider):
assert (1, "alice@example.com") == res.fetchone()
# Test autocommit Enabled (True)
- conn = connect(provider, ":memory:", None, autocommit=True)
+ conn = connect(
+ provider, ":memory:", timeout=5, isolation_level=None, autocommit=True
+ )
assert conn.isolation_level == None
assert conn.autocommit == True
cur = conn.cursor()
@@ -199,7 +233,9 @@ def test_connection_autocommit(provider):
res = cur.execute("SELECT * FROM users")
assert (1, "bob@example.com") == res.fetchone()
- conn = connect(provider, ":memory:", isolation_level="DEFERRED", autocommit=True)
+ conn = connect(
+ provider, ":memory:", timeout=5, isolation_level="DEFERRED", autocommit=True
+ )
assert conn.isolation_level == "DEFERRED"
assert conn.autocommit == True
cur = conn.cursor()
@@ -211,7 +247,9 @@ def test_connection_autocommit(provider):
assert (1, "bob@example.com") == res.fetchone()
# Test autocommit Disabled (False)
- conn = connect(provider, ":memory:", isolation_level="DEFERRED", autocommit=False)
+ conn = connect(
+ provider, ":memory:", timeout=5, isolation_level="DEFERRED", autocommit=False
+ )
assert conn.isolation_level == "DEFERRED"
assert conn.autocommit == False
cur = conn.cursor()
@@ -226,7 +264,7 @@ def test_connection_autocommit(provider):
# Test invalid autocommit value (should raise an error)
with pytest.raises(ValueError):
- connect(provider, ":memory:", None, autocommit=999)
+ connect(provider, ":memory:", timeout=5, isolation_level=None, autocommit=999)
@pytest.mark.parametrize("provider", ["libsql", "sqlite"])
@@ -238,6 +276,7 @@ def test_params(provider):
res = cur.execute("SELECT * FROM users")
assert (1, "alice@example.com") == res.fetchone()
+
@pytest.mark.parametrize("provider", ["libsql", "sqlite"])
def test_none_param(provider):
conn = connect(provider, ":memory:")
@@ -250,6 +289,7 @@ def test_none_param(provider):
assert results[0] == (1, None)
assert results[1] == (2, "alice@example.com")
+
@pytest.mark.parametrize("provider", ["libsql", "sqlite"])
def test_fetchmany(provider):
conn = connect(provider, ":memory:")
@@ -299,7 +339,195 @@ def test_int64(provider):
assert [(1, 1099511627776)] == res.fetchall()
-def connect(provider, database, isolation_level="DEFERRED", autocommit=-1):
+@pytest.mark.parametrize("provider", ["libsql", "sqlite"])
+def test_context_manager_commit(provider):
+ """Test that context manager commits on clean exit"""
+ conn = connect(provider, ":memory:")
+ with conn as c:
+ c.execute("CREATE TABLE t(x)")
+ c.execute("INSERT INTO t VALUES (1)")
+ # Changes should be committed
+ cur = conn.cursor()
+ cur.execute("SELECT COUNT(*) FROM t")
+ assert cur.fetchone()[0] == 1
+
+
+@pytest.mark.parametrize("provider", ["libsql", "sqlite"])
+def test_context_manager_rollback(provider):
+ """Test that context manager rolls back on exception"""
+ conn = connect(provider, ":memory:")
+ try:
+ with conn as c:
+ c.execute("CREATE TABLE t(x)")
+ c.execute("INSERT INTO t VALUES (1)")
+ raise ValueError("Test exception")
+ except ValueError:
+ pass
+ # Changes should be rolled back
+ cur = conn.cursor()
+ try:
+ cur.execute("SELECT COUNT(*) FROM t")
+ # If we get here, the table exists (rollback didn't work)
+ assert False, "Table should not exist after rollback"
+ except Exception:
+ # Table doesn't exist, which is what we expect after rollback
+ pass
+
+
+@pytest.mark.parametrize("provider", ["libsql", "sqlite"])
+def test_context_manager_autocommit(provider):
+ """Test that context manager works correctly with autocommit mode"""
+ conn = connect(provider, ":memory:", isolation_level=None) # autocommit mode
+ with conn as c:
+ c.execute("CREATE TABLE t(x)")
+ c.execute("INSERT INTO t VALUES (1)")
+ # In autocommit mode, changes are committed immediately
+ cur = conn.cursor()
+ cur.execute("SELECT COUNT(*) FROM t")
+ assert cur.fetchone()[0] == 1
+
+
+@pytest.mark.parametrize("provider", ["libsql", "sqlite"])
+def test_context_manager_nested(provider):
+ """Test nested context managers"""
+ conn = connect(provider, ":memory:")
+ with conn as c1:
+ c1.execute("CREATE TABLE t(x)")
+ c1.execute("INSERT INTO t VALUES (1)")
+ with conn as c2:
+ c2.execute("INSERT INTO t VALUES (2)")
+ # Inner context commits
+ cur = conn.cursor()
+ cur.execute("SELECT COUNT(*) FROM t")
+ assert cur.fetchone()[0] == 2
+ # Outer context also commits
+ cur = conn.cursor()
+ cur.execute("SELECT COUNT(*) FROM t")
+ assert cur.fetchone()[0] == 2
+
+
+@pytest.mark.parametrize("provider", ["libsql", "sqlite"])
+def test_context_manager_connection_reuse(provider):
+ """Test that connection remains usable after context manager exit"""
+ conn = connect(provider, ":memory:")
+
+ # First use with context manager
+ with conn as c:
+ c.execute("CREATE TABLE t(x)")
+ c.execute("INSERT INTO t VALUES (1)")
+
+ # Connection should still be valid
+ cur = conn.cursor()
+ cur.execute("INSERT INTO t VALUES (2)")
+ conn.commit()
+
+ # Verify both inserts worked
+ cur.execute("SELECT COUNT(*) FROM t")
+ assert cur.fetchone()[0] == 2
+
+ # Use context manager again
+ with conn as c:
+ c.execute("INSERT INTO t VALUES (3)")
+
+ # Final verification
+ cur.execute("SELECT COUNT(*) FROM t")
+ assert cur.fetchone()[0] == 3
+
+ conn.close()
+
+
+@pytest.mark.parametrize("provider", ["libsql", "sqlite"])
+def test_context_manager_nested_exception(provider):
+ """Test exception handling in nested context managers"""
+ conn = connect(provider, ":memory:")
+
+ # Create table outside context
+ conn.execute("CREATE TABLE t(x)")
+ conn.commit()
+
+ # Test that nested context managers share the same transaction
+ # An exception in an inner context will roll back the entire transaction
+ try:
+ with conn as c1:
+ c1.execute("INSERT INTO t VALUES (1)")
+ try:
+ with conn as c2:
+ c2.execute("INSERT INTO t VALUES (2)")
+ raise ValueError("Inner exception")
+ except ValueError:
+ pass
+ # The inner rollback affects the entire transaction
+ # So value 1 is also rolled back
+ c1.execute("INSERT INTO t VALUES (3)")
+ except:
+ pass
+
+ # Only value 3 should be committed (1 and 2 were rolled back together)
+ cur = conn.cursor()
+ cur.execute("SELECT x FROM t ORDER BY x")
+ results = cur.fetchall()
+ assert results == [(3,)]
+
+ # Test outer exception after nested context commits
+ conn.execute("DROP TABLE t")
+ conn.execute("CREATE TABLE t(x)")
+ conn.commit()
+
+ try:
+ with conn as c1:
+ c1.execute("INSERT INTO t VALUES (10)")
+ with conn as c2:
+ c2.execute("INSERT INTO t VALUES (20)")
+ # Inner context will commit both values
+ # This will cause outer rollback but values are already committed
+ raise RuntimeError("Outer exception")
+ except RuntimeError:
+ pass
+
+ # Values 10 and 20 should be committed by inner context
+ cur.execute("SELECT COUNT(*) FROM t")
+ assert cur.fetchone()[0] == 2
+
+
+@pytest.mark.parametrize("provider", ["libsql", "sqlite"])
+def test_context_manager_manual_transaction_control(provider):
+ """Test mixing manual transaction control with context managers"""
+ conn = connect(provider, ":memory:")
+
+ with conn as c:
+ c.execute("CREATE TABLE t(x)")
+ c.execute("INSERT INTO t VALUES (1)")
+
+ # Manual commit within context
+ c.commit()
+
+ # Start new transaction
+ c.execute("INSERT INTO t VALUES (2)")
+ # This will be committed by context manager
+
+ # Both values should be present
+ cur = conn.cursor()
+ cur.execute("SELECT COUNT(*) FROM t")
+ assert cur.fetchone()[0] == 2
+
+ # Test manual rollback within context
+ with conn as c:
+ c.execute("INSERT INTO t VALUES (3)")
+
+ # Manual rollback
+ c.rollback()
+
+ # New transaction
+ c.execute("INSERT INTO t VALUES (4)")
+ # This will be committed by context manager
+
+ # Should have values 1, 2, and 4 (not 3)
+ cur.execute("SELECT x FROM t ORDER BY x")
+ results = cur.fetchall()
+ assert results == [(1,), (2,), (4,)]
+
+
+def connect(provider, database, timeout=5, isolation_level="DEFERRED", autocommit=-1):
if provider == "libsql-remote":
from urllib import request
@@ -309,27 +537,33 @@ def connect(provider, database, isolation_level="DEFERRED", autocommit=-1):
raise Exception("libsql-remote server is not running")
if res.getcode() != 200:
raise Exception("libsql-remote server is not running")
- return libsql_experimental.connect(
- database, sync_url="http://localhost:8080", auth_token=""
- )
+ return libsql.connect(database, sync_url="http://localhost:8080", auth_token="")
if provider == "libsql":
if sys.version_info < (3, 12):
- return libsql_experimental.connect(
- database, isolation_level=isolation_level
+ return libsql.connect(
+ database, timeout=timeout, isolation_level=isolation_level
)
else:
if autocommit == -1:
- autocommit = libsql_experimental.LEGACY_TRANSACTION_CONTROL
- return libsql_experimental.connect(
- database, isolation_level=isolation_level, autocommit=autocommit
+ autocommit = libsql.LEGACY_TRANSACTION_CONTROL
+ return libsql.connect(
+ database,
+ timeout=timeout,
+ isolation_level=isolation_level,
+ autocommit=autocommit,
)
if provider == "sqlite":
if sys.version_info < (3, 12):
- return sqlite3.connect(database, isolation_level=isolation_level)
+ return sqlite3.connect(
+ database, timeout=timeout, isolation_level=isolation_level
+ )
else:
if autocommit == -1:
autocommit = sqlite3.LEGACY_TRANSACTION_CONTROL
return sqlite3.connect(
- database, isolation_level=isolation_level, autocommit=autocommit
+ database,
+ timeout=timeout,
+ isolation_level=isolation_level,
+ autocommit=autocommit,
)
raise Exception(f"Provider `{provider}` is not supported")