diff --git a/.github/workflows/python-sdk.yml b/.github/workflows/python-sdk.yml index 8a24ca249..91491791e 100644 --- a/.github/workflows/python-sdk.yml +++ b/.github/workflows/python-sdk.yml @@ -1,6 +1,11 @@ name: deploy python sdk on: workflow_dispatch: + inputs: + deploy_to_pypi: + description: "Whether to deploy to PyPI (true) or TestPyPI (false)" + required: false + default: "false" jobs: deploy-python-sdk-linux: strategy: @@ -24,31 +29,33 @@ jobs: DEBIAN_FRONTEND: noninteractive TZ: Etc/UTC run: | - sudo apt update - sudo apt-get -y install software-properties-common - sudo add-apt-repository -y ppa:deadsnakes/ppa - sudo add-apt-repository -y ppa:apt-fast/stable - sudo apt update - sudo apt-get -y install apt-fast - sudo apt-fast -y install \ - python3.7 python3.7-dev \ - python3.8 python3.8-dev \ - python3.9 python3.9-dev \ - python3.10 python3.10-dev \ - python3.11 python3.11-dev \ - python3-pip \ - git - pip install maturin - pip install patchelf - - name: Create dist directory - run: mkdir ../../python/dist - - name: Build wheels - run: maturin build --release --strip -i python3.7 -i python3.8 -i python3.9 -i python3.10 -i python3.11 -o ../../python/pgml/dist -F python - - name: Deploy wheels + sudo apt update + sudo apt-get -y install software-properties-common + sudo add-apt-repository -y ppa:deadsnakes/ppa + sudo add-apt-repository -y ppa:apt-fast/stable + sudo apt update + sudo apt-get -y install apt-fast + sudo apt-fast -y install \ + python3.7 python3.7-dev \ + python3.8 python3.8-dev \ + python3.9 python3.9-dev \ + python3.10 python3.10-dev \ + python3.11 python3.11-dev \ + python3-pip \ + git + pip install maturin + pip install patchelf + - name: Build and deploy wheels to TestPyPI + if: github.event.inputs.deploy_to_pypi == 'false' + env: + MATURIN_PYPI_TOKEN: ${{ secrets.TEST_PYPI_API_TOKEN }} + run: maturin publish -r testpypi -i python3.7 -i python3.8 -i python3.9 -i python3.10 -i python3.11 --skip-existing -F python + - name: Build and 
deploy wheels to PyPI + if: github.event.inputs.deploy_to_pypi == 'true' env: MATURIN_PYPI_TOKEN: ${{ secrets.PYPI_API_TOKEN }} - working-directory: pgml-sdks/python/pgml - run: maturin upload --skip-existing dist/* + run: maturin publish -i python3.7 -i python3.8 -i python3.9 -i python3.10 -i python3.11 --skip-existing -F python + deploy-python-sdk-mac: runs-on: macos-latest defaults: @@ -72,15 +79,17 @@ jobs: brew install python@3.10 brew install python@3.11 pip3 install maturin - - name: Create dist directory - run: mkdir ../../python/dist - - name: Build wheels - run: maturin build --release --strip -i python3.8 -i python3.9 -i python3.10 -i python3.11 -o ../../python/pgml/dist -F python - - name: Deploy wheels + - name: Build and deploy wheels to TestPyPI + if: github.event.inputs.deploy_to_pypi == 'false' + env: + MATURIN_PYPI_TOKEN: ${{ secrets.TEST_PYPI_API_TOKEN }} + run: maturin publish -r testpypi -i python3.8 -i python3.9 -i python3.10 -i python3.11 --skip-existing -F python + - name: Build and deploy wheels to PyPI + if: github.event.inputs.deploy_to_pypi == 'true' env: MATURIN_PYPI_TOKEN: ${{ secrets.PYPI_API_TOKEN }} - working-directory: pgml-sdks/python/pgml - run: maturin upload --skip-existing dist/* + run: maturin publish -i python3.8 -i python3.9 -i python3.10 -i python3.11 --skip-existing -F python + deploy-python-sdk-windows: runs-on: windows-latest strategy: @@ -101,17 +110,18 @@ jobs: - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v4 with: - python-version: ${{ matrix.python-version }} + python-version: ${{ matrix.python-version }} - name: Install dependencies run: | python -m pip install --upgrade pip pip3 install maturin - - name: Create dist directory - run: mkdir ..\..\python\dist - - name: Build wheels - run: maturin build --release --strip -o ..\..\python\pgml\dist -F python - - name: Deploy wheels + - name: Build and deploy wheels to TestPyPI + if: github.event.inputs.deploy_to_pypi == 'false' + env: + 
MATURIN_PYPI_TOKEN: ${{ secrets.TEST_PYPI_API_TOKEN }} + run: maturin publish -r testpypi -i python3.8 -i python3.9 -i python3.10 -i python3.11 --skip-existing -F python + - name: Build and deploy wheels to PyPI + if: github.event.inputs.deploy_to_pypi == 'true' env: MATURIN_PYPI_TOKEN: ${{ secrets.PYPI_API_TOKEN }} - working-directory: pgml-sdks\python\pgml - run: maturin upload --skip-existing dist\* + run: maturin publish -i python3.8 -i python3.9 -i python3.10 -i python3.11 --skip-existing -F python diff --git a/pgml-extension/sql/pgml--2.7.6--2.7.7.sql b/pgml-extension/sql/pgml--2.7.6--2.7.7.sql new file mode 100644 index 000000000..fa19e5090 --- /dev/null +++ b/pgml-extension/sql/pgml--2.7.6--2.7.7.sql @@ -0,0 +1,8 @@ +-- Drop the constraint on snapshot_id for models +ALTER TABLE pgml.models ALTER COLUMN snapshot_id DROP NOT NULL; + +-- Add openai option to pgml.runtime +ALTER TYPE pgml.runtime ADD VALUE IF NOT EXISTS 'openai'; + +-- Add embedding option to pgml.task +ALTER TYPE pgml.task ADD VALUE IF NOT EXISTS 'embedding'; diff --git a/pgml-sdks/rust/pgml-macros/src/javascript.rs b/pgml-sdks/rust/pgml-macros/src/javascript.rs index a4b4c91d9..79f142bce 100644 --- a/pgml-sdks/rust/pgml-macros/src/javascript.rs +++ b/pgml-sdks/rust/pgml-macros/src/javascript.rs @@ -4,67 +4,7 @@ use std::io::{Read, Write}; use syn::{visit::Visit, DeriveInput, ItemImpl, Type}; use crate::common::{AttributeArgs, GetImplMethod}; -use crate::types::{GetSupportedType, OutputType, SupportedType}; - -pub fn generate_custom_into_js_result(parsed: DeriveInput) -> proc_macro::TokenStream { - let name = parsed.ident; - let fields_named = match parsed.data { - syn::Data::Struct(s) => match s.fields { - syn::Fields::Named(n) => n, - _ => panic!("custom_into_js proc_macro structs should only have named fields"), - }, - _ => panic!("custom_into_js proc_macro should only be used on structs"), - }; - - let mut sets = Vec::new(); - let mut interface = format!("\ninterface {} {{\n", name); - - 
fields_named.named.into_pairs().for_each(|p| { - let v = p.into_value(); - let name = v.ident.to_token_stream().to_string(); - let name_ident = v.ident; - sets.push(quote! { - let js_item = self.#name_ident.into_js_result(cx)?; - js_object.set(cx, #name, js_item)?; - }); - let ty = GetSupportedType::get_type(&v.ty); - let decleration = match &ty { - SupportedType::Option(o) => format!("{}?", get_typescript_type(o)), - _ => get_typescript_type(&ty), - }; - interface.push_str(&format!("\t{}: {},\n", name, decleration)); - }); - - interface.push('}'); - let mut file = OpenOptions::new() - .create(true) - .write(true) - .append(true) - .read(true) - .open("javascript/index.d.ts") - .unwrap(); - let mut contents = String::new(); - file.read_to_string(&mut contents) - .expect("Unable to read typescript decleration file"); - if !contents.contains(&interface) { - file.write_all(interface.as_bytes()) - .expect("Unable to write typescript decleration file"); - } - - let out = quote! { - #[cfg(feature = "javascript")] - impl IntoJsResult for #name { - type Output = neon::types::JsObject; - fn into_js_result<'a, 'b, 'c: 'b, C: neon::context::Context<'c>>(self, cx: &mut C) -> neon::result::JsResult<'b, Self::Output> { - use neon::object::Object; - let js_object = cx.empty_object(); - #(#sets)* - Ok(js_object) - } - } - }; - proc_macro::TokenStream::from(out) -} +use crate::types::{OutputType, SupportedType}; pub fn generate_javascript_derive(parsed: DeriveInput) -> proc_macro::TokenStream { let name_ident = format_ident!("{}Javascript", parsed.ident); @@ -73,21 +13,74 @@ pub fn generate_javascript_derive(parsed: DeriveInput) -> proc_macro::TokenStrea let expanded = quote! 
{ #[cfg(feature = "javascript")] pub struct #name_ident { - wrapped: #wrapped_type_ident + pub wrapped: std::boxed::Box<#wrapped_type_ident> } #[cfg(feature = "javascript")] impl From<#wrapped_type_ident> for #name_ident { fn from(w: #wrapped_type_ident) -> Self { Self { - wrapped: w, + wrapped: std::boxed::Box::new(w), + } + } + } + + #[cfg(feature = "javascript")] + impl From<#name_ident> for #wrapped_type_ident { + fn from(w: #name_ident) -> Self { + *w.wrapped + } + } + + #[cfg(feature = "javascript")] + impl FromJsType for #wrapped_type_ident { + type From = neon::types::JsValue; + fn from_js_type<'a, C: neon::context::Context<'a>>(cx: &mut C, arg: neon::handle::Handle) -> neon::result::NeonResult { + let arg: neon::handle::Handle = arg.downcast(cx).or_throw(cx)?; + use neon::prelude::*; + use core::ops::Deref; + let s: neon::handle::Handle> = arg.get(cx, "s")?; + Ok(*s.wrapped.clone()) + } + } + + #[cfg(feature = "javascript")] + impl FromJsType for &mut #wrapped_type_ident { + type From = neon::types::JsValue; + fn from_js_type<'a, C: neon::context::Context<'a>>(cx: &mut C, arg: neon::handle::Handle) -> neon::result::NeonResult { + use neon::prelude::*; + use core::ops::Deref; + let arg: neon::handle::Handle = arg.downcast(cx).or_throw(cx)?; + let s: neon::handle::Handle> = arg.get(cx, "s")?; + unsafe { + let ptr = &*s.wrapped as *const #wrapped_type_ident; + let ptr = ptr as *mut #wrapped_type_ident; + let boxed = Box::from_raw(ptr); + Ok(Box::leak(boxed)) + } + } + } + + #[cfg(feature = "javascript")] + impl FromJsType for & #wrapped_type_ident { + type From = neon::types::JsValue; + fn from_js_type<'a, C: neon::context::Context<'a>>(cx: &mut C, arg: neon::handle::Handle) -> neon::result::NeonResult { + use neon::prelude::*; + use core::ops::Deref; + let arg: neon::handle::Handle = arg.downcast(cx).or_throw(cx)?; + let s: neon::handle::Handle> = arg.get(cx, "s")?; + unsafe { + let ptr = &*s.wrapped as *const #wrapped_type_ident; + let ptr = ptr as *mut 
#wrapped_type_ident; + let boxed = Box::from_raw(ptr); + Ok(Box::leak(boxed)) } } } #[cfg(feature = "javascript")] impl IntoJsResult for #wrapped_type_ident { - type Output = neon::types::JsObject; + type Output = neon::types::JsValue; fn into_js_result<'a, 'b, 'c: 'b, C: neon::context::Context<'c>>(self, cx: &mut C) -> neon::result::JsResult<'b, Self::Output> { #name_ident::from(self).into_js_result(cx) } @@ -132,8 +125,10 @@ pub fn generate_javascript_methods( continue; } let method_ident = method.method_ident.clone(); - let (method_arguments, wrapper_arguments) = + + let (outer_prep_arguments, inner_prep_arguments, wrapper_arguments) = get_method_wrapper_arguments_javascript(&method); + let (output_type, convert_from) = match &method.output_type { OutputType::Result(v) | OutputType::Other(v) => { convert_output_type_convert_from_javascript(v, &method) @@ -141,14 +136,27 @@ pub fn generate_javascript_methods( OutputType::Default => (None, None), }; + let does_take_ownership_of_self = method + .receiver + .as_ref() + .is_some_and(|r| r.to_string().replace("mut", "").trim() == "self"); + let p1 = method_ident.to_string(); let p2 = method .method_arguments .iter() .filter(|a| !matches!(a.1, SupportedType::S)) .map(|a| match &a.1 { - SupportedType::Option(o) => format!("{}?: {}", a.0, get_typescript_type(o)), - _ => format!("{}: {}", a.0, get_typescript_type(&a.1)), + SupportedType::Option(o) => format!( + "{}?: {}", + a.0.replace("mut", "").trim(), + get_typescript_type(o) + ), + _ => format!( + "{}: {}", + a.0.replace("mut", "").trim(), + get_typescript_type(&a.1) + ), }) .collect::>() .join(", "); @@ -174,18 +182,43 @@ pub fn generate_javascript_methods( let signature = quote! { pub fn #method_ident<'a>(mut cx: neon::context::FunctionContext<'a>) -> #output_type }; - let prep_arguments = if let Some(_r) = &method.receiver { + + let outer_prepared = if let Some(_r) = &method.receiver { quote! 
{ use core::ops::Deref; - let this = cx.this(); - let s: neon::handle::Handle>> = this.get(&mut cx, "s")?; - let wrapped = (*s).deref().borrow(); - let wrapped = wrapped.wrapped.clone(); - #(#method_arguments)* + let this = cx.this().root(&mut cx); + #(#outer_prep_arguments)* + } + } else { + quote! { + #(#outer_prep_arguments)* + } + }; + + let inner_prepared = if let Some(_r) = &method.receiver { + if does_take_ownership_of_self { + quote! { + let this = this.into_inner(&mut cx); + let s: neon::handle::Handle> = this.get(&mut cx, "s")?; + let wrapped = (*s.wrapped).clone(); + #(#inner_prep_arguments)* + } + } else { + quote! { + let this = this.into_inner(&mut cx); + let s: neon::handle::Handle> = this.get(&mut cx, "s")?; + let wrapped = unsafe { + let ptr = &*s.wrapped as *const #wrapped_type_ident; + let ptr = ptr as *mut #wrapped_type_ident; + let boxed = Box::from_raw(ptr); + Ok(Box::leak(boxed)) + }?; + #(#inner_prep_arguments)* + } } } else { quote! { - #(#method_arguments)* + #(#inner_prep_arguments)* } }; @@ -230,10 +263,11 @@ pub fn generate_javascript_methods( quote! { #signature { use neon::prelude::*; - #prep_arguments + #outer_prepared let channel = cx.channel(); let (deferred, promise) = cx.promise(); deferred.try_settle_with(&channel, move |mut cx| { + #inner_prepared #middle x.into_js_result(&mut cx) }).expect("Error sending js"); @@ -244,7 +278,8 @@ pub fn generate_javascript_methods( quote! 
{ #signature { use neon::prelude::*; - #prep_arguments + #outer_prepared + #inner_prepared #middle x.into_js_result(&mut cx) } @@ -278,85 +313,132 @@ pub fn generate_javascript_methods( #[cfg(feature = "javascript")] impl IntoJsResult for #name_ident { - type Output = neon::types::JsObject; + type Output = neon::types::JsValue; fn into_js_result<'a, 'b, 'c: 'b, C: neon::context::Context<'c>>(self, cx: &mut C) -> neon::result::JsResult<'b, Self::Output> { use neon::object::Object; + use neon::prelude::Value; let obj = cx.empty_object(); - let s = cx.boxed(std::cell::RefCell::new(self)); + let s = cx.boxed(self); obj.set(cx, "s", s)?; #(#object_sets)* - Ok(obj) + Ok(obj.as_value(cx)) } } #[cfg(feature = "javascript")] impl neon::types::Finalize for #name_ident {} }) - - // proc_macro::TokenStream::from(quote! {}) } fn get_method_wrapper_arguments_javascript( method: &GetImplMethod, -) -> (Vec, Vec) { - let mut wrapper_arguments = Vec::new(); +) -> ( + Vec, + Vec, + Vec, +) { + let mut outer_prep_arguments = Vec::new(); + let mut inner_prep_arguments = Vec::new(); let mut method_arguments = Vec::new(); method .method_arguments .iter() .enumerate() - .for_each(|(i, (_argument_name, argument_type))| { - let argument_ident = format_ident!("arg{}", i); - let (argument_type_tokens, wrapper_argument_tokens) = convert_method_wrapper_arguments( - argument_ident.clone(), - argument_type, - ); - let argument_type_js = get_neon_type(argument_type); - let method_argument = match argument_type { - SupportedType::Option(_o) => quote! { - let #argument_ident = cx.argument_opt(#i as i32); - let #argument_ident = <#argument_type_tokens>::from_option_js_type(&mut cx, #argument_ident)?; - }, - _ => quote! 
{ - let #argument_ident = cx.argument::<#argument_type_js>(#i as i32)?; - let #argument_ident = <#argument_type_tokens>::from_js_type(&mut cx, #argument_ident)?; - } - }; + .for_each(|(i, (argument_name, argument_type))| { + let argument_name_ident = format_ident!("{}", argument_name.replace("mut ", "")); + let (outer_prep_argument, inner_prep_argument, method_argument) = + convert_method_wrapper_arguments(argument_name_ident, argument_type, i, false); + outer_prep_arguments.push(outer_prep_argument); + inner_prep_arguments.push(inner_prep_argument); method_arguments.push(method_argument); - wrapper_arguments.push(wrapper_argument_tokens.into_token_stream()); }); - - (method_arguments, wrapper_arguments) + (outer_prep_arguments, inner_prep_arguments, method_arguments) } fn convert_method_wrapper_arguments( name_ident: syn::Ident, ty: &SupportedType, -) -> (proc_macro2::TokenStream, proc_macro2::TokenStream) { - match ty { - SupportedType::Reference(r) => { - let (d, w) = convert_method_wrapper_arguments(name_ident, r); - (d, quote! { & #w}) + index: usize, + checked_basic_reference: bool, +) -> ( + proc_macro2::TokenStream, + proc_macro2::TokenStream, + proc_macro2::TokenStream, +) { + // I think this whole piece could be done better if we fix the way we handle references, but + // I'm not sure how to do that yet + match (&ty, checked_basic_reference) { + (SupportedType::Reference(r), false) => match *r.ty { + SupportedType::str => { + let argument_type_js = get_neon_type(&r.ty); + let t = syn::parse_str::("String") + .unwrap() + .into_token_stream(); + + ( + quote! { + let #name_ident = cx.argument::<#argument_type_js>(#index as i32)?; + let #name_ident = <#t>::from_js_type(&mut cx, #name_ident)?; + }, + quote! {}, + quote! 
{ &#name_ident }, + ) + } + SupportedType::i64 + | SupportedType::u64 + | SupportedType::i32 + | SupportedType::f64 + | SupportedType::bool => { + let argument_type_js = get_neon_type(&r.ty); + let t = r.ty.to_type(None).expect( + "Could not parse type in convert_method_wrapper_arguments in javascript.rs", + ); + ( + quote! { + let #name_ident = cx.argument::<#argument_type_js>(#index as i32)?; + let #name_ident = <#t>::from_js_type(&mut cx, #name_ident)?; + }, + quote! {}, + quote! { &#name_ident }, + ) + } + _ => convert_method_wrapper_arguments(name_ident, ty, index, true), + }, + (SupportedType::Option(_o), _) => { + let t = ty.to_type(None).expect( + "Could not parse type in convert_method_wrapper_arguments in javascript.rs", + ); + ( + quote! { + let #name_ident = cx.argument_opt(#index as i32); + let #name_ident = <#t>::from_option_js_type(&mut cx, #name_ident)?; + }, + quote! {}, + quote! { #name_ident }, + ) } - SupportedType::str => ( - syn::parse_str::("String") - .unwrap() - .into_token_stream(), - quote! { #name_ident}, - ), _ => { - let t = ty.to_type().expect( + let argument_type_js = get_neon_type(&ty); + let t = ty.to_type(None).expect( "Could not parse type in convert_method_wrapper_arguments in javascript.rs", ); - (t.into_token_stream(), quote! {#name_ident}) + ( + quote! { + let #name_ident = cx.argument::<#argument_type_js>(#index as i32)?; + let #name_ident = <#t>::from_js_type(&mut cx, #name_ident)?; + }, + quote! {}, + quote! 
{ #name_ident}, + ) } } } fn get_neon_type(ty: &SupportedType) -> syn::Type { match ty { - SupportedType::Reference(r) => get_neon_type(r), + SupportedType::Reference(r) => get_neon_type(&r.ty), + SupportedType::Option(o) => get_neon_type(o), SupportedType::str | SupportedType::String => { syn::parse_str("neon::types::JsString").unwrap() } @@ -368,13 +450,7 @@ fn get_neon_type(ty: &SupportedType) -> syn::Type { SupportedType::i64 | SupportedType::f64 | SupportedType::u64 => { syn::parse_str("neon::types::JsNumber").unwrap() } - // Our own types - SupportedType::Database - | SupportedType::Collection - | SupportedType::Splitter - | SupportedType::QueryBuilder - | SupportedType::QueryRunner - | SupportedType::Model => syn::parse_str("neon::types::JsObject").unwrap(), + SupportedType::CustomType(_t) => syn::parse_str("neon::types::JsValue").unwrap(), // Add more types as required _ => syn::parse_str("neon::types::JsValue").unwrap(), } @@ -389,13 +465,17 @@ fn convert_output_type_convert_from_javascript( ) { let (output_type, convert_from) = match ty { SupportedType::S => ( - Some(quote! {neon::result::JsResult<'a, neon::types::JsObject>}), + Some(quote! {neon::result::JsResult<'a, neon::types::JsValue>}), Some(format_ident!("Self").into_token_stream()), ), - t @ SupportedType::Database | t @ SupportedType::Collection => ( - Some(quote! {neon::result::JsResult<'a, neon::types::JsObject>}), - Some(format_ident!("{}Javascript", t.to_string()).into_token_stream()), - ), + // t @ SupportedType::Database + // | t @ SupportedType::PipelineSyncData + // | t @ SupportedType::Model + // | t @ SupportedType::Splitter + // | t @ SupportedType::Collection => ( + // Some(quote! {neon::result::JsResult<'a, neon::types::JsObject>}), + // Some(format_ident!("{}Javascript", t.to_string()).into_token_stream()), + // ), t => { let ty = get_neon_type(t); (Some(quote! 
{neon::result::JsResult<'a, #ty>}), None) @@ -414,7 +494,7 @@ fn convert_output_type_convert_from_javascript( fn get_typescript_type(ty: &SupportedType) -> String { match ty { - SupportedType::Reference(r) => get_typescript_type(r), + SupportedType::Reference(r) => get_typescript_type(&r.ty), SupportedType::str | SupportedType::String => "string".to_string(), SupportedType::bool => "boolean".to_string(), SupportedType::Option(o) => get_typescript_type(o), @@ -426,8 +506,6 @@ fn get_typescript_type(ty: &SupportedType) -> String { get_typescript_type(v) ) } - SupportedType::JsonHashMap => "Map".to_string(), - SupportedType::DateTime => "Date".to_string(), SupportedType::Tuple(t) => { let mut types = Vec::new(); for ty in t { @@ -441,13 +519,8 @@ fn get_typescript_type(ty: &SupportedType) -> String { } } SupportedType::i64 | SupportedType::f64 | SupportedType::u64 => "number".to_string(), - // Our own types - t @ SupportedType::Database - | t @ SupportedType::Collection - | t @ SupportedType::Splitter - | t @ SupportedType::QueryBuilder - | t @ SupportedType::QueryRunner - | t @ SupportedType::Model => t.to_string(), + + SupportedType::CustomType(t) => t.to_string(), // Add more types as required _ => "any".to_string(), } diff --git a/pgml-sdks/rust/pgml-macros/src/lib.rs b/pgml-sdks/rust/pgml-macros/src/lib.rs index c70a9efdd..ba6425f54 100644 --- a/pgml-sdks/rust/pgml-macros/src/lib.rs +++ b/pgml-sdks/rust/pgml-macros/src/lib.rs @@ -36,14 +36,11 @@ pub fn custom_methods( output } -#[proc_macro_derive(custom_into_py)] -pub fn custom_into_py(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let parsed = parse_macro_input!(input as DeriveInput); - python::generate_into_py(parsed) -} - -#[proc_macro_derive(custom_into_js_result)] -pub fn custom_into_js_result(input: proc_macro::TokenStream) -> proc_macro::TokenStream { +#[proc_macro_derive(pgml_alias)] +pub fn pgml_alias(input: proc_macro::TokenStream) -> proc_macro::TokenStream { + let mut output = 
proc_macro::TokenStream::new(); let parsed = parse_macro_input!(input as DeriveInput); - javascript::generate_custom_into_js_result(parsed) + output.extend(python::pgml_alias(parsed.clone())); + // output.extend(javascript::pgml_alias(parsed)); + output } diff --git a/pgml-sdks/rust/pgml-macros/src/python.rs b/pgml-sdks/rust/pgml-macros/src/python.rs index c3334e743..bf9cd61d5 100644 --- a/pgml-sdks/rust/pgml-macros/src/python.rs +++ b/pgml-sdks/rust/pgml-macros/src/python.rs @@ -12,52 +12,64 @@ from typing import List, Dict, Optional, Self, Any "#; -pub fn generate_into_py(parsed: DeriveInput) -> proc_macro::TokenStream { - let name = parsed.ident; - let fields_named = match parsed.data { - syn::Data::Struct(s) => match s.fields { - syn::Fields::Named(n) => n, - _ => panic!("custom_into_py proc_macro structs should only have named fields"), - }, - _ => panic!("custom_into_py proc_macro should only be used on structs"), - }; +/// This function assumes the user has already impliemented: +/// - `FromPyObject` for the wrapped type +/// - `ToPyObject` for the wrapped type +/// - `IntoPy` for the wrapped type +pub fn pgml_alias(parsed: DeriveInput) -> proc_macro::TokenStream { + let name_ident = format_ident!("{}Python", parsed.ident); + let wrapped_type_ident = parsed.ident; - let sets: Vec = fields_named.named.into_pairs().map(|p| { - let v = p.into_value(); - let name = v.ident.to_token_stream().to_string(); - let name_ident = v.ident; - quote! { - dict.set_item(#name, self.#name_ident).expect("Error setting python value in custom_into_py proc_macro"); + let expanded = quote! 
{ + #[cfg(feature = "python")] + #[derive(Clone, Debug)] + pub struct #name_ident { + pub wrapped: #wrapped_type_ident } - }).collect(); - let stub = format!("\n{} = dict[str, Any]\n", name); + #[cfg(feature = "python")] + impl CustomInto<#name_ident> for #wrapped_type_ident { + fn custom_into(self) -> #name_ident { + #name_ident { + wrapped: self, + } + } + } - let mut file = OpenOptions::new() - .create(true) - .write(true) - .append(true) - .read(true) - .open("python/pgml/pgml.pyi") - .unwrap(); - let mut contents = String::new(); - file.read_to_string(&mut contents) - .expect("Unable to read stubs file for python"); - if !contents.contains(&stub) { - file.write_all(stub.as_bytes()) - .expect("Unable to write stubs file for python"); - } + #[cfg(feature = "python")] + impl CustomInto<#wrapped_type_ident> for #name_ident { + fn custom_into(self) -> #wrapped_type_ident { + self.wrapped + } + } - let expanded = quote! { + // From Python to Rust #[cfg(feature = "python")] - impl pyo3::conversion::IntoPy for #name { - fn into_py(self, py: pyo3::marker::Python<'_>) -> pyo3::PyObject { - let dict = pyo3::types::PyDict::new(py); - #(#sets)* - dict.into() + impl pyo3::conversion::FromPyObject<'_> for #name_ident { + fn extract(obj: &pyo3::PyAny) -> pyo3::PyResult { + Ok(Self { + wrapped: obj.extract()? + }) + } + } + + // From Rust to Python + #[cfg(feature = "python")] + impl pyo3::conversion::ToPyObject for #name_ident { + fn to_object(&self, py: pyo3::Python) -> pyo3::PyObject { + use pyo3::conversion::ToPyObject; + self.wrapped.to_object(py) + } + } + #[cfg(feature = "python")] + impl pyo3::conversion::IntoPy for #name_ident { + fn into_py(self, py: pyo3::Python) -> pyo3::PyObject { + use pyo3::conversion::ToPyObject; + self.wrapped.to_object(py) } } }; + proc_macro::TokenStream::from(expanded) } @@ -69,16 +81,88 @@ pub fn generate_python_derive(parsed: DeriveInput) -> proc_macro::TokenStream { let expanded = quote! 
{ #[cfg(feature = "python")] #[pyo3::pyclass(name = #wrapped_type_name)] - #[derive(Debug)] + #[derive(Clone, Debug)] pub struct #name_ident { - wrapped: #wrapped_type_ident + pub wrapped: std::boxed::Box<#wrapped_type_ident> } #[cfg(feature = "python")] - impl From<#wrapped_type_ident> for #name_ident { - fn from(w: #wrapped_type_ident) -> Self { - Self { - wrapped: w, + impl CustomInto<#name_ident> for #wrapped_type_ident { + fn custom_into(self) -> #name_ident { + #name_ident { + wrapped: std::boxed::Box::new(self), + } + } + } + + #[cfg(feature = "python")] + impl CustomInto<#wrapped_type_ident> for #name_ident { + fn custom_into(self) -> #wrapped_type_ident { + *self.wrapped + } + } + + #[cfg(feature = "python")] + impl CustomInto<&'static mut #wrapped_type_ident> for &mut #name_ident { + fn custom_into(self) -> &'static mut #wrapped_type_ident { + // This is how we get around the liftime checker + unsafe { + let ptr = &*self.wrapped as *const #wrapped_type_ident; + let ptr = ptr as *mut #wrapped_type_ident; + let boxed = Box::from_raw(ptr); + Box::leak(boxed) + } + } + } + + #[cfg(feature = "python")] + impl CustomInto<&'static #wrapped_type_ident> for &mut #name_ident { + fn custom_into(self) -> &'static #wrapped_type_ident { + // This is how we get around the liftime checker + unsafe { + let ptr = &*self.wrapped as *const #wrapped_type_ident; + let ptr = ptr as *mut #wrapped_type_ident; + let boxed = Box::from_raw(ptr); + Box::leak(boxed) + } + } + } + + #[cfg(feature = "python")] + impl CustomInto<#wrapped_type_ident> for &mut #name_ident { + fn custom_into(self) -> #wrapped_type_ident { + // This is how we get around the liftime checker + unsafe { + let ptr = &*self.wrapped as *const #wrapped_type_ident; + let ptr = ptr as *mut #wrapped_type_ident; + let boxed = Box::from_raw(ptr); + Box::leak(boxed).to_owned() + } + } + } + + #[cfg(feature = "python")] + impl CustomInto<&'static #wrapped_type_ident> for &#name_ident { + fn custom_into(self) -> &'static 
#wrapped_type_ident { + // This is how we get around the liftime checker + unsafe { + let ptr = &*self.wrapped as *const #wrapped_type_ident; + let ptr = ptr as *mut #wrapped_type_ident; + let boxed = Box::from_raw(ptr); + Box::leak(boxed) + } + } + } + + #[cfg(feature = "python")] + impl CustomInto<#wrapped_type_ident> for &#name_ident { + fn custom_into(self) -> #wrapped_type_ident { + // This is how we get around the liftime checker + unsafe { + let ptr = &*self.wrapped as *const #wrapped_type_ident; + let ptr = ptr as *mut #wrapped_type_ident; + let boxed = Box::from_raw(ptr); + Box::leak(boxed).to_owned() } } } @@ -87,7 +171,8 @@ pub fn generate_python_derive(parsed: DeriveInput) -> proc_macro::TokenStream { impl pyo3::IntoPy for #wrapped_type_ident { fn into_py(self, py: pyo3::Python) -> pyo3::PyObject { use pyo3::conversion::IntoPy; - #name_ident::from(self.clone()).into_py(py) + let x: #name_ident = self.custom_into(); + x.into_py(py) } } }; @@ -130,7 +215,8 @@ pub fn generate_python_methods( } let method_ident = method.method_ident.clone(); - let (method_arguments, wrapper_arguments) = get_method_wrapper_arguments_python(&method); + let (method_arguments, wrapper_arguments, prep_wrapper_arguments) = + get_method_wrapper_arguments_python(&method); let (output_type, convert_from) = match &method.output_type { OutputType::Result(v) | OutputType::Other(v) => { convert_output_type_convert_from_python(v, &method) @@ -138,6 +224,23 @@ pub fn generate_python_methods( OutputType::Default => (None, None), }; + let some_wrapper_type = match method.receiver.as_ref() { + Some(r) => { + let st = r.to_string(); + Some(if st.contains("&") { + let st = st.replace("self", &wrapped_type_ident.to_string()); + let s = syn::parse_str::(&st).expect(&format!( + "Error converting self type to necessary syn type: {:?}", + r + )); + s.to_token_stream() + } else { + quote! { #wrapped_type_ident } + }) + } + None => None, + }; + let signature = quote! 
{ pub fn #method_ident<'a>(#(#method_arguments),*) -> #output_type }; @@ -147,19 +250,30 @@ pub fn generate_python_methods( "new" => "__init__".to_string(), _ => method_ident.to_string(), }; - let p3 = method + let mut p3 = method .method_arguments .iter() - .map(|a| format!("{}: {}", a.0, get_python_type(&a.1))) - .collect::>() - .join(", "); + .map(|a| { + format!( + "{}: {}", + a.0.replace("mut", "").trim(), + get_python_type(&a.1) + ) + }) + .collect::>(); + p3.insert(0, "self".to_string()); + let p3 = p3.join(", "); let p4 = match &method.output_type { OutputType::Result(v) | OutputType::Other(v) => get_python_type(v), OutputType::Default => "None".to_string(), }; - stubs.push_str(&format!("\t{} {}(self, {}) -> {}", p1, p2, p3, p4)); + stubs.push_str(&format!("\t{} {}({}) -> {}", p1, p2, p3, p4)); stubs.push_str("\n\t\t...\n"); + let prepared_wrapper_arguments = quote! { + #(#prep_wrapper_arguments)* + }; + // The new function for pyO3 requires some unique syntax let (signature, middle) = if method_ident == "new" { let signature = quote! { @@ -188,23 +302,30 @@ pub fn generate_python_methods( } }; let middle = quote! { + use std::ops::DerefMut; + #prepared_wrapper_arguments #middle - Ok(#name_ident::from(x)) + let x: Self = x.custom_into(); + Ok(x) }; (signature, middle) } else { let middle = quote! { #method_ident(#(#wrapper_arguments),*) }; + let middle = if method.is_async { quote! { - wrapped.#middle.await + { + wrapped.#middle.await + } } } else { quote! { wrapped.#middle } }; + let middle = if let OutputType::Result(_r) = method.output_type { quote! { let x = match #middle { @@ -220,22 +341,27 @@ pub fn generate_python_methods( let middle = if let Some(convert) = convert_from { quote! { #middle - let x = #convert::from(x); + // let x = <#convert>::from(x); + let x: #convert = x.custom_into(); } } else { middle }; let middle = if method.is_async { quote! 
{ - let wrapped = self.wrapped.clone(); + let mut wrapped: #some_wrapper_type = self.custom_into(); + #prepared_wrapper_arguments pyo3_asyncio::tokio::future_into_py(py, async move { + use std::ops::DerefMut; #middle Ok(x) }) } } else { quote! { - let wrapped = self.wrapped.clone(); + let mut wrapped: #some_wrapper_type = self.custom_into(); + use std::ops::DerefMut; + #prepared_wrapper_arguments #middle Ok(x) } @@ -280,12 +406,17 @@ pub fn generate_python_methods( pub fn get_method_wrapper_arguments_python( method: &GetImplMethod, -) -> (Vec, Vec) { +) -> ( + Vec, + Vec, + Vec, +) { let mut method_arguments = Vec::new(); let mut wrapper_arguments = Vec::new(); + let mut prep_wrapper_arguments = Vec::new(); if let Some(_receiver) = &method.receiver { - method_arguments.push(quote! { &self }); + method_arguments.push(quote! { &mut self }); } method @@ -293,10 +424,15 @@ pub fn get_method_wrapper_arguments_python( .iter() .for_each(|(argument_name, argument_type)| { let argument_name_ident = format_ident!("{}", argument_name.replace("mut ", "")); - let (method_argument, wrapper_argument) = - convert_method_wrapper_arguments(argument_name_ident, argument_type); + let (method_argument, wrapper_argument, prep_wrapper_argument) = + convert_method_wrapper_arguments( + argument_name_ident, + argument_type, + method.is_async, + ); method_arguments.push(method_argument); wrapper_arguments.push(wrapper_argument); + prep_wrapper_arguments.push(prep_wrapper_argument); }); let extra_arg = quote! { @@ -308,26 +444,32 @@ pub fn get_method_wrapper_arguments_python( method_arguments.push(extra_arg); } - (method_arguments, wrapper_arguments) + (method_arguments, wrapper_arguments, prep_wrapper_arguments) } fn convert_method_wrapper_arguments( name_ident: syn::Ident, ty: &SupportedType, -) -> (proc_macro2::TokenStream, proc_macro2::TokenStream) { - match ty { - SupportedType::Reference(r) => { - let (d, w) = convert_method_wrapper_arguments(name_ident, r); - (d, quote! 
{ & #w}) - } - SupportedType::str => (quote! {#name_ident: String}, quote! { #name_ident}), - _ => { - let t = ty - .to_type() - .expect("Could not parse type in convert_method_type in python.rs"); - (quote! { #name_ident : #t}, quote! {#name_ident}) - } - } + _is_async: bool, +) -> ( + proc_macro2::TokenStream, + proc_macro2::TokenStream, + proc_macro2::TokenStream, +) { + let pyo3_type = ty + .to_type(Some("Python")) + .expect("Could not parse type in convert_method_wrapper_arguments in python.rs"); + let normal_type = ty + .to_type(None) + .expect("Could not parse type in convert_method_wrapper_arguments in python.rs"); + + ( + quote! { #name_ident: #pyo3_type }, + quote! { #name_ident }, + quote! { + let #name_ident: #normal_type = #name_ident.custom_into(); + }, + ) } fn convert_output_type_convert_from_python( @@ -344,9 +486,9 @@ fn convert_output_type_convert_from_python( ), t => { let ty = t - .to_type() + .to_type(Some("Python")) .expect("Error converting to type in convert_output_type_convert_from_python"); - (Some(quote! {pyo3::PyResult<#ty>}), None) + (Some(quote! {pyo3::PyResult<#ty>}), Some(quote! 
{#ty})) } }; @@ -359,7 +501,7 @@ fn convert_output_type_convert_from_python( fn get_python_type(ty: &SupportedType) -> String { match ty { - SupportedType::Reference(r) => get_python_type(r), + SupportedType::Reference(r) => get_python_type(&r.ty), SupportedType::S => "Self".to_string(), SupportedType::str | SupportedType::String => "str".to_string(), SupportedType::bool => "bool".to_string(), @@ -384,15 +526,10 @@ fn get_python_type(ty: &SupportedType) -> String { format!("tuple[{}]", types.join(", ")) } } - SupportedType::i64 => "int".to_string(), + SupportedType::i64 | SupportedType::u64 => "int".to_string(), SupportedType::f64 => "float".to_string(), // Our own types - t @ SupportedType::Database - | t @ SupportedType::Collection - | t @ SupportedType::Model - | t @ SupportedType::QueryBuilder - | t @ SupportedType::QueryRunner - | t @ SupportedType::Splitter => t.to_string(), + SupportedType::CustomType(t) => t.to_string(), // Add more types as required _ => "Any".to_string(), } @@ -400,15 +537,17 @@ fn get_python_type(ty: &SupportedType) -> String { fn get_type_for_optional(ty: &SupportedType) -> String { match ty { - SupportedType::Reference(r) => get_type_for_optional(r), + SupportedType::Reference(r) => get_type_for_optional(&r.ty), SupportedType::str | SupportedType::String => { "\"Default set in Rust. 
Please check the documentation.\"".to_string() } SupportedType::HashMap(_) => "{}".to_string(), SupportedType::Vec(_) => "[]".to_string(), - SupportedType::i64 => 1.to_string(), + SupportedType::i64 | SupportedType::u64 => 1.to_string(), SupportedType::f64 => 1.0.to_string(), - SupportedType::Json => "{}".to_string(), - _ => panic!("Type not yet supported for optional python stub: {:?}", ty), + SupportedType::bool => "True".to_string(), + + _ => "Any".to_string(), + // _ => panic!("Type not yet supported for optional python stub: {:?}", ty), } } diff --git a/pgml-sdks/rust/pgml-macros/src/types.rs b/pgml-sdks/rust/pgml-macros/src/types.rs index e69aa8cc4..99947b1da 100644 --- a/pgml-sdks/rust/pgml-macros/src/types.rs +++ b/pgml-sdks/rust/pgml-macros/src/types.rs @@ -3,78 +3,90 @@ use std::boxed::Box; use std::string::ToString; use syn::visit::{self, Visit}; -#[derive(Debug)] +#[derive(Debug, Clone)] +pub struct ReferenceType { + pub ty: Box, + pub mutable: bool, +} + +impl ReferenceType { + pub fn new(ty: SupportedType, mutable: bool) -> Self { + Self { + ty: Box::new(ty), + mutable, + } + } +} + +#[derive(Debug, Clone)] #[allow(non_camel_case_types)] pub enum SupportedType { - Reference(Box), + Reference(ReferenceType), str, String, bool, Vec(Box), HashMap((Box, Box)), Option(Box), - JsonHashMap, - Json, - DateTime, Tuple(Vec), S, // Self for return types only i64, u64, i32, f64, - // Our own types - Database, - Collection, - Splitter, - Model, - QueryBuilder, - QueryRunner + CustomType(String), } impl ToString for SupportedType { fn to_string(&self) -> String { + self.to_language_string(&None) + } +} + +impl SupportedType { + pub fn to_type(&self, language: Option<&str>) -> syn::Result { + syn::parse_str(&self.to_language_string(&language)) + } + + pub fn to_language_string(&self, language: &Option<&str>) -> String { match self { - SupportedType::Reference(t) => format!("&{}", t.to_string()), + SupportedType::Reference(t) => { + if t.mutable { + format!("&mut 
{}", t.ty.to_language_string(language)) + } else { + format!("&{}", t.ty.to_language_string(language)) + } + } SupportedType::str => "str".to_string(), SupportedType::String => "String".to_string(), SupportedType::bool => "bool".to_string(), - SupportedType::Json => "Json".to_string(), - SupportedType::Vec(v) => format!("Vec<{}>", v.to_string()), + SupportedType::Vec(v) => format!("Vec<{}>", v.to_language_string(language)), SupportedType::HashMap((k, v)) => { - format!("HashMap<{},{}>", k.to_string(), v.to_string()) + format!( + "HashMap<{},{}>", + k.to_language_string(language), + v.to_language_string(language) + ) } SupportedType::Tuple(t) => { let mut types = Vec::new(); for ty in t { - types.push(ty.to_string()); + types.push(ty.to_language_string(language)); } format!("({})", types.join(",")) } SupportedType::S => "Self".to_string(), - SupportedType::Option(v) => format!("Option<{}>", v.to_string()), + SupportedType::Option(v) => format!("Option<{}>", v.to_language_string(language)), SupportedType::i64 => "i64".to_string(), SupportedType::u64 => "u64".to_string(), SupportedType::i32 => "i32".to_string(), SupportedType::f64 => "f64".to_string(), - SupportedType::JsonHashMap => "JsonHashMap".to_string(), - SupportedType::DateTime => "DateTime".to_string(), // Our own types - SupportedType::Database => "Database".to_string(), - SupportedType::Collection => "Collection".to_string(), - SupportedType::Splitter => "Splitter".to_string(), - SupportedType::Model => "Model".to_string(), - SupportedType::QueryBuilder => "QueryBuilder".to_string(), - SupportedType::QueryRunner => "QueryRunner".to_string(), + SupportedType::CustomType(t) => format!("{}{}", t, language.unwrap_or("")), } } } -impl SupportedType { - pub fn to_type(&self) -> syn::Result { - syn::parse_str(&self.to_string()) - } -} - #[derive(Default)] pub struct GetSupportedType { pub ty: Option, @@ -89,7 +101,7 @@ impl GetSupportedType { pub fn get_type_from_path(i: &syn::TypePath) -> SupportedType { let mut 
s = Self::default(); - s.visit_path(&i.path); + s.visit_path_segment(i.path.segments.last().expect("No path segment found")); s.ty.expect("Error getting type from TypePath") } @@ -117,7 +129,10 @@ impl GetSupportedType { impl<'ast> Visit<'ast> for GetSupportedType { fn visit_type(&mut self, i: &syn::Type) { self.ty = Some(match i { - syn::Type::Reference(r) => SupportedType::Reference(Box::new(Self::get_type(&r.elem))), + syn::Type::Reference(r) => SupportedType::Reference(ReferenceType::new( + Self::get_type(&r.elem), + r.mutability.is_some(), + )), syn::Type::Path(p) => Self::get_type_from_path(p), syn::Type::Tuple(t) => { let values: Vec = t @@ -164,24 +179,17 @@ impl<'ast> Visit<'ast> for GetSupportedType { i.to_token_stream().to_string() ), }, - "JsonHashMap" => Some(SupportedType::JsonHashMap), - "DateTime" => Some(SupportedType::DateTime), "Self" => Some(SupportedType::S), "i64" => Some(SupportedType::i64), "u64" => Some(SupportedType::u64), "i32" => Some(SupportedType::i32), "f64" => Some(SupportedType::f64), - "Json" => Some(SupportedType::Json), // Our own types - "Database" => Some(SupportedType::Database), - "Collection" => Some(SupportedType::Collection), - "Splitter" => Some(SupportedType::Splitter), - "Model" => Some(SupportedType::Model), - "QueryBuilder" => Some(SupportedType::QueryBuilder), - "QueryRunner" => Some(SupportedType::QueryRunner), - _ => None, + t => Some(SupportedType::CustomType(t.to_string())), }; + // println!("SELF TYPE {:?}", self.ty); + if self.ty.is_none() { visit::visit_path_segment(self, i); } diff --git a/pgml-sdks/rust/pgml/.cargo/config.toml b/pgml-sdks/rust/pgml/.cargo/config.toml new file mode 100644 index 000000000..c2ae5ba0a --- /dev/null +++ b/pgml-sdks/rust/pgml/.cargo/config.toml @@ -0,0 +1,5 @@ +[target.x86_64-unknown-linux-gnu] +rustflags = ["-C", "link-args=-Wl,-undefined,dynamic_lookup,-fuse-ld=/usr/bin/mold"] + +[target.aarch64-unknown-linux-gnu] +rustflags = ["-C", 
"link-args=-Wl,-undefined,dynamic_lookup,-fuse-ld=/usr/bin/mold"] diff --git a/pgml-sdks/rust/pgml/.gitignore b/pgml-sdks/rust/pgml/.gitignore index 7c3e901ae..2d5a692e0 100644 --- a/pgml-sdks/rust/pgml/.gitignore +++ b/pgml-sdks/rust/pgml/.gitignore @@ -2,7 +2,6 @@ __pycache__/ *.py[cod] *$py.class -*.pyi # C extensions *.so @@ -14,7 +13,6 @@ node_modules/ # Distribution / packaging .Python -*.pyi build/ develop-eggs/ dist/ diff --git a/pgml-sdks/rust/pgml/Cargo.lock b/pgml-sdks/rust/pgml/Cargo.lock index 956564dd1..b0db6692f 100644 --- a/pgml-sdks/rust/pgml/Cargo.lock +++ b/pgml-sdks/rust/pgml/Cargo.lock @@ -60,6 +60,17 @@ version = "1.0.71" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8" +[[package]] +name = "async-trait" +version = "0.1.71" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a564d521dd56509c4c47480d00b80ee55f7e385ae48db5744c67ad50c92d2ebf" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.28", +] + [[package]] name = "atoi" version = "1.0.0" @@ -147,6 +158,29 @@ dependencies = [ "winapi", ] +[[package]] +name = "console" +version = "0.15.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c926e00cc70edefdc64d3a5ff31cc65bb97a3460097762bd23afb4d8145fccf8" +dependencies = [ + "encode_unicode", + "lazy_static", + "libc", + "unicode-width", + "windows-sys 0.45.0", +] + +[[package]] +name = "core-foundation" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "core-foundation-sys" version = "0.8.4" @@ -284,18 +318,78 @@ version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91" +[[package]] +name = 
"encode_unicode" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" + +[[package]] +name = "encoding_rs" +version = "0.8.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071a31f4ee85403370b58aca746f01041ede6f0da2730960ad001edc2b71b394" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "errno" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bcfec3a70f97c962c307b2d2c56e358cf1d00b558d74262b5f929ee8cc7e73a" +dependencies = [ + "errno-dragonfly", + "libc", + "windows-sys 0.48.0", +] + +[[package]] +name = "errno-dragonfly" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" +dependencies = [ + "cc", + "libc", +] + [[package]] name = "event-listener" version = "2.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" +[[package]] +name = "fastrand" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be" +dependencies = [ + "instant", +] + [[package]] name = "fnv" version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + [[package]] name = "form_urlencoded" version = "1.2.0" @@ -372,7 +466,7 @@ checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" dependencies = [ "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.28", ] [[package]] @@ -426,6 +520,25 @@ dependencies = [ "wasi 0.11.0+wasi-snapshot-preview1", ] +[[package]] +name = "h2" +version = "0.3.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97ec8491ebaf99c8eaa73058b045fe58073cd6be7f596ac993ced0b0a0c01049" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http", + "indexmap", + "slab", + "tokio", + "tokio-util", + "tracing", +] + [[package]] name = "hashbrown" version = "0.12.3" @@ -469,6 +582,12 @@ dependencies = [ "libc", ] +[[package]] +name = "hermit-abi" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b" + [[package]] name = "hex" version = "0.4.3" @@ -493,6 +612,77 @@ dependencies = [ "digest", ] +[[package]] +name = "http" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" +dependencies = [ + "bytes", + "http", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" + +[[package]] +name = "httpdate" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" + +[[package]] +name = "hyper" +version = "0.14.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffb1cfd654a8219eaef89881fdb3bb3b1cdc5fa75ded05d6933b2b382e395468" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "socket2", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper-tls" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" +dependencies = [ + "bytes", + "hyper", + "native-tls", + "tokio", + "tokio-native-tls", +] + [[package]] name = "iana-time-zone" version = "0.1.57" @@ -542,6 +732,19 @@ dependencies = [ "hashbrown 0.12.3", ] +[[package]] +name = "indicatif" +version = "0.17.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b297dc40733f23a0e52728a58fa9489a5b7638a324932de16b41adc3ef80730" +dependencies = [ + "console", + "instant", + "number_prefix", + "portable-atomic", + "unicode-width", +] + [[package]] name = "indoc" version = "1.0.9" @@ -557,6 +760,23 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "io-lifetimes" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" +dependencies = [ + "hermit-abi 0.3.2", + "libc", + "windows-sys 0.48.0", +] + +[[package]] +name = "ipnet" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28b29a3cd74f0f4598934efe3aeba42bae0eb4680554128851ebbecb02af14e6" + [[package]] name = "itertools" version = "0.10.5" @@ -581,6 +801,12 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "lazy_static" +version = "1.4.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + [[package]] name = "libc" version = "0.2.146" @@ -597,6 +823,12 @@ dependencies = [ "winapi", ] +[[package]] +name = "linux-raw-sys" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" + [[package]] name = "lock_api" version = "0.4.10" @@ -643,6 +875,12 @@ dependencies = [ "autocfg", ] +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + [[package]] name = "minimal-lexical" version = "0.2.1" @@ -657,7 +895,25 @@ checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2" dependencies = [ "libc", "wasi 0.11.0+wasi-snapshot-preview1", - "windows-sys", + "windows-sys 0.48.0", +] + +[[package]] +name = "native-tls" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e" +dependencies = [ + "lazy_static", + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", ] [[package]] @@ -711,6 +967,16 @@ dependencies = [ "minimal-lexical", ] +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + [[package]] name = "num-traits" version = "0.2.15" @@ -726,16 +992,72 @@ version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b" dependencies = [ - "hermit-abi", + "hermit-abi 0.2.6", "libc", ] 
+[[package]] +name = "number_prefix" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" + [[package]] name = "once_cell" version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" +[[package]] +name = "openssl" +version = "0.10.55" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "345df152bc43501c5eb9e4654ff05f794effb78d4efe3d53abc158baddc0703d" +dependencies = [ + "bitflags", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.28", +] + +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "openssl-sys" +version = "0.9.90" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "374533b0e45f3a7ced10fcaeccca020e66656bc03dac384f852e4e5a7a8104a6" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + [[package]] name = "parking_lot" version = "0.11.2" @@ -781,7 +1103,7 @@ dependencies = [ "libc", "redox_syscall 0.3.5", "smallvec", - "windows-targets", + "windows-targets 0.48.0", ] [[package]] @@ -798,23 +1120,29 @@ checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" [[package]] name = "pgml" -version = 
"0.8.0" +version = "0.8.1" dependencies = [ "anyhow", + "async-trait", "chrono", + "futures", + "indicatif", "itertools", - "log", "md5", "neon", "pgml-macros", "pyo3", "pyo3-asyncio", "regex", + "reqwest", "sea-query", "sea-query-binder", + "serde", "serde_json", "sqlx", "tokio", + "tracing", + "tracing-subscriber", "uuid", ] @@ -825,7 +1153,7 @@ dependencies = [ "anyhow", "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.28", ] [[package]] @@ -840,6 +1168,18 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" +[[package]] +name = "pkg-config" +version = "0.3.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964" + +[[package]] +name = "portable-atomic" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f32154ba0af3a075eefa1eda8bb414ee928f62303a54ea85b8d6638ff1a6ee9e" + [[package]] name = "ppv-lite86" version = "0.2.17" @@ -848,9 +1188,9 @@ checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" [[package]] name = "proc-macro2" -version = "1.0.60" +version = "1.0.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dec2b086b7a862cf4de201096214fa870344cf922b2b30c167badb3af3195406" +checksum = "78803b62cbf1f46fde80d7c0e803111524b9877184cfe7c3033659490ac7a7da" dependencies = [ "unicode-ident", ] @@ -942,9 +1282,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.28" +version = "1.0.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b9ab9c7eadfd8df19006f1cf1a4aed13540ed5cbc047010ece5826e10825488" +checksum = "573015e8ab27661678357f27dc26460738fd2b6c86e46f386fde94cb5d913105" dependencies = [ "proc-macro2", ] @@ -1025,6 +1365,43 @@ version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"436b050e76ed2903236f032a59761c1eb99e1b0aead2c257922771dab1fc8c78" +[[package]] +name = "reqwest" +version = "0.11.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cde824a14b7c14f85caff81225f411faacc04a2013f41670f41443742b1c1c55" +dependencies = [ + "base64 0.21.2", + "bytes", + "encoding_rs", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "hyper", + "hyper-tls", + "ipnet", + "js-sys", + "log", + "mime", + "native-tls", + "once_cell", + "percent-encoding", + "pin-project-lite", + "serde", + "serde_json", + "serde_urlencoded", + "tokio", + "tokio-native-tls", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "winreg", +] + [[package]] name = "ring" version = "0.16.20" @@ -1040,6 +1417,20 @@ dependencies = [ "winapi", ] +[[package]] +name = "rustix" +version = "0.37.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d69718bf81c6127a49dc64e44a742e8bb9213c0ff8869a22c308f84c1d4ab06" +dependencies = [ + "bitflags", + "errno", + "io-lifetimes", + "libc", + "linux-raw-sys", + "windows-sys 0.48.0", +] + [[package]] name = "rustls" version = "0.20.8" @@ -1067,6 +1458,15 @@ version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f91339c0467de62360649f8d3e185ca8de4224ff281f66000de5eb2a77a79041" +[[package]] +name = "schannel" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c3733bf4cf7ea0880754e19cb5a462007c4a8c1914bff372ccc95b464f1df88" +dependencies = [ + "windows-sys 0.48.0", +] + [[package]] name = "scopeguard" version = "1.1.0" @@ -1130,6 +1530,29 @@ dependencies = [ "thiserror", ] +[[package]] +name = "security-framework" +version = "2.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fc758eb7bffce5b308734e9b0c1468893cae9ff70ebf13e7090be8dcbcc83a8" +dependencies = [ + "bitflags", + "core-foundation", + 
"core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f51d0c0d83bec45f16480d0ce0058397a69e48fcdc52d1dc8855fb68acbd31a7" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "semver" version = "0.9.0" @@ -1147,22 +1570,22 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" [[package]] name = "serde" -version = "1.0.164" +version = "1.0.181" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e8c8cf938e98f769bc164923b06dce91cea1751522f46f8466461af04c9027d" +checksum = "6d3e73c93c3240c0bda063c239298e633114c69a888c3e37ca8bb33f343e9890" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.164" +version = "1.0.181" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9735b638ccc51c28bf6914d90a2e9725b377144fc612c49a611fddd1b631d68" +checksum = "be02f6cb0cd3a5ec20bbcfbcbd749f57daddb1a0882dc2e46a6c236c90b977ed" dependencies = [ "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.28", ] [[package]] @@ -1176,6 +1599,18 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + [[package]] name = "sha1" version = "0.10.5" @@ -1198,6 +1633,15 @@ dependencies = [ "digest", ] +[[package]] +name = "sharded-slab" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" +dependencies = [ + "lazy_static", +] + [[package]] name = "slab" version = "0.4.8" @@ -1372,9 +1816,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.18" +version = 
"2.0.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32d41677bcbe24c20c52e7c70b0d8db04134c5d1066bf98662e2871ad200ea3e" +checksum = "04361975b3f5e348b2189d8dc55bc942f278b2d482a6a0365de5bdd62d351567" dependencies = [ "proc-macro2", "quote", @@ -1398,6 +1842,20 @@ version = "0.12.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fd1ba337640d60c3e96bc6f0638a939b9c9a7f2c316a1598c279828b3d1dc8c5" +[[package]] +name = "tempfile" +version = "3.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31c0432476357e58790aaa47a8efb0c5138f137343f3b5f23bd36a27e3b0a6d6" +dependencies = [ + "autocfg", + "cfg-if", + "fastrand", + "redox_syscall 0.3.5", + "rustix", + "windows-sys 0.48.0", +] + [[package]] name = "thiserror" version = "1.0.40" @@ -1415,7 +1873,17 @@ checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.28", +] + +[[package]] +name = "thread_local" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" +dependencies = [ + "cfg-if", + "once_cell", ] [[package]] @@ -1485,7 +1953,7 @@ dependencies = [ "pin-project-lite", "socket2", "tokio-macros", - "windows-sys", + "windows-sys 0.48.0", ] [[package]] @@ -1496,7 +1964,17 @@ checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.28", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", ] [[package]] @@ -1521,6 +1999,103 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-util" +version = "0.7.8" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "806fe8c2c87eccc8b3267cbae29ed3ab2d0bd37fca70ab622e46aaa9375ddb7d" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", + "tracing", +] + +[[package]] +name = "tower-service" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" + +[[package]] +name = "tracing" +version = "0.1.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" +dependencies = [ + "cfg-if", + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.28", +] + +[[package]] +name = "tracing-core" +version = "0.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922" +dependencies = [ + "lazy_static", + "log", + "tracing-core", +] + +[[package]] +name = "tracing-serde" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc6b213177105856957181934e4920de57730fc69bf42c37ee5bb664d406d9e1" +dependencies = [ + "serde", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77" +dependencies = [ + "nu-ansi-term", + "serde", + "serde_json", + "sharded-slab", + "smallvec", + "thread_local", + "tracing-core", + "tracing-log", + "tracing-serde", +] + +[[package]] +name = "try-lock" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" + [[package]] name = "typenum" version = "1.16.0" @@ -1554,6 +2129,12 @@ version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" +[[package]] +name = "unicode-width" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" + [[package]] name = "unicode_categories" version = "0.1.1" @@ -1590,14 +2171,36 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fa2982af2eec27de306107c027578ff7f423d65f7250e40ce0fea8f45248b81" dependencies = [ "getrandom", + "serde", ] +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + [[package]] name = "version_check" version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + [[package]] name = "wasi" version = 
"0.10.0+wasi-snapshot-preview1" @@ -1631,10 +2234,22 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.28", "wasm-bindgen-shared", ] +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c02dbc21516f9f1f04f187958890d7e6026df8d16540b7ad9492bc34a67cea03" +dependencies = [ + "cfg-if", + "js-sys", + "wasm-bindgen", + "web-sys", +] + [[package]] name = "wasm-bindgen-macro" version = "0.2.87" @@ -1653,7 +2268,7 @@ checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.28", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -1731,7 +2346,16 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f" dependencies = [ - "windows-targets", + "windows-targets 0.48.0", +] + +[[package]] +name = "windows-sys" +version = "0.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets 0.42.2", ] [[package]] @@ -1740,7 +2364,22 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" dependencies = [ - "windows-targets", + "windows-targets 0.48.0", +] + +[[package]] +name = "windows-targets" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", ] [[package]] @@ -1749,53 +2388,104 @@ 
version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7b1eb6f0cd7c80c79759c929114ef071b87354ce476d9d94271031c0497adfd5" dependencies = [ - "windows_aarch64_gnullvm", - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_msvc", - "windows_x86_64_gnu", - "windows_x86_64_gnullvm", - "windows_x86_64_msvc", + "windows_aarch64_gnullvm 0.48.0", + "windows_aarch64_msvc 0.48.0", + "windows_i686_gnu 0.48.0", + "windows_i686_msvc 0.48.0", + "windows_x86_64_gnu 0.48.0", + "windows_x86_64_gnullvm 0.48.0", + "windows_x86_64_msvc 0.48.0", ] +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" + [[package]] name = "windows_aarch64_gnullvm" version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" + [[package]] name = "windows_aarch64_msvc" version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" +[[package]] +name = "windows_i686_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" + [[package]] name = "windows_i686_gnu" version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" +[[package]] +name = "windows_i686_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" + [[package]] name = "windows_i686_msvc" version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" + [[package]] name = "windows_x86_64_gnu" version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" + [[package]] name = "windows_x86_64_gnullvm" version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" + [[package]] name = "windows_x86_64_msvc" version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" + +[[package]] +name = "winreg" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80d0f4e272c85def139476380b12f9ac60926689dd2e01d4923222f40580869d" +dependencies = [ + "winapi", +] diff --git a/pgml-sdks/rust/pgml/Cargo.toml b/pgml-sdks/rust/pgml/Cargo.toml index a9b3e6ed8..25cfbf7c1 100644 --- a/pgml-sdks/rust/pgml/Cargo.toml +++ b/pgml-sdks/rust/pgml/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "pgml" -version = "0.8.1" +version = 
"0.9.0" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html @@ -19,13 +19,20 @@ pyo3 = { version = "0.18.3", optional = true, features = ["extension-module"] } pyo3-asyncio = { version = "0.18", features = ["attributes", "tokio-runtime"], optional = true } neon = { version = "0.10", optional = true, default-features = false, features = ["napi-6", "promise-api", "channel-api"] } itertools = "0.10.5" -uuid = {version = "1.3.3", features = ["v4"] } +uuid = {version = "1.3.3", features = ["v4", "serde"] } md5 = "0.7.0" -log = "0.4.18" sea-query = { version = "0.28.5", features = ["attr", "thread-safe", "with-json", "postgres-array"] } sea-query-binder = { version = "0.3.1", features = ["sqlx-postgres", "with-json", "postgres-array"] } regex = "1.8.4" +reqwest = { version = "0.11", features = ["json"] } +async-trait = "0.1.71" +tracing = { version = "0.1.37" } +tracing-subscriber = { version = "0.3.17", features = ["json"] } +indicatif = "0.17.6" +serde = "1.0.181" +futures = "0.3.28" [features] +default = [] python = ["dep:pyo3", "dep:pyo3-asyncio"] javascript = ["dep:neon"] diff --git a/pgml-sdks/rust/pgml/build.rs b/pgml-sdks/rust/pgml/build.rs index 8dfa7a8e9..ea5bb25bd 100644 --- a/pgml-sdks/rust/pgml/build.rs +++ b/pgml-sdks/rust/pgml/build.rs @@ -2,9 +2,37 @@ use std::fs::remove_file; use std::fs::OpenOptions; use std::io::Write; +const ADDITIONAL_DEFAULTS_FOR_PYTHON: &[u8] = br#" +def py_init_logger(level: Optional[str] = "Default set in Rust. Please see documentation.", format: Optional[str] = "Default set in Rust. 
Please see documentation.") -> None + +Json = Any +DateTime = int +"#; + +const ADDITIONAL_DEFAULTS_FOR_JAVASCRIPT: &[u8] = br#" +export function js_init_logger(level?: string, format?: string): void; + +export type Json = { [key: string]: any }; +export type DateTime = Date; + +export function newCollection(name: string, database_url?: string): Collection; +export function newModel(name?: string, source?: string, parameters?: Json): Model; +export function newSplitter(name?: string, parameters?: Json): Splitter; +export function newBuiltins(database_url?: string): Builtins; +export function newPipeline(name: string, model?: Model, splitter?: Splitter, parameters?: Json): Pipeline; +"#; + fn main() { // Remove python stub file that is auto generated each build remove_file("./python/pgml/pgml.pyi").ok(); + let mut file = OpenOptions::new() + .create(true) + .write(true) + .append(true) + .open("./python/pgml/pgml.pyi") + .unwrap(); + // Add our opening function declaration here + file.write_all(ADDITIONAL_DEFAULTS_FOR_PYTHON).unwrap(); // Remove typescript declaration file that is auto generated each build remove_file("./javascript/index.d.ts").ok(); @@ -14,9 +42,6 @@ fn main() { .append(true) .open("./javascript/index.d.ts") .unwrap(); - // Add our opening function declaration here - file.write_all( - b"\nexport function newDatabase(connection_string: string): Promise;\n", - ) - .unwrap(); + // Add some manual declarations here + file.write_all(ADDITIONAL_DEFAULTS_FOR_JAVASCRIPT).unwrap(); } diff --git a/pgml-sdks/rust/pgml/javascript/examples/README.md b/pgml-sdks/rust/pgml/javascript/examples/README.md index 3ee6a0d01..3c93410c5 100644 --- a/pgml-sdks/rust/pgml/javascript/examples/README.md +++ b/pgml-sdks/rust/pgml/javascript/examples/README.md @@ -4,4 +4,4 @@ Here we have a set of examples of different use cases of the pgml javascript SDK ## Examples: -1. 
[Getting Started](./getting-started/) - Simple project that uses the pgml SDK to create a collection, upsert documents into the collection, and run a vector search on the collection. +1. [Getting Started](./getting-started/) - Simple project that uses the pgml SDK to create a collection, add a pipeline, upsert documents, and run a vector search on the collection. diff --git a/pgml-sdks/rust/pgml/javascript/examples/getting-started/README.md b/pgml-sdks/rust/pgml/javascript/examples/getting-started/README.md index df955fbf9..293b5a3ca 100644 --- a/pgml-sdks/rust/pgml/javascript/examples/getting-started/README.md +++ b/pgml-sdks/rust/pgml/javascript/examples/getting-started/README.md @@ -1,12 +1,12 @@ # Getting Started with the PGML Javascript SDK -In this example repo you will find a basic script that you can run to get started with the PGML Javascript SDK. This script will create a collection, upsert documents into the collection, generate chunks, generate embeddings, and run a vector search on the collection. +In this example repo you will find a basic script that you can run to get started with the PGML Javascript SDK. This script will create a collection, add a pipeline, and run a vector search on the collection. ## Steps to run the example 1. Clone the repo 2. Install dependencies `npm install` -3. Create a .env file and set `PGML_CONNECTION` to your Postgres connection string +3. Create a .env file and set `DATABASE_URL` to your Postgres connection string 4. Open index.js and check out the code 5. 
Run the script `node index.js` diff --git a/pgml-sdks/rust/pgml/javascript/examples/getting-started/index.js b/pgml-sdks/rust/pgml/javascript/examples/getting-started/index.js index 20fe6f3e7..75bd75802 100644 --- a/pgml-sdks/rust/pgml/javascript/examples/getting-started/index.js +++ b/pgml-sdks/rust/pgml/javascript/examples/getting-started/index.js @@ -1,34 +1,37 @@ const pgml = require("pgml"); require("dotenv").config(); -const CONNECTION_STRING = - process.env.PGML_CONNECTION || - "postgres://postgres@127.0.0.1:5433/pgml_development"; - const main = async () => { - const db = await pgml.newDatabase(CONNECTION_STRING); - const collection_name = "hello_world"; - const collection = await db.create_or_get_collection(collection_name); + // Initialize the collection + const collection = pgml.newCollection("my_javascript_collection"); + + // Add a pipeline + const model = pgml.newModel(); + const splitter = pgml.newSplitter(); + const pipeline = pgml.newPipeline("my_javascript_pipeline", model, splitter); + await collection.add_pipeline(pipeline); + + // Upsert documents, these documents are automatically split into chunks and embedded by our pipeline const documents = [ { - name: "Document One", + id: "Document One", text: "document one contents...", }, { - name: "Document Two", + id: "Document Two", text: "document two contents...", }, ]; await collection.upsert_documents(documents); - await collection.generate_chunks(); - await collection.generate_embeddings(); - const queryResults = await collection.vector_search( - "What are the contents of document one?", // query text - {}, // embedding model parameters - 1 // top_k - ); - // convert the results to array of objects + // Perform vector search + const queryResults = await collection + .query() + .vector_recall("What are the contents of document one?", pipeline) + .limit(2) + .fetch_all(); + + // Convert the results to an array of objects const results = queryResults.map((result) => { const [similarity, text, 
metadata] = result; return { @@ -38,7 +41,7 @@ const main = async () => { }; }); - await db.archive_collection(collection_name); + await collection.archive(); return results; }; diff --git a/pgml-sdks/rust/pgml/javascript/package.json b/pgml-sdks/rust/pgml/javascript/package.json index f4d030afa..771b8c24e 100644 --- a/pgml-sdks/rust/pgml/javascript/package.json +++ b/pgml-sdks/rust/pgml/javascript/package.json @@ -1,6 +1,6 @@ { "name": "pgml", - "version": "0.8.1", + "version": "0.9.0", "description": "Open Source Alternative for Building End-to-End Vector Search Applications without OpenAI & Pinecone", "keywords": ["postgres", "machine learning", "vector databases", "embeddings"], "main": "index.js", diff --git a/pgml-sdks/rust/pgml/javascript/tests/javascript-tests/test.js b/pgml-sdks/rust/pgml/javascript/tests/javascript-tests/test.js deleted file mode 100644 index e75921aee..000000000 --- a/pgml-sdks/rust/pgml/javascript/tests/javascript-tests/test.js +++ /dev/null @@ -1,72 +0,0 @@ -import pgml from '../../index.js' - -const CONNECTION_STRING = process.env.DATABASE_URL; - -async function vector_recall() { - let db = await pgml.newDatabase(CONNECTION_STRING); - let collection_name = "jtest7" - let collection = await db.create_or_get_collection(collection_name); - console.log("The Collection:") - console.log(collection) - let doc = { - "name": "Test", - "text": "Hello, World! 
- From Javascript", - } - await collection.upsert_documents([doc]); - await collection.register_text_splitter("recursive_character", { chunk_size: 1500, chunk_overlap: 4 }) - let splitters = await collection.get_text_splitters(); - console.log("The Splitters:") - splitters.forEach((splitter) => { - console.log(splitter); - }) - await collection.generate_chunks(2); - await collection.register_model("embedding", "intfloat/e5-small"); - let models = await collection.get_models(); - console.log("The Models:") - models.forEach((model) => { - console.log(model); - }) - await collection.generate_embeddings(1, 2); - let results = await collection.vector_search("small", {}, 2, 1, 2); - console.log("The Results:") - results.forEach((result) => { - console.log(result); - }) - await db.archive_collection(collection_name); -} - -async function query_builder() { - let db = await pgml.newDatabase(CONNECTION_STRING); - let collection_name = "jqtest1" - let collection = await db.create_or_get_collection(collection_name); - let docs = [ - { - "name": "Test", - "text": "Hello, World! - From Javascript", - }, - { - "name": "Test", - "text": "Hello, World2! 
- From Javascript", - } - ] - await collection.upsert_documents(docs); - await collection.generate_chunks(); - await collection.generate_embeddings(); - let results = await collection.query().filter({ - metadata: { - name: {"$eq": "Test"} - } - }).vector_recall("Hello").limit(5).run(); - console.log("The Results:") - results.forEach((result) => { - console.log(result); - }) - await db.archive_collection(collection_name); -} - -async function main() { - // await test(); - await query_builder(); -} - -main().then(() => console.log("\nTests Done!")).catch((err) => console.log(err)); diff --git a/pgml-sdks/rust/pgml/javascript/tests/jest.config.js b/pgml-sdks/rust/pgml/javascript/tests/jest.config.js new file mode 100644 index 000000000..7e67de525 --- /dev/null +++ b/pgml-sdks/rust/pgml/javascript/tests/jest.config.js @@ -0,0 +1,8 @@ +export default { + preset: 'ts-jest', + testEnvironment: 'node', + roots: [''], + transform: { + '^.+\\.tsx?$': 'ts-jest' + } +} diff --git a/pgml-sdks/rust/pgml/javascript/tests/package-lock.json b/pgml-sdks/rust/pgml/javascript/tests/package-lock.json index cdfe437e1..db44b0a58 100644 --- a/pgml-sdks/rust/pgml/javascript/tests/package-lock.json +++ b/pgml-sdks/rust/pgml/javascript/tests/package-lock.json @@ -8,13 +8,3400 @@ "name": "pgml-tests", "version": "0.1.0", "devDependencies": { - "typescript": "^5.1.3" + "@types/jest": "^29.5.3", + "jest": "^29.6.1", + "ts-jest": "^29.1.1", + "typescript": "^5.1.6" + } + }, + "node_modules/@ampproject/remapping": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.1.tgz", + "integrity": "sha512-lFMjJTrFL3j7L9yBxwYfCq2k6qqwHyzuUl/XBnif78PWTJYyL/dfowQHWE3sp6U6ZzqWiiIZnpTMO96zhkjwtg==", + "dev": true, + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.0", + "@jridgewell/trace-mapping": "^0.3.9" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.22.5", + "resolved": 
"https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.22.5.tgz", + "integrity": "sha512-Xmwn266vad+6DAqEB2A6V/CcZVp62BbwVmcOJc2RPuwih1kw02TjQvWVWlcKGbBPd+8/0V5DEkOcizRGYsspYQ==", + "dev": true, + "dependencies": { + "@babel/highlight": "^7.22.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.22.9", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.22.9.tgz", + "integrity": "sha512-5UamI7xkUcJ3i9qVDS+KFDEK8/7oJ55/sJMB1Ge7IEapr7KfdfV/HErR+koZwOfd+SgtFKOKRhRakdg++DcJpQ==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.22.9", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.22.9.tgz", + "integrity": "sha512-G2EgeufBcYw27U4hhoIwFcgc1XU7TlXJ3mv04oOv1WCuo900U/anZSPzEqNjwdjgffkk2Gs0AN0dW1CKVLcG7w==", + "dev": true, + "dependencies": { + "@ampproject/remapping": "^2.2.0", + "@babel/code-frame": "^7.22.5", + "@babel/generator": "^7.22.9", + "@babel/helper-compilation-targets": "^7.22.9", + "@babel/helper-module-transforms": "^7.22.9", + "@babel/helpers": "^7.22.6", + "@babel/parser": "^7.22.7", + "@babel/template": "^7.22.5", + "@babel/traverse": "^7.22.8", + "@babel/types": "^7.22.5", + "convert-source-map": "^1.7.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.2", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/core/node_modules/convert-source-map": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz", + "integrity": "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==", + "dev": true + }, + "node_modules/@babel/generator": { + "version": "7.22.9", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.22.9.tgz", + "integrity": 
"sha512-KtLMbmicyuK2Ak/FTCJVbDnkN1SlT8/kceFTiuDiiRUUSMnHMidxSCdG4ndkTOHHpoomWe/4xkvHkEOncwjYIw==", + "dev": true, + "dependencies": { + "@babel/types": "^7.22.5", + "@jridgewell/gen-mapping": "^0.3.2", + "@jridgewell/trace-mapping": "^0.3.17", + "jsesc": "^2.5.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.22.9", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.22.9.tgz", + "integrity": "sha512-7qYrNM6HjpnPHJbopxmb8hSPoZ0gsX8IvUS32JGVoy+pU9e5N0nLr1VjJoR6kA4d9dmGLxNYOjeB8sUDal2WMw==", + "dev": true, + "dependencies": { + "@babel/compat-data": "^7.22.9", + "@babel/helper-validator-option": "^7.22.5", + "browserslist": "^4.21.9", + "lru-cache": "^5.1.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-environment-visitor": { + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.5.tgz", + "integrity": "sha512-XGmhECfVA/5sAt+H+xpSg0mfrHq6FzNr9Oxh7PSEBBRUb/mL7Kz3NICXb194rCqAEdxkhPT1a88teizAFyvk8Q==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-function-name": { + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.22.5.tgz", + "integrity": "sha512-wtHSq6jMRE3uF2otvfuD3DIvVhOsSNshQl0Qrd7qC9oQJzHvOL4qQXlQn2916+CXGywIjpGuIkoyZRRxHPiNQQ==", + "dev": true, + "dependencies": { + "@babel/template": "^7.22.5", + "@babel/types": "^7.22.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-hoist-variables": { + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz", + "integrity": 
"sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw==", + "dev": true, + "dependencies": { + "@babel/types": "^7.22.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.22.5.tgz", + "integrity": "sha512-8Dl6+HD/cKifutF5qGd/8ZJi84QeAKh+CEe1sBzz8UayBBGg1dAIJrdHOcOM5b2MpzWL2yuotJTtGjETq0qjXg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.22.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms": { + "version": "7.22.9", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.22.9.tgz", + "integrity": "sha512-t+WA2Xn5K+rTeGtC8jCsdAH52bjggG5TKRuRrAGNM/mjIbO4GxvlLMFOEz9wXY5I2XQ60PMFsAG2WIcG82dQMQ==", + "dev": true, + "dependencies": { + "@babel/helper-environment-visitor": "^7.22.5", + "@babel/helper-module-imports": "^7.22.5", + "@babel/helper-simple-access": "^7.22.5", + "@babel/helper-split-export-declaration": "^7.22.6", + "@babel/helper-validator-identifier": "^7.22.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-plugin-utils": { + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.22.5.tgz", + "integrity": "sha512-uLls06UVKgFG9QD4OeFYLEGteMIAa5kpTPcFL28yuCIIzsf6ZyKZMllKVOCZFhiZ5ptnwX4mtKdWCBE/uT4amg==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-simple-access": { + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.22.5.tgz", + "integrity": "sha512-n0H99E/K+Bika3++WNL17POvo4rKWZ7lZEp1Q+fStVbUi8nxPQEBOlTmCOxW/0JsS56SKKQ+ojAe2pHKJHN35w==", + "dev": true, + "dependencies": { + "@babel/types": 
"^7.22.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-split-export-declaration": { + "version": "7.22.6", + "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.6.tgz", + "integrity": "sha512-AsUnxuLhRYsisFiaJwvp1QF+I3KjD5FOxut14q/GzovUe6orHLesW2C7d754kRm53h5gqrz6sFl6sxc4BVtE/g==", + "dev": true, + "dependencies": { + "@babel/types": "^7.22.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.22.5.tgz", + "integrity": "sha512-mM4COjgZox8U+JcXQwPijIZLElkgEpO5rsERVDJTc2qfCDfERyob6k5WegS14SX18IIjv+XD+GrqNumY5JRCDw==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.5.tgz", + "integrity": "sha512-aJXu+6lErq8ltp+JhkJUfk1MTGyuA4v7f3pA+BJ5HLfNC6nAQ0Cpi9uOquUj8Hehg0aUiHzWQbOVJGao6ztBAQ==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-option": { + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.22.5.tgz", + "integrity": "sha512-R3oB6xlIVKUnxNUxbmgq7pKjxpru24zlimpE8WK47fACIlM0II/Hm1RS8IaOI7NgCr6LNS+jl5l75m20npAziw==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helpers": { + "version": "7.22.6", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.22.6.tgz", + "integrity": "sha512-YjDs6y/fVOYFV8hAf1rxd1QvR9wJe1pDBZ2AREKq/SDayfPzgk0PBnVuTCE5X1acEpMMNOVUqoe+OwiZGJ+OaA==", + "dev": true, + "dependencies": { + "@babel/template": "^7.22.5", + "@babel/traverse": "^7.22.6", + "@babel/types": "^7.22.5" + }, + "engines": { + "node": ">=6.9.0" + } + 
}, + "node_modules/@babel/highlight": { + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.22.5.tgz", + "integrity": "sha512-BSKlD1hgnedS5XRnGOljZawtag7H1yPfQp0tdNJCHoH6AZ+Pcm9VvkrK59/Yy593Ypg0zMxH2BxD1VPYUQ7UIw==", + "dev": true, + "dependencies": { + "@babel/helper-validator-identifier": "^7.22.5", + "chalk": "^2.0.0", + "js-tokens": "^4.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/highlight/node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/@babel/highlight/node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", + "dev": true + }, + "node_modules/@babel/highlight/node_modules/escape-string-regexp": { + "version": "1.0.5", + 
"resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "dev": true, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/@babel/highlight/node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/parser": { + "version": "7.22.7", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.22.7.tgz", + "integrity": "sha512-7NF8pOkHP5o2vpmGgNGcfAeCvOYhGLyA3Z4eBQkT1RJlWu47n63bCs93QfJ2hIAFCil7L5P2IWhs1oToVgrL0Q==", + "dev": true, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-syntax-async-generators": { + "version": "7.8.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz", + "integrity": "sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-bigint": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz", + "integrity": 
"sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-class-properties": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz", + "integrity": "sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.12.13" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-import-meta": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz", + "integrity": "sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-json-strings": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz", + "integrity": "sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-jsx": { + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.22.5.tgz", + "integrity": "sha512-gvyP4hZrgrs/wWMaocvxZ44Hw0b3W8Pe+cMxc8V1ULQ07oh8VNbIRaoD1LRZVTvD+0nieDKjfgKg89sD7rrKrg==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.22.5" + }, + "engines": { + "node": 
">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-logical-assignment-operators": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz", + "integrity": "sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-nullish-coalescing-operator": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz", + "integrity": "sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-numeric-separator": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz", + "integrity": "sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-object-rest-spread": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz", + "integrity": "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } 
+ }, + "node_modules/@babel/plugin-syntax-optional-catch-binding": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz", + "integrity": "sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-optional-chaining": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz", + "integrity": "sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-top-level-await": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz", + "integrity": "sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-typescript": { + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.22.5.tgz", + "integrity": "sha512-1mS2o03i7t1c6VzH6fdQ3OA8tcEIxwG18zIPRp+UY1Ihv6W+XZzBCVxExF9upussPXJ0xE9XRHwMoNs1ep/nRQ==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.22.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/template": { + "version": "7.22.5", + 
"resolved": "https://registry.npmjs.org/@babel/template/-/template-7.22.5.tgz", + "integrity": "sha512-X7yV7eiwAxdj9k94NEylvbVHLiVG1nvzCV2EAowhxLTwODV1jl9UzZ48leOC0sH7OnuHrIkllaBgneUykIcZaw==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.22.5", + "@babel/parser": "^7.22.5", + "@babel/types": "^7.22.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.22.8", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.22.8.tgz", + "integrity": "sha512-y6LPR+wpM2I3qJrsheCTwhIinzkETbplIgPBbwvqPKc+uljeA5gP+3nP8irdYt1mjQaDnlIcG+dw8OjAco4GXw==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.22.5", + "@babel/generator": "^7.22.7", + "@babel/helper-environment-visitor": "^7.22.5", + "@babel/helper-function-name": "^7.22.5", + "@babel/helper-hoist-variables": "^7.22.5", + "@babel/helper-split-export-declaration": "^7.22.6", + "@babel/parser": "^7.22.7", + "@babel/types": "^7.22.5", + "debug": "^4.1.0", + "globals": "^11.1.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/types": { + "version": "7.22.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.22.5.tgz", + "integrity": "sha512-zo3MIHGOkPOfoRXitsgHLjEXmlDaD/5KU1Uzuc9GNiZPhSqVxVRtxuPaSBZDsYZ9qV88AjtMtWW7ww98loJ9KA==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.22.5", + "@babel/helper-validator-identifier": "^7.22.5", + "to-fast-properties": "^2.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@bcoe/v8-coverage": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz", + "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", + "dev": true + }, + "node_modules/@istanbuljs/load-nyc-config": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz", + "integrity": 
"sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==", + "dev": true, + "dependencies": { + "camelcase": "^5.3.1", + "find-up": "^4.1.0", + "get-package-type": "^0.1.0", + "js-yaml": "^3.13.1", + "resolve-from": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/schema": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", + "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/console": { + "version": "29.6.1", + "resolved": "https://registry.npmjs.org/@jest/console/-/console-29.6.1.tgz", + "integrity": "sha512-Aj772AYgwTSr5w8qnyoJ0eDYvN6bMsH3ORH1ivMotrInHLKdUz6BDlaEXHdM6kODaBIkNIyQGzsMvRdOv7VG7Q==", + "dev": true, + "dependencies": { + "@jest/types": "^29.6.1", + "@types/node": "*", + "chalk": "^4.0.0", + "jest-message-util": "^29.6.1", + "jest-util": "^29.6.1", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/core": { + "version": "29.6.1", + "resolved": "https://registry.npmjs.org/@jest/core/-/core-29.6.1.tgz", + "integrity": "sha512-CcowHypRSm5oYQ1obz1wfvkjZZ2qoQlrKKvlfPwh5jUXVU12TWr2qMeH8chLMuTFzHh5a1g2yaqlqDICbr+ukQ==", + "dev": true, + "dependencies": { + "@jest/console": "^29.6.1", + "@jest/reporters": "^29.6.1", + "@jest/test-result": "^29.6.1", + "@jest/transform": "^29.6.1", + "@jest/types": "^29.6.1", + "@types/node": "*", + "ansi-escapes": "^4.2.1", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "exit": "^0.1.2", + "graceful-fs": "^4.2.9", + "jest-changed-files": "^29.5.0", + "jest-config": "^29.6.1", + "jest-haste-map": "^29.6.1", + "jest-message-util": "^29.6.1", + "jest-regex-util": "^29.4.3", + "jest-resolve": "^29.6.1", + "jest-resolve-dependencies": "^29.6.1", + "jest-runner": "^29.6.1", + "jest-runtime": "^29.6.1", + 
"jest-snapshot": "^29.6.1", + "jest-util": "^29.6.1", + "jest-validate": "^29.6.1", + "jest-watcher": "^29.6.1", + "micromatch": "^4.0.4", + "pretty-format": "^29.6.1", + "slash": "^3.0.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/@jest/environment": { + "version": "29.6.1", + "resolved": "https://registry.npmjs.org/@jest/environment/-/environment-29.6.1.tgz", + "integrity": "sha512-RMMXx4ws+Gbvw3DfLSuo2cfQlK7IwGbpuEWXCqyYDcqYTI+9Ju3a5hDnXaxjNsa6uKh9PQF2v+qg+RLe63tz5A==", + "dev": true, + "dependencies": { + "@jest/fake-timers": "^29.6.1", + "@jest/types": "^29.6.1", + "@types/node": "*", + "jest-mock": "^29.6.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/expect": { + "version": "29.6.1", + "resolved": "https://registry.npmjs.org/@jest/expect/-/expect-29.6.1.tgz", + "integrity": "sha512-N5xlPrAYaRNyFgVf2s9Uyyvr795jnB6rObuPx4QFvNJz8aAjpZUDfO4bh5G/xuplMID8PrnuF1+SfSyDxhsgYg==", + "dev": true, + "dependencies": { + "expect": "^29.6.1", + "jest-snapshot": "^29.6.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/expect-utils": { + "version": "29.6.1", + "resolved": "https://registry.npmjs.org/@jest/expect-utils/-/expect-utils-29.6.1.tgz", + "integrity": "sha512-o319vIf5pEMx0LmzSxxkYYxo4wrRLKHq9dP1yJU7FoPTB0LfAKSz8SWD6D/6U3v/O52t9cF5t+MeJiRsfk7zMw==", + "dev": true, + "dependencies": { + "jest-get-type": "^29.4.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/fake-timers": { + "version": "29.6.1", + "resolved": "https://registry.npmjs.org/@jest/fake-timers/-/fake-timers-29.6.1.tgz", + "integrity": "sha512-RdgHgbXyosCDMVYmj7lLpUwXA4c69vcNzhrt69dJJdf8azUrpRh3ckFCaTPNjsEeRi27Cig0oKDGxy5j7hOgHg==", + 
"dev": true, + "dependencies": { + "@jest/types": "^29.6.1", + "@sinonjs/fake-timers": "^10.0.2", + "@types/node": "*", + "jest-message-util": "^29.6.1", + "jest-mock": "^29.6.1", + "jest-util": "^29.6.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/globals": { + "version": "29.6.1", + "resolved": "https://registry.npmjs.org/@jest/globals/-/globals-29.6.1.tgz", + "integrity": "sha512-2VjpaGy78JY9n9370H8zGRCFbYVWwjY6RdDMhoJHa1sYfwe6XM/azGN0SjY8kk7BOZApIejQ1BFPyH7FPG0w3A==", + "dev": true, + "dependencies": { + "@jest/environment": "^29.6.1", + "@jest/expect": "^29.6.1", + "@jest/types": "^29.6.1", + "jest-mock": "^29.6.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/reporters": { + "version": "29.6.1", + "resolved": "https://registry.npmjs.org/@jest/reporters/-/reporters-29.6.1.tgz", + "integrity": "sha512-9zuaI9QKr9JnoZtFQlw4GREQbxgmNYXU6QuWtmuODvk5nvPUeBYapVR/VYMyi2WSx3jXTLJTJji8rN6+Cm4+FA==", + "dev": true, + "dependencies": { + "@bcoe/v8-coverage": "^0.2.3", + "@jest/console": "^29.6.1", + "@jest/test-result": "^29.6.1", + "@jest/transform": "^29.6.1", + "@jest/types": "^29.6.1", + "@jridgewell/trace-mapping": "^0.3.18", + "@types/node": "*", + "chalk": "^4.0.0", + "collect-v8-coverage": "^1.0.0", + "exit": "^0.1.2", + "glob": "^7.1.3", + "graceful-fs": "^4.2.9", + "istanbul-lib-coverage": "^3.0.0", + "istanbul-lib-instrument": "^5.1.0", + "istanbul-lib-report": "^3.0.0", + "istanbul-lib-source-maps": "^4.0.0", + "istanbul-reports": "^3.1.3", + "jest-message-util": "^29.6.1", + "jest-util": "^29.6.1", + "jest-worker": "^29.6.1", + "slash": "^3.0.0", + "string-length": "^4.0.1", + "strip-ansi": "^6.0.0", + "v8-to-istanbul": "^9.0.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + 
"node_modules/@jest/schemas": { + "version": "29.6.0", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.0.tgz", + "integrity": "sha512-rxLjXyJBTL4LQeJW3aKo0M/+GkCOXsO+8i9Iu7eDb6KwtP65ayoDsitrdPBtujxQ88k4wI2FNYfa6TOGwSn6cQ==", + "dev": true, + "dependencies": { + "@sinclair/typebox": "^0.27.8" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/source-map": { + "version": "29.6.0", + "resolved": "https://registry.npmjs.org/@jest/source-map/-/source-map-29.6.0.tgz", + "integrity": "sha512-oA+I2SHHQGxDCZpbrsCQSoMLb3Bz547JnM+jUr9qEbuw0vQlWZfpPS7CO9J7XiwKicEz9OFn/IYoLkkiUD7bzA==", + "dev": true, + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.18", + "callsites": "^3.0.0", + "graceful-fs": "^4.2.9" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/test-result": { + "version": "29.6.1", + "resolved": "https://registry.npmjs.org/@jest/test-result/-/test-result-29.6.1.tgz", + "integrity": "sha512-Ynr13ZRcpX6INak0TPUukU8GWRfm/vAytE3JbJNGAvINySWYdfE7dGZMbk36oVuK4CigpbhMn8eg1dixZ7ZJOw==", + "dev": true, + "dependencies": { + "@jest/console": "^29.6.1", + "@jest/types": "^29.6.1", + "@types/istanbul-lib-coverage": "^2.0.0", + "collect-v8-coverage": "^1.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/test-sequencer": { + "version": "29.6.1", + "resolved": "https://registry.npmjs.org/@jest/test-sequencer/-/test-sequencer-29.6.1.tgz", + "integrity": "sha512-oBkC36PCDf/wb6dWeQIhaviU0l5u6VCsXa119yqdUosYAt7/FbQU2M2UoziO3igj/HBDEgp57ONQ3fm0v9uyyg==", + "dev": true, + "dependencies": { + "@jest/test-result": "^29.6.1", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.6.1", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/transform": { + "version": "29.6.1", + "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-29.6.1.tgz", + 
"integrity": "sha512-URnTneIU3ZjRSaf906cvf6Hpox3hIeJXRnz3VDSw5/X93gR8ycdfSIEy19FlVx8NFmpN7fe3Gb1xF+NjXaQLWg==", + "dev": true, + "dependencies": { + "@babel/core": "^7.11.6", + "@jest/types": "^29.6.1", + "@jridgewell/trace-mapping": "^0.3.18", + "babel-plugin-istanbul": "^6.1.1", + "chalk": "^4.0.0", + "convert-source-map": "^2.0.0", + "fast-json-stable-stringify": "^2.1.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.6.1", + "jest-regex-util": "^29.4.3", + "jest-util": "^29.6.1", + "micromatch": "^4.0.4", + "pirates": "^4.0.4", + "slash": "^3.0.0", + "write-file-atomic": "^4.0.2" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/types": { + "version": "29.6.1", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-29.6.1.tgz", + "integrity": "sha512-tPKQNMPuXgvdOn2/Lg9HNfUvjYVGolt04Hp03f5hAk878uwOLikN+JzeLY0HcVgKgFl9Hs3EIqpu3WX27XNhnw==", + "dev": true, + "dependencies": { + "@jest/schemas": "^29.6.0", + "@types/istanbul-lib-coverage": "^2.0.0", + "@types/istanbul-reports": "^3.0.0", + "@types/node": "*", + "@types/yargs": "^17.0.8", + "chalk": "^4.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz", + "integrity": "sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ==", + "dev": true, + "dependencies": { + "@jridgewell/set-array": "^1.0.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.9" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz", + "integrity": "sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==", + "dev": true, + "engines": { + "node": 
">=6.0.0" + } + }, + "node_modules/@jridgewell/set-array": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.2.tgz", + "integrity": "sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==", + "dev": true, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.4.15", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz", + "integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==", + "dev": true + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.18", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.18.tgz", + "integrity": "sha512-w+niJYzMHdd7USdiH2U6869nqhD2nbfZXND5Yp93qIbEmnDNk7PD48o+YchRVpzMU7M6jVCbenTR7PA1FLQ9pA==", + "dev": true, + "dependencies": { + "@jridgewell/resolve-uri": "3.1.0", + "@jridgewell/sourcemap-codec": "1.4.14" + } + }, + "node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec": { + "version": "1.4.14", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz", + "integrity": "sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==", + "dev": true + }, + "node_modules/@sinclair/typebox": { + "version": "0.27.8", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", + "integrity": "sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==", + "dev": true + }, + "node_modules/@sinonjs/commons": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-3.0.0.tgz", + "integrity": "sha512-jXBtWAF4vmdNmZgD5FoKsVLv3rPgDnLgPbU84LIJ3otV44vJlDRokVng5v8NFJdCf/da9legHcKaRuZs4L7faA==", + "dev": true, + "dependencies": { + "type-detect": "4.0.8" + } + }, + 
"node_modules/@sinonjs/fake-timers": { + "version": "10.3.0", + "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-10.3.0.tgz", + "integrity": "sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA==", + "dev": true, + "dependencies": { + "@sinonjs/commons": "^3.0.0" + } + }, + "node_modules/@types/babel__core": { + "version": "7.20.1", + "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.1.tgz", + "integrity": "sha512-aACu/U/omhdk15O4Nfb+fHgH/z3QsfQzpnvRZhYhThms83ZnAOZz7zZAWO7mn2yyNQaA4xTO8GLK3uqFU4bYYw==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.20.7", + "@babel/types": "^7.20.7", + "@types/babel__generator": "*", + "@types/babel__template": "*", + "@types/babel__traverse": "*" + } + }, + "node_modules/@types/babel__generator": { + "version": "7.6.4", + "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.6.4.tgz", + "integrity": "sha512-tFkciB9j2K755yrTALxD44McOrk+gfpIpvC3sxHjRawj6PfnQxrse4Clq5y/Rq+G3mrBurMax/lG8Qn2t9mSsg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__template": { + "version": "7.4.1", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.1.tgz", + "integrity": "sha512-azBFKemX6kMg5Io+/rdGT0dkGreboUVR0Cdm3fz9QJWpaQGJRQXl7C+6hOTCZcMll7KFyEQpgbYI2lHdsS4U7g==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__traverse": { + "version": "7.20.1", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.20.1.tgz", + "integrity": "sha512-MitHFXnhtgwsGZWtT68URpOvLN4EREih1u3QtQiN4VdAxWKRVvGCSvw/Qth0M0Qq3pJpnGOu5JaM/ydK7OGbqg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.20.7" + } + }, + "node_modules/@types/graceful-fs": { + "version": "4.1.6", + "resolved": 
"https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.6.tgz", + "integrity": "sha512-Sig0SNORX9fdW+bQuTEovKj3uHcUL6LQKbCrrqb1X7J6/ReAbhCXRAhc+SMejhLELFj2QcyuxmUooZ4bt5ReSw==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/istanbul-lib-coverage": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz", + "integrity": "sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g==", + "dev": true + }, + "node_modules/@types/istanbul-lib-report": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz", + "integrity": "sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg==", + "dev": true, + "dependencies": { + "@types/istanbul-lib-coverage": "*" + } + }, + "node_modules/@types/istanbul-reports": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.1.tgz", + "integrity": "sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw==", + "dev": true, + "dependencies": { + "@types/istanbul-lib-report": "*" + } + }, + "node_modules/@types/jest": { + "version": "29.5.3", + "resolved": "https://registry.npmjs.org/@types/jest/-/jest-29.5.3.tgz", + "integrity": "sha512-1Nq7YrO/vJE/FYnqYyw0FS8LdrjExSgIiHyKg7xPpn+yi8Q4huZryKnkJatN1ZRH89Kw2v33/8ZMB7DuZeSLlA==", + "dev": true, + "dependencies": { + "expect": "^29.0.0", + "pretty-format": "^29.0.0" + } + }, + "node_modules/@types/node": { + "version": "20.4.4", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.4.4.tgz", + "integrity": "sha512-CukZhumInROvLq3+b5gLev+vgpsIqC2D0deQr/yS1WnxvmYLlJXZpaQrQiseMY+6xusl79E04UjWoqyr+t1/Ew==", + "dev": true + }, + "node_modules/@types/prettier": { + "version": "2.7.3", + "resolved": 
"https://registry.npmjs.org/@types/prettier/-/prettier-2.7.3.tgz", + "integrity": "sha512-+68kP9yzs4LMp7VNh8gdzMSPZFL44MLGqiHWvttYJe+6qnuVr4Ek9wSBQoveqY/r+LwjCcU29kNVkidwim+kYA==", + "dev": true + }, + "node_modules/@types/stack-utils": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.1.tgz", + "integrity": "sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw==", + "dev": true + }, + "node_modules/@types/yargs": { + "version": "17.0.24", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.24.tgz", + "integrity": "sha512-6i0aC7jV6QzQB8ne1joVZ0eSFIstHsCrobmOtghM11yGlH0j43FKL2UhWdELkyps0zuf7qVTUVCCR+tgSlyLLw==", + "dev": true, + "dependencies": { + "@types/yargs-parser": "*" + } + }, + "node_modules/@types/yargs-parser": { + "version": "21.0.0", + "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.0.tgz", + "integrity": "sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA==", + "dev": true + }, + "node_modules/ansi-escapes": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", + "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", + "dev": true, + "dependencies": { + "type-fest": "^0.21.3" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": 
"sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dev": true, + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/babel-jest": { + "version": "29.6.1", + "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-29.6.1.tgz", + "integrity": "sha512-qu+3bdPEQC6KZSPz+4Fyjbga5OODNcp49j6GKzG1EKbkfyJBxEYGVUmVGpwCSeGouG52R4EgYMLb6p9YeEEQ4A==", + "dev": true, + "dependencies": { + "@jest/transform": "^29.6.1", + "@types/babel__core": "^7.1.14", + "babel-plugin-istanbul": "^6.1.1", + "babel-preset-jest": "^29.5.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "@babel/core": "^7.8.0" + } + }, + "node_modules/babel-plugin-istanbul": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz", + "integrity": "sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.0.0", + "@istanbuljs/load-nyc-config": "^1.0.0", + 
"@istanbuljs/schema": "^0.1.2", + "istanbul-lib-instrument": "^5.0.4", + "test-exclude": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/babel-plugin-jest-hoist": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-29.5.0.tgz", + "integrity": "sha512-zSuuuAlTMT4mzLj2nPnUm6fsE6270vdOfnpbJ+RmruU75UhLFvL0N2NgI7xpeS7NaB6hGqmd5pVpGTDYvi4Q3w==", + "dev": true, + "dependencies": { + "@babel/template": "^7.3.3", + "@babel/types": "^7.3.3", + "@types/babel__core": "^7.1.14", + "@types/babel__traverse": "^7.0.6" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/babel-preset-current-node-syntax": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz", + "integrity": "sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ==", + "dev": true, + "dependencies": { + "@babel/plugin-syntax-async-generators": "^7.8.4", + "@babel/plugin-syntax-bigint": "^7.8.3", + "@babel/plugin-syntax-class-properties": "^7.8.3", + "@babel/plugin-syntax-import-meta": "^7.8.3", + "@babel/plugin-syntax-json-strings": "^7.8.3", + "@babel/plugin-syntax-logical-assignment-operators": "^7.8.3", + "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3", + "@babel/plugin-syntax-numeric-separator": "^7.8.3", + "@babel/plugin-syntax-object-rest-spread": "^7.8.3", + "@babel/plugin-syntax-optional-catch-binding": "^7.8.3", + "@babel/plugin-syntax-optional-chaining": "^7.8.3", + "@babel/plugin-syntax-top-level-await": "^7.8.3" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/babel-preset-jest": { + "version": "29.5.0", + "resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-29.5.0.tgz", + "integrity": "sha512-JOMloxOqdiBSxMAzjRaH023/vvcaSaec49zvg+2LmNsktC7ei39LTJGw02J+9uUtTZUq6xbLyJ4dxe9sSmIuAg==", + 
"dev": true, + "dependencies": { + "babel-plugin-jest-hoist": "^29.5.0", + "babel-preset-current-node-syntax": "^1.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "dependencies": { + "fill-range": "^7.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browserslist": { + "version": "4.21.9", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.21.9.tgz", + "integrity": "sha512-M0MFoZzbUrRU4KNfCrDLnvyE7gub+peetoTid3TBIqtunaDJyXlwhakT+/VkvSXcfIzFfK/nkCs4nmyTmxdNSg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "caniuse-lite": "^1.0.30001503", + "electron-to-chromium": "^1.4.431", + "node-releases": "^2.0.12", + "update-browserslist-db": "^1.0.11" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": 
"^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/bs-logger": { + "version": "0.2.6", + "resolved": "https://registry.npmjs.org/bs-logger/-/bs-logger-0.2.6.tgz", + "integrity": "sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog==", + "dev": true, + "dependencies": { + "fast-json-stable-stringify": "2.x" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/bser": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/bser/-/bser-2.1.1.tgz", + "integrity": "sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==", + "dev": true, + "dependencies": { + "node-int64": "^0.4.0" + } + }, + "node_modules/buffer-from": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", + "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", + "dev": true + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001517", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001517.tgz", + "integrity": "sha512-Vdhm5S11DaFVLlyiKu4hiUTkpZu+y1KA/rZZqVQfOD5YdDT/eQKlkt7NaE0WGOFgX32diqt9MiP9CAiFeRklaA==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": 
"https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ] + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/char-regex": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/char-regex/-/char-regex-1.0.2.tgz", + "integrity": "sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/ci-info": { + "version": "3.8.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.8.0.tgz", + "integrity": "sha512-eXTggHWSooYhq49F2opQhuHWgzucfF2YgODK4e1566GQs5BIfP30B0oenwBJHfWxAs2fyPB1s7Mg949zLf61Yw==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "engines": { + "node": ">=8" + } + }, + "node_modules/cjs-module-lexer": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.2.3.tgz", + "integrity": "sha512-0TNiGstbQmCFwt4akjjBg5pLRTSyj/PkWQ1ZoO2zntmg9yLqSRxwEa4iCfQLGjqhiqBfOJa7W/E8wfGrTDmlZQ==", + "dev": true + }, + "node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dev": true, + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/co": { + "version": "4.6.0", + "resolved": 
"https://registry.npmjs.org/co/-/co-4.6.0.tgz", + "integrity": "sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==", + "dev": true, + "engines": { + "iojs": ">= 1.0.0", + "node": ">= 0.12.0" + } + }, + "node_modules/collect-v8-coverage": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/collect-v8-coverage/-/collect-v8-coverage-1.0.2.tgz", + "integrity": "sha512-lHl4d5/ONEbLlJvaJNtsF/Lz+WvB07u2ycqTYbdrq7UypDXailES4valYb2eWiJFxZlVmpGekfqoxQhzyFdT4Q==", + "dev": true + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true + }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true + }, + "node_modules/cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, + "dependencies": { + 
"path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/dedent": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/dedent/-/dedent-0.7.0.tgz", + "integrity": "sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA==", + "dev": true + }, + "node_modules/deepmerge": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", + "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/detect-newline": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz", + "integrity": "sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/diff-sequences": { + "version": "29.4.3", + "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-29.4.3.tgz", + "integrity": "sha512-ofrBgwpPhCD85kMKtE9RYFFq6OC1A89oW2vvgWZNCwxrUpRUILopY7lsYyMDSjc8g6U6aiO0Qubg6r4Wgt5ZnA==", + "dev": true, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/electron-to-chromium": { + "version": "1.4.468", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.468.tgz", + "integrity": 
"sha512-6M1qyhaJOt7rQtNti1lBA0GwclPH+oKCmsra/hkcWs5INLxfXXD/dtdnaKUYQu/pjOBP/8Osoe4mAcNvvzoFag==", + "dev": true + }, + "node_modules/emittery": { + "version": "0.13.1", + "resolved": "https://registry.npmjs.org/emittery/-/emittery-0.13.1.tgz", + "integrity": "sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sindresorhus/emittery?sponsor=1" + } + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, + "dependencies": { + "is-arrayish": "^0.2.1" + } + }, + "node_modules/escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-string-regexp": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", + "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true, + "bin": { + "esparse": "bin/esparse.js", + 
"esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/execa": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/exit": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz", + "integrity": "sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ==", + "dev": true, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/expect": { + "version": "29.6.1", + "resolved": "https://registry.npmjs.org/expect/-/expect-29.6.1.tgz", + "integrity": "sha512-XEdDLonERCU1n9uR56/Stx9OqojaLAQtZf9PrCHH9Hl8YXiEIka3H4NXJ3NOIBmQJTg7+j7buh34PMHfJujc8g==", + "dev": true, + "dependencies": { + "@jest/expect-utils": "^29.6.1", + "@types/node": "*", + "jest-get-type": "^29.4.3", + "jest-matcher-utils": "^29.6.1", + "jest-message-util": "^29.6.1", + "jest-util": "^29.6.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true + }, + "node_modules/fb-watchman": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.2.tgz", + "integrity": 
"sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==", + "dev": true, + "dependencies": { + "bser": "2.1.1" + } + }, + "node_modules/fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true + }, + "node_modules/fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "dev": true + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": 
"sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-package-type": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz", + "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==", + "dev": true, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + 
"node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true + }, + "node_modules/has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.1" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/html-escaper": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", + "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", + "dev": true + }, + "node_modules/human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "dev": true, + "engines": { + "node": ">=10.17.0" + } + }, + "node_modules/import-local": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.1.0.tgz", + "integrity": "sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg==", + "dev": true, + "dependencies": { + "pkg-dir": "^4.2.0", + "resolve-cwd": "^3.0.0" + }, + "bin": { + "import-local-fixture": "fixtures/cli.js" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + 
"node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "dev": true, + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "node_modules/is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", + "dev": true + }, + "node_modules/is-core-module": { + "version": "2.12.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.12.1.tgz", + "integrity": "sha512-Q4ZuBAe2FUsKtyQJoQHlvP8OvBERxO3jEmy1I7hcRXcJBGGHFh/aJBswbXuS9sgrDH2QUO8ilkwNPHvHMd8clg==", + "dev": true, + "dependencies": { + "has": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-generator-fn": { + "version": "2.1.0", + "resolved": 
"https://registry.npmjs.org/is-generator-fn/-/is-generator-fn-2.1.0.tgz", + "integrity": "sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true + }, + "node_modules/istanbul-lib-coverage": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz", + "integrity": "sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-instrument": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz", + "integrity": "sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg==", + "dev": true, + "dependencies": { + "@babel/core": "^7.12.3", + "@babel/parser": "^7.14.7", + "@istanbuljs/schema": "^0.1.2", + "istanbul-lib-coverage": "^3.2.0", + "semver": "^6.3.0" + }, + "engines": { + "node": 
">=8" + } + }, + "node_modules/istanbul-lib-report": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz", + "integrity": "sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw==", + "dev": true, + "dependencies": { + "istanbul-lib-coverage": "^3.0.0", + "make-dir": "^3.0.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-source-maps": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz", + "integrity": "sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==", + "dev": true, + "dependencies": { + "debug": "^4.1.1", + "istanbul-lib-coverage": "^3.0.0", + "source-map": "^0.6.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-reports": { + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.5.tgz", + "integrity": "sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w==", + "dev": true, + "dependencies": { + "html-escaper": "^2.0.0", + "istanbul-lib-report": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest": { + "version": "29.6.1", + "resolved": "https://registry.npmjs.org/jest/-/jest-29.6.1.tgz", + "integrity": "sha512-Nirw5B4nn69rVUZtemCQhwxOBhm0nsp3hmtF4rzCeWD7BkjAXRIji7xWQfnTNbz9g0aVsBX6aZK3n+23LM6uDw==", + "dev": true, + "dependencies": { + "@jest/core": "^29.6.1", + "@jest/types": "^29.6.1", + "import-local": "^3.0.2", + "jest-cli": "^29.6.1" + }, + "bin": { + "jest": "bin/jest.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/jest-changed-files": { + 
"version": "29.5.0", + "resolved": "https://registry.npmjs.org/jest-changed-files/-/jest-changed-files-29.5.0.tgz", + "integrity": "sha512-IFG34IUMUaNBIxjQXF/iu7g6EcdMrGRRxaUSw92I/2g2YC6vCdTltl4nHvt7Ci5nSJwXIkCu8Ka1DKF+X7Z1Ag==", + "dev": true, + "dependencies": { + "execa": "^5.0.0", + "p-limit": "^3.1.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-circus": { + "version": "29.6.1", + "resolved": "https://registry.npmjs.org/jest-circus/-/jest-circus-29.6.1.tgz", + "integrity": "sha512-tPbYLEiBU4MYAL2XoZme/bgfUeotpDBd81lgHLCbDZZFaGmECk0b+/xejPFtmiBP87GgP/y4jplcRpbH+fgCzQ==", + "dev": true, + "dependencies": { + "@jest/environment": "^29.6.1", + "@jest/expect": "^29.6.1", + "@jest/test-result": "^29.6.1", + "@jest/types": "^29.6.1", + "@types/node": "*", + "chalk": "^4.0.0", + "co": "^4.6.0", + "dedent": "^0.7.0", + "is-generator-fn": "^2.0.0", + "jest-each": "^29.6.1", + "jest-matcher-utils": "^29.6.1", + "jest-message-util": "^29.6.1", + "jest-runtime": "^29.6.1", + "jest-snapshot": "^29.6.1", + "jest-util": "^29.6.1", + "p-limit": "^3.1.0", + "pretty-format": "^29.6.1", + "pure-rand": "^6.0.0", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-cli": { + "version": "29.6.1", + "resolved": "https://registry.npmjs.org/jest-cli/-/jest-cli-29.6.1.tgz", + "integrity": "sha512-607dSgTA4ODIN6go9w6xY3EYkyPFGicx51a69H7yfvt7lN53xNswEVLovq+E77VsTRi5fWprLH0yl4DJgE8Ing==", + "dev": true, + "dependencies": { + "@jest/core": "^29.6.1", + "@jest/test-result": "^29.6.1", + "@jest/types": "^29.6.1", + "chalk": "^4.0.0", + "exit": "^0.1.2", + "graceful-fs": "^4.2.9", + "import-local": "^3.0.2", + "jest-config": "^29.6.1", + "jest-util": "^29.6.1", + "jest-validate": "^29.6.1", + "prompts": "^2.0.1", + "yargs": "^17.3.1" + }, + "bin": { + "jest": "bin/jest.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + 
"peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/jest-config": { + "version": "29.6.1", + "resolved": "https://registry.npmjs.org/jest-config/-/jest-config-29.6.1.tgz", + "integrity": "sha512-XdjYV2fy2xYixUiV2Wc54t3Z4oxYPAELUzWnV6+mcbq0rh742X2p52pii5A3oeRzYjLnQxCsZmp0qpI6klE2cQ==", + "dev": true, + "dependencies": { + "@babel/core": "^7.11.6", + "@jest/test-sequencer": "^29.6.1", + "@jest/types": "^29.6.1", + "babel-jest": "^29.6.1", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "deepmerge": "^4.2.2", + "glob": "^7.1.3", + "graceful-fs": "^4.2.9", + "jest-circus": "^29.6.1", + "jest-environment-node": "^29.6.1", + "jest-get-type": "^29.4.3", + "jest-regex-util": "^29.4.3", + "jest-resolve": "^29.6.1", + "jest-runner": "^29.6.1", + "jest-util": "^29.6.1", + "jest-validate": "^29.6.1", + "micromatch": "^4.0.4", + "parse-json": "^5.2.0", + "pretty-format": "^29.6.1", + "slash": "^3.0.0", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "@types/node": "*", + "ts-node": ">=9.0.0" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "ts-node": { + "optional": true + } + } + }, + "node_modules/jest-diff": { + "version": "29.6.1", + "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-29.6.1.tgz", + "integrity": "sha512-FsNCvinvl8oVxpNLttNQX7FAq7vR+gMDGj90tiP7siWw1UdakWUGqrylpsYrpvj908IYckm5Y0Q7azNAozU1Kg==", + "dev": true, + "dependencies": { + "chalk": "^4.0.0", + "diff-sequences": "^29.4.3", + "jest-get-type": "^29.4.3", + "pretty-format": "^29.6.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-docblock": { + "version": "29.4.3", + "resolved": "https://registry.npmjs.org/jest-docblock/-/jest-docblock-29.4.3.tgz", + "integrity": 
"sha512-fzdTftThczeSD9nZ3fzA/4KkHtnmllawWrXO69vtI+L9WjEIuXWs4AmyME7lN5hU7dB0sHhuPfcKofRsUb/2Fg==", + "dev": true, + "dependencies": { + "detect-newline": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-each": { + "version": "29.6.1", + "resolved": "https://registry.npmjs.org/jest-each/-/jest-each-29.6.1.tgz", + "integrity": "sha512-n5eoj5eiTHpKQCAVcNTT7DRqeUmJ01hsAL0Q1SMiBHcBcvTKDELixQOGMCpqhbIuTcfC4kMfSnpmDqRgRJcLNQ==", + "dev": true, + "dependencies": { + "@jest/types": "^29.6.1", + "chalk": "^4.0.0", + "jest-get-type": "^29.4.3", + "jest-util": "^29.6.1", + "pretty-format": "^29.6.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-environment-node": { + "version": "29.6.1", + "resolved": "https://registry.npmjs.org/jest-environment-node/-/jest-environment-node-29.6.1.tgz", + "integrity": "sha512-ZNIfAiE+foBog24W+2caIldl4Irh8Lx1PUhg/GZ0odM1d/h2qORAsejiFc7zb+SEmYPn1yDZzEDSU5PmDkmVLQ==", + "dev": true, + "dependencies": { + "@jest/environment": "^29.6.1", + "@jest/fake-timers": "^29.6.1", + "@jest/types": "^29.6.1", + "@types/node": "*", + "jest-mock": "^29.6.1", + "jest-util": "^29.6.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-get-type": { + "version": "29.4.3", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-29.4.3.tgz", + "integrity": "sha512-J5Xez4nRRMjk8emnTpWrlkyb9pfRQQanDrvWHhsR1+VUfbwxi30eVcZFlcdGInRibU4G5LwHXpI7IRHU0CY+gg==", + "dev": true, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-haste-map": { + "version": "29.6.1", + "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-29.6.1.tgz", + "integrity": "sha512-0m7f9PZXxOCk1gRACiVgX85knUKPKLPg4oRCjLoqIm9brTHXaorMA0JpmtmVkQiT8nmXyIVoZd/nnH1cfC33ig==", + "dev": true, + "dependencies": { + "@jest/types": "^29.6.1", + "@types/graceful-fs": "^4.1.3", + "@types/node": "*", 
+ "anymatch": "^3.0.3", + "fb-watchman": "^2.0.0", + "graceful-fs": "^4.2.9", + "jest-regex-util": "^29.4.3", + "jest-util": "^29.6.1", + "jest-worker": "^29.6.1", + "micromatch": "^4.0.4", + "walker": "^1.0.8" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "optionalDependencies": { + "fsevents": "^2.3.2" + } + }, + "node_modules/jest-leak-detector": { + "version": "29.6.1", + "resolved": "https://registry.npmjs.org/jest-leak-detector/-/jest-leak-detector-29.6.1.tgz", + "integrity": "sha512-OrxMNyZirpOEwkF3UHnIkAiZbtkBWiye+hhBweCHkVbCgyEy71Mwbb5zgeTNYWJBi1qgDVfPC1IwO9dVEeTLwQ==", + "dev": true, + "dependencies": { + "jest-get-type": "^29.4.3", + "pretty-format": "^29.6.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-matcher-utils": { + "version": "29.6.1", + "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-29.6.1.tgz", + "integrity": "sha512-SLaztw9d2mfQQKHmJXKM0HCbl2PPVld/t9Xa6P9sgiExijviSp7TnZZpw2Fpt+OI3nwUO/slJbOfzfUMKKC5QA==", + "dev": true, + "dependencies": { + "chalk": "^4.0.0", + "jest-diff": "^29.6.1", + "jest-get-type": "^29.4.3", + "pretty-format": "^29.6.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-message-util": { + "version": "29.6.1", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-29.6.1.tgz", + "integrity": "sha512-KoAW2zAmNSd3Gk88uJ56qXUWbFk787QKmjjJVOjtGFmmGSZgDBrlIL4AfQw1xyMYPNVD7dNInfIbur9B2rd/wQ==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.12.13", + "@jest/types": "^29.6.1", + "@types/stack-utils": "^2.0.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "micromatch": "^4.0.4", + "pretty-format": "^29.6.1", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-mock": { + "version": "29.6.1", + "resolved": 
"https://registry.npmjs.org/jest-mock/-/jest-mock-29.6.1.tgz", + "integrity": "sha512-brovyV9HBkjXAEdRooaTQK42n8usKoSRR3gihzUpYeV/vwqgSoNfrksO7UfSACnPmxasO/8TmHM3w9Hp3G1dgw==", + "dev": true, + "dependencies": { + "@jest/types": "^29.6.1", + "@types/node": "*", + "jest-util": "^29.6.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-pnp-resolver": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/jest-pnp-resolver/-/jest-pnp-resolver-1.2.3.tgz", + "integrity": "sha512-+3NpwQEnRoIBtx4fyhblQDPgJI0H1IEIkX7ShLUjPGA7TtUTvI1oiKi3SR4oBR0hQhQR80l4WAe5RrXBwWMA8w==", + "dev": true, + "engines": { + "node": ">=6" + }, + "peerDependencies": { + "jest-resolve": "*" + }, + "peerDependenciesMeta": { + "jest-resolve": { + "optional": true + } + } + }, + "node_modules/jest-regex-util": { + "version": "29.4.3", + "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-29.4.3.tgz", + "integrity": "sha512-O4FglZaMmWXbGHSQInfXewIsd1LMn9p3ZXB/6r4FOkyhX2/iP/soMG98jGvk/A3HAN78+5VWcBGO0BJAPRh4kg==", + "dev": true, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-resolve": { + "version": "29.6.1", + "resolved": "https://registry.npmjs.org/jest-resolve/-/jest-resolve-29.6.1.tgz", + "integrity": "sha512-AeRkyS8g37UyJiP9w3mmI/VXU/q8l/IH52vj/cDAyScDcemRbSBhfX/NMYIGilQgSVwsjxrCHf3XJu4f+lxCMg==", + "dev": true, + "dependencies": { + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.6.1", + "jest-pnp-resolver": "^1.2.2", + "jest-util": "^29.6.1", + "jest-validate": "^29.6.1", + "resolve": "^1.20.0", + "resolve.exports": "^2.0.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-resolve-dependencies": { + "version": "29.6.1", + "resolved": "https://registry.npmjs.org/jest-resolve-dependencies/-/jest-resolve-dependencies-29.6.1.tgz", + "integrity": 
"sha512-BbFvxLXtcldaFOhNMXmHRWx1nXQO5LoXiKSGQcA1LxxirYceZT6ch8KTE1bK3X31TNG/JbkI7OkS/ABexVahiw==", + "dev": true, + "dependencies": { + "jest-regex-util": "^29.4.3", + "jest-snapshot": "^29.6.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-runner": { + "version": "29.6.1", + "resolved": "https://registry.npmjs.org/jest-runner/-/jest-runner-29.6.1.tgz", + "integrity": "sha512-tw0wb2Q9yhjAQ2w8rHRDxteryyIck7gIzQE4Reu3JuOBpGp96xWgF0nY8MDdejzrLCZKDcp8JlZrBN/EtkQvPQ==", + "dev": true, + "dependencies": { + "@jest/console": "^29.6.1", + "@jest/environment": "^29.6.1", + "@jest/test-result": "^29.6.1", + "@jest/transform": "^29.6.1", + "@jest/types": "^29.6.1", + "@types/node": "*", + "chalk": "^4.0.0", + "emittery": "^0.13.1", + "graceful-fs": "^4.2.9", + "jest-docblock": "^29.4.3", + "jest-environment-node": "^29.6.1", + "jest-haste-map": "^29.6.1", + "jest-leak-detector": "^29.6.1", + "jest-message-util": "^29.6.1", + "jest-resolve": "^29.6.1", + "jest-runtime": "^29.6.1", + "jest-util": "^29.6.1", + "jest-watcher": "^29.6.1", + "jest-worker": "^29.6.1", + "p-limit": "^3.1.0", + "source-map-support": "0.5.13" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-runtime": { + "version": "29.6.1", + "resolved": "https://registry.npmjs.org/jest-runtime/-/jest-runtime-29.6.1.tgz", + "integrity": "sha512-D6/AYOA+Lhs5e5il8+5pSLemjtJezUr+8zx+Sn8xlmOux3XOqx4d8l/2udBea8CRPqqrzhsKUsN/gBDE/IcaPQ==", + "dev": true, + "dependencies": { + "@jest/environment": "^29.6.1", + "@jest/fake-timers": "^29.6.1", + "@jest/globals": "^29.6.1", + "@jest/source-map": "^29.6.0", + "@jest/test-result": "^29.6.1", + "@jest/transform": "^29.6.1", + "@jest/types": "^29.6.1", + "@types/node": "*", + "chalk": "^4.0.0", + "cjs-module-lexer": "^1.0.0", + "collect-v8-coverage": "^1.0.0", + "glob": "^7.1.3", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.6.1", + "jest-message-util": "^29.6.1", + "jest-mock": 
"^29.6.1", + "jest-regex-util": "^29.4.3", + "jest-resolve": "^29.6.1", + "jest-snapshot": "^29.6.1", + "jest-util": "^29.6.1", + "slash": "^3.0.0", + "strip-bom": "^4.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-snapshot": { + "version": "29.6.1", + "resolved": "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-29.6.1.tgz", + "integrity": "sha512-G4UQE1QQ6OaCgfY+A0uR1W2AY0tGXUPQpoUClhWHq1Xdnx1H6JOrC2nH5lqnOEqaDgbHFgIwZ7bNq24HpB180A==", + "dev": true, + "dependencies": { + "@babel/core": "^7.11.6", + "@babel/generator": "^7.7.2", + "@babel/plugin-syntax-jsx": "^7.7.2", + "@babel/plugin-syntax-typescript": "^7.7.2", + "@babel/types": "^7.3.3", + "@jest/expect-utils": "^29.6.1", + "@jest/transform": "^29.6.1", + "@jest/types": "^29.6.1", + "@types/prettier": "^2.1.5", + "babel-preset-current-node-syntax": "^1.0.0", + "chalk": "^4.0.0", + "expect": "^29.6.1", + "graceful-fs": "^4.2.9", + "jest-diff": "^29.6.1", + "jest-get-type": "^29.4.3", + "jest-matcher-utils": "^29.6.1", + "jest-message-util": "^29.6.1", + "jest-util": "^29.6.1", + "natural-compare": "^1.4.0", + "pretty-format": "^29.6.1", + "semver": "^7.5.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-snapshot/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/jest-snapshot/node_modules/semver": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" 
+ }, + "engines": { + "node": ">=10" + } + }, + "node_modules/jest-snapshot/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/jest-util": { + "version": "29.6.1", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-29.6.1.tgz", + "integrity": "sha512-NRFCcjc+/uO3ijUVyNOQJluf8PtGCe/W6cix36+M3cTFgiYqFOOW5MgN4JOOcvbUhcKTYVd1CvHz/LWi8d16Mg==", + "dev": true, + "dependencies": { + "@jest/types": "^29.6.1", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-validate": { + "version": "29.6.1", + "resolved": "https://registry.npmjs.org/jest-validate/-/jest-validate-29.6.1.tgz", + "integrity": "sha512-r3Ds69/0KCN4vx4sYAbGL1EVpZ7MSS0vLmd3gV78O+NAx3PDQQukRU5hNHPXlyqCgFY8XUk7EuTMLugh0KzahA==", + "dev": true, + "dependencies": { + "@jest/types": "^29.6.1", + "camelcase": "^6.2.0", + "chalk": "^4.0.0", + "jest-get-type": "^29.4.3", + "leven": "^3.1.0", + "pretty-format": "^29.6.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-validate/node_modules/camelcase": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/jest-watcher": { + "version": "29.6.1", + "resolved": "https://registry.npmjs.org/jest-watcher/-/jest-watcher-29.6.1.tgz", + "integrity": "sha512-d4wpjWTS7HEZPaaj8m36QiaP856JthRZkrgcIY/7ISoUWPIillrXM23WPboZVLbiwZBt4/qn2Jke84Sla6JhFA==", + 
"dev": true, + "dependencies": { + "@jest/test-result": "^29.6.1", + "@jest/types": "^29.6.1", + "@types/node": "*", + "ansi-escapes": "^4.2.1", + "chalk": "^4.0.0", + "emittery": "^0.13.1", + "jest-util": "^29.6.1", + "string-length": "^4.0.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-worker": { + "version": "29.6.1", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-29.6.1.tgz", + "integrity": "sha512-U+Wrbca7S8ZAxAe9L6nb6g8kPdia5hj32Puu5iOqBCMTMWFHXuK6dOV2IFrpedbTV8fjMFLdWNttQTBL6u2MRA==", + "dev": true, + "dependencies": { + "@types/node": "*", + "jest-util": "^29.6.1", + "merge-stream": "^2.0.0", + "supports-color": "^8.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-worker/node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + }, + "node_modules/js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dev": true, + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/jsesc": { + "version": "2.5.2", + "resolved": 
"https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", + "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", + "dev": true, + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true + }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true, + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/kleur": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz", + "integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/leven": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", + "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true + }, + "node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": 
"sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/lodash.memoize": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", + "integrity": "sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==", + "dev": true + }, + "node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dev": true, + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/make-dir": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", + "dev": true, + "dependencies": { + "semver": "^6.0.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true + }, + "node_modules/makeerror": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/makeerror/-/makeerror-1.0.12.tgz", + "integrity": "sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==", + "dev": true, + "dependencies": { + "tmpl": "1.0.5" + } + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": 
"sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true + }, + "node_modules/micromatch": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", + "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "dev": true, + "dependencies": { + "braces": "^3.0.2", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true + }, + "node_modules/node-int64": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz", + "integrity": "sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==", + "dev": true + }, + "node_modules/node-releases": { + "version": "2.0.13", + "resolved": 
"https://registry.npmjs.org/node-releases/-/node-releases-2.0.13.tgz", + "integrity": "sha512-uYr7J37ae/ORWdZeQ1xxMJe3NtdmqMC/JZK+geofDrkLUApKRHPd18/TxtBOJ4A0/+uUIliorNrfYV6s1b02eQ==", + "dev": true + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "dev": true, + "dependencies": { + "path-key": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + 
"node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/p-locate/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": 
"https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true + }, + "node_modules/picocolors": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", + "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==", + "dev": true + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pirates": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.6.tgz", + "integrity": "sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/pkg-dir": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", + "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", + "dev": true, + 
"dependencies": { + "find-up": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/pretty-format": { + "version": "29.6.1", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.6.1.tgz", + "integrity": "sha512-7jRj+yXO0W7e4/tSJKoR7HRIHLPPjtNaUGG2xxKQnGvPNRkgWcQ0AZX6P4KBRJN4FcTBWb3sa7DVUJmocYuoog==", + "dev": true, + "dependencies": { + "@jest/schemas": "^29.6.0", + "ansi-styles": "^5.0.0", + "react-is": "^18.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/pretty-format/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/prompts": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz", + "integrity": "sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==", + "dev": true, + "dependencies": { + "kleur": "^3.0.3", + "sisteransi": "^1.0.5" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/pure-rand": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/pure-rand/-/pure-rand-6.0.2.tgz", + "integrity": "sha512-6Yg0ekpKICSjPswYOuC5sku/TSWaRYlA0qsXqJgM/d/4pLPHPuTxK7Nbf7jFKzAeedUhR8C7K9Uv63FBsSo8xQ==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/dubzzz" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fast-check" + } + ] + }, + "node_modules/react-is": { + "version": "18.2.0", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.2.0.tgz", + "integrity": "sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w==", + "dev": 
true + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/resolve": { + "version": "1.22.2", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.2.tgz", + "integrity": "sha512-Sb+mjNHOULsBv818T40qSPeRiuWLyaGMa5ewydRLFimneixmVy2zdivRl+AF6jaYPC8ERxGDmFSiqui6SfPd+g==", + "dev": true, + "dependencies": { + "is-core-module": "^2.11.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-cwd": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-3.0.0.tgz", + "integrity": "sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==", + "dev": true, + "dependencies": { + "resolve-from": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/resolve.exports": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/resolve.exports/-/resolve.exports-2.0.2.tgz", + "integrity": "sha512-X2UW6Nw3n/aMgDVy+0rSqgHlv39WZAlZrXCdnbyEiKm17DSqHX4MmQMaST3FbeWR5FTuRcUwYAziZajji0Y7mg==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": 
"sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true + }, + "node_modules/sisteransi": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", + "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==", + "dev": true + }, + "node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/source-map-support": { 
+ "version": "0.5.13", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.13.tgz", + "integrity": "sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==", + "dev": true, + "dependencies": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, + "node_modules/sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", + "dev": true + }, + "node_modules/stack-utils": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.6.tgz", + "integrity": "sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==", + "dev": true, + "dependencies": { + "escape-string-regexp": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/string-length": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/string-length/-/string-length-4.0.2.tgz", + "integrity": "sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ==", + "dev": true, + "dependencies": { + "char-regex": "^1.0.2", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": 
"sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-bom": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz", + "integrity": "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": 
"https://github.com/sponsors/ljharb" + } + }, + "node_modules/test-exclude": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", + "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==", + "dev": true, + "dependencies": { + "@istanbuljs/schema": "^0.1.2", + "glob": "^7.1.4", + "minimatch": "^3.0.4" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/tmpl": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz", + "integrity": "sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==", + "dev": true + }, + "node_modules/to-fast-properties": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", + "integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/ts-jest": { + "version": "29.1.1", + "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.1.1.tgz", + "integrity": "sha512-D6xjnnbP17cC85nliwGiL+tpoKN0StpgE0TeOjXQTU6MVCfsB4v7aW05CgQ/1OywGb0x/oy9hHFnN+sczTiRaA==", + "dev": true, + "dependencies": { + "bs-logger": "0.x", + "fast-json-stable-stringify": "2.x", + "jest-util": "^29.0.0", + "json5": "^2.2.3", + "lodash.memoize": "4.x", + "make-error": "1.x", + "semver": "^7.5.3", + "yargs-parser": "^21.0.1" + }, + "bin": { + "ts-jest": "cli.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": 
{ + "@babel/core": ">=7.0.0-beta.0 <8", + "@jest/types": "^29.0.0", + "babel-jest": "^29.0.0", + "jest": "^29.0.0", + "typescript": ">=4.3 <6" + }, + "peerDependenciesMeta": { + "@babel/core": { + "optional": true + }, + "@jest/types": { + "optional": true + }, + "babel-jest": { + "optional": true + }, + "esbuild": { + "optional": true + } + } + }, + "node_modules/ts-jest/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/ts-jest/node_modules/semver": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/ts-jest/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/type-fest": { + "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "dev": true, + "engines": { + "node": ">=10" + }, + 
"funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/typescript": { - "version": "5.1.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.1.3.tgz", - "integrity": "sha512-XH627E9vkeqhlZFQuL+UsyAXEnibT0kWR2FWONlr4sTjvxyJYnyefgrkyECLzM5NenmKzRAy2rR/OlYLA1HkZw==", + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.1.6.tgz", + "integrity": "sha512-zaWCozRZ6DLEWAWFrVDz1H6FVXzUSfTy5FUMWsQlU8Ym5JP9eO4xkTIROFCQvhQf61z6O/G6ugw3SgAnvvm+HA==", "dev": true, "bin": { "tsc": "bin/tsc", @@ -23,6 +3410,170 @@ "engines": { "node": ">=14.17" } + }, + "node_modules/update-browserslist-db": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.11.tgz", + "integrity": "sha512-dCwEFf0/oT85M1fHBg4F0jtLwJrutGoHSQXCh7u4o2t1drG+c0a9Flnqww6XUKSfQMPpJBRjU8d4RXB09qtvaA==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "escalade": "^3.1.1", + "picocolors": "^1.0.0" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/v8-to-istanbul": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.1.0.tgz", + "integrity": "sha512-6z3GW9x8G1gd+JIIgQQQxXuiJtCXeAjp6RaPEPLv62mH3iPHPxV6W3robxtCzNErRo6ZwTmzWhsbNvjyEBKzKA==", + "dev": true, + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.12", + "@types/istanbul-lib-coverage": "^2.0.1", + "convert-source-map": "^1.6.0" + }, + "engines": { + "node": ">=10.12.0" + } + }, + "node_modules/v8-to-istanbul/node_modules/convert-source-map": { + "version": "1.9.0", + "resolved": 
"https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz", + "integrity": "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==", + "dev": true + }, + "node_modules/walker": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/walker/-/walker-1.0.8.tgz", + "integrity": "sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==", + "dev": true, + "dependencies": { + "makeerror": "1.0.12" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true + }, + "node_modules/write-file-atomic": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-4.0.2.tgz", + "integrity": "sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg==", + "dev": true, + "dependencies": { + "imurmurhash": "^0.1.4", + "signal-exit": "^3.0.7" + }, + "engines": { 
+ "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", + "dev": true + }, + "node_modules/yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "dev": true, + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } } } } diff --git a/pgml-sdks/rust/pgml/javascript/tests/package.json b/pgml-sdks/rust/pgml/javascript/tests/package.json index f8886205d..c4348c352 100644 --- a/pgml-sdks/rust/pgml/javascript/tests/package.json +++ 
b/pgml-sdks/rust/pgml/javascript/tests/package.json @@ -3,7 +3,13 @@ "version": "0.1.0", "description": "", "type": "module", + "scripts": { + "test": "NODE_OPTIONS=--experimental-vm-modules jest" + }, "devDependencies": { - "typescript": "^5.1.3" + "@types/jest": "^29.5.3", + "jest": "^29.6.1", + "ts-jest": "^29.1.1", + "typescript": "^5.1.6" } } diff --git a/pgml-sdks/rust/pgml/javascript/tests/typescript-tests/test.js b/pgml-sdks/rust/pgml/javascript/tests/typescript-tests/test.js deleted file mode 100644 index 925395f9b..000000000 --- a/pgml-sdks/rust/pgml/javascript/tests/typescript-tests/test.js +++ /dev/null @@ -1,20 +0,0 @@ -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -import pgml from '../../index.js'; -const CONNECTION_STRING = process.env.DATABASE_URL ? 
process.env.DATABASE_URL : ""; -function test() { - return __awaiter(this, void 0, void 0, function* () { - let db = yield pgml.newDatabase(CONNECTION_STRING); - let collection_name = "ttest2"; - let collection = yield db.create_or_get_collection(collection_name); - console.log(collection); - }); -} -test().then(() => console.log("\nTests Done!")).catch((err) => console.log(err)); diff --git a/pgml-sdks/rust/pgml/javascript/tests/typescript-tests/test.ts b/pgml-sdks/rust/pgml/javascript/tests/typescript-tests/test.ts index 8e924722e..ae21bf573 100644 --- a/pgml-sdks/rust/pgml/javascript/tests/typescript-tests/test.ts +++ b/pgml-sdks/rust/pgml/javascript/tests/typescript-tests/test.ts @@ -1,12 +1,135 @@ import pgml from '../../index.js' -const CONNECTION_STRING = process.env.DATABASE_URL ? process.env.DATABASE_URL : ""; -const COLLECTION_NAME = "ttest2"; +//////////////////////////////////////////////////////////////////////////////////// +//////////////////////////////////////////////////////////////////////////////////// +// PLEASE BE AWARE THESE TESTS DO INVOLVE CHECKS ON LAZILY CREATED DATABASE ITEMS // +// IF ANY OF THE COLLECTION NAMES ALREADY EXIST, SOME TESTS MAY FAIL // +// THIS DOES NOT MEAN THE SDK IS BROKEN. PLEASE CLEAR YOUR DATABASE INSTANCE // +// BEFORE RUNNING ANY TESTS // +//////////////////////////////////////////////////////////////////////////////////// +//////////////////////////////////////////////////////////////////////////////////// -async function test() { - let db: pgml.Database = await pgml.newDatabase(CONNECTION_STRING); - let collection: pgml.Collection = await db.create_or_get_collection(COLLECTION_NAME); - console.log(collection) +const DATABASE_URL = process.env.DATABASE_URL; +if (!DATABASE_URL) { + console.log("No DATABASE_URL environment variable found. Please set one") + process.exit(1) } +const LOG_LEVEL = process.env.LOG_LEVEL ? 
process.env.LOG_LEVEL : "ERROR"; -test().then(() => console.log("\nTests Done!")).catch((err) => console.log(err)); +pgml.js_init_logger(DATABASE_URL, LOG_LEVEL); + +const generate_dummy_documents = (count: number) => { + let docs = []; + for (let i = 0; i < count; i++) { + docs.push({ + "id": i, + "text": `This is a test document: ${i}`, + "metadata": { + "uuid": i * 10, + "name": `Test Document ${i}` + } + }); + } + return docs; +} + +/////////////////////////////////////////////////// +// Test the API exposed is correct //////////////// +/////////////////////////////////////////////////// + +it("can create collection", () => { + let collection = pgml.newCollection("test_j_c_ccc_0"); + expect(collection).toBeTruthy(); +}); + +it("can create model", () => { + let model = pgml.newModel("test", "openai", { + "tester": "test 0123948712394871234987" + }); + expect(model).toBeTruthy(); +}); + +it("can create splitter", () => { + let splitter = pgml.newSplitter(); + expect(splitter).toBeTruthy(); +}); + +it("can create pipeline", () => { + let model = pgml.newModel(); + let splitter = pgml.newSplitter(); + let pipeline = pgml.newPipeline("test_j_p_ccc_0", model, splitter); + expect(pipeline).toBeTruthy(); +}); + +it("can create builtins", () => { + let builtins = pgml.newBuiltins(); + expect(builtins).toBeTruthy(); +}); + +/////////////////////////////////////////////////// +// Test various vector searches /////////////////// +/////////////////////////////////////////////////// + +it("can vector search with local embeddings", async () => { + let model = pgml.newModel(); + let splitter = pgml.newSplitter(); + let pipeline = pgml.newPipeline("test_j_p_cvswle_0", model, splitter); + let collection = pgml.newCollection("test_j_c_cvswle_3"); + await collection.upsert_documents(generate_dummy_documents(3)); + await collection.add_pipeline(pipeline); + let results = await collection.vector_search("Here is some query", pipeline); + expect(results).toHaveLength(3); + await 
collection.archive(); +}); + +it("can vector search with remote embeddings", async() => { + let model = pgml.newModel("text-embedding-ada-002", "openai"); + let splitter = pgml.newSplitter(); + let pipeline = pgml.newPipeline("test_j_p_cvswre_0", model, splitter); + let collection = pgml.newCollection("test_j_c_cvswre_1"); + await collection.upsert_documents(generate_dummy_documents(3)); + await collection.add_pipeline(pipeline); + let results = await collection.vector_search("Here is some query", pipeline); + expect(results).toHaveLength(3); + await collection.archive(); +}); + +it("can vector search with query builder", async() => { + let model = pgml.newModel(); + let splitter = pgml.newSplitter(); + let pipeline = pgml.newPipeline("test_j_p_cvswqb_0", model, splitter); + let collection = pgml.newCollection("test_j_c_cvswqb_1"); + await collection.upsert_documents(generate_dummy_documents(3)); + await collection.add_pipeline(pipeline); + let results = await collection.query().vector_recall("Here is some query", pipeline).limit(10).fetch_all(); + expect(results).toHaveLength(3); + await collection.archive(); +}); + +it("can vector search with query builder with remote embeddings", async() => { + let model = pgml.newModel("text-embedding-ada-002", "openai"); + let splitter = pgml.newSplitter(); + let pipeline = pgml.newPipeline("test_j_p_cvswqbwre_0", model, splitter); + let collection = pgml.newCollection("test_j_c_cvswqbwre_1"); + await collection.upsert_documents(generate_dummy_documents(3)); + await collection.add_pipeline(pipeline); + let results = await collection.query().vector_recall("Here is some query", pipeline).limit(10).fetch_all(); + expect(results).toHaveLength(3); + await collection.archive(); +}); + + +/////////////////////////////////////////////////// +// Test user output facing functions ////////////// +/////////////////////////////////////////////////// + +it("pipeline to dict", async () => { + let model = 
pgml.newModel("text-embedding-ada-002", "openai"); + let splitter = pgml.newSplitter(); + let pipeline = pgml.newPipeline("test_j_p_ptd_0", model, splitter); + let collection = pgml.newCollection("test_j_c_ptd_1"); + await collection.add_pipeline(pipeline); + let pipeline_dict = await pipeline.to_dict(); + console.log(JSON.stringify(pipeline_dict)) + expect(pipeline_dict["name"]).toBe("test_j_p_ptd_0"); +}); diff --git a/pgml-sdks/rust/pgml/pyproject.toml b/pgml-sdks/rust/pgml/pyproject.toml index 375745495..bd5d98d7f 100644 --- a/pgml-sdks/rust/pgml/pyproject.toml +++ b/pgml-sdks/rust/pgml/pyproject.toml @@ -5,7 +5,7 @@ build-backend = "maturin" [project] name = "pgml" requires-python = ">=3.7" -version = "0.8.1" +version = "0.9.0" description = "Python SDK is designed to facilitate the development of scalable vector search applications on PostgreSQL databases." authors = [ {name = "PostgresML", email = "team@postgresml.org"}, @@ -13,7 +13,7 @@ authors = [ homepage = "https://postgresml.org" repository = "https://github.com/postgresml/postgresml" documentation = "https://github.com/postgresml/postgresml/tree/master/pgml-sdks/python/pgml" -readme = "../../python/pgml/README.md" +readme = "./python/README.md" keywords = ["postgres","machine learning","vector databases","embeddings"] classifiers = [ "Programming Language :: Rust", diff --git a/pgml-sdks/rust/pgml/python/README.md b/pgml-sdks/rust/pgml/python/README.md new file mode 100644 index 000000000..b37174feb --- /dev/null +++ b/pgml-sdks/rust/pgml/python/README.md @@ -0,0 +1,244 @@ +# Open Source Alternative for Building End-to-End Vector Search Applications without OpenAI & Pinecone + +## Table of Contents + +- [Overview](#overview) +- [Quickstart](#quickstart) +- [Usage](#usage) +- [Examples](./examples/README.md) +- [Developer setup](#developer-setup) +- [API Reference](#api-reference) +- [Roadmap](#roadmap) + +## Overview + +Python SDK is designed to facilitate the development of scalable vector search 
applications on PostgreSQL databases. With this SDK, you can seamlessly manage various database tables related to documents, text chunks, text splitters, LLM (Language Model) models, and embeddings. By leveraging the SDK's capabilities, you can efficiently index LLM embeddings using PgVector for fast and accurate queries. + +### Key Features + +- **Automated Database Management**: With the SDK, you can easily handle the management of database tables related to documents, text chunks, text splitters, LLM models, and embeddings. This automated management system simplifies the process of setting up and maintaining your vector search application's data structure. + +- **Embedding Generation from Open Source Models**: The Python SDK provides the ability to generate embeddings using hundreds of open source models. These models, trained on vast amounts of data, capture the semantic meaning of text and enable powerful analysis and search capabilities. + +- **Flexible and Scalable Vector Search**: The Python SDK empowers you to build flexible and scalable vector search applications. The Python SDK seamlessly integrates with PgVector, a PostgreSQL extension specifically designed for handling vector-based indexing and querying. By leveraging these indices, you can perform advanced searches, rank results by relevance, and retrieve accurate and meaningful information from your database. + +### Use Cases + +Embeddings, the core concept of the Python SDK, find applications in various scenarios, including: + +- Search: Embeddings are commonly used for search functionalities, where results are ranked by relevance to a query string. By comparing the embeddings of query strings and documents, you can retrieve search results in order of their similarity or relevance. + +- Clustering: With embeddings, you can group text strings by similarity, enabling clustering of related data. 
By measuring the similarity between embeddings, you can identify clusters or groups of text strings that share common characteristics. + +- Recommendations: Embeddings play a crucial role in recommendation systems. By identifying items with related text strings based on their embeddings, you can provide personalized recommendations to users. + +- Anomaly Detection: Anomaly detection involves identifying outliers or anomalies that have little relatedness to the rest of the data. Embeddings can aid in this process by quantifying the similarity between text strings and flagging outliers. + +- Classification: Embeddings are utilized in classification tasks, where text strings are classified based on their most similar label. By comparing the embeddings of text strings and labels, you can classify new text strings into predefined categories. + +### How the Python SDK Works + +The Python SDK streamlines the development of vector search applications by abstracting away the complexities of database management and indexing. Here's an overview of how the SDK works: + +- **Automatic Document and Text Chunk Management**: The SDK provides a convenient interface to manage documents and pipelines, automatically handling chunking and embedding for you. You can easily organize and structure your text data within the PostgreSQL database. + +- **Open Source Model Integration**: With the SDK, you can seamlessly incorporate a wide range of open source models to generate high-quality embeddings. These models capture the semantic meaning of text and enable powerful analysis and search capabilities. + +- **Embedding Indexing**: The Python SDK utilizes the PgVector extension to efficiently index the embeddings generated by the open source models. This indexing process optimizes search performance and allows for fast and accurate retrieval of relevant results. 
+ +- **Querying and Search**: Once the embeddings are indexed, you can perform vector-based searches on the documents and text chunks stored in the PostgreSQL database. The SDK provides intuitive methods for executing queries and retrieving search results. + +## Quickstart + +Follow the steps below to quickly get started with the Python SDK for building scalable vector search applications on PostgresML databases. + +### Prerequisites + +Before you begin, make sure you have the following: + +- PostgresML Database: Ensure you have a PostgresML database version >`2.3.1`. You can spin up a database using [Docker](https://github.com/postgresml/postgresml#installation) or [sign up for a free GPU-powered database](https://postgresml.org/signup). + +- Set the `DATABASE_URL` environment variable to the connection string of your PostgresML database. + +- Python version >=3.8.1 + +### Installation + +To install the Python SDK, use pip: + +``` +pip install pgml +``` + +### Sample Code + +Once you have the Python SDK installed, you can use the following sample code as a starting point for your vector search application: + +```python +from pgml import Collection, Model, Splitter, Pipeline +from datasets import load_dataset +from time import time +from dotenv import load_dotenv +from rich.console import Console +import asyncio + +async def main(): + load_dotenv() + console = Console() + + # Initialize collection + collection = Collection("quora_collection") +``` + +**Explanation:** + +- The code imports the necessary modules and packages, including pgml, datasets, time, and rich. 
+- It creates an instance of the Collection class which we will add pipelines and documents onto + +Continuing within `async def main():` + +```python + # Create a pipeline using the default model and splitter + model = Model() + splitter = Splitter() + pipeline = Pipeline("quorav1", model, splitter) + await collection.add_pipeline(pipeline) +``` + +**Explanation** + +- The code creates an instance of `Model` and `Splitter` using their default arguments. +- Finally, the code constructs a pipeline called `"quorav1"` and adds it to the collection we initialized above. This pipeline automatically generates chunks and embeddings for every upserted document. + +Continuing with `async def main():` + +``` + # Prep documents for upserting + data = load_dataset("squad", split="train") + data = data.to_pandas() + data = data.drop_duplicates(subset=["context"]) + documents = [ + {"id": r["id"], "text": r["context"], "title": r["title"]} + for r in data.to_dict(orient="records") + ] + + # Upsert documents + await collection.upsert_documents(documents[:200]) +``` + +**Explanation** + +- The code loads the "squad" dataset, converts it to a pandas DataFrame, and drops any duplicate context values. +- It creates a list of dictionaries representing the documents to be indexed, with each dictionary containing the document's id, text, and title. +- Finally, they are upserted. As mentioned above, the pipeline added earlier automatically runs and generates chunks and embeddings for each document. + +Continuing within `async def main():` + +```python + # Query + query = "Who won 20 grammy awards?" + results = await collection.query().vector_recall(query, pipeline).limit(5).fetch_all() + console.print(results) + # Archive collection + await collection.archive() +``` + +**Explanation:** + +- The `query` method is called to perform a vector-based search on the collection. The query string is `Who won 20 grammy awards?`, and the top 5 results are requested. 
+- The search results are printed. +- Finally, the `archive` method is called to archive the collection and free up resources in the PostgresML database. + +Call `main` function in an async loop. + +```python +if __name__ == "__main__": + asyncio.run(main()) +``` + +**Running the Code** + +Open a terminal or command prompt and navigate to the directory where the file is saved. + +Execute the following command: + +``` +python vector_search.py +``` + +You should see the search results printed in the terminal. As you can see, our vector search engine found the right text chunk with the answer we are looking for. + +``` +[ + ( + 0.8423336495860181, + 'Beyoncé has won 20 Grammy Awards, both as a solo artist and member of Destiny\'s Child, making her the second most honored female artist by the Grammys, behind Alison Krauss and the most nominated woman in Grammy Award history with 52 nominations. "Single Ladies (Put a Ring on It)" won Song of the Year in 2010 while "Say My Name" and +"Crazy in Love" had previously won Best R&B Song. Dangerously in Love, B\'Day and I Am... Sasha Fierce have all won Best Contemporary R&B Album. Beyoncé set the record for the most Grammy awards won by a female artist in one night in 2010 when she won six awards, breaking the tie she previously held with Alicia Keys, Norah Jones, Alison Krauss, +and Amy Winehouse, with Adele equaling this in 2012. Following her role in Dreamgirls she was nominated for Best Original Song for "Listen" and Best Actress at the Golden Globe Awards, and Outstanding Actress in a Motion Picture at the NAACP Image Awards. 
Beyoncé won two awards at the Broadcast Film Critics Association Awards 2006; Best Song for +"Listen" and Best Original Soundtrack for Dreamgirls: Music from the Motion Picture.', + {'id': '56becc903aeaaa14008c949f', 'title': 'Beyoncé'} + ), + ( + 0.8210567582713351, + 'A self-described "modern-day feminist", Beyoncé creates songs that are often characterized by themes of love, relationships, and monogamy, as well as female sexuality and empowerment. On stage, her dynamic, highly choreographed performances have led to critics hailing her as one of the best entertainers in contemporary popular music. +Throughout a career spanning 19 years, she has sold over 118 million records as a solo artist, and a further 60 million with Destiny\'s Child, making her one of the best-selling music artists of all time. She has won 20 Grammy Awards and is the most nominated woman in the award\'s history. The Recording Industry Association of America recognized +her as the Top Certified Artist in America during the 2000s decade. In 2009, Billboard named her the Top Radio Songs Artist of the Decade, the Top Female Artist of the 2000s and their Artist of the Millennium in 2011. Time listed her among the 100 most influential people in the world in 2013 and 2014. Forbes magazine also listed her as the most +powerful female musician of 2015.', + {'id': '56be88473aeaaa14008c9080', 'title': 'Beyoncé'} + ) +] +``` + +## Usage + +### High-level Description + +The Python SDK provides a set of functionalities to build scalable vector search applications on PostgresQL databases. It enables users to create a collection, which represents a schema in the database, to store tables for documents, chunks, models, splitters, and embeddings. The Collection class in the SDK handles all operations related to these tables, allowing users to interact with the collection and perform various tasks. 
+ +#### Create or Get a Collection + +```python +collection = Collection("test_collection") +``` + +This initializes a new Collection used to do everything from upserting documents to performing vector search. + +#### Add a Pipeline + +```python +model = Model() +splitter = Splitter() +pipeline = Pipeline("test_pipeline", model, splitter) +await collection.add_pipeline(pipeline) +``` + +This creates a new pipeline with the specified `Model` and `Splitter`. The pipelines do the heavy lifting, automatically handling the chunking and embedding of documents. + +#### Upsert Documents + +```python +await collection.upsert_documents(documents) +``` + +The method is used to insert or update documents in a database table based on their ID and text. All enabled pipelines automatically chunk and embed upserted documents. + +#### Vector Search + +```python +results = await collection.query().vector_recall("Who won 20 grammy awards?", pipeline=pipeline).limit(2).fetch_all() +``` + +The `query` method returns a flexible query builder for high performance filterable vector search. + +### Developer Setup + +This Python library is generated from our core rust-sdk. Please check [rust-sdk documentation](../../README.md) for developer setup. + +### Roadmap + +- [x] Enable filters on document metadata in `vector_search`. [Issue](https://github.com/postgresml/postgresml/issues/663) +- [x] `text_search` functionality on documents using Postgres text search. [Issue](https://github.com/postgresml/postgresml/issues/664) +- [x] `hybrid_search` functionality that does a combination of `vector_search` and `text_search` in an order specified by the user. [Issue](https://github.com/postgresml/postgresml/issues/665) +- [x] Ability to call and manage OpenAI embeddings for comparison purposes. [Issue](https://github.com/postgresml/postgresml/issues/666) +- Save `vector_search` history for downstream monitoring of model performance. 
[Issue](https://github.com/postgresml/postgresml/issues/667) +- Perform chunking on the DB with multiple langchain splitters. [Issue](https://github.com/postgresml/postgresml/issues/668) diff --git a/pgml-sdks/rust/pgml/python/examples/README.md b/pgml-sdks/rust/pgml/python/examples/README.md new file mode 100644 index 000000000..dc8fce385 --- /dev/null +++ b/pgml-sdks/rust/pgml/python/examples/README.md @@ -0,0 +1,17 @@ +## Examples + +### [Semantic Search](./semantic_search.py) +This is a basic example to perform semantic search on a collection of documents. It loads the Quora dataset, creates a collection in a PostgreSQL database, upserts documents, generates chunks and embeddings, and then performs a vector search on a query. Embeddings are created using `intfloat/e5-small` model. The results are semantically similar documents to the query. Finally, the collection is archived. + +### [Question Answering](./question_answering.py) +This is an example to find documents relevant to a question from the collection of documents. It loads the Stanford Question Answering Dataset (SQuAD) into the database, generates chunks and embeddings. Query is passed to vector search to retrieve documents that match closely in the embeddings space. A score is returned with each of the search results. + +### [Question Answering using Instructor Model](./question_answering_instructor.py) +In this example, we will use `hkunlp/instructor-base` model to build text embeddings instead of the default `intfloat/e5-small` model. + +### [Extractive Question Answering](./extractive_question_answering.py) +In this example, we will show how to use `vector_recall` result as a `context` to a HuggingFace question answering model. We will use `Builtins.transform()` to run the model on the database. 
+ +### [Table Question Answering](./table_question_answering.py) +In this example, we will use [Open Table-and-Text Question Answering (OTT-QA) +](https://github.com/wenhuchen/OTT-QA) dataset to run queries on tables. We will use `deepset/all-mpnet-base-v2-table` model that is trained for embedding tabular data for retrieval tasks. diff --git a/pgml-sdks/rust/pgml/python/examples/chatbot/.env.template b/pgml-sdks/rust/pgml/python/examples/chatbot/.env.template deleted file mode 100644 index 9543e2450..000000000 --- a/pgml-sdks/rust/pgml/python/examples/chatbot/.env.template +++ /dev/null @@ -1,2 +0,0 @@ -OPENAI_API_KEY= -DATABASE_URL= \ No newline at end of file diff --git a/pgml-sdks/rust/pgml/python/examples/chatbot/.gitignore b/pgml-sdks/rust/pgml/python/examples/chatbot/.gitignore deleted file mode 100644 index 2eea525d8..000000000 --- a/pgml-sdks/rust/pgml/python/examples/chatbot/.gitignore +++ /dev/null @@ -1 +0,0 @@ -.env \ No newline at end of file diff --git a/pgml-sdks/rust/pgml/python/examples/chatbot/README.md b/pgml-sdks/rust/pgml/python/examples/chatbot/README.md deleted file mode 100644 index b3188287e..000000000 --- a/pgml-sdks/rust/pgml/python/examples/chatbot/README.md +++ /dev/null @@ -1 +0,0 @@ -# PostgresML Chatbot Builder \ No newline at end of file diff --git a/pgml-sdks/rust/pgml/python/examples/chatbot/pgml_chatbot/main.py b/pgml-sdks/rust/pgml/python/examples/chatbot/pgml_chatbot/main.py deleted file mode 100644 index 8c9dbe753..000000000 --- a/pgml-sdks/rust/pgml/python/examples/chatbot/pgml_chatbot/main.py +++ /dev/null @@ -1,287 +0,0 @@ -import asyncio -from pgml import Database -import logging -from rich.logging import RichHandler -from rich.progress import track -from rich import print -import os -from dotenv import load_dotenv -import glob -import argparse -from time import time -import openai -import signal -import json -import ast - -def handler(signum, frame): - print('Exiting...') - exit(0) - -signal.signal(signal.SIGINT, 
handler) - -parser = argparse.ArgumentParser(description="Process some integers.") -parser.add_argument( - "--root_dir", - dest="root_dir", - type=str, - help="Input folder to scan for markdown files", -) -parser.add_argument( - "--collection_name", - dest="collection_name", - type=str, - help="Name of the collection to store the data in", -) -parser.add_argument( - "--splitter", - dest="splitter", - type=str, - help="Name of the splitter to use", - default="recursive_character", -) -parser.add_argument( - "--splitter_params", - dest="splitter_params", - type=json.loads, - help="Parameters for the splitter", - default={"chunk_size": 1500, "chunk_overlap": 40}, -) -parser.add_argument( - "--model", - dest="model", - type=str, - help="Name of the model to use", - default="intfloat/e5-small", -) -parser.add_argument( - "--model_params", - dest="model_params", - type=str, - help="Parameters for the model", - default={}, -) -parser.add_argument( - "--stage", - dest="stage", - choices=["ingest", "chat"], - type=str, - default="chat", - help="Stage to run", -) - -args = parser.parse_args() - -FORMAT = "%(message)s" -logging.basicConfig( - level=os.environ.get("LOG_LEVEL", "DEBUG"), - format="%(asctime)s - %(message)s", - datefmt="[%X]", - handlers=[RichHandler()], -) -log = logging.getLogger("rich") - -# Load .env file -load_dotenv() - - -async def ingest_documents(db: Database, collection_name: str, folder: str) -> int: - log.info("Scanning " + folder + " for markdown files") - md_files = [] - # root_dir needs a trailing slash (i.e. 
/root/dir/) - for filename in glob.iglob(folder + "**/*.md", recursive=True): - md_files.append(filename) - - log.info("Found " + str(len(md_files)) + " markdown files") - documents = [] - for md_file in track(md_files, description="Extracting text from markdown"): - with open(md_file, "r") as f: - documents.append({"text": f.read(), "filename": md_file}) - - log.info("Upserting documents into database") - collection = await db.create_or_get_collection(collection_name) - await collection.upsert_documents(documents) - - return len(md_files) - - -async def generate_chunks( - db: Database, - collection_name: str, - splitter: str = "recursive_character", - splitter_params: dict = {"chunk_size": 1500, "chunk_overlap": 40}, -) -> int: - """ - The function `generate_chunks` generates chunks for a given collection in a database and returns the - count of chunks created. - - :param db: The `db` parameter is an instance of a database connection or client. It is used to - interact with the database and perform operations such as creating collections, executing queries, - and fetching results - :type db: Database - :param collection_name: The `collection_name` parameter is a string that represents the name of the - collection in the database. It is used to create or get the collection and perform operations on it - :type collection_name: str - :return: The function `generate_chunks` returns an integer, which represents the count of chunks - generated in the specified collection. 
- """ - log.info("Generating chunks") - collection = await db.create_or_get_collection(collection_name) - await collection.register_text_splitter(splitter, splitter_params) - query_string = """SELECT count(*) from {collection_name}.chunks""".format( - collection_name=collection_name - ) - results = await db.query(query_string).fetch_all() - start_chunks = results[0]["count"] - log.info("Starting chunk count: " + str(start_chunks)) - await collection.generate_chunks() - results = await db.query(query_string).fetch_all() - log.info("Ending chunk count: " + str(results[0]["count"])) - return results[0]["count"] - start_chunks - - -async def generate_embeddings( - db: Database, - collection_name: str, - splitter: str = "recursive_character", - splitter_params: dict = {"chunk_size": 1500, "chunk_overlap": 40}, - model: str = "intfloat/e5-small", - model_params: dict = {}, -) -> int: - """ - The `generate_embeddings` function generates embeddings for text data using a specified model and - splitter. - - :param db: The `db` parameter is an instance of a database object. It is used to interact with the - database and perform operations such as creating or getting a collection, registering a text - splitter, registering a model, and generating embeddings - :type db: Database - :param collection_name: The `collection_name` parameter is a string that represents the name of the - collection in the database where the embeddings will be generated - :type collection_name: str - :param splitter: The `splitter` parameter is used to specify the text splitting method to be used - during the embedding generation process. In this case, the value is set to "recursive_character", - which suggests that the text will be split into chunks based on recursive character splitting, - defaults to recursive_character - :type splitter: str (optional) - :param splitter_params: The `splitter_params` parameter is a dictionary that contains the parameters - for the text splitter. 
In this case, the `splitter_params` dictionary has two keys: - :type splitter_params: dict - :param model: The `model` parameter is the name or identifier of the language model that will be - used to generate the embeddings. In this case, the model is specified as "intfloat/e5-small", - defaults to intfloat/e5-small - :type model: str (optional) - :param model_params: The `model_params` parameter is a dictionary that allows you to specify - additional parameters for the model. These parameters can be used to customize the behavior of the - model during the embedding generation process. The specific parameters that can be included in the - `model_params` dictionary will depend on the specific model you are - :type model_params: dict - :return: an integer value of 0. - """ - log.info("Generating embeddings") - collection = await db.create_or_get_collection(collection_name) - splitter_id = await collection.register_text_splitter(splitter, splitter_params) - model_id = await collection.register_model("embedding", model, model_params) - - start = time() - await collection.generate_embeddings(model_id, splitter_id) - log.info("Embeddings generated in %0.3f seconds" % (time() - start)) - - return 0 - - -async def generate_response( - messages, openai_api_key, temperature=0.7, max_tokens=256, top_p=1.0 -): - openai.api_key = openai_api_key - response = openai.ChatCompletion.create( - model="gpt-3.5-turbo", - messages=messages, - temperature=temperature, - max_tokens=max_tokens, - top_p=top_p, - frequency_penalty=0, - presence_penalty=0, - ) - return response["choices"][0]["message"]["content"] - - -async def main(): - """ - The `main` function connects to a database, ingests documents from a specified folder, generates - chunks, and logs the total number of documents and chunks. 
- """ - log.info("Starting pgml_chatbot") - collection_name = args.collection_name - log.info("Connecting to database") - db = Database(os.environ.get("DATABASE_URL")) - - stage = args.stage - splitter = args.splitter - splitter_params = args.splitter_params - model = args.model - if args.model_params: - model_params = ast.literal_eval(args.model_params) - else: - model_params = args.model_params - - if stage == "ingest": - root_dir = args.root_dir - - total_docs = await ingest_documents(db, collection_name, folder=root_dir) - total_chunks = await generate_chunks( - db, collection_name, splitter=splitter, splitter_params=splitter_params - ) - log.info( - "Total documents: " - + str(total_docs) - + " Total chunks: " - + str(total_chunks) - ) - - await generate_embeddings( - db, - collection_name, - splitter=splitter, - splitter_params=splitter_params, - model=model, - model_params=model_params, - ) - elif stage == "chat": - system_prompt = """You are an assistant to answer questions about an open source software named PostgresML. Your name is PgBot. You are based out of San Francisco, California. - """ - base_prompt = """Given relevant parts of a document and a question, create a final answer. Include a SQL query in the answer wherever possible. If you don't find relevant answer then politely say that you don't know and ask for clarification. If the context is empty then ask for clarification and suggest user to send an email to team@postgresml.org or join PostgresML [Discord](https://discord.gg/DmyJP3qJ7U). Use the following portion of a long document to see if any of the text is relevant to answer the question. 
- \nReturn any relevant text verbatim.\n{context}\nQuestion: {question}\n""" - openai_api_key = os.environ.get("OPENAI_API_KEY") - - collection = await db.create_or_get_collection(collection_name) - model_id = await collection.register_model("embedding", model, model_params) - splitter_id = await collection.register_text_splitter(splitter, splitter_params) - log.info("Model id: " + str(model_id) + " Splitter id: " + str(splitter_id)) - while True: - try: - messages = [{"role": "system", "content": system_prompt}] - user_input = input("Ctrl-C to exit\nUser: ") - vector_results = await collection.vector_search( - user_input, model_id=model_id, splitter_id=splitter_id, top_k=2, query_params=model_params - ) - log.info(vector_results) - context = "" - for result in vector_results: - if result[0] > 0.7: - context += result[1] + "\n" - if context: - query = base_prompt.format(context=context, question=user_input) - else: - query = user_input - log.info("User: " + query) - messages.append({"role": "user", "content": query}) - response = await generate_response(messages, openai_api_key, max_tokens=512, temperature=0.0) - print("PgBot: " + response) - except KeyboardInterrupt: - print("Exiting...") - break - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/pgml-sdks/rust/pgml/python/examples/chatbot/poetry.lock b/pgml-sdks/rust/pgml/python/examples/chatbot/poetry.lock deleted file mode 100644 index c67d1e842..000000000 --- a/pgml-sdks/rust/pgml/python/examples/chatbot/poetry.lock +++ /dev/null @@ -1,871 +0,0 @@ -# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. 
- -[[package]] -name = "aiohttp" -version = "3.8.5" -description = "Async http client/server framework (asyncio)" -category = "main" -optional = false -python-versions = ">=3.6" -files = [ - {file = "aiohttp-3.8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a94159871304770da4dd371f4291b20cac04e8c94f11bdea1c3478e557fbe0d8"}, - {file = "aiohttp-3.8.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:13bf85afc99ce6f9ee3567b04501f18f9f8dbbb2ea11ed1a2e079670403a7c84"}, - {file = "aiohttp-3.8.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ce2ac5708501afc4847221a521f7e4b245abf5178cf5ddae9d5b3856ddb2f3a"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96943e5dcc37a6529d18766597c491798b7eb7a61d48878611298afc1fca946c"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ad5c3c4590bb3cc28b4382f031f3783f25ec223557124c68754a2231d989e2b"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0c413c633d0512df4dc7fd2373ec06cc6a815b7b6d6c2f208ada7e9e93a5061d"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df72ac063b97837a80d80dec8d54c241af059cc9bb42c4de68bd5b61ceb37caa"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c48c5c0271149cfe467c0ff8eb941279fd6e3f65c9a388c984e0e6cf57538e14"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:368a42363c4d70ab52c2c6420a57f190ed3dfaca6a1b19afda8165ee16416a82"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7607ec3ce4993464368505888af5beb446845a014bc676d349efec0e05085905"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0d21c684808288a98914e5aaf2a7c6a3179d4df11d249799c32d1808e79503b5"}, - {file = 
"aiohttp-3.8.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:312fcfbacc7880a8da0ae8b6abc6cc7d752e9caa0051a53d217a650b25e9a691"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ad093e823df03bb3fd37e7dec9d4670c34f9e24aeace76808fc20a507cace825"}, - {file = "aiohttp-3.8.5-cp310-cp310-win32.whl", hash = "sha256:33279701c04351a2914e1100b62b2a7fdb9a25995c4a104259f9a5ead7ed4802"}, - {file = "aiohttp-3.8.5-cp310-cp310-win_amd64.whl", hash = "sha256:6e4a280e4b975a2e7745573e3fc9c9ba0d1194a3738ce1cbaa80626cc9b4f4df"}, - {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae871a964e1987a943d83d6709d20ec6103ca1eaf52f7e0d36ee1b5bebb8b9b9"}, - {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:461908b2578955045efde733719d62f2b649c404189a09a632d245b445c9c975"}, - {file = "aiohttp-3.8.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:72a860c215e26192379f57cae5ab12b168b75db8271f111019509a1196dfc780"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc14be025665dba6202b6a71cfcdb53210cc498e50068bc088076624471f8bb9"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8af740fc2711ad85f1a5c034a435782fbd5b5f8314c9a3ef071424a8158d7f6b"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:841cd8233cbd2111a0ef0a522ce016357c5e3aff8a8ce92bcfa14cef890d698f"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed1c46fb119f1b59304b5ec89f834f07124cd23ae5b74288e364477641060ff"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84f8ae3e09a34f35c18fa57f015cc394bd1389bce02503fb30c394d04ee6b938"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:62360cb771707cb70a6fd114b9871d20d7dd2163a0feafe43fd115cfe4fe845e"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:23fb25a9f0a1ca1f24c0a371523546366bb642397c94ab45ad3aedf2941cec6a"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0ba0d15164eae3d878260d4c4df859bbdc6466e9e6689c344a13334f988bb53"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5d20003b635fc6ae3f96d7260281dfaf1894fc3aa24d1888a9b2628e97c241e5"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0175d745d9e85c40dcc51c8f88c74bfbaef9e7afeeeb9d03c37977270303064c"}, - {file = "aiohttp-3.8.5-cp311-cp311-win32.whl", hash = "sha256:2e1b1e51b0774408f091d268648e3d57f7260c1682e7d3a63cb00d22d71bb945"}, - {file = "aiohttp-3.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:043d2299f6dfdc92f0ac5e995dfc56668e1587cea7f9aa9d8a78a1b6554e5755"}, - {file = "aiohttp-3.8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cae533195e8122584ec87531d6df000ad07737eaa3c81209e85c928854d2195c"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f21e83f355643c345177a5d1d8079f9f28b5133bcd154193b799d380331d5d3"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7a75ef35f2df54ad55dbf4b73fe1da96f370e51b10c91f08b19603c64004acc"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e2e9839e14dd5308ee773c97115f1e0a1cb1d75cbeeee9f33824fa5144c7634"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44e65da1de4403d0576473e2344828ef9c4c6244d65cf4b75549bb46d40b8dd"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78d847e4cde6ecc19125ccbc9bfac4a7ab37c234dd88fbb3c5c524e8e14da543"}, - {file = 
"aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:c7a815258e5895d8900aec4454f38dca9aed71085f227537208057853f9d13f2"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:8b929b9bd7cd7c3939f8bcfffa92fae7480bd1aa425279d51a89327d600c704d"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:5db3a5b833764280ed7618393832e0853e40f3d3e9aa128ac0ba0f8278d08649"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:a0215ce6041d501f3155dc219712bc41252d0ab76474615b9700d63d4d9292af"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:fd1ed388ea7fbed22c4968dd64bab0198de60750a25fe8c0c9d4bef5abe13824"}, - {file = "aiohttp-3.8.5-cp36-cp36m-win32.whl", hash = "sha256:6e6783bcc45f397fdebc118d772103d751b54cddf5b60fbcc958382d7dd64f3e"}, - {file = "aiohttp-3.8.5-cp36-cp36m-win_amd64.whl", hash = "sha256:b5411d82cddd212644cf9360879eb5080f0d5f7d809d03262c50dad02f01421a"}, - {file = "aiohttp-3.8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:01d4c0c874aa4ddfb8098e85d10b5e875a70adc63db91f1ae65a4b04d3344cda"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5980a746d547a6ba173fd5ee85ce9077e72d118758db05d229044b469d9029a"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a482e6da906d5e6e653be079b29bc173a48e381600161c9932d89dfae5942ef"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80bd372b8d0715c66c974cf57fe363621a02f359f1ec81cba97366948c7fc873"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1161b345c0a444ebcf46bf0a740ba5dcf50612fd3d0528883fdc0eff578006a"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:cd56db019015b6acfaaf92e1ac40eb8434847d9bf88b4be4efe5bfd260aee692"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:153c2549f6c004d2754cc60603d4668899c9895b8a89397444a9c4efa282aaf4"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4a01951fabc4ce26ab791da5f3f24dca6d9a6f24121746eb19756416ff2d881b"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bfb9162dcf01f615462b995a516ba03e769de0789de1cadc0f916265c257e5d8"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:7dde0009408969a43b04c16cbbe252c4f5ef4574ac226bc8815cd7342d2028b6"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4149d34c32f9638f38f544b3977a4c24052042affa895352d3636fa8bffd030a"}, - {file = "aiohttp-3.8.5-cp37-cp37m-win32.whl", hash = "sha256:68c5a82c8779bdfc6367c967a4a1b2aa52cd3595388bf5961a62158ee8a59e22"}, - {file = "aiohttp-3.8.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2cf57fb50be5f52bda004b8893e63b48530ed9f0d6c96c84620dc92fe3cd9b9d"}, - {file = "aiohttp-3.8.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:eca4bf3734c541dc4f374ad6010a68ff6c6748f00451707f39857f429ca36ced"}, - {file = "aiohttp-3.8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1274477e4c71ce8cfe6c1ec2f806d57c015ebf84d83373676036e256bc55d690"}, - {file = "aiohttp-3.8.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:28c543e54710d6158fc6f439296c7865b29e0b616629767e685a7185fab4a6b9"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:910bec0c49637d213f5d9877105d26e0c4a4de2f8b1b29405ff37e9fc0ad52b8"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5443910d662db951b2e58eb70b0fbe6b6e2ae613477129a5805d0b66c54b6cb7"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:2e460be6978fc24e3df83193dc0cc4de46c9909ed92dd47d349a452ef49325b7"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb1558def481d84f03b45888473fc5a1f35747b5f334ef4e7a571bc0dfcb11f8"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34dd0c107799dcbbf7d48b53be761a013c0adf5571bf50c4ecad5643fe9cfcd0"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aa1990247f02a54185dc0dff92a6904521172a22664c863a03ff64c42f9b5410"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0e584a10f204a617d71d359fe383406305a4b595b333721fa50b867b4a0a1548"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:a3cf433f127efa43fee6b90ea4c6edf6c4a17109d1d037d1a52abec84d8f2e42"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:c11f5b099adafb18e65c2c997d57108b5bbeaa9eeee64a84302c0978b1ec948b"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:84de26ddf621d7ac4c975dbea4c945860e08cccde492269db4e1538a6a6f3c35"}, - {file = "aiohttp-3.8.5-cp38-cp38-win32.whl", hash = "sha256:ab88bafedc57dd0aab55fa728ea10c1911f7e4d8b43e1d838a1739f33712921c"}, - {file = "aiohttp-3.8.5-cp38-cp38-win_amd64.whl", hash = "sha256:5798a9aad1879f626589f3df0f8b79b3608a92e9beab10e5fda02c8a2c60db2e"}, - {file = "aiohttp-3.8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a6ce61195c6a19c785df04e71a4537e29eaa2c50fe745b732aa937c0c77169f3"}, - {file = "aiohttp-3.8.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:773dd01706d4db536335fcfae6ea2440a70ceb03dd3e7378f3e815b03c97ab51"}, - {file = "aiohttp-3.8.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f83a552443a526ea38d064588613aca983d0ee0038801bc93c0c916428310c28"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1f7372f7341fcc16f57b2caded43e81ddd18df53320b6f9f042acad41f8e049a"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea353162f249c8097ea63c2169dd1aa55de1e8fecbe63412a9bc50816e87b761"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5d47ae48db0b2dcf70bc8a3bc72b3de86e2a590fc299fdbbb15af320d2659de"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d827176898a2b0b09694fbd1088c7a31836d1a505c243811c87ae53a3f6273c1"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3562b06567c06439d8b447037bb655ef69786c590b1de86c7ab81efe1c9c15d8"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4e874cbf8caf8959d2adf572a78bba17cb0e9d7e51bb83d86a3697b686a0ab4d"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6809a00deaf3810e38c628e9a33271892f815b853605a936e2e9e5129762356c"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:33776e945d89b29251b33a7e7d006ce86447b2cfd66db5e5ded4e5cd0340585c"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eaeed7abfb5d64c539e2db173f63631455f1196c37d9d8d873fc316470dfbacd"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e91d635961bec2d8f19dfeb41a539eb94bd073f075ca6dae6c8dc0ee89ad6f91"}, - {file = "aiohttp-3.8.5-cp39-cp39-win32.whl", hash = "sha256:00ad4b6f185ec67f3e6562e8a1d2b69660be43070bd0ef6fcec5211154c7df67"}, - {file = "aiohttp-3.8.5-cp39-cp39-win_amd64.whl", hash = "sha256:c0a9034379a37ae42dea7ac1e048352d96286626251862e448933c0f59cbd79c"}, - {file = "aiohttp-3.8.5.tar.gz", hash = "sha256:b9552ec52cc147dbf1944ac7ac98af7602e51ea2dcd076ed194ca3c0d1c7d0bc"}, -] - -[package.dependencies] -aiosignal = ">=1.1.2" -async-timeout = ">=4.0.0a3,<5.0" -attrs = ">=17.3.0" 
-charset-normalizer = ">=2.0,<4.0" -frozenlist = ">=1.1.1" -multidict = ">=4.5,<7.0" -yarl = ">=1.0,<2.0" - -[package.extras] -speedups = ["Brotli", "aiodns", "cchardet"] - -[[package]] -name = "aiosignal" -version = "1.3.1" -description = "aiosignal: a list of registered asynchronous callbacks" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, - {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, -] - -[package.dependencies] -frozenlist = ">=1.1.0" - -[[package]] -name = "async-timeout" -version = "4.0.2" -description = "Timeout context manager for asyncio programs" -category = "main" -optional = false -python-versions = ">=3.6" -files = [ - {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, - {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, -] - -[[package]] -name = "attrs" -version = "23.1.0" -description = "Classes Without Boilerplate" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, - {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, -] - -[package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] - -[[package]] -name = 
"black" -version = "23.7.0" -description = "The uncompromising code formatter." -category = "main" -optional = false -python-versions = ">=3.8" -files = [ - {file = "black-23.7.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:5c4bc552ab52f6c1c506ccae05681fab58c3f72d59ae6e6639e8885e94fe2587"}, - {file = "black-23.7.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:552513d5cd5694590d7ef6f46e1767a4df9af168d449ff767b13b084c020e63f"}, - {file = "black-23.7.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:86cee259349b4448adb4ef9b204bb4467aae74a386bce85d56ba4f5dc0da27be"}, - {file = "black-23.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:501387a9edcb75d7ae8a4412bb8749900386eaef258f1aefab18adddea1936bc"}, - {file = "black-23.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb074d8b213749fa1d077d630db0d5f8cc3b2ae63587ad4116e8a436e9bbe995"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b5b0ee6d96b345a8b420100b7d71ebfdd19fab5e8301aff48ec270042cd40ac2"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:893695a76b140881531062d48476ebe4a48f5d1e9388177e175d76234ca247cd"}, - {file = "black-23.7.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:c333286dc3ddca6fdff74670b911cccedacb4ef0a60b34e491b8a67c833b343a"}, - {file = "black-23.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831d8f54c3a8c8cf55f64d0422ee875eecac26f5f649fb6c1df65316b67c8926"}, - {file = "black-23.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:7f3bf2dec7d541b4619b8ce526bda74a6b0bffc480a163fed32eb8b3c9aed8ad"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:f9062af71c59c004cd519e2fb8f5d25d39e46d3af011b41ab43b9c74e27e236f"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:01ede61aac8c154b55f35301fac3e730baf0c9cf8120f65a9cd61a81cfb4a0c3"}, - {file = "black-23.7.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = 
"sha256:327a8c2550ddc573b51e2c352adb88143464bb9d92c10416feb86b0f5aee5ff6"}, - {file = "black-23.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1c6022b86f83b632d06f2b02774134def5d4d4f1dac8bef16d90cda18ba28a"}, - {file = "black-23.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:27eb7a0c71604d5de083757fbdb245b1a4fae60e9596514c6ec497eb63f95320"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:8417dbd2f57b5701492cd46edcecc4f9208dc75529bcf76c514864e48da867d9"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:47e56d83aad53ca140da0af87678fb38e44fd6bc0af71eebab2d1f59b1acf1d3"}, - {file = "black-23.7.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:25cc308838fe71f7065df53aedd20327969d05671bac95b38fdf37ebe70ac087"}, - {file = "black-23.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:642496b675095d423f9b8448243336f8ec71c9d4d57ec17bf795b67f08132a91"}, - {file = "black-23.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad0014efc7acf0bd745792bd0d8857413652979200ab924fbf239062adc12491"}, - {file = "black-23.7.0-py3-none-any.whl", hash = "sha256:9fd59d418c60c0348505f2ddf9609c1e1de8e7493eab96198fc89d9f865e7a96"}, - {file = "black-23.7.0.tar.gz", hash = "sha256:022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb"}, -] - -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -packaging = ">=22.0" -pathspec = ">=0.9.0" -platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] - -[[package]] -name = "certifi" -version = "2023.7.22" -description = "Python package for providing Mozilla's CA Bundle." 
-category = "main" -optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, - {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, -] - -[[package]] -name = "charset-normalizer" -version = "3.2.0" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "main" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, - {file = 
"charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, - {file = 
"charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, - {file = 
"charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, - {file = 
"charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = 
"sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, - {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, -] - -[[package]] -name = "click" -version = "8.1.6" -description = "Composable command line interface toolkit" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "click-8.1.6-py3-none-any.whl", hash = "sha256:fa244bb30b3b5ee2cae3da8f55c9e5e0c0e86093306301fb418eb9dc40fbded5"}, - {file = "click-8.1.6.tar.gz", hash = "sha256:48ee849951919527a045bfe3bf7baa8a959c423134e1a5b98c05c20ba75a1cbd"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." 
-category = "main" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "frozenlist" -version = "1.4.0" -description = "A list-like structure which implements collections.abc.MutableSequence" -category = "main" -optional = false -python-versions = ">=3.8" -files = [ - {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:764226ceef3125e53ea2cb275000e309c0aa5464d43bd72abd661e27fffc26ab"}, - {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d6484756b12f40003c6128bfcc3fa9f0d49a687e171186c2d85ec82e3758c559"}, - {file = "frozenlist-1.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9ac08e601308e41eb533f232dbf6b7e4cea762f9f84f6357136eed926c15d12c"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d081f13b095d74b67d550de04df1c756831f3b83dc9881c38985834387487f1b"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71932b597f9895f011f47f17d6428252fc728ba2ae6024e13c3398a087c2cdea"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:981b9ab5a0a3178ff413bca62526bb784249421c24ad7381e39d67981be2c326"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e41f3de4df3e80de75845d3e743b3f1c4c8613c3997a912dbf0229fc61a8b963"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6918d49b1f90821e93069682c06ffde41829c346c66b721e65a5c62b4bab0300"}, - {file = 
"frozenlist-1.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e5c8764c7829343d919cc2dfc587a8db01c4f70a4ebbc49abde5d4b158b007b"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8d0edd6b1c7fb94922bf569c9b092ee187a83f03fb1a63076e7774b60f9481a8"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e29cda763f752553fa14c68fb2195150bfab22b352572cb36c43c47bedba70eb"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:0c7c1b47859ee2cac3846fde1c1dc0f15da6cec5a0e5c72d101e0f83dcb67ff9"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:901289d524fdd571be1c7be054f48b1f88ce8dddcbdf1ec698b27d4b8b9e5d62"}, - {file = "frozenlist-1.4.0-cp310-cp310-win32.whl", hash = "sha256:1a0848b52815006ea6596c395f87449f693dc419061cc21e970f139d466dc0a0"}, - {file = "frozenlist-1.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:b206646d176a007466358aa21d85cd8600a415c67c9bd15403336c331a10d956"}, - {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:de343e75f40e972bae1ef6090267f8260c1446a1695e77096db6cfa25e759a95"}, - {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad2a9eb6d9839ae241701d0918f54c51365a51407fd80f6b8289e2dfca977cc3"}, - {file = "frozenlist-1.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7bd3b3830247580de99c99ea2a01416dfc3c34471ca1298bccabf86d0ff4dc"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdf1847068c362f16b353163391210269e4f0569a3c166bc6a9f74ccbfc7e839"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38461d02d66de17455072c9ba981d35f1d2a73024bee7790ac2f9e361ef1cd0c"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5a32087d720c608f42caed0ef36d2b3ea61a9d09ee59a5142d6070da9041b8f"}, - {file = 
"frozenlist-1.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd65632acaf0d47608190a71bfe46b209719bf2beb59507db08ccdbe712f969b"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261b9f5d17cac914531331ff1b1d452125bf5daa05faf73b71d935485b0c510b"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b89ac9768b82205936771f8d2eb3ce88503b1556324c9f903e7156669f521472"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:008eb8b31b3ea6896da16c38c1b136cb9fec9e249e77f6211d479db79a4eaf01"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e74b0506fa5aa5598ac6a975a12aa8928cbb58e1f5ac8360792ef15de1aa848f"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:490132667476f6781b4c9458298b0c1cddf237488abd228b0b3650e5ecba7467"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:76d4711f6f6d08551a7e9ef28c722f4a50dd0fc204c56b4bcd95c6cc05ce6fbb"}, - {file = "frozenlist-1.4.0-cp311-cp311-win32.whl", hash = "sha256:a02eb8ab2b8f200179b5f62b59757685ae9987996ae549ccf30f983f40602431"}, - {file = "frozenlist-1.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:515e1abc578dd3b275d6a5114030b1330ba044ffba03f94091842852f806f1c1"}, - {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f0ed05f5079c708fe74bf9027e95125334b6978bf07fd5ab923e9e55e5fbb9d3"}, - {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca265542ca427bf97aed183c1676e2a9c66942e822b14dc6e5f42e038f92a503"}, - {file = "frozenlist-1.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:491e014f5c43656da08958808588cc6c016847b4360e327a62cb308c791bd2d9"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:17ae5cd0f333f94f2e03aaf140bb762c64783935cc764ff9c82dff626089bebf"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e78fb68cf9c1a6aa4a9a12e960a5c9dfbdb89b3695197aa7064705662515de2"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5655a942f5f5d2c9ed93d72148226d75369b4f6952680211972a33e59b1dfdc"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c11b0746f5d946fecf750428a95f3e9ebe792c1ee3b1e96eeba145dc631a9672"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e66d2a64d44d50d2543405fb183a21f76b3b5fd16f130f5c99187c3fb4e64919"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:88f7bc0fcca81f985f78dd0fa68d2c75abf8272b1f5c323ea4a01a4d7a614efc"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5833593c25ac59ede40ed4de6d67eb42928cca97f26feea219f21d0ed0959b79"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:fec520865f42e5c7f050c2a79038897b1c7d1595e907a9e08e3353293ffc948e"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:b826d97e4276750beca7c8f0f1a4938892697a6bcd8ec8217b3312dad6982781"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ceb6ec0a10c65540421e20ebd29083c50e6d1143278746a4ef6bcf6153171eb8"}, - {file = "frozenlist-1.4.0-cp38-cp38-win32.whl", hash = "sha256:2b8bcf994563466db019fab287ff390fffbfdb4f905fc77bc1c1d604b1c689cc"}, - {file = "frozenlist-1.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:a6c8097e01886188e5be3e6b14e94ab365f384736aa1fca6a0b9e35bd4a30bc7"}, - {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6c38721585f285203e4b4132a352eb3daa19121a035f3182e08e437cface44bf"}, - {file = 
"frozenlist-1.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a0c6da9aee33ff0b1a451e867da0c1f47408112b3391dd43133838339e410963"}, - {file = "frozenlist-1.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93ea75c050c5bb3d98016b4ba2497851eadf0ac154d88a67d7a6816206f6fa7f"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f61e2dc5ad442c52b4887f1fdc112f97caeff4d9e6ebe78879364ac59f1663e1"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa384489fefeb62321b238e64c07ef48398fe80f9e1e6afeff22e140e0850eef"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10ff5faaa22786315ef57097a279b833ecab1a0bfb07d604c9cbb1c4cdc2ed87"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:007df07a6e3eb3e33e9a1fe6a9db7af152bbd8a185f9aaa6ece10a3529e3e1c6"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4f399d28478d1f604c2ff9119907af9726aed73680e5ed1ca634d377abb087"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5374b80521d3d3f2ec5572e05adc94601985cc526fb276d0c8574a6d749f1b3"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ce31ae3e19f3c902de379cf1323d90c649425b86de7bbdf82871b8a2a0615f3d"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7211ef110a9194b6042449431e08c4d80c0481e5891e58d429df5899690511c2"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:556de4430ce324c836789fa4560ca62d1591d2538b8ceb0b4f68fb7b2384a27a"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7645a8e814a3ee34a89c4a372011dcd817964ce8cb273c8ed6119d706e9613e3"}, - {file = "frozenlist-1.4.0-cp39-cp39-win32.whl", hash = 
"sha256:19488c57c12d4e8095a922f328df3f179c820c212940a498623ed39160bc3c2f"}, - {file = "frozenlist-1.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:6221d84d463fb110bdd7619b69cb43878a11d51cbb9394ae3105d082d5199167"}, - {file = "frozenlist-1.4.0.tar.gz", hash = "sha256:09163bdf0b2907454042edb19f887c6d33806adc71fbd54afc14908bfdc22251"}, -] - -[[package]] -name = "idna" -version = "3.4" -description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" -optional = false -python-versions = ">=3.5" -files = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, -] - -[[package]] -name = "markdown-it-py" -version = "3.0.0" -description = "Python port of markdown-it. Markdown parsing, done right!" -category = "main" -optional = false -python-versions = ">=3.8" -files = [ - {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, - {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, -] - -[package.dependencies] -mdurl = ">=0.1,<1.0" - -[package.extras] -benchmarking = ["psutil", "pytest", "pytest-benchmark"] -code-style = ["pre-commit (>=3.0,<4.0)"] -compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] -linkify = ["linkify-it-py (>=1,<3)"] -plugins = ["mdit-py-plugins"] -profiling = ["gprof2dot"] -rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] -testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] - -[[package]] -name = "mdurl" -version = "0.1.2" -description = "Markdown URL utilities" -category = "main" -optional = false -python-versions = 
">=3.7" -files = [ - {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, - {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, -] - -[[package]] -name = "multidict" -version = "6.0.4" -description = "multidict implementation" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, - {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, - {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, - {file = 
"multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, - {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, - {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, - {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, - {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, - {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, - {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, - {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, - {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, - {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, - {file = "multidict-6.0.4.tar.gz", hash = 
"sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, -] - -[[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." -category = "main" -optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] - -[[package]] -name = "openai" -version = "0.27.8" -description = "Python client library for the OpenAI API" -category = "main" -optional = false -python-versions = ">=3.7.1" -files = [ - {file = "openai-0.27.8-py3-none-any.whl", hash = "sha256:e0a7c2f7da26bdbe5354b03c6d4b82a2f34bd4458c7a17ae1a7092c3e397e03c"}, - {file = "openai-0.27.8.tar.gz", hash = "sha256:2483095c7db1eee274cebac79e315a986c4e55207bb4fa7b82d185b3a2ed9536"}, -] - -[package.dependencies] -aiohttp = "*" -requests = ">=2.20" -tqdm = "*" - -[package.extras] -datalib = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] -dev = ["black (>=21.6b0,<22.0)", "pytest (>=6.0.0,<7.0.0)", "pytest-asyncio", "pytest-mock"] -embeddings = ["matplotlib", "numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "plotly", "scikit-learn (>=1.0.2)", "scipy", "tenacity (>=8.0.1)"] -wandb = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "wandb"] - -[[package]] -name = "packaging" -version = "23.1" -description = "Core utilities for Python packages" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, - {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, -] - -[[package]] 
-name = "pathspec" -version = "0.11.1" -description = "Utility library for gitignore style pattern matching of file paths." -category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"}, - {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, -] - -[[package]] -name = "pgml" -version = "0.8.0" -description = "Python SDK is designed to facilitate the development of scalable vector search applications on PostgreSQL databases." -category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pgml-0.8.0-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:9308a53e30121df0c428a15cff93f8a6c5d6ba936f31c4f4b8c066fbdca9c8cc"}, - {file = "pgml-0.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4e92a014be000c99de6f97ff9f4f63f40af4712a7a480c914283e63b804024c2"}, - {file = "pgml-0.8.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:4d84992b5c5834334f390e4b517e2fb1af6e47f37f244f01867d4f32918b5e47"}, - {file = "pgml-0.8.0-cp310-cp310-manylinux_2_34_aarch64.whl", hash = "sha256:e042422d836b4afd584b63746a53d68a84b370a5b716c1f557fee68ea904a2f5"}, - {file = "pgml-0.8.0-cp310-cp310-manylinux_2_34_x86_64.whl", hash = "sha256:05fcb19667f48093cd5bfbbef34d76b87efa36f5c9f8aa8f52b21958134d9507"}, - {file = "pgml-0.8.0-cp310-none-win_amd64.whl", hash = "sha256:42d5c4bbd0bca75c346b9f4a70301e692eb213e7ac0e394f8f44ee90a08f1f8b"}, - {file = "pgml-0.8.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:12855bacf6df2ac8d0039453755bcc778c3781e857010713ed811a9726617080"}, - {file = "pgml-0.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:46830c9971ee9f2d01ca181d196ca8a2e30d2d9c3d5a106595456534cee7f313"}, - {file = "pgml-0.8.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:34f9ec58369fe6ed05b2ddce7cd212055bb679dd1badb42fa876148bba3e455f"}, - {file = 
"pgml-0.8.0-cp311-cp311-manylinux_2_34_aarch64.whl", hash = "sha256:da072fe1b0eb3999a01fcd1b1b7e180cbd14eb6a1d65fa32f0f5977bed8ed1a7"}, - {file = "pgml-0.8.0-cp311-cp311-manylinux_2_34_x86_64.whl", hash = "sha256:fbcceacc564b80852f8a33098169546fa741ff5ee8e1cd3207b2a3cdbe23345e"}, - {file = "pgml-0.8.0-cp311-none-win_amd64.whl", hash = "sha256:dd6b7fe356bc440179d2b3cdb58ee517140978f671cbdb27459b9309d074b01d"}, - {file = "pgml-0.8.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:854c913c0549f5fdde34783f2035256b07873ca8d93e637dd56939e9ac4dfc70"}, - {file = "pgml-0.8.0-cp37-cp37m-manylinux_2_34_aarch64.whl", hash = "sha256:16e64df9b259361bd63f0f9aa52100ee85a4bf678c7d03fcc1d0df082469336f"}, - {file = "pgml-0.8.0-cp37-cp37m-manylinux_2_34_x86_64.whl", hash = "sha256:c42f2a92d5c05c390b2b6c34aadf6faa0cfb4243d5244c44bd699f75a28757b1"}, - {file = "pgml-0.8.0-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:a5bb83ff9bece5021c7d0a078138c87f3e59aaf51208166266b82c439a54bd51"}, - {file = "pgml-0.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e1e22f64fc536c20d026e9bf4a58797535de6d4cde18858ba14f6c28ca6dc9b"}, - {file = "pgml-0.8.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:08050b4b35c90034fb49d96ea74edda130a494f2cfabd956bd6c0d68d02f5d35"}, - {file = "pgml-0.8.0-cp38-cp38-manylinux_2_34_aarch64.whl", hash = "sha256:d71a17e0458747c87534004acdfa586fb978b76e4688611deac4ee677e651f64"}, - {file = "pgml-0.8.0-cp38-cp38-manylinux_2_34_x86_64.whl", hash = "sha256:2b059ee7b9173698c0bad8a6f14d35ee90cd6b28c2fb80a7a30396935c0bdab0"}, - {file = "pgml-0.8.0-cp38-none-win_amd64.whl", hash = "sha256:ca3c6e8c570a3ec78ccae14efb8a19aeb73f41f569f162b76750be5d40b40016"}, - {file = "pgml-0.8.0-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:b9ad06ad7b4284539844effdae31d444402afe53f887974b1a88138af6715422"}, - {file = "pgml-0.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:592f6364e69194db819fde66072ffdeec349ebca00af9efad6fbc4e23b18fb26"}, - {file = 
"pgml-0.8.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:cba5a4b5e7fd32d35635ac83f8472f669f5ea49ca0059f8d50671ac9c76dca63"}, - {file = "pgml-0.8.0-cp39-cp39-manylinux_2_34_aarch64.whl", hash = "sha256:ae8c63d577c060cfeb46f7adc2e6b60c2b2f7478205e455bde1c233df3ed581c"}, - {file = "pgml-0.8.0-cp39-cp39-manylinux_2_34_x86_64.whl", hash = "sha256:c9832807832f325338a0783e27ee58ebf65b960d3b629e816ffff3de30308519"}, - {file = "pgml-0.8.0-cp39-none-win_amd64.whl", hash = "sha256:acb82bf88ce2f7945cae3ae95ad4e37e24576e478ba50754c61230dc52c91630"}, -] - -[[package]] -name = "platformdirs" -version = "3.9.1" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "platformdirs-3.9.1-py3-none-any.whl", hash = "sha256:ad8291ae0ae5072f66c16945166cb11c63394c7a3ad1b1bc9828ca3162da8c2f"}, - {file = "platformdirs-3.9.1.tar.gz", hash = "sha256:1b42b450ad933e981d56e59f1b97495428c9bd60698baab9f3eb3d00d5822421"}, -] - -[package.extras] -docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)"] - -[[package]] -name = "pygments" -version = "2.15.1" -description = "Pygments is a syntax highlighting package written in Python." 
-category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "Pygments-2.15.1-py3-none-any.whl", hash = "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1"}, - {file = "Pygments-2.15.1.tar.gz", hash = "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c"}, -] - -[package.extras] -plugins = ["importlib-metadata"] - -[[package]] -name = "python-dotenv" -version = "1.0.0" -description = "Read key-value pairs from a .env file and set them as environment variables" -category = "main" -optional = false -python-versions = ">=3.8" -files = [ - {file = "python-dotenv-1.0.0.tar.gz", hash = "sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba"}, - {file = "python_dotenv-1.0.0-py3-none-any.whl", hash = "sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a"}, -] - -[package.extras] -cli = ["click (>=5.0)"] - -[[package]] -name = "requests" -version = "2.31.0" -description = "Python HTTP for Humans." 
-category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "rich" -version = "13.4.2" -description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -category = "main" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "rich-13.4.2-py3-none-any.whl", hash = "sha256:8f87bc7ee54675732fa66a05ebfe489e27264caeeff3728c945d25971b6485ec"}, - {file = "rich-13.4.2.tar.gz", hash = "sha256:d653d6bccede5844304c605d5aac802c7cf9621efd700b46c7ec2b51ea914898"}, -] - -[package.dependencies] -markdown-it-py = ">=2.2.0" -pygments = ">=2.13.0,<3.0.0" -typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""} - -[package.extras] -jupyter = ["ipywidgets (>=7.5.1,<9)"] - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - -[[package]] -name = "tqdm" -version = "4.65.0" -description = "Fast, Extensible Progress Meter" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tqdm-4.65.0-py3-none-any.whl", hash = "sha256:c4f53a17fe37e132815abceec022631be8ffe1b9381c2e6e30aa70edc99e9671"}, - {file = 
"tqdm-4.65.0.tar.gz", hash = "sha256:1871fb68a86b8fb3b59ca4cdd3dcccbc7e6d613eeed31f4c332531977b89beb5"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[package.extras] -dev = ["py-make (>=0.1.0)", "twine", "wheel"] -notebook = ["ipywidgets (>=6)"] -slack = ["slack-sdk"] -telegram = ["requests"] - -[[package]] -name = "typing-extensions" -version = "4.7.1" -description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, - {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, -] - -[[package]] -name = "urllib3" -version = "2.0.4" -description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "urllib3-2.0.4-py3-none-any.whl", hash = "sha256:de7df1803967d2c2a98e4b11bb7d6bd9210474c46e8a0401514e3a42a75ebde4"}, - {file = "urllib3-2.0.4.tar.gz", hash = "sha256:8d22f86aae8ef5e410d4f539fde9ce6b2113a001bb4d189e0aed70642d602b11"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] - -[[package]] -name = "yarl" -version = "1.9.2" -description = "Yet another URL library" -category = "main" -optional = false -python-versions = ">=3.7" -files = [ - {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82"}, - {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8"}, - {file = "yarl-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee4afac41415d52d53a9833ebae7e32b344be72835bbb589018c9e938045a560"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bf345c3a4f5ba7f766430f97f9cc1320786f19584acc7086491f45524a551ac"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a96c19c52ff442a808c105901d0bdfd2e28575b3d5f82e2f5fd67e20dc5f4ea"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:891c0e3ec5ec881541f6c5113d8df0315ce5440e244a716b95f2525b7b9f3608"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3a53ba34a636a256d767c086ceb111358876e1fb6b50dfc4d3f4951d40133d5"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:566185e8ebc0898b11f8026447eacd02e46226716229cea8db37496c8cdd26e0"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2b0738fb871812722a0ac2154be1f049c6223b9f6f22eec352996b69775b36d4"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:32f1d071b3f362c80f1a7d322bfd7b2d11e33d2adf395cc1dd4df36c9c243095"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:e9fdc7ac0d42bc3ea78818557fab03af6181e076a2944f43c38684b4b6bed8e3"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56ff08ab5df8429901ebdc5d15941b59f6253393cb5da07b4170beefcf1b2528"}, - {file = "yarl-1.9.2-cp310-cp310-win32.whl", hash = "sha256:8ea48e0a2f931064469bdabca50c2f578b565fc446f302a79ba6cc0ee7f384d3"}, - {file = "yarl-1.9.2-cp310-cp310-win_amd64.whl", hash = 
"sha256:50f33040f3836e912ed16d212f6cc1efb3231a8a60526a407aeb66c1c1956dde"}, - {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:646d663eb2232d7909e6601f1a9107e66f9791f290a1b3dc7057818fe44fc2b6"}, - {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aff634b15beff8902d1f918012fc2a42e0dbae6f469fce134c8a0dc51ca423bb"}, - {file = "yarl-1.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a83503934c6273806aed765035716216cc9ab4e0364f7f066227e1aaea90b8d0"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b25322201585c69abc7b0e89e72790469f7dad90d26754717f3310bfe30331c2"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22a94666751778629f1ec4280b08eb11815783c63f52092a5953faf73be24191"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ec53a0ea2a80c5cd1ab397925f94bff59222aa3cf9c6da938ce05c9ec20428d"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:159d81f22d7a43e6eabc36d7194cb53f2f15f498dbbfa8edc8a3239350f59fe7"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:832b7e711027c114d79dffb92576acd1bd2decc467dec60e1cac96912602d0e6"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:95d2ecefbcf4e744ea952d073c6922e72ee650ffc79028eb1e320e732898d7e8"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d4e2c6d555e77b37288eaf45b8f60f0737c9efa3452c6c44626a5455aeb250b9"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:783185c75c12a017cc345015ea359cc801c3b29a2966c2655cd12b233bf5a2be"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:b8cc1863402472f16c600e3e93d542b7e7542a540f95c30afd472e8e549fc3f7"}, - {file = 
"yarl-1.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:822b30a0f22e588b32d3120f6d41e4ed021806418b4c9f0bc3048b8c8cb3f92a"}, - {file = "yarl-1.9.2-cp311-cp311-win32.whl", hash = "sha256:a60347f234c2212a9f0361955007fcf4033a75bf600a33c88a0a8e91af77c0e8"}, - {file = "yarl-1.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:be6b3fdec5c62f2a67cb3f8c6dbf56bbf3f61c0f046f84645cd1ca73532ea051"}, - {file = "yarl-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38a3928ae37558bc1b559f67410df446d1fbfa87318b124bf5032c31e3447b74"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac9bb4c5ce3975aeac288cfcb5061ce60e0d14d92209e780c93954076c7c4367"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3da8a678ca8b96c8606bbb8bfacd99a12ad5dd288bc6f7979baddd62f71c63ef"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13414591ff516e04fcdee8dc051c13fd3db13b673c7a4cb1350e6b2ad9639ad3"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf74d08542c3a9ea97bb8f343d4fcbd4d8f91bba5ec9d5d7f792dbe727f88938"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7221580dc1db478464cfeef9b03b95c5852cc22894e418562997df0d074ccc"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:494053246b119b041960ddcd20fd76224149cfea8ed8777b687358727911dd33"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:52a25809fcbecfc63ac9ba0c0fb586f90837f5425edfd1ec9f3372b119585e45"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:e65610c5792870d45d7b68c677681376fcf9cc1c289f23e8e8b39c1485384185"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:1b1bba902cba32cdec51fca038fd53f8beee88b77efc373968d1ed021024cc04"}, - {file = 
"yarl-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:662e6016409828ee910f5d9602a2729a8a57d74b163c89a837de3fea050c7582"}, - {file = "yarl-1.9.2-cp37-cp37m-win32.whl", hash = "sha256:f364d3480bffd3aa566e886587eaca7c8c04d74f6e8933f3f2c996b7f09bee1b"}, - {file = "yarl-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6a5883464143ab3ae9ba68daae8e7c5c95b969462bbe42e2464d60e7e2698368"}, - {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5610f80cf43b6202e2c33ba3ec2ee0a2884f8f423c8f4f62906731d876ef4fac"}, - {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9a4e67ad7b646cd6f0938c7ebfd60e481b7410f574c560e455e938d2da8e0f4"}, - {file = "yarl-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:83fcc480d7549ccebe9415d96d9263e2d4226798c37ebd18c930fce43dfb9574"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fcd436ea16fee7d4207c045b1e340020e58a2597301cfbcfdbe5abd2356c2fb"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84e0b1599334b1e1478db01b756e55937d4614f8654311eb26012091be109d59"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3458a24e4ea3fd8930e934c129b676c27452e4ebda80fbe47b56d8c6c7a63a9e"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:838162460b3a08987546e881a2bfa573960bb559dfa739e7800ceeec92e64417"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4e2d08f07a3d7d3e12549052eb5ad3eab1c349c53ac51c209a0e5991bbada78"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de119f56f3c5f0e2fb4dee508531a32b069a5f2c6e827b272d1e0ff5ac040333"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:149ddea5abf329752ea5051b61bd6c1d979e13fbf122d3a1f9f0c8be6cb6f63c"}, - {file = 
"yarl-1.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:674ca19cbee4a82c9f54e0d1eee28116e63bc6fd1e96c43031d11cbab8b2afd5"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9b3152f2f5677b997ae6c804b73da05a39daa6a9e85a512e0e6823d81cdad7cc"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415d5a4b080dc9612b1b63cba008db84e908b95848369aa1da3686ae27b6d2b"}, - {file = "yarl-1.9.2-cp38-cp38-win32.whl", hash = "sha256:f7a3d8146575e08c29ed1cd287068e6d02f1c7bdff8970db96683b9591b86ee7"}, - {file = "yarl-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:63c48f6cef34e6319a74c727376e95626f84ea091f92c0250a98e53e62c77c72"}, - {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75df5ef94c3fdc393c6b19d80e6ef1ecc9ae2f4263c09cacb178d871c02a5ba9"}, - {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c027a6e96ef77d401d8d5a5c8d6bc478e8042f1e448272e8d9752cb0aff8b5c8"}, - {file = "yarl-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3b078dbe227f79be488ffcfc7a9edb3409d018e0952cf13f15fd6512847f3f7"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59723a029760079b7d991a401386390c4be5bfec1e7dd83e25a6a0881859e716"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b03917871bf859a81ccb180c9a2e6c1e04d2f6a51d953e6a5cdd70c93d4e5a2a"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1012fa63eb6c032f3ce5d2171c267992ae0c00b9e164efe4d73db818465fac3"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a74dcbfe780e62f4b5a062714576f16c2f3493a0394e555ab141bf0d746bb955"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c56986609b057b4839968ba901944af91b8e92f1725d1a2d77cbac6972b9ed1"}, - {file = 
"yarl-1.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c315df3293cd521033533d242d15eab26583360b58f7ee5d9565f15fee1bef4"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b7232f8dfbd225d57340e441d8caf8652a6acd06b389ea2d3222b8bc89cbfca6"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:53338749febd28935d55b41bf0bcc79d634881195a39f6b2f767870b72514caf"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:066c163aec9d3d073dc9ffe5dd3ad05069bcb03fcaab8d221290ba99f9f69ee3"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8288d7cd28f8119b07dd49b7230d6b4562f9b61ee9a4ab02221060d21136be80"}, - {file = "yarl-1.9.2-cp39-cp39-win32.whl", hash = "sha256:b124e2a6d223b65ba8768d5706d103280914d61f5cae3afbc50fc3dfcc016623"}, - {file = "yarl-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18"}, - {file = "yarl-1.9.2.tar.gz", hash = "sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571"}, -] - -[package.dependencies] -idna = ">=2.0" -multidict = ">=4.0" - -[metadata] -lock-version = "2.0" -python-versions = ">=3.8,<4.0" -content-hash = "b020e8c2e701446815e81aa7b73332573c08f637cbc38538f0491626c6cd7ba6" diff --git a/pgml-sdks/rust/pgml/python/examples/chatbot/pyproject.toml b/pgml-sdks/rust/pgml/python/examples/chatbot/pyproject.toml deleted file mode 100644 index 2f3f834d1..000000000 --- a/pgml-sdks/rust/pgml/python/examples/chatbot/pyproject.toml +++ /dev/null @@ -1,25 +0,0 @@ -[tool.poetry] -name = "pgml_bot_builder" -version = "0.1.0" -description = "PostgresML bot builder for all your documentation" -authors = ["PostgresML "] -license = "MIT" -readme = "README.md" -packages = [{include = "pgml_chatbot"}] - -[tool.poetry.dependencies] -python = ">=3.8,<4.0" -openai = "^0.27.8" -rich = "^13.4.2" -pgml = "^0.8.0" -python-dotenv = "^1.0.0" -click = "^8.1.6" -black = "^23.7.0" - - -[build-system] 
-requires = ["poetry-core"] -build-backend = "poetry.core.masonry.api" - -[tool.poetry.scripts] -pgml_chatbot = "pgml_chatbot.main:main" diff --git a/pgml-sdks/rust/pgml/python/examples/extractive_question_answering.py b/pgml-sdks/rust/pgml/python/examples/extractive_question_answering.py new file mode 100644 index 000000000..21cfc90b5 --- /dev/null +++ b/pgml-sdks/rust/pgml/python/examples/extractive_question_answering.py @@ -0,0 +1,68 @@ +from pgml import Collection, Model, Splitter, Pipeline, Builtins +import json +from datasets import load_dataset +from time import time +from dotenv import load_dotenv +from rich.console import Console +import asyncio + + +async def main(): + load_dotenv() + console = Console() + + # Initialize collection + collection = Collection("squad_collection") + + # Create a pipeline using the default model and splitter + model = Model() + splitter = Splitter() + pipeline = Pipeline("squadv1", model, splitter) + await collection.add_pipeline(pipeline) + + # Prep documents for upserting + data = load_dataset("squad", split="train") + data = data.to_pandas() + data = data.drop_duplicates(subset=["context"]) + documents = [ + {"id": r["id"], "text": r["context"], "title": r["title"]} + for r in data.to_dict(orient="records") + ] + + # Upsert documents + await collection.upsert_documents(documents[:200]) + + # Query for context + query = "Who won more than 20 grammy awards?" 
+ console.print("Querying for context ...") + start = time() + results = ( + await collection.query().vector_recall(query, pipeline).limit(5).fetch_all() + ) + end = time() + console.print("\n Results for '%s' " % (query), style="bold") + console.print(results) + console.print("Query time = %0.3f" % (end - start)) + + # Construct context from results + context = " ".join(results[0][1].strip().split()) + context = context.replace('"', '\\"').replace("'", "''") + + # Query for answer + builtins = Builtins() + console.print("Querying for answer ...") + start = time() + answer = await builtins.transform( + "question-answering", [json.dumps({"question": query, "context": context})] + ) + end = time() + console.print("Results for query '%s'" % query, style="bold") + console.print(answer) + console.print("Query time = %0.3f" % (end - start)) + + # Archive collection + await collection.archive() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/pgml-sdks/rust/pgml/python/examples/question_answering.py b/pgml-sdks/rust/pgml/python/examples/question_answering.py new file mode 100644 index 000000000..923eebc31 --- /dev/null +++ b/pgml-sdks/rust/pgml/python/examples/question_answering.py @@ -0,0 +1,51 @@ +from pgml import Collection, Model, Splitter, Pipeline +from datasets import load_dataset +from time import time +from dotenv import load_dotenv +from rich.console import Console +import asyncio + + +async def main(): + load_dotenv() + console = Console() + + # Initialize collection + collection = Collection("squad_collection") + + # Create a pipeline using the default model and splitter + model = Model() + splitter = Splitter() + pipeline = Pipeline("squadv1", model, splitter) + await collection.add_pipeline(pipeline) + + # Prep documents for upserting + data = load_dataset("squad", split="train") + data = data.to_pandas() + data = data.drop_duplicates(subset=["context"]) + documents = [ + {"id": r["id"], "text": r["context"], "title": r["title"]} + for r in 
data.to_dict(orient="records") + ] + + # Upsert documents + await collection.upsert_documents(documents[:200]) + + # Query + query = "Who won 20 grammy awards?" + console.print("Querying for %s..." % query) + start = time() + results = ( + await collection.query().vector_recall(query, pipeline).limit(5).fetch_all() + ) + end = time() + console.print("\n Results for '%s' " % (query), style="bold") + console.print(results) + console.print("Query time = %0.3f" % (end - start)) + + # Archive collection + await collection.archive() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/pgml-sdks/rust/pgml/python/examples/question_answering_instructor.py b/pgml-sdks/rust/pgml/python/examples/question_answering_instructor.py new file mode 100644 index 000000000..3ca71e429 --- /dev/null +++ b/pgml-sdks/rust/pgml/python/examples/question_answering_instructor.py @@ -0,0 +1,63 @@ +from pgml import Collection, Model, Splitter, Pipeline +from datasets import load_dataset +from time import time +from dotenv import load_dotenv +from rich.console import Console +import asyncio + + +async def main(): + load_dotenv() + console = Console() + + # Initialize collection + collection = Collection("squad_collection_1") + + # Create a pipeline using hkunlp/instructor-base + model = Model( + name="hkunlp/instructor-base", + parameters={"instruction": "Represent the Wikipedia document for retrieval: "}, + ) + splitter = Splitter() + pipeline = Pipeline("squad_instruction", model, splitter) + await collection.add_pipeline(pipeline) + + # Prep documents for upserting + data = load_dataset("squad", split="train") + data = data.to_pandas() + data = data.drop_duplicates(subset=["context"]) + documents = [ + {"id": r["id"], "text": r["context"], "title": r["title"]} + for r in data.to_dict(orient="records") + ] + + # Upsert documents + await collection.upsert_documents(documents[:200]) + + # Query + query = "Who won more than 20 grammy awards?" + console.print("Querying for %s..." 
% query) + start = time() + results = ( + await collection.query() + .vector_recall( + query, + pipeline, + query_parameters={ + "instruction": "Represent the Wikipedia question for retrieving supporting documents: " + }, + ) + .limit(5) + .fetch_all() + ) + end = time() + console.print("\n Results for '%s' " % (query), style="bold") + console.print(results) + console.print("Query time = %0.3f" % (end - start)) + + # Archive collection + await collection.archive() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/pgml-sdks/rust/pgml/python/examples/semantic_search.py b/pgml-sdks/rust/pgml/python/examples/semantic_search.py new file mode 100644 index 000000000..df861502f --- /dev/null +++ b/pgml-sdks/rust/pgml/python/examples/semantic_search.py @@ -0,0 +1,53 @@ +from pgml import Collection, Model, Splitter, Pipeline +from datasets import load_dataset +from time import time +from dotenv import load_dotenv +from rich.console import Console +import asyncio + + +async def main(): + load_dotenv() + console = Console() + + # Initialize collection + collection = Collection("quora_collection") + + # Create a pipeline using the default model and splitter + model = Model() + splitter = Splitter() + pipeline = Pipeline("quorav1", model, splitter) + await collection.add_pipeline(pipeline) + + # Prep documents for upserting + dataset = load_dataset("quora", split="train") + questions = [] + for record in dataset["questions"]: + questions.extend(record["text"]) + # Remove duplicates and add id + documents = [] + for i, question in enumerate(list(set(questions))): + if question: + documents.append({"id": i, "text": question}) + + # Upsert documents + await collection.upsert_documents(documents[:200]) + + # Query + query = "What is a good mobile os?" + console.print("Querying for %s..." 
% query) + start = time() + results = ( + await collection.query().vector_recall(query, pipeline).limit(5).fetch_all() + ) + end = time() + console.print("\n Results for '%s' " % (query), style="bold") + console.print(results) + console.print("Query time = %0.3f" % (end - start)) + + # Archive collection + await collection.archive() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/pgml-sdks/rust/pgml/python/examples/table_question_answering.py b/pgml-sdks/rust/pgml/python/examples/table_question_answering.py new file mode 100644 index 000000000..168a830b2 --- /dev/null +++ b/pgml-sdks/rust/pgml/python/examples/table_question_answering.py @@ -0,0 +1,62 @@ +from pgml import Collection, Model, Splitter, Pipeline +from datasets import load_dataset +from time import time +from dotenv import load_dotenv +from rich.console import Console +from rich.progress import track +import pandas as pd +import asyncio + + +async def main(): + load_dotenv() + console = Console() + + # Initialize collection + collection = Collection("ott_qa_20k_collection") + + # Create a pipeline using deepset/all-mpnet-base-v2-table + # A SentenceTransformer model trained specifically for embedding tabular data for retrieval + model = Model(name="deepset/all-mpnet-base-v2-table") + splitter = Splitter() + pipeline = Pipeline("ott_qa_20kv1", model, splitter) + await collection.add_pipeline(pipeline) + + # Prep documents for upserting + data = load_dataset("ashraq/ott-qa-20k", split="train") + documents = [] + + # loop through the dataset and convert tabular data to pandas dataframes + for doc in track(data): + table = pd.DataFrame(doc["data"], columns=doc["header"]) + processed_table = "\n".join([table.to_csv(index=False)]) + documents.append( + { + "text": processed_table, + "title": doc["title"], + "url": doc["url"], + "id": doc["uid"], + } + ) + + # Upsert documents + await collection.upsert_documents(documents[:100]) + + # Query + query = "Which country has the highest GDP in 
2020?" + console.print("Querying for %s..." % query) + start = time() + results = ( + await collection.query().vector_recall(query, pipeline).limit(5).fetch_all() + ) + end = time() + console.print("\n Results for '%s' " % (query), style="bold") + console.print(results) + console.print("Query time = %0.3f" % (end - start)) + + # Archive collection + await collection.archive() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/pgml-sdks/rust/pgml/python/manual-build-deploy.sh b/pgml-sdks/rust/pgml/python/manual-build-deploy.sh index 788a6b3e0..81e1756e6 100755 --- a/pgml-sdks/rust/pgml/python/manual-build-deploy.sh +++ b/pgml-sdks/rust/pgml/python/manual-build-deploy.sh @@ -2,10 +2,5 @@ echo "Make sure and set the environment variable MATURIN_PYPI_TOKEN to your PyPI token." - cd .. -rm -r ../../python/pgml/dist/ -mkdir ../../python/pgml/dist/ -maturin build --release --strip -i python3.8 -i python3.9 -i python3.10 -i python3.11 -o ../../python/pgml/dist -F python -cd ../../python/pgml -maturin upload --skip-existing dist/* +maturin publish -r $1 -i python3.8 -i python3.9 -i python3.10 -i python3.11 --skip-existing -F python diff --git a/pgml-sdks/rust/pgml/python/pgml/pgml.pyi b/pgml-sdks/rust/pgml/python/pgml/pgml.pyi new file mode 100644 index 000000000..6c6dbb04f --- /dev/null +++ b/pgml-sdks/rust/pgml/python/pgml/pgml.pyi @@ -0,0 +1,89 @@ + +def py_init_logger(level: Optional[str] = "Default set in Rust. Please see documentation.", format: Optional[str] = "Default set in Rust. Please see documentation.") -> None + +Json = Any +DateTime = int + +# Top of file key: A12BECOD! +from typing import List, Dict, Optional, Self, Any + + +class Builtins: + def __init__(self, database_url: Optional[str] = "Default set in Rust. Please check the documentation.") -> Self + ... + def query(self, query: str) -> QueryRunner + ... + async def transform(self, task: Json, inputs: List[str], args: Optional[Json] = Any) -> Json + ... 
+ +class Collection: + def __init__(self, name: str, database_url: Optional[str] = "Default set in Rust. Please check the documentation.") -> Self + ... + async def add_pipeline(self, pipeline: Pipeline) -> None + ... + async def remove_pipeline(self, pipeline: Pipeline) -> None + ... + async def enable_pipeline(self, pipeline: Pipeline) -> None + ... + async def disable_pipeline(self, pipeline: Pipeline) -> None + ... + async def upsert_documents(self, documents: List[Json], strict: Optional[bool] = True) -> None + ... + async def get_documents(self, last_id: Optional[int] = 1, limit: Optional[int] = 1) -> List[Json] + ... + async def vector_search(self, query: str, pipeline: Pipeline, query_parameters: Optional[Json] = Any, top_k: Optional[int] = 1) -> List[tuple[float, str, Json]] + ... + async def archive(self) -> None + ... + def query(self) -> QueryBuilder + ... + async def get_pipelines(self) -> List[Pipeline] + ... + async def get_pipeline(self, name: str) -> Pipeline + ... + async def exists(self) -> bool + ... + +class Model: + def __init__(self, name: Optional[str] = "Default set in Rust. Please check the documentation.", source: Optional[str] = "Default set in Rust. Please check the documentation.", parameters: Optional[Json] = Any) -> Self + ... + +class Pipeline: + def __init__(self, name: str, model: Optional[Model] = Any, splitter: Optional[Splitter] = Any, parameters: Optional[Json] = Any) -> Self + ... + async def get_status(self) -> PipelineSyncData + ... + async def to_dict(self) -> Json + ... + +class QueryBuilder: + def limit(self, limit: int) -> Self + ... + def filter(self, filter: Json) -> Self + ... + def vector_recall(self, query: str, pipeline: Pipeline, query_parameters: Optional[Json] = Any) -> Self + ... + async def fetch_all(self) -> List[tuple[float, str, Json]] + ... + def to_full_string(self) -> str + ... + +class QueryRunner: + async def fetch_all(self) -> Json + ... + async def execute(self) -> None + ... 
+ def bind_string(self, bind_value: str) -> Self + ... + def bind_int(self, bind_value: int) -> Self + ... + def bind_float(self, bind_value: float) -> Self + ... + def bind_bool(self, bind_value: bool) -> Self + ... + def bind_json(self, bind_value: Json) -> Self + ... + +class Splitter: + def __init__(self, name: Optional[str] = "Default set in Rust. Please check the documentation.", parameters: Optional[Json] = Any) -> Self + ... diff --git a/pgml-sdks/rust/pgml/python/tests/test.py b/pgml-sdks/rust/pgml/python/tests/test.py index aab7ae67d..f64768d8c 100644 --- a/pgml-sdks/rust/pgml/python/tests/test.py +++ b/pgml-sdks/rust/pgml/python/tests/test.py @@ -1,106 +1,222 @@ -import asyncio import os import pgml +import pytest +from multiprocessing import Pool +from typing import List, Dict, Any +import asyncio -CONNECTION_STRING = os.environ.get("DATABASE_URL") - -async def main(): - collection_name = "ptest22" - db = pgml.Database(CONNECTION_STRING) - collection = await db.create_or_get_collection(collection_name) - print("The Collection") - print(collection) - collection_does_exist = await db.does_collection_exist(collection_name) - print("Collection does exist") - print(collection_does_exist) - x = [{'id': '5733be284776f41900661182', 'text': 'Architecturally, the school has a Catholic character. Atop the Main Building\'s gold dome is a golden statue of the Virgin Mary. Immediately in front of the Main Building and facing it, is a copper statue of Christ with arms upraised with the legend "Venite Ad Me Omnes". Next to the Main Building is the Basilica of the Sacred Heart. Immediately behind the basilica is the Grotto, a Marian place of prayer and reflection. It is a replica of the grotto at Lourdes, France where the Virgin Mary reputedly appeared to Saint Bernadette Soubirous in 1858. 
At the end of the main drive (and in a direct line that connects through 3 statues and the Gold Dome), is a simple, modern stone statue of Mary.', 'title': 'University_of_Notre_Dame'}] - await collection.upsert_documents(x) - await collection.register_text_splitter("recursive_character", {"chunk_size": 1500, "chunk_overlap": 40}) - splitters = await collection.get_text_splitters() - print("The Splitters") - print(splitters) - await collection.generate_chunks() - await collection.register_model("embedding", "intfloat/e5-small") - models = await collection.get_models() - print("The Models") - print(models) - await collection.generate_embeddings() - results = await collection.vector_search("small") - print("The Results") - print(results) - await db.archive_collection(collection_name) - -async def query_builder(): - collection_name = "pqtest2" - db = pgml.Database(CONNECTION_STRING) - collection = await db.create_or_get_collection(collection_name) - print("The collection:") - print(collection) - documents = [ - { - "id": 1, - "metadata": { - "uuid": 1 - }, - "text": "This is a test document", - }, - { - "id": 2, - "metadata": { - "uuid": 2 - }, - "text": "This is another test document", - }, - { - "id": 3, - "metadata": { - "uuid": 3 - }, - "text": "PostgresML", - } - - ] - await collection.upsert_documents(documents) - await collection.generate_tsvectors('english') - await collection.generate_chunks() - await collection.generate_embeddings() - - query = collection.query().vector_recall("test").filter({ - "metadata": { - "metadata": { - "$or": [ - {"uuid": {"$eq": 1}}, - {"uuid": {"$lt": 4}} - ] +#################################################################################### +#################################################################################### +## PLEASE BE AWARE THESE TESTS DO INVOLVE CHECKS ON LAZILY CREATED DATABASE ITEMS ## +## IF ANY OF THE COLLECTION NAMES ALREADY EXIST, SOME TESTS MAY FAIL ## +## THIS DOES NOT MEAN THE SDK IS BROKEN. 
PLEASE CLEAR YOUR DATABASE INSTANCE ## +## BEFORE RUNNING ANY TESTS ## +#################################################################################### +#################################################################################### + +DATABASE_URL = os.environ.get("DATABASE_URL") +if DATABASE_URL is None: + print("No DATABASE_URL environment variable found. Please set one") + exit(1) + +pgml.py_init_logger() + + +def generate_dummy_documents(count: int) -> List[Dict[str, Any]]: + dummy_documents = [] + for i in range(count): + dummy_documents.append( + { + "id": i, + "text": "This is a test document: {}".format(i), + "some_random_thing": "This will be metadata on it", + "metadata": {"uuid": i * 10, "name": "Test Document {}".format(i)}, } - }, - "full_text": { - "text": "postgresml" - } - }).limit(10) - print("Running query:") - print(query.to_full_string()) - results = await query.run() - print("The results:") - print(results) + ) + return dummy_documents - # await db.archive_collection(collection_name) -async def query_runner(): - db = pgml.Database(CONNECTION_STRING) - # results = await db.query("SELECT * from pgml.collections WHERE id = $1").bind_int(1).fetch_all() - results = await db.query("SELECT * from pgml.collections").fetch_all() - print(results) +################################################### +## Test the API exposed is correct ################ +################################################### -async def transform(): - db = pgml.Database(CONNECTION_STRING) - # results = await db.query("SELECT * from pgml.collections WHERE id = $1").bind_int(1).fetch_all() - results = await db.transform("translation_en_to_fr", ["This is a test", "This is a test 2"]) - print(results) +def test_can_create_collection(): + collection = pgml.Collection(name="test_p_c_tscc_0") + assert collection is not None + + +def test_can_create_model(): + model = pgml.Model() + assert model is not None + + +def test_can_create_splitter(): + splitter = pgml.Splitter() + 
assert splitter is not None + + +def test_can_create_pipeline(): + model = pgml.Model() + splitter = pgml.Splitter() + pipeline = pgml.Pipeline("test_p_p_tccp_0", model, splitter) + assert pipeline is not None + + +def test_can_create_builtins(): + builtins = pgml.Builtins() + assert builtins is not None + + +################################################### +## Test various vector searches ################### +################################################### + + +@pytest.mark.asyncio +async def test_can_vector_search_with_local_embeddings(): + model = pgml.Model() + splitter = pgml.Splitter() + pipeline = pgml.Pipeline("test_p_p_tcvs_0", model, splitter) + collection = pgml.Collection(name="test_p_c_tcvs_4") + await collection.upsert_documents(generate_dummy_documents(3)) + await collection.add_pipeline(pipeline) + results = await collection.vector_search("Here is some query", pipeline) + assert len(results) == 3 + await collection.archive() + + +@pytest.mark.asyncio +async def test_can_vector_search_with_remote_embeddings(): + model = pgml.Model(name="text-embedding-ada-002", source="openai") + splitter = pgml.Splitter() + pipeline = pgml.Pipeline("test_p_p_tcvswre_0", model, splitter) + collection = pgml.Collection(name="test_p_c_tcvswre_3") + await collection.upsert_documents(generate_dummy_documents(3)) + await collection.add_pipeline(pipeline) + results = await collection.vector_search("Here is some query", pipeline) + assert len(results) == 3 + await collection.archive() + + +@pytest.mark.asyncio +async def test_can_vector_search_with_query_builder(): + model = pgml.Model() + splitter = pgml.Splitter() + pipeline = pgml.Pipeline("test_p_p_tcvswqb_1", model, splitter) + collection = pgml.Collection(name="test_p_c_tcvswqb_5") + await collection.upsert_documents(generate_dummy_documents(3)) + await collection.add_pipeline(pipeline) + results = ( + await collection.query() + .vector_recall("Here is some query", pipeline) + .limit(10) + .fetch_all() + ) + 
assert len(results) == 3 + await collection.archive() + + +@pytest.mark.asyncio +async def test_can_vector_search_with_query_builder_with_remote_embeddings(): + model = pgml.Model(name="text-embedding-ada-002", source="openai") + splitter = pgml.Splitter() + pipeline = pgml.Pipeline("test_p_p_tcvswqbwre_1", model, splitter) + collection = pgml.Collection(name="test_p_c_tcvswqbwre_1") + await collection.upsert_documents(generate_dummy_documents(3)) + await collection.add_pipeline(pipeline) + results = ( + await collection.query() + .vector_recall("Here is some query", pipeline) + .limit(10) + .fetch_all() + ) + assert len(results) == 3 + await collection.archive() + + +################################################### +## Test user output facing functions ############## +################################################### + + +@pytest.mark.asyncio +async def test_pipeline_to_dict(): + model = pgml.Model(name="text-embedding-ada-002", source="openai") + splitter = pgml.Splitter() + pipeline = pgml.Pipeline("test_p_p_tptd_1", model, splitter) + collection = pgml.Collection(name="test_p_c_tptd_1") + await collection.add_pipeline(pipeline) + pipeline_dict = await pipeline.to_dict() + assert pipeline_dict["name"] == "test_p_p_tptd_1" + await collection.remove_pipeline(pipeline) + await collection.archive() + + +################################################### +## Test with multiprocessing ###################### +################################################### + + +def vector_search(collection_name, pipeline_name): + collection = pgml.Collection(collection_name) + pipeline = pgml.Pipeline(pipeline_name) + result = asyncio.run( + collection.query() + .vector_recall("Here is some query", pipeline) + .limit(10) + .fetch_all() + ) + print(result) + return [0, 1, 2] + + +# @pytest.mark.asyncio +# async def test_multiprocessing(): +# collection_name = "test_p_p_tm_1" +# pipeline_name = "test_p_c_tm_4" +# +# model = pgml.Model() +# splitter = pgml.Splitter() +# pipeline 
= pgml.Pipeline(pipeline_name, model, splitter) +# +# collection = pgml.Collection(collection_name) +# await collection.upsert_documents(generate_dummy_documents(3)) +# await collection.add_pipeline(pipeline) +# +# with Pool(5) as p: +# results = p.starmap( +# vector_search, [(collection_name, pipeline_name) for _ in range(5)] +# ) +# for x in results: +# print(x) +# assert len(x) == 3 +# +# await collection.archive() + + +################################################### +## Manual tests ################################### +################################################### + + +async def silas_test_add_pipeline(): + model = pgml.Model() + splitter = pgml.Splitter() + pipeline = pgml.Pipeline("silas_test_p_1", model, splitter) + collection = pgml.Collection(name="silas_test_c_10") + await collection.add_pipeline(pipeline) + +async def silas_test_upsert_documents(): + collection = pgml.Collection(name="silas_test_c_9") + await collection.upsert_documents(generate_dummy_documents(10)) + +async def silas_test_vector_search(): + pipeline = pgml.Pipeline("silas_test_p_1") + collection = pgml.Collection(name="silas_test_c_9") + results = await collection.vector_search("Here is some query", pipeline) + print(results) -if __name__ == "__main__": - asyncio.run(query_builder()) - # asyncio.run(main()) - # asyncio.run(query_runner()) - # asyncio.run(transform()) +# asyncio.run(silas_test_add_pipeline()) +# asyncio.run(silas_test_upsert_documents()) +# asyncio.run(silas_test_vector_search()) diff --git a/pgml-sdks/rust/pgml/python/tests/test_collection.py b/pgml-sdks/rust/pgml/python/tests/test_collection.py deleted file mode 100644 index 60e10eb2b..000000000 --- a/pgml-sdks/rust/pgml/python/tests/test_collection.py +++ /dev/null @@ -1,200 +0,0 @@ -import asyncio -import unittest -import pgml -import os -import hashlib -from pypika import PostgreSQLQuery as Query, Table, Parameter -from psycopg_pool import ConnectionPool -from psycopg import Connection - -from typing 
import List, Any - -import logging -from rich.logging import RichHandler -from rich.progress import track -import os -import pytest - -FORMAT = "%(message)s" -logging.basicConfig( - level=os.environ.get("LOGLEVEL", "ERROR"), - format=FORMAT, - datefmt="[%X]", - handlers=[RichHandler()], -) -log = logging.getLogger("rich") - -async def run_select_statement( - conn: Connection, statement: str, order_by: str = "", ascending: bool = True -) -> List[Any]: - """ - The function runs a select statement on a database connection and returns the results as a list of - dictionaries. - - :param conn: The `conn` parameter is a connection object that represents a connection to a database. - It is used to execute SQL statements and retrieve results from the database - :type conn: Connection - :param statement: The SQL SELECT statement to be executed on the database - :type statement: str - :return: The function `run_select_statement` returns a list of dictionaries, where each dictionary - represents a row of the result set of the SQL query specified in the `statement` parameter. The keys - of each dictionary are the column names of the result set, and the values are the corresponding - values of the row. - """ - - statement = statement.strip().rstrip(";") - cur = conn.cursor() - order_statement = "" - if order_by: - order_statement = "ORDER BY t.%s" % order_by - if ascending: - order_statement += " ASC" - else: - order_statement += " DESC" - - if order_statement: - json_conversion_statement = """ - SELECT array_to_json(array_agg(row_to_json(t) {order_statement})) - FROM ({select_statement}) t; - """.format( - select_statement=statement, - order_statement=order_statement, - ) - else: - json_conversion_statement = """ - SELECT array_to_json(array_agg(row_to_json(t))) - FROM ({select_statement}) t; - """.format( - select_statement=statement - ) - log.info("Running %s .. 
" % json_conversion_statement) - cur.execute(json_conversion_statement) - results = cur.fetchall() - conn.commit() - cur.close() - - output = [] - if results: - if results[0][0]: - output = results[0][0] - - return output - -class TestCollection(unittest.IsolatedAsyncioTestCase): - - async def asyncSetUp(self) -> None: - local_pgml = "postgres://postgres@127.0.0.1:5433/pgml_development" - conninfo = os.environ.get("PGML_CONNECTION", local_pgml) - self.pool = ConnectionPool(conninfo) - self.db = pgml.Database(conninfo) - self.collection_name = "test_collection_1" - self.collection = await self.db.create_or_get_collection(self.collection_name) - print(self.collection) - self.documents = [ - { - "id": hashlib.md5(f"abcded-{i}".encode("utf-8")).hexdigest(), - "text": f"Lorem ipsum {i}", - "source": "test_suite", - } - for i in range(4, 7) - ] - self.documents_no_ids = [ - { - "text": f"Lorem ipsum {i}", - "source": "test_suite_no_ids", - } - for i in range(1, 4) - ] - - self.documents_with_metadata = [ - { - "text": f"Lorem ipsum metadata", - "source": f"url {i}", - "url": f"/home {i}", - "user": f"John Doe-{i+1}", - } - for i in range(8, 12) - ] - - self.documents_with_reviews = [ - { - "text": f"product is abc {i}", - "reviews": i * 2, - } - for i in range(20, 25) - ] - - self.documents_with_reviews_metadata = [ - { - "text": f"product is abc {i}", - "reviews": i * 2, - "source": "amazon", - "user": "John Doe", - } - for i in range(20, 25) - ] - - self.documents_with_reviews_metadata += [ - { - "text": f"product is abc {i}", - "reviews": i * 2, - "source": "ebay", - } - for i in range(20, 25) - ] - - async def test_documents_upsert(self): - await self.collection.upsert_documents(self.documents) - conn = self.pool.getconn() - table = Table("documents",schema=self.collection_name) - query = Query.from_(table).select("*") - results = await run_select_statement(conn, str(query)) - self.pool.putconn(conn) - assert len(results) >= len(self.documents) - - async def 
test_documents_upsert_no_ids(self): - await self.collection.upsert_documents(self.documents_no_ids) - conn = self.pool.getconn() - table = Table("documents",schema=self.collection_name) - query = Query.from_(table).select("*") - results = await run_select_statement(conn, str(query)) - self.pool.putconn(conn) - assert len(results) >= len(self.documents_no_ids) - - async def test_default_text_splitter(self): - await self.collection.register_text_splitter() - splitters = await self.collection.get_text_splitters() - print(splitters) - assert splitters[0]["name"] == "recursive_character" - - async def test_default_embeddings_model(self): - await self.collection.register_model() - models = await self.collection.get_models() - - assert len(models) == 1 - assert models[0]["name"] == "intfloat/e5-small" - - async def test_generate_chunks(self): - await self.collection.upsert_documents(self.documents) - await self.collection.upsert_documents(self.documents_no_ids) - await self.collection.register_text_splitter() - await self.collection.generate_chunks(splitter_id=1) - splitter_params = {"chunk_size": "3", "chunk_overlap": "2"} - await self.collection.register_text_splitter( splitter_name="recursive_character", - splitter_params=splitter_params - ) - await self.collection.generate_chunks(splitter_id=1) - - async def test_generate_embeddings(self): - await self.collection.upsert_documents(self.documents) - await self.collection.upsert_documents(self.documents_no_ids) - self.collection.generate_chunks(splitter_id=1) - self.collection.generate_embeddings() - - async def test_vector_search(self): - await self.collection.upsert_documents(self.documents) - await self.collection.upsert_documents(self.documents_no_ids) - await self.collection.generate_chunks() - await self.collection.generate_embeddings() - results = await self.collection.vector_search("Lorem ipsum 1", top_k=2) - assert abs(results[0][0] - 1.0) < 1e-5 \ No newline at end of file diff --git 
a/pgml-sdks/rust/pgml/src/builtins.rs b/pgml-sdks/rust/pgml/src/builtins.rs new file mode 100644 index 000000000..29cb573ae --- /dev/null +++ b/pgml-sdks/rust/pgml/src/builtins.rs @@ -0,0 +1,119 @@ +use pgml_macros::{custom_derive, custom_methods}; +use sqlx::Row; +use tracing::instrument; + +#[derive(custom_derive, Debug, Clone)] +pub struct Builtins { + pub database_url: Option, +} + +use crate::{get_or_initialize_pool, query_runner::QueryRunner, types::Json}; + +#[cfg(feature = "javascript")] +use crate::languages::javascript::*; + +#[cfg(feature = "python")] +use crate::{languages::python::*, query_runner::QueryRunnerPython, types::JsonPython}; + +#[custom_methods(new, query, transform)] +impl Builtins { + pub fn new(database_url: Option) -> Self { + Self { database_url } + } + + /// Run an arbitrary query + /// + /// # Arguments + /// + /// * `query` - The query to run + /// + /// # Example + /// + /// ``` + /// use pgml::Builtins; + /// + /// async fn example() -> anyhow::Result<()> { + /// let builtins = Builtins::new(None); + /// let query = "SELECT * FROM pgml.collections"; + /// let results = builtins.query(query).fetch_all().await?; + /// Ok(()) + /// } + ///``` + #[instrument(skip(self))] + pub fn query(&self, query: &str) -> QueryRunner { + QueryRunner::new(query, self.database_url.clone()) + } + + // Run the builtin pgml.transform function + // + // # Arguments + // + // * `task` - The task to run + // * `inputs` - The inputs to the model + // * `args` - The arguments to the model + // + // # Example + // + // ``` + // use pgml::Builtins; + // + // async fn example() -> anyhow::Result<()> { + // let builtins = Builtins::new(None); + // let task = Json::from(serde_json::json!("translation_en_to_fr")); + // let inputs = vec![ + // "test1".to_string(), + // "test2".to_string(), + // ]; + // let results = builtins.transform(task, inputs, None).await?; + // Ok(()) + // } + // ``` + pub async fn transform( + &self, + task: Json, + inputs: Vec, + args: 
Option, + ) -> anyhow::Result { + let pool = get_or_initialize_pool(&self.database_url).await?; + let args = match args { + Some(a) => a.0, + None => serde_json::json!({}), + }; + let query = sqlx::query("SELECT pgml.transform(task => $1, inputs => $2, args => $3)"); + let query = if task.0.is_string() { + query.bind(task.0.as_str()) + } else { + query.bind(task.0) + }; + let results = query.bind(inputs).bind(args).fetch_all(&pool).await?; + let results = results.get(0).unwrap().get::(0); + Ok(Json(results)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::init_logger; + + #[sqlx::test] + async fn can_query() -> anyhow::Result<()> { + init_logger(None, None).ok(); + let builtins = Builtins::new(None); + let query = "SELECT 10"; + let results = builtins.query(query).fetch_all().await?; + assert!(results.as_array().is_some()); + Ok(()) + } + + #[sqlx::test] + async fn can_transform() -> anyhow::Result<()> { + init_logger(None, None).ok(); + let builtins = Builtins::new(None); + let task = Json::from(serde_json::json!("translation_en_to_fr")); + let inputs = vec!["test1".to_string(), "test2".to_string()]; + let results = builtins.transform(task, inputs, None).await?; + assert!(results.as_array().is_some()); + Ok(()) + } +} diff --git a/pgml-sdks/rust/pgml/src/collection.rs b/pgml-sdks/rust/pgml/src/collection.rs index 01cc72437..39ae112b3 100644 --- a/pgml-sdks/rust/pgml/src/collection.rs +++ b/pgml-sdks/rust/pgml/src/collection.rs @@ -1,785 +1,728 @@ +use anyhow::Context; +use indicatif::MultiProgress; use itertools::Itertools; -use log::warn; use pgml_macros::{custom_derive, custom_methods}; use sqlx::postgres::PgPool; use sqlx::Executor; -use std::borrow::Borrow; +use sqlx::PgConnection; +use std::borrow::Cow; +use std::time::SystemTime; +use tracing::{instrument, warn}; -use crate::models; -use crate::queries; -use crate::query_builder; -use crate::query_builder::QueryBuilder; -use crate::types::Json; +use crate::{ + get_or_initialize_pool, 
model::ModelRuntime, models, pipeline::Pipeline, queries, + query_builder, query_builder::QueryBuilder, remote_embeddings::build_remote_embeddings, + splitter::Splitter, types::DateTime, types::Json, utils, +}; #[cfg(feature = "javascript")] use crate::languages::javascript::*; +#[cfg(feature = "python")] +use crate::{ + languages::python::*, pipeline::PipelinePython, query_builder::QueryBuilderPython, + types::JsonPython, +}; + +/// Our project tasks +#[derive(Debug, Clone)] +pub enum ProjectTask { + Regression, + Classification, + QuestionAnswering, + Summarization, + Translation, + TextClassification, + TextGeneration, + Text2text, + Embedding, +} + +impl From<&str> for ProjectTask { + fn from(s: &str) -> Self { + match s { + "regression" => Self::Regression, + "classification" => Self::Classification, + "question_answering" => Self::QuestionAnswering, + "summarization" => Self::Summarization, + "translation" => Self::Translation, + "text_classification" => Self::TextClassification, + "text_generation" => Self::TextGeneration, + "text2text" => Self::Text2text, + "embedding" => Self::Embedding, + _ => panic!("Unknown project task: {}", s), + } + } +} + +impl From<&ProjectTask> for &'static str { + fn from(m: &ProjectTask) -> Self { + match m { + ProjectTask::Regression => "regression", + ProjectTask::Classification => "classification", + ProjectTask::QuestionAnswering => "question_answering", + ProjectTask::Summarization => "summarization", + ProjectTask::Translation => "translation", + ProjectTask::TextClassification => "text_classification", + ProjectTask::TextGeneration => "text_generation", + ProjectTask::Text2text => "text2text", + ProjectTask::Embedding => "embedding", + } + } +} + +// Contains information a Collection, Model, or Splitter needs to know about the project +#[derive(Debug, Clone)] +#[allow(dead_code)] +pub(crate) struct ProjectInfo { + pub id: i64, + pub name: String, + pub task: ProjectTask, + pub database_url: Option, +} + +// Data that is 
stored in the database about a collection +#[allow(dead_code)] +#[derive(Debug, Clone)] +pub(crate) struct CollectionDatabaseData { + pub id: i64, + pub created_at: DateTime, + pub project_info: ProjectInfo, +} + /// A collection of documents #[derive(custom_derive, Debug, Clone)] pub struct Collection { pub name: String, - pub pool: PgPool, + pub database_url: Option, + pub pipelines_table_name: String, pub documents_table_name: String, - pub splitters_table_name: String, - pub models_table_name: String, pub transforms_table_name: String, pub chunks_table_name: String, pub documents_tsvectors_table_name: String, + pub(crate) database_data: Option, } #[custom_methods( + new, upsert_documents, - register_text_splitter, - get_text_splitters, - generate_chunks, - register_model, - get_models, - generate_embeddings, - generate_tsvectors, + get_documents, + get_pipelines, + get_pipeline, + add_pipeline, + remove_pipeline, + enable_pipeline, + disable_pipeline, vector_search, - query + query, + exists, + archive )] impl Collection { - /// Creates a new collection + /// Creates a new [Collection] + /// + /// # Arguments /// - /// This should not be called directly. Use [crate::Database::create_or_get_collection] instead. + /// * `name` - The name of the collection. + /// * `database_url` - An optional database_url. If passed, this url will be used instead of + /// the `DATABASE_URL` environment variable. /// - /// Note that a default text splitter and model are created automatically. 
- pub async fn new(name: String, pool: PgPool) -> anyhow::Result { + /// # Example + /// + /// ``` + /// use pgml::Collection; + /// let collection = Collection::new("my_collection", None); + /// ``` + pub fn new(name: &str, database_url: Option) -> Self { let ( + pipelines_table_name, documents_table_name, - splitters_table_name, - models_table_name, transforms_table_name, chunks_table_name, documents_tsvectors_table_name, - ) = Self::generate_table_names(&name); - let collection = Self { - name, - pool, + ) = Self::generate_table_names(name); + Self { + name: name.to_string(), + database_url, + pipelines_table_name, documents_table_name, - splitters_table_name, - models_table_name, transforms_table_name, chunks_table_name, documents_tsvectors_table_name, - }; - sqlx::query("INSERT INTO pgml.collections (name, active) VALUES ($1, FALSE) ON CONFLICT (name) DO NOTHING") - .bind(&collection.name) - .execute(&collection.pool) - .await?; - collection.create_documents_table().await?; - collection.create_splitter_table().await?; - collection.create_models_table().await?; - collection.create_transforms_table().await?; - collection.create_chunks_table().await?; - collection.create_documents_tsvectors_table().await?; - collection.register_text_splitter(None, None).await?; - collection.register_model(None, None, None).await?; - sqlx::query("UPDATE pgml.collections SET active = TRUE WHERE name = $1") - .bind(&collection.name) - .execute(&collection.pool) - .await?; - Ok(collection) + database_data: None, + } } - async fn create_documents_table(&self) -> anyhow::Result<()> { - self.pool - .execute(query_builder!("CREATE SCHEMA IF NOT EXISTS %s", self.name).as_str()) - .await?; - self.pool - .execute( - query_builder!(queries::CREATE_DOCUMENTS_TABLE, self.documents_table_name).as_str(), - ) - .await?; - self.pool - .execute( - query_builder!( - queries::CREATE_INDEX, - "created_at_index", - self.documents_table_name, - "created_at" - ) - .as_str(), - ) - .await?; - self.pool - 
.execute( - query_builder!( - queries::CREATE_INDEX_USING_GIN, - "metadata_index", - self.documents_table_name, - "metadata jsonb_path_ops" - ) - .as_str(), - ) - .await?; - Ok(()) - } + #[instrument(skip(self))] + pub(crate) async fn verify_in_database(&mut self, throw_if_exists: bool) -> anyhow::Result<()> { + if self.database_data.is_none() { + let pool = get_or_initialize_pool(&self.database_url).await?; - async fn create_splitter_table(&self) -> anyhow::Result<()> { - self.pool - .execute( - query_builder!(queries::CREATE_SPLITTERS_TABLE, self.splitters_table_name).as_str(), - ) - .await?; - self.pool - .execute( - query_builder!( - queries::CREATE_INDEX, - "created_at_index", - self.splitters_table_name, - "created_at" - ) - .as_str(), - ) - .await?; - self.pool - .execute( - query_builder!( - queries::CREATE_INDEX, - "name_index", - self.splitters_table_name, - "name" - ) - .as_str(), - ) - .await?; - self.pool - .execute( - query_builder!( - queries::CREATE_INDEX_USING_GIN, - "parameters_index", - self.splitters_table_name, - "parameters jsonb_path_ops" - ) - .as_str(), - ) - .await?; - Ok(()) - } + let result: Result, _> = + sqlx::query_as("SELECT * FROM pgml.collections WHERE name = $1") + .bind(&self.name) + .fetch_optional(&pool) + .await; + let collection: Option = match result { + Ok(s) => anyhow::Ok(s), + Err(e) => match e.as_database_error() { + Some(db_e) => { + // Error 42P01 is "undefined_table" + if db_e.code() == Some(std::borrow::Cow::from("42P01")) { + sqlx::query(queries::CREATE_COLLECTIONS_TABLE) + .execute(&pool) + .await?; + Ok(None) + } else { + Err(e.into()) + } + } + None => Err(e.into()), + }, + }?; - async fn create_models_table(&self) -> anyhow::Result<()> { - self.pool - .execute(query_builder!(queries::CREATE_MODELS_TABLE, self.models_table_name).as_str()) - .await?; - self.pool - .execute( - query_builder!( - queries::CREATE_INDEX, - "created_at_index", - self.models_table_name, - "created_at" - ) - .as_str(), - ) - .await?; - 
self.pool - .execute( - query_builder!( - queries::CREATE_INDEX, - "task_index", - self.models_table_name, - "task" - ) - .as_str(), - ) - .await?; - self.pool - .execute( - query_builder!( - queries::CREATE_INDEX, - "name_index", - self.models_table_name, - "name" - ) - .as_str(), - ) - .await?; - self.pool - .execute( - query_builder!( - queries::CREATE_INDEX_USING_GIN, - "parameters_index", - self.models_table_name, - "parameters jsonb_path_ops" - ) - .as_str(), - ) - .await?; - Ok(()) - } + self.database_data = if let Some(c) = collection { + anyhow::ensure!(!throw_if_exists, "Collection {} already exists", self.name); + Some(CollectionDatabaseData { + id: c.id, + created_at: c.created_at, + project_info: ProjectInfo { + id: c.project_id, + name: self.name.clone(), + task: "embedding".into(), + database_url: self.database_url.clone(), + }, + }) + } else { + let mut transaction = pool.begin().await?; - async fn create_transforms_table(&self) -> anyhow::Result<()> { - self.pool - .execute( - query_builder!( - queries::CREATE_TRANSFORMS_TABLE, - self.transforms_table_name, - self.splitters_table_name, - self.models_table_name - ) - .as_str(), - ) - .await?; - self.pool - .execute( - query_builder!( - queries::CREATE_INDEX, - "created_at_index", - self.transforms_table_name, - "created_at" - ) - .as_str(), - ) - .await?; - Ok(()) - } + let project_id: i64 = sqlx::query_scalar("INSERT INTO pgml.projects (name, task) VALUES ($1, 'embedding'::pgml.task) ON CONFLICT (name) DO UPDATE SET task = EXCLUDED.task RETURNING id, task::TEXT") + .bind(&self.name) + .fetch_one(&mut transaction) + .await?; - async fn create_chunks_table(&self) -> anyhow::Result<()> { - self.pool - .execute( - query_builder!( - queries::CREATE_CHUNKS_TABLE, - self.chunks_table_name, - self.documents_table_name, - self.splitters_table_name - ) - .as_str(), - ) - .await?; - self.pool - .execute( - query_builder!( - queries::CREATE_INDEX, - "created_at_index", - self.chunks_table_name, - "created_at" 
- ) - .as_str(), - ) - .await?; - self.pool - .execute( - query_builder!( - queries::CREATE_INDEX, - "document_id_index", - self.chunks_table_name, - "document_id" - ) - .as_str(), - ) - .await?; - self.pool - .execute( - query_builder!( - queries::CREATE_INDEX, - "splitter_id_index", - self.chunks_table_name, - "splitter_id" + transaction + .execute(query_builder!("CREATE SCHEMA IF NOT EXISTS %s", self.name).as_str()) + .await?; + + let c: models::Collection = sqlx::query_as("INSERT INTO pgml.collections (name, project_id) VALUES ($1, $2) ON CONFLICT (name) DO NOTHING RETURNING *") + .bind(&self.name) + .bind(project_id) + .fetch_one(&mut transaction) + .await?; + + let collection_database_data = CollectionDatabaseData { + id: c.id, + created_at: c.created_at, + project_info: ProjectInfo { + id: c.project_id, + name: self.name.clone(), + task: "embedding".into(), + database_url: self.database_url.clone(), + }, + }; + + Splitter::create_splitters_table(&mut transaction).await?; + Pipeline::create_pipelines_table( + &collection_database_data.project_info, + &mut transaction, ) - .as_str(), - ) - .await?; + .await?; + self.create_documents_table(&mut transaction).await?; + self.create_chunks_table(&mut transaction).await?; + self.create_documents_tsvectors_table(&mut transaction) + .await?; + + transaction.commit().await?; + Some(collection_database_data) + }; + } Ok(()) } - async fn create_documents_tsvectors_table(&self) -> anyhow::Result<()> { - self.pool - .execute( - query_builder!( - queries::CREATE_DOCUMENTS_TSVECTORS_TABLE, - self.documents_tsvectors_table_name, - self.documents_table_name - ) - .as_str(), - ) - .await?; - self.pool - .execute( - query_builder!( - queries::CREATE_INDEX, - "configuration_index", - self.documents_tsvectors_table_name, - "configuration" - ) - .as_str(), - ) - .await?; - self.pool - .execute( - query_builder!( - queries::CREATE_INDEX_USING_GIN, - "tsvector_index", - self.documents_tsvectors_table_name, - "ts" - ) - .as_str(), - ) 
- .await?; + /// Adds a new [Pipeline] to the [Collection] + /// + /// # Arguments + /// + /// * `pipeline` - The [Pipeline] to add. + /// + /// # Example + /// + /// ``` + /// use pgml::{Collection, Pipeline, Model, Splitter}; + /// + /// async fn example() -> anyhow::Result<()> { + /// let model = Model::new(None, None, None); + /// let splitter = Splitter::new(None, None); + /// let mut pipeline = Pipeline::new("my_pipeline", None, None, None); + /// let mut collection = Collection::new("my_collection", None); + /// collection.add_pipeline(&mut pipeline).await?; + /// Ok(()) + /// } + /// ``` + #[instrument(skip(self))] + pub async fn add_pipeline(&mut self, pipeline: &mut Pipeline) -> anyhow::Result<()> { + self.verify_in_database(false).await?; + pipeline.set_project_info(self.database_data.as_ref().unwrap().project_info.clone()); + let mp = MultiProgress::new(); + mp.println(format!("Added Pipeline {}, Now Syncing...", pipeline.name))?; + pipeline.execute(&None, mp).await?; + eprintln!("Done Syncing {}\n", pipeline.name); Ok(()) } - /// Upserts documents into the database + /// Removes a [Pipeline] from the [Collection] /// /// # Arguments /// - /// * `documents` - A vector of documents to upsert. - /// * `text_key` - The key in the document that contains the text. - /// * `id_key` - The key in the document that contains the id. + /// * `pipeline` - The [Pipeline] to remove. 
/// /// # Example /// /// ``` - /// use std::collections::HashMap; - /// use pgml::Database; - /// use pgml::types::Json; - /// - /// const CONNECTION_STRING: &str = "postgres://postgres@127.0.0.1:5433/pgml_development"; + /// use pgml::{Collection, Pipeline}; /// /// async fn example() -> anyhow::Result<()> { - /// let db = Database::new(CONNECTION_STRING).await?; - /// let collection = db.create_or_get_collection("collection number 1").await?; - /// let documents: Vec = vec![ - /// serde_json::json!( { - /// "id": 1, - /// "text": "This is a document" - /// }) - /// .into() - /// ]; - /// collection - /// .upsert_documents(documents, None, None) - /// .await?; + /// let mut pipeline = Pipeline::new("my_pipeline", None, None, None); + /// let mut collection = Collection::new("my_collection", None); + /// collection.remove_pipeline(&mut pipeline).await?; /// Ok(()) /// } /// ``` - pub async fn upsert_documents( - &self, - documents: Vec, - text_key: Option, - id_key: Option, - ) -> anyhow::Result<()> { - let text_key = text_key.unwrap_or("text".to_string()); - let id_key = id_key.unwrap_or("id".to_string()); + #[instrument(skip(self))] + pub async fn remove_pipeline(&mut self, pipeline: &mut Pipeline) -> anyhow::Result<()> { + let pool = get_or_initialize_pool(&self.database_url).await?; + self.verify_in_database(false).await?; + pipeline.set_project_info(self.database_data.as_ref().unwrap().project_info.clone()); + pipeline.verify_in_database(false).await?; - for mut document in documents { - let document = document - .0 - .as_object_mut() - .expect("Documents must be a vector of objects"); + let database_data = pipeline + .database_data + .as_ref() + .context("Pipeline must be verified to remove it")?; - let text = match document.remove(&text_key) { - Some(t) => t, - None => { - warn!("{} is not a key in document", text_key); - continue; - } - }; + let embeddings_table_name = format!("{}.{}_embeddings", self.name, pipeline.name); - let document_json = 
serde_json::to_value(&document)?; + let parameters = pipeline + .parameters + .as_ref() + .context("Pipeline must be verified to remove it")?; - let md5_digest = match document.get(&id_key) { - Some(k) => md5::compute(k.to_string().as_bytes()), - None => md5::compute(format!("{}{}", text, document_json).as_bytes()), - }; - let source_uuid = uuid::Uuid::from_slice(&md5_digest.0)?; + let mut transaction = pool.begin().await?; - sqlx::query(&query_builder!( - "INSERT INTO %s (text, source_uuid, metadata) VALUES ($1, $2, $3) ON CONFLICT (source_uuid) DO UPDATE SET text = $4, metadata = $5", - self.documents_table_name - )) - .bind(&text) - .bind(source_uuid) - .bind(&document_json) - .bind(&text) - .bind(&document_json) - .execute(self.pool.borrow()).await?; - } - Ok(()) - } + // Need to delete from chunks table only if no other pipelines use the same splitter + sqlx::query(&query_builder!( + "DELETE FROM %s WHERE splitter_id = $1 AND NOT EXISTS (SELECT 1 FROM %s WHERE splitter_id = $1 AND id != $2)", + self.chunks_table_name, + self.pipelines_table_name + )) + .bind(database_data.splitter_id) + .bind(database_data.id) + .execute(&pool) + .await?; - pub async fn generate_tsvectors(&self, configuration: Option) -> anyhow::Result<()> { - let (count,): (i64,) = sqlx::query_as(&query_builder!( - "SELECT count(*) FROM (SELECT 1 FROM %s LIMIT 1) AS t", - self.documents_table_name + // Drop the embeddings table + sqlx::query(&query_builder!( + "DROP TABLE IF EXISTS %s", + embeddings_table_name )) - .fetch_one(&self.pool) + .execute(&mut transaction) .await?; - if count == 0 { - anyhow::bail!("No documents in the documents table. 
Make sure to upsert documents before generating tsvectors") - } + // Need to delete from the tsvectors table only if no other pipelines use the + // same tsvector configuration + sqlx::query(&query_builder!( + "DELETE FROM %s WHERE configuration = $1 AND NOT EXISTS (SELECT 1 FROM %s WHERE parameters->'full_text_search'->>'configuration' = $1 AND id != $2)", + self.documents_tsvectors_table_name, + self.pipelines_table_name)) + .bind(parameters["full_text_search"]["configuration"].as_str()) + .bind(database_data.id) + .execute(&mut transaction) + .await?; - let configuration = configuration.unwrap_or("english".to_string()); sqlx::query(&query_builder!( - queries::GENERATE_TSVECTORS, - self.documents_tsvectors_table_name, - configuration, - configuration, - self.documents_table_name + "DELETE FROM %s WHERE id = $1", + self.pipelines_table_name )) - .execute(&self.pool) + .bind(database_data.id) + .execute(&mut transaction) .await?; + + transaction.commit().await?; Ok(()) } - /// Registers new text splitters + /// Enables a [Pipeline] on the [Collection] /// /// # Arguments /// - /// * `splitter_name` - The name of the text splitter. - /// * `splitter_params` - A [std::collections::HashMap] of parameters. + /// * `pipeline` - The [Pipeline] to remove. 
/// /// # Example /// /// ``` - /// use pgml::Database; - /// - /// const CONNECTION_STRING: &str = "postgres://postgres@127.0.0.1:5433/pgml_development"; + /// use pgml::{Collection, Pipeline}; /// /// async fn example() -> anyhow::Result<()> { - /// let db = Database::new(CONNECTION_STRING).await?; - /// let collection = db.create_or_get_collection("collection number 1").await?; - /// collection.register_text_splitter(None, None).await?; + /// let pipeline = Pipeline::new("my_pipeline", None, None, None); + /// let collection = Collection::new("my_collection", None); + /// collection.enable_pipeline(&pipeline).await?; /// Ok(()) /// } /// ``` - pub async fn register_text_splitter( - &self, - splitter_name: Option, - splitter_params: Option, - ) -> anyhow::Result { - let splitter_name = splitter_name.unwrap_or("recursive_character".to_string()); - let splitter_params = match splitter_params { - Some(params) => params.0, - None => serde_json::json!({}), - }; - - let current_splitter: Option = sqlx::query_as(&query_builder!( - "SELECT * from %s where name = $1 and parameters = $2;", - self.splitters_table_name - )) - .bind(&splitter_name) - .bind(&splitter_params) - .fetch_optional(self.pool.borrow()) - .await?; - - match current_splitter { - Some(splitter) => { - warn!( - "Text splitter with name: {} and parameters: {:?} already exists", - splitter_name, splitter_params - ); - Ok(splitter.id) - } - None => { - let splitter_id: (i64,) = sqlx::query_as(&query_builder!( - "INSERT INTO %s (name, parameters) VALUES ($1, $2) RETURNING id", - self.splitters_table_name - )) - .bind(splitter_name) - .bind(splitter_params) - .fetch_one(self.pool.borrow()) - .await?; - Ok(splitter_id.0) - } - } - } - - /// Gets all registered text [models::Splitter]s - pub async fn get_text_splitters(&self) -> anyhow::Result> { - let splitters: Vec = sqlx::query_as(&query_builder!( - "SELECT * from %s", - self.splitters_table_name + #[instrument(skip(self))] + pub async fn 
enable_pipeline(&self, pipeline: &Pipeline) -> anyhow::Result<()> { + sqlx::query(&query_builder!( + "UPDATE %s SET active = TRUE WHERE name = $1", + self.pipelines_table_name )) - .fetch_all(self.pool.borrow()) + .bind(&pipeline.name) + .execute(&get_or_initialize_pool(&self.database_url).await?) .await?; - Ok(splitters) + Ok(()) } - /// Generates chunks for the collection + /// Disables a [Pipeline] on the [Collection] /// /// # Arguments /// - /// * `splitter_id` - The id of the splitter to chunk with. + /// * `pipeline` - The [Pipeline] to remove. /// /// # Example /// /// ``` - /// use std::collections::HashMap; - /// use pgml::Database; - /// use pgml::types::Json; - /// - /// const CONNECTION_STRING: &str = "postgres://postgres@127.0.0.1:5433/pgml_development"; + /// use pgml::{Collection, Pipeline}; /// /// async fn example() -> anyhow::Result<()> { - /// let db = Database::new(CONNECTION_STRING).await?; - /// let collection = db.create_or_get_collection("collection number 1").await?; - /// let documents: Vec = vec![ - /// serde_json::json!( { - /// "id": 1, - /// "text": "This is a document" - /// }) - /// .into() - /// ]; - /// collection - /// .upsert_documents(documents, None, None) - /// .await?; - /// collection.generate_chunks(None).await?; + /// let pipeline = Pipeline::new("my_pipeline", None, None, None); + /// let collection = Collection::new("my_collection", None); + /// collection.disable_pipeline(&pipeline).await?; /// Ok(()) /// } /// ``` - pub async fn generate_chunks(&self, splitter_id: Option) -> anyhow::Result<()> { - let (count,): (i64,) = sqlx::query_as(&query_builder!( - "SELECT count(*) FROM (SELECT 1 FROM %s LIMIT 1) AS t", - self.documents_table_name + #[instrument(skip(self))] + pub async fn disable_pipeline(&self, pipeline: &Pipeline) -> anyhow::Result<()> { + sqlx::query(&query_builder!( + "UPDATE %s SET active = FALSE WHERE name = $1", + self.pipelines_table_name )) - .fetch_one(&self.pool) + .bind(&pipeline.name) + 
.execute(&get_or_initialize_pool(&self.database_url).await?) .await?; + Ok(()) + } - if count == 0 { - anyhow::bail!("No documents in the documents table. Make sure to upsert documents before generating chunks") - } + #[instrument(skip(self, conn))] + async fn create_documents_table(&mut self, conn: &mut PgConnection) -> anyhow::Result<()> { + conn.execute( + query_builder!(queries::CREATE_DOCUMENTS_TABLE, self.documents_table_name).as_str(), + ) + .await?; + conn.execute( + query_builder!( + queries::CREATE_INDEX, + "", + "created_at_index", + self.documents_table_name, + "created_at" + ) + .as_str(), + ) + .await?; + conn.execute( + query_builder!( + queries::CREATE_INDEX_USING_GIN, + "", + "metadata_index", + self.documents_table_name, + "metadata jsonb_path_ops" + ) + .as_str(), + ) + .await?; + Ok(()) + } - let splitter_id = splitter_id.unwrap_or(1); - sqlx::query(&query_builder!( - queries::GENERATE_CHUNKS, - self.splitters_table_name, - self.chunks_table_name, - self.documents_table_name, - self.chunks_table_name - )) - .bind(splitter_id) - .execute(self.pool.borrow()) + #[instrument(skip(self, conn))] + async fn create_chunks_table(&mut self, conn: &mut PgConnection) -> anyhow::Result<()> { + conn.execute( + query_builder!( + queries::CREATE_CHUNKS_TABLE, + self.chunks_table_name, + self.documents_table_name + ) + .as_str(), + ) + .await?; + conn.execute( + query_builder!( + queries::CREATE_INDEX, + "", + "created_at_index", + self.chunks_table_name, + "created_at" + ) + .as_str(), + ) + .await?; + conn.execute( + query_builder!( + queries::CREATE_INDEX, + "", + "document_id_index", + self.chunks_table_name, + "document_id" + ) + .as_str(), + ) + .await?; + conn.execute( + query_builder!( + queries::CREATE_INDEX, + "", + "splitter_id_index", + self.chunks_table_name, + "splitter_id" + ) + .as_str(), + ) + .await?; + Ok(()) + } + + #[instrument(skip(self, conn))] + async fn create_documents_tsvectors_table( + &mut self, + conn: &mut PgConnection, + ) -> 
anyhow::Result<()> { + conn.execute( + query_builder!( + queries::CREATE_DOCUMENTS_TSVECTORS_TABLE, + self.documents_tsvectors_table_name, + self.documents_table_name + ) + .as_str(), + ) + .await?; + conn.execute( + query_builder!( + queries::CREATE_INDEX, + "", + "configuration_index", + self.documents_tsvectors_table_name, + "configuration" + ) + .as_str(), + ) + .await?; + conn.execute( + query_builder!( + queries::CREATE_INDEX_USING_GIN, + "", + "tsvector_index", + self.documents_tsvectors_table_name, + "ts" + ) + .as_str(), + ) .await?; Ok(()) } - /// Registers new models for specific tasks + /// Upserts documents into the database /// /// # Arguments /// - /// * `task` - The name of the task. - /// * `model_name` - The name of the [models::Model]. - /// * `model_params` - A [std::collections::HashMap] of parameters. + /// * `documents` - A vector of documents to upsert + /// * `strict` - Whether to throw an error if keys: `id` or `text` are missing from any documents /// /// # Example /// /// ``` - /// use pgml::Database; - /// - /// const CONNECTION_STRING: &str = "postgres://postgres@127.0.0.1:5433/pgml_development"; + /// use pgml::Collection; /// /// async fn example() -> anyhow::Result<()> { - /// let db = Database::new(CONNECTION_STRING).await?; - /// let collection = db.create_or_get_collection("collection number 1").await?; - /// collection.register_model(None, None, None).await?; + /// let mut collection = Collection::new("my_collection", None); + /// let documents = vec![ + /// serde_json::json!({"id": 1, "text": "hello world"}).into(), + /// serde_json::json!({"id": 2, "text": "hello world"}).into(), + /// ]; + /// collection.upsert_documents(documents, Some(true)).await?; /// Ok(()) /// } /// ``` - pub async fn register_model( - &self, - task: Option, - model_name: Option, - model_params: Option, - ) -> anyhow::Result { - let task = task.unwrap_or("embedding".to_string()); - let model_name = model_name.unwrap_or("intfloat/e5-small".to_string()); 
- let model_params = match model_params { - Some(params) => params.0, - None => serde_json::json!({}), - }; + #[instrument(skip(self, documents))] + pub async fn upsert_documents( + &mut self, + documents: Vec, + strict: Option, + ) -> anyhow::Result<()> { + let pool = get_or_initialize_pool(&self.database_url).await?; + self.verify_in_database(false).await?; - let current_model: Option = sqlx::query_as(&query_builder!( - "SELECT * from %s where task = $1 and name = $2 and parameters = $3;", - self.models_table_name - )) - .bind(&task) - .bind(&model_name) - .bind(&model_params) - .fetch_optional(self.pool.borrow()) - .await?; + let strict = strict.unwrap_or(true); - match current_model { - Some(model) => { - warn!( - "Model with name: {} and parameters: {:?} already exists", - model_name, model_params - ); - Ok(model.id) - } - None => { - let id: (i64,) = sqlx::query_as(&query_builder!( - "INSERT INTO %s (task, name, parameters) VALUES ($1, $2, $3) RETURNING id", - self.models_table_name - )) - .bind(task) - .bind(model_name) - .bind(model_params) - .fetch_one(self.pool.borrow()) - .await?; - Ok(id.0) - } - } - } + let progress_bar = utils::default_progress_bar(documents.len() as u64); + progress_bar.println("Upserting Documents..."); - /// Gets all registered [models::Model]s - pub async fn get_models(&self) -> anyhow::Result> { - Ok( - sqlx::query_as(&query_builder!("SELECT * from %s", self.models_table_name)) - .fetch_all(self.pool.borrow()) - .await?, - ) - } + let documents: anyhow::Result> = documents.into_iter().map(|mut document| { + let document = document + .as_object_mut() + .expect("Documents must be a vector of objects"); + let text = match document.remove("text") { + Some(t) => t, + None => { + if strict { + anyhow::bail!("`text` is not a key in document, throwing error. To supress this error, pass strict: false"); + } else { + warn!("`text` is not a key in document, skipping document. 
To throw an error instead, pass strict: true"); + } + return Ok(None) + } + }; + let text = text.as_str().context("`text` must be a string")?.to_string(); - async fn create_or_get_embeddings_table( - &self, - model_id: i64, - splitter_id: i64, - ) -> anyhow::Result { - let pool = self.pool.borrow(); - - let table_name = self.get_embeddings_table_name(model_id, splitter_id)?; - let exists: Option<(String,)> = sqlx::query_as(&query_builder!( - "SELECT table_name from %s WHERE table_name = $1", - self.transforms_table_name - )) - .bind(&table_name) - .fetch_optional(self.pool.borrow()) - .await?; + // We don't want the text included in the document metadata, but everything else + // should be in there + let metadata = serde_json::to_value(&document)?.into(); - match exists { - Some(_e) => Ok(table_name), - None => { - let embedding: (Vec,) = sqlx::query_as(&query_builder!( - "WITH model as (SELECT name, parameters from %s where id = $1) SELECT embedding from pgml.embed(transformer => (SELECT name FROM model), text => 'Hello, World!', kwargs => (SELECT parameters FROM model)) as embedding", - self.models_table_name)) - .bind(model_id) - .fetch_one(pool).await?; - let embedding = embedding.0; - let embedding_length = embedding.len() as i64; - pool.execute( - query_builder!( - queries::CREATE_EMBEDDINGS_TABLE, - table_name, - self.chunks_table_name, - embedding_length - ) - .as_str(), - ) - .await?; - sqlx::query(&query_builder!( - "INSERT INTO %s (table_name, task, model_id, splitter_id) VALUES ($1, 'embedding', $2, $3)", - self.transforms_table_name)) - .bind(&table_name) - .bind(model_id) - .bind(splitter_id) - .execute(pool).await?; - pool.execute( - query_builder!( - queries::CREATE_INDEX, - "created_at_index", - table_name, - "created_at" - ) - .as_str(), - ) - .await?; - pool.execute( - query_builder!( - queries::CREATE_INDEX, - "chunk_id_index", - table_name, - "chunk_id" - ) - .as_str(), - ) - .await?; - pool.execute( - query_builder!( - 
queries::CREATE_INDEX_USING_IVFFLAT, - "vector_index", - table_name, - "embedding vector_cosine_ops" - ) - .as_str(), - ) - .await?; - Ok(table_name) + let md5_digest = match document.get("id") { + Some(k) => md5::compute(k.to_string().as_bytes()), + None => { + if strict { + anyhow::bail!("`id` is not a key in document, throwing error. To supress this error, pass strict: false"); + } else { + warn!("`id` is not a key in document, skipping document. To throw an error instead, pass strict: true"); + } + return Ok(None) + } + }; + let source_uuid = uuid::Uuid::from_slice(&md5_digest.0)?; + + Ok(Some((source_uuid, text, metadata))) + }).collect(); + + // We could continue chaining the above iterators but types become super annoying to + // deal with, especially because we are dealing with async functions. This is much easier to read + // Also, we may want to use a variant of chunks that is owned, I'm not 100% sure of what + // cloning happens when passing values into sqlx bind. itertools variants will not work as + // it is not thread safe and pyo3 will get upset + let mut document_ids = Vec::new(); + for chunk in documents?.chunks(10) { + // We want the length before we filter out any None values + let chunk_len = chunk.len(); + // Filter out the None values + let chunk: Vec<&(uuid::Uuid, String, Json)> = + chunk.iter().filter_map(|x| x.as_ref()).collect(); + + // Make sure we didn't filter everything out + if chunk.is_empty() { + progress_bar.inc(chunk_len as u64); + continue; + } + + let mut transaction = pool.begin().await?; + // First delete any documents that already have the same UUID then insert the new ones. + // We are essentially upserting in two steps + sqlx::query(&query_builder!( + "DELETE FROM %s WHERE source_uuid IN (SELECT source_uuid FROM %s WHERE source_uuid = ANY($1::uuid[]))", + self.documents_table_name, + self.documents_table_name + )). + bind(&chunk.iter().map(|(source_uuid, _, _)| *source_uuid).collect::>()). 
+ execute(&mut transaction).await?; + let query_string_values = (0..chunk.len()) + .map(|i| format!("(${}, ${}, ${})", i * 3 + 1, i * 3 + 2, i * 3 + 3)) + .collect::>() + .join(","); + let query_string = format!( + "INSERT INTO %s (source_uuid, text, metadata) VALUES {} ON CONFLICT (source_uuid) DO UPDATE SET text = $2, metadata = $3 RETURNING id", + query_string_values + ); + let query = query_builder!(query_string, self.documents_table_name); + let mut query = sqlx::query_scalar(&query); + for (source_uuid, text, metadata) in chunk.into_iter() { + query = query.bind(source_uuid).bind(text).bind(metadata); } + + let ids: Vec = query.fetch_all(&mut transaction).await?; + document_ids.extend(ids); + progress_bar.inc(chunk_len as u64); + transaction.commit().await?; } + progress_bar.finish(); + eprintln!("Done Upserting Documents\n"); + + self.sync_pipelines(Some(document_ids)).await?; + Ok(()) } - /// Generates embeddings for the collection + /// Gets the documents on a [Collection] /// /// # Arguments /// - /// * `model_id` - The id of the model. - /// * `splitter_id` - The id of the splitter. + /// * `last_id` - The last id of the document to get. If none, starts at 0 + /// * `limit` - The number of documents to get. 
If none, gets 100 /// /// # Example /// /// ``` - /// use std::collections::HashMap; - /// use pgml::Database; - /// use pgml::types::Json; - /// - /// const CONNECTION_STRING: &str = "postgres://postgres@127.0.0.1:5433/pgml_development"; + /// use pgml::Collection; /// /// async fn example() -> anyhow::Result<()> { - /// let db = Database::new(CONNECTION_STRING).await?; - /// let collection = db.create_or_get_collection("collection number 1").await?; - /// let documents: Vec = vec![ - /// serde_json::json!( { - /// "id": 1, - /// "text": "This is a document" - /// }) - /// .into() - /// ]; - /// collection - /// .upsert_documents(documents, None, None) - /// .await?; - /// collection.generate_chunks(None).await?; - /// collection.generate_embeddings(None, None).await?; - /// Ok(()) + /// let mut collection = Collection::new("my_collection", None); + /// let documents = collection.get_documents(None, None).await?; + /// Ok(()) /// } - /// ``` - pub async fn generate_embeddings( - &self, - model_id: Option, - splitter_id: Option, - ) -> anyhow::Result<()> { - let model_id = model_id.unwrap_or(1); - let splitter_id = splitter_id.unwrap_or(1); - - let (count,): (i64,) = sqlx::query_as(&query_builder!( - "SELECT count(*) FROM (SELECT 1 FROM %s WHERE splitter_id = $1 LIMIT 1) AS t", - self.chunks_table_name + #[instrument(skip(self))] + pub async fn get_documents( + &mut self, + last_id: Option, + limit: Option, + ) -> anyhow::Result> { + let pool = get_or_initialize_pool(&self.database_url).await?; + let documents: Vec = sqlx::query_as(&query_builder!( + "SELECT * FROM %s WHERE id > $1 ORDER BY id ASC LIMIT $2", + self.documents_table_name )) - .bind(splitter_id) - .fetch_one(&self.pool) + .bind(last_id.unwrap_or(0)) + .bind(limit.unwrap_or(100)) + .fetch_all(&pool) .await?; + documents + .into_iter() + .map(|d| { + serde_json::to_value(d) + .map(|t| t.into()) + .map_err(|e| anyhow::anyhow!(e)) + }) + .collect() + } - if count == 0 { - anyhow::bail!("No chunks in the 
chunks table with the associated splitter_id. Make sure to generate chunks with the correct splitter_id before generating embeddings") + #[instrument(skip(self))] + pub async fn sync_pipelines(&mut self, document_ids: Option>) -> anyhow::Result<()> { + self.verify_in_database(false).await?; + let pipelines = self.get_pipelines().await?; + if !pipelines.is_empty() { + let mp = MultiProgress::new(); + mp.println("Syncing Pipelines...")?; + use futures::stream::StreamExt; + futures::stream::iter(pipelines) + // Need this map to get around moving the document_ids and mp + .map(|pipeline| (pipeline, document_ids.clone(), mp.clone())) + .for_each_concurrent(10, |(mut pipeline, document_ids, mp)| async move { + pipeline + .execute(&document_ids, mp) + .await + .expect("Failed to execute pipeline"); + }) + .await; + // pipelines.into_iter().for_each + // for mut pipeline in pipelines { + // pipeline.execute(&document_ids, mp.clone()).await?; + // } + eprintln!("Done Syncing Pipelines\n"); } - - let embeddings_table_name = self - .create_or_get_embeddings_table(model_id, splitter_id) - .await?; - - sqlx::query(&query_builder!( - queries::GENERATE_EMBEDDINGS, - self.models_table_name, - embeddings_table_name, - self.chunks_table_name, - embeddings_table_name - )) - .bind(model_id) - .bind(splitter_id) - .execute(self.pool.borrow()) - .await?; - Ok(()) } @@ -787,125 +730,334 @@ impl Collection { /// /// # Arguments /// - /// * `query` - The query to search for. - /// * `query_params` - A [std::collections::HashMap] of parameters for the model used in the - /// query. + /// * `query` - The query to search for + /// * `pipeline` - The [Pipeline] used for the search + /// * `query_paramaters` - The query parameters passed to the model for search /// * `top_k` - How many results to limit on. - /// * `model_id` - The id of the [models::Model] to use. - /// * `splitter_id` - The id of the [models::Splitter] to use. 
/// /// # Example /// /// ``` - /// use std::collections::HashMap; - /// use pgml::Database; - /// use pgml::types::Json; - /// - /// const CONNECTION_STRING: &str = "postgres://postgres@127.0.0.1:5433/pgml_development"; + /// use pgml::{Collection, Pipeline}; /// /// async fn example() -> anyhow::Result<()> { - /// let db = Database::new(CONNECTION_STRING).await?; - /// let collection = db.create_or_get_collection("collection number 1").await?; - /// let documents: Vec = vec![ - /// serde_json::json!( { - /// "id": 1, - /// "text": "This is a document" - /// }) - /// .into() - /// ]; - /// collection - /// .upsert_documents(documents, None, None) - /// .await?; - /// collection.generate_chunks(None).await?; - /// collection.generate_embeddings(None, None).await?; - /// let results = collection - /// .vector_search("Here is a test", None, None, None, None) - /// .await - /// .unwrap(); - /// println!("The results are: {:?}", results); - /// Ok(()) + /// let mut collection = Collection::new("my_collection", None); + /// let mut pipeline = Pipeline::new("my_pipeline", None, None, None); + /// let results = collection.vector_search("Query", &mut pipeline, None, None).await?; + /// Ok(()) /// } /// ``` + #[instrument(skip(self))] #[allow(clippy::type_complexity)] pub async fn vector_search( - &self, + &mut self, query: &str, - query_params: Option, + pipeline: &mut Pipeline, + query_parameters: Option, top_k: Option, - model_id: Option, - splitter_id: Option, ) -> anyhow::Result> { - let query_params = match query_params { - Some(params) => params.0, - None => serde_json::json!({}), - }; + let pool = get_or_initialize_pool(&self.database_url).await?; + + let query_parameters = query_parameters.unwrap_or_default(); let top_k = top_k.unwrap_or(5); - let model_id = model_id.unwrap_or(1); - let splitter_id = splitter_id.unwrap_or(1); - let embeddings_table_name = self.get_embeddings_table_name(model_id, splitter_id)?; + // With this system, we only do the wrong type of 
vector search once + let runtime = if pipeline.model.is_some() { + pipeline.model.as_ref().unwrap().runtime + } else { + ModelRuntime::Python + }; + match runtime { + ModelRuntime::Python => { + let embeddings_table_name = format!("{}.{}_embeddings", self.name, pipeline.name); + + let result = sqlx::query_as(&query_builder!( + queries::EMBED_AND_VECTOR_SEARCH, + self.pipelines_table_name, + embeddings_table_name, + embeddings_table_name, + self.chunks_table_name, + self.documents_table_name + )) + .bind(&pipeline.name) + .bind(query) + .bind(&query_parameters) + .bind(top_k) + .fetch_all(&pool) + .await; + + match result { + Ok(r) => Ok(r), + Err(e) => match e.as_database_error() { + Some(d) => { + if d.code() == Some(Cow::from("XX000")) { + self.vector_search_with_remote_embeddings( + query, + pipeline, + query_parameters, + top_k, + &pool, + ) + .await + } else { + Err(anyhow::anyhow!(e)) + } + } + None => Err(anyhow::anyhow!(e)), + }, + } + } + _ => { + self.vector_search_with_remote_embeddings( + query, + pipeline, + query_parameters, + top_k, + &pool, + ) + .await + } + } + } + + #[instrument(skip(self, pool))] + #[allow(clippy::type_complexity)] + async fn vector_search_with_remote_embeddings( + &mut self, + query: &str, + pipeline: &mut Pipeline, + query_parameters: Json, + top_k: i64, + pool: &PgPool, + ) -> anyhow::Result> { + self.verify_in_database(false).await?; + + // Have to set the project info before we can get and set the model + pipeline.set_project_info( + self.database_data + .as_ref() + .context( + "Collection must be verified to perform vector search with remote embeddings", + )? 
+ .project_info + .clone(), + ); + // Verify to get and set the model if we don't have it set on the pipeline yet + pipeline.verify_in_database(false).await?; + let model = pipeline + .model + .as_ref() + .context("Pipeline must be verified to perform vector search with remote embeddings")?; + + // We need to make sure we are not mutably and immutably borrowing the same things + let embedding = { + let remote_embeddings = + build_remote_embeddings(model.runtime, &model.name, &query_parameters)?; + let mut embeddings = remote_embeddings.embed(vec![query.to_string()]).await?; + std::mem::take(&mut embeddings[0]) + }; - let results: Vec<(f64, String, Json)> = sqlx::query_as(&query_builder!( + let embeddings_table_name = format!("{}.{}_embeddings", self.name, pipeline.name); + sqlx::query_as(&query_builder!( queries::VECTOR_SEARCH, - self.models_table_name, embeddings_table_name, embeddings_table_name, self.chunks_table_name, self.documents_table_name )) - .bind(query) - .bind(query_params) - .bind(model_id) + .bind(embedding) .bind(top_k) - .fetch_all(self.pool.borrow()) + .fetch_all(pool) + .await + .map_err(|e| anyhow::anyhow!(e)) + } + + #[instrument(skip(self))] + pub async fn archive(&mut self) -> anyhow::Result<()> { + let pool = get_or_initialize_pool(&self.database_url).await?; + let timestamp = SystemTime::now() + .duration_since(SystemTime::UNIX_EPOCH) + .expect("Error getting system time") + .as_secs(); + let archive_table_name = format!("{}_archive_{}", &self.name, timestamp); + let mut transaciton = pool.begin().await?; + sqlx::query("UPDATE pgml.collections SET name = $1, active = FALSE where name = $2") + .bind(&archive_table_name) + .bind(&self.name) + .execute(&mut transaciton) + .await?; + sqlx::query(&query_builder!( + "ALTER SCHEMA %s RENAME TO %s", + &self.name, + archive_table_name + )) + .execute(&mut transaciton) .await?; - Ok(results) + transaciton.commit().await?; + Ok(()) } + #[instrument(skip(self))] pub fn query(&self) -> QueryBuilder { 
QueryBuilder::new(self.clone()) } - // We will probably want to add a task parameter to this function - pub fn get_embeddings_table_name( - &self, - model_id: i64, - splitter_id: i64, - ) -> anyhow::Result { - let model_splitter_hash = md5::compute(format!("{}_{}", model_id, splitter_id).as_bytes()); - Ok(format!( - "{}.embeddings_{}", - self.name, - &uuid::Uuid::from_slice(&model_splitter_hash.0)? - )) + /// Gets all pipelines for the [Collection] + /// + /// # Example + /// + /// ``` + /// use pgml::Collection; + /// + /// async fn example() -> anyhow::Result<()> { + /// let mut collection = Collection::new("my_collection", None); + /// let pipelines = collection.get_pipelines().await?; + /// Ok(()) + /// } + /// ``` + #[instrument(skip(self))] + pub async fn get_pipelines(&mut self) -> anyhow::Result> { + self.verify_in_database(false).await?; + let pool = get_or_initialize_pool(&self.database_url).await?; + + let pipelines_with_models_and_splitters: Vec = + sqlx::query_as(&query_builder!( + r#"SELECT + p.id as pipeline_id, + p.name as pipeline_name, + p.created_at as pipeline_created_at, + p.active as pipeline_active, + p.parameters as pipeline_parameters, + m.id as model_id, + m.created_at as model_created_at, + m.runtime::TEXT as model_runtime, + m.hyperparams as model_hyperparams, + s.id as splitter_id, + s.created_at as splitter_created_at, + s.name as splitter_name, + s.parameters as splitter_parameters + FROM + %s p + INNER JOIN pgml.models m ON p.model_id = m.id + INNER JOIN pgml.sdk_splitters s ON p.splitter_id = s.id + WHERE + p.active = TRUE + "#, + self.pipelines_table_name + )) + .fetch_all(&pool) + .await?; + + let pipelines: Vec = pipelines_with_models_and_splitters + .into_iter() + .map(|p| { + let mut pipeline: Pipeline = p.into(); + pipeline.set_project_info( + self.database_data + .as_ref() + .expect("Collection must be verified to get all pipelines") + .project_info + .clone(), + ); + pipeline + }) + .collect(); + Ok(pipelines) } - pub fn 
from_model_and_pool(model: models::Collection, pool: PgPool) -> Self { - let ( - documents_table_name, - splitters_table_name, - models_table_name, - transforms_table_name, - chunks_table_name, - documents_tsvectors_table_name, - ) = Self::generate_table_names(&model.name); - Self { - name: model.name, - documents_table_name, - splitters_table_name, - models_table_name, - transforms_table_name, - chunks_table_name, - documents_tsvectors_table_name, - pool, - } + /// Gets a [Pipeline] by name + /// + /// # Example + /// + /// ``` + /// use pgml::Collection; + /// + /// async fn example() -> anyhow::Result<()> { + /// let mut collection = Collection::new("my_collection", None); + /// let pipeline = collection.get_pipeline("my_pipeline").await?; + /// Ok(()) + /// } + /// ``` + #[instrument(skip(self))] + pub async fn get_pipeline(&mut self, name: &str) -> anyhow::Result { + self.verify_in_database(false).await?; + let pool = get_or_initialize_pool(&self.database_url).await?; + + let pipeline_with_model_and_splitter: models::PipelineWithModelAndSplitter = + sqlx::query_as(&query_builder!( + r#"SELECT + p.id as pipeline_id, + p.name as pipeline_name, + p.created_at as pipeline_created_at, + p.active as pipeline_active, + p.parameters as pipeline_parameters, + m.id as model_id, + m.created_at as model_created_at, + m.runtime::TEXT as model_runtime, + m.hyperparams as model_hyperparams, + s.id as splitter_id, + s.created_at as splitter_created_at, + s.name as splitter_name, + s.parameters as splitter_parameters + FROM + %s p + INNER JOIN pgml.models m ON p.model_id = m.id + INNER JOIN pgml.sdk_splitters s ON p.splitter_id = s.id + WHERE + p.active = TRUE + AND p.name = $1 + "#, + self.pipelines_table_name + )) + .bind(name) + .fetch_one(&pool) + .await?; + + let mut pipeline: Pipeline = pipeline_with_model_and_splitter.into(); + pipeline.set_project_info(self.database_data.as_ref().unwrap().project_info.clone()); + Ok(pipeline) + } + + #[instrument(skip(self))] + 
pub(crate) async fn get_project_info(&mut self) -> anyhow::Result { + self.verify_in_database(false).await?; + Ok(self + .database_data + .as_ref() + .context("Collection must be verified to get project info")? + .project_info + .clone()) } - fn generate_table_names(name: &str) -> (String, String, String, String, String, String) { + /// Check if the [Collection] exists in the database + /// + /// # Example + /// + /// ``` + /// use pgml::Collection; + /// + /// async fn example() -> anyhow::Result<()> { + /// let collection = Collection::new("my_collection", None); + /// let exists = collection.exists().await?; + /// Ok(()) + /// } + /// ``` + #[instrument(skip(self))] + pub async fn exists(&self) -> anyhow::Result { + let pool = get_or_initialize_pool(&self.database_url).await?; + let collection: Option = sqlx::query_as::<_, models::Collection>( + "SELECT * FROM pgml.collections WHERE name = $1 AND active = TRUE;", + ) + .bind(&self.name) + .fetch_optional(&pool) + .await?; + Ok(collection.is_some()) + } + + fn generate_table_names(name: &str) -> (String, String, String, String, String) { [ + ".pipelines", ".documents", - ".splitters", - ".models", ".transforms", ".chunks", ".documents_tsvectors", @@ -916,3 +1068,45 @@ impl Collection { .unwrap() } } + +#[cfg(test)] +mod tests { + use super::*; + use crate::init_logger; + + #[sqlx::test] + async fn can_upsert_documents() -> anyhow::Result<()> { + init_logger(None, None).ok(); + let mut collection = Collection::new("test_r_c_cud_2", None); + + // Test basic upsert + let documents = vec![ + serde_json::json!({"id": 1, "text": "hello world"}).into(), + serde_json::json!({"text": "hello world"}).into(), + ]; + collection + .upsert_documents(documents.clone(), Some(false)) + .await?; + let document = &collection.get_documents(None, Some(1)).await?[0]; + assert_eq!(document["text"], "hello world"); + + // Test strictness + assert!(collection + .upsert_documents(documents, Some(true)) + .await + .is_err()); + + // Test 
upsert + let documents = vec![ + serde_json::json!({"id": 1, "text": "hello world 2"}).into(), + serde_json::json!({"text": "hello world"}).into(), + ]; + collection + .upsert_documents(documents.clone(), Some(false)) + .await?; + let document = &collection.get_documents(None, Some(1)).await?[0]; + assert_eq!(document["text"], "hello world 2"); + collection.archive().await?; + Ok(()) + } +} diff --git a/pgml-sdks/rust/pgml/src/database.rs b/pgml-sdks/rust/pgml/src/database.rs deleted file mode 100644 index ed8ec17a2..000000000 --- a/pgml-sdks/rust/pgml/src/database.rs +++ /dev/null @@ -1,237 +0,0 @@ -use pgml_macros::{custom_derive, custom_methods}; -use sqlx::postgres::{PgConnectOptions, PgPool, PgPoolOptions}; -use sqlx::Row; -use std::borrow::Borrow; -use std::str::FromStr; -use std::time::SystemTime; - -use crate::collection::*; -use crate::models; -use crate::queries; -use crate::query_builder; -use crate::query_runner::QueryRunner; - -#[cfg(feature = "javascript")] -use crate::languages::javascript::*; -use crate::types::Json; - -/// A connection to a postgres database -#[derive(custom_derive, Clone, Debug)] -pub struct Database { - pub pool: PgPool, -} - -#[custom_methods( - new, - create_or_get_collection, - does_collection_exist, - archive_collection, - query, - transform -)] -impl Database { - /// Create a new [Database] - /// - /// # Arguments - /// - /// * `connection_string` - A postgres connection string, e.g. 
`postgres://user:pass@localhost:5432/db` - /// - /// # Example - /// ``` - /// use pgml::Database; - /// - /// const CONNECTION_STRING: &str = "postgres://postgres@127.0.0.1:5433/pgml_development"; - /// - /// async fn example() -> anyhow::Result<()> { - /// let db = Database::new(CONNECTION_STRING).await?; - /// // Do stuff with the database - /// Ok(()) - /// } - /// ``` - pub async fn new(connection_string: &str) -> anyhow::Result { - let connection_options = PgConnectOptions::from_str(connection_string)?; - let connection_options = connection_options.statement_cache_capacity(0); - let pool = PgPoolOptions::new() - .max_connections(5) - .connect_with(connection_options) - .await?; - sqlx::query(queries::CREATE_COLLECTIONS_TABLE) - .execute(pool.borrow()) - .await?; - let pool = pool; - Ok(Self { pool }) - } - - /// Create a new [Collection] or get an existing one - /// - /// # Arguments - /// - /// * `name` - The name of the [Collection] - /// - /// # Example - ///``` - /// use pgml::Database; - /// - /// const CONNECTION_STRING: &str = "postgres://postgres@127.0.0.1:5433/pgml_development"; - /// - /// async fn example() -> anyhow::Result<()> { - /// let db = Database::new(CONNECTION_STRING).await?; - /// let collection = db.create_or_get_collection("collection number 1").await?; - /// // Do stuff with the collection - /// Ok(()) - /// } - /// ``` - pub async fn create_or_get_collection(&self, name: &str) -> anyhow::Result { - let collection: Option = sqlx::query_as::<_, models::Collection>( - "SELECT * FROM pgml.collections WHERE name = $1 AND active = TRUE;", - ) - .bind(name) - .fetch_optional(self.pool.borrow()) - .await?; - match collection { - Some(c) => Ok(Collection::from_model_and_pool(c, self.pool.clone())), - None => Ok(Collection::new(name.to_string(), self.pool.clone()).await?), - } - } - - /// Check if a [Collection] exists - /// - /// # Arguments - /// - /// * `name` - The name of the [Collection] - /// - /// # Example - /// ``` - /// use 
pgml::Database; - /// - /// const CONNECTION_STRING: &str = "postgres://postgres@localhost:5432/pgml_development"; - /// - /// async fn example() -> anyhow::Result<()> { - /// let db = Database::new(CONNECTION_STRING).await?; - /// let collection_exists = db.does_collection_exist("collection number 1").await?; - /// // Do stuff with your new found information - /// Ok(()) - /// } - /// ``` - pub async fn does_collection_exist(&self, name: &str) -> anyhow::Result { - let collection: Option = sqlx::query_as::<_, models::Collection>( - "SELECT * FROM pgml.collections WHERE name = $1 AND active = TRUE;", - ) - .bind(name) - .fetch_optional(self.pool.borrow()) - .await?; - Ok(collection.is_some()) - } - - /// Archive a [Collection] - /// - /// # Arguments - /// - /// * `name` - The name of the [Collection] - /// - /// # Example - ///``` - /// use pgml::Database; - /// - /// const CONNECTION_STRING: &str = "postgres://postgres@127.0.0.1:5433/pgml_development"; - /// - /// async fn example() -> anyhow::Result<()> { - /// let db = Database::new(CONNECTION_STRING).await?; - /// db.archive_collection("collection number 1").await?; - /// Ok(()) - /// } - /// ``` - pub async fn archive_collection(&self, name: &str) -> anyhow::Result<()> { - let timestamp = SystemTime::now() - .duration_since(SystemTime::UNIX_EPOCH) - .expect("Error getting system time") - .as_secs(); - let archive_table_name = format!("{}_archive_{}", name, timestamp); - sqlx::query("UPDATE pgml.collections SET name = $1, active = FALSE where name = $2") - .bind(&archive_table_name) - .bind(name) - .execute(self.pool.borrow()) - .await?; - sqlx::query(&query_builder!( - "ALTER SCHEMA %s RENAME TO %s", - name, - archive_table_name - )) - .execute(self.pool.borrow()) - .await?; - Ok(()) - } - - /// Run an arbitrary query - /// - /// # Arguments - /// - /// * `query` - The query to run - /// - /// # Example - ///``` - /// use pgml::Database; - /// - /// const CONNECTION_STRING: &str = 
"postgres://postgres@localhost:5432/pgml_development"; - /// - /// async fn example() -> anyhow::Result<()> { - /// let db = Database::new(CONNECTION_STRING).await?; - /// let query = "SELECT * FROM pgml.collections"; - /// let results = db.query(query).fetch_all().await?; - /// Ok(()) - /// } - ///``` - pub fn query(&self, query: &str) -> QueryRunner { - QueryRunner::new(query, self.pool.clone()) - } - - // Run the builtin transform function - // - // # Arguments - // - // * `task` - The task to run - // * `inputs` - The inputs to the model - // * `args` - The arguments to the model - // - // # Example - // ``` - // use pgml::Database; - // - // const CONNECTION_STRING: &str = "postgres://postgres@localhost:5432/pgml_development"; - // - // async fn example() -> anyhow::Result<()> { - // let db = Database::new(CONNECTION_STRING).await?; - // let task = Json::from(serde_json::json!("translation_en_to_fr")); - // let inputs = vec![ - // "test1".to_string(), - // "test2".to_string(), - // ]; - // let results = db.transform(task, inputs, None).await?; - // Ok(()) - //} - // ``` - pub async fn transform( - &self, - task: Json, - inputs: Vec, - args: Option, - ) -> anyhow::Result { - let args = match args { - Some(a) => a.0, - None => serde_json::json!({}), - }; - let query = sqlx::query("SELECT pgml.transform(task => $1, inputs => $2, args => $3)"); - let query = if task.0.is_string() { - query.bind(task.0.as_str()) - } else { - query.bind(task.0) - }; - let results = query - .bind(inputs) - .bind(args) - .fetch_all(self.pool.borrow()) - .await?; - let results = results.get(0).unwrap().get::(0); - Ok(Json(results)) - } -} diff --git a/pgml-sdks/rust/pgml/src/filter_builder.rs b/pgml-sdks/rust/pgml/src/filter_builder.rs index 914aee5c3..a156405e5 100644 --- a/pgml-sdks/rust/pgml/src/filter_builder.rs +++ b/pgml-sdks/rust/pgml/src/filter_builder.rs @@ -36,10 +36,7 @@ fn serde_value_to_sea_query_value(value: &serde_json::Value) -> sea_query::Value let value = 
value.as_array().unwrap(); let ty = get_sea_query_array_type(&value[0]); let value = Some(Box::new( - value - .iter() - .map(serde_value_to_sea_query_value) - .collect(), + value.iter().map(serde_value_to_sea_query_value).collect(), )); sea_query::Value::Array(ty, value) } else if value.is_object() { diff --git a/pgml-sdks/rust/pgml/src/languages/javascript.rs b/pgml-sdks/rust/pgml/src/languages/javascript.rs index 16b33005b..edd7324d8 100644 --- a/pgml-sdks/rust/pgml/src/languages/javascript.rs +++ b/pgml-sdks/rust/pgml/src/languages/javascript.rs @@ -1,6 +1,9 @@ use neon::prelude::*; -use crate::types::{DateTime, Json}; +use crate::{ + pipeline::PipelineSyncData, + types::{DateTime, Json}, +}; //////////////////////////////////////////////////////////////////////////////// // Rust to JS ////////////////////////////////////////////////////////////////// @@ -143,6 +146,16 @@ impl IntoJsResult for Json { } } +impl IntoJsResult for PipelineSyncData { + type Output = JsValue; + fn into_js_result<'a, 'b, 'c: 'b, C: Context<'c>>( + self, + cx: &mut C, + ) -> JsResult<'b, Self::Output> { + Json::from(self).into_js_result(cx) + } +} + impl IntoJsResult for std::collections::HashMap { type Output = JsObject; fn into_js_result<'a, 'b, 'c: 'b, C: Context<'c>>( @@ -171,20 +184,6 @@ impl IntoJsResult for Vec { } } -// Our own types -// gen_into!( -// crate::database::Database, -// JsBox>, -// RefCell -// ); -// impl Finalize for crate::database::Database {} -// gen_into!( -// crate::collection::Collection, -// JsBox>, -// RefCell -// ); -// impl Finalize for crate::collection::Collection {} - //////////////////////////////////////////////////////////////////////////////// // JS To Rust ////////////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////////////////////////// diff --git a/pgml-sdks/rust/pgml/src/languages/mod.rs b/pgml-sdks/rust/pgml/src/languages/mod.rs index dda671ec1..a40f3de87 100644 --- 
a/pgml-sdks/rust/pgml/src/languages/mod.rs +++ b/pgml-sdks/rust/pgml/src/languages/mod.rs @@ -3,3 +3,4 @@ pub mod javascript; #[cfg(feature = "python")] pub mod python; + diff --git a/pgml-sdks/rust/pgml/src/languages/python.rs b/pgml-sdks/rust/pgml/src/languages/python.rs index 6dd9fcba6..5b74c46ae 100644 --- a/pgml-sdks/rust/pgml/src/languages/python.rs +++ b/pgml-sdks/rust/pgml/src/languages/python.rs @@ -1,17 +1,13 @@ use pyo3::conversion::IntoPy; use pyo3::types::{PyDict, PyFloat, PyInt, PyList, PyString}; use pyo3::{prelude::*, types::PyBool}; +use std::collections::HashMap; -use crate::types::{DateTime, Json}; +use crate::{pipeline::PipelineSyncData, types::Json}; //////////////////////////////////////////////////////////////////////////////// // Rust to PY ////////////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////////////////////////// -impl ToPyObject for DateTime { - fn to_object(&self, py: Python) -> PyObject { - self.0.timestamp().to_object(py) - } -} impl ToPyObject for Json { fn to_object(&self, py: Python) -> PyObject { @@ -54,9 +50,22 @@ impl IntoPy for Json { } } +impl ToPyObject for PipelineSyncData { + fn to_object(&self, py: Python) -> PyObject { + Json::from(self.clone()).to_object(py) + } +} + +impl IntoPy for PipelineSyncData { + fn into_py(self, py: Python) -> PyObject { + self.to_object(py) + } +} + //////////////////////////////////////////////////////////////////////////////// // PY to Rust ////////////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////////////////////////// + impl FromPyObject<'_> for Json { fn extract(ob: &PyAny) -> PyResult { if ob.is_instance_of::()? 
{ @@ -94,3 +103,112 @@ impl FromPyObject<'_> for Json { } } } + +impl FromPyObject<'_> for PipelineSyncData { + fn extract(ob: &PyAny) -> PyResult { + let json = Json::extract(ob)?; + Ok(json.into()) + } +} + +//////////////////////////////////////////////////////////////////////////////// +// Rust to Rust ////////////////////////////////////////////////////////////////// +//////////////////////////////////////////////////////////////////////////////// + +macro_rules! gen_custom_into { + ($t1:ty) => { + impl CustomInto<$t1> for $t1 { + fn custom_into(self) -> $t1 { + self + } + } + }; + (($($T1:ident),+), ($($T2:ident),+), ($($C:tt),+)) => { + impl<$($T1, $T2: CustomInto<$T1>),+> CustomInto<($($T1),+,)> for ($($T2),+,) { + fn custom_into(self) -> ($($T1),+,) { + ($(self.$C.custom_into()),+,) + } + } + } +} + +/// Very similar to the `Into` trait, but we can implement it on foreign types +pub trait CustomInto { + fn custom_into(self) -> T; +} + +impl CustomInto for PipelineSyncData { + fn custom_into(self) -> Json { + Json::from(self) + } +} + +impl> CustomInto> for Option { + fn custom_into(self) -> Option { + self.map(|s| s.custom_into()) + } +} + +impl> CustomInto> for Vec { + fn custom_into(self) -> Vec { + self.into_iter().map(|x| x.custom_into()).collect() + } +} + +impl, T2: CustomInto> + CustomInto> for HashMap +{ + fn custom_into(self) -> HashMap { + self.into_iter() + .map(|(k, v)| (k.custom_into(), v.custom_into())) + .collect() + } +} + +impl CustomInto<&'static str> for &str { + fn custom_into(self) -> &'static str { + // This is how we get around the liftime checker + unsafe { + let ptr = self as *const str; + let ptr = ptr as *mut str; + let boxed = Box::from_raw(ptr); + Box::leak(boxed) + } + } +} + +gen_custom_into!((T1), (TT2), (0)); +gen_custom_into!((T1, T2), (TT1, TT2), (0, 1)); +gen_custom_into!((T1, T2, T3), (TT1, TT2, TT3), (0, 1, 2)); +gen_custom_into!((T1, T2, T3, T4), (TT1, TT2, TT3, TT4), (0, 1, 2, 3)); +gen_custom_into!( + (T1, T2, T3, 
T4, T5), + (TT1, TT2, TT3, TT4, TT5), + (0, 1, 2, 3, 4) +); +gen_custom_into!( + (T1, T2, T3, T4, T5, T6), + (TT1, TT2, TT3, TT4, TT5, TT6), + (0, 1, 2, 3, 4, 5) +); + +// There are some restrictions I cannot figure out around conflicting trait +// implimentations so this is my solution for now +gen_custom_into!(String); + +gen_custom_into!(()); + +gen_custom_into!(bool); + +gen_custom_into!(i8); +gen_custom_into!(i16); +gen_custom_into!(i32); +gen_custom_into!(i64); + +gen_custom_into!(u8); +gen_custom_into!(u16); +gen_custom_into!(u32); +gen_custom_into!(u64); + +gen_custom_into!(f32); +gen_custom_into!(f64); diff --git a/pgml-sdks/rust/pgml/src/lib.rs b/pgml-sdks/rust/pgml/src/lib.rs index 1f7688209..26fff5307 100644 --- a/pgml-sdks/rust/pgml/src/lib.rs +++ b/pgml-sdks/rust/pgml/src/lib.rs @@ -4,54 +4,108 @@ //! //! With this SDK, you can seamlessly manage various database tables related to documents, text chunks, text splitters, LLM (Language Model) models, and embeddings. By leveraging the SDK's capabilities, you can efficiently index LLM embeddings using PgVector for fast and accurate queries. 
-use log::{Level, LevelFilter, Metadata, Record, SetLoggerError}; +use sqlx::PgPool; +use std::collections::HashMap; +use std::env; +use std::sync::RwLock; use tokio::runtime::{Builder, Runtime}; +use tracing::Level; +use tracing_subscriber::FmtSubscriber; +mod builtins; mod collection; -mod database; mod filter_builder; mod languages; +mod model; pub mod models; +mod pipeline; mod queries; mod query_builder; mod query_runner; +mod remote_embeddings; +mod splitter; pub mod types; mod utils; -// Pub re-export the Database and Collection structs for use in the rust library +// Re-export +pub use builtins::Builtins; pub use collection::Collection; -pub use database::Database; - -// Normally libraries leave it up to up to the rust executable using the library to init the -// logger, but because we are used by programs in Python and other languages that do -// not have the ability to do that, we init it for those languages, but leave it uninitialized when -// used natively with rust -struct SimpleLogger; - -static LOGGER: SimpleLogger = SimpleLogger; - -impl log::Log for SimpleLogger { - fn enabled(&self, metadata: &Metadata) -> bool { - metadata.level() <= Level::Info +pub use model::Model; +pub use pipeline::Pipeline; +pub use splitter::Splitter; + +// Store the database(s) in a global variable so that we can access them from anywhere +// This is not necessarily idiomatic Rust, but it is a good way to acomplish what we need +static DATABASE_POOLS: RwLock>> = RwLock::new(None); + +// Even though this function does not use async anywhere, for whatever reason it must be async or +// sqlx's connect_lazy will throw an error +async fn get_or_initialize_pool(database_url: &Option) -> anyhow::Result { + let mut pools = DATABASE_POOLS + .write() + .expect("Error getting DATABASE_POOLS for writing"); + let pools = pools.get_or_insert_with(HashMap::new); + let environment_url = std::env::var("DATABASE_URL"); + let environment_url = environment_url.as_deref(); + let url = 
database_url + .as_deref() + .unwrap_or(environment_url.expect("Please set DATABASE_URL environment variable")); + if let Some(pool) = pools.get(url) { + Ok(pool.clone()) + } else { + let pool = PgPool::connect_lazy(url)?; + pools.insert(url.to_string(), pool.clone()); + Ok(pool) } +} - fn log(&self, record: &Record) { - if self.enabled(record.metadata()) { - println!("{} - {}", record.level(), record.args()); +pub enum LogFormat { + JSON, + Pretty, +} + +impl From<&str> for LogFormat { + fn from(s: &str) -> Self { + match s { + "JSON" => LogFormat::JSON, + _ => LogFormat::Pretty, } } - - fn flush(&self) {} } -fn init_logger(level: LevelFilter) -> Result<(), SetLoggerError> { - log::set_logger(&LOGGER).map(|()| log::set_max_level(level)) +#[allow(dead_code)] +fn init_logger(level: Option, format: Option) -> anyhow::Result<()> { + let level = level.unwrap_or_else(|| env::var("LOG_LEVEL").unwrap_or("".to_string())); + let level = match level.as_str() { + "TRACE" => Level::TRACE, + "DEBUG" => Level::DEBUG, + "INFO" => Level::INFO, + "WARN" => Level::WARN, + _ => Level::ERROR, + }; + + let format = format.unwrap_or_else(|| env::var("LOG_FORMAT").unwrap_or("".to_string())); + + match format.as_str().into() { + LogFormat::JSON => FmtSubscriber::builder() + .json() + .with_max_level(level) + .try_init() + .map_err(|e| anyhow::anyhow!(e)), + _ => FmtSubscriber::builder() + .pretty() + .with_max_level(level) + .try_init() + .map_err(|e| anyhow::anyhow!(e)), + } } // Normally the global async runtime is handled by tokio but because we are a library being called // by javascript and other langauges, we occasionally need to handle it ourselves +#[allow(dead_code)] static mut RUNTIME: Option = None; +#[allow(dead_code)] fn get_or_set_runtime<'a>() -> &'a Runtime { unsafe { if let Some(r) = &RUNTIME { @@ -68,200 +122,359 @@ fn get_or_set_runtime<'a>() -> &'a Runtime { } } +#[cfg(feature = "python")] +#[pyo3::prelude::pyfunction] +fn py_init_logger(level: Option, format: Option) 
-> pyo3::PyResult<()> { + init_logger(level, format).ok(); + Ok(()) +} + #[cfg(feature = "python")] #[pyo3::pymodule] fn pgml(_py: pyo3::Python, m: &pyo3::types::PyModule) -> pyo3::PyResult<()> { - // We may want to move this into the new function in the DatabasePython struct and give the - // user the oppertunity to pass in the log level filter - init_logger(LevelFilter::Error).unwrap(); - m.add_class::()?; + m.add_function(pyo3::wrap_pyfunction!(py_init_logger, m)?)?; + m.add_class::()?; + m.add_class::()?; + m.add_class::()?; + m.add_class::()?; + m.add_class::()?; Ok(()) } +#[cfg(feature = "javascript")] +fn js_init_logger( + mut cx: neon::context::FunctionContext, +) -> neon::result::JsResult { + use crate::languages::javascript::*; + let level = cx.argument_opt(0); + let level = >::from_option_js_type(&mut cx, level)?; + let format = cx.argument_opt(1); + let format = >::from_option_js_type(&mut cx, format)?; + init_logger(level, format).ok(); + ().into_js_result(&mut cx) +} + #[cfg(feature = "javascript")] #[neon::main] fn main(mut cx: neon::context::ModuleContext) -> neon::result::NeonResult<()> { - // We may want to move this into the new function in the DatabaseJavascript struct and give the - // user the oppertunity to pass in the log level filter - init_logger(LevelFilter::Error).unwrap(); - cx.export_function("newDatabase", database::DatabaseJavascript::new)?; + cx.export_function("js_init_logger", js_init_logger)?; + cx.export_function("newCollection", collection::CollectionJavascript::new)?; + cx.export_function("newModel", model::ModelJavascript::new)?; + cx.export_function("newSplitter", splitter::SplitterJavascript::new)?; + cx.export_function("newBuiltins", builtins::BuiltinsJavascript::new)?; + cx.export_function("newPipeline", pipeline::PipelineJavascript::new)?; Ok(()) } #[cfg(test)] mod tests { use super::*; - use std::env; - - use crate::types::Json; + use crate::{model::Model, pipeline::Pipeline, splitter::Splitter, types::Json}; - 
#[tokio::test] - async fn can_connect_to_database() { - let connection_string = env::var("DATABASE_URL").unwrap(); - Database::new(&connection_string).await.unwrap(); + fn generate_dummy_documents(count: usize) -> Vec { + let mut documents = Vec::new(); + for i in 0..count { + let document = serde_json::json!( + { + "id": i, + "text": format!("This is a test document: {}", i), + "metadata": { + "uuid": i * 10, + "name": format!("Test Document {}", i) + } + }); + documents.push(document.into()); + } + documents } - #[tokio::test] - async fn can_create_collection() { - let connection_string = env::var("DATABASE_URL").unwrap(); - let collection_name = "rctest2"; - let db = Database::new(&connection_string).await.unwrap(); - let _ = db.create_or_get_collection(collection_name).await.unwrap(); - let does_collection_exist = db.does_collection_exist(collection_name).await.unwrap(); - assert_eq!(does_collection_exist, true); - // db.archive_collection(collection_name).await.unwrap(); + #[sqlx::test] + async fn can_create_collection() -> anyhow::Result<()> { + init_logger(None, None).ok(); + let mut collection = Collection::new("test_r_c_ccc_0", None); + assert!(collection.database_data.is_none()); + collection.verify_in_database(false).await?; + assert!(collection.database_data.is_some()); + collection.archive().await?; + Ok(()) } - #[tokio::test] - async fn can_vector_search() { - let connection_string = env::var("DATABASE_URL").unwrap(); - - init_logger(LevelFilter::Error).ok(); - - let collection_name = "rctest0"; + #[sqlx::test] + async fn can_add_remove_pipeline() -> anyhow::Result<()> { + init_logger(None, None).ok(); + let model = Model::default(); + let splitter = Splitter::default(); + let mut pipeline = Pipeline::new( + "test_p_cap_57", + Some(model), + Some(splitter), + Some( + serde_json::json!({ + "full_text_search": { + "active": true, + "configuration": "english" + } + }) + .into(), + ), + ); + let mut collection = Collection::new("test_r_c_carp_3", None); + 
assert!(collection.database_data.is_none()); + collection.add_pipeline(&mut pipeline).await?; + assert!(collection.database_data.is_some()); + collection.remove_pipeline(&mut pipeline).await?; + let pipelines = collection.get_pipelines().await?; + assert!(pipelines.is_empty()); + collection.archive().await?; + Ok(()) + } - let db = Database::new(&connection_string).await.unwrap(); - let collection = db.create_or_get_collection(collection_name).await.unwrap(); + #[sqlx::test] + async fn can_add_remove_pipelines() -> anyhow::Result<()> { + init_logger(None, None).ok(); + let model = Model::default(); + let splitter = Splitter::default(); + let mut pipeline1 = Pipeline::new( + "test_r_p_carps_0", + Some(model.clone()), + Some(splitter.clone()), + None, + ); + let mut pipeline2 = Pipeline::new("test_r_p_carps_1", Some(model), Some(splitter), None); + let mut collection = Collection::new("test_r_c_carps_1", None); + collection.add_pipeline(&mut pipeline1).await?; + collection.add_pipeline(&mut pipeline2).await?; + let pipelines = collection.get_pipelines().await?; + assert!(pipelines.len() == 2); + collection.remove_pipeline(&mut pipeline1).await?; + let pipelines = collection.get_pipelines().await?; + assert!(pipelines.len() == 1); + assert!(collection.get_pipeline("test_r_p_carps_0").await.is_err()); + collection.archive().await?; + Ok(()) + } - let documents: Vec = vec![ - serde_json::json!( { - "id": 1, - "text": "This is a document" - }) - .into(), - serde_json::json!( { - "id": 2, - "text": "This is a second document" - }) - .into(), - ]; + #[sqlx::test] + async fn disable_enable_pipeline() -> anyhow::Result<()> { + let model = Model::default(); + let splitter = Splitter::default(); + let mut pipeline = Pipeline::new("test_p_dep_0", Some(model), Some(splitter), None); + let mut collection = Collection::new("test_r_c_dep_1", None); + collection.add_pipeline(&mut pipeline).await?; + let queried_pipeline = &collection.get_pipelines().await?[0]; + 
assert_eq!(pipeline.name, queried_pipeline.name); + collection.disable_pipeline(&mut pipeline).await?; + let queried_pipelines = &collection.get_pipelines().await?; + assert!(queried_pipelines.is_empty()); + collection.enable_pipeline(&mut pipeline).await?; + let queried_pipeline = &collection.get_pipelines().await?[0]; + assert_eq!(pipeline.name, queried_pipeline.name); + collection.archive().await?; + Ok(()) + } + #[sqlx::test] + async fn sync_multiple_pipelines() -> anyhow::Result<()> { + init_logger(None, None).ok(); + let model = Model::default(); + let splitter = Splitter::default(); + let mut pipeline1 = Pipeline::new( + "test_r_p_smp_0", + Some(model.clone()), + Some(splitter.clone()), + Some( + serde_json::json!({ + "full_text_search": { + "active": true, + "configuration": "english" + } + }) + .into(), + ), + ); + let mut pipeline2 = Pipeline::new( + "test_r_p_smp_1", + Some(model), + Some(splitter), + Some( + serde_json::json!({ + "full_text_search": { + "active": true, + "configuration": "english" + } + }) + .into(), + ), + ); + let mut collection = Collection::new("test_r_c_smp_3", None); + collection.add_pipeline(&mut pipeline1).await?; + collection.add_pipeline(&mut pipeline2).await?; collection - .upsert_documents(documents, None, None) - .await - .unwrap(); - let parameters = Json::from(serde_json::json!({ - "chunk_size": 1500, - "chunk_overlap": 40, - })); - collection - .register_text_splitter(None, Some(parameters)) - .await - .unwrap(); - collection.generate_chunks(None).await.unwrap(); - collection.register_model(None, None, None).await.unwrap(); - collection.generate_embeddings(None, None).await.unwrap(); - collection - .vector_search("Here is a test", None, None, None, None) - .await - .unwrap(); - db.archive_collection(&collection_name).await.unwrap(); + .upsert_documents(generate_dummy_documents(3), Some(true)) + .await?; + let status_1 = pipeline1.get_status().await?; + let status_2 = pipeline2.get_status().await?; + assert!( + 
status_1.chunks_status.synced == status_1.chunks_status.total + && status_1.chunks_status.not_synced == 0 + ); + assert!( + status_2.chunks_status.synced == status_2.chunks_status.total + && status_2.chunks_status.not_synced == 0 + ); + collection.archive().await?; + Ok(()) } - #[tokio::test] - async fn query_builder() { - let connection_string = env::var("DATABASE_URL").unwrap(); - init_logger(LevelFilter::Error).ok(); - - let collection_name = "rqbmftest9"; - - let db = Database::new(&connection_string).await.unwrap(); - let collection = db.create_or_get_collection(collection_name).await.unwrap(); - - let mut documents: Vec = Vec::new(); - for i in 0..4 { - documents.push(serde_json::json!({ - "id": i, - "metadata": { - "uuid": i, - "category": [i, 2, 3] - }, - "text": format!("{} This is some document with some filler text filler filler filler filler filler filler filler filler filler", i) - }).into()); - } - - collection - .upsert_documents(documents, None, None) - .await - .unwrap(); - let parameters = Json::from(serde_json::json!({ - "chunk_size": 1500, - "chunk_overlap": 40, - })); + #[sqlx::test] + async fn can_vector_search_with_local_embeddings() -> anyhow::Result<()> { + init_logger(None, None).ok(); + let model = Model::default(); + let splitter = Splitter::default(); + let mut pipeline = Pipeline::new( + "test_r_p_cvswle_1", + Some(model), + Some(splitter), + Some( + serde_json::json!({ + "full_text_search": { + "active": true, + "configuration": "english" + } + }) + .into(), + ), + ); + let mut collection = Collection::new("test_r_c_cvswle_28", None); + collection.add_pipeline(&mut pipeline).await?; + + // Recreate the pipeline to replicate a more accurate example + let mut pipeline = Pipeline::new("test_r_p_cvswle_1", None, None, None); collection - .register_text_splitter(None, Some(parameters)) - .await - .unwrap(); - collection.generate_chunks(None).await.unwrap(); - collection.register_model(None, None, None).await.unwrap(); - 
collection.generate_embeddings(None, None).await.unwrap(); - collection.generate_tsvectors(None).await.unwrap(); - - let filter = serde_json::json! ({ - "metadata": { - "metadata": { - "$or": [ - {"uuid": {"$eq": 1 }}, - {"uuid": {"$eq": 2 }}, - {"uuid": {"$eq": 3 }}, - {"category": {"$eq": [1, 2, 3]}} - ] - - } - }, - "full_text": { - "text": "filler text" - } - }); - - let query = collection - .query() - .vector_recall("test query".to_string(), None, None, None) - .filter(filter.into()) - .limit(10); - println!("\n{}\n", query.to_string()); - let results = query.run().await.unwrap(); - println!("{:?}", results); + .upsert_documents(generate_dummy_documents(3), None) + .await?; + let results = collection + .vector_search("Here is some query", &mut pipeline, None, None) + .await?; + assert!(results.len() == 3); + collection.archive().await?; + Ok(()) } - #[tokio::test] - async fn query_runner() { - let connection_string = env::var("DATABASE_URL").unwrap(); - init_logger(LevelFilter::Error).ok(); - - let db = Database::new(&connection_string).await.unwrap(); - let query = db.query("SELECT * from pgml.collections"); - let results = query.fetch_all().await.unwrap(); - println!("{:?}", results); + #[sqlx::test] + async fn can_vector_search_with_remote_embeddings() -> anyhow::Result<()> { + init_logger(None, None).ok(); + let model = Model::new( + Some("text-embedding-ada-002".to_string()), + Some("openai".to_string()), + None, + ); + let splitter = Splitter::default(); + let mut pipeline = Pipeline::new( + "test_r_p_cvswre_1", + Some(model), + Some(splitter), + Some( + serde_json::json!({ + "full_text_search": { + "active": true, + "configuration": "english" + } + }) + .into(), + ), + ); + let mut collection = Collection::new("test_r_c_cvswre_20", None); + collection.add_pipeline(&mut pipeline).await?; + + // Recreate the pipeline to replicate a more accurate example + let mut pipeline = Pipeline::new("test_r_p_cvswre_1", None, None, None); + collection + 
.upsert_documents(generate_dummy_documents(3), None) + .await?; + let results = collection + .vector_search("Here is some query", &mut pipeline, None, None) + .await?; + assert!(results.len() == 3); + collection.archive().await?; + Ok(()) } - #[tokio::test] - async fn transform() { - let connection_string = env::var("DATABASE_URL").unwrap(); - init_logger(LevelFilter::Error).ok(); - - let db = Database::new(&connection_string).await.unwrap(); - // let task = Json::from(serde_json::json!("text-classification")); - let task = Json::from(serde_json::json!("translation_en_to_fr")); - let inputs = vec!["test1".to_string(), "test2".to_string()]; - let results = db.transform(task, inputs, None).await.unwrap(); - println!("{:?}", results); + #[sqlx::test] + async fn can_vector_search_with_query_builder() -> anyhow::Result<()> { + init_logger(None, None).ok(); + let model = Model::default(); + let splitter = Splitter::default(); + let mut pipeline = Pipeline::new( + "test_r_p_cvswqb_1", + Some(model), + Some(splitter), + Some( + serde_json::json!({ + "full_text_search": { + "active": true, + "configuration": "english" + } + }) + .into(), + ), + ); + let mut collection = Collection::new("test_r_c_cvswqb_3", None); + collection.add_pipeline(&mut pipeline).await?; + + // Recreate the pipeline to replicate a more accurate example + let mut pipeline = Pipeline::new("test_r_p_cvswqb_1", None, None, None); + collection + .upsert_documents(generate_dummy_documents(3), None) + .await?; + let results = collection + .query() + .vector_recall("Here is some query", &mut pipeline, None) + .fetch_all() + .await?; + assert!(results.len() == 3); + collection.archive().await?; + Ok(()) } - #[tokio::test] - async fn collection_errors() { - let connection_string = env::var("DATABASE_URL").unwrap(); - init_logger(LevelFilter::Error).ok(); - - let db = Database::new(&connection_string).await.unwrap(); - let collection_name = "cetest0"; - let collection = 
db.create_or_get_collection(collection_name).await.unwrap(); - - // Test that we cannot generate tsvectors without upserting documents first - assert!(collection.generate_tsvectors(None).await.is_err()); - // Test that we cannot generate chunks without upserting documents first - assert!(collection.generate_chunks(None).await.is_err()); - // Test that we cannot generate embeddings without generating chunks first - assert!(collection.generate_embeddings(None, None).await.is_err()); + #[sqlx::test] + async fn can_vector_search_with_query_builder_with_remote_embeddings() -> anyhow::Result<()> { + init_logger(None, None).ok(); + let model = Model::new( + Some("text-embedding-ada-002".to_string()), + Some("openai".to_string()), + None, + ); + let splitter = Splitter::default(); + let mut pipeline = Pipeline::new( + "test_r_p_cvswqbwre_1", + Some(model), + Some(splitter), + Some( + serde_json::json!({ + "full_text_search": { + "active": true, + "configuration": "english" + } + }) + .into(), + ), + ); + let mut collection = Collection::new("test_r_c_cvswqbwre_3", None); + collection.add_pipeline(&mut pipeline).await?; + + // Recreate the pipeline to replicate a more accurate example + let mut pipeline = Pipeline::new("test_r_p_cvswqbwre_1", None, None, None); + collection + .upsert_documents(generate_dummy_documents(3), None) + .await?; + let results = collection + .query() + .vector_recall("Here is some query", &mut pipeline, None) + .fetch_all() + .await?; + assert!(results.len() == 3); + collection.archive().await?; + Ok(()) } } diff --git a/pgml-sdks/rust/pgml/src/model.rs b/pgml-sdks/rust/pgml/src/model.rs new file mode 100644 index 000000000..dc5484af4 --- /dev/null +++ b/pgml-sdks/rust/pgml/src/model.rs @@ -0,0 +1,215 @@ +use anyhow::Context; +use pgml_macros::{custom_derive, custom_methods}; +use sqlx::postgres::PgPool; +use tracing::instrument; + +use crate::{ + collection::ProjectInfo, + get_or_initialize_pool, models, + types::{DateTime, Json}, +}; + 
+#[cfg(feature = "javascript")] +use crate::languages::javascript::*; + +#[cfg(feature = "python")] +use crate::{languages::python::*, types::JsonPython}; + +/// A few notes on the following enums: +/// - Sqlx does provide type derivation for enums, but it's not very good +/// - Queries using these enums require a number of additional queries to get their oids and +/// other information +/// - Because of the added overhead, we just cast back and forth to strings, which is kind of +/// annoying, but with the traits implimented below is a breeze and can be done just using .into + +/// Our model runtimes +#[derive(Debug, Clone, Copy)] +pub enum ModelRuntime { + Python, + OpenAI, +} + +impl From<&str> for ModelRuntime { + fn from(s: &str) -> Self { + match s { + "pgml" | "python" => Self::Python, + "openai" => Self::OpenAI, + _ => panic!("Unknown model runtime: {}", s), + } + } +} + +impl From<&ModelRuntime> for &'static str { + fn from(m: &ModelRuntime) -> Self { + match m { + ModelRuntime::Python => "python", + ModelRuntime::OpenAI => "openai", + } + } +} + +#[derive(Debug, Clone)] +pub(crate) struct ModelDatabaseData { + pub id: i64, + pub created_at: DateTime, +} + +#[derive(custom_derive, Debug, Clone)] +pub struct Model { + pub name: String, + pub runtime: ModelRuntime, + pub parameters: Json, + project_info: Option, + pub(crate) database_data: Option, +} + +impl Default for Model { + fn default() -> Self { + Self::new(None, None, None) + } +} + +#[custom_methods(new)] +impl Model { + /// Creates a new [Model] + /// + /// # Arguments + /// + /// * `name` - The name of the model. + /// * `source` - The source of the model. Defaults to `pgml`, but can be set to providers like `openai`. + /// * `parameters` - The parameters to the model. 
Defaults to None + /// + /// # Example + /// + /// ``` + /// use pgml::Model; + /// let model = Model::new(Some("intfloat/e5-small".to_string()), None, None); + /// ``` + pub fn new(name: Option, source: Option, parameters: Option) -> Self { + let name = name.unwrap_or("intfloat/e5-small".to_string()); + let parameters = parameters.unwrap_or(Json(serde_json::json!({}))); + let source = source.unwrap_or("pgml".to_string()); + let runtime: ModelRuntime = source.as_str().into(); + + Self { + name, + runtime, + parameters, + project_info: None, + database_data: None, + } + } + + #[instrument(skip(self))] + pub(crate) async fn verify_in_database(&mut self, throw_if_exists: bool) -> anyhow::Result<()> { + if self.database_data.is_none() { + let pool = self.get_pool().await?; + + let project_info = self + .project_info + .as_ref() + .expect("Cannot verify model without project info"); + + let mut parameters = self.parameters.clone(); + parameters + .as_object_mut() + .expect("Parameters for model should be an object") + .insert("name".to_string(), serde_json::json!(self.name)); + + let model: Option = sqlx::query_as( + "SELECT id, created_at, runtime::TEXT, hyperparams FROM pgml.models WHERE project_id = $1 AND runtime = $2::pgml.runtime AND hyperparams = $3", + ) + .bind(project_info.id) + .bind(Into::<&str>::into(&self.runtime)) + .bind(¶meters) + .fetch_optional(&pool) + .await?; + + let model = if let Some(m) = model { + anyhow::ensure!(!throw_if_exists, "Model already exists in database"); + m + } else { + let model: models::Model = sqlx::query_as("INSERT INTO pgml.models (project_id, num_features, algorithm, runtime, hyperparams, status, search_params, search_args) VALUES ($1, $2, $3, $4::pgml.runtime, $5, $6, $7, $8) RETURNING id, created_at, runtime::TEXT, hyperparams") + .bind(project_info.id) + .bind(1) + .bind("transformers") + .bind(Into::<&str>::into(&self.runtime)) + .bind(parameters) + .bind("successful") + .bind(serde_json::json!({})) + 
.bind(serde_json::json!({})) + .fetch_one(&pool) + .await?; + model + }; + + self.database_data = Some(ModelDatabaseData { + id: model.id, + created_at: model.created_at, + }); + } + Ok(()) + } + + pub(crate) fn set_project_info(&mut self, project_info: ProjectInfo) { + self.project_info = Some(project_info); + } + + #[instrument(skip(self))] + pub(crate) async fn to_dict(&mut self) -> anyhow::Result { + self.verify_in_database(false).await?; + + let database_data = self + .database_data + .as_ref() + .context("Model must be verified to call to_dict")?; + + Ok(serde_json::json!({ + "id": database_data.id, + "created_at": database_data.created_at, + "name": self.name, + "runtime": Into::<&str>::into(&self.runtime), + "parameters": *self.parameters, + }) + .into()) + } + + async fn get_pool(&self) -> anyhow::Result { + let database_url = &self + .project_info + .as_ref() + .context("Project info required to call method model.get_pool()")? + .database_url; + get_or_initialize_pool(database_url).await + } +} + +impl From for Model { + fn from(x: models::PipelineWithModelAndSplitter) -> Self { + Self { + name: x.model_hyperparams["name"].as_str().unwrap().to_string(), + runtime: x.model_runtime.as_str().into(), + parameters: x.model_hyperparams, + project_info: None, + database_data: Some(ModelDatabaseData { + id: x.model_id, + created_at: x.model_created_at, + }), + } + } +} + +impl From for Model { + fn from(model: models::Model) -> Self { + Self { + name: model.hyperparams["name"].as_str().unwrap().to_string(), + runtime: model.runtime.as_str().into(), + parameters: model.hyperparams, + project_info: None, + database_data: Some(ModelDatabaseData { + id: model.id, + created_at: model.created_at, + }), + } + } +} diff --git a/pgml-sdks/rust/pgml/src/models.rs b/pgml-sdks/rust/pgml/src/models.rs index a5970c85c..de5a922c5 100644 --- a/pgml-sdks/rust/pgml/src/models.rs +++ b/pgml-sdks/rust/pgml/src/models.rs @@ -1,37 +1,36 @@ -use pgml_macros::{custom_into_js_result, 
custom_into_py}; use sea_query::enum_def; +use serde::Serialize; use sqlx::types::Uuid; use sqlx::FromRow; use crate::types::{DateTime, Json}; -#[cfg(feature = "javascript")] -use crate::languages::javascript::*; - -/// A document +/// A pipeline #[enum_def] #[derive(FromRow)] -pub struct Document { +pub struct Pipeline { pub id: i64, + pub name: String, pub created_at: DateTime, - pub source_uuid: Uuid, - pub metadata: Json, - pub text: String, + pub model_id: i64, + pub splitter_id: i64, + pub active: bool, + pub parameters: Json, } -/// A collection of documents +/// A model used to perform some task #[enum_def] #[derive(FromRow)] -pub struct Collection { +pub struct Model { pub id: i64, pub created_at: DateTime, - pub name: String, - pub active: bool, + pub runtime: String, + pub hyperparams: Json, } /// A text splitter #[enum_def] -#[derive(custom_into_js_result, custom_into_py, FromRow)] +#[derive(FromRow)] pub struct Splitter { pub id: i64, pub created_at: DateTime, @@ -39,15 +38,46 @@ pub struct Splitter { pub parameters: Json, } -/// A model used to perform some task +/// A pipeline with its model and splitter +#[derive(FromRow, Clone)] +pub struct PipelineWithModelAndSplitter { + pub pipeline_id: i64, + pub pipeline_name: String, + pub pipeline_created_at: DateTime, + pub pipeline_active: bool, + pub pipeline_parameters: Json, + pub model_id: i64, + pub model_created_at: DateTime, + pub model_runtime: String, + pub model_hyperparams: Json, + pub splitter_id: i64, + pub splitter_created_at: DateTime, + pub splitter_name: String, + pub splitter_parameters: Json, +} + +/// A document #[enum_def] -#[derive(custom_into_js_result, custom_into_py, FromRow)] -pub struct Model { +#[derive(FromRow, Serialize)] +pub struct Document { + pub id: i64, + pub created_at: DateTime, + #[serde(with = "uuid::serde::compact")] + // See: https://docs.rs/uuid/latest/uuid/serde/index.html + pub source_uuid: Uuid, + pub metadata: Json, + pub text: String, +} + +/// A collection 
of documents +#[enum_def] +#[derive(FromRow)] +pub struct Collection { pub id: i64, pub created_at: DateTime, - pub task: String, pub name: String, - pub parameters: Json, + pub active: bool, + pub project_id: i64, } /// An embedding @@ -59,3 +89,14 @@ pub struct Embedding { pub chunk_id: i64, pub embedding: Vec, } + +/// A chunk of split text +#[derive(FromRow)] +pub struct Chunk { + pub id: i64, + pub created_at: DateTime, + pub document_id: i64, + pub splitter_id: i64, + pub chunk_index: i64, + pub chunk: String, +} diff --git a/pgml-sdks/rust/pgml/src/pipeline.rs b/pgml-sdks/rust/pgml/src/pipeline.rs new file mode 100644 index 000000000..2963ca78c --- /dev/null +++ b/pgml-sdks/rust/pgml/src/pipeline.rs @@ -0,0 +1,815 @@ +use anyhow::Context; +use indicatif::MultiProgress; +use pgml_macros::{custom_derive, custom_methods, pgml_alias}; +use sqlx::{Executor, PgConnection, PgPool}; +use std::sync::atomic::AtomicBool; +use std::sync::atomic::Ordering::Relaxed; +use tokio::join; +use tracing::instrument; + +use crate::{ + collection::ProjectInfo, + get_or_initialize_pool, + model::{Model, ModelRuntime}, + models, queries, query_builder, + remote_embeddings::build_remote_embeddings, + splitter::Splitter, + types::{DateTime, Json}, + utils, +}; + +#[cfg(feature = "javascript")] +use crate::languages::javascript::*; + +#[cfg(feature = "python")] +use crate::{ + languages::python::*, model::ModelPython, splitter::SplitterPython, types::JsonPython, +}; + +#[derive(Debug, Clone)] +pub struct InvividualSyncStatus { + pub synced: i64, + pub not_synced: i64, + pub total: i64, +} + +impl From for Json { + fn from(value: InvividualSyncStatus) -> Self { + serde_json::json!({ + "synced": value.synced, + "not_synced": value.not_synced, + "total": value.total, + }) + .into() + } +} + +impl From for InvividualSyncStatus { + fn from(value: Json) -> Self { + Self { + synced: value["synced"].as_i64().expect("The synced field is not an integer"), + not_synced: 
value["not_synced"].as_i64().expect("The not_synced field is not an integer"), + total: value["total"].as_i64().expect("The total field is not an integer"), + } + } +} + +#[derive(pgml_alias, Debug, Clone)] +pub struct PipelineSyncData { + pub chunks_status: InvividualSyncStatus, + pub embeddings_status: InvividualSyncStatus, + pub tsvectors_status: InvividualSyncStatus, +} + +impl From for Json { + fn from(value: PipelineSyncData) -> Self { + serde_json::json!({ + "chunks_status": *Json::from(value.chunks_status), + "embeddings_status": *Json::from(value.embeddings_status), + "tsvectors_status": *Json::from(value.tsvectors_status), + }) + .into() + } +} + +impl From for PipelineSyncData { + fn from(mut value: Json) -> Self { + Self { + chunks_status: Json::from(std::mem::take(&mut value["chunks_status"])).into(), + embeddings_status: Json::from(std::mem::take(&mut value["embeddings_status"])).into(), + tsvectors_status: Json::from(std::mem::take(&mut value["tsvectors_status"])).into(), + } + } +} + +#[derive(Debug, Clone)] +pub struct PipelineDatabaseData { + pub id: i64, + pub created_at: DateTime, + pub model_id: i64, + pub splitter_id: i64, +} + +#[derive(custom_derive, Debug, Clone)] +pub struct Pipeline { + pub name: String, + pub model: Option, + pub splitter: Option, + pub parameters: Option, + project_info: Option, + pub(crate) database_data: Option, +} + +#[custom_methods(new, get_status, to_dict)] +impl Pipeline { + /// Creates a new [Pipeline] + /// + /// # Arguments + /// + /// * `name` - The name of the pipeline + /// * `model` - The pipeline [Model] + /// * `splitter` - The pipeline [Splitter] + /// * `parameters` - The parameters to the pipeline. 
Defaults to None + /// + /// # Example + /// + /// ``` + /// use pgml::{Pipeline, Model, Splitter}; + /// let model = Model::new(None, None, None); + /// let splitter = Splitter::new(None, None); + /// let pipeline = Pipeline::new("my_splitter", Some(model), Some(splitter), None); + /// ``` + pub fn new( + name: &str, + model: Option, + splitter: Option, + parameters: Option, + ) -> Self { + let parameters = Some(parameters.unwrap_or_default()); + Self { + name: name.to_string(), + model, + splitter, + parameters, + project_info: None, + database_data: None, + } + } + + /// Gets the status of the [Pipeline] + /// This includes the status of the chunks, embeddings, and tsvectors + /// + /// # Example + /// + /// ``` + /// use pgml::Collection; + /// + /// async fn example() -> anyhow::Result<()> { + /// let mut collection = Collection::new("my_collection", None); + /// let mut pipeline = collection.get_pipeline("my_pipeline").await?; + /// let status = pipeline.get_status().await?; + /// Ok(()) + /// } + /// ``` + #[instrument(skip(self))] + pub async fn get_status(&mut self) -> anyhow::Result { + let pool = self.get_pool().await?; + + self.verify_in_database(false).await?; + let embeddings_table_name = self.create_or_get_embeddings_table().await?; + + let database_data = self + .database_data + .as_ref() + .context("Pipeline must be verified to get status")?; + + let parameters = self + .parameters + .as_ref() + .context("Pipeline must be verified to get status")?; + + let project_name = &self.project_info.as_ref().unwrap().name; + + // TODO: Maybe combine all of these into one query so it is faster + let chunks_status: (Option, Option) = sqlx::query_as(&query_builder!( + "SELECT (SELECT COUNT(DISTINCT document_id) FROM %s WHERE splitter_id = $1), COUNT(id) FROM %s", + format!("{}.chunks", project_name), + format!("{}.documents", project_name) + )) + .bind(database_data.splitter_id) + .fetch_one(&pool).await?; + let chunks_status = InvividualSyncStatus { + synced: 
chunks_status.0.unwrap_or(0), + not_synced: chunks_status.1.unwrap_or(0) - chunks_status.0.unwrap_or(0), + total: chunks_status.1.unwrap_or(0), + }; + + let embeddings_status: (Option, Option) = sqlx::query_as(&query_builder!( + "SELECT (SELECT count(*) FROM %s), (SELECT count(*) FROM %s WHERE splitter_id = $1)", + embeddings_table_name, + format!("{}.chunks", project_name) + )) + .bind(database_data.splitter_id) + .fetch_one(&pool) + .await?; + let embeddings_status = InvividualSyncStatus { + synced: embeddings_status.0.unwrap_or(0), + not_synced: embeddings_status.1.unwrap_or(0) - embeddings_status.0.unwrap_or(0), + total: embeddings_status.1.unwrap_or(0), + }; + + let tsvectors_status = if parameters["full_text_search"]["active"] + == serde_json::Value::Bool(true) + { + sqlx::query_as(&query_builder!( + "SELECT (SELECT COUNT(*) FROM %s WHERE configuration = $1), (SELECT COUNT(*) FROM %s)", + format!("{}.documents_tsvectors", project_name), + format!("{}.documents", project_name) + )) + .bind(parameters["full_text_search"]["configuration"].as_str()) + .fetch_one(&pool).await? 
+ } else { + (Some(0), Some(0)) + }; + let tsvectors_status = InvividualSyncStatus { + synced: tsvectors_status.0.unwrap_or(0), + not_synced: tsvectors_status.1.unwrap_or(0) - tsvectors_status.0.unwrap_or(0), + total: tsvectors_status.1.unwrap_or(0), + }; + + Ok(PipelineSyncData { + chunks_status, + embeddings_status, + tsvectors_status, + }) + } + + #[instrument(skip(self))] + pub(crate) async fn verify_in_database(&mut self, throw_if_exists: bool) -> anyhow::Result<()> { + if self.database_data.is_none() { + let pool = self.get_pool().await?; + + let project_info = self + .project_info + .as_ref() + .expect("Cannot verify pipeline without project info"); + + let pipeline: Option = sqlx::query_as(&query_builder!( + "SELECT * FROM %s WHERE name = $1", + format!("{}.pipelines", project_info.name) + )) + .bind(&self.name) + .fetch_optional(&pool) + .await?; + + let pipeline = if let Some(p) = pipeline { + if throw_if_exists { + anyhow::bail!("Pipeline {} already exists", p.name); + } + let model: models::Model = sqlx::query_as( + "SELECT id, created_at, runtime::TEXT, hyperparams FROM pgml.models WHERE id = $1", + ) + .bind(p.model_id) + .fetch_one(&pool) + .await?; + let mut model: Model = model.into(); + model.set_project_info(project_info.clone()); + self.model = Some(model); + + let splitter: models::Splitter = + sqlx::query_as("SELECT * FROM pgml.sdk_splitters WHERE id = $1") + .bind(p.splitter_id) + .fetch_one(&pool) + .await?; + let mut splitter: Splitter = splitter.into(); + splitter.set_project_info(project_info.clone()); + self.splitter = Some(splitter); + + p + } else { + let model = self + .model + .as_mut() + .expect("Cannot save pipeline without model"); + model.set_project_info(project_info.clone()); + model.verify_in_database(false).await?; + + let splitter = self + .splitter + .as_mut() + .expect("Cannot save pipeline without splitter"); + splitter.set_project_info(project_info.clone()); + splitter.verify_in_database(false).await?; + + 
sqlx::query_as(&query_builder!( + "INSERT INTO %s (name, model_id, splitter_id, parameters) VALUES ($1, $2, $3, $4) RETURNING *", + format!("{}.pipelines", project_info.name) + )) + .bind(&self.name) + .bind( + model + .database_data + .as_ref() + .context("Cannot save pipeline without model")? + .id, + ) + .bind( + splitter + .database_data + .as_ref() + .context("Cannot save pipeline without splitter")? + .id, + ) + .bind(&self.parameters) + .fetch_one(&pool) + .await? + }; + + self.database_data = Some(PipelineDatabaseData { + id: pipeline.id, + created_at: pipeline.created_at, + model_id: pipeline.model_id, + splitter_id: pipeline.splitter_id, + }); + self.parameters = Some(pipeline.parameters); + } + Ok(()) + } + + #[instrument(skip(self, mp))] + pub(crate) async fn execute( + &mut self, + document_ids: &Option>, + mp: MultiProgress, + ) -> anyhow::Result<()> { + // TODO: Chunk document_ids if there are too many + + // A couple notes on the following methods + // - Atomic bools are required to work nicely with pyo3 otherwise we would use cells + // - We use green threads because they are cheap, but we want to be super careful to not + // return an error before stopping the green thread. 
To meet that end, we map errors and + // return types often + let chunk_ids = self.sync_chunks(document_ids, &mp).await?; + self.sync_embeddings(chunk_ids, &mp).await?; + self.sync_tsvectors(document_ids, &mp).await?; + Ok(()) + } + + #[instrument(skip(self, mp))] + async fn sync_chunks( + &mut self, + document_ids: &Option>, + mp: &MultiProgress, + ) -> anyhow::Result>> { + self.verify_in_database(false).await?; + let pool = self.get_pool().await?; + + let database_data = self + .database_data + .as_mut() + .context("Pipeline must be verified to generate chunks")?; + + let project_info = self + .project_info + .as_ref() + .context("Pipeline must have project info to generate chunks")?; + + let progress_bar = mp + .add(utils::default_progress_spinner(1)) + .with_prefix(self.name.clone()) + .with_message("generating chunks"); + + // This part is a bit tricky + // We want to return the ids for all chunks we inserted OR would have inserted if they didn't already exist + // The query is structured in such a way to not insert any chunks that already exist so we + // can't rely on the data returned from the inset queries, we need to query the chunks table + // It is important we return the ids for chunks we would have inserted if they didn't already exist so we are robust to random crashes + let is_done = AtomicBool::new(false); + let work = async { + let chunk_ids: Result>, _> = if document_ids.is_some() { + sqlx::query(&query_builder!( + queries::GENERATE_CHUNKS_FOR_DOCUMENT_IDS, + &format!("{}.chunks", project_info.name), + &format!("{}.documents", project_info.name), + &format!("{}.chunks", project_info.name) + )) + .bind(database_data.splitter_id) + .bind(document_ids) + .execute(&pool) + .await + .map_err(|e| { + is_done.store(true, Relaxed); + e + })?; + sqlx::query_scalar(&query_builder!( + "SELECT id FROM %s WHERE document_id = ANY($1)", + &format!("{}.chunks", project_info.name) + )) + .bind(document_ids) + .fetch_all(&pool) + .await + .map(Some) + } else { + 
sqlx::query(&query_builder!( + queries::GENERATE_CHUNKS, + &format!("{}.chunks", project_info.name), + &format!("{}.documents", project_info.name), + &format!("{}.chunks", project_info.name) + )) + .bind(database_data.splitter_id) + .execute(&pool) + .await + .map(|_t| None) + }; + is_done.store(true, Relaxed); + chunk_ids + }; + let progress_work = async { + while !is_done.load(Relaxed) { + progress_bar.inc(1); + tokio::time::sleep(std::time::Duration::from_millis(100)).await; + } + }; + let (chunk_ids, _) = join!(work, progress_work); + progress_bar.set_message("done generating chunks"); + progress_bar.finish(); + Ok(chunk_ids?) + } + + #[instrument(skip(self, mp))] + async fn sync_embeddings( + &mut self, + chunk_ids: Option>, + mp: &MultiProgress, + ) -> anyhow::Result<()> { + self.verify_in_database(false).await?; + let pool = self.get_pool().await?; + + let embeddings_table_name = self.create_or_get_embeddings_table().await?; + + let model = self + .model + .as_ref() + .context("Pipeline must be verified to generate embeddings")?; + + let database_data = self + .database_data + .as_mut() + .context("Pipeline must be verified to generate embeddings")?; + + let project_info = self + .project_info + .as_ref() + .context("Pipeline must have project info to generate embeddings")?; + + // Remove the stored name from the parameters + let mut parameters = model.parameters.clone(); + parameters + .as_object_mut() + .context("Model parameters must be an object")? + .remove("name"); + + let progress_bar = mp + .add(utils::default_progress_spinner(1)) + .with_prefix(self.name.clone()) + .with_message("generating emmbeddings"); + + let is_done = AtomicBool::new(false); + // We need to be careful about how we handle errors here. We do not want to return an error + // from the async block before setting is_done to true. If we do, the progress bar will + // will load forever. 
We also want to make sure to propogate any errors we have + let work = async { + let res = match model.runtime { + ModelRuntime::Python => if chunk_ids.is_some() { + sqlx::query(&query_builder!( + queries::GENERATE_EMBEDDINGS_FOR_CHUNK_IDS, + embeddings_table_name, + &format!("{}.chunks", project_info.name), + embeddings_table_name + )) + .bind(&model.name) + .bind(¶meters) + .bind(database_data.splitter_id) + .bind(chunk_ids) + .execute(&pool) + .await + } else { + sqlx::query(&query_builder!( + queries::GENERATE_EMBEDDINGS, + embeddings_table_name, + &format!("{}.chunks", project_info.name), + embeddings_table_name + )) + .bind(&model.name) + .bind(¶meters) + .bind(database_data.splitter_id) + .execute(&pool) + .await + } + .map_err(|e| anyhow::anyhow!(e)) + .map(|_t| ()), + r => { + let remote_embeddings = build_remote_embeddings(r, &model.name, ¶meters)?; + remote_embeddings + .generate_embeddings( + &embeddings_table_name, + &format!("{}.chunks", project_info.name), + database_data.splitter_id, + chunk_ids, + &pool, + ) + .await + .map(|_t| ()) + } + }; + is_done.store(true, Relaxed); + res + }; + let progress_work = async { + while !is_done.load(Relaxed) { + progress_bar.inc(1); + tokio::time::sleep(std::time::Duration::from_millis(100)).await; + } + }; + let (res, _) = join!(work, progress_work); + progress_bar.set_message("done generating embeddings"); + progress_bar.finish(); + res + } + + #[instrument(skip(self))] + async fn sync_tsvectors( + &mut self, + document_ids: &Option>, + mp: &MultiProgress, + ) -> anyhow::Result<()> { + self.verify_in_database(false).await?; + let pool = self.get_pool().await?; + + let parameters = self + .parameters + .as_ref() + .context("Pipeline must be verified to generate tsvectors")?; + + if parameters["full_text_search"]["active"] != serde_json::Value::Bool(true) { + return Ok(()); + } + + let project_info = self + .project_info + .as_ref() + .context("Pipeline must have project info to generate tsvectors")?; + + let 
progress_bar = mp + .add(utils::default_progress_spinner(1)) + .with_prefix(self.name.clone()) + .with_message("generating tsvectors for full text search"); + + let configuration = parameters["full_text_search"]["configuration"] + .as_str() + .context("Full text search configuration must be a string")?; + + let is_done = AtomicBool::new(false); + let work = async { + let res = if document_ids.is_some() { + sqlx::query(&query_builder!( + queries::GENERATE_TSVECTORS_FOR_DOCUMENT_IDS, + format!("{}.documents_tsvectors", project_info.name), + configuration, + configuration, + format!("{}.documents", project_info.name) + )) + .bind(document_ids) + .execute(&pool) + .await + } else { + sqlx::query(&query_builder!( + queries::GENERATE_TSVECTORS, + format!("{}.documents_tsvectors", project_info.name), + configuration, + configuration, + format!("{}.documents", project_info.name) + )) + .execute(&pool) + .await + }; + is_done.store(true, Relaxed); + res.map(|_t| ()).map_err(|e| anyhow::anyhow!(e)) + }; + let progress_work = async { + while !is_done.load(Relaxed) { + progress_bar.inc(1); + tokio::time::sleep(std::time::Duration::from_millis(100)).await; + } + }; + let (res, _) = join!(work, progress_work); + progress_bar.set_message("done generating tsvectors for full text search"); + progress_bar.finish(); + res + } + + #[instrument(skip(self))] + async fn create_or_get_embeddings_table(&mut self) -> anyhow::Result { + self.verify_in_database(false).await?; + let pool = self.get_pool().await?; + + let embeddings_table_name = format!( + "{}.{}_embeddings", + &self + .project_info + .as_ref() + .context("Pipeline must have project info to get the embeddings table name")? + .name, + self.name + ); + + // Notice that we actually check for existence of the table in the database instead of + // blindly creating it with `CREATE TABLE IF NOT EXISTS`. 
This is because we want to avoid + // generating embeddings just to get the length if we don't need to + let exists: bool = sqlx::query_scalar( + "SELECT EXISTS (SELECT FROM information_schema.tables WHERE table_schema = $1 AND table_name = $2)" + ) + .bind(&self + .project_info + .as_ref() + .context("Pipeline must have project info to get the embeddings table name")?.name) + .bind(format!("{}_embeddings", self.name)).fetch_one(&pool).await?; + + if !exists { + let model = self + .model + .as_ref() + .context("Pipeline must be verified to create embeddings table")?; + + // Remove the stored name from the parameters + let mut parameters = model.parameters.clone(); + parameters + .as_object_mut() + .context("Model parameters must be an object")? + .remove("name"); + + let embedding_length = match &model.runtime { + ModelRuntime::Python => { + let embedding: (Vec,) = sqlx::query_as( + "SELECT embedding from pgml.embed(transformer => $1, text => 'Hello, World!', kwargs => $2) as embedding") + .bind(&model.name) + .bind(parameters) + .fetch_one(&pool).await?; + embedding.0.len() as i64 + } + t => { + let remote_embeddings = + build_remote_embeddings(t.to_owned(), &model.name, &model.parameters)?; + remote_embeddings.get_embedding_size().await? + } + }; + + let mut transaction = pool.begin().await?; + sqlx::query(&query_builder!( + queries::CREATE_EMBEDDINGS_TABLE, + &embeddings_table_name, + &format!( + "{}.chunks", + self.project_info + .as_ref() + .context("Pipeline must have project info to create the embeddings table")? 
+ .name + ), + embedding_length + )) + .execute(&mut transaction) + .await?; + transaction + .execute( + query_builder!( + queries::CREATE_INDEX, + "", + "created_at_index", + &embeddings_table_name, + "created_at" + ) + .as_str(), + ) + .await?; + transaction + .execute( + query_builder!( + queries::CREATE_INDEX, + "", + "chunk_id_index", + &embeddings_table_name, + "chunk_id" + ) + .as_str(), + ) + .await?; + transaction + .execute( + query_builder!( + queries::CREATE_INDEX_USING_IVFFLAT, + "", + "vector_index", + &embeddings_table_name, + "embedding vector_cosine_ops" + ) + .as_str(), + ) + .await?; + transaction.commit().await?; + } + + Ok(embeddings_table_name) + } + + #[instrument(skip(self))] + pub(crate) fn set_project_info(&mut self, project_info: ProjectInfo) { + if self.model.is_some() { + self.model + .as_mut() + .unwrap() + .set_project_info(project_info.clone()); + } + if self.splitter.is_some() { + self.splitter + .as_mut() + .unwrap() + .set_project_info(project_info.clone()); + } + self.project_info = Some(project_info); + } + + /// Convert the [Pipeline] to [Json] + /// + /// # Example: + /// + /// ``` + /// use pgml::Collection; + /// + /// async fn example() -> anyhow::Result<()> { + /// let mut collection = Collection::new("my_collection", None); + /// let mut pipeline = collection.get_pipeline("my_pipeline").await?; + /// let pipeline_dict = pipeline.to_dict().await?; + /// Ok(()) + /// } + /// ``` + #[instrument(skip(self))] + pub async fn to_dict(&mut self) -> anyhow::Result { + self.verify_in_database(false).await?; + + let status = self.get_status().await?; + + let model_dict = self + .model + .as_mut() + .context("Pipeline must be verified to call to_dict")? + .to_dict() + .await?; + + let splitter_dict = self + .splitter + .as_mut() + .context("Pipeline must be verified to call to_dict")? 
+ .to_dict() + .await?; + + let database_data = self + .database_data + .as_ref() + .context("Pipeline must be verified to call to_dict")?; + + let parameters = self + .parameters + .as_ref() + .context("Pipeline must be verified to call to_dict")?; + + Ok(serde_json::json!({ + "id": database_data.id, + "name": self.name, + "model": *model_dict, + "splitter": *splitter_dict, + "parameters": *parameters, + "status": *Json::from(status), + }) + .into()) + } + + async fn get_pool(&self) -> anyhow::Result { + let database_url = &self + .project_info + .as_ref() + .context("Project info required to call method pipeline.get_pool()")? + .database_url; + get_or_initialize_pool(database_url).await + } + + pub(crate) async fn create_pipelines_table( + project_info: &ProjectInfo, + conn: &mut PgConnection, + ) -> anyhow::Result<()> { + sqlx::query(&query_builder!( + queries::CREATE_PIPELINES_TABLE, + &format!("{}.pipelines", project_info.name) + )) + .execute(conn) + .await?; + Ok(()) + } +} + +impl From for Pipeline { + fn from(x: models::PipelineWithModelAndSplitter) -> Self { + Self { + model: Some(x.clone().into()), + splitter: Some(x.clone().into()), + name: x.pipeline_name, + project_info: None, + database_data: Some(PipelineDatabaseData { + id: x.pipeline_id, + created_at: x.pipeline_created_at, + model_id: x.model_id, + splitter_id: x.splitter_id, + }), + parameters: Some(x.pipeline_parameters), + } + } +} diff --git a/pgml-sdks/rust/pgml/src/queries.rs b/pgml-sdks/rust/pgml/src/queries.rs index a7bc99209..bd6913a45 100644 --- a/pgml-sdks/rust/pgml/src/queries.rs +++ b/pgml-sdks/rust/pgml/src/queries.rs @@ -4,71 +4,72 @@ pub const CREATE_COLLECTIONS_TABLE: &str = r#" CREATE TABLE IF NOT EXISTS pgml.collections ( id serial8 PRIMARY KEY, - created_at timestamptz NOT NULL DEFAULT now(), + created_at timestamp NOT NULL DEFAULT now(), name text NOT NULL, active BOOLEAN DEFAULT TRUE, + project_id int8 NOT NULL REFERENCES pgml.projects ON DELETE CASCADE ON UPDATE CASCADE 
DEFERRABLE INITIALLY DEFERRED, UNIQUE (name) ); "#; -pub const CREATE_DOCUMENTS_TABLE: &str = r#" +pub const CREATE_PIPELINES_TABLE: &str = r#" CREATE TABLE IF NOT EXISTS %s ( - id serial8 PRIMARY KEY, - created_at timestamptz NOT NULL DEFAULT now(), - source_uuid uuid NOT NULL, - metadata jsonb NOT NULL DEFAULT '{}', - text text NOT NULL, - UNIQUE (source_uuid) -); -"#; - -pub const CREATE_SPLITTERS_TABLE: &str = r#" -CREATE TABLE IF NOT EXISTS %s ( - id serial8 PRIMARY KEY, created_at timestamptz NOT NULL DEFAULT now(), - name text NOT NULL, parameters jsonb NOT NULL DEFAULT '{}' + id serial8 PRIMARY KEY, + name text NOT NULL, + created_at timestamp NOT NULL DEFAULT now(), + model_id int8 NOT NULL REFERENCES pgml.models ON DELETE CASCADE ON UPDATE CASCADE DEFERRABLE INITIALLY DEFERRED, + splitter_id int8 NOT NULL REFERENCES pgml.sdk_splitters ON DELETE CASCADE ON UPDATE CASCADE DEFERRABLE INITIALLY DEFERRED, + active BOOLEAN NOT NULL DEFAULT TRUE, + parameters jsonb NOT NULL DEFAULT '{}', + UNIQUE (name) ); "#; -pub const CREATE_MODELS_TABLE: &str = r#" +pub const CREATE_DOCUMENTS_TABLE: &str = r#" CREATE TABLE IF NOT EXISTS %s ( - id serial8 PRIMARY KEY, created_at timestamptz NOT NULL DEFAULT now(), - task text NOT NULL, name text NOT NULL, - parameters jsonb NOT NULL DEFAULT '{}' + id serial8 PRIMARY KEY, + created_at timestamp NOT NULL DEFAULT now(), + source_uuid uuid NOT NULL, + metadata jsonb NOT NULL DEFAULT '{}', + text text NOT NULL, + UNIQUE (source_uuid) ); "#; -pub const CREATE_TRANSFORMS_TABLE: &str = r#"CREATE TABLE IF NOT EXISTS %s ( - table_name text PRIMARY KEY, - created_at timestamptz NOT NULL DEFAULT now(), - task text NOT NULL, - splitter_id int8 NOT NULL REFERENCES %s ON DELETE CASCADE ON UPDATE CASCADE DEFERRABLE INITIALLY DEFERRED, - model_id int8 NOT NULL REFERENCES %s ON DELETE CASCADE ON UPDATE CASCADE DEFERRABLE INITIALLY DEFERRED, - UNIQUE (task, splitter_id, model_id) +pub const CREATE_SPLITTERS_TABLE: &str = r#" +CREATE TABLE IF 
NOT EXISTS pgml.sdk_splitters ( + id serial8 PRIMARY KEY, + created_at timestamp NOT NULL DEFAULT now(), + name text NOT NULL, + parameters jsonb NOT NULL DEFAULT '{}', + project_id int8 NOT NULL REFERENCES pgml.projects ON DELETE CASCADE ON UPDATE CASCADE DEFERRABLE INITIALLY DEFERRED ); "#; pub const CREATE_CHUNKS_TABLE: &str = r#"CREATE TABLE IF NOT EXISTS %s ( - id serial8 PRIMARY KEY, created_at timestamptz NOT NULL DEFAULT now(), + id serial8 PRIMARY KEY, created_at timestamp NOT NULL DEFAULT now(), document_id int8 NOT NULL REFERENCES %s ON DELETE CASCADE ON UPDATE CASCADE DEFERRABLE INITIALLY DEFERRED, - splitter_id int8 NOT NULL REFERENCES %s ON DELETE CASCADE ON UPDATE CASCADE DEFERRABLE INITIALLY DEFERRED, + splitter_id int8 NOT NULL REFERENCES pgml.sdk_splitters ON DELETE CASCADE ON UPDATE CASCADE DEFERRABLE INITIALLY DEFERRED, chunk_index int8 NOT NULL, - chunk text NOT NULL + chunk text NOT NULL, + UNIQUE (document_id, splitter_id, chunk_index) ); "#; pub const CREATE_EMBEDDINGS_TABLE: &str = r#" CREATE TABLE IF NOT EXISTS %s ( id serial8 PRIMARY KEY, - created_at timestamptz NOT NULL DEFAULT now(), + created_at timestamp NOT NULL DEFAULT now(), chunk_id int8 NOT NULL REFERENCES %s ON DELETE CASCADE ON UPDATE CASCADE DEFERRABLE INITIALLY DEFERRED, - embedding vector(%d) NOT NULL + embedding vector(%d) NOT NULL, + UNIQUE (chunk_id) ); "#; pub const CREATE_DOCUMENTS_TSVECTORS_TABLE: &str = r#" CREATE TABLE IF NOT EXISTS %s ( id serial8 PRIMARY KEY, - created_at timestamptz NOT NULL DEFAULT now(), + created_at timestamp NOT NULL DEFAULT now(), document_id int8 NOT NULL REFERENCES %s ON DELETE CASCADE ON UPDATE CASCADE DEFERRABLE INITIALLY DEFERRED, configuration text NOT NULL, ts tsvector, @@ -80,15 +81,15 @@ CREATE TABLE IF NOT EXISTS %s ( // CREATE INDICES /////////// ///////////////////////////// pub const CREATE_INDEX: &str = r#" -CREATE INDEX CONCURRENTLY IF NOT EXISTS %s ON %s (%d); +CREATE INDEX %d IF NOT EXISTS %s ON %s (%d); "#; pub const 
CREATE_INDEX_USING_GIN: &str = r#" -CREATE INDEX CONCURRENTLY IF NOT EXISTS %s ON %s USING GIN (%d); +CREATE INDEX %d IF NOT EXISTS %s ON %s USING GIN (%d); "#; pub const CREATE_INDEX_USING_IVFFLAT: &str = r#" -CREATE INDEX CONCURRENTLY IF NOT EXISTS %s ON %s USING ivfflat (%d); +CREATE INDEX %d IF NOT EXISTS %s ON %s USING ivfflat (%d); "#; ///////////////////////////// @@ -105,99 +106,202 @@ FROM ON CONFLICT (document_id, configuration) DO UPDATE SET ts = EXCLUDED.ts; "#; +pub const GENERATE_TSVECTORS_FOR_DOCUMENT_IDS: &str = r#" +INSERT INTO %s (document_id, configuration, ts) +SELECT + id, + '%d' configuration, + to_tsvector('%d', text) ts +FROM + %s +WHERE id = ANY ($1) +ON CONFLICT (document_id, configuration) DO NOTHING; +"#; + pub const GENERATE_EMBEDDINGS: &str = r#" -WITH model as ( - SELECT - name, - parameters - from - %s - where - id = $1 -) INSERT INTO %s (chunk_id, embedding) +INSERT INTO %s (chunk_id, embedding) SELECT id, pgml.embed( text => chunk, - transformer => ( - SELECT - name - FROM - model - ), - kwargs => ( - SELECT - parameters - FROM - model - ) + transformer => $1, + kwargs => $2 ) FROM %s WHERE - splitter_id = $2 + splitter_id = $3 AND id NOT IN ( SELECT chunk_id from %s - ); + ) +ON CONFLICT (chunk_id) DO NOTHING; "#; -pub const VECTOR_SEARCH: &str = r#" -WITH query_cte AS ( +pub const GENERATE_EMBEDDINGS_FOR_CHUNK_IDS: &str = r#" +INSERT INTO %s (chunk_id, embedding) +SELECT + id, + pgml.embed( + text => chunk, + transformer => $1, + kwargs => $2 + ) +FROM + %s +WHERE + splitter_id = $3 + AND id = ANY ($4) + AND id NOT IN ( + SELECT + chunk_id + from + %s + ) +ON CONFLICT (chunk_id) DO NOTHING; +"#; + +pub const EMBED_AND_VECTOR_SEARCH: &str = r#" +WITH pipeline AS ( + SELECT + model_id + FROM + %s + WHERE + name = $1 +), +model AS ( + SELECT + hyperparams + FROM + pgml.models + WHERE + id = (SELECT model_id FROM pipeline) +), +embedding AS ( SELECT pgml.embed( - transformer => models.name, - text => $1, - kwargs => $2 - )::vector AS 
query_embedding from %s models where models.id = $3 + transformer => (SELECT hyperparams->>'name' FROM model), + text => $2, + kwargs => $3 + )::vector AS embedding ), -cte AS ( +comparison AS ( SELECT chunk_id, 1 - ( - %s.embedding <=> (SELECT query_embedding FROM query_cte) + %s.embedding <=> (SELECT embedding FROM embedding) ) AS score FROM %s ) SELECT - cte.score, + comparison.score, chunks.chunk, documents.metadata FROM - cte - INNER JOIN %s chunks ON chunks.id = cte.chunk_id + comparison + INNER JOIN %s chunks ON chunks.id = comparison.chunk_id INNER JOIN %s documents ON documents.id = chunks.document_id ORDER BY - cte.score DESC + comparison.score DESC LIMIT $4; "#; -pub const GENERATE_CHUNKS: &str = r#" -WITH splitter as ( +pub const VECTOR_SEARCH: &str = r#" +WITH comparison AS ( SELECT - id, - name, - parameters + chunk_id, + 1 - ( + %s.embedding <=> $1::vector + ) AS score FROM %s - WHERE - id = $1 -) INSERT INTO %s( +) +SELECT + comparison.score, + chunks.chunk, + documents.metadata +FROM + comparison + INNER JOIN %s chunks ON chunks.id = comparison.chunk_id + INNER JOIN %s documents ON documents.id = chunks.document_id + ORDER BY + comparison.score DESC + LIMIT + $2; +"#; + +pub const GENERATE_CHUNKS: &str = r#" +WITH splitter as ( + SELECT + name, + parameters + FROM + pgml.sdk_splitters + WHERE + id = $1 +) +INSERT INTO %s( document_id, splitter_id, chunk_index, chunk ) SELECT document_id, + $1, + (chunk).chunk_index, + (chunk).chunk +FROM ( - SELECT - id + select + id AS document_id, + pgml.chunk( + (SELECT name FROM splitter), + text, + (SELECT parameters FROM splitter) + ) AS chunk FROM - splitter - ), + ( + SELECT + id, + text + FROM + %s + WHERE + id NOT IN ( + SELECT + document_id + FROM + %s + WHERE + splitter_id = $1 + ) + ) AS documents + ) chunks +ON CONFLICT (document_id, splitter_id, chunk_index) DO NOTHING +RETURNING id +"#; + +pub const GENERATE_CHUNKS_FOR_DOCUMENT_IDS: &str = r#" +WITH splitter as ( + SELECT + name, + parameters + FROM 
+ pgml.sdk_splitters + WHERE + id = $1 +) +INSERT INTO %s( + document_id, splitter_id, chunk_index, + chunk +) +SELECT + document_id, + $1, (chunk).chunk_index, (chunk).chunk FROM @@ -205,41 +309,29 @@ FROM select id AS document_id, pgml.chunk( - ( - SELECT - name - FROM - splitter - ), + (SELECT name FROM splitter), text, - ( - SELECT - parameters - FROM - splitter - ) + (SELECT parameters FROM splitter) ) AS chunk FROM ( - select + SELECT id, text - from + FROM %s WHERE - id NOT IN ( - select + id = ANY($2) + AND id NOT IN ( + SELECT document_id - from + FROM %s - where - splitter_id = ( - select - id - from - splitter - ) + WHERE + splitter_id = $1 ) - ) as documents + ) AS documents ) chunks +ON CONFLICT (document_id, splitter_id, chunk_index) DO NOTHING +RETURNING id "#; diff --git a/pgml-sdks/rust/pgml/src/query_builder.rs b/pgml-sdks/rust/pgml/src/query_builder.rs deleted file mode 100644 index d200daffb..000000000 --- a/pgml-sdks/rust/pgml/src/query_builder.rs +++ /dev/null @@ -1,229 +0,0 @@ -use itertools::Itertools; -use pgml_macros::{custom_derive, custom_methods}; -use sea_query::{ - query::SelectStatement, Alias, CommonTableExpression, Expr, Func, Iden, JoinType, Order, - PostgresQueryBuilder, Query, QueryStatementWriter, WithClause, -}; -use sea_query_binder::SqlxBinder; - -use crate::{filter_builder, models, types::Json, Collection}; - -#[cfg(feature = "javascript")] -use crate::languages::javascript::*; - -#[derive(Clone)] -enum SIden<'a> { - Str(&'a str), - String(String), -} - -impl Iden for SIden<'_> { - fn unquoted(&self, s: &mut dyn std::fmt::Write) { - write!( - s, - "{}", - match self { - SIden::Str(s) => s, - SIden::String(s) => s.as_str(), - } - ) - .unwrap(); - } -} - -trait IntoTableNameAndSchema { - fn to_table_tuple<'b>(&self) -> (SIden<'b>, SIden<'b>); -} - -impl IntoTableNameAndSchema for String { - fn to_table_tuple<'b>(&self) -> (SIden<'b>, SIden<'b>) { - self.split('.') - .map(|s| SIden::String(s.to_string())) - .collect_tuple() - 
.expect("Malformed table name in IntoTableNameAndSchema") - } -} - -#[derive(custom_derive, Clone, Debug)] -pub struct QueryBuilder { - query: SelectStatement, - with: WithClause, - collection: Collection, -} - -#[custom_methods(limit, filter, vector_recall, to_full_string, run)] -impl QueryBuilder { - pub fn new(collection: Collection) -> Self { - Self { - query: SelectStatement::new(), - with: WithClause::new(), - collection, - } - } - - pub fn limit(mut self, limit: u64) -> Self { - self.query.limit(limit); - self - } - - pub fn filter(mut self, mut filter: Json) -> Self { - let filter = filter - .0 - .as_object_mut() - .expect("Filter must be a Json object"); - if let Some(f) = filter.remove("metadata") { - self = self.filter_metadata(f); - } - if let Some(f) = filter.remove("full_text") { - self = self.filter_full_text(f); - } - self - } - - fn filter_metadata(mut self, filter: serde_json::Value) -> Self { - let filter = filter_builder::FilterBuilder::new(filter, "documents", "metadata").build(); - self.query.cond_where(filter); - self - } - - fn filter_full_text(mut self, mut filter: serde_json::Value) -> Self { - let filter = filter - .as_object_mut() - .expect("Full text filter must be a Json object"); - let configuration = match filter.get("configuration") { - Some(config) => config.as_str().expect("Configuration must be a string"), - None => "english", - }; - let filter_text = filter - .get("text") - .expect("Filter must contain a text field") - .as_str() - .expect("Text must be a string"); - self.query - .join_as( - JoinType::InnerJoin, - self.collection - .documents_tsvectors_table_name - .to_table_tuple(), - Alias::new("documents_tsvectors"), - Expr::col((SIden::Str("documents"), SIden::Str("id"))) - .equals((SIden::Str("documents_tsvectors"), SIden::Str("document_id"))), - ) - .and_where( - Expr::col(( - SIden::Str("documents_tsvectors"), - SIden::Str("configuration"), - )) - .eq(configuration), - ) - .and_where(Expr::cust_with_values( - &format!( - 
"documents_tsvectors.ts @@ plainto_tsquery('{}', $1)", - configuration - ), - [filter_text], - )); - self - } - - pub fn vector_recall( - mut self, - query: String, - query_params: Option, - model_id: Option, - splitter_id: Option, - ) -> Self { - let query_params = match query_params { - Some(params) => params.0, - None => serde_json::json!({}), - }; - let model_id = model_id.unwrap_or(1); - let splitter_id = splitter_id.unwrap_or(1); - - let embeddings_table_name = self - .collection - .get_embeddings_table_name(model_id, splitter_id) - .expect("Error getting embeddings table name in vector_recall"); - - let mut query_cte = Query::select(); - query_cte - .expr_as( - Func::cast_as( - Func::cust(SIden::Str("pgml.embed")).args([ - Expr::cust("transformer => models.name"), - Expr::cust_with_values("text => $1", [query]), - Expr::cust_with_values("kwargs => $1", [query_params]), - ]), - Alias::new("vector"), - ), - Alias::new("query_embedding"), - ) - .from_as( - self.collection.models_table_name.to_table_tuple(), - SIden::Str("models"), - ) - .and_where(Expr::col((SIden::Str("models"), SIden::Str("id"))).eq(model_id)); - let mut query_cte = CommonTableExpression::from_select(query_cte); - query_cte.table_name(Alias::new("query_cte")); - - let mut cte = Query::select(); - cte.from_as( - embeddings_table_name.to_table_tuple(), - SIden::Str("embedding"), - ) - .columns([models::EmbeddingIden::ChunkId]) - .expr(Expr::cust( - "1 - (embedding.embedding <=> (select query_embedding from query_cte)) as score", - )); - let mut cte = CommonTableExpression::from_select(cte); - cte.table_name(Alias::new("cte")); - - let mut with_clause = WithClause::new(); - self.with = with_clause.cte(query_cte).cte(cte).to_owned(); - - self.query - .columns([ - (SIden::Str("cte"), SIden::Str("score")), - (SIden::Str("chunks"), SIden::Str("chunk")), - (SIden::Str("documents"), SIden::Str("metadata")), - ]) - .from(SIden::Str("cte")) - .join_as( - JoinType::InnerJoin, - 
self.collection.chunks_table_name.to_table_tuple(), - Alias::new("chunks"), - Expr::col((SIden::Str("chunks"), SIden::Str("id"))) - .equals((SIden::Str("cte"), SIden::Str("chunk_id"))), - ) - .join_as( - JoinType::InnerJoin, - self.collection.documents_table_name.to_table_tuple(), - Alias::new("documents"), - Expr::col((SIden::Str("documents"), SIden::Str("id"))) - .equals((SIden::Str("chunks"), SIden::Str("document_id"))), - ) - .order_by((SIden::Str("cte"), SIden::Str("score")), Order::Desc); - - self - } - - pub async fn run(self) -> anyhow::Result> { - let (sql, values) = self.query.with(self.with).build_sqlx(PostgresQueryBuilder); - let results: Vec<(f64, String, Json)> = sqlx::query_as_with(&sql, values) - .fetch_all(&self.collection.pool) - .await?; - Ok(results) - } - - // This is mostly so our SDKs in other languages have some way to debug - pub fn to_full_string(&self) -> String { - self.to_string() - } -} - -impl std::fmt::Display for QueryBuilder { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let query = self.query.clone().with(self.with.clone()); - write!(f, "{}", query.to_string(PostgresQueryBuilder)) - } -} diff --git a/pgml-sdks/rust/pgml/src/query_builder/mod.rs b/pgml-sdks/rust/pgml/src/query_builder/mod.rs new file mode 100644 index 000000000..102e40e0b --- /dev/null +++ b/pgml-sdks/rust/pgml/src/query_builder/mod.rs @@ -0,0 +1,2 @@ +mod query_builder; +pub use query_builder::*; diff --git a/pgml-sdks/rust/pgml/src/query_builder/query_builder.rs b/pgml-sdks/rust/pgml/src/query_builder/query_builder.rs new file mode 100644 index 000000000..f6e85ba04 --- /dev/null +++ b/pgml-sdks/rust/pgml/src/query_builder/query_builder.rs @@ -0,0 +1,342 @@ +use anyhow::Context; +use itertools::Itertools; +use pgml_macros::{custom_derive, custom_methods}; +use sea_query::{ + query::SelectStatement, Alias, CommonTableExpression, Expr, Func, Iden, JoinType, Order, + PostgresQueryBuilder, Query, QueryStatementWriter, WithClause, +}; +use 
sea_query_binder::SqlxBinder; +use std::borrow::Cow; + +use crate::{ + filter_builder, get_or_initialize_pool, models, pipeline::Pipeline, + remote_embeddings::build_remote_embeddings, types::Json, Collection, +}; + +#[cfg(feature = "javascript")] +use crate::languages::javascript::*; + +#[cfg(feature = "python")] +use crate::{languages::python::*, pipeline::PipelinePython, types::JsonPython}; + +#[derive(Clone)] +enum SIden<'a> { + Str(&'a str), + String(String), +} + +impl Iden for SIden<'_> { + fn unquoted(&self, s: &mut dyn std::fmt::Write) { + write!( + s, + "{}", + match self { + SIden::Str(s) => s, + SIden::String(s) => s.as_str(), + } + ) + .unwrap(); + } +} + +trait IntoTableNameAndSchema { + fn to_table_tuple<'b>(&self) -> (SIden<'b>, SIden<'b>); +} + +impl IntoTableNameAndSchema for String { + fn to_table_tuple<'b>(&self) -> (SIden<'b>, SIden<'b>) { + self.split('.') + .map(|s| SIden::String(s.to_string())) + .collect_tuple() + .expect("Malformed table name in IntoTableNameAndSchema") + } +} + +#[derive(Clone, Debug)] +struct QueryBuilderState {} + +#[derive(custom_derive, Clone, Debug)] +pub struct QueryBuilder { + query: SelectStatement, + with: WithClause, + collection: Collection, + query_string: Option, + pipeline: Option, + query_parameters: Option, +} + +#[custom_methods(limit, filter, vector_recall, to_full_string, fetch_all)] +impl QueryBuilder { + pub fn new(collection: Collection) -> Self { + Self { + query: SelectStatement::new(), + with: WithClause::new(), + collection, + query_string: None, + pipeline: None, + query_parameters: None, + } + } + + pub fn limit(mut self, limit: u64) -> Self { + self.query.limit(limit); + self + } + + pub fn filter(mut self, mut filter: Json) -> Self { + let filter = filter + .0 + .as_object_mut() + .expect("Filter must be a Json object"); + if let Some(f) = filter.remove("metadata") { + self = self.filter_metadata(f); + } + if let Some(f) = filter.remove("full_text") { + self = self.filter_full_text(f); + } + 
self + } + + fn filter_metadata(mut self, filter: serde_json::Value) -> Self { + let filter = filter_builder::FilterBuilder::new(filter, "documents", "metadata").build(); + self.query.cond_where(filter); + self + } + + fn filter_full_text(mut self, mut filter: serde_json::Value) -> Self { + let filter = filter + .as_object_mut() + .expect("Full text filter must be a Json object"); + let configuration = match filter.get("configuration") { + Some(config) => config.as_str().expect("Configuration must be a string"), + None => "english", + }; + let filter_text = filter + .get("text") + .expect("Filter must contain a text field") + .as_str() + .expect("Text must be a string"); + self.query + .join_as( + JoinType::InnerJoin, + self.collection + .documents_tsvectors_table_name + .to_table_tuple(), + Alias::new("documents_tsvectors"), + Expr::col((SIden::Str("documents"), SIden::Str("id"))) + .equals((SIden::Str("documents_tsvectors"), SIden::Str("document_id"))), + ) + .and_where( + Expr::col(( + SIden::Str("documents_tsvectors"), + SIden::Str("configuration"), + )) + .eq(configuration), + ) + .and_where(Expr::cust_with_values( + &format!( + "documents_tsvectors.ts @@ plainto_tsquery('{}', $1)", + configuration + ), + [filter_text], + )); + self + } + + pub fn vector_recall( + mut self, + query: &str, + pipeline: &Pipeline, + query_parameters: Option, + ) -> Self { + // Save these in case of failure + self.pipeline = Some(pipeline.clone()); + self.query_string = Some(query.to_owned()); + + let query_parameters = query_parameters.unwrap_or_default().0; + let embeddings_table_name = + format!("{}.{}_embeddings", self.collection.name, pipeline.name); + + // Build the pipeline CTE + let mut pipeline_cte = Query::select(); + pipeline_cte + .from_as( + self.collection.pipelines_table_name.to_table_tuple(), + SIden::Str("pipeline"), + ) + .columns([models::PipelineIden::ModelId]) + .and_where(Expr::col(models::PipelineIden::Name).eq(&pipeline.name)); + let mut pipeline_cte = 
CommonTableExpression::from_select(pipeline_cte); + pipeline_cte.table_name(Alias::new("pipeline")); + + // Build the model CTE + let mut model_cte = Query::select(); + model_cte + .from_as( + (SIden::Str("pgml"), SIden::Str("models")), + SIden::Str("model"), + ) + .columns([models::ModelIden::Hyperparams]) + .and_where(Expr::cust("id = (SELECT model_id FROM pipeline)")); + let mut model_cte = CommonTableExpression::from_select(model_cte); + model_cte.table_name(Alias::new("model")); + + // Build the embedding CTE + let mut embedding_cte = Query::select(); + embedding_cte.expr_as( + Func::cast_as( + Func::cust(SIden::Str("pgml.embed")).args([ + Expr::cust("transformer => (SELECT hyperparams->>'name' FROM model)"), + Expr::cust_with_values("text => $1", [query]), + Expr::cust_with_values("kwargs => $1", [query_parameters]), + ]), + Alias::new("vector"), + ), + Alias::new("embedding"), + ); + let mut embedding_cte = CommonTableExpression::from_select(embedding_cte); + embedding_cte.table_name(Alias::new("embedding")); + + // Build the comparison CTE + let mut comparison_cte = Query::select(); + comparison_cte + .from_as( + embeddings_table_name.to_table_tuple(), + SIden::Str("embeddings"), + ) + .columns([models::EmbeddingIden::ChunkId]) + .expr(Expr::cust( + "1 - (embeddings.embedding <=> (select embedding from embedding)) as score", + )); + let mut comparison_cte = CommonTableExpression::from_select(comparison_cte); + comparison_cte.table_name(Alias::new("comparison")); + + // Build the where clause + let mut with_clause = WithClause::new(); + self.with = with_clause + .cte(pipeline_cte) + .cte(model_cte) + .cte(embedding_cte) + .cte(comparison_cte) + .to_owned(); + + // Build the query + self.query + .columns([ + (SIden::Str("comparison"), SIden::Str("score")), + (SIden::Str("chunks"), SIden::Str("chunk")), + (SIden::Str("documents"), SIden::Str("metadata")), + ]) + .from(SIden::Str("comparison")) + .join_as( + JoinType::InnerJoin, + 
self.collection.chunks_table_name.to_table_tuple(), + Alias::new("chunks"), + Expr::col((SIden::Str("chunks"), SIden::Str("id"))) + .equals((SIden::Str("comparison"), SIden::Str("chunk_id"))), + ) + .join_as( + JoinType::InnerJoin, + self.collection.documents_table_name.to_table_tuple(), + Alias::new("documents"), + Expr::col((SIden::Str("documents"), SIden::Str("id"))) + .equals((SIden::Str("chunks"), SIden::Str("document_id"))), + ) + .order_by((SIden::Str("comparison"), SIden::Str("score")), Order::Desc); + + self + } + + pub async fn fetch_all(mut self) -> anyhow::Result> { + let pool = get_or_initialize_pool(&self.collection.database_url).await?; + + let (sql, values) = self + .query + .clone() + .with(self.with.clone()) + .build_sqlx(PostgresQueryBuilder); + let result: Result, _> = + sqlx::query_as_with(&sql, values).fetch_all(&pool).await; + + match result { + Ok(r) => Ok(r), + Err(e) => match e.as_database_error() { + Some(d) => { + if d.code() == Some(Cow::from("XX000")) { + // Explicitly get and set the model + let project_info = self.collection.get_project_info().await?; + let pipeline = self + .pipeline + .as_mut() + .context("Need pipeline to call fetch_all on query builder with remote embeddings")?; + pipeline.set_project_info(project_info); + pipeline.verify_in_database(false).await?; + let model = pipeline + .model + .as_ref() + .context("Pipeline must be verified to perform vector search with remote embeddings")?; + + let query_parameters = self.query_parameters.to_owned().unwrap_or_default(); + + let remote_embeddings = + build_remote_embeddings(model.runtime, &model.name, &query_parameters)?; + let mut embeddings = remote_embeddings + .embed(vec![self + .query_string + .to_owned() + .context("Must have query_string to call fetch_all on query_builder with remote embeddings")?]) + .await?; + let embedding = std::mem::take(&mut embeddings[0]); + + // Explicit drop required here or we can't borrow the pipeline immutably + drop(remote_embeddings); + 
let embeddings_table_name = + format!("{}.{}_embeddings", self.collection.name, pipeline.name); + + let mut comparison_cte = Query::select(); + comparison_cte + .from_as( + embeddings_table_name.to_table_tuple(), + SIden::Str("embeddings"), + ) + .columns([models::EmbeddingIden::ChunkId]) + .expr(Expr::cust_with_values( + "1 - (embeddings.embedding <=> $1::vector) as score", + [embedding], + )); + + let mut comparison_cte = CommonTableExpression::from_select(comparison_cte); + comparison_cte.table_name(Alias::new("comparison")); + let mut with_clause = WithClause::new(); + with_clause.cte(comparison_cte); + + let (sql, values) = self + .query + .clone() + .with(with_clause) + .build_sqlx(PostgresQueryBuilder); + sqlx::query_as_with(&sql, values) + .fetch_all(&pool) + .await + .map_err(|e| anyhow::anyhow!(e)) + } else { + Err(anyhow::anyhow!(e)) + } + } + None => Err(anyhow::anyhow!(e)), + }, + } + } + + // This is mostly so our SDKs in other languages have some way to debug + pub fn to_full_string(&self) -> String { + self.to_string() + } +} + +impl std::fmt::Display for QueryBuilder { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let query = self.query.clone().with(self.with.clone()); + write!(f, "{}", query.to_string(PostgresQueryBuilder)) + } +} diff --git a/pgml-sdks/rust/pgml/src/query_runner.rs b/pgml-sdks/rust/pgml/src/query_runner.rs index 8d87a8196..1db1bd813 100644 --- a/pgml-sdks/rust/pgml/src/query_runner.rs +++ b/pgml-sdks/rust/pgml/src/query_runner.rs @@ -1,13 +1,16 @@ use pgml_macros::{custom_derive, custom_methods}; -use sqlx::postgres::{PgArguments, PgPool}; +use sqlx::postgres::PgArguments; use sqlx::query::Query; use sqlx::{Postgres, Row}; -use crate::types::Json; +use crate::{get_or_initialize_pool, types::Json}; #[cfg(feature = "javascript")] use crate::languages::javascript::*; +#[cfg(feature = "python")] +use crate::{languages::python::*, types::JsonPython}; + #[derive(Clone, Debug)] enum BindValue { String(String), 
@@ -19,9 +22,9 @@ enum BindValue { #[derive(custom_derive, Clone, Debug)] pub struct QueryRunner { - pool: PgPool, query: String, bind_values: Vec<BindValue>, + database_url: Option<String>, } #[custom_methods( @@ -34,25 +37,27 @@ pub struct QueryRunner { bind_json )] impl QueryRunner { - pub fn new(query: &str, pool: PgPool) -> Self { + pub fn new(query: &str, database_url: Option<String>) -> Self { Self { - pool, query: query.to_string(), bind_values: Vec::new(), + database_url, } } pub async fn fetch_all(mut self) -> anyhow::Result<Json> { + let pool = get_or_initialize_pool(&self.database_url).await?; self.query = format!("SELECT json_agg(j) FROM ({}) j", self.query); let query = self.build_query(); - let results = query.fetch_all(&self.pool).await?; - let results = results.get(0).unwrap().get::(0); + let results = query.fetch_all(&pool).await?; + let results = results.get(0).unwrap().get::(0); Ok(Json(results)) } pub async fn execute(self) -> anyhow::Result<()> { + let pool = get_or_initialize_pool(&self.database_url).await?; let query = self.build_query(); - query.execute(&self.pool).await?; + query.execute(&pool).await?; Ok(()) } diff --git a/pgml-sdks/rust/pgml/src/remote_embeddings.rs b/pgml-sdks/rust/pgml/src/remote_embeddings.rs new file mode 100644 index 000000000..bcb84146c --- /dev/null +++ b/pgml-sdks/rust/pgml/src/remote_embeddings.rs @@ -0,0 +1,192 @@ +use reqwest::{Client, RequestBuilder}; +use sqlx::postgres::PgPool; +use std::env; +use tracing::instrument; + +use crate::{model::ModelRuntime, models, query_builder, types::Json}; + +pub fn build_remote_embeddings<'a>( + source: ModelRuntime, + model_name: &'a str, + _model_parameters: &'a Json, +) -> anyhow::Result<Box<dyn RemoteEmbeddings<'a> + Sync + Send + 'a>> { + match source { + // OpenAI endpoint for embeddings does not take any model parameters + ModelRuntime::OpenAI => Ok(Box::new(OpenAIRemoteEmbeddings::new(model_name))), + _ => Err(anyhow::anyhow!("Unknown remote embeddings source")), + } +} + +#[async_trait::async_trait] +pub trait 
RemoteEmbeddings<'a> { + fn build_request(&self) -> RequestBuilder; + fn generate_body(&self, text: Vec<String>) -> serde_json::Value; + + #[instrument(skip(self))] + async fn get_embedding_size(&self) -> anyhow::Result<i64> { + let response = self.embed(vec!["Hello, World!".to_string()]).await?; + if response.is_empty() { + anyhow::bail!("API call to get embedding size returned empty response") + } + let embedding_size = response[0].len() as i64; + Ok(embedding_size) + } + + #[instrument(skip(self, text))] + async fn embed(&self, text: Vec<String>) -> anyhow::Result<Vec<Vec<f64>>> { + let request = self.build_request().json(&self.generate_body(text)); + let response = request.send().await?; + + let response = response.json::<serde_json::Value>().await?; + self.parse_response(response) + } + + #[instrument(skip(self, pool))] + async fn get_chunks( + &self, + embeddings_table_name: &str, + chunks_table_name: &str, + splitter_id: i64, + chunk_ids: &Option<Vec<i64>>, + pool: &PgPool, + limit: Option<i64>, + ) -> anyhow::Result<Vec<models::Chunk>> { + let limit = limit.unwrap_or(1000); + + match chunk_ids { + Some(cids) => sqlx::query_as(&query_builder!( + "SELECT * FROM %s WHERE splitter_id = $1 AND id NOT IN (SELECT chunk_id FROM %s) AND id = ANY ($2) LIMIT $3", + chunks_table_name, + embeddings_table_name + )) + .bind(splitter_id) + .bind(cids) + .bind(limit) + .fetch_all(pool) + .await, + None => sqlx::query_as(&query_builder!( + "SELECT * FROM %s WHERE splitter_id = $1 AND id NOT IN (SELECT chunk_id FROM %s) LIMIT $2", + chunks_table_name, + embeddings_table_name + )) + .bind(splitter_id) + .bind(limit) + .fetch_all(pool) + .await + }.map_err(|e| anyhow::anyhow!(e)) + } + + #[instrument(skip(self, response))] + fn parse_response(&self, response: serde_json::Value) -> anyhow::Result<Vec<Vec<f64>>> { + let data = response["data"] + .as_array() + .ok_or(anyhow::anyhow!("No data in response"))?; + + let embeddings: Vec<Vec<f64>> = data + .iter() + .map(|d| { + let embedding = d["embedding"] + .as_array() + .expect("Malformed response from openai. 
Found while in parse_response"); + + embedding + .iter() + .map(|dd| dd.as_f64().unwrap()) + .collect::<Vec<f64>>() + }) + .collect(); + + Ok(embeddings) + } + + #[instrument(skip(self, pool))] + async fn generate_embeddings( + &self, + embeddings_table_name: &str, + chunks_table_name: &str, + splitter_id: i64, + chunk_ids: Option<Vec<i64>>, + pool: &PgPool, + ) -> anyhow::Result<()> { + loop { + let chunks = self + .get_chunks( + embeddings_table_name, + chunks_table_name, + splitter_id, + &chunk_ids, + pool, + None, + ) + .await?; + if chunks.is_empty() { + break; + } + let (chunk_ids, chunk_texts): (Vec<i64>, Vec<String>) = chunks + .into_iter() + .map(|chunk| (chunk.id, chunk.chunk)) + .unzip(); + let embeddings = self.embed(chunk_texts).await?; + + let query_string_values = (0..embeddings.len()) + .map(|i| format!("(${}, ${})", i * 2 + 1, i * 2 + 2)) + .collect::<Vec<String>>() + .join(","); + let query_string = format!( + "INSERT INTO %s (chunk_id, embedding) VALUES {}", + query_string_values + ); + + let query = query_builder!(query_string, embeddings_table_name); + let mut query = sqlx::query(&query); + + for i in 0..embeddings.len() { + query = query.bind(chunk_ids[i]).bind(&embeddings[i]); + } + + query.execute(pool).await?; + } + Ok(()) + } +} + +pub struct OpenAIRemoteEmbeddings<'a> { + model_name: &'a str, +} + +impl<'a> OpenAIRemoteEmbeddings<'a> { + fn new(model_name: &'a str) -> Self { + OpenAIRemoteEmbeddings { model_name } + } +} + +impl<'a> RemoteEmbeddings<'a> for OpenAIRemoteEmbeddings<'a> { + fn build_request(&self) -> RequestBuilder { + let openai_api_key = env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY is not set"); + Client::new() + .post("https://api.openai.com/v1/embeddings") + .bearer_auth(openai_api_key) + } + + fn generate_body(&self, text: Vec<String>) -> serde_json::Value { + serde_json::json!({ + "model": self.model_name, + "input": text + }) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[tokio::test] + async fn openai_remote_embeddings() -> anyhow::Result<()> { + let 
params = serde_json::json!({}).into(); + let openai_remote_embeddings = + build_remote_embeddings(ModelRuntime::OpenAI, "text-embedding-ada-002", &params)?; + let embedding_size = openai_remote_embeddings.get_embedding_size().await?; + assert!(embedding_size > 0); + Ok(()) + } +} diff --git a/pgml-sdks/rust/pgml/src/splitter.rs b/pgml-sdks/rust/pgml/src/splitter.rs new file mode 100644 index 000000000..91a3dfa2a --- /dev/null +++ b/pgml-sdks/rust/pgml/src/splitter.rs @@ -0,0 +1,170 @@ +use anyhow::Context; +use pgml_macros::{custom_derive, custom_methods}; +use sqlx::postgres::{PgConnection, PgPool}; +use tracing::instrument; + +use crate::{ + collection::ProjectInfo, + get_or_initialize_pool, models, queries, + types::{DateTime, Json}, +}; + +#[cfg(feature = "javascript")] +use crate::languages::javascript::*; + +#[cfg(feature = "python")] +use crate::{languages::python::*, types::JsonPython}; + +#[derive(Debug, Clone)] +pub(crate) struct SplitterDatabaseData { + pub id: i64, + pub created_at: DateTime, +} + +#[derive(custom_derive, Debug, Clone)] +pub struct Splitter { + pub name: String, + pub parameters: Json, + project_info: Option<ProjectInfo>, + pub(crate) database_data: Option<SplitterDatabaseData>, +} + +impl Default for Splitter { + fn default() -> Self { + Self::new(None, None) + } +} + +#[custom_methods(new)] +impl Splitter { + /// Creates a new [Splitter] + /// + /// # Arguments + /// + /// * `name` - The name of the splitter. + /// * `parameters` - The parameters to the splitter. 
Defaults to None + /// + /// # Example + /// + /// ``` + /// use pgml::Splitter; + /// let splitter = Splitter::new(Some("recursive_character".to_string()), None); + /// ``` + pub fn new(name: Option<String>, parameters: Option<Json>) -> Self { + let name = name.unwrap_or("recursive_character".to_string()); + let parameters = parameters.unwrap_or(Json(serde_json::json!({}))); + Self { + name, + parameters, + project_info: None, + database_data: None, + } + } + + #[instrument(skip(self))] + pub(crate) async fn verify_in_database(&mut self, throw_if_exists: bool) -> anyhow::Result<()> { + if self.database_data.is_none() { + let pool = self.get_pool().await?; + + let project_info = self + .project_info + .as_ref() + .expect("Cannot verify splitter without project info"); + + let splitter: Option<models::Splitter> = sqlx::query_as( + "SELECT * FROM pgml.sdk_splitters WHERE project_id = $1 AND name = $2 and parameters = $3", + ) + .bind(project_info.id) + .bind(&self.name) + .bind(&self.parameters) + .fetch_optional(&pool) + .await?; + + let splitter = if let Some(s) = splitter { + anyhow::ensure!(!throw_if_exists, "Splitter already exists in database"); + s + } else { + sqlx::query_as( + "INSERT INTO pgml.sdk_splitters (project_id, name, parameters) VALUES ($1, $2, $3) RETURNING *", + ) + .bind(project_info.id) + .bind(&self.name) + .bind(&self.parameters) + .fetch_one(&pool) + .await? 
+ }; + + self.database_data = Some(SplitterDatabaseData { + id: splitter.id, + created_at: splitter.created_at, + }); + } + Ok(()) + } + + pub(crate) async fn create_splitters_table(conn: &mut PgConnection) -> anyhow::Result<()> { + sqlx::query(queries::CREATE_SPLITTERS_TABLE) + .execute(conn) + .await?; + Ok(()) + } + + pub(crate) fn set_project_info(&mut self, project_info: ProjectInfo) { + self.project_info = Some(project_info) + } + + #[instrument(skip(self))] + pub(crate) async fn to_dict(&mut self) -> anyhow::Result { + self.verify_in_database(false).await?; + + let database_data = self + .database_data + .as_ref() + .context("Splitter must be verified to call to_dict")?; + + Ok(serde_json::json!({ + "id": database_data.id, + "created_at": database_data.created_at, + "name": self.name, + "parameters": *self.parameters, + }) + .into()) + } + + async fn get_pool(&self) -> anyhow::Result { + let database_url = &self + .project_info + .as_ref() + .context("Project info required to call method splitter.get_pool()")? 
+ .database_url; + get_or_initialize_pool(database_url).await + } +} + +impl From for Splitter { + fn from(x: models::PipelineWithModelAndSplitter) -> Self { + Self { + name: x.splitter_name, + parameters: x.splitter_parameters, + project_info: None, + database_data: Some(SplitterDatabaseData { + id: x.splitter_id, + created_at: x.splitter_created_at, + }), + } + } +} + +impl From for Splitter { + fn from(splitter: models::Splitter) -> Self { + Self { + name: splitter.name, + parameters: splitter.parameters, + project_info: None, + database_data: Some(SplitterDatabaseData { + id: splitter.id, + created_at: splitter.created_at, + }), + } + } +} diff --git a/pgml-sdks/rust/pgml/src/types.rs b/pgml-sdks/rust/pgml/src/types.rs index 33b2c9e21..aeeffcad4 100644 --- a/pgml-sdks/rust/pgml/src/types.rs +++ b/pgml-sdks/rust/pgml/src/types.rs @@ -1,16 +1,55 @@ +use pgml_macros::pgml_alias; +use serde::Serialize; +use std::ops::{Deref, DerefMut}; + +#[cfg(feature = "python")] +use crate::languages::python::*; + /// A wrapper around serde_json::Value // #[derive(sqlx::Type, sqlx::FromRow, Debug)] -#[derive(sqlx::Type, Debug, Clone)] +#[derive(pgml_alias, sqlx::Type, Debug, Clone)] #[sqlx(transparent)] pub struct Json(pub serde_json::Value); +impl Default for Json { + fn default() -> Self { + Self(serde_json::json!({})) + } +} + impl From for Json { fn from(v: serde_json::Value) -> Self { Self(v) } } +impl Deref for Json { + type Target = serde_json::Value; + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl DerefMut for Json { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +impl Serialize for Json { + fn serialize(&self, serializer: S) -> Result { + serde_json::Value::serialize(&self.0, serializer) + } +} + /// A wrapper around sqlx::types::chrono::DateTime -#[derive(sqlx::Type)] +#[derive(sqlx::Type, Debug, Clone)] #[sqlx(transparent)] -pub struct DateTime(pub sqlx::types::chrono::DateTime); +// pub struct DateTime(pub 
sqlx::types::chrono::DateTime); +pub struct DateTime(pub sqlx::types::chrono::NaiveDateTime); + +impl Serialize for DateTime { + fn serialize(&self, serializer: S) -> Result { + self.0.timestamp().serialize(serializer) + } +} diff --git a/pgml-sdks/rust/pgml/src/utils.rs b/pgml-sdks/rust/pgml/src/utils.rs index 0c86f89bb..13fcf3f90 100644 --- a/pgml-sdks/rust/pgml/src/utils.rs +++ b/pgml-sdks/rust/pgml/src/utils.rs @@ -1,3 +1,5 @@ +use indicatif::{ProgressBar, ProgressStyle}; + /// A more type flexible version of format! #[macro_export] macro_rules! query_builder { @@ -18,3 +20,17 @@ macro_rules! query_builder { query }}; } + +pub fn default_progress_spinner(size: u64) -> ProgressBar { + ProgressBar::new(size).with_style( + ProgressStyle::with_template("[{elapsed_precise}] {spinner:0.cyan/blue} {prefix}: {msg}") + .unwrap(), + ) +} + +pub fn default_progress_bar(size: u64) -> ProgressBar { + ProgressBar::new(size).with_style( + ProgressStyle::with_template("[{elapsed_precise}] {bar:40.cyan/blue} {pos:>7}/{len:7} ") + .unwrap(), + ) +}