diff --git a/README.md b/README.md
index 5624f30..9bf4932 100644
--- a/README.md
+++ b/README.md
@@ -4,7 +4,7 @@ This Vert.x module uses the https://github.com/mauricio/postgresql-async drivers
## Requirements
-* Vert.x 2.1+ (with Scala language module v1.0)
+* Vert.x 2.1+ (with Scala language module v1.0.1+)
* A working PostgreSQL or MySQL server
* For testing PostgreSQL: A 'testdb' database on a local PostgreSQL install and a user called 'vertx'
* For testing MySQL: A 'testdb' database on a local MySQL install and a user called 'root'
@@ -102,10 +102,111 @@ Creates a prepared statement and lets you fill the `?` with values.
{
"action" : "prepared",
"statement" : "SELECT * FROM some_test WHERE name=? AND money > ?",
- "values" : ["John", 1000]
+ "values" : ["Mr. Test", 15]
}
+
+### raw - Raw commands
+
+Use this action to send arbitrary commands to the database. You should be able to submit any query or insertion with this command.
+
+Here is an example for creating a table in PostgreSQL:
+
+ {
+ "action" : "raw",
+ "command" : "CREATE TABLE some_test (
+ id SERIAL,
+ name VARCHAR(255),
+ email VARCHAR(255),
+ is_male BOOLEAN,
+ age INT,
+ money FLOAT,
+ wedding_date DATE
+ );"
+ }
+
+And if you want to drop it again, you can send the following:
+
+ {
+ "action" : "raw",
+ "command" : "DROP TABLE some_test;"
+ }
+
+### Transactions
+
+These commands let you begin a transaction and send an arbitrary number of statements within the started transaction. You can then commit or rollback the transaction.
+Nested transactions are not yet possible!
+
+Remember to reply to the messages after you send the `begin` command. Look in the docs how this works (e.g. for Java: [http://vertx.io/core_manual_java.html#replying-to-messages](http://vertx.io/core_manual_java.html#replying-to-messages)).
+By replying to the messages, the module is able to send all statements within the same transaction. If you don't reply within the `timeoutTransaction` interval, the transaction will automatically fail and roll back.
+
+#### transaction begin
+
+This command starts a transaction. You get an Ok message back to which you can then reply with more statements.
+
+ {
+ "action" : "begin"
+ }
+
+#### transaction commit
+
+To commit a transaction you have to send the `commit` command.
+
+ {
+ "action" : "commit"
+ }
+
+#### transaction rollback
+
+To rollback a transaction you have to send the `rollback` command.
+
+ {
+ "action" : "rollback"
+ }
+
+#### Example for a transaction
+
+Here is a small example of how a transaction works.
+
+ {
+ "action" : "begin"
+ }
+
+This will start the transaction. You get this response:
-### transaction
+ {
+ "status" : "ok"
+ }
+
+You can then reply to this message with the commands `select`, `prepared`, `insert` and `raw`.
+A possible reply could be this:
+
+ {
+ "action" : "raw",
+ "command" : "UPDATE some_test SET email = 'foo@bar.com' WHERE id = 1"
+ }
+
+You get a reply back depending on the statement you sent. In this case the answer would be:
+
+ {
+ "status" : "ok",
+ "rows" : 1,
+ "message" : "UPDATE 1"
+ }
+
+If you want to make more statements you just have to reply to this message again with the next statement.
+When you have done all statements you can `commit` or `rollback` the transaction.
+
+ {
+ "action" : "commit"
+ }
+
+If everything worked, the last answer will be:
+
+ {
+ "status" : "ok"
+ }
+
+#### old transaction command (deprecated, use the new transaction mechanism with begin and commit)
Takes several statements and wraps them into a single transaction for the server to process. Use `statement : [...actions...]` to create such a transaction. Only `select`, `insert` and `raw` commands are allowed right now.
@@ -129,33 +230,7 @@ Takes several statements and wraps them into a single transaction for the server
}
]
}
-
-### raw - Raw commands
-
-Use this action to send arbitrary commands to the database. You should be able to do submit any query or insertion with this command.
-
-Here is an example for creating a table in PostgreSQL:
-
- {
- "action" : "raw",
- "command" : "CREATE TABLE some_test (
- id SERIAL,
- name VARCHAR(255),
- email VARCHAR(255),
- is_male BOOLEAN,
- age INT,
- money FLOAT,
- wedding_date DATE
- );"
- }
-
-And if you want to drop it again, you can send the following:
-
- {
- "action" : "raw",
- "command" : "DROP TABLE some_test;"
- }
-
+
## Planned actions
You can always use `raw` to do anything on the database. If the statement is a query, it will return its results just like a `select`.
diff --git a/build.gradle b/build.gradle
deleted file mode 100644
index e075e09..0000000
--- a/build.gradle
+++ /dev/null
@@ -1,119 +0,0 @@
-apply from: "gradle/vertx.gradle"
-
-/*
-Usage:
-
-./gradlew task_name
-
-(or gradlew.bat task_name if you have the misfortune to have to use Windows)
-
-If no task name is specified then the default task 'assemble' is run
-
-Task names are:
-
-idea - generate a skeleton IntelliJ IDEA project
-
-eclipse - generate a skeleton Eclipse IDE project
-
-assemble - builds the outputs, by default this is the module zip file. It can also include a jar file if produceJar
- in gradle.properties is set to true. Outputs are created in build/libs.
- if pullInDeps in gradle.properties is set to 'true' then the modules dependencies will be
- automatically pulled into a nested mods directory inside the module during the build
-
-copyMod - builds and copies the module to the local 'mods' directory so you can execute vertx runmod (etc)
- directly from the command line
-
-modZip - creates the module zip into build/libs
-
-clean - cleans everything up
-
-test - runs the tests. An nice html test report is created in build/reports/tests (index.html)
-
-runMod - runs the module. This is similar to executing vertx runmod from the command line except that it does
- not use the version of Vert.x installed and on the PATH to run it. Instead it uses the version of Vert.x
- that the module was compiled and tested against.
-
-runModIDEA - run the module from the project resources in IDEA. This allows you to run the module without building it
-first!
-
-runModEclipse - run the module from the project resources in Eclipse. This allows you to run the module without
-building it first!
-
-pullInDeps - pulls in all dependencies of the module into a nested module directory
-
-uploadArchives - upload the module zip file (and jar if one has been created) to Nexus. You will need to
- configure sonatypeUsername and sonatypePassword in ~/.gradle/gradle.properties.
-
-install - install any jars produced to the local Maven repository (.m2)
-
- */
-
-dependencies {
- /*
- Add your module jar dependencies here
- E.g.
- compile "com.foo:foo-lib:1.0.1" - for compile time deps - this will end up in your module too!
- testCompile "com.foo:foo-lib:1.0.1" - for test time deps
- provided "com.foo:foo-lib:1.0.1" - if you DON'T want it to be packaged in the module zip
- */
-
- provided "org.scala-lang:scala-library:$scalaVersion"
- provided "org.scala-lang:scala-compiler:$scalaVersion"
- provided "io.vertx:lang-scala:$scalaLangModVersion"
-
- compile("com.github.mauricio:postgresql-async_2.10:$asyncDriverVersion") {
- exclude group: 'org.scala-lang'
- exclude group: 'io.netty'
- }
- compile("com.github.mauricio:mysql-async_2.10:$asyncDriverVersion") {
- exclude group: 'org.scala-lang'
- exclude group: 'io.netty'
- }
-}
-
-test {
- /* Configure which tests are included
- include 'org/foo/**'
- exclude 'org/boo/**'
- */
-
-}
-
-/*
-If you're uploading stuff to Maven, Gradle needs to generate a POM.
-Please edit the details below.
- */
-def configurePom(def pom) {
- pom.project {
- name rootProject.name
- description 'Using MySQL/PostgreSQL async driver as a module for Vert.x'
- inceptionYear '2013'
- packaging produceJar == 'false' ? 'pom' : 'jar'
-
- url 'https://github.com/vert-x/mod-mysql-postgresql'
-
- developers {
- developer {
- id 'Narigo'
- name 'Joern Bernhardt'
- email 'jb@campudus.com'
- }
- }
-
- scm {
- url 'https://github.com/vert-x/mod-mysql-postgresql'
- }
-
- licenses {
- license {
- name 'The Apache Software License, Version 2.0'
- url 'http://www.apache.org/licenses/LICENSE-2.0.txt'
- distribution 'repo'
- }
- }
-
- properties {
- setProperty('project.build.sourceEncoding', 'UTF8')
- }
- }
-}
diff --git a/gradle.properties b/gradle.properties
deleted file mode 100644
index 04f3b94..0000000
--- a/gradle.properties
+++ /dev/null
@@ -1,38 +0,0 @@
-# E.g. your domain name
-modowner=io.vertx
-
-# Your module name
-modname=mod-mysql-postgresql
-
-# Your module version
-version=0.3.0-SNAPSHOT
-
-# The version of mauricios async driver
-asyncDriverVersion=0.2.12
-
-# The test timeout in seconds
-testtimeout=5
-
-# Set to true if you want module dependencies to be pulled in and nested inside the module itself
-pullInDeps=true
-
-# Set to true if you want the build to output a jar as well as a module zip file
-produceJar=false
-
-# The version of the Scala module
-scalaLangModVersion=1.0.0
-
-# The version of Scala to use
-scalaVersion=2.10.4
-
-# Gradle version
-gradleVersion=1.11
-
-# The version of Vert.x
-vertxVersion=2.1RC3
-
-# The version of Vert.x test tools
-toolsVersion=2.0.2-final
-
-# The version of JUnit
-junitVersion=4.10
diff --git a/gradle/maven.gradle b/gradle/maven.gradle
deleted file mode 100644
index 0d131ba..0000000
--- a/gradle/maven.gradle
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright 2012 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-apply plugin: 'maven'
-apply plugin: 'signing'
-
-if (!hasProperty('sonatypeUsername')) {
- ext.sonatypeUsername = ''
-}
-if (!hasProperty('sonatypePassword')) {
- ext.sonatypePassword = ''
-}
-
-// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-// maven task configuration
-
-ext.isReleaseVersion = !version.endsWith("SNAPSHOT")
-
-signing {
- required { isReleaseVersion && gradle.taskGraph.hasTask("uploadArchives") }
- sign configurations.archives
-}
-
-uploadArchives {
- group 'build'
- description = "Does a maven deploy of archives artifacts"
-
- repositories {
- mavenDeployer {
- // setUniqueVersion(false)
-
- configuration = configurations.archives
-
- repository(url: "https://oss.sonatype.org/service/local/staging/deploy/maven2/") {
- authentication(userName: sonatypeUsername, password: sonatypePassword)
- }
-
- snapshotRepository(url: "https://oss.sonatype.org/content/repositories/snapshots/") {
- authentication(userName: sonatypeUsername, password: sonatypePassword)
- }
-
- if (isReleaseVersion) {
- beforeDeployment { MavenDeployment deployment -> signing.signPom(deployment) }
- }
-
- configurePom(pom)
- }
- }
-}
-
-// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-// configuration methods
-
-
-
diff --git a/gradle/setup.gradle b/gradle/setup.gradle
deleted file mode 100644
index da1fcd7..0000000
--- a/gradle/setup.gradle
+++ /dev/null
@@ -1,5 +0,0 @@
-
-task wrapper(type: Wrapper, description: "Create a Gradle self-download wrapper") {
- group = 'Project Setup'
- gradleVersion = rootProject.gradleVersion
-}
\ No newline at end of file
diff --git a/gradle/vertx.gradle b/gradle/vertx.gradle
deleted file mode 100644
index 55b006d..0000000
--- a/gradle/vertx.gradle
+++ /dev/null
@@ -1,225 +0,0 @@
-import org.vertx.java.platform.impl.cli.Starter
-
-/*
- * Copyright 2012 the original author or authors.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-apply plugin: 'java'
-apply plugin: 'scala'
-apply plugin: 'idea'
-apply plugin: 'eclipse'
-
-def cpSeparator = System.getProperty("path.separator")
-
-// We have to explicitly load props from the user home dir - on CI we set
-// GRADLE_USER_HOME to a different dir to avoid problems with concurrent builds corrupting
-// a shared Maven local and using Gradle wrapper concurrently
-loadProperties("${System.getProperty('user.home')}/.gradle/gradle.properties")
-
-apply from: "gradle/maven.gradle"
-
-group = modowner
-archivesBaseName = modname
-
-defaultTasks = ['assemble']
-
-sourceCompatibility = '1.7'
-targetCompatibility = '1.7'
-
-project.ext.moduleName = "$modowner~$modname~$version"
-
-configurations {
- provided
- testCompile.extendsFrom provided
-}
-
-repositories {
- if (System.getenv("VERTX_DISABLE_MAVENLOCAL") == null) {
- // We don't want to use mavenLocal when running on CI - mavenLocal is only useful in Gradle for
- // publishing artifacts locally for development purposes - maven local is also not threadsafe when there
- // are concurrent builds
- mavenLocal()
- }
- maven { url 'https://oss.sonatype.org/content/repositories/snapshots' }
- mavenCentral()
-}
-
-dependencies {
- provided "io.vertx:vertx-core:$vertxVersion"
- provided "io.vertx:vertx-platform:$vertxVersion"
- testCompile "junit:junit:$junitVersion"
- testCompile "io.vertx:testtools:$toolsVersion"
-}
-
-// This sets up the classpath for the script itself
-buildscript {
-
- repositories {
- if (System.getenv("VERTX_DISABLE_MAVENLOCAL") == null) {
- // We don't want to use mavenLocal when running on CI - mavenLocal is only useful in Gradle for
- // publishing artifacts locally for development purposes - maven local is also not threadsafe when there
- // are concurrent builds
- mavenLocal()
- }
- maven { url 'https://oss.sonatype.org/content/repositories/snapshots' }
- mavenCentral()
- }
-
- dependencies {
- classpath "io.vertx:vertx-core:$vertxVersion"
- classpath "io.vertx:vertx-platform:$vertxVersion"
- classpath "io.vertx:vertx-hazelcast:$vertxVersion"
- classpath files(['src/main/resources'])
- }
-}
-
-sourceSets {
- main {
- compileClasspath = compileClasspath + configurations.provided
- }
-}
-
-task copyMod( type:Copy, dependsOn: 'classes', description: 'Assemble the module into the local mods directory' ) {
- into "build/mods/$moduleName"
- from compileJava
- from compileScala
- from 'src/main/resources'
- into( 'lib' ) {
- from configurations.compile
- }
-}
-
-task modZip( type: Zip, dependsOn: 'pullInDeps', description: 'Package the module .zip file') {
- group = 'vert.x'
- classifier = "mod"
- description = "Assembles a vert.x module"
- destinationDir = project.file('build/libs')
- archiveName = "${modname}-${version}" + ".zip"
- from copyMod
-}
-
-task sourceJar(type: Jar) {
- description = 'Builds a source jar artifact suitable for maven deployment.'
- classifier = 'sources'
- from sourceSets.main.java
-}
-
-task javadocJar(type: Jar) {
- description = 'Builds a javadoc jar artifact suitable for maven deployment.'
- classifier = 'javadoc'
- from javadoc.destinationDir
-}
-javadocJar.dependsOn javadoc
-
-build.dependsOn sourceJar, javadocJar
-
-artifacts {
- archives sourceJar, javadocJar, modZip
-}
-
-
-test {
- dependsOn copyMod
-
- // Make sure tests are always run!
- outputs.upToDateWhen { false }
-
- // Show output
- testLogging.showStandardStreams = true
-
- testLogging { exceptionFormat "full" }
-
- systemProperty 'vertx.mods', "build/mods"
-}
-
-task init(description: 'Create module link and CP file') << {
- setSysProps()
- doInit()
-}
-
-task runMod(description: 'Run the module') << {
- setSysProps()
- System.setProperty("vertx.langs.scala", "io.vertx~lang-scala~${scalaLangModVersion}:org.vertx.scala.platform.impl.ScalaVerticleFactory")
- // We also init here - this means for single module builds the user doesn't have to explicitly init -
- // they can just do runMod
- doInit()
- args = ['runmod', moduleName]
- def args2 = runModArgs.split("\\s+")
- args.addAll(args2)
- Starter.main(args as String[])
-}
-
-def doInit() {
- File cpFile = new File("vertx_classpath.txt")
- if (!cpFile.exists()) {
- cpFile.createNewFile();
- String defaultCp =
- "src/main/resources\r\n" +
- "bin\r\n" +
- "out/production/${project.name}\r\n" +
- "out/test/${project.name}";
- cpFile << defaultCp;
- }
- def args = ['create-module-link', moduleName]
- Starter.main(args as String[])
-}
-
-task pullInDeps(dependsOn: copyMod, description: 'Pull in all the module dependencies for the module into the nested mods directory') << {
- if (pullInDeps == 'true') {
- setSysProps()
- def args = ['pulldeps', moduleName]
- Starter.main(args as String[])
- }
-}
-
-task fatJar(dependsOn: modZip, description: 'Creates a fat executable jar which contains everything needed to run the module') << {
- if (createFatJar == 'true') {
- setSysProps()
- def args = ['fatjar', moduleName, '-d', 'build/libs']
- Starter.main(args as String[])
- }
-}
-
-def setSysProps() {
- System.setProperty("vertx.clusterManagerFactory", "org.vertx.java.spi.cluster.impl.hazelcast.HazelcastClusterManagerFactory")
- String modsDir = System.getenv("VERTX_MODS")
- if (modsDir == null) {
- modsDir = "build/mods";
- }
- System.setProperty("vertx.mods", modsDir)
-}
-
-def loadProperties(String sourceFileName) {
- def config = new Properties()
- def propFile = new File(sourceFileName)
- if (propFile.canRead()) {
- config.load(new FileInputStream(propFile))
- for (Map.Entry property in config) {
- project.ext[property.key] = property.value;
- }
- }
-}
-
-// Map the 'provided' dependency configuration to the appropriate IDEA visibility scopes.
-plugins.withType(IdeaPlugin) {
- idea {
- module {
- scopes.PROVIDED.plus += configurations.provided
- scopes.COMPILE.minus += configurations.provided
- scopes.TEST.minus += configurations.provided
- scopes.RUNTIME.minus += configurations.provided
- }
- }
-}
diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar
deleted file mode 100644
index 42d9b0e..0000000
Binary files a/gradle/wrapper/gradle-wrapper.jar and /dev/null differ
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
deleted file mode 100644
index 9a6dea0..0000000
--- a/gradle/wrapper/gradle-wrapper.properties
+++ /dev/null
@@ -1,6 +0,0 @@
-#Mon Jan 28 08:12:12 GMT 2013
-distributionBase=GRADLE_USER_HOME
-distributionPath=wrapper/dists
-zipStoreBase=GRADLE_USER_HOME
-zipStorePath=wrapper/dists
-distributionUrl=http\://services.gradle.org/distributions/gradle-1.11-bin.zip
diff --git a/gradlew b/gradlew
deleted file mode 100755
index 4bd46b0..0000000
--- a/gradlew
+++ /dev/null
@@ -1,166 +0,0 @@
-#!/bin/bash
-
-##############################################################################
-##
-## Gradle start up script for UN*X
-##
-##############################################################################
-
-# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
-DEFAULT_JVM_OPTS=""
-
-APP_NAME="Gradle"
-APP_BASE_NAME=`basename "$0"`
-
-# Use the maximum available, or set MAX_FD != -1 to use that value.
-MAX_FD="maximum"
-
-warn ( ) {
- echo "$*"
-}
-
-die ( ) {
- echo
- echo "$*"
- echo
- exit 1
-}
-
-# OS specific support (must be 'true' or 'false').
-cygwin=false
-msys=false
-darwin=false
-case "`uname`" in
- CYGWIN* )
- cygwin=true
- ;;
- Darwin* )
- darwin=true
- ;;
- MINGW* )
- msys=true
- ;;
-esac
-
-# For Cygwin, ensure paths are in UNIX format before anything is touched.
-if $cygwin ; then
- [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
-fi
-
-# Attempt to set APP_HOME
-# Resolve links: $0 may be a link
-PRG="$0"
-# Need this for relative symlinks.
-while [ -h "$PRG" ] ; do
- ls=`ls -ld "$PRG"`
- link=`expr "$ls" : '.*-> \(.*\)$'`
- if expr "$link" : '/.*' > /dev/null; then
- PRG="$link"
- else
- PRG=`dirname "$PRG"`"/$link"
- fi
-done
-SAVED="`pwd`"
-cd "`dirname \"$PRG\"`/"
-APP_HOME="`pwd -P`"
-cd "$SAVED"
-
-CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
-
-# Determine the Java command to use to start the JVM.
-if [ -n "$JAVA_HOME" ] ; then
- if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
- # IBM's JDK on AIX uses strange locations for the executables
- JAVACMD="$JAVA_HOME/jre/sh/java"
- else
- JAVACMD="$JAVA_HOME/bin/java"
- fi
- if [ ! -x "$JAVACMD" ] ; then
- die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
-
-Please set the JAVA_HOME variable in your environment to match the
-location of your Java installation."
- fi
-else
- JAVACMD="java"
- which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
-
-Please set the JAVA_HOME variable in your environment to match the
-location of your Java installation."
-fi
-
-# Increase the maximum file descriptors if we can.
-if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
- MAX_FD_LIMIT=`ulimit -H -n`
- if [ $? -eq 0 ] ; then
- if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
- MAX_FD="$MAX_FD_LIMIT"
- fi
- ulimit -n $MAX_FD
- if [ $? -ne 0 ] ; then
- warn "Could not set maximum file descriptor limit: $MAX_FD"
- fi
- else
-        warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
- fi
-fi
-
-# For Darwin, add options to specify how the application appears in the dock
-if $darwin; then
- GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
-fi
-
-# For Cygwin, switch paths to Windows format before running java
-if $cygwin ; then
- APP_HOME=`cygpath --path --mixed "$APP_HOME"`
- CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
-
- # We build the pattern for arguments to be converted via cygpath
- ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
- SEP=""
- for dir in $ROOTDIRSRAW ; do
- ROOTDIRS="$ROOTDIRS$SEP$dir"
- SEP="|"
- done
- OURCYGPATTERN="(^($ROOTDIRS))"
- # Add a user-defined pattern to the cygpath arguments
- if [ "$GRADLE_CYGPATTERN" != "" ] ; then
- OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
- fi
- # Now convert the arguments - kludge to limit ourselves to /bin/sh
- i=0
- for arg in "$@" ; do
- CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
- CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
-
- if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
- eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
- else
- eval `echo args$i`="\"$arg\""
- fi
- i=$((i+1))
- done
- case $i in
- (0) set -- ;;
- (1) set -- "$args0" ;;
- (2) set -- "$args0" "$args1" ;;
- (3) set -- "$args0" "$args1" "$args2" ;;
- (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
- (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
- (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
- (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
- (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
- (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
- esac
-fi
-
-# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
-function splitJvmOpts() {
- JVM_OPTS=("$@")
-}
-eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
-JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
-
-# Don't use daemon or the cwd will be set to the install directory of the daemon and screw up any vert.x
-# Path adjustments for file operations or sendFile
-exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
diff --git a/gradlew.bat b/gradlew.bat
deleted file mode 100755
index 8a0b282..0000000
--- a/gradlew.bat
+++ /dev/null
@@ -1,90 +0,0 @@
-@if "%DEBUG%" == "" @echo off
-@rem ##########################################################################
-@rem
-@rem Gradle startup script for Windows
-@rem
-@rem ##########################################################################
-
-@rem Set local scope for the variables with windows NT shell
-if "%OS%"=="Windows_NT" setlocal
-
-@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
-set DEFAULT_JVM_OPTS=
-
-set DIRNAME=%~dp0
-if "%DIRNAME%" == "" set DIRNAME=.
-set APP_BASE_NAME=%~n0
-set APP_HOME=%DIRNAME%
-
-@rem Find java.exe
-if defined JAVA_HOME goto findJavaFromJavaHome
-
-set JAVA_EXE=java.exe
-%JAVA_EXE% -version >NUL 2>&1
-if "%ERRORLEVEL%" == "0" goto init
-
-echo.
-echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
-echo.
-echo Please set the JAVA_HOME variable in your environment to match the
-echo location of your Java installation.
-
-goto fail
-
-:findJavaFromJavaHome
-set JAVA_HOME=%JAVA_HOME:"=%
-set JAVA_EXE=%JAVA_HOME%/bin/java.exe
-
-if exist "%JAVA_EXE%" goto init
-
-echo.
-echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
-echo.
-echo Please set the JAVA_HOME variable in your environment to match the
-echo location of your Java installation.
-
-goto fail
-
-:init
-@rem Get command-line arguments, handling Windowz variants
-
-if not "%OS%" == "Windows_NT" goto win9xME_args
-if "%@eval[2+2]" == "4" goto 4NT_args
-
-:win9xME_args
-@rem Slurp the command line arguments.
-set CMD_LINE_ARGS=
-set _SKIP=2
-
-:win9xME_args_slurp
-if "x%~1" == "x" goto execute
-
-set CMD_LINE_ARGS=%*
-goto execute
-
-:4NT_args
-@rem Get arguments from the 4NT Shell from JP Software
-set CMD_LINE_ARGS=%$
-
-:execute
-@rem Setup the command line
-
-set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
-
-@rem Execute Gradle
-"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
-
-:end
-@rem End local scope for the variables with windows NT shell
-if "%ERRORLEVEL%"=="0" goto mainEnd
-
-:fail
-rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
-rem the _cmd.exe /c_ return code!
-if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
-exit /b 1
-
-:mainEnd
-if "%OS%"=="Windows_NT" endlocal
-
-:omega
diff --git a/project/VertxScalaBuild.scala b/project/VertxScalaBuild.scala
new file mode 100644
index 0000000..167db95
--- /dev/null
+++ b/project/VertxScalaBuild.scala
@@ -0,0 +1,185 @@
+import java.nio.charset.StandardCharsets
+
+import sbt.Keys._
+import sbt._
+
+object Variables {
+ val org = "io.vertx"
+ val name = "mod-mysql-postgresql"
+ val version = "0.3.0-SNAPSHOT"
+ val scalaVersion = "2.10.4"
+ val crossScalaVersions = Seq("2.10.4", "2.11.2")
+ val description = "Fully async MySQL / PostgreSQL module for Vert.x"
+
+ val vertxVersion = "2.1.2"
+ val testtoolsVersion = "2.0.3-final"
+ val hamcrestVersion = "1.3"
+ val junitInterfaceVersion = "0.10"
+ val vertxLangScalaVersion = "1.1.0-M1"
+ val asyncDriverVersion = "0.2.15"
+
+  val pomExtra =
+    <inceptionYear>2013</inceptionYear>
+    <url>http://vertx.io</url>
+    <licenses>
+      <license>
+        <name>Apache License, Version 2.0</name>
+        <url>http://www.apache.org/licenses/LICENSE-2.0.html</url>
+        <distribution>repo</distribution>
+      </license>
+    </licenses>
+    <scm>
+      <connection>scm:git:git://github.com/vert-x/mod-mysql-postgresql.git</connection>
+      <developerConnection>scm:git:ssh://git@github.com/vert-x/mod-mysql-postgresql.git</developerConnection>
+      <url>https://github.com/vert-x/mod-mysql-postgresql</url>
+    </scm>
+    <developers>
+      <developer>
+        <id>Narigo</id>
+        <name>Joern Bernhardt</name>
+        <email>jb@campudus.com</email>
+      </developer>
+      <developer>
+        <id>Zwergal</id>
+        <name>Max Stemplinger</name>
+        <email>ms@campudus.com</email>
+      </developer>
+    </developers>
+
+}
+
+object Dependencies {
+
+ import Variables._
+
+ val test = List(
+ "io.vertx" % "testtools" % testtoolsVersion % "test",
+ "org.hamcrest" % "hamcrest-library" % hamcrestVersion % "test",
+ "com.novocode" % "junit-interface" % junitInterfaceVersion % "test"
+ )
+
+ val compile = List(
+ "io.vertx" % "vertx-core" % vertxVersion % "provided",
+ "io.vertx" % "vertx-platform" % vertxVersion % "provided",
+ "io.vertx" %% "lang-scala" % vertxLangScalaVersion % "provided",
+ "com.github.mauricio" %% "postgresql-async" % asyncDriverVersion % "compile" excludeAll(
+ ExclusionRule(organization = "org.scala-lang"),
+ ExclusionRule(organization = "io.netty"),
+ ExclusionRule(organization = "org.slf4j")
+ ),
+ "com.github.mauricio" %% "mysql-async" % asyncDriverVersion % "compile" excludeAll(
+ ExclusionRule(organization = "org.scala-lang"),
+ ExclusionRule(organization = "io.netty"),
+ ExclusionRule(organization = "org.slf4j")
+ )
+ ) ::: test
+
+}
+
+object VertxScalaBuild extends Build {
+
+ lazy val project = Project(
+ "project",
+ file("."),
+ settings = Seq(
+ organization := Variables.org,
+ name := Variables.name,
+ version := Variables.version,
+ scalaVersion := Variables.scalaVersion,
+ crossScalaVersions := Variables.crossScalaVersions,
+ description := Variables.description,
+ copyModTask,
+ zipModTask,
+ libraryDependencies := Dependencies.compile,
+ // libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-compiler" % _),
+ // Fork JVM to allow Scala in-flight compilation tests to load the Scala interpreter
+ fork in Test := true,
+ // Vert.x tests are not designed to run in paralell
+ parallelExecution in Test := false,
+ // Adjust test system properties so that scripts are found
+ javaOptions in Test += "-Dvertx.test.resources=src/test/scripts",
+ // Adjust test modules directory
+ javaOptions in Test += "-Dvertx.mods=target/mods",
+ // Set the module name for tests
+ javaOptions in Test += s"-Dvertx.modulename=${organization.value}~${name.value}_${getMajor(scalaVersion.value)}~${version.value}",
+ resourceGenerators in Compile += Def.task {
+ val file = (resourceManaged in Compile).value / "langs.properties"
+ val contents = s"scala=io.vertx~lang-scala_${getMajor(scalaVersion.value)}~${Variables.vertxLangScalaVersion}:org.vertx.scala.platform.impl.ScalaVerticleFactory\n.scala=scala\n"
+ IO.write(file, contents, StandardCharsets.UTF_8)
+ Seq(file)
+ }.taskValue,
+ copyMod <<= copyMod dependsOn (copyResources in Compile),
+ (test in Test) <<= (test in Test) dependsOn copyMod,
+ zipMod <<= zipMod dependsOn copyMod,
+ (packageBin in Compile) <<= (packageBin in Compile) dependsOn copyMod,
+ // Publishing settings
+ publishMavenStyle := true,
+ pomIncludeRepository := { _ => false},
+ publishTo <<= version { (v: String) =>
+ val sonatype = "https://oss.sonatype.org/"
+ if (v.trim.endsWith("SNAPSHOT"))
+ Some("Sonatype Snapshots" at sonatype + "content/repositories/snapshots")
+ else
+ Some("Sonatype Releases" at sonatype + "service/local/staging/deploy/maven2")
+ },
+ pomExtra := Variables.pomExtra
+ )
+ ).settings(addArtifact(Artifact(Variables.name, "zip", "zip", "mod"), zipMod).settings: _*)
+
+ val copyMod = TaskKey[Unit]("copy-mod", "Assemble the module into the local mods directory")
+ val zipMod = TaskKey[File]("zip-mod", "Package the module .zip file")
+
+ lazy val copyModTask = copyMod := {
+ implicit val log = streams.value.log
+ val modOwner = organization.value
+ val modName = name.value
+ val modVersion = version.value
+ val scalaMajor = getMajor(scalaVersion.value)
+ val moduleName = s"$modOwner~${modName}_$scalaMajor~$modVersion"
+ log.info("Create module " + moduleName)
+ val moduleDir = target.value / "mods" / moduleName
+ createDirectory(moduleDir)
+ copyDirectory((classDirectory in Compile).value, moduleDir)
+ copyDirectory((resourceDirectory in Compile).value, moduleDir)
+ val libDir = moduleDir / "lib"
+ createDirectory(libDir)
+ // Get the runtime classpath to get all dependencies except provided ones
+ (managedClasspath in Runtime).value foreach { classpathEntry =>
+ copyClasspathFile(classpathEntry, libDir)
+ }
+ }
+
+ lazy val zipModTask = zipMod := {
+ implicit val log = streams.value.log
+ val modOwner = organization.value
+ val modName = name.value
+ val modVersion = version.value
+ val scalaMajor = getMajor(scalaVersion.value)
+ val moduleName = s"$modOwner~${modName}_$scalaMajor~$modVersion"
+ log.info("Create ZIP module " + moduleName)
+ val moduleDir = target.value / "mods" / moduleName
+ val zipFile = target.value / "zips" / s"$moduleName.zip"
+ IO.zip(allSubpaths(moduleDir), zipFile)
+ zipFile
+ }
+
+ private def getMajor(version: String): String = version.substring(0, version.lastIndexOf('.'))
+
+ private def createDirectory(dir: File)(implicit log: Logger): Unit = {
+ log.debug(s"Create directory $dir")
+ IO.createDirectory(dir)
+ }
+
+ private def copyDirectory(source: File, target: File)(implicit log: Logger): Unit = {
+ log.debug(s"Copy $source to $target")
+ IO.copyDirectory(source, target, overwrite = true)
+ }
+
+ private def copyClasspathFile(cpEntry: Attributed[File], libDir: File)(implicit log: Logger): Unit = {
+ val sourceFile = cpEntry.data
+ val targetFile = libDir / sourceFile.getName
+ log.debug(s"Copy $sourceFile to $targetFile")
+ IO.copyFile(sourceFile, targetFile)
+ }
+
+}
diff --git a/sbt b/sbt
new file mode 100755
index 0000000..08e5882
--- /dev/null
+++ b/sbt
@@ -0,0 +1,525 @@
+#!/usr/bin/env bash
+#
+# A more capable sbt runner, coincidentally also called sbt.
+# Author: Paul Phillips
+
+# todo - make this dynamic
+declare -r sbt_release_version="0.13.6"
+declare -r sbt_unreleased_version="0.13.6"
+declare -r buildProps="project/build.properties"
+
+declare sbt_jar sbt_dir sbt_create sbt_version
+declare scala_version sbt_explicit_version
+declare verbose noshare batch trace_level log_level
+declare sbt_saved_stty debugUs
+
+echoerr () { echo >&2 "$@"; }
+vlog () { [[ -n "$verbose" ]] && echoerr "$@"; }
+
+# spaces are possible, e.g. sbt.version = 0.13.0
+build_props_sbt () {
+ [[ -r "$buildProps" ]] && \
+ grep '^sbt\.version' "$buildProps" | tr '=' ' ' | awk '{ print $2; }'
+}
+
+update_build_props_sbt () {
+ local ver="$1"
+ local old="$(build_props_sbt)"
+
+ [[ -r "$buildProps" ]] && [[ "$ver" != "$old" ]] && {
+ perl -pi -e "s/^sbt\.version\b.*\$/sbt.version=${ver}/" "$buildProps"
+ grep -q '^sbt.version[ =]' "$buildProps" || printf "\nsbt.version=%s\n" "$ver" >> "$buildProps"
+
+ vlog "!!!"
+ vlog "!!! Updated file $buildProps setting sbt.version to: $ver"
+ vlog "!!! Previous value was: $old"
+ vlog "!!!"
+ }
+}
+
+set_sbt_version () {
+ sbt_version="${sbt_explicit_version:-$(build_props_sbt)}"
+ [[ -n "$sbt_version" ]] || sbt_version=$sbt_release_version
+ export sbt_version
+}
+
+# restore stty settings (echo in particular)
+onSbtRunnerExit() {
+ [[ -n "$sbt_saved_stty" ]] || return
+ vlog ""
+ vlog "restoring stty: $sbt_saved_stty"
+ stty "$sbt_saved_stty"
+ unset sbt_saved_stty
+}
+
+# save stty and trap exit, to ensure echo is reenabled if we are interrupted.
+trap onSbtRunnerExit EXIT
+sbt_saved_stty="$(stty -g 2>/dev/null)"
+vlog "Saved stty: $sbt_saved_stty"
+
+# this seems to cover the bases on OSX, and someone will
+# have to tell me about the others.
+get_script_path () {
+ local path="$1"
+ [[ -L "$path" ]] || { echo "$path" ; return; }
+
+ local target="$(readlink "$path")"
+ if [[ "${target:0:1}" == "/" ]]; then
+ echo "$target"
+ else
+ echo "${path%/*}/$target"
+ fi
+}
+
+die() {
+ echo "Aborting: $@"
+ exit 1
+}
+
+make_url () {
+ version="$1"
+
+ case "$version" in
+ 0.7.*) echo "http://simple-build-tool.googlecode.com/files/sbt-launch-0.7.7.jar" ;;
+ 0.10.* ) echo "$sbt_launch_repo/org.scala-tools.sbt/sbt-launch/$version/sbt-launch.jar" ;;
+ 0.11.[12]) echo "$sbt_launch_repo/org.scala-tools.sbt/sbt-launch/$version/sbt-launch.jar" ;;
+ *) echo "$sbt_launch_repo/org.scala-sbt/sbt-launch/$version/sbt-launch.jar" ;;
+ esac
+}
+
+init_default_option_file () {
+ local overriding_var="${!1}"
+ local default_file="$2"
+ if [[ ! -r "$default_file" && "$overriding_var" =~ ^@(.*)$ ]]; then
+ local envvar_file="${BASH_REMATCH[1]}"
+ if [[ -r "$envvar_file" ]]; then
+ default_file="$envvar_file"
+ fi
+ fi
+ echo "$default_file"
+}
+
+declare -r cms_opts="-XX:+CMSClassUnloadingEnabled -XX:+UseConcMarkSweepGC"
+declare -r jit_opts="-XX:ReservedCodeCacheSize=256m -XX:+TieredCompilation"
+declare -r default_jvm_opts_common="-Xms512m -Xmx1536m -Xss2m $jit_opts $cms_opts"
+declare -r noshare_opts="-Dsbt.global.base=project/.sbtboot -Dsbt.boot.directory=project/.boot -Dsbt.ivy.home=project/.ivy"
+declare -r latest_28="2.8.2"
+declare -r latest_29="2.9.3"
+declare -r latest_210="2.10.4"
+declare -r latest_211="2.11.2"
+
+declare -r script_path="$(get_script_path "$BASH_SOURCE")"
+declare -r script_name="${script_path##*/}"
+
+# some non-read-onlies set with defaults
+declare java_cmd="java"
+declare sbt_opts_file="$(init_default_option_file SBT_OPTS .sbtopts)"
+declare jvm_opts_file="$(init_default_option_file JVM_OPTS .jvmopts)"
+declare sbt_launch_repo="http://typesafe.artifactoryonline.com/typesafe/ivy-releases"
+
+# pull -J and -D options to give to java.
+declare -a residual_args
+declare -a java_args
+declare -a scalac_args
+declare -a sbt_commands
+
+# args to jvm/sbt via files or environment variables
+declare -a extra_jvm_opts extra_sbt_opts
+
+# if set, use JAVA_HOME over java found in path
+[[ -e "$JAVA_HOME/bin/java" ]] && java_cmd="$JAVA_HOME/bin/java"
+
+# directory to store sbt launchers
+declare sbt_launch_dir="$HOME/.sbt/launchers"
+[[ -d "$sbt_launch_dir" ]] || mkdir -p "$sbt_launch_dir"
+[[ -w "$sbt_launch_dir" ]] || sbt_launch_dir="$(mktemp -d -t sbt_extras_launchers.XXXXXX)"
+
+java_version () {
+ local version=$("$java_cmd" -version 2>&1 | grep -e 'java version' | awk '{ print $3 }' | tr -d \")
+ vlog "Detected Java version: $version"
+ echo "${version:2:1}"
+}
+
+# MaxPermSize critical on pre-8 jvms but incurs noisy warning on 8+
+default_jvm_opts () {
+ local v="$(java_version)"
+ if [[ $v -ge 8 ]]; then
+ echo "$default_jvm_opts_common"
+ else
+ echo "-XX:MaxPermSize=384m $default_jvm_opts_common"
+ fi
+}
+
+build_props_scala () {
+ if [[ -r "$buildProps" ]]; then
+ versionLine="$(grep '^build.scala.versions' "$buildProps")"
+ versionString="${versionLine##build.scala.versions=}"
+ echo "${versionString%% .*}"
+ fi
+}
+
+execRunner () {
+ # print the arguments one to a line, quoting any containing spaces
+ vlog "# Executing command line:" && {
+ for arg; do
+ if [[ -n "$arg" ]]; then
+ if printf "%s\n" "$arg" | grep -q ' '; then
+ printf >&2 "\"%s\"\n" "$arg"
+ else
+ printf >&2 "%s\n" "$arg"
+ fi
+ fi
+ done
+ vlog ""
+ }
+
+  [[ -n "$batch" ]] && exec </dev/null
+  exec "$@"
+}
+
+jar_url ()  { make_url "$1"; }
+jar_file () { echo "$sbt_launch_dir/$1/sbt-launch.jar"; }
+
+download_url () {
+  local url="$1"
+  local jar="$2"
+
+  echoerr "Downloading sbt launcher for $sbt_version:"
+  echoerr "  From  $url"
+  echoerr "    To  $jar"
+
+  mkdir -p "${jar%/*}" && {
+    if which curl >/dev/null; then
+ curl --fail --silent "$url" --output "$jar"
+ elif which wget >/dev/null; then
+ wget --quiet -O "$jar" "$url"
+ fi
+ } && [[ -r "$jar" ]]
+}
+
+acquire_sbt_jar () {
+ sbt_url="$(jar_url "$sbt_version")"
+ sbt_jar="$(jar_file "$sbt_version")"
+
+ [[ -r "$sbt_jar" ]] || download_url "$sbt_url" "$sbt_jar"
+}
+
+usage () {
+  cat <<EOM
+Usage: $script_name [options]
+
+  -h | -help         print this message
+  -v                 verbose operation (this runner is chattier)
+  -d, -w, -q         aliases for --debug, --warn, --error (q means quiet)
+  -x                 debug this script
+  -trace <level>     display stack traces with a max of <level> frames (default: -1, traces suppressed)
+  -debug-inc         enable debugging log for the incremental compiler
+  -no-colors         disable ANSI color codes
+  -sbt-create        start sbt even if current directory contains no sbt project
+  -sbt-dir   <path>  path to global settings/plugins directory (default: ~/.sbt/<version>)
+  -sbt-boot  <path>  path to shared boot directory (default: ~/.sbt/boot in 0.11+)
+  -ivy       <path>  path to local Ivy repository (default: ~/.ivy2)
+  -no-share          use all local caches; no sharing
+  -offline           put sbt in offline mode
+  -jvm-debug <port>  Turn on JVM debugging, open at the given port.
+  -batch             Disable interactive mode
+  -prompt <expr>     Set the sbt prompt; in expr, 's' is the State and 'e' is Extracted
+
+  # sbt version (default: sbt.version from $buildProps if present, otherwise $sbt_release_version)
+  -sbt-force-latest         force the use of the latest release of sbt: $sbt_release_version
+  -sbt-version <version>    use the specified version of sbt (default: $sbt_release_version)
+  -sbt-dev                  use the latest pre-release version of sbt: $sbt_unreleased_version
+  -sbt-jar <path>           use the specified jar as the sbt launcher
+  -sbt-launch-dir <path>    directory to hold sbt launchers (default: ~/.sbt/launchers)
+  -sbt-launch-repo <url>    repo url for downloading sbt launcher jar (default: $sbt_launch_repo)
+
+  # scala version (default: as chosen by sbt)
+  -28                       use $latest_28
+  -29                       use $latest_29
+  -210                      use $latest_210
+  -211                      use $latest_211
+  -scala-home <path>        use the scala build at the specified directory
+  -scala-version <version>  use the specified version of scala
+  -binary-version <version> use the specified scala version when searching for dependencies
+
+  # java version (default: java from PATH, currently $(java -version 2>&1 | grep version))
+  -java-home <path>         alternate JAVA_HOME
+
+  # passing options to the jvm - note it does NOT use JAVA_OPTS due to pollution
+  # The default set is used if JVM_OPTS is unset and no -jvm-opts file is found
+  <default>          $(default_jvm_opts)
+  JVM_OPTS           environment variable holding either the jvm args directly, or
+                     the reference to a file containing jvm args if given path is prepended by '@' (e.g. '@/etc/jvmopts')
+                     Note: "@"-file is overridden by local '.jvmopts' or '-jvm-opts' argument.
+  -jvm-opts <path>   file containing jvm args (if not given, .jvmopts in project root is used if present)
+  -Dkey=val          pass -Dkey=val directly to the jvm
+  -J-X               pass option -X directly to the jvm (-J is stripped)
+
+  # passing options to sbt, OR to this runner
+  SBT_OPTS           environment variable holding either the sbt args directly, or
+                     the reference to a file containing sbt args if given path is prepended by '@' (e.g. '@/etc/sbtopts')
+                     Note: "@"-file is overridden by local '.sbtopts' or '-sbt-opts' argument.
+  -sbt-opts <path>   file containing sbt args (if not given, .sbtopts in project root is used if present)
+  -S-X               add -X to sbt's scalacOptions (-S is stripped)
+EOM
+}
+
+addJava () {
+ vlog "[addJava] arg = '$1'"
+ java_args=( "${java_args[@]}" "$1" )
+}
+addSbt () {
+ vlog "[addSbt] arg = '$1'"
+ sbt_commands=( "${sbt_commands[@]}" "$1" )
+}
+setThisBuild () {
+ vlog "[addBuild] args = '$@'"
+ local key="$1" && shift
+ addSbt "set $key in ThisBuild := $@"
+}
+
+addScalac () {
+ vlog "[addScalac] arg = '$1'"
+ scalac_args=( "${scalac_args[@]}" "$1" )
+}
+addResidual () {
+ vlog "[residual] arg = '$1'"
+ residual_args=( "${residual_args[@]}" "$1" )
+}
+addResolver () {
+ addSbt "set resolvers += $1"
+}
+addDebugger () {
+ addJava "-Xdebug"
+ addJava "-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=$1"
+}
+setScalaVersion () {
+ [[ "$1" == *"-SNAPSHOT" ]] && addResolver 'Resolver.sonatypeRepo("snapshots")'
+ addSbt "++ $1"
+}
+
+process_args ()
+{
+ require_arg () {
+ local type="$1"
+ local opt="$2"
+ local arg="$3"
+
+ if [[ -z "$arg" ]] || [[ "${arg:0:1}" == "-" ]]; then
+ die "$opt requires <$type> argument"
+ fi
+ }
+ while [[ $# -gt 0 ]]; do
+ case "$1" in
+ -h|-help) usage; exit 1 ;;
+ -v) verbose=true && shift ;;
+ -d) addSbt "--debug" && shift ;;
+ -w) addSbt "--warn" && shift ;;
+ -q) addSbt "--error" && shift ;;
+ -x) debugUs=true && shift ;;
+ -trace) require_arg integer "$1" "$2" && trace_level="$2" && shift 2 ;;
+ -ivy) require_arg path "$1" "$2" && addJava "-Dsbt.ivy.home=$2" && shift 2 ;;
+ -no-colors) addJava "-Dsbt.log.noformat=true" && shift ;;
+ -no-share) noshare=true && shift ;;
+ -sbt-boot) require_arg path "$1" "$2" && addJava "-Dsbt.boot.directory=$2" && shift 2 ;;
+ -sbt-dir) require_arg path "$1" "$2" && sbt_dir="$2" && shift 2 ;;
+ -debug-inc) addJava "-Dxsbt.inc.debug=true" && shift ;;
+ -offline) addSbt "set offline := true" && shift ;;
+ -jvm-debug) require_arg port "$1" "$2" && addDebugger "$2" && shift 2 ;;
+ -batch) batch=true && shift ;;
+ -prompt) require_arg "expr" "$1" "$2" && setThisBuild shellPrompt "(s => { val e = Project.extract(s) ; $2 })" && shift 2 ;;
+
+ -sbt-create) sbt_create=true && shift ;;
+ -sbt-jar) require_arg path "$1" "$2" && sbt_jar="$2" && shift 2 ;;
+ -sbt-version) require_arg version "$1" "$2" && sbt_explicit_version="$2" && shift 2 ;;
+ -sbt-force-latest) sbt_explicit_version="$sbt_release_version" && shift ;;
+ -sbt-dev) sbt_explicit_version="$sbt_unreleased_version" && shift ;;
+ -sbt-launch-dir) require_arg path "$1" "$2" && sbt_launch_dir="$2" && shift 2 ;;
+ -sbt-launch-repo) require_arg path "$1" "$2" && sbt_launch_repo="$2" && shift 2 ;;
+ -scala-version) require_arg version "$1" "$2" && setScalaVersion "$2" && shift 2 ;;
+ -binary-version) require_arg version "$1" "$2" && setThisBuild scalaBinaryVersion "\"$2\"" && shift 2 ;;
+ -scala-home) require_arg path "$1" "$2" && setThisBuild scalaHome "Some(file(\"$2\"))" && shift 2 ;;
+ -java-home) require_arg path "$1" "$2" && java_cmd="$2/bin/java" && shift 2 ;;
+ -sbt-opts) require_arg path "$1" "$2" && sbt_opts_file="$2" && shift 2 ;;
+ -jvm-opts) require_arg path "$1" "$2" && jvm_opts_file="$2" && shift 2 ;;
+
+ -D*) addJava "$1" && shift ;;
+ -J*) addJava "${1:2}" && shift ;;
+ -S*) addScalac "${1:2}" && shift ;;
+ -28) setScalaVersion "$latest_28" && shift ;;
+ -29) setScalaVersion "$latest_29" && shift ;;
+ -210) setScalaVersion "$latest_210" && shift ;;
+ -211) setScalaVersion "$latest_211" && shift ;;
+
+ *) addResidual "$1" && shift ;;
+ esac
+ done
+}
+
+# process the direct command line arguments
+process_args "$@"
+
+# skip #-styled comments and blank lines
+readConfigFile() {
+ while read line; do
+ [[ $line =~ ^# ]] || [[ -z $line ]] || echo "$line"
+ done < "$1"
+}
+
+# if there are file/environment sbt_opts, process again so we
+# can supply args to this runner
+if [[ -r "$sbt_opts_file" ]]; then
+ vlog "Using sbt options defined in file $sbt_opts_file"
+ while read opt; do extra_sbt_opts+=("$opt"); done < <(readConfigFile "$sbt_opts_file")
+elif [[ -n "$SBT_OPTS" && ! ("$SBT_OPTS" =~ ^@.*) ]]; then
+ vlog "Using sbt options defined in variable \$SBT_OPTS"
+ extra_sbt_opts=( $SBT_OPTS )
+else
+ vlog "No extra sbt options have been defined"
+fi
+
+[[ -n "${extra_sbt_opts[*]}" ]] && process_args "${extra_sbt_opts[@]}"
+
+# reset "$@" to the residual args
+set -- "${residual_args[@]}"
+argumentCount=$#
+
+# set sbt version
+set_sbt_version
+
+# only exists in 0.12+
+setTraceLevel() {
+ case "$sbt_version" in
+ "0.7."* | "0.10."* | "0.11."* ) echoerr "Cannot set trace level in sbt version $sbt_version" ;;
+ *) setThisBuild traceLevel $trace_level ;;
+ esac
+}
+
+# set scalacOptions if we were given any -S opts
+[[ ${#scalac_args[@]} -eq 0 ]] || addSbt "set scalacOptions in ThisBuild += \"${scalac_args[@]}\""
+
+# Update build.properties on disk to set explicit version - sbt gives us no choice
+[[ -n "$sbt_explicit_version" ]] && update_build_props_sbt "$sbt_explicit_version"
+vlog "Detected sbt version $sbt_version"
+
+[[ -n "$scala_version" ]] && vlog "Overriding scala version to $scala_version"
+
+# no args - alert them there's stuff in here
+(( argumentCount > 0 )) || {
+ vlog "Starting $script_name: invoke with -help for other options"
+ residual_args=( shell )
+}
+
+# verify this is an sbt dir or -create was given
+[[ -r ./build.sbt || -d ./project || -n "$sbt_create" ]] || {
+  cat <<EOM
+$(pwd) doesn't appear to be an sbt project.
+If you want to start sbt anyway, run:
+  $0 -sbt-create
+EOM
+  exit 1
+}
+
+# no jar? download it.
+[[ -r "$sbt_jar" ]] || acquire_sbt_jar || {
+  # still no jar? uh-oh.
+  echo "Download failed. Obtain the jar manually and place it at: $sbt_jar"
+  exit 1
+}
+
+if [[ -n "$noshare" ]]; then
+  for opt in $noshare_opts; do
+    addJava "$opt"
+  done
+else
+  case "$sbt_version" in
+    "0.7."* | "0.10."* | "0.11."* | "0.12."* )
+      [[ -n "$sbt_dir" ]] || {
+        sbt_dir="$HOME/.sbt/$sbt_version"
+        vlog "Using $sbt_dir as sbt dir, -sbt-dir to override."
+      }
+    ;;
+  esac
+
+  if [[ -n "$sbt_dir" ]]; then
+    addJava "-Dsbt.global.base=$sbt_dir"
+  fi
+fi
+
+if [[ -r "$jvm_opts_file" ]]; then
+  vlog "Using jvm options defined in file $jvm_opts_file"
+  while read opt; do extra_jvm_opts+=("$opt"); done < <(readConfigFile "$jvm_opts_file")
+elif [[ -n "$JVM_OPTS" && ! ("$JVM_OPTS" =~ ^@.*) ]]; then
+  vlog "Using jvm options defined in \$JVM_OPTS variable"
+  extra_jvm_opts=( $JVM_OPTS )
+else
+  vlog "Using default jvm options"
+  extra_jvm_opts=( $(default_jvm_opts) )
+fi
+
+# traceLevel is 0.12+
+[[ -n "$trace_level" ]] && setTraceLevel
+
+main () {
+  execRunner "$java_cmd" \
+    "${extra_jvm_opts[@]}" \
+    "${java_args[@]}" \
+    -jar "$sbt_jar" \
+    "${sbt_commands[@]}" \
+    "${residual_args[@]}"
+}
+
+main
diff --git a/src/main/scala/io/vertx/asyncsql/Starter.scala b/src/main/scala/io/vertx/asyncsql/Starter.scala
--- a/src/main/scala/io/vertx/asyncsql/Starter.scala
+++ b/src/main/scala/io/vertx/asyncsql/Starter.scala
@@ -?,? +?,? @@ NOTE(review): original hunk header and leading context lost in extraction; reconstructed from the surrounding '+' lines — verify against upstream commit
-    case "postgresql" => new PostgreSqlConnectionHandler(this, configuration, maxPoolSize)
- case "mysql" => new MySqlConnectionHandler(this, configuration, maxPoolSize)
+ case "postgresql" => new PostgreSqlConnectionHandler(this, configuration, maxPoolSize, transactionTimeout)
+ case "mysql" => new MySqlConnectionHandler(this, configuration, maxPoolSize, transactionTimeout)
}
vertx.eventBus.registerHandler(address, handler)
diff --git a/src/main/scala/io/vertx/asyncsql/database/ConnectionHandler.scala b/src/main/scala/io/vertx/asyncsql/database/ConnectionHandler.scala
index 3763f52..3c06c3e 100644
--- a/src/main/scala/io/vertx/asyncsql/database/ConnectionHandler.scala
+++ b/src/main/scala/io/vertx/asyncsql/database/ConnectionHandler.scala
@@ -1,10 +1,10 @@
package io.vertx.asyncsql.database
import scala.collection.JavaConverters.iterableAsScalaIterableConverter
-import scala.concurrent.Future
+import scala.concurrent.{Promise, Future}
import org.vertx.scala.core.json.{JsonElement, JsonArray, JsonObject, Json}
import org.vertx.scala.core.logging.Logger
-import com.github.mauricio.async.db.{ Configuration, Connection, QueryResult, RowData }
+import com.github.mauricio.async.db.{Configuration, Connection, QueryResult, RowData}
import com.github.mauricio.async.db.postgresql.exceptions.GenericDatabaseException
import io.vertx.asyncsql.database.pool.AsyncConnectionPool
import org.vertx.scala.mods.ScalaBusMod
@@ -13,34 +13,106 @@ import org.vertx.scala.core.Vertx
import org.vertx.scala.platform.Container
import io.vertx.asyncsql.Starter
import org.vertx.scala.mods.ScalaBusMod.Receive
+import scala.util.{Failure, Success}
trait ConnectionHandler extends ScalaBusMod {
val verticle: Starter
+
def dbType: String
+
val config: Configuration
val maxPoolSize: Int
+ val transactionTimeout: Long
lazy val vertx: Vertx = verticle.vertx
lazy val container: Container = verticle.container
lazy val logger: Logger = verticle.logger
lazy val pool = AsyncConnectionPool(verticle, dbType, maxPoolSize, config)
- def transactionStart: String = "START TRANSACTION;"
- def transactionEnd: String = "COMMIT;"
+ def transactionBegin: String = "BEGIN;"
+
+ def transactionCommit: String = "COMMIT;"
+
+ def transactionRollback: String = "ROLLBACK;"
+
def statementDelimiter: String = ";"
import org.vertx.scala.core.eventbus._
- override def receive: Receive = (msg: Message[JsonObject]) => {
- case "select" => select(msg.body)
- case "insert" => insert(msg.body)
- case "prepared" => AsyncReply(sendWithPool(prepared(msg.body)))
- case "transaction" => transaction(msg.body)
- case "raw" => AsyncReply(sendWithPool(rawCommand(msg.body.getString("command"))))
+
+ private def receiver(withConnectionFn: (Connection => Future[SyncReply]) => Future[SyncReply]): Receive = (msg: Message[JsonObject]) => {
+ def sendAsyncWithPool(fn: Connection => Future[QueryResult]) = AsyncReply(sendWithPool(withConnectionFn)(fn))
+
+ {
+ case "select" => sendAsyncWithPool(rawCommand(selectCommand(msg.body())))
+ case "insert" => sendAsyncWithPool(rawCommand(insertCommand(msg.body())))
+ case "prepared" => sendAsyncWithPool(prepared(msg.body()))
+ case "raw" => sendAsyncWithPool(rawCommand(msg.body().getString("command")))
+ }
+ }
+
+ private def regularReceive: Receive = { msg: Message[JsonObject] =>
+ receiver(pool.withConnection)(msg).orElse {
+ case "begin" => beginTransaction(msg)
+ case "transaction" => transaction(pool.withConnection)(msg.body())
+ }
+ }
+
+ override def receive: Receive = regularReceive
+
+ private def mapRepliesToTransactionReceive(c: Connection): BusModReply => BusModReply = {
+ case AsyncReply(receiveEndFuture) => AsyncReply(receiveEndFuture.map(mapRepliesToTransactionReceive(c)))
+ case Ok(v, None) => Ok(v, Some(ReceiverWithTimeout(inTransactionReceive(c), transactionTimeout, () => failTransaction(c))))
+ case Error(msg, id, v, None) => Error(msg, id, v, Some(ReceiverWithTimeout(inTransactionReceive(c), transactionTimeout, () => failTransaction(c))))
+ case x => x
+ }
+
+ private def inTransactionReceive(c: Connection): Receive = { msg: Message[JsonObject] =>
+ def withConnection[T](fn: Connection => Future[T]): Future[T] = fn(c)
+
+ receiver(withConnection)(msg).andThen({
+ case x: BusModReply => mapRepliesToTransactionReceive(c)(x)
+ case x => x
+ }).orElse {
+ case "rollback" => rollbackTransaction(c)
+ case "commit" => commitTransaction(c)
+ }
+ }
+
+ protected def beginTransaction(msg: Message[JsonObject]) = AsyncReply {
+ pool.take().flatMap { c =>
+ c.sendQuery(transactionBegin) map { _ =>
+ Ok(Json.obj(), Some(ReceiverWithTimeout(inTransactionReceive(c), transactionTimeout, () => failTransaction(c))))
+ }
+ }
+ }
+
+ private def endQuery(c: Connection, s: String) = c.sendQuery(s) andThen {
+ case _ => pool.giveBack(c)
+ }
+
+ protected def failTransaction(c: Connection) = {
+ logger.warn("Rolling back transaction, due to no reply")
+ endQuery(c, transactionRollback)
+ }
+
+ protected def rollbackTransaction(c: Connection) = AsyncReply {
+ logger.info("rolling back transaction!")
+ endQuery(c, transactionRollback).map(_ => Ok()).recover {
+ case ex => Error("Could not rollback transaction", "ROLLBACK_FAILED", Json.obj("exception" -> ex))
+ }
+ }
+
+ protected def commitTransaction(c: Connection) = AsyncReply {
+ logger.info("ending transaction with commit!")
+ endQuery(c, transactionCommit).map(_ => Ok()).recover {
+ case ex => Error("Could not commit transaction", "COMMIT_FAILED", Json.obj("exception" -> ex))
+ }
}
def close() = pool.close()
protected def escapeField(str: String): String = "\"" + str.replace("\"", "\"\"") + "\""
+
protected def escapeString(str: String): String = "'" + str.replace("'", "''") + "'"
protected def escapeValue(v: Any): String = v match {
@@ -58,10 +130,6 @@ trait ConnectionHandler extends ScalaBusMod {
}
}
- protected def select(json: JsonObject): AsyncReply = AsyncReply {
- sendWithPool(rawCommand(selectCommand(json)))
- }
-
protected def insertCommand(json: JsonObject): String = {
val table = json.getString("table")
val fields = json.getArray("fields").asScala
@@ -79,51 +147,63 @@ trait ConnectionHandler extends ScalaBusMod {
.append(listOfLines.mkString(",")).toString
}
- protected def insert(json: JsonObject): AsyncReply = AsyncReply {
- sendWithPool(rawCommand(insertCommand(json)))
+ sealed trait CommandType {
+ val query: Connection => Future[QueryResult]
}
- sealed trait CommandType { val query: Connection => Future[QueryResult] }
- case class Raw(stmt: String) extends CommandType { val query = rawCommand(stmt) }
- case class Prepared(json: JsonObject) extends CommandType { val query = prepared(json) }
+ case class Raw(stmt: String) extends CommandType {
+ val query = rawCommand(stmt)
+ }
- protected def transaction(json: JsonObject): AsyncReply = AsyncReply(pool.withConnection({ c: Connection =>
- logger.info("TRANSACTION-JSON: " + json.encodePrettily())
+ case class Prepared(json: JsonObject) extends CommandType {
+ val query = prepared(json)
+ }
- Option(json.getArray("statements")) match {
- case Some(statements) => c.inTransaction { conn: Connection =>
- val futures = statements.asScala.map {
- case js: JsonObject =>
- js.getString("action") match {
- case "select" => Raw(selectCommand(js))
- case "insert" => Raw(insertCommand(js))
- case "prepared" => Prepared(js)
- case "raw" => Raw(js.getString("command"))
+ protected def transaction(withConnection: (Connection => Future[SyncReply]) => Future[SyncReply])(json: JsonObject): AsyncReply = AsyncReply(withConnection({
+ c: Connection =>
+ logger.info("TRANSACTION-JSON: " + json.encodePrettily())
+
+ Option(json.getArray("statements")) match {
+ case Some(statements) => c.inTransaction {
+ conn: Connection =>
+ val futures = statements.asScala.map {
+ case js: JsonObject =>
+ js.getString("action") match {
+ case "select" => Raw(selectCommand(js))
+ case "insert" => Raw(insertCommand(js))
+ case "prepared" => Prepared(js)
+ case "raw" => Raw(js.getString("command"))
+ }
+ case _ => throw new IllegalArgumentException("'statements' needs JsonObjects!")
+ }
+ val f = futures.foldLeft(Future[Any]()) {
+ case (fut, cmd) => fut flatMap (_ => cmd.query(conn))
}
- case _ => throw new IllegalArgumentException("'statements' needs JsonObjects!")
+ f map (_ => Ok(Json.obj()))
}
- val f = futures.foldLeft(Future[Any]()) { case (fut, cmd) => fut flatMap (_ => cmd.query(conn)) }
- f map (_ => Ok(Json.obj()))
+ case None => throw new IllegalArgumentException("No 'statements' field in request!")
}
- case None => throw new IllegalArgumentException("No 'statements' field in request!")
- }
}))
-
- protected def sendWithPool(fn: Connection => Future[QueryResult]): Future[SyncReply] = pool.withConnection({ c: Connection =>
- fn(c) map buildResults recover {
- case x: GenericDatabaseException =>
- Error(x.errorMessage.message)
- case x =>
- Error(x.getMessage())
- }
+
+ protected def sendWithPool(withConnection: (Connection => Future[SyncReply]) => Future[SyncReply])(fn: Connection => Future[QueryResult]): Future[SyncReply] = withConnection({
+ c: Connection =>
+ fn(c) map buildResults recover {
+ case x: GenericDatabaseException =>
+ Error(x.errorMessage.message)
+ case x =>
+ Error(x.getMessage())
+ }
})
- protected def prepared(json: JsonObject): Connection => Future[QueryResult] = { c: Connection =>
- c.sendPreparedStatement(json.getString("statement"), json.getArray("values").toArray())
+ protected def prepared(json: JsonObject): Connection => Future[QueryResult] = {
+ c: Connection =>
+ c.sendPreparedStatement(json.getString("statement"), json.getArray("values").toArray())
}
- protected def rawCommand(command: String): Connection => Future[QueryResult] = { c: Connection => c.sendQuery(command) }
+ protected def rawCommand(command: String): Connection => Future[QueryResult] = {
+ c: Connection => c.sendQuery(command)
+ }
private def buildResults(qr: QueryResult): SyncReply = {
val result = new JsonObject()
@@ -132,12 +212,14 @@ trait ConnectionHandler extends ScalaBusMod {
qr.rows match {
case Some(resultSet) =>
- val fields = (new JsonArray() /: resultSet.columnNames) { (arr, name) =>
- arr.addString(name)
+ val fields = (new JsonArray() /: resultSet.columnNames) {
+ (arr, name) =>
+ arr.addString(name)
}
- val rows = (new JsonArray() /: resultSet) { (arr, rowData) =>
- arr.add(rowDataToJsonArray(rowData))
+ val rows = (new JsonArray() /: resultSet) {
+ (arr, rowData) =>
+ arr.add(rowDataToJsonArray(rowData))
}
result.putArray("fields", fields)
diff --git a/src/main/scala/io/vertx/asyncsql/database/MySqlConnectionHandler.scala b/src/main/scala/io/vertx/asyncsql/database/MySqlConnectionHandler.scala
index 36c97b2..f5b9d76 100644
--- a/src/main/scala/io/vertx/asyncsql/database/MySqlConnectionHandler.scala
+++ b/src/main/scala/io/vertx/asyncsql/database/MySqlConnectionHandler.scala
@@ -4,7 +4,7 @@ import org.vertx.scala.platform.Verticle
import com.github.mauricio.async.db.Configuration
import io.vertx.asyncsql.Starter
-class MySqlConnectionHandler(val verticle: Starter, val config: Configuration, val maxPoolSize: Int) extends ConnectionHandler {
+class MySqlConnectionHandler(val verticle: Starter, val config: Configuration, val maxPoolSize: Int, val transactionTimeout: Long) extends ConnectionHandler {
override val dbType: String = "mysql"
override protected def escapeField(str: String): String = "`" + str.replace("`", "\\`") + "`"
diff --git a/src/main/scala/io/vertx/asyncsql/database/PostgreSqlConnectionHandler.scala b/src/main/scala/io/vertx/asyncsql/database/PostgreSqlConnectionHandler.scala
index 1b5337d..45d40d1 100644
--- a/src/main/scala/io/vertx/asyncsql/database/PostgreSqlConnectionHandler.scala
+++ b/src/main/scala/io/vertx/asyncsql/database/PostgreSqlConnectionHandler.scala
@@ -4,6 +4,6 @@ import org.vertx.scala.platform.Verticle
import com.github.mauricio.async.db.Configuration
import io.vertx.asyncsql.Starter
-class PostgreSqlConnectionHandler(val verticle: Starter, val config: Configuration, val maxPoolSize: Int) extends ConnectionHandler {
+class PostgreSqlConnectionHandler(val verticle: Starter, val config: Configuration, val maxPoolSize: Int, val transactionTimeout: Long) extends ConnectionHandler {
override val dbType: String = "postgresql"
}
\ No newline at end of file
diff --git a/src/test/scala/io/vertx/asyncsql/test/BaseSqlTests.scala b/src/test/scala/io/vertx/asyncsql/test/BaseSqlTests.scala
index 98fc3e7..e7256c6 100644
--- a/src/test/scala/io/vertx/asyncsql/test/BaseSqlTests.scala
+++ b/src/test/scala/io/vertx/asyncsql/test/BaseSqlTests.scala
@@ -1,46 +1,110 @@
package io.vertx.asyncsql.test
-import scala.concurrent.Future
-import org.vertx.scala.core.json.{Json, JsonArray}
+import scala.concurrent.{Future, Promise}
+import org.vertx.scala.core.json.{JsonObject, Json, JsonArray}
import org.vertx.testtools.VertxAssert._
import org.junit.Test
+import scala.util.{Success, Failure, Try}
+import org.vertx.scala.core.eventbus.Message
+import org.vertx.scala.core.FunctionConverters._
trait BaseSqlTests {
this: SqlTestVerticle =>
- private def withTable[X](tableName: String)(fn: => Future[X]) = {
- (for {
- _ <- createTable(tableName)
- sth <- fn
- _ <- dropTable(tableName)
- } yield sth) recoverWith {
- case x =>
- dropTable(tableName) map (_ => throw x)
- }
+ protected def isMysql: Boolean = false
+
+ protected def failedTest: PartialFunction[Throwable, Unit] = {
+ case ex: Throwable =>
+ logger.warn("failed in test", ex)
+ fail("test failed. see warning above")
}
- private def asyncTableTest[X](tableName: String)(fn: => Future[X]) = asyncTest(withTable(tableName)(fn))
+ private def sendWithTimeout(json: JsonObject): Future[(Message[JsonObject], JsonObject)] = {
+ val p = Promise[(Message[JsonObject], JsonObject)]()
+ vertx.eventBus.sendWithTimeout(address, json, 5000, {
+ case Success(reply) => p.success(reply, reply.body())
+ case Failure(ex) => p.failure(ex)
+ }: Try[Message[JsonObject]] => Unit)
+ p.future
+ }
- private def typeTestInsert[X](fn: => Future[X]) = asyncTableTest("some_test") {
- expectOk(insert("some_test",
- new JsonArray( """["name","email","is_male","age","money","wedding_date"]"""),
- new JsonArray( """[["Mr. Test","test@example.com",true,15,167.31,"2024-04-01"],
- ["Ms Test2","test2@example.com",false,43,167.31,"1997-12-24"]]"""))) flatMap {
- _ =>
- fn
- }
+ private def replyWithTimeout(msg: Message[JsonObject], json: JsonObject): Future[(Message[JsonObject], JsonObject)] = {
+ val p = Promise[(Message[JsonObject], JsonObject)]()
+ msg.replyWithTimeout(json, 5000, {
+ case Success(reply) => p.success(reply, reply.body())
+ case Failure(ex) => p.failure(ex)
+ }: Try[Message[JsonObject]] => Unit)
+ p.future
}
- @Test
- def simpleConnection(): Unit = asyncTest {
- expectOk(raw("SELECT 0")) map {
- reply =>
- val res = reply.getArray("results")
- assertEquals(1, res.size())
- assertEquals(0, res.get[JsonArray](0).get[Number](0).intValue())
- }
+ private def checkOkay(json: JsonObject)(msg: (Message[JsonObject], JsonObject)): (Message[JsonObject], JsonObject) = {
+ assertEquals(s"should get 'ok' back when sending ${json.encode()}, but got ${msg._2.encode()}",
+ "ok", msg._2.getString("status"))
+ (msg._1, msg._2)
+ }
+
+ private def checkError(json: JsonObject)(msg: (Message[JsonObject], JsonObject)): (Message[JsonObject], JsonObject) = {
+ assertEquals(s"should get an 'error' back when sending ${json.encode()}, but got ${msg._2.encode()}",
+ "error", msg._2.getString("status"))
+ (msg._1, msg._2)
+ }
+
+ protected def sendOk(json: JsonObject): Future[(Message[JsonObject], JsonObject)] =
+ sendWithTimeout(json) map checkOkay(json)
+
+ protected def sendFail(json: JsonObject): Future[(Message[JsonObject], JsonObject)] =
+ sendWithTimeout(json) map checkError(json)
+
+ private def replyOk(msg: Message[JsonObject], json: JsonObject): Future[(Message[JsonObject], JsonObject)] =
+ replyWithTimeout(msg, json) map checkOkay(json)
+
+ private def replyFail(msg: Message[JsonObject], json: JsonObject): Future[(Message[JsonObject], JsonObject)] =
+ replyWithTimeout(msg, json) map checkError(json)
+
+ private def setupTableTest(): Future[_] = for {
+ (msg, reply) <- sendOk(raw(createTableStatement("some_test")))
+ } yield {
+ assertEquals(0, reply.getInteger("rows"))
}
+ private def setupTypeTest(): Future[_] = for {
+ _ <- setupTableTest()
+ (msg, reply) <- sendOk(insert("some_test",
+ Json.fromArrayString( """["name","email","is_male","age","money","wedding_date"]"""),
+ Json.fromArrayString(
+ """[["Mr. Test","test@example.com",true,15,167.31,"2024-04-01"],
+ | ["Ms Test2","test2@example.com",false,43,167.31,"1997-12-24"]]""".stripMargin)))
+ } yield ()
+
+ private def checkSameFields(arr1: JsonArray, arr2: JsonArray) = {
+ import scala.collection.JavaConversions._
+ arr1.foreach(elem => assertTrue(arr2.contains(elem)))
+ }
+
+ private def checkMrTest(mrTest: JsonArray) = {
+ assertEquals("Mr. Test", mrTest.get[String](0))
+ assertEquals("test@example.com", mrTest.get[String](1))
+ assertTrue(mrTest.get[Any](2) match {
+ case b: Boolean => b
+ case i: Number => i.intValue() == 1
+ case x => false
+ })
+ assertEquals(15, mrTest.get[Number](3).intValue())
+ assertEquals(167.31, mrTest.get[Number](4).doubleValue(), 0.0001)
+ // FIXME check date conversion
+ // assertEquals("2024-04-01", mrTest.get[JsonObject](5))
+ }
+
+ @Test
+ def simpleConnection(): Unit = (for {
+ (msg, reply) <- sendOk(raw("SELECT 0"))
+ } yield {
+ val res = reply.getArray("results")
+ assertEquals(1, res.size())
+ assertEquals(0, res.get[JsonArray](0).get[Number](0).intValue())
+ testComplete()
+ }) recover failedTest
+
@Test
def poolSize(): Unit = asyncTest {
val n = 10
@@ -62,223 +126,345 @@ trait BaseSqlTests {
}
@Test
- def multipleFields(): Unit = asyncTest {
- expectOk(raw("SELECT 1 a, 0 b")) map {
- reply =>
- val res = reply.getArray("results")
- assertEquals(1, res.size())
- val firstElem = res.get[JsonArray](0)
- assertEquals(1, firstElem.get[Number](0).intValue())
- assertEquals(0, firstElem.get[Number](1).intValue())
- }
- }
+ def multipleFields(): Unit = (for {
+ (msg, reply) <- sendOk(raw("SELECT 1 a, 0 b"))
+ } yield {
+ val res = reply.getArray("results")
+ assertEquals(1, res.size())
+ val firstElem = res.get[JsonArray](0)
+ assertEquals(1, firstElem.get[Number](0).intValue())
+ assertEquals(0, firstElem.get[Number](1).intValue())
+ testComplete()
+ }) recover failedTest
@Test
- def multipleFieldsOrder(): Unit = typeTestInsert {
- import scala.collection.JavaConverters._
- expectOk(raw("SELECT is_male, age, email, money, name FROM some_test WHERE is_male = true")) map {
- reply =>
- val receivedFields = reply.getArray("fields")
- val results = reply.getArray("results").get[JsonArray](0)
-
- assertEquals(1, reply.getInteger("rows"))
-
- val columnNamesList = receivedFields.asScala.toList
-
- assertEquals("Mr. Test", results.get(columnNamesList.indexOf("name")))
- assertEquals("test@example.com", results.get(columnNamesList.indexOf("email")))
- assertEquals(15, results.get[Int](columnNamesList.indexOf("age")))
- assertTrue(results.get[Any](columnNamesList.indexOf("is_male")) match {
- case b: Boolean => b
- case i: Number => i.intValue() == 1
- case x => false
- })
- assertEquals(167.31, results.get[Number](columnNamesList.indexOf("money")).doubleValue(), 0.01)
- }
- }
+ def multipleFieldsOrder(): Unit =
+ (for {
+ _ <- setupTypeTest()
+ (msg, reply) <- sendOk(raw("SELECT is_male, age, email, money, name FROM some_test WHERE is_male = true"))
+ } yield {
+ import scala.collection.JavaConverters._
+ val receivedFields = reply.getArray("fields")
+ val results = reply.getArray("results").get[JsonArray](0)
+
+ assertEquals(1, reply.getInteger("rows"))
+
+ val columnNamesList = receivedFields.asScala.toList
+
+ assertEquals("Mr. Test", results.get(columnNamesList.indexOf("name")))
+ assertEquals("test@example.com", results.get(columnNamesList.indexOf("email")))
+ assertEquals(15, results.get[Int](columnNamesList.indexOf("age")))
+ assertTrue(results.get[Any](columnNamesList.indexOf("is_male")) match {
+ case b: Boolean => b
+ case i: Number => i.intValue() == 1
+ case x => false
+ })
+ assertEquals(167.31, results.get[Number](columnNamesList.indexOf("money")).doubleValue(), 0.01)
+ testComplete()
+ }) recover failedTest
@Test
- def createAndDropTable(): Unit = asyncTest {
- createTable("some_test") flatMap (_ => dropTable("some_test")) map {
- reply =>
- assertEquals(0, reply.getInteger("rows"))
+ def createAndDropTable(): Unit = (for {
+ (msg, dropIfExistsReply) <- sendOk(raw("DROP TABLE IF EXISTS some_test;"))
+ (msg, createReply) <- sendOk(raw("CREATE TABLE some_test (id SERIAL, name VARCHAR(255));"))
+ (msg, insertReply) <- sendOk(raw("INSERT INTO some_test (name) VALUES ('tester');"))
+ (msg, selectReply) <- sendOk(raw("SELECT name FROM some_test"))
+ (msg, dropReply) <- {
+ assertEquals("tester", try {
+ selectReply.getArray("results").get[JsonArray](0).get[String](0)
+ } catch {
+ case ex: Throwable => fail(s"Should be able to get a result before drop, but got ${selectReply.encode()}")
+ })
+ sendOk(raw("DROP TABLE some_test;"))
}
- }
+ (msg, selectReply) <- sendFail(raw("SELECT name FROM some_test"))
+ } yield {
+ val error = selectReply.getString("message")
+ assertTrue(s"Not the right error message $error",
+ error.contains("some_test") && (error.contains("doesn't exist") || error.contains("does not exist")))
+ testComplete()
+ }) recover failedTest
@Test
- def insertCorrect(): Unit = asyncTableTest("some_test") {
- expectOk(insert("some_test", new JsonArray( """["name","email"]"""), new JsonArray( """[["Test","test@example.com"],["Test2","test2@example.com"]]""")))
- }
+ def insertCorrectWithMissingValues(): Unit = (for {
+ _ <- setupTableTest()
+ _ <- sendOk(insert("some_test",
+ Json.fromArrayString( """["name","email"]"""),
+ Json.fromArrayString( """[["Test","test@example.com"],
+ | ["Test2","test2@example.com"]]""".stripMargin)))
+ } yield testComplete()) recover failedTest
@Test
- def insertNullValues(): Unit = asyncTableTest("some_test") {
- expectOk(insert("some_test", new JsonArray( """["name","email"]"""), new JsonArray( """[[null,"test@example.com"],[null,"test2@example.com"]]""")))
- }
+ def insertNullValues(): Unit = (for {
+ _ <- setupTableTest()
+ _ <- sendOk(insert("some_test",
+ Json.fromArrayString( """["name","email"]"""),
+ Json.fromArrayString( """[[null,"test@example.com"],
+ | [null,"test2@example.com"]]""".stripMargin)))
+ } yield testComplete()) recover failedTest
@Test
- def insertTypeTest(): Unit = typeTestInsert {
- Future.successful()
- }
+ def insertTypeTest(): Unit = (for {
+ _ <- setupTypeTest()
+ } yield testComplete()) recover failedTest
@Test
- def insertMaliciousDataTest(): Unit = asyncTableTest("some_test") {
- // If this SQL injection works, the drop table of asyncTableTest would throw an exception
- expectOk(insert("some_test",
- new JsonArray( """["name","email","is_male","age","money","wedding_date"]"""),
- new JsonArray( """[["Mr. Test","test@example.com",true,15,167.31,"2024-04-01"],
- ["Ms Test2','some@example.com',false,15,167.31,'2024-04-01');DROP TABLE some_test;--","test2@example.com",false,43,167.31,"1997-12-24"]]""")))
- }
+ def insertMaliciousDataTest(): Unit = (for {
+ _ <- setupTableTest()
+ (msg, insertReply) <- sendOk(insert("some_test",
+ Json.fromArrayString( """["name","email","is_male","age","money","wedding_date"]"""),
+ Json.fromArrayString(
+ """[["Mr. Test","test@example.com",true,15,167.31,"2024-04-01"],
+ | ["Ms Test2','some@example.com',false,15,167.31,'2024-04-01');DROP TABLE some_test;--","test2@example.com",false,43,167.31,"1997-12-24"]]""".stripMargin)))
+ (msg, selectReply) <- sendOk(raw("SELECT * FROM some_test"))
+ } yield {
+ assertEquals(2, selectReply.getArray("results").size())
+ testComplete()
+ }) recover failedTest
@Test
- def insertUniqueProblem(): Unit = asyncTableTest("some_test") {
- expectError(insert("some_test", new JsonArray( """["name","email"]"""), new JsonArray( """[["Test","test@example.com"],["Test","test@example.com"]]"""))) map {
- reply =>
- logger.info("expected error: " + reply.encode())
- }
- }
+ def insertUniqueProblem(): Unit = (for {
+ _ <- setupTableTest()
+ (msg, reply) <- sendFail(insert("some_test",
+ Json.fromArrayString( """["name","email"]"""),
+ Json.fromArrayString(
+ """[["Test","test@example.com"],
+ | ["Test","test@example.com"]]""".stripMargin)))
+ } yield testComplete()) recover failedTest
@Test
- def selectWithoutFields(): Unit = typeTestInsert {
- expectOk(select("some_test")) map {
- reply =>
- val receivedFields = reply.getArray("fields")
- logger.info("received: " + receivedFields.encode())
-
- def assertFieldName(field: String) = {
- assertTrue("fields should contain '" + field + "'", receivedFields.contains(field))
- }
- assertFieldName("id")
- assertFieldName("name")
- assertFieldName("email")
- assertFieldName("is_male")
- assertFieldName("age")
- assertFieldName("money")
- assertFieldName("wedding_date")
- val moneyField = receivedFields.toArray().indexOf("money")
-
- val mrTest = reply.getArray("results").get[JsonArray](0)
- assertTrue(mrTest.contains("Mr. Test"))
- assertTrue(mrTest.contains("test@example.com"))
- assertTrue(mrTest.contains(true) || mrTest.contains(1))
- assertTrue(mrTest.contains(15))
- assertEquals(167.31, mrTest.get[Number](moneyField).doubleValue(), 0.0001)
+ def selectWithoutFields(): Unit = (for {
+ _ <- setupTypeTest()
+ (msg, reply) <- sendOk(select("some_test"))
+ } yield {
+ val receivedFields = reply.getArray("fields")
+ logger.info("received: " + receivedFields.encode())
+
+ def assertFieldName(field: String) = {
+ assertTrue("fields should contain '" + field + "'", receivedFields.contains(field))
}
- }
+ assertFieldName("id")
+ assertFieldName("name")
+ assertFieldName("email")
+ assertFieldName("is_male")
+ assertFieldName("age")
+ assertFieldName("money")
+ assertFieldName("wedding_date")
+ val moneyField = receivedFields.toArray.indexOf("money")
+
+ val mrTest = reply.getArray("results").get[JsonArray](0)
+ assertTrue(mrTest.contains("Mr. Test"))
+ assertTrue(mrTest.contains("test@example.com"))
+ assertTrue(mrTest.contains(true) || mrTest.contains(1))
+ assertTrue(mrTest.contains(15))
+ assertEquals(167.31, mrTest.get[Number](moneyField).doubleValue(), 0.0001)
+ testComplete()
+ }) recover failedTest
@Test
- def selectEverything(): Unit = typeTestInsert {
+ def selectEverything(): Unit = {
val fieldsArray = Json.arr("name", "email", "is_male", "age", "money", "wedding_date")
- expectOk(select("some_test", fieldsArray)) map {
- reply =>
- val receivedFields = reply.getArray("fields")
- checkSameFields(fieldsArray, receivedFields)
- val results = reply.getArray("results")
- val mrTest = results.get[JsonArray](0)
- checkMrTest(mrTest)
- }
+ (for {
+ _ <- setupTypeTest()
+ (msg, reply) <- sendOk(select("some_test", fieldsArray))
+ } yield {
+ val receivedFields = reply.getArray("fields")
+ checkSameFields(fieldsArray, receivedFields)
+ val results = reply.getArray("results")
+ val mrTest = results.get[JsonArray](0)
+ checkMrTest(mrTest)
+ testComplete()
+ }) recover failedTest
}
- private def checkSameFields(arr1: JsonArray, arr2: JsonArray) = {
- import scala.collection.JavaConversions._
- arr1.foreach(elem => assertTrue(arr2.contains(elem)))
- }
+ @Test
+ def selectFiltered(): Unit = {
+ val fieldsArray = Json.arr("name", "email")
- private def checkTestPerson(mrOrMrs: JsonArray) = {
- mrOrMrs.get[String](0) match {
- case "Mr. Test" => checkMrTest(mrOrMrs)
- case "Mrs. Test" => checkMrsTest(mrOrMrs)
- }
+ (for {
+ _ <- setupTypeTest()
+ (msg, reply) <- sendOk(select("some_test", fieldsArray))
+ } yield {
+ val receivedFields = reply.getArray("fields")
+ assertEquals(s"arrays ${fieldsArray.encode()} and ${receivedFields.encode()} should match",
+ fieldsArray, receivedFields)
+ assertEquals(2, reply.getInteger("rows"))
+ val results = reply.getArray("results")
+ val mrOrMrs = results.get[JsonArray](0)
+ mrOrMrs.get[String](0) match {
+ case "Mr. Test" =>
+ assertEquals("Mr. Test", mrOrMrs.get[String](0))
+ assertEquals("test@example.com", mrOrMrs.get[String](1))
+ case "Mrs. Test" =>
+ assertEquals("Mrs. Test", mrOrMrs.get[String](0))
+ assertEquals("test2@example.com", mrOrMrs.get[String](1))
+ }
+ testComplete()
+ }) recover failedTest
}
- private def checkMrTest(mrTest: JsonArray) = {
- assertEquals("Mr. Test", mrTest.get[String](0))
- assertEquals("test@example.com", mrTest.get[String](1))
- assertTrue(mrTest.get[Any](2) match {
- case b: Boolean => b
- case i: Number => i.intValue() == 1
- case x => false
- })
- assertEquals(15, mrTest.get[Number](3).intValue())
- assertEquals(167.31, mrTest.get[Number](4).doubleValue(), 0.0001)
- // FIXME check date conversion
- // assertEquals("2024-04-01", mrTest.get[JsonObject](5))
- }
+ @Test
+ def preparedSelect(): Unit = (for {
+ _ <- setupTypeTest()
+ (msg, reply) <- sendOk(prepared("SELECT email FROM some_test WHERE name=? AND age=?", Json.arr("Mr. Test", 15)))
+ } yield {
+ val receivedFields = reply.getArray("fields")
+ assertEquals(Json.arr("email"), receivedFields)
+ assertEquals(1, reply.getInteger("rows"))
+ assertEquals("test@example.com", reply.getArray("results").get[JsonArray](0).get[String](0))
+ testComplete()
+ }) recover failedTest
- private def checkMrsTest(mrsTest: JsonArray) = {
- assertEquals("Mrs. Test", mrsTest.get[String](0))
- assertEquals("test2@example.com", mrsTest.get[String](1))
- assertEquals(false, mrsTest.get[Boolean](2))
- assertEquals(43L, mrsTest.get[Long](3))
- assertEquals(167.31, mrsTest.get[Number](4).doubleValue(), 0.0001)
- // FIXME check date conversion
- // assertEquals("1997-12-24", mrsTest.get[JsonObject](5))
- }
+ @Test
+ def simpleTransaction(): Unit = (for {
+ _ <- setupTypeTest()
+ (msg, transactionReply) <- sendOk(
+ transaction(
+ insert("some_test", Json.arr("name", "email", "is_male", "age", "money"),
+ Json.arr(Json.arr("Mr. Test jr.", "test3@example.com", true, 5, 2))),
+ raw("UPDATE some_test SET age=6 WHERE name = 'Mr. Test jr.'")))
+ (msg, reply) <- sendOk(raw("SELECT SUM(age) FROM some_test WHERE is_male = true"))
+ } yield {
+ val results = reply.getArray("results")
+ assertEquals(1, results.size())
+ assertEquals(21, results.get[JsonArray](0).get[Number](0).intValue())
+ testComplete()
+ }) recover failedTest
@Test
- def selectFiltered(): Unit = typeTestInsert {
- val fieldsArray = new JsonArray( """["name","email"]""")
- expectOk(select("some_test", fieldsArray)) map {
- reply =>
- val receivedFields = reply.getArray("fields")
- assertTrue("arrays " + fieldsArray.encode() + " and " + receivedFields.encode() +
- " should match", fieldsArray == receivedFields)
- // assertEquals(2, reply.getInteger("rows"))
- val results = reply.getArray("results")
- val mrOrMrs = results.get[JsonArray](0)
- mrOrMrs.get[String](0) match {
- case "Mr. Test" =>
- assertEquals("Mr. Test", mrOrMrs.get[String](0))
- assertEquals("test@example.com", mrOrMrs.get[String](1))
- case "Mrs. Test" =>
- assertEquals("Mrs. Test", mrOrMrs.get[String](0))
- assertEquals("test2@example.com", mrOrMrs.get[String](1))
- }
- }
- }
+ def transactionWithPreparedStatement(): Unit = (for {
+ _ <- setupTypeTest()
+ (msg, transactionReply) <- sendOk(
+ transaction(
+ insert("some_test", Json.arr("name", "email", "is_male", "age", "money"),
+ Json.arr(Json.arr("Mr. Test jr.", "test3@example.com", true, 5, 2))),
+ prepared("UPDATE some_test SET age=? WHERE name=?", Json.arr(6, "Mr. Test jr."))))
+ (msg, reply) <- sendOk(raw("SELECT SUM(age) FROM some_test WHERE is_male = true"))
+ } yield {
+ val results = reply.getArray("results")
+ assertEquals(1, results.size())
+ assertEquals(21, results.get[JsonArray](0).get[Number](0).intValue())
+ testComplete()
+ }) recover failedTest
@Test
- def preparedSelect(): Unit = typeTestInsert {
- expectOk(prepared("SELECT email FROM some_test WHERE name=? AND age=?", Json.arr("Mr. Test", 15))) map {
- reply =>
- val receivedFields = reply.getArray("fields")
- assertEquals(Json.arr("email"), receivedFields)
- // assertEquals(1, reply.getInteger("rows"))
- assertEquals("test@example.com", reply.getArray("results").get[JsonArray](0).get[String](0))
+ def startAndEndTransaction(): Unit = (for {
+ (msg, beginReply) <- sendOk(Json.obj("action" -> "begin"))
+ (msg, selectReply) <- replyOk(msg, raw("SELECT 15"))
+ (msg, commitReply) <- {
+ val arr = selectReply.getArray("results")
+ assertEquals("ok", selectReply.getString("status"))
+ assertEquals(1, arr.size())
+ assertEquals(15, arr.get[JsonArray](0).get[Number](0).longValue())
+
+ replyOk(msg, Json.obj("action" -> "commit"))
}
- }
+ } yield testComplete()) recover failedTest
+
@Test
- def transaction(): Unit = typeTestInsert {
- (for {
- a <- expectOk(
- transaction(
- insert("some_test", Json.arr("name", "email", "is_male", "age", "money"),
- Json.arr(Json.arr("Mr. Test jr.", "test3@example.com", true, 5, 2))),
- raw("UPDATE some_test SET age=6 WHERE name = 'Mr. Test jr.'")))
- b <- expectOk(raw("SELECT SUM(age) FROM some_test WHERE is_male = true"))
- } yield b) map {
- reply =>
- val results = reply.getArray("results")
- assertEquals(1, results.size())
- assertEquals(21, results.get[JsonArray](0).get[Number](0).intValue())
- }
- }
+ def updateInTransaction(): Unit = (for {
+ _ <- setupTypeTest()
+ (msg, beginReply) <- sendOk(Json.obj("action" -> "begin"))
+ (msg, updateReply) <- replyOk(msg, raw("UPDATE some_test set email = 'updated@test.com' WHERE name = 'Mr. Test'"))
+ (msg, commitReply) <- replyOk(msg, Json.obj("action" -> "commit"))
+ (msg, checkReply) <- sendOk(raw("SELECT email FROM some_test WHERE name = 'Mr. Test'"))
+ } yield {
+ val results = checkReply.getArray("results")
+ val mrTest = results.get[JsonArray](0)
+ assertEquals("updated@test.com", mrTest.get[String](0))
+ logger.info("all tests completed")
+ testComplete()
+ }) recover failedTest
+
+ @Test
+ def violateForeignKey(): Unit = (for {
+ (msg, beginResult) <- sendOk(Json.obj("action" -> "begin"))
+ (msg, _) <- replyOk(msg, raw("DROP TABLE IF EXISTS test_two;"))
+ (msg, _) <- replyOk(msg, raw("DROP TABLE IF EXISTS test_one;"))
+ (msg, _) <- replyOk(msg, raw( """CREATE TABLE test_one (
+ | id SERIAL,
+ | name VARCHAR(255),
+ | PRIMARY KEY (id)
+ |);""".stripMargin))
+ (msg, _) <- replyOk(msg, raw(
+ s"""CREATE TABLE test_two (
+ | id SERIAL,
+ | name VARCHAR(255),
+ | one_id BIGINT ${if (isMysql) "UNSIGNED" else ""} NOT NULL,
+ | PRIMARY KEY (id)
+ |);""".stripMargin))
+ (msg, _) <- replyOk(msg, raw(
+ """ALTER TABLE test_two ADD CONSTRAINT test_two_one_id_fk
+ |FOREIGN KEY (one_id)
+ |REFERENCES test_one (id);""".stripMargin))
+ (msg, _) <- replyOk(msg, raw("INSERT INTO test_one (name) VALUES ('first'),('second');"))
+ (msg, setupResult) <- replyOk(msg, raw("INSERT INTO test_two (name, one_id) VALUES ('twoone', 1);"))
+ (msg, insertViolatedResult) <- replyFail(msg, raw("INSERT INTO test_two (name, one_id) VALUES ('twothree', 3);"))
+ (msg, rollbackResult) <- replyOk(msg, raw("ROLLBACK;"))
+ } yield testComplete()) recover failedTest
@Test
- def transactionWithPreparedStatement(): Unit = typeTestInsert {
+ def wrongQueryInTransaction(): Unit = (for {
+ _ <- setupTypeTest()
+ (msg, beginReply) <- sendOk(Json.obj("action" -> "begin"))
+ (msg, updateReply) <- replyWithTimeout(msg, raw("this is a bad raw query for sql"))
+ } yield {
+ assertEquals("error", updateReply.getString("status"))
+ testComplete()
+ }) recover failedTest
+
+ @Test
+ def rollBackTransaction(): Unit = {
+ val fieldsArray = Json.arr("name", "email", "is_male", "age", "money", "wedding_date")
(for {
- a <- expectOk(
- transaction(
- insert("some_test", Json.arr("name", "email", "is_male", "age", "money"),
- Json.arr(Json.arr("Mr. Test jr.", "test3@example.com", true, 5, 2))),
- prepared("UPDATE some_test SET age=? WHERE name=?", Json.arr(6, "Mr. Test jr."))))
- b <- expectOk(raw("SELECT SUM(age) FROM some_test WHERE is_male = true"))
- } yield b) map {
- reply =>
- val results = reply.getArray("results")
- assertEquals(1, results.size())
- assertEquals(21, results.get[JsonArray](0).get[Number](0).intValue())
- }
+ _ <- setupTypeTest()
+ (msg, beginReply) <- sendOk(Json.obj("action" -> "begin"))
+ (msg, reply) <- replyOk(msg, raw("UPDATE some_test set email = 'shouldRollback@test.com' WHERE name = 'Mr. Test'"))
+ (msg, checkUpdateReply) <- replyOk(msg, raw("SELECT email FROM some_test WHERE name = 'Mr. Test'"))
+ (msg, endReply) <- {
+ val results = checkUpdateReply.getArray("results")
+ val mrTest = results.get[JsonArray](0)
+ assertEquals("shouldRollback@test.com", mrTest.get[String](0))
+
+ logger.info("Update done, now do rollback")
+ replyOk(msg, Json.obj("action" -> "rollback"))
+ }
+ (msg, checkReply) <- sendOk(select("some_test", fieldsArray))
+ } yield {
+ val results = checkReply.getArray("results")
+ val mrTest = results.get[JsonArray](0)
+ checkMrTest(mrTest)
+ logger.info("rolled back nicely")
+ testComplete()
+ }) recover failedTest
}
-}
\ No newline at end of file
+ @Test
+ def dateTest(): Unit = (for {
+ _ <- setupTableTest()
+ (msg, insertReply) <- sendOk(raw("INSERT INTO some_test (name, wedding_date) VALUES ('tester', '2015-04-04');"))
+ (msg, reply) <- sendOk(prepared("SELECT wedding_date FROM some_test WHERE name=?", Json.arr("tester")))
+ } yield {
+ val receivedFields = reply.getArray("fields")
+ assertEquals(Json.arr("wedding_date"), receivedFields)
+ assertEquals("2015-04-04", reply.getArray("results").get[JsonArray](0).get[String](0))
+ testComplete()
+ }) recover failedTest
+
+ @Test
+ def timestampTest(): Unit = (for {
+ (m, r) <- sendOk(raw("DROP TABLE IF EXISTS date_test"))
+ (msg, r2) <- sendOk(raw(createDateTable("timestamp")))
+ (msg, insertReply) <- sendOk(raw("INSERT INTO date_test (test_date) VALUES ('2015-04-04T10:04:00.000');"))
+ (msg, reply) <- sendOk(raw("SELECT test_date FROM date_test"))
+ } yield {
+ val receivedFields = reply.getArray("fields")
+ assertEquals(Json.arr("test_date"), receivedFields)
+ logger.info("date is: " + reply.getArray("results").get[JsonArray](0).get[String](0))
+ assertEquals("2015-04-04T10:04:00.000", reply.getArray("results").get[JsonArray](0).get[String](0))
+ testComplete()
+ }) recover failedTest
+
+}
+
diff --git a/src/test/scala/io/vertx/asyncsql/test/SqlTestVerticle.scala b/src/test/scala/io/vertx/asyncsql/test/SqlTestVerticle.scala
index 29ef238..da6428c 100644
--- a/src/test/scala/io/vertx/asyncsql/test/SqlTestVerticle.scala
+++ b/src/test/scala/io/vertx/asyncsql/test/SqlTestVerticle.scala
@@ -15,7 +15,7 @@ abstract class SqlTestVerticle extends TestVerticle with BaseVertxIntegrationTes
val p = Promise[Unit]
container.deployModule(System.getProperty("vertx.modulename"), getConfig(), 1, { deploymentID: AsyncResult[String] =>
if (deploymentID.failed()) {
- logger.info(deploymentID.cause())
+ logger.info(s"Deployment failed, cause: ${deploymentID.cause()}")
p.failure(deploymentID.cause())
}
assertTrue("deploymentID should not be null", deploymentID.succeeded())
@@ -56,6 +56,8 @@ abstract class SqlTestVerticle extends TestVerticle with BaseVertxIntegrationTes
protected def transaction(statements: JsonObject*) = Json.obj("action" -> "transaction", "statements" -> Json.arr(statements: _*))
+ protected def newTransaction = Json.obj("action" -> "begin")
+
protected def createTable(tableName: String) = expectOk(raw(createTableStatement(tableName))) map { reply =>
assertEquals(0, reply.getInteger("rows"))
reply
@@ -65,6 +67,13 @@ abstract class SqlTestVerticle extends TestVerticle with BaseVertxIntegrationTes
reply
}
+ protected def createDateTable(dateDataType: String) = s"""
+ | CREATE TABLE date_test (
+ | id SERIAL,
+ | test_date $dateDataType
+ | );
+ """.stripMargin
+
protected def createTableStatement(tableName: String) = """
DROP TABLE IF EXISTS """ + tableName + """;
CREATE TABLE """ + tableName + """ (
diff --git a/src/test/scala/io/vertx/asyncsql/test/mysql/MySqlTest.scala b/src/test/scala/io/vertx/asyncsql/test/mysql/MySqlTest.scala
index d7416d3..4dc621b 100644
--- a/src/test/scala/io/vertx/asyncsql/test/mysql/MySqlTest.scala
+++ b/src/test/scala/io/vertx/asyncsql/test/mysql/MySqlTest.scala
@@ -1,16 +1,29 @@
package io.vertx.asyncsql.test.mysql
-import org.vertx.scala.core.json.Json
-import io.vertx.asyncsql.test.{ BaseSqlTests, SqlTestVerticle }
+import io.vertx.asyncsql.test.{BaseSqlTests, SqlTestVerticle}
+import org.junit.Test
+import org.vertx.scala.core.json._
+import org.vertx.testtools.VertxAssert._
class MySqlTest extends SqlTestVerticle with BaseSqlTests {
val address = "campudus.asyncdb"
val config = Json.obj("address" -> address, "connection" -> "MySQL", "maxPoolSize" -> 3)
+ override def isMysql = true
+
override def doBefore() = expectOk(raw("DROP TABLE IF EXISTS `some_test`"))
+
override def getConfig = config
+ override def createDateTable(dateDataType: String) = s"""
+ | CREATE TABLE date_test (
+ | id INT NOT NULL AUTO_INCREMENT,
+ | test_date $dateDataType,
+ | PRIMARY KEY(id)
+ | );
+ """.stripMargin
+
override def createTableStatement(tableName: String) = """
CREATE TABLE """ + tableName + """ (
id INT NOT NULL AUTO_INCREMENT,
@@ -21,7 +34,21 @@ CREATE TABLE """ + tableName + """ (
money FLOAT,
wedding_date DATE,
PRIMARY KEY (id)
-);
-"""
+);"""
+
+ @Test
+ def datetimeTest(): Unit =
+ (for {
+ (m, r) <- sendOk(raw("DROP TABLE IF EXISTS date_test"))
+ (msg, r2) <- sendOk(raw(createDateTable("datetime")))
+ (msg, insertReply) <- sendOk(raw("INSERT INTO date_test (test_date) VALUES ('2015-04-04');"))
+ (msg, reply) <- sendOk(raw("SELECT test_date FROM date_test"))
+ } yield {
+ val receivedFields = reply.getArray("fields")
+ assertEquals(Json.arr("test_date"), receivedFields)
+ logger.info("date is: " + reply.getArray("results").get[JsonArray](0).get[String](0))
+ assertEquals("2015-04-04T00:00:00.000", reply.getArray("results").get[JsonArray](0).get[String](0))
+ testComplete()
+ }) recover failedTest
}
\ No newline at end of file