diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml index bc9d5f8..01b89bc 100644 --- a/.github/workflows/master.yml +++ b/.github/workflows/master.yml @@ -13,11 +13,19 @@ jobs: MAVEN_CENTRAL_PGP_KEY: ${{ secrets.MAVEN_CENTRAL_PGP_KEY }} steps: - - uses: actions/checkout@v1 - - uses: gradle/wrapper-validation-action@v1 - - name: Set up JDK 1.8 - uses: actions/setup-java@v1 + - uses: actions/checkout@v4 + - uses: gradle/actions/wrapper-validation@v3 + - name: Set up JDK 11 + uses: actions/setup-java@v4 with: - java-version: '8.0.282' + java-version: '11' + distribution: 'temurin' + check-latest: true + # Configure Gradle for optimal use in GitHub Actions, including caching of downloaded dependencies. + # See: https://github.com/gradle/actions/blob/main/setup-gradle/README.md + - name: Setup Gradle + uses: gradle/actions/setup-gradle@v4 - name: build test and publish run: ./gradlew assemble && ./gradlew check --info && ./gradlew publishToSonatype closeAndReleaseSonatypeStagingRepository -x check --info --stacktrace + env: + CI: true diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index 13a366a..f16bf96 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -7,15 +7,25 @@ on: pull_request: branches: - master + - reactive-streams-branch + - '**' jobs: buildAndTest: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v1 - - uses: gradle/wrapper-validation-action@v1 - - name: Set up JDK 1.8 - uses: actions/setup-java@v1 + - uses: actions/checkout@v4 + - uses: gradle/actions/wrapper-validation@v3 + - name: Set up JDK 11 + uses: actions/setup-java@v4 with: - java-version: '8.0.282' + java-version: '11' + distribution: 'temurin' + check-latest: true + # Configure Gradle for optimal use in GitHub Actions, including caching of downloaded dependencies. + # See: https://github.com/gradle/actions/blob/main/setup-gradle/README.md + - name: Setup Gradle + uses: gradle/actions/setup-gradle@v4 - name: build and test run: ./gradlew assemble && ./gradlew check --info --stacktrace + env: + CI: true diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index b61d755..a574a68 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -17,11 +17,19 @@ jobs: RELEASE_VERSION: ${{ github.event.inputs.version }} steps: - - uses: actions/checkout@v1 - - uses: gradle/wrapper-validation-action@v1 - - name: Set up JDK 1.8 - uses: actions/setup-java@v1 + - uses: actions/checkout@v4 + - uses: gradle/actions/wrapper-validation@v3 + - name: Set up JDK 11 + uses: actions/setup-java@v4 with: - java-version: '8.0.282' + java-version: '11' + distribution: 'temurin' + check-latest: true + # Configure Gradle for optimal use in GitHub Actions, including caching of downloaded dependencies.
+ # See: https://github.com/gradle/actions/blob/main/setup-gradle/README.md + - name: Setup Gradle + uses: gradle/actions/setup-gradle@v4 - name: build test and publish run: ./gradlew assemble && ./gradlew check --info && ./gradlew publishToSonatype closeAndReleaseSonatypeStagingRepository -x check --info --stacktrace + env: + CI: true diff --git a/.github/workflows/stale-pr-issue.yml b/.github/workflows/stale-pr-issue.yml new file mode 100644 index 0000000..d945402 --- /dev/null +++ b/.github/workflows/stale-pr-issue.yml @@ -0,0 +1,48 @@ +# Mark inactive issues and PRs as stale +# GitHub action based on https://github.com/actions/stale + +name: 'Close stale issues and PRs' +on: + schedule: + # Execute every day + - cron: '0 0 * * *' + +permissions: + actions: write + issues: write + pull-requests: write + +jobs: + close-pending: + runs-on: ubuntu-latest + steps: + - uses: actions/stale@v9 + with: + # GLOBAL ------------------------------------------------------------ + # Exempt any PRs or issues already added to a milestone + exempt-all-milestones: true + # Days until issues or pull requests are labelled as stale + days-before-stale: 60 + + # ISSUES ------------------------------------------------------------ + # Issues will be closed after 90 days of inactivity (60 to mark as stale + 30 to close) + days-before-issue-close: 30 + stale-issue-message: > + Hello, this issue has been inactive for 60 days, so we're marking it as stale. + If you would like to continue this discussion, please comment within the next 30 days or we'll close the issue. + close-issue-message: > + Hello, as this issue has been inactive for 90 days, we're closing the issue. + If you would like to resume the discussion, please create a new issue. + exempt-issue-labels: keep-open + + # PULL REQUESTS ----------------------------------------------------- + # PRs will be closed after 90 days of inactivity (60 to mark as stale + 30 to close) + days-before-pr-close: 30 + stale-pr-message: > + Hello, this pull request has been inactive for 60 days, so we're marking it as stale. + If you would like to continue working on this pull request, please make an update within the next 30 days, or we'll close the pull request. + close-pr-message: > + Hello, as this pull request has been inactive for 90 days, we're closing this pull request. + We always welcome contributions, and if you would like to continue, please open a new pull request.
+ exempt-pr-labels: keep-open + \ No newline at end of file diff --git a/README.md b/README.md index 24a65f6..c7c6fe9 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,11 @@ # java-dataloader [![Build](https://github.com/graphql-java/java-dataloader/actions/workflows/master.yml/badge.svg)](https://github.com/graphql-java/java-dataloader/actions/workflows/master.yml) -[![Latest Release](https://maven-badges.herokuapp.com/maven-central/com.graphql-java/java-dataloader/badge.svg)](https://maven-badges.herokuapp.com/maven-central/com.graphql-java/java-dataloader/) +[![Latest Release](https://img.shields.io/maven-central/v/com.graphql-java/java-dataloader?versionPrefix=4.)](https://maven-badges.herokuapp.com/maven-central/com.graphql-java/graphql-java/) +[![Latest Snapshot](https://img.shields.io/maven-central/v/com.graphql-java/java-dataloader?label=maven-central%20snapshot&versionPrefix=0)](https://maven-badges.herokuapp.com/maven-central/com.graphql-java/graphql-java/) [![Apache licensed](https://img.shields.io/hexpm/l/plug.svg?maxAge=2592000)](https://github.com/graphql-java/java-dataloader/blob/master/LICENSE) -This small and simple utility library is a pure Java 8 port of [Facebook DataLoader](https://github.com/facebook/dataloader). +This small and simple utility library is a pure Java 11 port of [Facebook DataLoader](https://github.com/facebook/dataloader). It can serve as integral part of your application's data layer to provide a consistent API over various back-ends and reduce message communication overhead through batching and caching. @@ -15,7 +16,7 @@ are resolved independently and, with a true graph of objects, you may be fetchin A naive implementation of graphql data fetchers can easily lead to the dreaded "n+1" fetch problem. Most of the code is ported directly from Facebook's reference implementation, with one IMPORTANT adaptation to make -it work for Java 8. ([more on this below](#manual-dispatching)). +it work for Java 11. ([more on this below](#manual-dispatching)). Before reading on, be sure to take a short dive into the [original documentation](https://github.com/facebook/dataloader/blob/master/README.md) provided by Lee Byron (@leebyron) @@ -63,11 +64,11 @@ Gradle users configure the `java-dataloader` dependency in `build.gradle`: ``` repositories { - jcenter() + mavenCentral() } dependencies { - compile 'com.graphql-java:java-dataloader: 3.1.0' + compile 'com.graphql-java:java-dataloader: 4.0.0' } ``` @@ -286,6 +287,77 @@ For example, let's assume you want to load users from a database, you could prob // ... ``` +### Returning a stream of results from your batch publisher + +It may be that your batch loader function can use a [Reactive Streams](https://www.reactive-streams.org/) [Publisher](https://www.reactive-streams.org/reactive-streams-1.0.3-javadoc/org/reactivestreams/Publisher.html), where values are emitted as an asynchronous stream. + +For example, let's say you wanted to load many users from a service without forcing the service to load all +users into its memory (which may exert considerable pressure on it). + +A `org.dataloader.BatchPublisher` may be used to load this data: + +```java + BatchPublisher batchPublisher = new BatchPublisher() { + @Override + public void load(List userIds, Subscriber userSubscriber) { + Publisher userResults = userManager.streamUsersById(userIds); + userResults.subscribe(userSubscriber); + } + }; + DataLoader userLoader = DataLoaderFactory.newPublisherDataLoader(batchPublisher); + + // ... 
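+  // Illustrative notes on the assumed semantics (spelled out in the text below):
+  //  - userManager.streamUsersById(userIds) is assumed to return a Reactive Streams Publisher that
+  //    emits exactly one User per requested id, in the same order as the userIds list.
+  //  - once userLoader.dispatch() is called, each CompletableFuture returned by userLoader.load(userId)
+  //    completes as soon as its User is emitted, rather than when the whole batch has been retrieved.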
+``` + +Rather than waiting for all user values to be returned on one batch, this `DataLoader` will complete +the `CompletableFuture` returned by `Dataloader#load(Long)` as each value is +published. + +This pattern means that data loader values can (in theory) be satisfied more quickly than if we wait for +all results in the batch to be retrieved and hence the overall result may finish more quickly. + +If an exception is thrown, the remaining futures yet to be completed are completed +exceptionally. + +You *MUST* ensure that the values are streamed in the same order as the keys provided, +with the same cardinality (i.e. the number of values must match the number of keys). + +Failing to do so will result in incorrect data being returned from `DataLoader#load`. + +`BatchPublisher` is the reactive version of `BatchLoader`. + + +### Returning a mapped stream of results from your batch publisher + +Your publisher may not necessarily return values in the same order in which it processes keys and it +may not be able to find a value for each key presented. + +For example, let's say your batch publisher function loads user data which is spread across shards, +with some shards responding more quickly than others. + +In instances like these, `org.dataloader.MappedBatchPublisher` can be used. + +```java + MappedBatchPublisher mappedBatchPublisher = new MappedBatchPublisher() { + @Override + public void load(Set userIds, Subscriber> userEntrySubscriber) { + Publisher> userEntries = userManager.streamUsersById(userIds); + userEntries.subscribe(userEntrySubscriber); + } + }; + DataLoader userLoader = DataLoaderFactory.newMappedPublisherDataLoader(mappedBatchPublisher); + + // ... +``` + +Like the `BatchPublisher`, if an exception is thrown, the remaining futures yet to be completed are completed +exceptionally. + +Unlike the `BatchPublisher`, however, it is not necessary to return values in the same order as the provided keys, +or even the same number of values. + +`MappedBatchPublisher` is the reactive version of `MappedBatchLoader`. + ### Error object is not a thing in a type safe Java world In the reference JS implementation if the batch loader returns an `Error` object back from the `load()` promise is rejected @@ -541,6 +613,12 @@ The following is a `BatchLoaderScheduler` that waits 10 milliseconds before invo return scheduledCall.invoke(); }).thenCompose(Function.identity()); } + + @Override + public void scheduleBatchPublisher(ScheduledBatchPublisherCall scheduledCall, List keys, BatchLoaderEnvironment environment) { + snooze(10); + scheduledCall.invoke(); + } }; ``` @@ -673,6 +751,65 @@ When ticker mode is **true** the `ScheduledDataLoaderRegistry` algorithm is as f * If it returns **true**, then `dataLoader.dispatch()` is called **and** a task is scheduled to re-evaluate this specific dataloader in the near future * The re-evaluation tasks are run periodically according to the `registry.getScheduleDuration()` +## Instrumenting the data loader code + +A `DataLoader` can have a `DataLoaderInstrumentation` associated with it. This callback interface is intended to provide +insight into working of the `DataLoader` such as how long it takes to run or to allow for logging of key events. + +You set the `DataLoaderInstrumentation` into the `DataLoaderOptions` at build time. 
+ +```java + + + DataLoaderInstrumentation timingInstrumentation = new DataLoaderInstrumentation() { + @Override + public DataLoaderInstrumentationContext> beginDispatch(DataLoader dataLoader) { + long then = System.currentTimeMillis(); + return DataLoaderInstrumentationHelper.whenCompleted((result, err) -> { + long ms = System.currentTimeMillis() - then; + System.out.println(format("dispatch time: %d ms", ms)); + }); + } + + @Override + public DataLoaderInstrumentationContext> beginBatchLoader(DataLoader dataLoader, List keys, BatchLoaderEnvironment environment) { + long then = System.currentTimeMillis(); + return DataLoaderInstrumentationHelper.whenCompleted((result, err) -> { + long ms = System.currentTimeMillis() - then; + System.out.println(format("batch loader time: %d ms", ms)); + }); + } + }; + DataLoaderOptions options = DataLoaderOptions.newOptions().setInstrumentation(timingInstrumentation); + DataLoader userDataLoader = DataLoaderFactory.newDataLoader(userBatchLoader, options); + +``` + +The example shows how long the overall `DataLoader` dispatch takes or how long the batch loader takes to run. + +### Instrumenting the DataLoaderRegistry + +You can also associate a `DataLoaderInstrumentation` with a `DataLoaderRegistry`. Every `DataLoader` registered will be changed so that the registry +`DataLoaderInstrumentation` is associated with it. This allows you to set just the one `DataLoaderInstrumentation` in place and it applies to all +data loaders. + +```java + DataLoader userDataLoader = DataLoaderFactory.newDataLoader(userBatchLoader); + DataLoader teamsDataLoader = DataLoaderFactory.newDataLoader(teamsBatchLoader); + + DataLoaderRegistry registry = DataLoaderRegistry.newRegistry() + .instrumentation(timingInstrumentation) + .register("users", userDataLoader) + .register("teams", teamsDataLoader) + .build(); + + DataLoader changedUsersDataLoader = registry.getDataLoader("users"); +``` + +The `timingInstrumentation` here will be associated with the `DataLoader` under the key `users` and the key `teams`. Note that since +DataLoader is immutable, a new changed object is created so you must use the registry to get the `DataLoader`. + + ## Other information sources - [Facebook DataLoader Github repo](https://github.com/facebook/dataloader) @@ -697,10 +834,10 @@ This library was originally written for use within a [VertX world](http://vertx. itself. All the heavy lifting has been done by this project : [vertx-dataloader](https://github.com/engagingspaces/vertx-dataloader) including the extensive testing (which itself came from Facebook). -This particular port was done to reduce the dependency on Vertx and to write a pure Java 8 implementation with no dependencies and also +This particular port was done to reduce the dependency on Vertx and to write a pure Java 11 implementation with no dependencies and also to use the more normative Java CompletableFuture. -[vertx-core](http://vertx.io/docs/vertx-core/java/) is not a lightweight library by any means so having a pure Java 8 implementation is +[vertx-core](http://vertx.io/docs/vertx-core/java/) is not a lightweight library by any means so having a pure Java 11 implementation is very desirable. 
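To make the publisher-based loading described above concrete, here is a minimal, self-contained sketch. It assumes Project Reactor's `Flux` (which this project uses only as a test dependency) as the Reactive Streams `Publisher`; the `Long` keys and `"user-"` values are illustrative and not taken from the repository:

```java
import org.dataloader.BatchPublisher;
import org.dataloader.DataLoader;
import org.dataloader.DataLoaderFactory;
import reactor.core.publisher.Flux;

import java.util.concurrent.CompletableFuture;

public class PublisherLoaderSketch {
    public static void main(String[] args) {
        // Emit one value per key, in key order, as the BatchPublisher contract requires.
        BatchPublisher<Long, String> namePublisher =
                (keys, subscriber) -> Flux.fromIterable(keys)
                        .map(id -> "user-" + id)
                        .subscribe(subscriber);

        DataLoader<Long, String> loader = DataLoaderFactory.newPublisherDataLoader(namePublisher);

        CompletableFuture<String> first = loader.load(1L);
        CompletableFuture<String> second = loader.load(2L);

        loader.dispatch();                  // subscribes the loader to the publisher
        System.out.println(first.join());   // user-1
        System.out.println(second.join());  // user-2
    }
}
```

Because the publisher emits values in key order, each future completes as its value arrives, which is the behaviour the batch publisher section describes.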
diff --git a/build.gradle b/build.gradle index f5064ed..6072f4f 100644 --- a/build.gradle +++ b/build.gradle @@ -3,12 +3,21 @@ import java.text.SimpleDateFormat plugins { id 'java' id 'java-library' + id 'jvm-test-suite' id 'maven-publish' id 'signing' - id "biz.aQute.bnd.builder" version "6.2.0" - id "io.github.gradle-nexus.publish-plugin" version "1.0.0" + id 'groovy' + id 'biz.aQute.bnd.builder' version '6.2.0' + id 'io.github.gradle-nexus.publish-plugin' version '1.0.0' + id 'com.github.ben-manes.versions' version '0.51.0' + id "me.champeau.jmh" version "0.7.3" } +java { + toolchain { + languageVersion = JavaLanguageVersion.of(11) + } +} def getDevelopmentVersion() { def output = new StringBuilder() @@ -25,20 +34,10 @@ def getDevelopmentVersion() { version } -if (JavaVersion.current() != JavaVersion.VERSION_1_8) { - def msg = String.format("This build must be run with java 1.8 - you are running %s - gradle finds the JDK via JAVA_HOME=%s", - JavaVersion.current(), System.getenv("JAVA_HOME")) - throw new IllegalStateException(msg) -} - - -sourceCompatibility = 1.8 -targetCompatibility = 1.8 -def slf4jVersion = '1.7.30' def releaseVersion = System.env.RELEASE_VERSION version = releaseVersion ? releaseVersion : getDevelopmentVersion() group = 'com.graphql-java' -description = 'A pure Java 8 port of Facebook Dataloader' +description = 'A pure Java 11 port of Facebook Dataloader' gradle.buildFinished { buildResult -> println "*******************************" @@ -58,47 +57,67 @@ repositories { mavenLocal() } -apply plugin: 'groovy' - jar { manifest { attributes('Automatic-Module-Name': 'org.dataloader', - '-exportcontents': 'org.dataloader.*', - '-removeheaders': 'Private-Package') + '-exportcontents': 'org.dataloader.*', + '-removeheaders': 'Private-Package') } } dependencies { - api 'org.slf4j:slf4j-api:' + slf4jVersion - testImplementation 'org.slf4j:slf4j-simple:' + slf4jVersion - testImplementation 'junit:junit:4.12' - testImplementation 'org.awaitility:awaitility:2.0.0' - testImplementation 'com.github.ben-manes.caffeine:caffeine:2.9.0' + api "org.reactivestreams:reactive-streams:$reactive_streams_version" + api "org.jspecify:jspecify:1.0.0" + + // this is needed for the idea jmh plugin to work correctly + jmh 'org.openjdk.jmh:jmh-core:1.37' + jmh 'org.openjdk.jmh:jmh-generator-annprocess:1.37' } task sourcesJar(type: Jar) { dependsOn classes - classifier 'sources' + archiveClassifier.set('sources') from sourceSets.main.allSource } -task javadocJar(type: Jar, dependsOn: javadoc) { - classifier = 'javadoc' - from javadoc.destinationDir -} - javadoc { options.encoding = 'UTF-8' } +task javadocJar(type: Jar, dependsOn: javadoc) { + archiveClassifier.set('javadoc') + from javadoc.destinationDir +} + artifacts { archives sourcesJar archives javadocJar } -test { - testLogging { - exceptionFormat = 'full' +testing { + suites { + test { + useJUnitJupiter(junit_version) + dependencies { + // Testing dependencies + implementation platform("org.junit:junit-bom:$junit_version") + implementation 'org.junit.jupiter:junit-jupiter-api' + implementation 'org.junit.jupiter:junit-jupiter-params' + implementation 'org.junit.jupiter:junit-jupiter-engine' + implementation "org.awaitility:awaitility:$awaitility_version" + implementation "org.hamcrest:hamcrest:$hamcrest_version" + implementation "io.projectreactor:reactor-core:$reactor_core_version" + implementation "com.github.ben-manes.caffeine:caffeine:$caffeine_version" + } + + targets.configureEach { + testTask.configure { + testLogging { + 
exceptionFormat = 'full' + } + } + } + } } } @@ -117,7 +136,7 @@ publishing { asNode().children().last() + { resolveStrategy = Closure.DELEGATE_FIRST name 'java-dataloader' - description 'A pure Java 8 port of Facebook Dataloader' + description 'A pure Java 11 port of Facebook Dataloader' url 'https://github.com/graphql-java/java-dataloader' inceptionYear '2017' @@ -162,6 +181,7 @@ nexusPublishing { } signing { + required { !project.hasProperty('publishToMavenLocal') } def signingKey = System.env.MAVEN_CENTRAL_PGP_KEY useInMemoryPgpKeys(signingKey, "") sign publishing.publications @@ -173,9 +193,15 @@ tasks.withType(PublishToMavenRepository) { dependsOn build } - -task myWrapper(type: Wrapper) { - gradleVersion = '6.6.1' - distributionUrl = "https://services.gradle.org/distributions/gradle-${gradleVersion}-all.zip" +def isNonStable = { String version -> + def stableKeyword = ['RELEASE', 'FINAL', 'GA'].any { it -> version.toUpperCase().contains(it) } + def regex = /^[0-9,.v-]+(-r)?$/ + return !stableKeyword && !(version ==~ regex) } +// https://github.com/ben-manes/gradle-versions-plugin +tasks.named("dependencyUpdates").configure { + rejectVersionIf { + isNonStable(it.candidate.version) + } +} diff --git a/gradle.properties b/gradle.properties index 0394946..428b6e2 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,2 +1,26 @@ +# Project-wide Gradle settings. + +# For more details on how to configure your build environment visit +# http://www.gradle.org/docs/current/userguide/build_environment.html + +# Specifies the JVM arguments used for the daemon process. +# The setting is particularly useful for tweaking memory settings. +org.gradle.jvmargs=-Xmx4096m + +# When configured, Gradle will run in parallel mode. +# This option should only be used with decoupled projects. More details, visit +# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects +org.gradle.parallel=true +org.gradle.caching=true + +# Bespoke settings. projectTitle = Java Dataloader -projectDescription = Port of Facebook Dataloader for Java \ No newline at end of file +projectDescription = Port of Facebook Dataloader for Java + +# Dependency versions. +junit_version=5.11.3 +hamcrest_version=2.2 +awaitility_version=2.0.0 +reactor_core_version=3.6.6 +caffeine_version=3.1.8 +reactive_streams_version=1.0.3 \ No newline at end of file diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index d2880ba..e2847c8 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,5 +1,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-7.3.2-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-8.11.1-bin.zip +networkTimeout=10000 +validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists diff --git a/settings.gradle b/settings.gradle index e69de29..47404e7 100644 --- a/settings.gradle +++ b/settings.gradle @@ -0,0 +1,21 @@ +plugins { + id 'com.gradle.develocity' version '3.19' + id 'org.gradle.toolchains.foojay-resolver-convention' version '0.9.0' +} + +develocity { + buildScan { + final def isCI = System.getenv('CI') != null; + termsOfUseUrl = "https://gradle.com/help/legal-terms-of-use" + termsOfUseAgree = "yes" + publishing.onlyIf { true } + tag(isCI ? 
'CI' : 'Local') + uploadInBackground = !isCI + } +} + +dependencyResolutionManagement { + repositories { + mavenCentral() + } +} \ No newline at end of file diff --git a/src/jmh/java/performance/DataLoaderDispatchPerformance.java b/src/jmh/java/performance/DataLoaderDispatchPerformance.java new file mode 100644 index 0000000..0b4696d --- /dev/null +++ b/src/jmh/java/performance/DataLoaderDispatchPerformance.java @@ -0,0 +1,309 @@ +package performance; + +import org.dataloader.BatchLoader; +import org.dataloader.DataLoader; +import org.dataloader.DataLoaderFactory; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.infra.Blackhole; + +import java.util.List; +import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; + +@State(Scope.Benchmark) +@Warmup(iterations = 2, time = 5) +@Measurement(iterations = 4) +@Fork(1) +public class DataLoaderDispatchPerformance { + + static Owner o1 = new Owner("O-1", "Andi", List.of("P-1", "P-2", "P-3")); + static Owner o2 = new Owner("O-2", "George", List.of("P-4", "P-5", "P-6")); + static Owner o3 = new Owner("O-3", "Peppa", List.of("P-7", "P-8", "P-9", "P-10")); + static Owner o4 = new Owner("O-4", "Alice", List.of("P-11", "P-12")); + static Owner o5 = new Owner("O-5", "Bob", List.of("P-13")); + static Owner o6 = new Owner("O-6", "Catherine", List.of("P-14", "P-15", "P-16")); + static Owner o7 = new Owner("O-7", "David", List.of("P-17")); + static Owner o8 = new Owner("O-8", "Emma", List.of("P-18", "P-19", "P-20", "P-21")); + static Owner o9 = new Owner("O-9", "Frank", List.of("P-22")); + static Owner o10 = new Owner("O-10", "Grace", List.of("P-23", "P-24")); + static Owner o11 = new Owner("O-11", "Hannah", List.of("P-25", "P-26", "P-27")); + static Owner o12 = new Owner("O-12", "Ian", List.of("P-28")); + static Owner o13 = new Owner("O-13", "Jane", List.of("P-29", "P-30")); + static Owner o14 = new Owner("O-14", "Kevin", List.of("P-31", "P-32", "P-33")); + static Owner o15 = new Owner("O-15", "Laura", List.of("P-34")); + static Owner o16 = new Owner("O-16", "Michael", List.of("P-35", "P-36")); + static Owner o17 = new Owner("O-17", "Nina", List.of("P-37", "P-38", "P-39", "P-40")); + static Owner o18 = new Owner("O-18", "Oliver", List.of("P-41")); + static Owner o19 = new Owner("O-19", "Paula", List.of("P-42", "P-43")); + static Owner o20 = new Owner("O-20", "Quinn", List.of("P-44", "P-45", "P-46")); + static Owner o21 = new Owner("O-21", "Rachel", List.of("P-47")); + static Owner o22 = new Owner("O-22", "Steve", List.of("P-48", "P-49")); + static Owner o23 = new Owner("O-23", "Tina", List.of("P-50", "P-51", "P-52")); + static Owner o24 = new Owner("O-24", "Uma", List.of("P-53")); + static Owner o25 = new Owner("O-25", "Victor", List.of("P-54", "P-55")); + static Owner o26 = new Owner("O-26", "Wendy", List.of("P-56", "P-57", "P-58")); + static Owner o27 = new Owner("O-27", "Xander", List.of("P-59")); + static Owner o28 = new Owner("O-28", "Yvonne", List.of("P-60", "P-61")); + static Owner o29 = new Owner("O-29", "Zach", List.of("P-62", "P-63", 
"P-64")); + static Owner o30 = new Owner("O-30", "Willy", List.of("P-65", "P-66", "P-67")); + + + static Pet p1 = new Pet("P-1", "Bella", "O-1", List.of("P-2", "P-3", "P-4")); + static Pet p2 = new Pet("P-2", "Charlie", "O-2", List.of("P-1", "P-5", "P-6")); + static Pet p3 = new Pet("P-3", "Luna", "O-3", List.of("P-1", "P-2", "P-7", "P-8")); + static Pet p4 = new Pet("P-4", "Max", "O-1", List.of("P-1", "P-9", "P-10")); + static Pet p5 = new Pet("P-5", "Lucy", "O-2", List.of("P-2", "P-6")); + static Pet p6 = new Pet("P-6", "Cooper", "O-3", List.of("P-3", "P-5", "P-7")); + static Pet p7 = new Pet("P-7", "Daisy", "O-1", List.of("P-4", "P-6", "P-8")); + static Pet p8 = new Pet("P-8", "Milo", "O-2", List.of("P-3", "P-7", "P-9")); + static Pet p9 = new Pet("P-9", "Lola", "O-3", List.of("P-4", "P-8", "P-10")); + static Pet p10 = new Pet("P-10", "Rocky", "O-1", List.of("P-4", "P-9")); + static Pet p11 = new Pet("P-11", "Buddy", "O-4", List.of("P-12")); + static Pet p12 = new Pet("P-12", "Bailey", "O-4", List.of("P-11", "P-13")); + static Pet p13 = new Pet("P-13", "Sadie", "O-5", List.of("P-12")); + static Pet p14 = new Pet("P-14", "Maggie", "O-6", List.of("P-15")); + static Pet p15 = new Pet("P-15", "Sophie", "O-6", List.of("P-14", "P-16")); + static Pet p16 = new Pet("P-16", "Chloe", "O-6", List.of("P-15")); + static Pet p17 = new Pet("P-17", "Duke", "O-7", List.of("P-18")); + static Pet p18 = new Pet("P-18", "Riley", "O-8", List.of("P-17", "P-19")); + static Pet p19 = new Pet("P-19", "Lilly", "O-8", List.of("P-18", "P-20")); + static Pet p20 = new Pet("P-20", "Zoey", "O-8", List.of("P-19")); + static Pet p21 = new Pet("P-21", "Oscar", "O-8", List.of("P-22")); + static Pet p22 = new Pet("P-22", "Toby", "O-9", List.of("P-21", "P-23")); + static Pet p23 = new Pet("P-23", "Ruby", "O-10", List.of("P-22")); + static Pet p24 = new Pet("P-24", "Milo", "O-10", List.of("P-25")); + static Pet p25 = new Pet("P-25", "Finn", "O-11", List.of("P-24", "P-26")); + static Pet p26 = new Pet("P-26", "Luna", "O-11", List.of("P-25")); + static Pet p27 = new Pet("P-27", "Ellie", "O-11", List.of("P-28")); + static Pet p28 = new Pet("P-28", "Harley", "O-12", List.of("P-27", "P-29")); + static Pet p29 = new Pet("P-29", "Penny", "O-13", List.of("P-28")); + static Pet p30 = new Pet("P-30", "Hazel", "O-13", List.of("P-31")); + static Pet p31 = new Pet("P-31", "Gus", "O-14", List.of("P-30", "P-32")); + static Pet p32 = new Pet("P-32", "Dexter", "O-14", List.of("P-31")); + static Pet p33 = new Pet("P-33", "Winnie", "O-14", List.of("P-34")); + static Pet p34 = new Pet("P-34", "Murphy", "O-15", List.of("P-33", "P-35")); + static Pet p35 = new Pet("P-35", "Moose", "O-16", List.of("P-34")); + static Pet p36 = new Pet("P-36", "Scout", "O-16", List.of("P-37")); + static Pet p37 = new Pet("P-37", "Rex", "O-17", List.of("P-36", "P-38")); + static Pet p38 = new Pet("P-38", "Coco", "O-17", List.of("P-37")); + static Pet p39 = new Pet("P-39", "Maddie", "O-17", List.of("P-40")); + static Pet p40 = new Pet("P-40", "Archie", "O-17", List.of("P-39", "P-41")); + static Pet p41 = new Pet("P-41", "Buster", "O-18", List.of("P-40")); + static Pet p42 = new Pet("P-42", "Rosie", "O-19", List.of("P-43")); + static Pet p43 = new Pet("P-43", "Molly", "O-19", List.of("P-42", "P-44")); + static Pet p44 = new Pet("P-44", "Henry", "O-20", List.of("P-43")); + static Pet p45 = new Pet("P-45", "Leo", "O-20", List.of("P-46")); + static Pet p46 = new Pet("P-46", "Jack", "O-20", List.of("P-45", "P-47")); + static Pet p47 = new Pet("P-47", "Zoe", "O-21", 
List.of("P-46")); + static Pet p48 = new Pet("P-48", "Lulu", "O-22", List.of("P-49")); + static Pet p49 = new Pet("P-49", "Mimi", "O-22", List.of("P-48", "P-50")); + static Pet p50 = new Pet("P-50", "Nala", "O-23", List.of("P-49")); + static Pet p51 = new Pet("P-51", "Simba", "O-23", List.of("P-52")); + static Pet p52 = new Pet("P-52", "Teddy", "O-23", List.of("P-51", "P-53")); + static Pet p53 = new Pet("P-53", "Mochi", "O-24", List.of("P-52")); + static Pet p54 = new Pet("P-54", "Oreo", "O-25", List.of("P-55")); + static Pet p55 = new Pet("P-55", "Peanut", "O-25", List.of("P-54", "P-56")); + static Pet p56 = new Pet("P-56", "Pumpkin", "O-26", List.of("P-55")); + static Pet p57 = new Pet("P-57", "Shadow", "O-26", List.of("P-58")); + static Pet p58 = new Pet("P-58", "Sunny", "O-26", List.of("P-57", "P-59")); + static Pet p59 = new Pet("P-59", "Thor", "O-27", List.of("P-58")); + static Pet p60 = new Pet("P-60", "Willow", "O-28", List.of("P-61")); + static Pet p61 = new Pet("P-61", "Zeus", "O-28", List.of("P-60", "P-62")); + static Pet p62 = new Pet("P-62", "Ace", "O-29", List.of("P-61")); + static Pet p63 = new Pet("P-63", "Blue", "O-29", List.of("P-64")); + static Pet p64 = new Pet("P-64", "Cleo", "O-29", List.of("P-63", "P-65")); + static Pet p65 = new Pet("P-65", "Dolly", "O-30", List.of("P-64")); + static Pet p66 = new Pet("P-66", "Ella", "O-30", List.of("P-67")); + static Pet p67 = new Pet("P-67", "Freddy", "O-30", List.of("P-66")); + + + static Map owners = Map.ofEntries( + Map.entry(o1.id, o1), + Map.entry(o2.id, o2), + Map.entry(o3.id, o3), + Map.entry(o4.id, o4), + Map.entry(o5.id, o5), + Map.entry(o6.id, o6), + Map.entry(o7.id, o7), + Map.entry(o8.id, o8), + Map.entry(o9.id, o9), + Map.entry(o10.id, o10), + Map.entry(o11.id, o11), + Map.entry(o12.id, o12), + Map.entry(o13.id, o13), + Map.entry(o14.id, o14), + Map.entry(o15.id, o15), + Map.entry(o16.id, o16), + Map.entry(o17.id, o17), + Map.entry(o18.id, o18), + Map.entry(o19.id, o19), + Map.entry(o20.id, o20), + Map.entry(o21.id, o21), + Map.entry(o22.id, o22), + Map.entry(o23.id, o23), + Map.entry(o24.id, o24), + Map.entry(o25.id, o25), + Map.entry(o26.id, o26), + Map.entry(o27.id, o27), + Map.entry(o28.id, o28), + Map.entry(o29.id, o29), + Map.entry(o30.id, o30) + ); + static Map pets = Map.ofEntries( + Map.entry(p1.id, p1), + Map.entry(p2.id, p2), + Map.entry(p3.id, p3), + Map.entry(p4.id, p4), + Map.entry(p5.id, p5), + Map.entry(p6.id, p6), + Map.entry(p7.id, p7), + Map.entry(p8.id, p8), + Map.entry(p9.id, p9), + Map.entry(p10.id, p10), + Map.entry(p11.id, p11), + Map.entry(p12.id, p12), + Map.entry(p13.id, p13), + Map.entry(p14.id, p14), + Map.entry(p15.id, p15), + Map.entry(p16.id, p16), + Map.entry(p17.id, p17), + Map.entry(p18.id, p18), + Map.entry(p19.id, p19), + Map.entry(p20.id, p20), + Map.entry(p21.id, p21), + Map.entry(p22.id, p22), + Map.entry(p23.id, p23), + Map.entry(p24.id, p24), + Map.entry(p25.id, p25), + Map.entry(p26.id, p26), + Map.entry(p27.id, p27), + Map.entry(p28.id, p28), + Map.entry(p29.id, p29), + Map.entry(p30.id, p30), + Map.entry(p31.id, p31), + Map.entry(p32.id, p32), + Map.entry(p33.id, p33), + Map.entry(p34.id, p34), + Map.entry(p35.id, p35), + Map.entry(p36.id, p36), + Map.entry(p37.id, p37), + Map.entry(p38.id, p38), + Map.entry(p39.id, p39), + Map.entry(p40.id, p40), + Map.entry(p41.id, p41), + Map.entry(p42.id, p42), + Map.entry(p43.id, p43), + Map.entry(p44.id, p44), + Map.entry(p45.id, p45), + Map.entry(p46.id, p46), + Map.entry(p47.id, p47), + Map.entry(p48.id, p48), + Map.entry(p49.id, 
p49), + Map.entry(p50.id, p50), + Map.entry(p51.id, p51), + Map.entry(p52.id, p52), + Map.entry(p53.id, p53), + Map.entry(p54.id, p54), + Map.entry(p55.id, p55), + Map.entry(p56.id, p56), + Map.entry(p57.id, p57), + Map.entry(p58.id, p58), + Map.entry(p59.id, p59), + Map.entry(p60.id, p60), + Map.entry(p61.id, p61), + Map.entry(p62.id, p62), + Map.entry(p63.id, p63), + Map.entry(p64.id, p64), + Map.entry(p65.id, p65), + Map.entry(p66.id, p66), + Map.entry(p67.id, p67) + ); + + static class Owner { + public Owner(String id, String name, List petIds) { + this.id = id; + this.name = name; + this.petIds = petIds; + } + + String id; + String name; + List petIds; + } + + static class Pet { + public Pet(String id, String name, String ownerId, List friendsIds) { + this.id = id; + this.name = name; + this.ownerId = ownerId; + this.friendsIds = friendsIds; + } + + String id; + String name; + String ownerId; + List friendsIds; + } + + + static BatchLoader ownerBatchLoader = list -> { + List collect = list.stream().map(key -> { + Owner owner = owners.get(key); + return owner; + }).collect(Collectors.toList()); + return CompletableFuture.completedFuture(collect); + }; + static BatchLoader petBatchLoader = list -> { + List collect = list.stream().map(key -> { + Pet owner = pets.get(key); + return owner; + }).collect(Collectors.toList()); + return CompletableFuture.completedFuture(collect); + }; + + + @State(Scope.Benchmark) + public static class MyState { + @Setup + public void setup() { + + } + + } + + + @Benchmark + @BenchmarkMode(Mode.AverageTime) + @OutputTimeUnit(TimeUnit.NANOSECONDS) + public void loadAndDispatch(MyState myState, Blackhole blackhole) { + DataLoader ownerDL = DataLoaderFactory.newDataLoader(ownerBatchLoader); + DataLoader petDL = DataLoaderFactory.newDataLoader(petBatchLoader); + + for (Owner owner : owners.values()) { + ownerDL.load(owner.id); + for (String petId : owner.petIds) { + petDL.load(petId); + for (String friendId : pets.get(petId).friendsIds) { + petDL.load(friendId); + } + } + } + + CompletableFuture cf1 = ownerDL.dispatch(); + CompletableFuture cf2 = petDL.dispatch(); + blackhole.consume(CompletableFuture.allOf(cf1, cf2).join()); + } + + +} diff --git a/src/jmh/java/performance/PerformanceTestingUtils.java b/src/jmh/java/performance/PerformanceTestingUtils.java new file mode 100644 index 0000000..9e05fd6 --- /dev/null +++ b/src/jmh/java/performance/PerformanceTestingUtils.java @@ -0,0 +1,84 @@ +package performance; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.net.URL; +import java.nio.charset.Charset; +import java.time.LocalDateTime; +import java.time.format.DateTimeFormatter; +import java.util.concurrent.Callable; + +public class PerformanceTestingUtils { + + @SuppressWarnings("UnstableApiUsage") + static String loadResource(String name) { + return asRTE(() -> { + URL resource = PerformanceTestingUtils.class.getClassLoader().getResource(name); + if (resource == null) { + throw new IllegalArgumentException("missing resource: " + name); + } + byte[] bytes; + try (InputStream inputStream = resource.openStream()) { + bytes = inputStream.readAllBytes(); + } + return new String(bytes, Charset.defaultCharset()); + }); + } + + static T asRTE(Callable callable) { + try { + return callable.call(); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + public static void runInToolingForSomeTimeThenExit(Runnable setup, Runnable r, Runnable tearDown) { + int runForMillis 
= getRunForMillis(); + if (runForMillis <= 0) { + System.out.print("'runForMillis' environment var is not set - continuing \n"); + return; + } + System.out.printf("Running initial code in some tooling - runForMillis=%d \n", runForMillis); + System.out.print("Get your tooling in order and press enter..."); + readLine(); + System.out.print("Lets go...\n"); + setup.run(); + + DateTimeFormatter dtf = DateTimeFormatter.ofPattern("HH:mm:ss"); + long now, then = System.currentTimeMillis(); + do { + now = System.currentTimeMillis(); + long msLeft = runForMillis - (now - then); + System.out.printf("\t%s Running in loop... %s ms left\n", dtf.format(LocalDateTime.now()), msLeft); + r.run(); + now = System.currentTimeMillis(); + } while ((now - then) < runForMillis); + + tearDown.run(); + + System.out.printf("This ran for %d millis. Exiting...\n", System.currentTimeMillis() - then); + System.exit(0); + } + + private static void readLine() { + BufferedReader br = new BufferedReader(new InputStreamReader(System.in)); + try { + br.readLine(); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + private static int getRunForMillis() { + String runFor = System.getenv("runForMillis"); + try { + return Integer.parseInt(runFor); + } catch (NumberFormatException e) { + return -1; + } + } + +} diff --git a/src/main/java/org/dataloader/BatchLoader.java b/src/main/java/org/dataloader/BatchLoader.java index c1916e3..2b0c3c5 100644 --- a/src/main/java/org/dataloader/BatchLoader.java +++ b/src/main/java/org/dataloader/BatchLoader.java @@ -17,6 +17,8 @@ package org.dataloader; import org.dataloader.annotations.PublicSpi; +import org.jspecify.annotations.NonNull; +import org.jspecify.annotations.NullMarked; import java.util.List; import java.util.concurrent.CompletionStage; @@ -74,6 +76,7 @@ */ @FunctionalInterface @PublicSpi +@NullMarked public interface BatchLoader { /** diff --git a/src/main/java/org/dataloader/BatchLoaderContextProvider.java b/src/main/java/org/dataloader/BatchLoaderContextProvider.java index d1eb1fe..702fd66 100644 --- a/src/main/java/org/dataloader/BatchLoaderContextProvider.java +++ b/src/main/java/org/dataloader/BatchLoaderContextProvider.java @@ -1,6 +1,7 @@ package org.dataloader; import org.dataloader.annotations.PublicSpi; +import org.jspecify.annotations.NullMarked; /** * A BatchLoaderContextProvider is used by the {@link org.dataloader.DataLoader} code to @@ -8,9 +9,10 @@ * case is for propagating user security credentials or database connection parameters for example. */ @PublicSpi +@NullMarked public interface BatchLoaderContextProvider { /** * @return a context object that may be needed in batch load calls */ Object getContext(); -} \ No newline at end of file +} diff --git a/src/main/java/org/dataloader/BatchLoaderEnvironment.java b/src/main/java/org/dataloader/BatchLoaderEnvironment.java index 6039a4a..6b84e70 100644 --- a/src/main/java/org/dataloader/BatchLoaderEnvironment.java +++ b/src/main/java/org/dataloader/BatchLoaderEnvironment.java @@ -2,6 +2,8 @@ import org.dataloader.annotations.PublicApi; import org.dataloader.impl.Assertions; +import org.jspecify.annotations.NullMarked; +import org.jspecify.annotations.Nullable; import java.util.ArrayList; import java.util.Collections; @@ -14,6 +16,7 @@ * of the calling users for example or database parameters that allow the data layer call to succeed. 
*/ @PublicApi +@NullMarked public class BatchLoaderEnvironment { private final Object context; @@ -34,7 +37,7 @@ private BatchLoaderEnvironment(Object context, List keyContextsList, Map * @return a context object or null if there isn't one */ @SuppressWarnings("unchecked") - public T getContext() { + public @Nullable T getContext() { return (T) context; } diff --git a/src/main/java/org/dataloader/BatchLoaderEnvironmentProvider.java b/src/main/java/org/dataloader/BatchLoaderEnvironmentProvider.java index fd60a14..dae7c92 100644 --- a/src/main/java/org/dataloader/BatchLoaderEnvironmentProvider.java +++ b/src/main/java/org/dataloader/BatchLoaderEnvironmentProvider.java @@ -1,6 +1,7 @@ package org.dataloader; import org.dataloader.annotations.PublicSpi; +import org.jspecify.annotations.NullMarked; /** * A BatchLoaderEnvironmentProvider is used by the {@link org.dataloader.DataLoader} code to @@ -9,9 +10,10 @@ * case is for propagating user security credentials or database connection parameters. */ @PublicSpi +@NullMarked public interface BatchLoaderEnvironmentProvider { /** * @return a {@link org.dataloader.BatchLoaderEnvironment} that may be needed in batch calls */ BatchLoaderEnvironment get(); -} \ No newline at end of file +} diff --git a/src/main/java/org/dataloader/BatchLoaderWithContext.java b/src/main/java/org/dataloader/BatchLoaderWithContext.java index fbe66b0..eba26e4 100644 --- a/src/main/java/org/dataloader/BatchLoaderWithContext.java +++ b/src/main/java/org/dataloader/BatchLoaderWithContext.java @@ -1,6 +1,7 @@ package org.dataloader; import org.dataloader.annotations.PublicSpi; +import org.jspecify.annotations.NullMarked; import java.util.List; import java.util.concurrent.CompletionStage; @@ -14,6 +15,7 @@ * use this interface. */ @PublicSpi +@NullMarked public interface BatchLoaderWithContext { /** * Called to batch load the provided keys and return a promise to a list of values. This default diff --git a/src/main/java/org/dataloader/BatchPublisher.java b/src/main/java/org/dataloader/BatchPublisher.java new file mode 100644 index 0000000..943becf --- /dev/null +++ b/src/main/java/org/dataloader/BatchPublisher.java @@ -0,0 +1,41 @@ +package org.dataloader; + +import org.dataloader.annotations.PublicSpi; +import org.jspecify.annotations.NullMarked; +import org.jspecify.annotations.Nullable; +import org.reactivestreams.Subscriber; + +import java.util.List; + +/** + * A function that is invoked for batch loading a stream of data values indicated by the provided list of keys. + *

+ * The function must call the provided {@link Subscriber} to process the values it has retrieved to allow + * the future returned by {@link DataLoader#load(Object)} to complete as soon as the individual value is available + * (rather than when all values have been retrieved). + *

+ * NOTE: It is required that {@link Subscriber#onNext(Object)} is invoked on each value in the same order as + * the provided keys and that you provide a value for every key provided. + * + * @param type parameter indicating the type of keys to use for data load requests. + * @param type parameter indicating the type of values returned + * @see BatchLoader for the non-reactive version + */ +@NullMarked +@PublicSpi +public interface BatchPublisher { + /** + * Called to batch the provided keys into a stream of values. You must provide + * the same number of values as there are keys, and they must be in the order of the keys. + *

+ * The idiomatic approach would be to create a reactive {@link org.reactivestreams.Publisher} that provides + * the values given the keys and then subscribe to it with the provided {@link Subscriber}. + *

+ * NOTE: It is required that {@link Subscriber#onNext(Object)} is invoked on each value in the same order as + * the provided keys and that you provide a value for every key provided. + * + * @param keys the collection of keys to load + * @param subscriber as values arrive you must call the subscriber for each value + */ + void load(List keys, Subscriber subscriber); +} diff --git a/src/main/java/org/dataloader/BatchPublisherWithContext.java b/src/main/java/org/dataloader/BatchPublisherWithContext.java new file mode 100644 index 0000000..9ee010b --- /dev/null +++ b/src/main/java/org/dataloader/BatchPublisherWithContext.java @@ -0,0 +1,38 @@ +package org.dataloader; + +import org.dataloader.annotations.PublicSpi; +import org.jspecify.annotations.NullMarked; +import org.reactivestreams.Subscriber; + +import java.util.List; + +/** + * This form of {@link BatchPublisher} is given a {@link org.dataloader.BatchLoaderEnvironment} object + * that encapsulates the calling context. A typical use case is passing in security credentials or database details + * for example. + *

+ * See {@link BatchPublisher} for more details on the design invariants that you must implement in order to + * use this interface. + */ +@NullMarked +@PublicSpi +public interface BatchPublisherWithContext { + /** + * Called to batch the provided keys into a stream of values. You must provide + * the same number of values as there are keys, and they must be in the order of the keys. + *

+ * The idiomatic approach would be to create a reactive {@link org.reactivestreams.Publisher} that provides + * the values given the keys and then subscribe to it with the provided {@link Subscriber}. + *

+ * NOTE: It is required that {@link Subscriber#onNext(Object)} is invoked on each value in the same order as + * the provided keys and that you provide a value for every key provided. + *

+ * This is given an environment object that may be useful during the call. A typical use case + * is passing in security credentials or database details for example. + * + * @param keys the collection of keys to load + * @param subscriber as values arrive you must call the subscriber for each value + * @param environment an environment object that can help with the call + */ + void load(List keys, Subscriber subscriber, BatchLoaderEnvironment environment); +} diff --git a/src/main/java/org/dataloader/CacheKey.java b/src/main/java/org/dataloader/CacheKey.java index 88b5f97..c5641b1 100644 --- a/src/main/java/org/dataloader/CacheKey.java +++ b/src/main/java/org/dataloader/CacheKey.java @@ -16,6 +16,9 @@ package org.dataloader; +import org.dataloader.annotations.PublicSpi; +import org.jspecify.annotations.NullMarked; + /** * Function that is invoked on input keys of type {@code K} to derive keys that are required by the {@link CacheMap} * implementation. @@ -25,6 +28,8 @@ * @author Arnold Schrijver */ @FunctionalInterface +@NullMarked +@PublicSpi public interface CacheKey { /** diff --git a/src/main/java/org/dataloader/CacheMap.java b/src/main/java/org/dataloader/CacheMap.java index 1a4a455..54b1b49 100644 --- a/src/main/java/org/dataloader/CacheMap.java +++ b/src/main/java/org/dataloader/CacheMap.java @@ -18,6 +18,8 @@ import org.dataloader.annotations.PublicSpi; import org.dataloader.impl.DefaultCacheMap; +import org.jspecify.annotations.NullMarked; +import org.jspecify.annotations.Nullable; import java.util.Collection; import java.util.concurrent.CompletableFuture; @@ -39,6 +41,7 @@ * @author Brad Baker */ @PublicSpi +@NullMarked public interface CacheMap { /** @@ -71,7 +74,7 @@ static CacheMap simpleMap() { * * @return the cached value, or {@code null} if not found (depends on cache implementation) */ - CompletableFuture get(K key); + @Nullable CompletableFuture get(K key); /** * Gets a collection of CompletableFutures from the cache map.
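Because `CacheMap` is now `@NullMarked` and `get` is explicitly `@Nullable`, callers should treat a cache miss as a possibly null future. The following is a small sketch of that contract; it assumes the default `CacheMap.simpleMap()` implementation returns `null` for unknown keys (as the javadoc above allows), and the key and value strings are illustrative:

```java
import org.dataloader.CacheMap;

import java.util.concurrent.CompletableFuture;

public class NullableCacheMapSketch {
    public static void main(String[] args) {
        CacheMap<String, String> cache = CacheMap.simpleMap();
        cache.set("hit", CompletableFuture.completedFuture("cached value"));

        // get() may return null, so check before chaining on the future.
        CompletableFuture<String> present = cache.get("hit");
        CompletableFuture<String> missing = cache.get("miss");

        System.out.println(present != null ? present.join() : "no entry"); // cached value
        System.out.println(missing != null ? missing.join() : "no entry"); // no entry
    }
}
```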
diff --git a/src/main/java/org/dataloader/DataLoader.java b/src/main/java/org/dataloader/DataLoader.java index 1e4ce7d..d03e5ac 100644 --- a/src/main/java/org/dataloader/DataLoader.java +++ b/src/main/java/org/dataloader/DataLoader.java @@ -21,16 +21,22 @@ import org.dataloader.impl.CompletableFutureKit; import org.dataloader.stats.Statistics; import org.dataloader.stats.StatisticsCollector; +import org.jspecify.annotations.NonNull; +import org.jspecify.annotations.NullMarked; +import org.jspecify.annotations.Nullable; import java.time.Clock; import java.time.Duration; import java.time.Instant; import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.function.BiConsumer; +import java.util.function.Consumer; import static org.dataloader.impl.Assertions.nonNull; @@ -57,17 +63,19 @@ * * @param type parameter indicating the type of the data load keys * @param type parameter indicating the type of the data that is returned - * * @author Arnold Schrijver * @author Brad Baker */ @PublicApi +@NullMarked public class DataLoader { private final DataLoaderHelper helper; private final StatisticsCollector stats; private final CacheMap futureCache; private final ValueCache valueCache; + private final DataLoaderOptions options; + private final Object batchLoadFunction; /** * Creates new DataLoader with the specified batch loader function and default options @@ -76,9 +84,7 @@ public class DataLoader { * @param batchLoadFunction the batch load function to use * @param the key type * @param the value type - * * @return a new DataLoader - * * @deprecated use {@link DataLoaderFactory} instead */ @Deprecated @@ -93,13 +99,11 @@ public static DataLoader newDataLoader(BatchLoader batchLoadF * @param options the options to use * @param the key type * @param the value type - * * @return a new DataLoader - * * @deprecated use {@link DataLoaderFactory} instead */ @Deprecated - public static DataLoader newDataLoader(BatchLoader batchLoadFunction, DataLoaderOptions options) { + public static DataLoader newDataLoader(BatchLoader batchLoadFunction, @Nullable DataLoaderOptions options) { return DataLoaderFactory.mkDataLoader(batchLoadFunction, options); } @@ -117,9 +121,7 @@ public static DataLoader newDataLoader(BatchLoader batchLoadF * @param batchLoadFunction the batch load function to use that uses {@link org.dataloader.Try} objects * @param the key type * @param the value type - * * @return a new DataLoader - * * @deprecated use {@link DataLoaderFactory} instead */ @Deprecated @@ -136,14 +138,12 @@ public static DataLoader newDataLoaderWithTry(BatchLoader * @param options the options to use * @param the key type * @param the value type - * * @return a new DataLoader - * * @see DataLoaderFactory#newDataLoaderWithTry(BatchLoader) * @deprecated use {@link DataLoaderFactory} instead */ @Deprecated - public static DataLoader newDataLoaderWithTry(BatchLoader> batchLoadFunction, DataLoaderOptions options) { + public static DataLoader newDataLoaderWithTry(BatchLoader> batchLoadFunction, @Nullable DataLoaderOptions options) { return DataLoaderFactory.mkDataLoader(batchLoadFunction, options); } @@ -154,9 +154,7 @@ public static DataLoader newDataLoaderWithTry(BatchLoader * @param batchLoadFunction the batch load function to use * @param the key type * @param the value type - * * @return a new DataLoader - * * @deprecated use {@link DataLoaderFactory} instead */ 
@Deprecated @@ -171,13 +169,11 @@ public static DataLoader newDataLoader(BatchLoaderWithContext * @param options the options to use * @param the key type * @param the value type - * * @return a new DataLoader - * * @deprecated use {@link DataLoaderFactory} instead */ @Deprecated - public static DataLoader newDataLoader(BatchLoaderWithContext batchLoadFunction, DataLoaderOptions options) { + public static DataLoader newDataLoader(BatchLoaderWithContext batchLoadFunction, @Nullable DataLoaderOptions options) { return DataLoaderFactory.mkDataLoader(batchLoadFunction, options); } @@ -195,9 +191,7 @@ public static DataLoader newDataLoader(BatchLoaderWithContext * @param batchLoadFunction the batch load function to use that uses {@link org.dataloader.Try} objects * @param the key type * @param the value type - * * @return a new DataLoader - * * @deprecated use {@link DataLoaderFactory} instead */ @Deprecated @@ -214,14 +208,12 @@ public static DataLoader newDataLoaderWithTry(BatchLoaderWithContex * @param options the options to use * @param the key type * @param the value type - * * @return a new DataLoader - * * @see DataLoaderFactory#newDataLoaderWithTry(BatchLoader) * @deprecated use {@link DataLoaderFactory} instead */ @Deprecated - public static DataLoader newDataLoaderWithTry(BatchLoaderWithContext> batchLoadFunction, DataLoaderOptions options) { + public static DataLoader newDataLoaderWithTry(BatchLoaderWithContext> batchLoadFunction, @Nullable DataLoaderOptions options) { return DataLoaderFactory.mkDataLoader(batchLoadFunction, options); } @@ -232,9 +224,7 @@ public static DataLoader newDataLoaderWithTry(BatchLoaderWithContex * @param batchLoadFunction the batch load function to use * @param the key type * @param the value type - * * @return a new DataLoader - * * @deprecated use {@link DataLoaderFactory} instead */ @Deprecated @@ -249,13 +239,11 @@ public static DataLoader newMappedDataLoader(MappedBatchLoader the key type * @param the value type - * * @return a new DataLoader - * * @deprecated use {@link DataLoaderFactory} instead */ @Deprecated - public static DataLoader newMappedDataLoader(MappedBatchLoader batchLoadFunction, DataLoaderOptions options) { + public static DataLoader newMappedDataLoader(MappedBatchLoader batchLoadFunction, @Nullable DataLoaderOptions options) { return DataLoaderFactory.mkDataLoader(batchLoadFunction, options); } @@ -274,9 +262,7 @@ public static DataLoader newMappedDataLoader(MappedBatchLoader the key type * @param the value type - * * @return a new DataLoader - * * @deprecated use {@link DataLoaderFactory} instead */ @Deprecated @@ -293,14 +279,12 @@ public static DataLoader newMappedDataLoaderWithTry(MappedBatchLoad * @param options the options to use * @param the key type * @param the value type - * * @return a new DataLoader - * * @see DataLoaderFactory#newDataLoaderWithTry(BatchLoader) * @deprecated use {@link DataLoaderFactory} instead */ @Deprecated - public static DataLoader newMappedDataLoaderWithTry(MappedBatchLoader> batchLoadFunction, DataLoaderOptions options) { + public static DataLoader newMappedDataLoaderWithTry(MappedBatchLoader> batchLoadFunction, @Nullable DataLoaderOptions options) { return DataLoaderFactory.mkDataLoader(batchLoadFunction, options); } @@ -311,9 +295,7 @@ public static DataLoader newMappedDataLoaderWithTry(MappedBatchLoad * @param batchLoadFunction the batch load function to use * @param the key type * @param the value type - * * @return a new DataLoader - * * @deprecated use {@link DataLoaderFactory} instead */ 
@Deprecated @@ -328,13 +310,11 @@ public static DataLoader newMappedDataLoader(MappedBatchLoaderWithC * @param options the options to use * @param the key type * @param the value type - * * @return a new DataLoader - * * @deprecated use {@link DataLoaderFactory} instead */ @Deprecated - public static DataLoader newMappedDataLoader(MappedBatchLoaderWithContext batchLoadFunction, DataLoaderOptions options) { + public static DataLoader newMappedDataLoader(MappedBatchLoaderWithContext batchLoadFunction, @Nullable DataLoaderOptions options) { return DataLoaderFactory.mkDataLoader(batchLoadFunction, options); } @@ -352,9 +332,7 @@ public static DataLoader newMappedDataLoader(MappedBatchLoaderWithC * @param batchLoadFunction the batch load function to use that uses {@link org.dataloader.Try} objects * @param the key type * @param the value type - * * @return a new DataLoader - * * @deprecated use {@link DataLoaderFactory} instead */ @Deprecated @@ -371,14 +349,12 @@ public static DataLoader newMappedDataLoaderWithTry(MappedBatchLoad * @param options the options to use * @param the key type * @param the value type - * * @return a new DataLoader - * * @see DataLoaderFactory#newDataLoaderWithTry(BatchLoader) * @deprecated use {@link DataLoaderFactory} instead */ @Deprecated - public static DataLoader newMappedDataLoaderWithTry(MappedBatchLoaderWithContext> batchLoadFunction, DataLoaderOptions options) { + public static DataLoader newMappedDataLoaderWithTry(MappedBatchLoaderWithContext> batchLoadFunction, @Nullable DataLoaderOptions options) { return DataLoaderFactory.mkDataLoader(batchLoadFunction, options); } @@ -386,7 +362,6 @@ public static DataLoader newMappedDataLoaderWithTry(MappedBatchLoad * Creates a new data loader with the provided batch load function, and default options. * * @param batchLoadFunction the batch load function to use - * * @deprecated use {@link DataLoaderFactory} instead */ @Deprecated @@ -399,26 +374,27 @@ public DataLoader(BatchLoader batchLoadFunction) { * * @param batchLoadFunction the batch load function to use * @param options the batch load options - * * @deprecated use {@link DataLoaderFactory} instead */ @Deprecated - public DataLoader(BatchLoader batchLoadFunction, DataLoaderOptions options) { + public DataLoader(BatchLoader batchLoadFunction, @Nullable DataLoaderOptions options) { this((Object) batchLoadFunction, options); } @VisibleForTesting - DataLoader(Object batchLoadFunction, DataLoaderOptions options) { + DataLoader(Object batchLoadFunction, @Nullable DataLoaderOptions options) { this(batchLoadFunction, options, Clock.systemUTC()); } @VisibleForTesting - DataLoader(Object batchLoadFunction, DataLoaderOptions options, Clock clock) { + DataLoader(Object batchLoadFunction, @Nullable DataLoaderOptions options, Clock clock) { DataLoaderOptions loaderOptions = options == null ? 
new DataLoaderOptions() : options; this.futureCache = determineFutureCache(loaderOptions); this.valueCache = determineValueCache(loaderOptions); // order of keys matter in data loader this.stats = nonNull(loaderOptions.getStatisticsCollector()); + this.batchLoadFunction = nonNull(batchLoadFunction); + this.options = loaderOptions; this.helper = new DataLoaderHelper<>(this, batchLoadFunction, loaderOptions, this.futureCache, this.valueCache, this.stats, clock); } @@ -434,6 +410,32 @@ private ValueCache determineValueCache(DataLoaderOptions loaderOptions) { return (ValueCache) loaderOptions.valueCache().orElseGet(ValueCache::defaultValueCache); } + /** + * @return the options used to build this {@link DataLoader} + */ + public DataLoaderOptions getOptions() { + return options; + } + + /** + * @return the batch load interface used to build this {@link DataLoader} + */ + public Object getBatchLoadFunction() { + return batchLoadFunction; + } + + /** + * This allows you to change the current {@link DataLoader} and turn it into a new one + * + * @param builderConsumer the {@link DataLoaderFactory.Builder} consumer for changing the {@link DataLoader} + * @return a newly built {@link DataLoader} instance + */ + public DataLoader transform(Consumer> builderConsumer) { + DataLoaderFactory.Builder builder = DataLoaderFactory.builder(this); + builderConsumer.accept(builder); + return builder.build(); + } + /** * This returns the last instant the data loader was dispatched. When the data loader is created this value is set to now. * @@ -460,7 +462,6 @@ public Duration getTimeSinceDispatch() { * and returned from cache). * * @param key the key to load - * * @return the future of the value */ public CompletableFuture load(K key) { @@ -478,7 +479,6 @@ public CompletableFuture load(K key) { * NOTE : This will NOT cause a data load to happen. You must call {@link #load(Object)} for that to happen. * * @param key the key to check - * * @return an Optional to the future of the value */ public Optional> getIfPresent(K key) { @@ -497,7 +497,6 @@ public Optional> getIfPresent(K key) { * NOTE : This will NOT cause a data load to happen. You must call {@link #load(Object)} for that to happen. * * @param key the key to check - * * @return an Optional to the future of the value */ public Optional> getIfCompleted(K key) { @@ -517,11 +516,10 @@ public Optional> getIfCompleted(K key) { * * @param key the key to load * @param keyContext a context object that is specific to this key - * * @return the future of the value */ - public CompletableFuture load(K key, Object keyContext) { - return helper.load(key, keyContext); + public CompletableFuture load(@NonNull K key, @Nullable Object keyContext) { + return helper.load(nonNull(key), keyContext); } /** @@ -533,7 +531,6 @@ public CompletableFuture load(K key, Object keyContext) { * and returned from cache). 
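A hedged sketch of the new accessors and transform(...) above (not part of the diff, and reusing the illustrative nameLoader from the earlier sketch): transform(...) copies this loader into a DataLoaderFactory.Builder, lets you adjust it, and builds a new instance, while DataLoaderFactory.builder(existing) exposes the same builder directly. The DataLoaderOptions.transform(...) call used here is added later in this patch.

    DataLoaderOptions currentOptions = nameLoader.getOptions();

    DataLoader<Long, String> noCachingLoader = nameLoader.transform(builder ->
            builder.options(currentOptions.transform(b -> b.setCachingEnabled(false))));

    // equivalent route via the factory builder
    DataLoader<Long, String> copy = DataLoaderFactory.builder(nameLoader)
            .options(currentOptions)
            .build();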
* * @param keys the list of keys to load - * * @return the composite future of the list of values */ public CompletableFuture> loadMany(List keys) { @@ -553,7 +550,6 @@ public CompletableFuture> loadMany(List keys) { * * @param keys the list of keys to load * @param keyContexts the list of key calling context objects - * * @return the composite future of the list of values */ public CompletableFuture> loadMany(List keys, List keyContexts) { @@ -574,6 +570,34 @@ public CompletableFuture> loadMany(List keys, List keyContext } } + /** + * Requests to load the map of data provided by the specified keys asynchronously, and returns a composite future + * of the resulting values. + *

+ * If batching is enabled (the default), you'll have to call {@link DataLoader#dispatch()} at a later stage to + * start batch execution. If you forget this call the future will never be completed (unless already completed, + * and returned from cache). + *

+ * The key context object may be useful in the batch loader interfaces such as {@link org.dataloader.BatchLoaderWithContext} or + * {@link org.dataloader.MappedBatchLoaderWithContext} to help retrieve data. + * + * @param keysAndContexts the map of keys to their respective contexts + * @return the composite future of the map of keys and values + */ + public CompletableFuture> loadMany(Map keysAndContexts) { + nonNull(keysAndContexts); + + synchronized (this) { + Map> collect = new HashMap<>(keysAndContexts.size()); + for (Map.Entry entry : keysAndContexts.entrySet()) { + K key = entry.getKey(); + Object keyContext = entry.getValue(); + collect.put(key, load(key, keyContext)); + } + return CompletableFutureKit.allOf(collect); + } + } + /** * Dispatches the queued load requests to the batch execution function and returns a promise of the result. *
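An illustrative sketch of the new loadMany(Map) overload just added: each key carries its own key context, and the returned future completes with a key-to-value map once dispatch() runs. The userLoader variable is an assumed DataLoader<Long, String>; the context strings are placeholders.

    Map<Long, Object> keysAndContexts = new LinkedHashMap<>();
    keysAndContexts.put(1L, "context-for-1");
    keysAndContexts.put(2L, "context-for-2");

    CompletableFuture<Map<Long, String>> results = userLoader.loadMany(keysAndContexts);

    userLoader.dispatch();   // with batching enabled the future only completes after a dispatch

    results.thenAccept(map -> map.forEach((id, name) -> System.out.println(id + " -> " + name)));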

@@ -630,7 +654,6 @@ public int dispatchDepth() { * on the next load request. * * @param key the key to remove - * * @return the data loader for fluent coding */ public DataLoader clear(K key) { @@ -644,7 +667,6 @@ public DataLoader clear(K key) { * * @param key the key to remove * @param handler a handler that will be called after the async remote clear completes - * * @return the data loader for fluent coding */ public DataLoader clear(K key, BiConsumer handler) { @@ -670,7 +692,6 @@ public DataLoader clearAll() { * Clears the entire cache map of the loader, and of the cached value store. * * @param handler a handler that will be called after the async remote clear all completes - * * @return the data loader for fluent coding */ public DataLoader clearAll(BiConsumer handler) { @@ -688,7 +709,6 @@ public DataLoader clearAll(BiConsumer handler) { * * @param key the key * @param value the value - * * @return the data loader for fluent coding */ public DataLoader prime(K key, V value) { @@ -700,7 +720,6 @@ public DataLoader prime(K key, V value) { * * @param key the key * @param error the exception to prime instead of a value - * * @return the data loader for fluent coding */ public DataLoader prime(K key, Exception error) { @@ -714,7 +733,6 @@ public DataLoader prime(K key, Exception error) { * * @param key the key * @param value the value - * * @return the data loader for fluent coding */ public DataLoader prime(K key, CompletableFuture value) { @@ -734,7 +752,6 @@ public DataLoader prime(K key, CompletableFuture value) { * If no cache key function is present in {@link DataLoaderOptions}, then the returned value equals the input key. * * @param key the input key - * * @return the cache key after the input is transformed with the cache key function */ public Object getCacheKey(K key) { @@ -753,6 +770,7 @@ public Statistics getStatistics() { /** * Gets the cacheMap associated with this data loader passed in via {@link DataLoaderOptions#cacheMap()} + * * @return the cacheMap of this data loader */ public CacheMap getCacheMap() { @@ -762,6 +780,7 @@ public CacheMap getCacheMap() { /** * Gets the valueCache associated with this data loader passed in via {@link DataLoaderOptions#valueCache()} + * * @return the valueCache of this data loader */ public ValueCache getValueCache() { diff --git a/src/main/java/org/dataloader/DataLoaderFactory.java b/src/main/java/org/dataloader/DataLoaderFactory.java index 013f473..ef1a287 100644 --- a/src/main/java/org/dataloader/DataLoaderFactory.java +++ b/src/main/java/org/dataloader/DataLoaderFactory.java @@ -1,6 +1,7 @@ package org.dataloader; import org.dataloader.annotations.PublicApi; +import org.jspecify.annotations.Nullable; /** * A factory class to create {@link DataLoader}s @@ -16,7 +17,6 @@ public class DataLoaderFactory { * @param batchLoadFunction the batch load function to use * @param the key type * @param the value type - * * @return a new DataLoader */ public static DataLoader newDataLoader(BatchLoader batchLoadFunction) { @@ -30,7 +30,6 @@ public static DataLoader newDataLoader(BatchLoader batchLoadF * @param options the options to use * @param the key type * @param the value type - * * @return a new DataLoader */ public static DataLoader newDataLoader(BatchLoader batchLoadFunction, DataLoaderOptions options) { @@ -51,7 +50,6 @@ public static DataLoader newDataLoader(BatchLoader batchLoadF * @param batchLoadFunction the batch load function to use that uses {@link org.dataloader.Try} objects * @param the key type * @param the value type - * * 
@return a new DataLoader */ public static DataLoader newDataLoaderWithTry(BatchLoader> batchLoadFunction) { @@ -67,9 +65,7 @@ public static DataLoader newDataLoaderWithTry(BatchLoader * @param options the options to use * @param the key type * @param the value type - * * @return a new DataLoader - * * @see #newDataLoaderWithTry(BatchLoader) */ public static DataLoader newDataLoaderWithTry(BatchLoader> batchLoadFunction, DataLoaderOptions options) { @@ -83,7 +79,6 @@ public static DataLoader newDataLoaderWithTry(BatchLoader * @param batchLoadFunction the batch load function to use * @param the key type * @param the value type - * * @return a new DataLoader */ public static DataLoader newDataLoader(BatchLoaderWithContext batchLoadFunction) { @@ -97,7 +92,6 @@ public static DataLoader newDataLoader(BatchLoaderWithContext * @param options the options to use * @param the key type * @param the value type - * * @return a new DataLoader */ public static DataLoader newDataLoader(BatchLoaderWithContext batchLoadFunction, DataLoaderOptions options) { @@ -118,7 +112,6 @@ public static DataLoader newDataLoader(BatchLoaderWithContext * @param batchLoadFunction the batch load function to use that uses {@link org.dataloader.Try} objects * @param the key type * @param the value type - * * @return a new DataLoader */ public static DataLoader newDataLoaderWithTry(BatchLoaderWithContext> batchLoadFunction) { @@ -134,9 +127,7 @@ public static DataLoader newDataLoaderWithTry(BatchLoaderWithContex * @param options the options to use * @param the key type * @param the value type - * * @return a new DataLoader - * * @see #newDataLoaderWithTry(BatchLoader) */ public static DataLoader newDataLoaderWithTry(BatchLoaderWithContext> batchLoadFunction, DataLoaderOptions options) { @@ -150,7 +141,6 @@ public static DataLoader newDataLoaderWithTry(BatchLoaderWithContex * @param batchLoadFunction the batch load function to use * @param the key type * @param the value type - * * @return a new DataLoader */ public static DataLoader newMappedDataLoader(MappedBatchLoader batchLoadFunction) { @@ -164,10 +154,9 @@ public static DataLoader newMappedDataLoader(MappedBatchLoader the key type * @param the value type - * * @return a new DataLoader */ - public static DataLoader newMappedDataLoader(MappedBatchLoader batchLoadFunction, DataLoaderOptions options) { + public static DataLoader newMappedDataLoader(MappedBatchLoader batchLoadFunction, @Nullable DataLoaderOptions options) { return mkDataLoader(batchLoadFunction, options); } @@ -186,7 +175,6 @@ public static DataLoader newMappedDataLoader(MappedBatchLoader the key type * @param the value type - * * @return a new DataLoader */ public static DataLoader newMappedDataLoaderWithTry(MappedBatchLoader> batchLoadFunction) { @@ -202,9 +190,7 @@ public static DataLoader newMappedDataLoaderWithTry(MappedBatchLoad * @param options the options to use * @param the key type * @param the value type - * * @return a new DataLoader - * * @see #newDataLoaderWithTry(BatchLoader) */ public static DataLoader newMappedDataLoaderWithTry(MappedBatchLoader> batchLoadFunction, DataLoaderOptions options) { @@ -218,7 +204,6 @@ public static DataLoader newMappedDataLoaderWithTry(MappedBatchLoad * @param batchLoadFunction the batch load function to use * @param the key type * @param the value type - * * @return a new DataLoader */ public static DataLoader newMappedDataLoader(MappedBatchLoaderWithContext batchLoadFunction) { @@ -232,7 +217,6 @@ public static DataLoader 
newMappedDataLoader(MappedBatchLoaderWithC * @param options the options to use * @param the key type * @param the value type - * * @return a new DataLoader */ public static DataLoader newMappedDataLoader(MappedBatchLoaderWithContext batchLoadFunction, DataLoaderOptions options) { @@ -253,7 +237,6 @@ public static DataLoader newMappedDataLoader(MappedBatchLoaderWithC * @param batchLoadFunction the batch load function to use that uses {@link org.dataloader.Try} objects * @param the key type * @param the value type - * * @return a new DataLoader */ public static DataLoader newMappedDataLoaderWithTry(MappedBatchLoaderWithContext> batchLoadFunction) { @@ -269,16 +252,319 @@ public static DataLoader newMappedDataLoaderWithTry(MappedBatchLoad * @param options the options to use * @param the key type * @param the value type + * @return a new DataLoader + * @see #newDataLoaderWithTry(BatchLoader) + */ + public static DataLoader newMappedDataLoaderWithTry(MappedBatchLoaderWithContext> batchLoadFunction, DataLoaderOptions options) { + return mkDataLoader(batchLoadFunction, options); + } + + /** + * Creates new DataLoader with the specified batch loader function and default options + * (batching, caching and unlimited batch size). + * + * @param batchLoadFunction the batch load function to use + * @param the key type + * @param the value type + * @return a new DataLoader + */ + public static DataLoader newPublisherDataLoader(BatchPublisher batchLoadFunction) { + return newPublisherDataLoader(batchLoadFunction, null); + } + + /** + * Creates new DataLoader with the specified batch loader function with the provided options * + * @param batchLoadFunction the batch load function to use + * @param options the options to use + * @param the key type + * @param the value type * @return a new DataLoader + */ + public static DataLoader newPublisherDataLoader(BatchPublisher batchLoadFunction, DataLoaderOptions options) { + return mkDataLoader(batchLoadFunction, options); + } + + /** + * Creates new DataLoader with the specified batch loader function and default options + * (batching, caching and unlimited batch size) where the batch loader function returns a list of + * {@link org.dataloader.Try} objects. + *
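Before the Try variant described next, a hedged sketch of the plain newPublisherDataLoader(...) overloads added just above: a BatchPublisher pushes one value per key, in key order, to the supplied org.reactivestreams.Subscriber. Project Reactor's Flux is used purely for illustration and is an assumed dependency, not something this patch requires.

    BatchPublisher<Long, String> namePublisher = (keys, subscriber) ->
            Flux.fromIterable(keys)
                    .map(key -> "name-" + key)
                    .subscribe(subscriber);

    DataLoader<Long, String> publisherLoader = DataLoaderFactory.newPublisherDataLoader(namePublisher);

    CompletableFuture<String> name = publisherLoader.load(1L);
    publisherLoader.dispatch();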

+ * If it's important for you to know the exact status of each item in a batch call and whether it threw exceptions then + * you can use this form to create the data loader. + *

+ * Using Try objects allows you to capture a value returned or an exception that might + * have occurred trying to get a value. . * + * @param batchLoadFunction the batch load function to use that uses {@link org.dataloader.Try} objects + * @param the key type + * @param the value type + * @return a new DataLoader + */ + public static DataLoader newPublisherDataLoaderWithTry(BatchPublisher> batchLoadFunction) { + return newPublisherDataLoaderWithTry(batchLoadFunction, null); + } + + /** + * Creates new DataLoader with the specified batch loader function and with the provided options + * where the batch loader function returns a list of + * {@link org.dataloader.Try} objects. + * + * @param batchLoadFunction the batch load function to use that uses {@link org.dataloader.Try} objects + * @param options the options to use + * @param the key type + * @param the value type + * @return a new DataLoader * @see #newDataLoaderWithTry(BatchLoader) */ - public static DataLoader newMappedDataLoaderWithTry(MappedBatchLoaderWithContext> batchLoadFunction, DataLoaderOptions options) { + public static DataLoader newPublisherDataLoaderWithTry(BatchPublisher> batchLoadFunction, DataLoaderOptions options) { + return mkDataLoader(batchLoadFunction, options); + } + + /** + * Creates new DataLoader with the specified batch loader function and default options + * (batching, caching and unlimited batch size). + * + * @param batchLoadFunction the batch load function to use + * @param the key type + * @param the value type + * @return a new DataLoader + */ + public static DataLoader newPublisherDataLoader(BatchPublisherWithContext batchLoadFunction) { + return newPublisherDataLoader(batchLoadFunction, null); + } + + /** + * Creates new DataLoader with the specified batch loader function with the provided options + * + * @param batchLoadFunction the batch load function to use + * @param options the options to use + * @param the key type + * @param the value type + * @return a new DataLoader + */ + public static DataLoader newPublisherDataLoader(BatchPublisherWithContext batchLoadFunction, DataLoaderOptions options) { + return mkDataLoader(batchLoadFunction, options); + } + + /** + * Creates new DataLoader with the specified batch loader function and default options + * (batching, caching and unlimited batch size) where the batch loader function returns a list of + * {@link org.dataloader.Try} objects. + *
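And a companion sketch for the ...WithTry variant above: emitting org.dataloader.Try values lets individual keys fail without failing the whole batch. Try.succeeded and Try.failed are the library's existing helpers; Flux is again only an assumed convenience.

    BatchPublisher<Long, Try<String>> tryPublisher = (keys, subscriber) ->
            Flux.fromIterable(keys)
                    .map(key -> key > 0
                            ? Try.succeeded("name-" + key)
                            : Try.<String>failed(new IllegalArgumentException("invalid key " + key)))
                    .subscribe(subscriber);

    DataLoader<Long, String> tryLoader = DataLoaderFactory.newPublisherDataLoaderWithTry(tryPublisher);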

+ * If it's important for you to know the exact status of each item in a batch call and whether it threw exceptions then + * you can use this form to create the data loader. + *

+ * Using Try objects allows you to capture a value returned or an exception that might + * have occurred trying to get a value. . + * + * @param batchLoadFunction the batch load function to use that uses {@link org.dataloader.Try} objects + * @param the key type + * @param the value type + * @return a new DataLoader + */ + public static DataLoader newPublisherDataLoaderWithTry(BatchPublisherWithContext> batchLoadFunction) { + return newPublisherDataLoaderWithTry(batchLoadFunction, null); + } + + /** + * Creates new DataLoader with the specified batch loader function and with the provided options + * where the batch loader function returns a list of + * {@link org.dataloader.Try} objects. + * + * @param batchLoadFunction the batch load function to use that uses {@link org.dataloader.Try} objects + * @param options the options to use + * @param the key type + * @param the value type + * @return a new DataLoader + * @see #newPublisherDataLoaderWithTry(BatchPublisher) + */ + public static DataLoader newPublisherDataLoaderWithTry(BatchPublisherWithContext> batchLoadFunction, DataLoaderOptions options) { + return mkDataLoader(batchLoadFunction, options); + } + + /** + * Creates new DataLoader with the specified batch loader function and default options + * (batching, caching and unlimited batch size). + * + * @param batchLoadFunction the batch load function to use + * @param the key type + * @param the value type + * @return a new DataLoader + */ + public static DataLoader newMappedPublisherDataLoader(MappedBatchPublisher batchLoadFunction) { + return newMappedPublisherDataLoader(batchLoadFunction, null); + } + + /** + * Creates new DataLoader with the specified batch loader function with the provided options + * + * @param batchLoadFunction the batch load function to use + * @param options the options to use + * @param the key type + * @param the value type + * @return a new DataLoader + */ + public static DataLoader newMappedPublisherDataLoader(MappedBatchPublisher batchLoadFunction, DataLoaderOptions options) { + return mkDataLoader(batchLoadFunction, options); + } + + /** + * Creates new DataLoader with the specified batch loader function and default options + * (batching, caching and unlimited batch size) where the batch loader function returns a list of + * {@link org.dataloader.Try} objects. + *
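A similar sketch for the mapped-publisher overloads just above: a MappedBatchPublisher emits key/value pairs (Map.Entry) and may simply omit keys it cannot resolve. Whether the keys parameter arrives as a Set or a List is left to the interface; the lambda below only treats it as an Iterable. Flux remains an illustration-only assumption.

    MappedBatchPublisher<Long, String> mappedPublisher = (keys, subscriber) ->
            Flux.fromIterable(keys)
                    .map(key -> Map.entry(key, "name-" + key))
                    .subscribe(subscriber);

    DataLoader<Long, String> mappedLoader = DataLoaderFactory.newMappedPublisherDataLoader(mappedPublisher);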

+ * If it's important for you to know the exact status of each item in a batch call and whether it threw exceptions then + * you can use this form to create the data loader. + *

+ * Using Try objects allows you to capture a value returned or an exception that might + * have occurred trying to get a value. . + * + * @param batchLoadFunction the batch load function to use that uses {@link org.dataloader.Try} objects + * @param the key type + * @param the value type + * @return a new DataLoader + */ + public static DataLoader newMappedPublisherDataLoaderWithTry(MappedBatchPublisher> batchLoadFunction) { + return newMappedPublisherDataLoaderWithTry(batchLoadFunction, null); + } + + /** + * Creates new DataLoader with the specified batch loader function and with the provided options + * where the batch loader function returns a list of + * {@link org.dataloader.Try} objects. + * + * @param batchLoadFunction the batch load function to use that uses {@link org.dataloader.Try} objects + * @param options the options to use + * @param the key type + * @param the value type + * @return a new DataLoader + * @see #newDataLoaderWithTry(BatchLoader) + */ + public static DataLoader newMappedPublisherDataLoaderWithTry(MappedBatchPublisher> batchLoadFunction, DataLoaderOptions options) { + return mkDataLoader(batchLoadFunction, options); + } + + /** + * Creates new DataLoader with the specified batch loader function and default options + * (batching, caching and unlimited batch size). + * + * @param batchLoadFunction the batch load function to use + * @param the key type + * @param the value type + * @return a new DataLoader + */ + public static DataLoader newMappedPublisherDataLoader(MappedBatchPublisherWithContext batchLoadFunction) { + return newMappedPublisherDataLoader(batchLoadFunction, null); + } + + /** + * Creates new DataLoader with the specified batch loader function with the provided options + * + * @param batchLoadFunction the batch load function to use + * @param options the options to use + * @param the key type + * @param the value type + * @return a new DataLoader + */ + public static DataLoader newMappedPublisherDataLoader(MappedBatchPublisherWithContext batchLoadFunction, DataLoaderOptions options) { + return mkDataLoader(batchLoadFunction, options); + } + + /** + * Creates new DataLoader with the specified batch loader function and default options + * (batching, caching and unlimited batch size) where the batch loader function returns a list of + * {@link org.dataloader.Try} objects. + *

+ * If it's important for you to know the exact status of each item in a batch call and whether it threw exceptions then + * you can use this form to create the data loader. + *

+ * Using Try objects allows you to capture a value returned or an exception that might + * have occurred trying to get a value. . + * + * @param batchLoadFunction the batch load function to use that uses {@link org.dataloader.Try} objects + * @param the key type + * @param the value type + * @return a new DataLoader + */ + public static DataLoader newMappedPublisherDataLoaderWithTry(MappedBatchPublisherWithContext> batchLoadFunction) { + return newMappedPublisherDataLoaderWithTry(batchLoadFunction, null); + } + + /** + * Creates new DataLoader with the specified batch loader function and with the provided options + * where the batch loader function returns a list of + * {@link org.dataloader.Try} objects. + * + * @param batchLoadFunction the batch load function to use that uses {@link org.dataloader.Try} objects + * @param options the options to use + * @param the key type + * @param the value type + * @return a new DataLoader + * @see #newMappedPublisherDataLoaderWithTry(MappedBatchPublisher) + */ + public static DataLoader newMappedPublisherDataLoaderWithTry(MappedBatchPublisherWithContext> batchLoadFunction, DataLoaderOptions options) { return mkDataLoader(batchLoadFunction, options); } static DataLoader mkDataLoader(Object batchLoadFunction, DataLoaderOptions options) { return new DataLoader<>(batchLoadFunction, options); } + + /** + * Return a new {@link Builder} of a data loader. + * + * @param the key type + * @param the value type + * @return a new {@link Builder} of a data loader + */ + public static Builder builder() { + return new Builder<>(); + } + + /** + * Return a new {@link Builder} of a data loader using the specified one as a template. + * + * @param the key type + * @param the value type + * @param dataLoader the {@link DataLoader} to copy values from into the builder + * @return a new {@link Builder} of a data loader + */ + public static Builder builder(DataLoader dataLoader) { + return new Builder<>(dataLoader); + } + + /** + * A builder of {@link DataLoader}s + * + * @param the key type + * @param the value type + */ + public static class Builder { + Object batchLoadFunction; + DataLoaderOptions options = DataLoaderOptions.newOptions(); + + Builder() { + } + + Builder(DataLoader dataLoader) { + this.batchLoadFunction = dataLoader.getBatchLoadFunction(); + this.options = dataLoader.getOptions(); + } + + public Builder batchLoadFunction(Object batchLoadFunction) { + this.batchLoadFunction = batchLoadFunction; + return this; + } + + public Builder options(DataLoaderOptions options) { + this.options = options; + return this; + } + + public DataLoader build() { + return mkDataLoader(batchLoadFunction, options); + } + } } + diff --git a/src/main/java/org/dataloader/DataLoaderHelper.java b/src/main/java/org/dataloader/DataLoaderHelper.java index d934de2..7858780 100644 --- a/src/main/java/org/dataloader/DataLoaderHelper.java +++ b/src/main/java/org/dataloader/DataLoaderHelper.java @@ -3,6 +3,9 @@ import org.dataloader.annotations.GuardedBy; import org.dataloader.annotations.Internal; import org.dataloader.impl.CompletableFutureKit; +import org.dataloader.instrumentation.DataLoaderInstrumentation; +import org.dataloader.instrumentation.DataLoaderInstrumentationContext; +import org.dataloader.reactive.ReactiveSupport; import org.dataloader.scheduler.BatchLoaderScheduler; import org.dataloader.stats.StatisticsCollector; import org.dataloader.stats.context.IncrementBatchLoadCountByStatisticsContext; @@ -10,6 +13,7 @@ import 
org.dataloader.stats.context.IncrementCacheHitCountStatisticsContext; import org.dataloader.stats.context.IncrementLoadCountStatisticsContext; import org.dataloader.stats.context.IncrementLoadErrorCountStatisticsContext; +import org.reactivestreams.Subscriber; import java.time.Clock; import java.time.Instant; @@ -32,6 +36,7 @@ import static java.util.stream.Collectors.toList; import static org.dataloader.impl.Assertions.assertState; import static org.dataloader.impl.Assertions.nonNull; +import static org.dataloader.instrumentation.DataLoaderInstrumentationHelper.ctxOrNoopCtx; /** * This helps break up the large DataLoader class functionality, and it contains the logic to dispatch the @@ -143,26 +148,34 @@ CompletableFuture load(K key, Object loadContext) { boolean cachingEnabled = loaderOptions.cachingEnabled(); stats.incrementLoadCount(new IncrementLoadCountStatisticsContext<>(key, loadContext)); - + DataLoaderInstrumentationContext ctx = ctxOrNoopCtx(instrumentation().beginLoad(dataLoader, key,loadContext)); + CompletableFuture cf; if (cachingEnabled) { - return loadFromCache(key, loadContext, batchingEnabled); + cf = loadFromCache(key, loadContext, batchingEnabled); } else { - return queueOrInvokeLoader(key, loadContext, batchingEnabled, false); + cf = queueOrInvokeLoader(key, loadContext, batchingEnabled, false); } + ctx.onDispatched(); + cf.whenComplete(ctx::onCompleted); + return cf; } } + @SuppressWarnings("unchecked") Object getCacheKey(K key) { return loaderOptions.cacheKeyFunction().isPresent() ? loaderOptions.cacheKeyFunction().get().getKey(key) : key; } + @SuppressWarnings("unchecked") Object getCacheKeyWithContext(K key, Object context) { return loaderOptions.cacheKeyFunction().isPresent() ? loaderOptions.cacheKeyFunction().get().getKeyWithContext(key, context) : key; } DispatchResult dispatch() { + DataLoaderInstrumentationContext> instrCtx = ctxOrNoopCtx(instrumentation().beginDispatch(dataLoader)); + boolean batchingEnabled = loaderOptions.batchingEnabled(); final List keys; final List callContexts; @@ -171,7 +184,8 @@ DispatchResult dispatch() { int queueSize = loaderQueue.size(); if (queueSize == 0) { lastDispatchTime.set(now()); - return emptyDispatchResult(); + instrCtx.onDispatched(); + return endDispatchCtx(instrCtx, emptyDispatchResult()); } // we copy the pre-loaded set of futures ready for dispatch @@ -188,7 +202,8 @@ DispatchResult dispatch() { lastDispatchTime.set(now()); } if (!batchingEnabled) { - return emptyDispatchResult(); + instrCtx.onDispatched(); + return endDispatchCtx(instrCtx, emptyDispatchResult()); } final int totalEntriesHandled = keys.size(); // @@ -209,7 +224,15 @@ DispatchResult dispatch() { } else { futureList = dispatchQueueBatch(keys, callContexts, queuedFutures); } - return new DispatchResult<>(futureList, totalEntriesHandled); + instrCtx.onDispatched(); + return endDispatchCtx(instrCtx, new DispatchResult<>(futureList, totalEntriesHandled)); + } + + private DispatchResult endDispatchCtx(DataLoaderInstrumentationContext> instrCtx, DispatchResult dispatchResult) { + // once the CF completes, we can tell the instrumentation + dispatchResult.getPromisedResults() + .whenComplete((result, throwable) -> instrCtx.onCompleted(dispatchResult, throwable)); + return dispatchResult; } private CompletableFuture> sliceIntoBatchesOfBatches(List keys, List> queuedFutures, List callContexts, int maxBatchSize) { @@ -241,10 +264,14 @@ private CompletableFuture> sliceIntoBatchesOfBatches(List keys, List< @SuppressWarnings("unchecked") private CompletableFuture> 
dispatchQueueBatch(List keys, List callContexts, List> queuedFutures) { stats.incrementBatchLoadCountBy(keys.size(), new IncrementBatchLoadCountByStatisticsContext<>(keys, callContexts)); - CompletableFuture> batchLoad = invokeLoader(keys, callContexts, loaderOptions.cachingEnabled()); + CompletableFuture> batchLoad = invokeLoader(keys, callContexts, queuedFutures, loaderOptions.cachingEnabled()); return batchLoad .thenApply(values -> { assertResultSize(keys, values); + if (isPublisher() || isMappedPublisher()) { + // We have already completed the queued futures by the time the overall batchLoad future has completed. + return values; + } List clearCacheKeys = new ArrayList<>(); for (int idx = 0; idx < queuedFutures.size(); idx++) { @@ -342,14 +369,15 @@ private CompletableFuture queueOrInvokeLoader(K key, Object loadContext, bool CompletableFuture invokeLoaderImmediately(K key, Object keyContext, boolean cachingEnabled) { List keys = singletonList(key); List keyContexts = singletonList(keyContext); - return invokeLoader(keys, keyContexts, cachingEnabled) + List> queuedFutures = singletonList(new CompletableFuture<>()); + return invokeLoader(keys, keyContexts, queuedFutures, cachingEnabled) .thenApply(list -> list.get(0)) .toCompletableFuture(); } - CompletableFuture> invokeLoader(List keys, List keyContexts, boolean cachingEnabled) { + CompletableFuture> invokeLoader(List keys, List keyContexts, List> queuedFutures, boolean cachingEnabled) { if (!cachingEnabled) { - return invokeLoader(keys, keyContexts); + return invokeLoader(keys, keyContexts, queuedFutures); } CompletableFuture>> cacheCallCF = getFromValueCache(keys); return cacheCallCF.thenCompose(cachedValues -> { @@ -360,6 +388,7 @@ CompletableFuture> invokeLoader(List keys, List keyContexts, List missedKeyIndexes = new ArrayList<>(); List missedKeys = new ArrayList<>(); List missedKeyContexts = new ArrayList<>(); + List> missedQueuedFutures = new ArrayList<>(); // if they return a ValueCachingNotSupported exception then we insert this special marker value, and it // means it's a total miss, we need to get all these keys via the batch loader @@ -369,6 +398,7 @@ CompletableFuture> invokeLoader(List keys, List keyContexts, missedKeyIndexes.add(i); missedKeys.add(keys.get(i)); missedKeyContexts.add(keyContexts.get(i)); + missedQueuedFutures.add(queuedFutures.get(i)); } } else { assertState(keys.size() == cachedValues.size(), () -> "The size of the cached values MUST be the same size as the key list"); @@ -379,6 +409,9 @@ CompletableFuture> invokeLoader(List keys, List keyContexts, missedKeyIndexes.add(i); missedKeys.add(keys.get(i)); missedKeyContexts.add(keyContexts.get(i)); + missedQueuedFutures.add(queuedFutures.get(i)); + } else { + queuedFutures.get(i).complete(cacheGet.get()); } } } @@ -393,7 +426,7 @@ CompletableFuture> invokeLoader(List keys, List keyContexts, // we missed some keys from cache, so send them to the batch loader // and then fill in their values // - CompletableFuture> batchLoad = invokeLoader(missedKeys, missedKeyContexts); + CompletableFuture> batchLoad = invokeLoader(missedKeys, missedKeyContexts, missedQueuedFutures); return batchLoad.thenCompose(missedValues -> { assertResultSize(missedKeys, missedValues); @@ -412,24 +445,34 @@ CompletableFuture> invokeLoader(List keys, List keyContexts, }); } + CompletableFuture> invokeLoader(List keys, List keyContexts, List> queuedFutures) { + Object context = loaderOptions.getBatchLoaderContextProvider().getContext(); + BatchLoaderEnvironment environment = 
BatchLoaderEnvironment.newBatchLoaderEnvironment() + .context(context).keyContexts(keys, keyContexts).build(); + + DataLoaderInstrumentationContext> instrCtx = ctxOrNoopCtx(instrumentation().beginBatchLoader(dataLoader, keys, environment)); - CompletableFuture> invokeLoader(List keys, List keyContexts) { CompletableFuture> batchLoad; try { - Object context = loaderOptions.getBatchLoaderContextProvider().getContext(); - BatchLoaderEnvironment environment = BatchLoaderEnvironment.newBatchLoaderEnvironment() - .context(context).keyContexts(keys, keyContexts).build(); if (isMapLoader()) { batchLoad = invokeMapBatchLoader(keys, environment); + } else if (isPublisher()) { + batchLoad = invokeBatchPublisher(keys, keyContexts, queuedFutures, environment); + } else if (isMappedPublisher()) { + batchLoad = invokeMappedBatchPublisher(keys, keyContexts, queuedFutures, environment); } else { batchLoad = invokeListBatchLoader(keys, environment); } + instrCtx.onDispatched(); } catch (Exception e) { + instrCtx.onDispatched(); batchLoad = CompletableFutureKit.failedFuture(e); } + batchLoad.whenComplete(instrCtx::onCompleted); return batchLoad; } + @SuppressWarnings("unchecked") private CompletableFuture> invokeListBatchLoader(List keys, BatchLoaderEnvironment environment) { CompletionStage> loadResult; @@ -492,10 +535,76 @@ private CompletableFuture> invokeMapBatchLoader(List keys, BatchLoade }); } + private CompletableFuture> invokeBatchPublisher(List keys, List keyContexts, List> queuedFutures, BatchLoaderEnvironment environment) { + CompletableFuture> loadResult = new CompletableFuture<>(); + Subscriber subscriber = ReactiveSupport.batchSubscriber(loadResult, keys, keyContexts, queuedFutures, helperIntegration()); + + BatchLoaderScheduler batchLoaderScheduler = loaderOptions.getBatchLoaderScheduler(); + if (batchLoadFunction instanceof BatchPublisherWithContext) { + //noinspection unchecked + BatchPublisherWithContext loadFunction = (BatchPublisherWithContext) batchLoadFunction; + if (batchLoaderScheduler != null) { + BatchLoaderScheduler.ScheduledBatchPublisherCall loadCall = () -> loadFunction.load(keys, subscriber, environment); + batchLoaderScheduler.scheduleBatchPublisher(loadCall, keys, environment); + } else { + loadFunction.load(keys, subscriber, environment); + } + } else { + //noinspection unchecked + BatchPublisher loadFunction = (BatchPublisher) batchLoadFunction; + if (batchLoaderScheduler != null) { + BatchLoaderScheduler.ScheduledBatchPublisherCall loadCall = () -> loadFunction.load(keys, subscriber); + batchLoaderScheduler.scheduleBatchPublisher(loadCall, keys, null); + } else { + loadFunction.load(keys, subscriber); + } + } + return loadResult; + } + + private CompletableFuture> invokeMappedBatchPublisher(List keys, List keyContexts, List> queuedFutures, BatchLoaderEnvironment environment) { + CompletableFuture> loadResult = new CompletableFuture<>(); + Subscriber> subscriber = ReactiveSupport.mappedBatchSubscriber(loadResult, keys, keyContexts, queuedFutures, helperIntegration()); + Set setOfKeys = new LinkedHashSet<>(keys); + BatchLoaderScheduler batchLoaderScheduler = loaderOptions.getBatchLoaderScheduler(); + if (batchLoadFunction instanceof MappedBatchPublisherWithContext) { + //noinspection unchecked + MappedBatchPublisherWithContext loadFunction = (MappedBatchPublisherWithContext) batchLoadFunction; + if (batchLoaderScheduler != null) { + BatchLoaderScheduler.ScheduledBatchPublisherCall loadCall = () -> loadFunction.load(keys, subscriber, environment); + 
batchLoaderScheduler.scheduleBatchPublisher(loadCall, keys, environment); + } else { + loadFunction.load(keys, subscriber, environment); + } + } else { + //noinspection unchecked + MappedBatchPublisher loadFunction = (MappedBatchPublisher) batchLoadFunction; + if (batchLoaderScheduler != null) { + BatchLoaderScheduler.ScheduledBatchPublisherCall loadCall = () -> loadFunction.load(setOfKeys, subscriber); + batchLoaderScheduler.scheduleBatchPublisher(loadCall, keys, null); + } else { + loadFunction.load(setOfKeys, subscriber); + } + } + return loadResult; + } + private boolean isMapLoader() { return batchLoadFunction instanceof MappedBatchLoader || batchLoadFunction instanceof MappedBatchLoaderWithContext; } + private boolean isPublisher() { + return batchLoadFunction instanceof BatchPublisher; + } + + private boolean isMappedPublisher() { + return batchLoadFunction instanceof MappedBatchPublisher; + } + + private DataLoaderInstrumentation instrumentation() { + return loaderOptions.getInstrumentation(); + } + int dispatchDepth() { synchronized (dataLoader) { return loaderQueue.size(); @@ -546,4 +655,23 @@ private CompletableFuture> setToValueCache(List assembledValues, List private static DispatchResult emptyDispatchResult() { return (DispatchResult) EMPTY_DISPATCH_RESULT; } + + private ReactiveSupport.HelperIntegration helperIntegration() { + return new ReactiveSupport.HelperIntegration<>() { + @Override + public StatisticsCollector getStats() { + return stats; + } + + @Override + public void clearCacheView(K key) { + dataLoader.clear(key); + } + + @Override + public void clearCacheEntriesOnExceptions(List keys) { + possiblyClearCacheEntriesOnExceptions(keys); + } + }; + } } diff --git a/src/main/java/org/dataloader/DataLoaderOptions.java b/src/main/java/org/dataloader/DataLoaderOptions.java index b96e785..8667943 100644 --- a/src/main/java/org/dataloader/DataLoaderOptions.java +++ b/src/main/java/org/dataloader/DataLoaderOptions.java @@ -17,18 +17,22 @@ package org.dataloader; import org.dataloader.annotations.PublicApi; -import org.dataloader.impl.Assertions; +import org.dataloader.instrumentation.DataLoaderInstrumentation; +import org.dataloader.instrumentation.DataLoaderInstrumentationHelper; import org.dataloader.scheduler.BatchLoaderScheduler; import org.dataloader.stats.NoOpStatisticsCollector; import org.dataloader.stats.StatisticsCollector; +import java.util.Objects; import java.util.Optional; +import java.util.function.Consumer; import java.util.function.Supplier; import static org.dataloader.impl.Assertions.nonNull; /** - * Configuration options for {@link DataLoader} instances. + * Configuration options for {@link DataLoader} instances. This is an immutable class so each time + * you change a value it returns a new object. 
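The DataLoaderHelper changes above thread a DataLoaderInstrumentation through load, dispatch and the batch loader call. As a rough sketch only (the hook signatures are inferred from the calls in this patch, so treat them as assumptions), a timing instrumentation could be installed through the DataLoaderOptions support that follows:

    DataLoaderInstrumentation timingInstrumentation = new DataLoaderInstrumentation() {
        @Override
        public DataLoaderInstrumentationContext<DispatchResult<?>> beginDispatch(DataLoader<?, ?> dataLoader) {
            long startNs = System.nanoTime();
            return new DataLoaderInstrumentationContext<>() {
                @Override
                public void onDispatched() {
                    // the batch has been handed off to the batch load function
                }

                @Override
                public void onCompleted(DispatchResult<?> result, Throwable t) {
                    System.out.println("dispatch completed in " + (System.nanoTime() - startNs) + " ns");
                }
            };
        }
    };

    DataLoaderOptions options = DataLoaderOptions.newOptionsBuilder()
            .setInstrumentation(timingInstrumentation)
            .build();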
* * @author Arnold Schrijver */ @@ -36,18 +40,21 @@ public class DataLoaderOptions { private static final BatchLoaderContextProvider NULL_PROVIDER = () -> null; - - private boolean batchingEnabled; - private boolean cachingEnabled; - private boolean cachingExceptionsEnabled; - private CacheKey cacheKeyFunction; - private CacheMap cacheMap; - private ValueCache valueCache; - private int maxBatchSize; - private Supplier statisticsCollector; - private BatchLoaderContextProvider environmentProvider; - private ValueCacheOptions valueCacheOptions; - private BatchLoaderScheduler batchLoaderScheduler; + private static final Supplier NOOP_COLLECTOR = NoOpStatisticsCollector::new; + private static final ValueCacheOptions DEFAULT_VALUE_CACHE_OPTIONS = ValueCacheOptions.newOptions(); + + private final boolean batchingEnabled; + private final boolean cachingEnabled; + private final boolean cachingExceptionsEnabled; + private final CacheKey cacheKeyFunction; + private final CacheMap cacheMap; + private final ValueCache valueCache; + private final int maxBatchSize; + private final Supplier statisticsCollector; + private final BatchLoaderContextProvider environmentProvider; + private final ValueCacheOptions valueCacheOptions; + private final BatchLoaderScheduler batchLoaderScheduler; + private final DataLoaderInstrumentation instrumentation; /** * Creates a new data loader options with default settings. @@ -56,11 +63,30 @@ public DataLoaderOptions() { batchingEnabled = true; cachingEnabled = true; cachingExceptionsEnabled = true; + cacheKeyFunction = null; + cacheMap = null; + valueCache = null; maxBatchSize = -1; - statisticsCollector = NoOpStatisticsCollector::new; + statisticsCollector = NOOP_COLLECTOR; environmentProvider = NULL_PROVIDER; - valueCacheOptions = ValueCacheOptions.newOptions(); + valueCacheOptions = DEFAULT_VALUE_CACHE_OPTIONS; batchLoaderScheduler = null; + instrumentation = DataLoaderInstrumentationHelper.NOOP_INSTRUMENTATION; + } + + private DataLoaderOptions(Builder builder) { + this.batchingEnabled = builder.batchingEnabled; + this.cachingEnabled = builder.cachingEnabled; + this.cachingExceptionsEnabled = builder.cachingExceptionsEnabled; + this.cacheKeyFunction = builder.cacheKeyFunction; + this.cacheMap = builder.cacheMap; + this.valueCache = builder.valueCache; + this.maxBatchSize = builder.maxBatchSize; + this.statisticsCollector = builder.statisticsCollector; + this.environmentProvider = builder.environmentProvider; + this.valueCacheOptions = builder.valueCacheOptions; + this.batchLoaderScheduler = builder.batchLoaderScheduler; + this.instrumentation = builder.instrumentation; } /** @@ -80,7 +106,8 @@ public DataLoaderOptions(DataLoaderOptions other) { this.statisticsCollector = other.statisticsCollector; this.environmentProvider = other.environmentProvider; this.valueCacheOptions = other.valueCacheOptions; - batchLoaderScheduler = other.batchLoaderScheduler; + this.batchLoaderScheduler = other.batchLoaderScheduler; + this.instrumentation = other.instrumentation; } /** @@ -90,6 +117,51 @@ public static DataLoaderOptions newOptions() { return new DataLoaderOptions(); } + /** + * @return a new default data loader options {@link Builder} that you can then customize + */ + public static DataLoaderOptions.Builder newOptionsBuilder() { + return new DataLoaderOptions.Builder(); + } + + /** + * @param otherOptions the options to copy + * @return a new default data loader options {@link Builder} from the specified one that you can then customize + */ + public static 
DataLoaderOptions.Builder newDataLoaderOptions(DataLoaderOptions otherOptions) { + return new DataLoaderOptions.Builder(otherOptions); + } + + /** + * Will transform the current options in to a builder ands allow you to build a new set of options + * + * @param builderConsumer the consumer of a builder that has this objects starting values + * @return a new {@link DataLoaderOptions} object + */ + public DataLoaderOptions transform(Consumer builderConsumer) { + Builder builder = newDataLoaderOptions(this); + builderConsumer.accept(builder); + return builder.build(); + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) return false; + DataLoaderOptions that = (DataLoaderOptions) o; + return batchingEnabled == that.batchingEnabled + && cachingEnabled == that.cachingEnabled + && cachingExceptionsEnabled == that.cachingExceptionsEnabled + && maxBatchSize == that.maxBatchSize + && Objects.equals(cacheKeyFunction, that.cacheKeyFunction) && + Objects.equals(cacheMap, that.cacheMap) && + Objects.equals(valueCache, that.valueCache) && + Objects.equals(statisticsCollector, that.statisticsCollector) && + Objects.equals(environmentProvider, that.environmentProvider) && + Objects.equals(valueCacheOptions, that.valueCacheOptions) && + Objects.equals(batchLoaderScheduler, that.batchLoaderScheduler); + } + + /** * Option that determines whether to use batching (the default), or not. * @@ -103,12 +175,10 @@ public boolean batchingEnabled() { * Sets the option that determines whether batch loading is enabled. * * @param batchingEnabled {@code true} to enable batch loading, {@code false} otherwise - * - * @return the data loader options for fluent coding + * @return a new data loader options instance for fluent coding */ public DataLoaderOptions setBatchingEnabled(boolean batchingEnabled) { - this.batchingEnabled = batchingEnabled; - return this; + return builder().setBatchingEnabled(batchingEnabled).build(); } /** @@ -124,17 +194,15 @@ public boolean cachingEnabled() { * Sets the option that determines whether caching is enabled. * * @param cachingEnabled {@code true} to enable caching, {@code false} otherwise - * - * @return the data loader options for fluent coding + * @return a new data loader options instance for fluent coding */ public DataLoaderOptions setCachingEnabled(boolean cachingEnabled) { - this.cachingEnabled = cachingEnabled; - return this; + return builder().setCachingEnabled(cachingEnabled).build(); } /** * Option that determines whether to cache exceptional values (the default), or not. - * + *
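Since DataLoaderOptions is now immutable, each set...() call above hands back a fresh instance; bulk changes go through the new builder or transform(...). A small sketch:

    DataLoaderOptions base = DataLoaderOptions.newOptionsBuilder()
            .setMaxBatchSize(50)
            .setCachingExceptionsEnabled(false)
            .build();

    // setters no longer mutate; they return a new options object
    DataLoaderOptions noBatching = base.setBatchingEnabled(false);

    // or change several values at once starting from an existing instance
    DataLoaderOptions tweaked = base.transform(builder -> builder
            .setCachingEnabled(false)
            .setMaxBatchSize(10));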

* For short-lived caches (that is request caches) it makes sense to cache exceptions since * it's likely the key is still poisoned. However, if you have long-lived caches, then it may make * sense to set this to false since the downstream system may have recovered from its failure @@ -150,12 +218,10 @@ public boolean cachingExceptionsEnabled() { * Sets the option that determines whether exceptional values are cache enabled. * * @param cachingExceptionsEnabled {@code true} to enable caching exceptional values, {@code false} otherwise - * - * @return the data loader options for fluent coding + * @return a new data loader options instance for fluent coding */ public DataLoaderOptions setCachingExceptionsEnabled(boolean cachingExceptionsEnabled) { - this.cachingExceptionsEnabled = cachingExceptionsEnabled; - return this; + return builder().setCachingExceptionsEnabled(cachingExceptionsEnabled).build(); } /** @@ -173,12 +239,10 @@ public Optional cacheKeyFunction() { * Sets the function to use for creating the cache key, if caching is enabled. * * @param cacheKeyFunction the cache key function to use - * - * @return the data loader options for fluent coding + * @return a new data loader options instance for fluent coding */ public DataLoaderOptions setCacheKeyFunction(CacheKey cacheKeyFunction) { - this.cacheKeyFunction = cacheKeyFunction; - return this; + return builder().setCacheKeyFunction(cacheKeyFunction).build(); } /** @@ -196,12 +260,10 @@ public DataLoaderOptions setCacheKeyFunction(CacheKey cacheKeyFunction) { * Sets the cache map implementation to use for caching, if caching is enabled. * * @param cacheMap the cache map instance - * - * @return the data loader options for fluent coding + * @return a new data loader options instance for fluent coding */ public DataLoaderOptions setCacheMap(CacheMap cacheMap) { - this.cacheMap = cacheMap; - return this; + return builder().setCacheMap(cacheMap).build(); } /** @@ -219,12 +281,10 @@ public int maxBatchSize() { * before they are split into multiple class * * @param maxBatchSize the maximum batch size - * - * @return the data loader options for fluent coding + * @return a new data loader options instance for fluent coding */ public DataLoaderOptions setMaxBatchSize(int maxBatchSize) { - this.maxBatchSize = maxBatchSize; - return this; + return builder().setMaxBatchSize(maxBatchSize).build(); } /** @@ -240,12 +300,10 @@ public StatisticsCollector getStatisticsCollector() { * a common value * * @param statisticsCollector the statistics collector to use - * - * @return the data loader options for fluent coding + * @return a new data loader options instance for fluent coding */ public DataLoaderOptions setStatisticsCollector(Supplier statisticsCollector) { - this.statisticsCollector = nonNull(statisticsCollector); - return this; + return builder().setStatisticsCollector(nonNull(statisticsCollector)).build(); } /** @@ -259,12 +317,10 @@ public BatchLoaderContextProvider getBatchLoaderContextProvider() { * Sets the batch loader environment provider that will be used to give context to batch load functions * * @param contextProvider the batch loader context provider - * - * @return the data loader options for fluent coding + * @return a new data loader options instance for fluent coding */ public DataLoaderOptions setBatchLoaderContextProvider(BatchLoaderContextProvider contextProvider) { - this.environmentProvider = nonNull(contextProvider); - return this; + return builder().setBatchLoaderContextProvider(nonNull(contextProvider)).build(); } /** @@ 
-282,12 +338,10 @@ public DataLoaderOptions setBatchLoaderContextProvider(BatchLoaderContextProvide * Sets the value cache implementation to use for caching values, if caching is enabled. * * @param valueCache the value cache instance - * - * @return the data loader options for fluent coding + * @return a new data loader options instance for fluent coding */ public DataLoaderOptions setValueCache(ValueCache valueCache) { - this.valueCache = valueCache; - return this; + return builder().setValueCache(valueCache).build(); } /** @@ -301,12 +355,10 @@ public ValueCacheOptions getValueCacheOptions() { * Sets the {@link ValueCacheOptions} that control how the {@link ValueCache} will be used * * @param valueCacheOptions the value cache options - * - * @return the data loader options for fluent coding + * @return a new data loader options instance for fluent coding */ public DataLoaderOptions setValueCacheOptions(ValueCacheOptions valueCacheOptions) { - this.valueCacheOptions = Assertions.nonNull(valueCacheOptions); - return this; + return builder().setValueCacheOptions(nonNull(valueCacheOptions)).build(); } /** @@ -321,11 +373,129 @@ public BatchLoaderScheduler getBatchLoaderScheduler() { * to some future time. * * @param batchLoaderScheduler the scheduler - * - * @return the data loader options for fluent coding + * @return a new data loader options instance for fluent coding */ public DataLoaderOptions setBatchLoaderScheduler(BatchLoaderScheduler batchLoaderScheduler) { - this.batchLoaderScheduler = batchLoaderScheduler; - return this; + return builder().setBatchLoaderScheduler(batchLoaderScheduler).build(); + } + + /** + * @return the {@link DataLoaderInstrumentation} to use + */ + public DataLoaderInstrumentation getInstrumentation() { + return instrumentation; + } + + /** + * Sets in a new {@link DataLoaderInstrumentation} + * + * @param instrumentation the new {@link DataLoaderInstrumentation} + * @return a new data loader options instance for fluent coding + */ + public DataLoaderOptions setInstrumentation(DataLoaderInstrumentation instrumentation) { + return builder().setInstrumentation(instrumentation).build(); + } + + private Builder builder() { + return new Builder(this); + } + + public static class Builder { + private boolean batchingEnabled; + private boolean cachingEnabled; + private boolean cachingExceptionsEnabled; + private CacheKey cacheKeyFunction; + private CacheMap cacheMap; + private ValueCache valueCache; + private int maxBatchSize; + private Supplier statisticsCollector; + private BatchLoaderContextProvider environmentProvider; + private ValueCacheOptions valueCacheOptions; + private BatchLoaderScheduler batchLoaderScheduler; + private DataLoaderInstrumentation instrumentation; + + public Builder() { + this(new DataLoaderOptions()); // use the defaults of the DataLoaderOptions for this builder + } + + Builder(DataLoaderOptions other) { + this.batchingEnabled = other.batchingEnabled; + this.cachingEnabled = other.cachingEnabled; + this.cachingExceptionsEnabled = other.cachingExceptionsEnabled; + this.cacheKeyFunction = other.cacheKeyFunction; + this.cacheMap = other.cacheMap; + this.valueCache = other.valueCache; + this.maxBatchSize = other.maxBatchSize; + this.statisticsCollector = other.statisticsCollector; + this.environmentProvider = other.environmentProvider; + this.valueCacheOptions = other.valueCacheOptions; + this.batchLoaderScheduler = other.batchLoaderScheduler; + this.instrumentation = other.instrumentation; + } + + public Builder setBatchingEnabled(boolean 
batchingEnabled) { + this.batchingEnabled = batchingEnabled; + return this; + } + + public Builder setCachingEnabled(boolean cachingEnabled) { + this.cachingEnabled = cachingEnabled; + return this; + } + + public Builder setCachingExceptionsEnabled(boolean cachingExceptionsEnabled) { + this.cachingExceptionsEnabled = cachingExceptionsEnabled; + return this; + } + + public Builder setCacheKeyFunction(CacheKey cacheKeyFunction) { + this.cacheKeyFunction = cacheKeyFunction; + return this; + } + + public Builder setCacheMap(CacheMap cacheMap) { + this.cacheMap = cacheMap; + return this; + } + + public Builder setValueCache(ValueCache valueCache) { + this.valueCache = valueCache; + return this; + } + + public Builder setMaxBatchSize(int maxBatchSize) { + this.maxBatchSize = maxBatchSize; + return this; + } + + public Builder setStatisticsCollector(Supplier statisticsCollector) { + this.statisticsCollector = statisticsCollector; + return this; + } + + public Builder setBatchLoaderContextProvider(BatchLoaderContextProvider environmentProvider) { + this.environmentProvider = environmentProvider; + return this; + } + + public Builder setValueCacheOptions(ValueCacheOptions valueCacheOptions) { + this.valueCacheOptions = valueCacheOptions; + return this; + } + + public Builder setBatchLoaderScheduler(BatchLoaderScheduler batchLoaderScheduler) { + this.batchLoaderScheduler = batchLoaderScheduler; + return this; + } + + public Builder setInstrumentation(DataLoaderInstrumentation instrumentation) { + this.instrumentation = nonNull(instrumentation); + return this; + } + + public DataLoaderOptions build() { + return new DataLoaderOptions(this); + } + } } diff --git a/src/main/java/org/dataloader/DataLoaderRegistry.java b/src/main/java/org/dataloader/DataLoaderRegistry.java index 5a3f90f..06c93c4 100644 --- a/src/main/java/org/dataloader/DataLoaderRegistry.java +++ b/src/main/java/org/dataloader/DataLoaderRegistry.java @@ -1,6 +1,9 @@ package org.dataloader; import org.dataloader.annotations.PublicApi; +import org.dataloader.instrumentation.ChainedDataLoaderInstrumentation; +import org.dataloader.instrumentation.DataLoaderInstrumentation; +import org.dataloader.instrumentation.DataLoaderInstrumentationHelper; import org.dataloader.stats.Statistics; import java.util.ArrayList; @@ -16,30 +19,108 @@ /** * This allows data loaders to be registered together into a single place, so * they can be dispatched as one. It also allows you to retrieve data loaders by - * name from a central place + * name from a central place. + *

+ * Notes on {@link DataLoaderInstrumentation} : A {@link DataLoaderRegistry} can have an instrumentation + * associated with it. As each {@link DataLoader} is added to the registry, the {@link DataLoaderInstrumentation} + * of the registry is applied to that {@link DataLoader}. + *

+ * The {@link DataLoader} is changed and hence the object in the registry is not the + * same one as was originally registered. So you MUST get access to the {@link DataLoader} via {@link DataLoaderRegistry#getDataLoader(String)} methods + * and not use the original {@link DataLoader} object. + *
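A sketch of the behaviour described above, with userLoader and the empty instrumentation as assumed placeholders: because the registry re-wraps each loader with its instrumentation, always fetch the loader back by name rather than holding on to the original reference.

    // assumes DataLoaderInstrumentation's methods all have default implementations
    DataLoaderInstrumentation registryInstrumentation = new DataLoaderInstrumentation() {
    };

    DataLoaderRegistry registry = DataLoaderRegistry.newRegistry()
            .instrumentation(registryInstrumentation)
            .register("users", userLoader)
            .build();

    DataLoader<Long, String> instrumentedUsers = registry.getDataLoader("users"); // not userLoader itself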

+ * If the {@link DataLoader} has no {@link DataLoaderInstrumentation} then the registry one is added to it. If it does have one already + * then a {@link ChainedDataLoaderInstrumentation} is created with the registry {@link DataLoaderInstrumentation} in it first and then any other + * {@link DataLoaderInstrumentation}s added after that. If the registry {@link DataLoaderInstrumentation} instance and {@link DataLoader} {@link DataLoaderInstrumentation} instance + * are the same object, then nothing is changed, since the same instrumentation code is being run. */ @PublicApi public class DataLoaderRegistry { - protected final Map> dataLoaders = new ConcurrentHashMap<>(); + protected final Map> dataLoaders; + protected final DataLoaderInstrumentation instrumentation; + public DataLoaderRegistry() { + this(new ConcurrentHashMap<>(), null); } private DataLoaderRegistry(Builder builder) { - this.dataLoaders.putAll(builder.dataLoaders); + this(builder.dataLoaders, builder.instrumentation); + } + + protected DataLoaderRegistry(Map> dataLoaders, DataLoaderInstrumentation instrumentation) { + this.dataLoaders = instrumentDLs(dataLoaders, instrumentation); + this.instrumentation = instrumentation; + } + + private Map> instrumentDLs(Map> incomingDataLoaders, DataLoaderInstrumentation registryInstrumentation) { + Map> dataLoaders = new ConcurrentHashMap<>(incomingDataLoaders); + if (registryInstrumentation != null) { + dataLoaders.replaceAll((k, existingDL) -> instrumentDL(registryInstrumentation, existingDL)); + } + return dataLoaders; + } + + /** + * Can be called to tweak a {@link DataLoader} so that it has the registry {@link DataLoaderInstrumentation} added as the first one. + * + * @param registryInstrumentation the common registry {@link DataLoaderInstrumentation} + * @param existingDL the existing data loader + * @return a new {@link DataLoader} or the same one if there is nothing to change + */ + private static DataLoader instrumentDL(DataLoaderInstrumentation registryInstrumentation, DataLoader existingDL) { + if (registryInstrumentation == null) { + return existingDL; + } + DataLoaderOptions options = existingDL.getOptions(); + DataLoaderInstrumentation existingInstrumentation = options.getInstrumentation(); + // if they have any instrumentations then add to it + if (existingInstrumentation != null) { + if (existingInstrumentation == registryInstrumentation) { + // nothing to change + return existingDL; + } + if (existingInstrumentation == DataLoaderInstrumentationHelper.NOOP_INSTRUMENTATION) { + // replace it with the registry one + return mkInstrumentedDataLoader(existingDL, options, registryInstrumentation); + } + if (existingInstrumentation instanceof ChainedDataLoaderInstrumentation) { + // avoids calling a chained inside a chained + DataLoaderInstrumentation newInstrumentation = ((ChainedDataLoaderInstrumentation) existingInstrumentation).prepend(registryInstrumentation); + return mkInstrumentedDataLoader(existingDL, options, newInstrumentation); + } else { + DataLoaderInstrumentation newInstrumentation = new ChainedDataLoaderInstrumentation().add(registryInstrumentation).add(existingInstrumentation); + return mkInstrumentedDataLoader(existingDL, options, newInstrumentation); + } + } else { + return mkInstrumentedDataLoader(existingDL, options, registryInstrumentation); + } + } + + private static DataLoader mkInstrumentedDataLoader(DataLoader existingDL, DataLoaderOptions options, DataLoaderInstrumentation newInstrumentation) { + return existingDL.transform(builder -> 
builder.options(setInInstrumentation(options, newInstrumentation))); } + private static DataLoaderOptions setInInstrumentation(DataLoaderOptions options, DataLoaderInstrumentation newInstrumentation) { + return options.transform(optionsBuilder -> optionsBuilder.setInstrumentation(newInstrumentation)); + } + + /** + * @return the {@link DataLoaderInstrumentation} associated with this registry which can be null + */ + public DataLoaderInstrumentation getInstrumentation() { + return instrumentation; + } /** * This will register a new dataloader * * @param key the key to put the data loader under * @param dataLoader the data loader to register - * * @return this registry */ public DataLoaderRegistry register(String key, DataLoader dataLoader) { - dataLoaders.put(key, dataLoader); + dataLoaders.put(key, instrumentDL(instrumentation, dataLoader)); return this; } @@ -54,13 +135,15 @@ public DataLoaderRegistry register(String key, DataLoader dataLoader) { * @param mappingFunction the function to compute a data loader * @param the type of keys * @param the type of values - * * @return a data loader */ @SuppressWarnings("unchecked") public DataLoader computeIfAbsent(final String key, final Function> mappingFunction) { - return (DataLoader) dataLoaders.computeIfAbsent(key, mappingFunction); + return (DataLoader) dataLoaders.computeIfAbsent(key, (k) -> { + DataLoader dl = mappingFunction.apply(k); + return instrumentDL(instrumentation, dl); + }); } /** @@ -68,7 +151,6 @@ public DataLoader computeIfAbsent(final String key, * and return a new combined registry * * @param registry the registry to combine into this registry - * * @return a new combined registry */ public DataLoaderRegistry combine(DataLoaderRegistry registry) { @@ -97,7 +179,6 @@ public DataLoaderRegistry combine(DataLoaderRegistry registry) { * This will unregister a new dataloader * * @param key the key of the data loader to unregister - * * @return this registry */ public DataLoaderRegistry unregister(String key) { @@ -111,7 +192,6 @@ public DataLoaderRegistry unregister(String key) { * @param key the key of the data loader * @param the type of keys * @param the type of values - * * @return a data loader or null if its not present */ @SuppressWarnings("unchecked") @@ -182,13 +262,13 @@ public static Builder newRegistry() { public static class Builder { private final Map> dataLoaders = new HashMap<>(); + private DataLoaderInstrumentation instrumentation; /** * This will register a new dataloader * * @param key the key to put the data loader under * @param dataLoader the data loader to register - * * @return this builder for a fluent pattern */ public Builder register(String key, DataLoader dataLoader) { @@ -201,7 +281,6 @@ public Builder register(String key, DataLoader dataLoader) { * from a previous {@link DataLoaderRegistry} * * @param otherRegistry the previous {@link DataLoaderRegistry} - * * @return this builder for a fluent pattern */ public Builder registerAll(DataLoaderRegistry otherRegistry) { @@ -209,6 +288,11 @@ public Builder registerAll(DataLoaderRegistry otherRegistry) { return this; } + public Builder instrumentation(DataLoaderInstrumentation instrumentation) { + this.instrumentation = instrumentation; + return this; + } + /** * @return the newly built {@link DataLoaderRegistry} */ diff --git a/src/main/java/org/dataloader/DelegatingDataLoader.java b/src/main/java/org/dataloader/DelegatingDataLoader.java new file mode 100644 index 0000000..c54a731 --- /dev/null +++ b/src/main/java/org/dataloader/DelegatingDataLoader.java 
@@ -0,0 +1,188 @@ +package org.dataloader; + +import org.dataloader.annotations.PublicApi; +import org.dataloader.stats.Statistics; +import org.jspecify.annotations.NonNull; +import org.jspecify.annotations.NullMarked; +import org.jspecify.annotations.Nullable; + +import java.time.Duration; +import java.time.Instant; +import java.util.List; +import java.util.Optional; +import java.util.concurrent.CompletableFuture; +import java.util.function.BiConsumer; +import java.util.function.Consumer; + +/** + * This delegating {@link DataLoader} makes it easier to create wrappers of {@link DataLoader}s in case you want to change how + * values are returned for example. + *

+ * The most common way would be to make a new {@link DelegatingDataLoader} subclass that overrides the {@link DelegatingDataLoader#load(Object, Object)}
+ * method.
+ *

+ * For example, the following allows you to change the returned value in some way:
+ *

+ * {@code
+ * DataLoader<String, String> rawLoader = createDataLoader();
+ * DelegatingDataLoader<String, String> delegatingDataLoader = new DelegatingDataLoader<>(rawLoader) {
+ *    public CompletableFuture<String> load(@NonNull String key, @Nullable Object keyContext) {
+ *       CompletableFuture<String> cf = super.load(key, keyContext);
+ *       return cf.thenApply(v -> "|" + v + "|");
+ *    }
+ * };
+ * }
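As a small editorial usage note, the static unwrap helper defined further down in this class can recover the underlying loader from the wrapper shown above:

    // returns the wrapped rawLoader; for a non-delegating loader it returns the loader itself
    DataLoader<String, String> raw = DelegatingDataLoader.unwrap(delegatingDataLoader);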
+ * + * @param type parameter indicating the type of the data load keys + * @param type parameter indicating the type of the data that is returned + */ +@PublicApi +@NullMarked +public class DelegatingDataLoader extends DataLoader { + + protected final DataLoader delegate; + + /** + * This can be called to unwrap a given {@link DataLoader} such that if it's a {@link DelegatingDataLoader} the underlying + * {@link DataLoader} is returned otherwise it's just passed in data loader + * + * @param dataLoader the dataLoader to unwrap + * @param type parameter indicating the type of the data load keys + * @param type parameter indicating the type of the data that is returned + * @return the delegate dataLoader OR just this current one if it's not wrapped + */ + public static DataLoader unwrap(DataLoader dataLoader) { + if (dataLoader instanceof DelegatingDataLoader) { + return ((DelegatingDataLoader) dataLoader).getDelegate(); + } + return dataLoader; + } + + public DelegatingDataLoader(DataLoader delegate) { + super(delegate.getBatchLoadFunction(), delegate.getOptions()); + this.delegate = delegate; + } + + public DataLoader getDelegate() { + return delegate; + } + + /** + * The {@link DataLoader#load(Object)} and {@link DataLoader#loadMany(List)} type methods all call back + * to the {@link DataLoader#load(Object, Object)} and hence we don't override them. + * + * @param key the key to load + * @param keyContext a context object that is specific to this key + * @return the future of the value + */ + @Override + public CompletableFuture load(@NonNull K key, @Nullable Object keyContext) { + return delegate.load(key, keyContext); + } + + @Override + public DataLoader transform(Consumer> builderConsumer) { + return delegate.transform(builderConsumer); + } + + @Override + public Instant getLastDispatchTime() { + return delegate.getLastDispatchTime(); + } + + @Override + public Duration getTimeSinceDispatch() { + return delegate.getTimeSinceDispatch(); + } + + @Override + public Optional> getIfPresent(K key) { + return delegate.getIfPresent(key); + } + + @Override + public Optional> getIfCompleted(K key) { + return delegate.getIfCompleted(key); + } + + @Override + public CompletableFuture> dispatch() { + return delegate.dispatch(); + } + + @Override + public DispatchResult dispatchWithCounts() { + return delegate.dispatchWithCounts(); + } + + @Override + public List dispatchAndJoin() { + return delegate.dispatchAndJoin(); + } + + @Override + public int dispatchDepth() { + return delegate.dispatchDepth(); + } + + @Override + public Object getCacheKey(K key) { + return delegate.getCacheKey(key); + } + + @Override + public Statistics getStatistics() { + return delegate.getStatistics(); + } + + @Override + public CacheMap getCacheMap() { + return delegate.getCacheMap(); + } + + @Override + public ValueCache getValueCache() { + return delegate.getValueCache(); + } + + @Override + public DataLoader clear(K key) { + delegate.clear(key); + return this; + } + + @Override + public DataLoader clear(K key, BiConsumer handler) { + delegate.clear(key, handler); + return this; + } + + @Override + public DataLoader clearAll() { + delegate.clearAll(); + return this; + } + + @Override + public DataLoader clearAll(BiConsumer handler) { + delegate.clearAll(handler); + return this; + } + + @Override + public DataLoader prime(K key, V value) { + delegate.prime(key, value); + return this; + } + + @Override + public DataLoader prime(K key, Exception error) { + delegate.prime(key, error); + return this; + } + + @Override + 
public DataLoader prime(K key, CompletableFuture value) { + delegate.prime(key, value); + return this; + } +} diff --git a/src/main/java/org/dataloader/DispatchResult.java b/src/main/java/org/dataloader/DispatchResult.java index 97711da..7305c78 100644 --- a/src/main/java/org/dataloader/DispatchResult.java +++ b/src/main/java/org/dataloader/DispatchResult.java @@ -1,6 +1,7 @@ package org.dataloader; import org.dataloader.annotations.PublicApi; +import org.jspecify.annotations.NullMarked; import java.util.List; import java.util.concurrent.CompletableFuture; @@ -12,6 +13,7 @@ * @param for two */ @PublicApi +@NullMarked public class DispatchResult { private final CompletableFuture> futureList; private final int keysCount; diff --git a/src/main/java/org/dataloader/MappedBatchLoader.java b/src/main/java/org/dataloader/MappedBatchLoader.java index 5a7a1a6..1ad4c79 100644 --- a/src/main/java/org/dataloader/MappedBatchLoader.java +++ b/src/main/java/org/dataloader/MappedBatchLoader.java @@ -16,6 +16,9 @@ package org.dataloader; +import org.dataloader.annotations.PublicSpi; +import org.jspecify.annotations.NullMarked; + import java.util.Map; import java.util.Set; import java.util.concurrent.CompletionStage; @@ -54,6 +57,8 @@ * @param type parameter indicating the type of values returned * */ +@PublicSpi +@NullMarked public interface MappedBatchLoader { /** diff --git a/src/main/java/org/dataloader/MappedBatchLoaderWithContext.java b/src/main/java/org/dataloader/MappedBatchLoaderWithContext.java index 7438d20..9559260 100644 --- a/src/main/java/org/dataloader/MappedBatchLoaderWithContext.java +++ b/src/main/java/org/dataloader/MappedBatchLoaderWithContext.java @@ -16,6 +16,9 @@ package org.dataloader; +import org.dataloader.annotations.PublicSpi; +import org.jspecify.annotations.NullMarked; + import java.util.Map; import java.util.Set; import java.util.concurrent.CompletionStage; @@ -28,6 +31,8 @@ * See {@link MappedBatchLoader} for more details on the design invariants that you must implement in order to * use this interface. */ +@PublicSpi +@NullMarked public interface MappedBatchLoaderWithContext { /** * Called to batch load the provided keys and return a promise to a map of values. diff --git a/src/main/java/org/dataloader/MappedBatchPublisher.java b/src/main/java/org/dataloader/MappedBatchPublisher.java new file mode 100644 index 0000000..493401f --- /dev/null +++ b/src/main/java/org/dataloader/MappedBatchPublisher.java @@ -0,0 +1,34 @@ +package org.dataloader; + +import org.dataloader.annotations.PublicSpi; +import org.jspecify.annotations.NullMarked; +import org.reactivestreams.Subscriber; + +import java.util.Map; +import java.util.Set; + +/** + * A function that is invoked for batch loading a stream of data values indicated by the provided list of keys. + *

+ * The function must call the provided {@link Subscriber} to process the key/value pairs it has retrieved to allow
+ * the future returned by {@link DataLoader#load(Object)} to complete as soon as the individual value is available
+ * (rather than when all values have been retrieved).
+ *
+ * @param <K> type parameter indicating the type of keys to use for data load requests.
+ * @param <V> type parameter indicating the type of values returned
+ * @see MappedBatchLoader for the non-reactive version
+ */
+@PublicSpi
+@NullMarked
+public interface MappedBatchPublisher<K, V> {
+    /**
+     * Called to batch the provided keys into a stream of map entries of keys and values.
+     *
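To make this concrete, a minimal editorial sketch (not part of the diff) of an implementation that streams map entries to the supplied subscriber; the userManager.streamUsersById fixture call and the Long/User types are assumed, mirroring the ReadmeExamples changes later in this patch:

    MappedBatchPublisher<Long, User> userEntryPublisher = (keys, subscriber) -> {
        // produce a Publisher of Map.Entry<Long, User> for the requested keys and let the
        // subscriber complete each key's future as soon as its entry arrives
        Publisher<Map.Entry<Long, User>> userEntries = userManager.streamUsersById(keys);
        userEntries.subscribe(subscriber);
    };
    DataLoader<Long, User> userLoader = DataLoaderFactory.newMappedPublisherDataLoader(userEntryPublisher);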

+ * The idiomatic approach would be to create a reactive {@link org.reactivestreams.Publisher} that provides + * the values given the keys and then subscribe to it with the provided {@link Subscriber}. + * + * @param keys the collection of keys to load + * @param subscriber as values arrive you must call the subscriber for each value + */ + void load(Set keys, Subscriber> subscriber); +} diff --git a/src/main/java/org/dataloader/MappedBatchPublisherWithContext.java b/src/main/java/org/dataloader/MappedBatchPublisherWithContext.java new file mode 100644 index 0000000..7b862ca --- /dev/null +++ b/src/main/java/org/dataloader/MappedBatchPublisherWithContext.java @@ -0,0 +1,36 @@ +package org.dataloader; + +import org.dataloader.annotations.PublicSpi; +import org.jspecify.annotations.NullMarked; +import org.reactivestreams.Subscriber; + +import java.util.List; +import java.util.Map; + +/** + * This form of {@link MappedBatchPublisher} is given a {@link org.dataloader.BatchLoaderEnvironment} object + * that encapsulates the calling context. A typical use case is passing in security credentials or database details + * for example. + *

+ * See {@link MappedBatchPublisher} for more details on the design invariants that you must implement in order to + * use this interface. + */ +@PublicSpi +@NullMarked +public interface MappedBatchPublisherWithContext { + + /** + * Called to batch the provided keys into a stream of map entries of keys and values. + *

+ * The idiomatic approach would be to create a reactive {@link org.reactivestreams.Publisher} that provides + * the values given the keys and then subscribe to it with the provided {@link Subscriber}. + *
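A correspondingly hedged sketch of the context-aware variant, using the environment object described next; SecurityCtx and the two-argument streamUsersById overload are assumed fixture names:

    MappedBatchPublisherWithContext<Long, User> publisher = (keys, subscriber, environment) -> {
        // pull per-call context (e.g. credentials) out of the environment
        SecurityCtx callCtx = environment.getContext();
        Publisher<Map.Entry<Long, User>> entries = userManager.streamUsersById(keys, callCtx);
        entries.subscribe(subscriber);
    };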

+ * This is given an environment object to that maybe be useful during the call. A typical use case + * is passing in security credentials or database details for example. + * + * @param keys the collection of keys to load + * @param subscriber as values arrive you must call the subscriber for each value + * @param environment an environment object that can help with the call + */ + void load(List keys, Subscriber> subscriber, BatchLoaderEnvironment environment); +} diff --git a/src/main/java/org/dataloader/ValueCache.java b/src/main/java/org/dataloader/ValueCache.java index a8dabb1..80c8402 100644 --- a/src/main/java/org/dataloader/ValueCache.java +++ b/src/main/java/org/dataloader/ValueCache.java @@ -3,6 +3,7 @@ import org.dataloader.annotations.PublicSpi; import org.dataloader.impl.CompletableFutureKit; import org.dataloader.impl.NoOpValueCache; +import org.jspecify.annotations.NullMarked; import java.util.ArrayList; import java.util.List; @@ -38,6 +39,7 @@ * @author Brad Baker */ @PublicSpi +@NullMarked public interface ValueCache { /** @@ -158,4 +160,4 @@ public Throwable fillInStackTrace() { return this; } } -} \ No newline at end of file +} diff --git a/src/main/java/org/dataloader/ValueCacheOptions.java b/src/main/java/org/dataloader/ValueCacheOptions.java index 7e2f025..b681dda 100644 --- a/src/main/java/org/dataloader/ValueCacheOptions.java +++ b/src/main/java/org/dataloader/ValueCacheOptions.java @@ -1,10 +1,15 @@ package org.dataloader; +import org.dataloader.annotations.PublicSpi; +import org.jspecify.annotations.NullMarked; + /** * Options that control how the {@link ValueCache} is used by {@link DataLoader} * * @author Brad Baker */ +@PublicSpi +@NullMarked public class ValueCacheOptions { private final boolean completeValueAfterCacheSet; diff --git a/src/main/java/org/dataloader/impl/CompletableFutureKit.java b/src/main/java/org/dataloader/impl/CompletableFutureKit.java index 2b94d10..ebc35ec 100644 --- a/src/main/java/org/dataloader/impl/CompletableFutureKit.java +++ b/src/main/java/org/dataloader/impl/CompletableFutureKit.java @@ -3,8 +3,10 @@ import org.dataloader.annotations.Internal; import java.util.List; +import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; +import java.util.stream.Collectors; import static java.util.stream.Collectors.toList; @@ -48,10 +50,21 @@ public static boolean failed(CompletableFuture future) { } public static CompletableFuture> allOf(List> cfs) { - return CompletableFuture.allOf(cfs.toArray(new CompletableFuture[0])) + return CompletableFuture.allOf(cfs.toArray(CompletableFuture[]::new)) .thenApply(v -> cfs.stream() .map(CompletableFuture::join) .collect(toList()) ); } + + public static CompletableFuture> allOf(Map> cfs) { + return CompletableFuture.allOf(cfs.values().toArray(CompletableFuture[]::new)) + .thenApply(v -> cfs.entrySet().stream() + .collect( + Collectors.toMap( + Map.Entry::getKey, + task -> task.getValue().join()) + ) + ); + } } diff --git a/src/main/java/org/dataloader/instrumentation/ChainedDataLoaderInstrumentation.java b/src/main/java/org/dataloader/instrumentation/ChainedDataLoaderInstrumentation.java new file mode 100644 index 0000000..bf8a40c --- /dev/null +++ b/src/main/java/org/dataloader/instrumentation/ChainedDataLoaderInstrumentation.java @@ -0,0 +1,124 @@ +package org.dataloader.instrumentation; + +import org.dataloader.BatchLoaderEnvironment; +import org.dataloader.DataLoader; +import org.dataloader.DispatchResult; +import 
org.dataloader.annotations.PublicApi; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Objects; +import java.util.function.Function; +import java.util.stream.Collectors; + +/** + * This {@link DataLoaderInstrumentation} can chain together multiple instrumentations and have them all called in + * the order of the provided list. + */ +@PublicApi +public class ChainedDataLoaderInstrumentation implements DataLoaderInstrumentation { + private final List instrumentations; + + public ChainedDataLoaderInstrumentation() { + instrumentations = List.of(); + } + + public ChainedDataLoaderInstrumentation(List instrumentations) { + this.instrumentations = List.copyOf(instrumentations); + } + + public List getInstrumentations() { + return instrumentations; + } + + /** + * Adds a new {@link DataLoaderInstrumentation} to the list and creates a new {@link ChainedDataLoaderInstrumentation} + * + * @param instrumentation the one to add + * @return a new ChainedDataLoaderInstrumentation object + */ + public ChainedDataLoaderInstrumentation add(DataLoaderInstrumentation instrumentation) { + ArrayList list = new ArrayList<>(this.instrumentations); + list.add(instrumentation); + return new ChainedDataLoaderInstrumentation(list); + } + + /** + * Prepends a new {@link DataLoaderInstrumentation} to the list and creates a new {@link ChainedDataLoaderInstrumentation} + * + * @param instrumentation the one to add + * @return a new ChainedDataLoaderInstrumentation object + */ + public ChainedDataLoaderInstrumentation prepend(DataLoaderInstrumentation instrumentation) { + ArrayList list = new ArrayList<>(); + list.add(instrumentation); + list.addAll(this.instrumentations); + return new ChainedDataLoaderInstrumentation(list); + } + + /** + * Adds a collection of {@link DataLoaderInstrumentation} to the list and creates a new {@link ChainedDataLoaderInstrumentation} + * + * @param instrumentations the new ones to add + * @return a new ChainedDataLoaderInstrumentation object + */ + public ChainedDataLoaderInstrumentation addAll(Collection instrumentations) { + ArrayList list = new ArrayList<>(this.instrumentations); + list.addAll(instrumentations); + return new ChainedDataLoaderInstrumentation(list); + } + + + @Override + public DataLoaderInstrumentationContext beginLoad(DataLoader dataLoader, Object key, Object loadContext) { + return chainedCtx(it -> it.beginLoad(dataLoader, key, loadContext)); + } + + @Override + public DataLoaderInstrumentationContext> beginDispatch(DataLoader dataLoader) { + return chainedCtx(it -> it.beginDispatch(dataLoader)); + } + + @Override + public DataLoaderInstrumentationContext> beginBatchLoader(DataLoader dataLoader, List keys, BatchLoaderEnvironment environment) { + return chainedCtx(it -> it.beginBatchLoader(dataLoader, keys, environment)); + } + + private DataLoaderInstrumentationContext chainedCtx(Function> mapper) { + // if we have zero or 1 instrumentations (and 1 is the most common), then we can avoid an object allocation + // of the ChainedInstrumentationContext since it won't be needed + if (instrumentations.isEmpty()) { + return DataLoaderInstrumentationHelper.noOpCtx(); + } + if (instrumentations.size() == 1) { + return mapper.apply(instrumentations.get(0)); + } + return new ChainedInstrumentationContext<>(dropNullContexts(mapper)); + } + + private List> dropNullContexts(Function> mapper) { + return instrumentations.stream() + .map(mapper) + .filter(Objects::nonNull) + .collect(Collectors.toList()); + } + + private static class 
ChainedInstrumentationContext implements DataLoaderInstrumentationContext { + private final List> contexts; + + public ChainedInstrumentationContext(List> contexts) { + this.contexts = contexts; + } + + @Override + public void onDispatched() { + contexts.forEach(DataLoaderInstrumentationContext::onDispatched); + } + + @Override + public void onCompleted(T result, Throwable t) { + contexts.forEach(it -> it.onCompleted(result, t)); + } + } +} diff --git a/src/main/java/org/dataloader/instrumentation/DataLoaderInstrumentation.java b/src/main/java/org/dataloader/instrumentation/DataLoaderInstrumentation.java new file mode 100644 index 0000000..bbdba87 --- /dev/null +++ b/src/main/java/org/dataloader/instrumentation/DataLoaderInstrumentation.java @@ -0,0 +1,53 @@ +package org.dataloader.instrumentation; + +import org.dataloader.BatchLoaderEnvironment; +import org.dataloader.DataLoader; +import org.dataloader.DispatchResult; +import org.dataloader.annotations.PublicSpi; + +import java.util.List; + +/** + * This interface is called when certain actions happen inside a data loader + */ +@PublicSpi +public interface DataLoaderInstrumentation { + /** + * This call back is done just before the {@link DataLoader#load(Object)} methods are invoked, + * and it completes when the load promise is completed. If the value is a cached {@link java.util.concurrent.CompletableFuture} + * then it might return almost immediately, otherwise it will return + * when the batch load function is invoked and values get returned + * + * @param dataLoader the {@link DataLoader} in question + * @param key the key used during the {@link DataLoader#load(Object)} call + * @param loadContext the load context used during the {@link DataLoader#load(Object, Object)} call + * @return a DataLoaderInstrumentationContext or null to be more performant + */ + default DataLoaderInstrumentationContext beginLoad(DataLoader dataLoader, Object key, Object loadContext) { + return null; + } + + /** + * This call back is done just before the {@link DataLoader#dispatch()} is invoked, + * and it completes when the dispatch call promise is done. + * + * @param dataLoader the {@link DataLoader} in question + * @return a DataLoaderInstrumentationContext or null to be more performant + */ + default DataLoaderInstrumentationContext> beginDispatch(DataLoader dataLoader) { + return null; + } + + /** + * This call back is done just before the `batch loader` of a {@link DataLoader} is invoked. 
Remember a batch loader + * could be called multiple times during a dispatch event (because of max batch sizes) + * + * @param dataLoader the {@link DataLoader} in question + * @param keys the set of keys being fetched + * @param environment the {@link BatchLoaderEnvironment} + * @return a DataLoaderInstrumentationContext or null to be more performant + */ + default DataLoaderInstrumentationContext> beginBatchLoader(DataLoader dataLoader, List keys, BatchLoaderEnvironment environment) { + return null; + } +} diff --git a/src/main/java/org/dataloader/instrumentation/DataLoaderInstrumentationContext.java b/src/main/java/org/dataloader/instrumentation/DataLoaderInstrumentationContext.java new file mode 100644 index 0000000..88b08ef --- /dev/null +++ b/src/main/java/org/dataloader/instrumentation/DataLoaderInstrumentationContext.java @@ -0,0 +1,33 @@ +package org.dataloader.instrumentation; + +import org.dataloader.annotations.PublicSpi; + +import java.util.concurrent.CompletableFuture; + +/** + * When a {@link DataLoaderInstrumentation}.'beginXXX()' method is called then it must return a {@link DataLoaderInstrumentationContext} + * that will be invoked when the step is first dispatched and then when it completes. Sometimes this is effectively the same time + * whereas at other times it's when an asynchronous {@link CompletableFuture} completes. + *
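For instance, a minimal editorial sketch of a context that simply logs both callbacks (both methods have default no-op implementations, so either can be omitted):

    DataLoaderInstrumentationContext<Object> loggingCtx = new DataLoaderInstrumentationContext<>() {
        @Override
        public void onDispatched() {
            // called once the instrumented step has been dispatched
            System.out.println("step dispatched");
        }

        @Override
        public void onCompleted(Object result, Throwable t) {
            // called when the step (or its CompletableFuture) completes
            System.out.println(t == null ? "step completed" : "step failed: " + t);
        }
    };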

+ * This pattern of construction of an object then call back is intended to allow "timers" to be created that can instrument what has + * just happened or "loggers" to be called to record what has happened. + */ +@PublicSpi +public interface DataLoaderInstrumentationContext { + /** + * This is invoked when the instrumentation step is initially dispatched. Note this is NOT + * the same time as the {@link DataLoaderInstrumentation}`beginXXX()` starts, but rather after all the inner + * work has been done. + */ + default void onDispatched() { + } + + /** + * This is invoked when the instrumentation step is fully completed. + * + * @param result the result of the step (which may be null) + * @param t this exception will be non-null if an exception was thrown during the step + */ + default void onCompleted(T result, Throwable t) { + } +} diff --git a/src/main/java/org/dataloader/instrumentation/DataLoaderInstrumentationHelper.java b/src/main/java/org/dataloader/instrumentation/DataLoaderInstrumentationHelper.java new file mode 100644 index 0000000..9e60060 --- /dev/null +++ b/src/main/java/org/dataloader/instrumentation/DataLoaderInstrumentationHelper.java @@ -0,0 +1,74 @@ +package org.dataloader.instrumentation; + +import org.dataloader.annotations.PublicApi; + +import java.util.function.BiConsumer; + +@PublicApi +public class DataLoaderInstrumentationHelper { + + @SuppressWarnings("RedundantMethodOverride") + private static final DataLoaderInstrumentationContext NOOP_CTX = new DataLoaderInstrumentationContext<>() { + @Override + public void onDispatched() { + } + + @Override + public void onCompleted(Object result, Throwable t) { + } + }; + + /** + * Returns a noop {@link DataLoaderInstrumentationContext} of the right type + * + * @param for two + * @return a noop context + */ + public static DataLoaderInstrumentationContext noOpCtx() { + //noinspection unchecked + return (DataLoaderInstrumentationContext) NOOP_CTX; + } + + /** + * A well known noop {@link DataLoaderInstrumentation} + */ + public static final DataLoaderInstrumentation NOOP_INSTRUMENTATION = new DataLoaderInstrumentation() { + }; + + /** + * Allows for the more fluent away to return an instrumentation context that runs the specified + * code on instrumentation step dispatch. + * + * @param codeToRun the code to run on dispatch + * @param the generic type + * @return an instrumentation context + */ + public static DataLoaderInstrumentationContext whenDispatched(Runnable codeToRun) { + return new SimpleDataLoaderInstrumentationContext<>(codeToRun, null); + } + + /** + * Allows for the more fluent away to return an instrumentation context that runs the specified + * code on instrumentation step completion. + * + * @param codeToRun the code to run on completion + * @param the generic type + * @return an instrumentation context + */ + public static DataLoaderInstrumentationContext whenCompleted(BiConsumer codeToRun) { + return new SimpleDataLoaderInstrumentationContext<>(null, codeToRun); + } + + + /** + * Check the {@link DataLoaderInstrumentationContext} to see if its null and returns a noop if it is or else the original + * context. This is a bit of a helper method. + * + * @param ic the context in play + * @param for two + * @return a non null context + */ + public static DataLoaderInstrumentationContext ctxOrNoopCtx(DataLoaderInstrumentationContext ic) { + return ic == null ? 
noOpCtx() : ic; + } +} diff --git a/src/main/java/org/dataloader/instrumentation/SimpleDataLoaderInstrumentationContext.java b/src/main/java/org/dataloader/instrumentation/SimpleDataLoaderInstrumentationContext.java new file mode 100644 index 0000000..f629a05 --- /dev/null +++ b/src/main/java/org/dataloader/instrumentation/SimpleDataLoaderInstrumentationContext.java @@ -0,0 +1,35 @@ +package org.dataloader.instrumentation; + + +import org.dataloader.annotations.Internal; + +import java.util.function.BiConsumer; + +/** + * A simple implementation of {@link DataLoaderInstrumentationContext} + */ +@Internal +class SimpleDataLoaderInstrumentationContext implements DataLoaderInstrumentationContext { + + private final BiConsumer codeToRunOnComplete; + private final Runnable codeToRunOnDispatch; + + SimpleDataLoaderInstrumentationContext(Runnable codeToRunOnDispatch, BiConsumer codeToRunOnComplete) { + this.codeToRunOnComplete = codeToRunOnComplete; + this.codeToRunOnDispatch = codeToRunOnDispatch; + } + + @Override + public void onDispatched() { + if (codeToRunOnDispatch != null) { + codeToRunOnDispatch.run(); + } + } + + @Override + public void onCompleted(T result, Throwable t) { + if (codeToRunOnComplete != null) { + codeToRunOnComplete.accept(result, t); + } + } +} diff --git a/src/main/java/org/dataloader/reactive/AbstractBatchSubscriber.java b/src/main/java/org/dataloader/reactive/AbstractBatchSubscriber.java new file mode 100644 index 0000000..c2f5438 --- /dev/null +++ b/src/main/java/org/dataloader/reactive/AbstractBatchSubscriber.java @@ -0,0 +1,104 @@ +package org.dataloader.reactive; + +import org.dataloader.Try; +import org.dataloader.stats.context.IncrementBatchLoadExceptionCountStatisticsContext; +import org.dataloader.stats.context.IncrementLoadErrorCountStatisticsContext; +import org.reactivestreams.Subscriber; +import org.reactivestreams.Subscription; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionException; + +import static org.dataloader.impl.Assertions.assertState; + +/** + * The base class for our reactive subscriber support + * + * @param for two + */ +abstract class AbstractBatchSubscriber implements Subscriber { + + final CompletableFuture> valuesFuture; + final List keys; + final List callContexts; + final List> queuedFutures; + final ReactiveSupport.HelperIntegration helperIntegration; + + List clearCacheKeys = new ArrayList<>(); + List completedValues = new ArrayList<>(); + boolean onErrorCalled = false; + boolean onCompleteCalled = false; + + AbstractBatchSubscriber( + CompletableFuture> valuesFuture, + List keys, + List callContexts, + List> queuedFutures, + ReactiveSupport.HelperIntegration helperIntegration + ) { + this.valuesFuture = valuesFuture; + this.keys = keys; + this.callContexts = callContexts; + this.queuedFutures = queuedFutures; + this.helperIntegration = helperIntegration; + } + + @Override + public void onSubscribe(Subscription subscription) { + subscription.request(keys.size()); + } + + @Override + public void onNext(T v) { + assertState(!onErrorCalled, () -> "onError has already been called; onNext may not be invoked."); + assertState(!onCompleteCalled, () -> "onComplete has already been called; onNext may not be invoked."); + } + + @Override + public void onComplete() { + assertState(!onErrorCalled, () -> "onError has already been called; onComplete may not be invoked."); + onCompleteCalled = true; + } + + @Override + public void onError(Throwable 
throwable) { + assertState(!onCompleteCalled, () -> "onComplete has already been called; onError may not be invoked."); + onErrorCalled = true; + + helperIntegration.getStats().incrementBatchLoadExceptionCount(new IncrementBatchLoadExceptionCountStatisticsContext<>(keys, callContexts)); + } + + /* + * A value has arrived - how do we complete the future that's associated with it in a common way + */ + void onNextValue(K key, V value, Object callContext, List> futures) { + if (value instanceof Try) { + // we allow the batch loader to return a Try so we can better represent a computation + // that might have worked or not. + //noinspection unchecked + Try tryValue = (Try) value; + if (tryValue.isSuccess()) { + futures.forEach(f -> f.complete(tryValue.get())); + } else { + helperIntegration.getStats().incrementLoadErrorCount(new IncrementLoadErrorCountStatisticsContext<>(key, callContext)); + futures.forEach(f -> f.completeExceptionally(tryValue.getThrowable())); + clearCacheKeys.add(key); + } + } else { + futures.forEach(f -> f.complete(value)); + } + } + + Throwable unwrapThrowable(Throwable ex) { + if (ex instanceof CompletionException) { + ex = ex.getCause(); + } + return ex; + } + + void possiblyClearCacheEntriesOnExceptions() { + helperIntegration.clearCacheEntriesOnExceptions(clearCacheKeys); + } +} diff --git a/src/main/java/org/dataloader/reactive/BatchSubscriberImpl.java b/src/main/java/org/dataloader/reactive/BatchSubscriberImpl.java new file mode 100644 index 0000000..d0b8110 --- /dev/null +++ b/src/main/java/org/dataloader/reactive/BatchSubscriberImpl.java @@ -0,0 +1,86 @@ +package org.dataloader.reactive; + +import org.dataloader.impl.DataLoaderAssertionException; + +import java.util.List; +import java.util.concurrent.CompletableFuture; + +/** + * This class can be used to subscribe to a {@link org.reactivestreams.Publisher} and then + * have the values it receives complete the data loader keys. The keys and values must be + * in index order. + *

+ * This is a reactive version of {@link org.dataloader.BatchLoader} + * + * @param the type of keys + * @param the type of values + */ +class BatchSubscriberImpl extends AbstractBatchSubscriber { + + private int idx = 0; + + BatchSubscriberImpl( + CompletableFuture> valuesFuture, + List keys, + List callContexts, + List> queuedFutures, + ReactiveSupport.HelperIntegration helperIntegration + ) { + super(valuesFuture, keys, callContexts, queuedFutures, helperIntegration); + } + + // onNext may be called by multiple threads - for the time being, we pass 'synchronized' to guarantee + // correctness (at the cost of speed). + @Override + public synchronized void onNext(V value) { + super.onNext(value); + + if (idx >= keys.size()) { + // hang on they have given us more values than we asked for in keys + // we cant handle this + return; + } + K key = keys.get(idx); + Object callContext = callContexts.get(idx); + CompletableFuture future = queuedFutures.get(idx); + onNextValue(key, value, callContext, List.of(future)); + + completedValues.add(value); + idx++; + } + + + @Override + public synchronized void onComplete() { + super.onComplete(); + if (keys.size() != completedValues.size()) { + // we have more or less values than promised + // we will go through all the outstanding promises and mark those that + // have not finished as failed + for (CompletableFuture queuedFuture : queuedFutures) { + if (!queuedFuture.isDone()) { + queuedFuture.completeExceptionally(new DataLoaderAssertionException("The size of the promised values MUST be the same size as the key list")); + } + } + } + possiblyClearCacheEntriesOnExceptions(); + valuesFuture.complete(completedValues); + } + + @Override + public synchronized void onError(Throwable ex) { + super.onError(ex); + ex = unwrapThrowable(ex); + // Set the remaining keys to the exception. + for (int i = idx; i < queuedFutures.size(); i++) { + K key = keys.get(i); + CompletableFuture future = queuedFutures.get(i); + if (!future.isDone()) { + future.completeExceptionally(ex); + // clear any cached view of this key because it failed + helperIntegration.clearCacheView(key); + } + } + valuesFuture.completeExceptionally(ex); + } +} diff --git a/src/main/java/org/dataloader/reactive/MappedBatchSubscriberImpl.java b/src/main/java/org/dataloader/reactive/MappedBatchSubscriberImpl.java new file mode 100644 index 0000000..d56efa0 --- /dev/null +++ b/src/main/java/org/dataloader/reactive/MappedBatchSubscriberImpl.java @@ -0,0 +1,103 @@ +package org.dataloader.reactive; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CompletableFuture; + +/** + * This class can be used to subscribe to a {@link org.reactivestreams.Publisher} and then + * have the values it receives complete the data loader keys in a map lookup fashion. + *

+ * This is a reactive version of {@link org.dataloader.MappedBatchLoader} + * + * @param the type of keys + * @param the type of values + */ +class MappedBatchSubscriberImpl extends AbstractBatchSubscriber> { + + private final Map callContextByKey; + private final Map>> queuedFuturesByKey; + private final Map completedValuesByKey = new HashMap<>(); + + + MappedBatchSubscriberImpl( + CompletableFuture> valuesFuture, + List keys, + List callContexts, + List> queuedFutures, + ReactiveSupport.HelperIntegration helperIntegration + ) { + super(valuesFuture, keys, callContexts, queuedFutures, helperIntegration); + this.callContextByKey = new HashMap<>(); + this.queuedFuturesByKey = new HashMap<>(); + for (int idx = 0; idx < queuedFutures.size(); idx++) { + K key = keys.get(idx); + Object callContext = callContexts.get(idx); + CompletableFuture queuedFuture = queuedFutures.get(idx); + callContextByKey.put(key, callContext); + queuedFuturesByKey.computeIfAbsent(key, k -> new ArrayList<>()).add(queuedFuture); + } + } + + + @Override + public synchronized void onNext(Map.Entry entry) { + super.onNext(entry); + K key = entry.getKey(); + V value = entry.getValue(); + + Object callContext = callContextByKey.get(key); + List> futures = queuedFuturesByKey.getOrDefault(key, List.of()); + + onNextValue(key, value, callContext, futures); + + // did we have an actual key for this value - ignore it if they send us one outside the key set + if (!futures.isEmpty()) { + completedValuesByKey.put(key, value); + } + } + + @Override + public synchronized void onComplete() { + super.onComplete(); + + possiblyClearCacheEntriesOnExceptions(); + List values = new ArrayList<>(keys.size()); + for (K key : keys) { + V value = completedValuesByKey.get(key); + values.add(value); + + List> futures = queuedFuturesByKey.getOrDefault(key, List.of()); + for (CompletableFuture future : futures) { + if (!future.isDone()) { + // we have a future that never came back for that key + // but the publisher is done sending in data - it must be null + // e.g. for key X when found no value + future.complete(null); + } + } + } + valuesFuture.complete(values); + } + + @Override + public synchronized void onError(Throwable ex) { + super.onError(ex); + ex = unwrapThrowable(ex); + // Complete the futures for the remaining keys with the exception. 
+ for (int idx = 0; idx < queuedFutures.size(); idx++) { + K key = keys.get(idx); + List> futures = queuedFuturesByKey.get(key); + if (!completedValuesByKey.containsKey(key)) { + for (CompletableFuture future : futures) { + future.completeExceptionally(ex); + } + // clear any cached view of this key because they all failed + helperIntegration.clearCacheView(key); + } + } + valuesFuture.completeExceptionally(ex); + } +} diff --git a/src/main/java/org/dataloader/reactive/ReactiveSupport.java b/src/main/java/org/dataloader/reactive/ReactiveSupport.java new file mode 100644 index 0000000..fc03bb0 --- /dev/null +++ b/src/main/java/org/dataloader/reactive/ReactiveSupport.java @@ -0,0 +1,45 @@ +package org.dataloader.reactive; + +import org.dataloader.stats.StatisticsCollector; +import org.reactivestreams.Subscriber; + +import java.util.List; +import java.util.Map; +import java.util.concurrent.CompletableFuture; + +public class ReactiveSupport { + + public static Subscriber batchSubscriber( + CompletableFuture> valuesFuture, + List keys, + List callContexts, + List> queuedFutures, + ReactiveSupport.HelperIntegration helperIntegration + ) { + return new BatchSubscriberImpl<>(valuesFuture, keys, callContexts, queuedFutures, helperIntegration); + } + + public static Subscriber> mappedBatchSubscriber( + CompletableFuture> valuesFuture, + List keys, + List callContexts, + List> queuedFutures, + ReactiveSupport.HelperIntegration helperIntegration + ) { + return new MappedBatchSubscriberImpl<>(valuesFuture, keys, callContexts, queuedFutures, helperIntegration); + } + + /** + * Just some callbacks to the data loader code to do common tasks + * + * @param for keys + */ + public interface HelperIntegration { + + StatisticsCollector getStats(); + + void clearCacheView(K key); + + void clearCacheEntriesOnExceptions(List keys); + } +} diff --git a/src/main/java/org/dataloader/registries/ScheduledDataLoaderRegistry.java b/src/main/java/org/dataloader/registries/ScheduledDataLoaderRegistry.java index 6ea9425..b6bc257 100644 --- a/src/main/java/org/dataloader/registries/ScheduledDataLoaderRegistry.java +++ b/src/main/java/org/dataloader/registries/ScheduledDataLoaderRegistry.java @@ -3,6 +3,7 @@ import org.dataloader.DataLoader; import org.dataloader.DataLoaderRegistry; import org.dataloader.annotations.ExperimentalApi; +import org.dataloader.instrumentation.DataLoaderInstrumentation; import java.time.Duration; import java.util.LinkedHashMap; @@ -64,8 +65,7 @@ public class ScheduledDataLoaderRegistry extends DataLoaderRegistry implements A private volatile boolean closed; private ScheduledDataLoaderRegistry(Builder builder) { - super(); - this.dataLoaders.putAll(builder.dataLoaders); + super(builder.dataLoaders, builder.instrumentation); this.scheduledExecutorService = builder.scheduledExecutorService; this.defaultExecutorUsed = builder.defaultExecutorUsed; this.schedule = builder.schedule; @@ -271,6 +271,8 @@ public static class Builder { private boolean defaultExecutorUsed = false; private Duration schedule = Duration.ofMillis(10); private boolean tickerMode = false; + private DataLoaderInstrumentation instrumentation; + /** * If you provide a {@link ScheduledExecutorService} then it will NOT be shutdown when @@ -363,6 +365,11 @@ public Builder tickerMode(boolean tickerMode) { return this; } + public Builder instrumentation(DataLoaderInstrumentation instrumentation) { + this.instrumentation = instrumentation; + return this; + } + /** * @return the newly built {@link ScheduledDataLoaderRegistry} */ diff --git 
a/src/main/java/org/dataloader/scheduler/BatchLoaderScheduler.java b/src/main/java/org/dataloader/scheduler/BatchLoaderScheduler.java index 7cddd54..e7e95d9 100644 --- a/src/main/java/org/dataloader/scheduler/BatchLoaderScheduler.java +++ b/src/main/java/org/dataloader/scheduler/BatchLoaderScheduler.java @@ -5,6 +5,8 @@ import org.dataloader.DataLoader; import org.dataloader.DataLoaderOptions; import org.dataloader.MappedBatchLoader; +import org.dataloader.MappedBatchPublisher; +import org.dataloader.BatchPublisher; import java.util.List; import java.util.Map; @@ -42,6 +44,13 @@ interface ScheduledMappedBatchLoaderCall { CompletionStage> invoke(); } + /** + * This represents a callback that will invoke a {@link BatchPublisher} or {@link MappedBatchPublisher} function under the covers + */ + interface ScheduledBatchPublisherCall { + void invoke(); + } + /** * This is called to schedule a {@link BatchLoader} call. * @@ -71,4 +80,16 @@ interface ScheduledMappedBatchLoaderCall { * @return a promise to the values that come from the {@link BatchLoader} */ CompletionStage> scheduleMappedBatchLoader(ScheduledMappedBatchLoaderCall scheduledCall, List keys, BatchLoaderEnvironment environment); + + /** + * This is called to schedule a {@link BatchPublisher} call. + * + * @param scheduledCall the callback that needs to be invoked to allow the {@link BatchPublisher} to proceed. + * @param keys this is the list of keys that will be passed to the {@link BatchPublisher}. + * This is provided only for informative reasons and, you can't change the keys that are used + * @param environment this is the {@link BatchLoaderEnvironment} in place, + * which can be null if it's a simple {@link BatchPublisher} call + * @param the key type + */ + void scheduleBatchPublisher(ScheduledBatchPublisherCall scheduledCall, List keys, BatchLoaderEnvironment environment); } diff --git a/src/test/java/ReadmeExamples.java b/src/test/java/ReadmeExamples.java index d25dfa7..1f718aa 100644 --- a/src/test/java/ReadmeExamples.java +++ b/src/test/java/ReadmeExamples.java @@ -1,20 +1,29 @@ import org.dataloader.BatchLoader; import org.dataloader.BatchLoaderEnvironment; import org.dataloader.BatchLoaderWithContext; +import org.dataloader.BatchPublisher; import org.dataloader.CacheMap; import org.dataloader.DataLoader; import org.dataloader.DataLoaderFactory; import org.dataloader.DataLoaderOptions; +import org.dataloader.DataLoaderRegistry; +import org.dataloader.DispatchResult; import org.dataloader.MappedBatchLoaderWithContext; +import org.dataloader.MappedBatchPublisher; import org.dataloader.Try; import org.dataloader.fixtures.SecurityCtx; import org.dataloader.fixtures.User; import org.dataloader.fixtures.UserManager; +import org.dataloader.instrumentation.DataLoaderInstrumentation; +import org.dataloader.instrumentation.DataLoaderInstrumentationContext; +import org.dataloader.instrumentation.DataLoaderInstrumentationHelper; import org.dataloader.registries.DispatchPredicate; import org.dataloader.registries.ScheduledDataLoaderRegistry; import org.dataloader.scheduler.BatchLoaderScheduler; import org.dataloader.stats.Statistics; import org.dataloader.stats.ThreadLocalStatisticsCollector; +import org.reactivestreams.Publisher; +import org.reactivestreams.Subscriber; import java.time.Duration; import java.util.ArrayList; @@ -171,7 +180,7 @@ private void tryExample() { } } - private void tryBatcLoader() { + private void tryBatchLoader() { DataLoader dataLoader = DataLoaderFactory.newDataLoaderWithTry(new BatchLoader>() { @Override 
public CompletionStage>> load(List keys) { @@ -187,6 +196,28 @@ public CompletionStage>> load(List keys) { }); } + private void batchPublisher() { + BatchPublisher batchPublisher = new BatchPublisher() { + @Override + public void load(List userIds, Subscriber userSubscriber) { + Publisher userResults = userManager.streamUsersById(userIds); + userResults.subscribe(userSubscriber); + } + }; + DataLoader userLoader = DataLoaderFactory.newPublisherDataLoader(batchPublisher); + } + + private void mappedBatchPublisher() { + MappedBatchPublisher mappedBatchPublisher = new MappedBatchPublisher() { + @Override + public void load(Set userIds, Subscriber> userEntrySubscriber) { + Publisher> userEntries = userManager.streamUsersById(userIds); + userEntries.subscribe(userEntrySubscriber); + } + }; + DataLoader userLoader = DataLoaderFactory.newMappedPublisherDataLoader(mappedBatchPublisher); + } + DataLoader userDataLoader; private void clearCacheOnError() { @@ -202,6 +233,7 @@ private void clearCacheOnError() { } BatchLoader userBatchLoader; + BatchLoader teamsBatchLoader; private void disableCache() { DataLoaderFactory.newDataLoader(userBatchLoader, DataLoaderOptions.newOptions().setCachingEnabled(false)); @@ -304,6 +336,12 @@ public CompletionStage> scheduleMappedBatchLoader(ScheduledMapp return scheduledCall.invoke(); }).thenCompose(Function.identity()); } + + @Override + public void scheduleBatchPublisher(ScheduledBatchPublisherCall scheduledCall, List keys, BatchLoaderEnvironment environment) { + snooze(10); + scheduledCall.invoke(); + } }; } @@ -348,4 +386,63 @@ private void ScheduledDispatcherChained() { .build(); } + + private DataLoaderInstrumentation timingInstrumentation = DataLoaderInstrumentationHelper.NOOP_INSTRUMENTATION; + + private void instrumentationExample() { + + DataLoaderInstrumentation timingInstrumentation = new DataLoaderInstrumentation() { + @Override + public DataLoaderInstrumentationContext> beginDispatch(DataLoader dataLoader) { + long then = System.currentTimeMillis(); + return DataLoaderInstrumentationHelper.whenCompleted((result, err) -> { + long ms = System.currentTimeMillis() - then; + System.out.println(format("dispatch time: %d ms", ms)); + }); + } + + @Override + public DataLoaderInstrumentationContext> beginBatchLoader(DataLoader dataLoader, List keys, BatchLoaderEnvironment environment) { + long then = System.currentTimeMillis(); + return DataLoaderInstrumentationHelper.whenCompleted((result, err) -> { + long ms = System.currentTimeMillis() - then; + System.out.println(format("batch loader time: %d ms", ms)); + }); + } + }; + DataLoaderOptions options = DataLoaderOptions.newOptions().setInstrumentation(timingInstrumentation); + DataLoader userDataLoader = DataLoaderFactory.newDataLoader(userBatchLoader, options); + } + + private void registryExample() { + DataLoader userDataLoader = DataLoaderFactory.newDataLoader(userBatchLoader); + DataLoader teamsDataLoader = DataLoaderFactory.newDataLoader(teamsBatchLoader); + + DataLoaderRegistry registry = DataLoaderRegistry.newRegistry() + .instrumentation(timingInstrumentation) + .register("users", userDataLoader) + .register("teams", teamsDataLoader) + .build(); + + DataLoader changedUsersDataLoader = registry.getDataLoader("users"); + + } + + private void combiningRegistryExample() { + DataLoader userDataLoader = DataLoaderFactory.newDataLoader(userBatchLoader); + DataLoader teamsDataLoader = DataLoaderFactory.newDataLoader(teamsBatchLoader); + + DataLoaderRegistry registry = DataLoaderRegistry.newRegistry() + 
.register("users", userDataLoader) + .register("teams", teamsDataLoader) + .build(); + + DataLoaderRegistry registryCombined = DataLoaderRegistry.newRegistry() + .instrumentation(timingInstrumentation) + .registerAll(registry) + .build(); + + DataLoader changedUsersDataLoader = registryCombined.getDataLoader("users"); + + } } diff --git a/src/test/java/org/dataloader/DataLoaderBatchLoaderEnvironmentTest.java b/src/test/java/org/dataloader/DataLoaderBatchLoaderEnvironmentTest.java index 36e0ed4..90adbc5 100644 --- a/src/test/java/org/dataloader/DataLoaderBatchLoaderEnvironmentTest.java +++ b/src/test/java/org/dataloader/DataLoaderBatchLoaderEnvironmentTest.java @@ -1,11 +1,8 @@ package org.dataloader; -import org.junit.Test; +import org.junit.jupiter.api.Test; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; +import java.util.*; import java.util.concurrent.CompletableFuture; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; @@ -14,8 +11,8 @@ import static java.util.Collections.singletonList; import static org.dataloader.DataLoaderFactory.newDataLoader; import static org.dataloader.DataLoaderFactory.newMappedDataLoader; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; -import static org.junit.Assert.assertThat; /** * Tests related to context. DataLoaderTest is getting to big and needs refactoring @@ -50,10 +47,14 @@ public void context_is_passed_to_batch_loader_function() { loader.load("A"); loader.load("B"); loader.loadMany(asList("C", "D")); + Map keysAndContexts = new LinkedHashMap<>(); + keysAndContexts.put("E", null); + keysAndContexts.put("F", null); + loader.loadMany(keysAndContexts); List results = loader.dispatchAndJoin(); - assertThat(results, equalTo(asList("A-ctx", "B-ctx", "C-ctx", "D-ctx"))); + assertThat(results, equalTo(asList("A-ctx", "B-ctx", "C-ctx", "D-ctx", "E-ctx", "F-ctx"))); } @Test @@ -66,10 +67,14 @@ public void key_contexts_are_passed_to_batch_loader_function() { loader.load("A", "aCtx"); loader.load("B", "bCtx"); loader.loadMany(asList("C", "D"), asList("cCtx", "dCtx")); + Map keysAndContexts = new LinkedHashMap<>(); + keysAndContexts.put("E", "eCtx"); + keysAndContexts.put("F", "fCtx"); + loader.loadMany(keysAndContexts); List results = loader.dispatchAndJoin(); - assertThat(results, equalTo(asList("A-ctx-m:aCtx-l:aCtx", "B-ctx-m:bCtx-l:bCtx", "C-ctx-m:cCtx-l:cCtx", "D-ctx-m:dCtx-l:dCtx"))); + assertThat(results, equalTo(asList("A-ctx-m:aCtx-l:aCtx", "B-ctx-m:bCtx-l:bCtx", "C-ctx-m:cCtx-l:cCtx", "D-ctx-m:dCtx-l:dCtx", "E-ctx-m:eCtx-l:eCtx", "F-ctx-m:fCtx-l:fCtx"))); } @Test @@ -82,12 +87,17 @@ public void key_contexts_are_passed_to_batch_loader_function_when_batching_disab CompletableFuture aLoad = loader.load("A", "aCtx"); CompletableFuture bLoad = loader.load("B", "bCtx"); - CompletableFuture> canDLoad = loader.loadMany(asList("C", "D"), asList("cCtx", "dCtx")); + CompletableFuture> cAndDLoad = loader.loadMany(asList("C", "D"), asList("cCtx", "dCtx")); + Map keysAndContexts = new LinkedHashMap<>(); + keysAndContexts.put("E", "eCtx"); + keysAndContexts.put("F", "fCtx"); + CompletableFuture> eAndFLoad = loader.loadMany(keysAndContexts); List results = new ArrayList<>(asList(aLoad.join(), bLoad.join())); - results.addAll(canDLoad.join()); + results.addAll(cAndDLoad.join()); + results.addAll(eAndFLoad.join().values()); - assertThat(results, equalTo(asList("A-ctx-m:aCtx-l:aCtx", "B-ctx-m:bCtx-l:bCtx", "C-ctx-m:cCtx-l:cCtx", 
"D-ctx-m:dCtx-l:dCtx"))); + assertThat(results, equalTo(asList("A-ctx-m:aCtx-l:aCtx", "B-ctx-m:bCtx-l:bCtx", "C-ctx-m:cCtx-l:cCtx", "D-ctx-m:dCtx-l:dCtx", "E-ctx-m:eCtx-l:eCtx", "F-ctx-m:fCtx-l:fCtx"))); } @Test @@ -101,9 +111,14 @@ public void missing_key_contexts_are_passed_to_batch_loader_function() { loader.load("B"); loader.loadMany(asList("C", "D"), singletonList("cCtx")); + Map keysAndContexts = new LinkedHashMap<>(); + keysAndContexts.put("E", "eCtx"); + keysAndContexts.put("F", null); + loader.loadMany(keysAndContexts); + List results = loader.dispatchAndJoin(); - assertThat(results, equalTo(asList("A-ctx-m:aCtx-l:aCtx", "B-ctx-m:null-l:null", "C-ctx-m:cCtx-l:cCtx", "D-ctx-m:null-l:null"))); + assertThat(results, equalTo(asList("A-ctx-m:aCtx-l:aCtx", "B-ctx-m:null-l:null", "C-ctx-m:cCtx-l:cCtx", "D-ctx-m:null-l:null", "E-ctx-m:eCtx-l:eCtx", "F-ctx-m:null-l:null"))); } @Test @@ -125,9 +140,14 @@ public void context_is_passed_to_map_batch_loader_function() { loader.load("B"); loader.loadMany(asList("C", "D"), singletonList("cCtx")); + Map keysAndContexts = new LinkedHashMap<>(); + keysAndContexts.put("E", "eCtx"); + keysAndContexts.put("F", null); + loader.loadMany(keysAndContexts); + List results = loader.dispatchAndJoin(); - assertThat(results, equalTo(asList("A-ctx-aCtx", "B-ctx-null", "C-ctx-cCtx", "D-ctx-null"))); + assertThat(results, equalTo(asList("A-ctx-aCtx", "B-ctx-null", "C-ctx-cCtx", "D-ctx-null", "E-ctx-eCtx", "F-ctx-null"))); } @Test @@ -142,9 +162,14 @@ public void null_is_passed_as_context_if_you_do_nothing() { loader.load("B"); loader.loadMany(asList("C", "D")); + Map keysAndContexts = new LinkedHashMap<>(); + keysAndContexts.put("E", null); + keysAndContexts.put("F", null); + loader.loadMany(keysAndContexts); + List results = loader.dispatchAndJoin(); - assertThat(results, equalTo(asList("A-null", "B-null", "C-null", "D-null"))); + assertThat(results, equalTo(asList("A-null", "B-null", "C-null", "D-null", "E-null", "F-null"))); } @Test @@ -160,9 +185,14 @@ public void null_is_passed_as_context_to_map_loader_if_you_do_nothing() { loader.load("B"); loader.loadMany(asList("C", "D")); + Map keysAndContexts = new LinkedHashMap<>(); + keysAndContexts.put("E", null); + keysAndContexts.put("F", null); + loader.loadMany(keysAndContexts); + List results = loader.dispatchAndJoin(); - assertThat(results, equalTo(asList("A-null", "B-null", "C-null", "D-null"))); + assertThat(results, equalTo(asList("A-null", "B-null", "C-null", "D-null", "E-null", "F-null"))); } @Test diff --git a/src/test/java/org/dataloader/DataLoaderBuilderTest.java b/src/test/java/org/dataloader/DataLoaderBuilderTest.java new file mode 100644 index 0000000..f38ff82 --- /dev/null +++ b/src/test/java/org/dataloader/DataLoaderBuilderTest.java @@ -0,0 +1,76 @@ +package org.dataloader; + +import org.junit.jupiter.api.Test; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; + +public class DataLoaderBuilderTest { + + BatchLoader batchLoader1 = keys -> null; + + BatchLoader batchLoader2 = keys -> null; + + DataLoaderOptions defaultOptions = DataLoaderOptions.newOptions(); + DataLoaderOptions differentOptions = DataLoaderOptions.newOptions().setCachingEnabled(false); + + @Test + void canBuildNewDataLoaders() { + DataLoaderFactory.Builder builder = DataLoaderFactory.builder(); + builder.options(differentOptions); + builder.batchLoadFunction(batchLoader1); + DataLoader dataLoader = builder.build(); + + 
assertThat(dataLoader.getOptions(), equalTo(differentOptions)); + assertThat(dataLoader.getBatchLoadFunction(), equalTo(batchLoader1)); + // + // and we can copy ok + // + builder = DataLoaderFactory.builder(dataLoader); + dataLoader = builder.build(); + + assertThat(dataLoader.getOptions(), equalTo(differentOptions)); + assertThat(dataLoader.getBatchLoadFunction(), equalTo(batchLoader1)); + // + // and we can copy and transform ok + // + builder = DataLoaderFactory.builder(dataLoader); + builder.options(defaultOptions); + builder.batchLoadFunction(batchLoader2); + dataLoader = builder.build(); + + assertThat(dataLoader.getOptions(), equalTo(defaultOptions)); + assertThat(dataLoader.getBatchLoadFunction(), equalTo(batchLoader2)); + } + + @Test + void theDataLoaderCanTransform() { + DataLoader dataLoaderOrig = DataLoaderFactory.newDataLoader(batchLoader1, defaultOptions); + assertThat(dataLoaderOrig.getOptions(), equalTo(defaultOptions)); + assertThat(dataLoaderOrig.getBatchLoadFunction(), equalTo(batchLoader1)); + // + // we can transform the data loader + // + DataLoader dataLoaderTransformed = dataLoaderOrig.transform(it -> { + it.options(differentOptions); + it.batchLoadFunction(batchLoader2); + }); + + assertThat(dataLoaderTransformed, not(equalTo(dataLoaderOrig))); + assertThat(dataLoaderTransformed.getOptions(), equalTo(differentOptions)); + assertThat(dataLoaderTransformed.getBatchLoadFunction(), equalTo(batchLoader2)); + + // can copy values + dataLoaderOrig = DataLoaderFactory.newDataLoader(batchLoader1, defaultOptions); + + dataLoaderTransformed = dataLoaderOrig.transform(it -> { + it.batchLoadFunction(batchLoader2); + }); + + assertThat(dataLoaderTransformed, not(equalTo(dataLoaderOrig))); + assertThat(dataLoaderTransformed.getOptions(), equalTo(defaultOptions)); + assertThat(dataLoaderTransformed.getBatchLoadFunction(), equalTo(batchLoader2)); + + } +} diff --git a/src/test/java/org/dataloader/DataLoaderCacheMapTest.java b/src/test/java/org/dataloader/DataLoaderCacheMapTest.java index abfc8d3..df364a2 100644 --- a/src/test/java/org/dataloader/DataLoaderCacheMapTest.java +++ b/src/test/java/org/dataloader/DataLoaderCacheMapTest.java @@ -1,6 +1,6 @@ package org.dataloader; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.ArrayList; import java.util.Collection; @@ -8,8 +8,8 @@ import java.util.concurrent.CompletableFuture; import static org.dataloader.DataLoaderFactory.newDataLoader; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; -import static org.junit.Assert.assertThat; /** * Tests for cacheMap functionality.. 
@@ -43,7 +43,7 @@ public void should_access_to_future_dependants() { Collection> futures = dataLoader.getCacheMap().getAll(); List> futuresList = new ArrayList<>(futures); - assertThat(futuresList.get(0).getNumberOfDependents(), equalTo(2)); - assertThat(futuresList.get(1).getNumberOfDependents(), equalTo(1)); + assertThat(futuresList.get(0).getNumberOfDependents(), equalTo(4)); // instrumentation is depending on the CF completing + assertThat(futuresList.get(1).getNumberOfDependents(), equalTo(2)); } } diff --git a/src/test/java/org/dataloader/DataLoaderIfPresentTest.java b/src/test/java/org/dataloader/DataLoaderIfPresentTest.java index 1d897f2..f0a50d6 100644 --- a/src/test/java/org/dataloader/DataLoaderIfPresentTest.java +++ b/src/test/java/org/dataloader/DataLoaderIfPresentTest.java @@ -1,15 +1,15 @@ package org.dataloader; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; import static org.dataloader.DataLoaderFactory.newDataLoader; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.sameInstance; -import static org.junit.Assert.assertThat; /** * Tests for IfPresent and IfCompleted functionality. diff --git a/src/test/java/org/dataloader/DataLoaderMapBatchLoaderTest.java b/src/test/java/org/dataloader/DataLoaderMapBatchLoaderTest.java deleted file mode 100644 index 0fced79..0000000 --- a/src/test/java/org/dataloader/DataLoaderMapBatchLoaderTest.java +++ /dev/null @@ -1,184 +0,0 @@ -package org.dataloader; - -import org.junit.Test; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.atomic.AtomicInteger; - -import static java.util.Arrays.asList; -import static java.util.Collections.singletonList; -import static org.awaitility.Awaitility.await; -import static org.dataloader.DataLoaderFactory.newDataLoader; -import static org.dataloader.DataLoaderOptions.newOptions; -import static org.dataloader.fixtures.TestKit.futureError; -import static org.dataloader.fixtures.TestKit.listFrom; -import static org.dataloader.impl.CompletableFutureKit.cause; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; - -/** - * Much of the tests that related to {@link MappedBatchLoader} also related to - * {@link org.dataloader.BatchLoader}. This is white box testing somewhat because we could have repeated - * ALL the tests in {@link org.dataloader.DataLoaderTest} here as well but chose not to because we KNOW that - * DataLoader differs only a little in how it handles the 2 types of loader functions. We choose to grab some - * common functionality for repeat testing and otherwise rely on the very complete other tests. 
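The revised expectations in DataLoaderCacheMapTest above (2 -> 4 and 1 -> 2) follow from the instrumentation support now chaining extra stages onto each cached CompletableFuture, as the inline comment notes. Here is a JDK-only sketch of what getNumberOfDependents() counts; the Javadoc describes the value as an estimate.

import java.util.concurrent.CompletableFuture;

public class DependentsSketch {
    public static void main(String[] args) {
        CompletableFuture<String> cf = new CompletableFuture<>();

        // each stage chained onto an incomplete future registers itself as a dependent
        cf.thenApply(String::toUpperCase);
        cf.thenAccept(System.out::println);

        // reports 2 here: one dependent per chained stage
        System.out.println(cf.getNumberOfDependents());

        cf.complete("done");
    }
}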
- */ -public class DataLoaderMapBatchLoaderTest { - - MappedBatchLoader evensOnlyMappedBatchLoader = (keys) -> { - Map mapOfResults = new HashMap<>(); - - AtomicInteger index = new AtomicInteger(); - keys.forEach(k -> { - int i = index.getAndIncrement(); - if (i % 2 == 0) { - mapOfResults.put(k, k); - } - }); - return CompletableFuture.completedFuture(mapOfResults); - }; - - private static DataLoader idMapLoader(DataLoaderOptions options, List> loadCalls) { - MappedBatchLoader kvBatchLoader = (keys) -> { - loadCalls.add(new ArrayList<>(keys)); - Map map = new HashMap<>(); - //noinspection unchecked - keys.forEach(k -> map.put(k, (V) k)); - return CompletableFuture.completedFuture(map); - }; - return DataLoaderFactory.newMappedDataLoader(kvBatchLoader, options); - } - - private static DataLoader idMapLoaderBlowsUps( - DataLoaderOptions options, List> loadCalls) { - return newDataLoader((keys) -> { - loadCalls.add(new ArrayList<>(keys)); - return futureError(); - }, options); - } - - - @Test - public void basic_map_batch_loading() { - DataLoader loader = DataLoaderFactory.newMappedDataLoader(evensOnlyMappedBatchLoader); - - loader.load("A"); - loader.load("B"); - loader.loadMany(asList("C", "D")); - - List results = loader.dispatchAndJoin(); - - assertThat(results.size(), equalTo(4)); - assertThat(results, equalTo(asList("A", null, "C", null))); - } - - - @Test - public void should_map_Batch_multiple_requests() throws ExecutionException, InterruptedException { - List> loadCalls = new ArrayList<>(); - DataLoader identityLoader = idMapLoader(new DataLoaderOptions(), loadCalls); - - CompletableFuture future1 = identityLoader.load(1); - CompletableFuture future2 = identityLoader.load(2); - identityLoader.dispatch(); - - await().until(() -> future1.isDone() && future2.isDone()); - assertThat(future1.get(), equalTo(1)); - assertThat(future2.get(), equalTo(2)); - assertThat(loadCalls, equalTo(singletonList(asList(1, 2)))); - } - - @Test - public void can_split_max_batch_sizes_correctly() { - List> loadCalls = new ArrayList<>(); - DataLoader identityLoader = idMapLoader(newOptions().setMaxBatchSize(5), loadCalls); - - for (int i = 0; i < 21; i++) { - identityLoader.load(i); - } - List> expectedCalls = new ArrayList<>(); - expectedCalls.add(listFrom(0, 5)); - expectedCalls.add(listFrom(5, 10)); - expectedCalls.add(listFrom(10, 15)); - expectedCalls.add(listFrom(15, 20)); - expectedCalls.add(listFrom(20, 21)); - - List result = identityLoader.dispatch().join(); - - assertThat(result, equalTo(listFrom(0, 21))); - assertThat(loadCalls, equalTo(expectedCalls)); - } - - @Test - public void should_Propagate_error_to_all_loads() { - List> loadCalls = new ArrayList<>(); - DataLoader errorLoader = idMapLoaderBlowsUps(new DataLoaderOptions(), loadCalls); - - CompletableFuture future1 = errorLoader.load(1); - CompletableFuture future2 = errorLoader.load(2); - errorLoader.dispatch(); - - await().until(future1::isDone); - - assertThat(future1.isCompletedExceptionally(), is(true)); - Throwable cause = cause(future1); - assert cause != null; - assertThat(cause, instanceOf(IllegalStateException.class)); - assertThat(cause.getMessage(), equalTo("Error")); - - await().until(future2::isDone); - cause = cause(future2); - assert cause != null; - assertThat(cause.getMessage(), equalTo(cause.getMessage())); - - assertThat(loadCalls, equalTo(singletonList(asList(1, 2)))); - } - - @Test - public void should_work_with_duplicate_keys_when_caching_disabled() throws ExecutionException, InterruptedException { - List> loadCalls = 
new ArrayList<>(); - DataLoader identityLoader = - idMapLoader(newOptions().setCachingEnabled(false), loadCalls); - - CompletableFuture future1 = identityLoader.load("A"); - CompletableFuture future2 = identityLoader.load("B"); - CompletableFuture future3 = identityLoader.load("A"); - identityLoader.dispatch(); - - await().until(() -> future1.isDone() && future2.isDone() && future3.isDone()); - assertThat(future1.get(), equalTo("A")); - assertThat(future2.get(), equalTo("B")); - assertThat(future3.get(), equalTo("A")); - - // the map batch functions use a set of keys as input and hence remove duplicates unlike list variant - assertThat(loadCalls, equalTo(singletonList(asList("A", "B")))); - } - - @Test - public void should_work_with_duplicate_keys_when_caching_enabled() throws ExecutionException, InterruptedException { - List> loadCalls = new ArrayList<>(); - DataLoader identityLoader = - idMapLoader(newOptions().setCachingEnabled(true), loadCalls); - - CompletableFuture future1 = identityLoader.load("A"); - CompletableFuture future2 = identityLoader.load("B"); - CompletableFuture future3 = identityLoader.load("A"); - identityLoader.dispatch(); - - await().until(() -> future1.isDone() && future2.isDone() && future3.isDone()); - assertThat(future1.get(), equalTo("A")); - assertThat(future2.get(), equalTo("B")); - assertThat(future3.get(), equalTo("A")); - assertThat(loadCalls, equalTo(singletonList(asList("A", "B")))); - } - - -} diff --git a/src/test/java/org/dataloader/DataLoaderOptionsTest.java b/src/test/java/org/dataloader/DataLoaderOptionsTest.java new file mode 100644 index 0000000..b4ebb9e --- /dev/null +++ b/src/test/java/org/dataloader/DataLoaderOptionsTest.java @@ -0,0 +1,230 @@ +package org.dataloader; + +import org.dataloader.impl.DefaultCacheMap; +import org.dataloader.impl.NoOpValueCache; +import org.dataloader.instrumentation.DataLoaderInstrumentation; +import org.dataloader.scheduler.BatchLoaderScheduler; +import org.dataloader.stats.NoOpStatisticsCollector; +import org.dataloader.stats.StatisticsCollector; +import org.hamcrest.CoreMatchers; +import org.junit.jupiter.api.Test; + +import java.util.List; +import java.util.Map; +import java.util.concurrent.CompletionStage; +import java.util.function.Supplier; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.MatcherAssert.assertThat; + +@SuppressWarnings("OptionalGetWithoutIsPresent") +class DataLoaderOptionsTest { + + DataLoaderOptions optionsDefault = new DataLoaderOptions(); + + @Test + void canCreateDefaultOptions() { + + assertThat(optionsDefault.batchingEnabled(), equalTo(true)); + assertThat(optionsDefault.cachingEnabled(), equalTo(true)); + assertThat(optionsDefault.cachingExceptionsEnabled(), equalTo(true)); + assertThat(optionsDefault.maxBatchSize(), equalTo(-1)); + assertThat(optionsDefault.getBatchLoaderScheduler(), equalTo(null)); + + DataLoaderOptions builtOptions = DataLoaderOptions.newOptionsBuilder().build(); + assertThat(builtOptions, equalTo(optionsDefault)); + assertThat(builtOptions == optionsDefault, equalTo(false)); + + DataLoaderOptions transformedOptions = optionsDefault.transform(builder -> { + }); + assertThat(transformedOptions, equalTo(optionsDefault)); + assertThat(transformedOptions == optionsDefault, equalTo(false)); + } + + @Test + void canCopyOk() { + DataLoaderOptions optionsNext = new DataLoaderOptions(optionsDefault); + assertThat(optionsNext, equalTo(optionsDefault)); + assertThat(optionsNext == optionsDefault, equalTo(false)); + + optionsNext = 
DataLoaderOptions.newDataLoaderOptions(optionsDefault).build(); + assertThat(optionsNext, equalTo(optionsDefault)); + assertThat(optionsNext == optionsDefault, equalTo(false)); + } + + BatchLoaderScheduler testBatchLoaderScheduler = new BatchLoaderScheduler() { + @Override + public CompletionStage> scheduleBatchLoader(ScheduledBatchLoaderCall scheduledCall, List keys, BatchLoaderEnvironment environment) { + return null; + } + + @Override + public CompletionStage> scheduleMappedBatchLoader(ScheduledMappedBatchLoaderCall scheduledCall, List keys, BatchLoaderEnvironment environment) { + return null; + } + + @Override + public void scheduleBatchPublisher(ScheduledBatchPublisherCall scheduledCall, List keys, BatchLoaderEnvironment environment) { + + } + }; + + BatchLoaderContextProvider testBatchLoaderContextProvider = () -> null; + + CacheMap testCacheMap = new DefaultCacheMap<>(); + + ValueCache testValueCache = new NoOpValueCache<>(); + + CacheKey testCacheKey = new CacheKey() { + @Override + public Object getKey(Object input) { + return null; + } + }; + + ValueCacheOptions testValueCacheOptions = ValueCacheOptions.newOptions(); + + NoOpStatisticsCollector noOpStatisticsCollector = new NoOpStatisticsCollector(); + Supplier testStatisticsCollectorSupplier = () -> noOpStatisticsCollector; + + @Test + void canBuildOk() { + assertThat(optionsDefault.setBatchingEnabled(false).batchingEnabled(), + equalTo(false)); + assertThat(optionsDefault.setBatchLoaderScheduler(testBatchLoaderScheduler).getBatchLoaderScheduler(), + equalTo(testBatchLoaderScheduler)); + assertThat(optionsDefault.setBatchLoaderContextProvider(testBatchLoaderContextProvider).getBatchLoaderContextProvider(), + equalTo(testBatchLoaderContextProvider)); + assertThat(optionsDefault.setCacheMap(testCacheMap).cacheMap().get(), + equalTo(testCacheMap)); + assertThat(optionsDefault.setCachingEnabled(false).cachingEnabled(), + equalTo(false)); + assertThat(optionsDefault.setValueCacheOptions(testValueCacheOptions).getValueCacheOptions(), + equalTo(testValueCacheOptions)); + assertThat(optionsDefault.setCacheKeyFunction(testCacheKey).cacheKeyFunction().get(), + equalTo(testCacheKey)); + assertThat(optionsDefault.setValueCache(testValueCache).valueCache().get(), + equalTo(testValueCache)); + assertThat(optionsDefault.setMaxBatchSize(10).maxBatchSize(), + equalTo(10)); + assertThat(optionsDefault.setStatisticsCollector(testStatisticsCollectorSupplier).getStatisticsCollector(), + equalTo(testStatisticsCollectorSupplier.get())); + + DataLoaderOptions builtOptions = optionsDefault.transform(builder -> { + builder.setBatchingEnabled(false); + builder.setCachingExceptionsEnabled(false); + builder.setCachingEnabled(false); + builder.setBatchLoaderScheduler(testBatchLoaderScheduler); + builder.setBatchLoaderContextProvider(testBatchLoaderContextProvider); + builder.setCacheMap(testCacheMap); + builder.setValueCache(testValueCache); + builder.setCacheKeyFunction(testCacheKey); + builder.setValueCacheOptions(testValueCacheOptions); + builder.setMaxBatchSize(10); + builder.setStatisticsCollector(testStatisticsCollectorSupplier); + }); + + assertThat(builtOptions.batchingEnabled(), + equalTo(false)); + assertThat(builtOptions.getBatchLoaderScheduler(), + equalTo(testBatchLoaderScheduler)); + assertThat(builtOptions.getBatchLoaderContextProvider(), + equalTo(testBatchLoaderContextProvider)); + assertThat(builtOptions.cacheMap().get(), + equalTo(testCacheMap)); + assertThat(builtOptions.cachingEnabled(), + equalTo(false)); + 
assertThat(builtOptions.getValueCacheOptions(), + equalTo(testValueCacheOptions)); + assertThat(builtOptions.cacheKeyFunction().get(), + equalTo(testCacheKey)); + assertThat(builtOptions.valueCache().get(), + equalTo(testValueCache)); + assertThat(builtOptions.maxBatchSize(), + equalTo(10)); + assertThat(builtOptions.getStatisticsCollector(), + equalTo(testStatisticsCollectorSupplier.get())); + + } + + @Test + void canBuildViaBuilderOk() { + + DataLoaderOptions.Builder builder = DataLoaderOptions.newOptionsBuilder(); + builder.setBatchingEnabled(false); + builder.setCachingExceptionsEnabled(false); + builder.setCachingEnabled(false); + builder.setBatchLoaderScheduler(testBatchLoaderScheduler); + builder.setBatchLoaderContextProvider(testBatchLoaderContextProvider); + builder.setCacheMap(testCacheMap); + builder.setValueCache(testValueCache); + builder.setCacheKeyFunction(testCacheKey); + builder.setValueCacheOptions(testValueCacheOptions); + builder.setMaxBatchSize(10); + builder.setStatisticsCollector(testStatisticsCollectorSupplier); + + DataLoaderOptions builtOptions = builder.build(); + + assertThat(builtOptions.batchingEnabled(), + equalTo(false)); + assertThat(builtOptions.getBatchLoaderScheduler(), + equalTo(testBatchLoaderScheduler)); + assertThat(builtOptions.getBatchLoaderContextProvider(), + equalTo(testBatchLoaderContextProvider)); + assertThat(builtOptions.cacheMap().get(), + equalTo(testCacheMap)); + assertThat(builtOptions.cachingEnabled(), + equalTo(false)); + assertThat(builtOptions.getValueCacheOptions(), + equalTo(testValueCacheOptions)); + assertThat(builtOptions.cacheKeyFunction().get(), + equalTo(testCacheKey)); + assertThat(builtOptions.valueCache().get(), + equalTo(testValueCache)); + assertThat(builtOptions.maxBatchSize(), + equalTo(10)); + assertThat(builtOptions.getStatisticsCollector(), + equalTo(testStatisticsCollectorSupplier.get())); + } + + @Test + void canCopyExistingOptionValuesOnTransform() { + + DataLoaderInstrumentation instrumentation1 = new DataLoaderInstrumentation() { + }; + DataLoaderInstrumentation instrumentation2 = new DataLoaderInstrumentation() { + }; + BatchLoaderContextProvider contextProvider1 = () -> null; + + DataLoaderOptions startingOptions = DataLoaderOptions.newOptionsBuilder().setBatchingEnabled(false) + .setCachingEnabled(false) + .setInstrumentation(instrumentation1) + .setBatchLoaderContextProvider(contextProvider1) + .build(); + + assertThat(startingOptions.batchingEnabled(), equalTo(false)); + assertThat(startingOptions.cachingEnabled(), equalTo(false)); + assertThat(startingOptions.getInstrumentation(), equalTo(instrumentation1)); + assertThat(startingOptions.getBatchLoaderContextProvider(), equalTo(contextProvider1)); + + DataLoaderOptions newOptions = startingOptions.transform(builder -> + builder.setBatchingEnabled(true).setInstrumentation(instrumentation2)); + + + // immutable + assertThat(newOptions, CoreMatchers.not(startingOptions)); + assertThat(startingOptions.batchingEnabled(), equalTo(false)); + assertThat(startingOptions.cachingEnabled(), equalTo(false)); + assertThat(startingOptions.getInstrumentation(), equalTo(instrumentation1)); + assertThat(startingOptions.getBatchLoaderContextProvider(), equalTo(contextProvider1)); + + // stayed the same + assertThat(newOptions.cachingEnabled(), equalTo(false)); + assertThat(newOptions.getBatchLoaderContextProvider(), equalTo(contextProvider1)); + + // was changed + assertThat(newOptions.batchingEnabled(), equalTo(true)); + assertThat(newOptions.getInstrumentation(), 
equalTo(instrumentation2)); + + } +} \ No newline at end of file diff --git a/src/test/java/org/dataloader/DataLoaderRegistryTest.java b/src/test/java/org/dataloader/DataLoaderRegistryTest.java index aeaf668..bd1534d 100644 --- a/src/test/java/org/dataloader/DataLoaderRegistryTest.java +++ b/src/test/java/org/dataloader/DataLoaderRegistryTest.java @@ -2,16 +2,16 @@ import org.dataloader.stats.SimpleStatisticsCollector; import org.dataloader.stats.Statistics; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.concurrent.CompletableFuture; import static java.util.Arrays.asList; import static org.dataloader.DataLoaderFactory.newDataLoader; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItems; import static org.hamcrest.Matchers.sameInstance; -import static org.junit.Assert.assertThat; public class DataLoaderRegistryTest { final BatchLoader identityBatchLoader = CompletableFuture::completedFuture; diff --git a/src/test/java/org/dataloader/DataLoaderStatsTest.java b/src/test/java/org/dataloader/DataLoaderStatsTest.java index c2faa50..b8393e6 100644 --- a/src/test/java/org/dataloader/DataLoaderStatsTest.java +++ b/src/test/java/org/dataloader/DataLoaderStatsTest.java @@ -9,19 +9,20 @@ import org.dataloader.stats.context.IncrementCacheHitCountStatisticsContext; import org.dataloader.stats.context.IncrementLoadCountStatisticsContext; import org.dataloader.stats.context.IncrementLoadErrorCountStatisticsContext; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.ArrayList; import java.util.List; +import java.util.Map; import java.util.concurrent.CompletableFuture; import static java.util.Arrays.asList; import static java.util.Collections.singletonList; import static java.util.concurrent.CompletableFuture.completedFuture; import static org.dataloader.DataLoaderFactory.newDataLoader; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; -import static org.junit.Assert.assertThat; /** * Tests related to stats. 
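The new DataLoaderOptionsTest above pins down that options are copy-on-transform: transform() applies the overrides to a fresh DataLoaderOptions instance and leaves the original untouched. A compact sketch of that behaviour using only calls that appear in the test; the expectation that maxBatchSize is carried across is inferred from the copy semantics the test asserts for the other values.

import org.dataloader.DataLoaderOptions;

public class OptionsTransformSketch {
    public static void main(String[] args) {
        DataLoaderOptions.Builder builder = DataLoaderOptions.newOptionsBuilder();
        builder.setBatchingEnabled(false);
        builder.setMaxBatchSize(10);
        DataLoaderOptions original = builder.build();

        // transform(...) copies the current values, applies the overrides,
        // and returns a brand-new DataLoaderOptions instance
        DataLoaderOptions tweaked = original.transform(b -> b.setBatchingEnabled(true));

        System.out.println(original.batchingEnabled()); // false - the original is unchanged
        System.out.println(tweaked.batchingEnabled());  // true  - overridden
        System.out.println(tweaked.maxBatchSize());     // 10    - carried across
    }
}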
DataLoaderTest is getting to big and needs refactoring @@ -118,9 +119,10 @@ public void stats_are_collected_with_caching_disabled() { loader.load("A"); loader.load("B"); loader.loadMany(asList("C", "D")); + loader.loadMany(Map.of("E", "E", "F", "F")); Statistics stats = loader.getStatistics(); - assertThat(stats.getLoadCount(), equalTo(4L)); + assertThat(stats.getLoadCount(), equalTo(6L)); assertThat(stats.getBatchInvokeCount(), equalTo(0L)); assertThat(stats.getBatchLoadCount(), equalTo(0L)); assertThat(stats.getCacheHitCount(), equalTo(0L)); @@ -128,9 +130,9 @@ public void stats_are_collected_with_caching_disabled() { loader.dispatch(); stats = loader.getStatistics(); - assertThat(stats.getLoadCount(), equalTo(4L)); + assertThat(stats.getLoadCount(), equalTo(6L)); assertThat(stats.getBatchInvokeCount(), equalTo(1L)); - assertThat(stats.getBatchLoadCount(), equalTo(4L)); + assertThat(stats.getBatchLoadCount(), equalTo(6L)); assertThat(stats.getCacheHitCount(), equalTo(0L)); loader.load("A"); @@ -139,9 +141,9 @@ public void stats_are_collected_with_caching_disabled() { loader.dispatch(); stats = loader.getStatistics(); - assertThat(stats.getLoadCount(), equalTo(6L)); + assertThat(stats.getLoadCount(), equalTo(8L)); assertThat(stats.getBatchInvokeCount(), equalTo(2L)); - assertThat(stats.getBatchLoadCount(), equalTo(6L)); + assertThat(stats.getBatchLoadCount(), equalTo(8L)); assertThat(stats.getCacheHitCount(), equalTo(0L)); } diff --git a/src/test/java/org/dataloader/DataLoaderTest.java b/src/test/java/org/dataloader/DataLoaderTest.java index bc9ecda..069d390 100644 --- a/src/test/java/org/dataloader/DataLoaderTest.java +++ b/src/test/java/org/dataloader/DataLoaderTest.java @@ -16,39 +16,44 @@ package org.dataloader; +import org.awaitility.Duration; import org.dataloader.fixtures.CustomCacheMap; import org.dataloader.fixtures.JsonObject; -import org.dataloader.fixtures.TestKit; import org.dataloader.fixtures.User; import org.dataloader.fixtures.UserManager; +import org.dataloader.fixtures.parameterized.ListDataLoaderFactory; +import org.dataloader.fixtures.parameterized.MappedDataLoaderFactory; +import org.dataloader.fixtures.parameterized.MappedPublisherDataLoaderFactory; +import org.dataloader.fixtures.parameterized.PublisherDataLoaderFactory; +import org.dataloader.fixtures.parameterized.TestDataLoaderFactory; +import org.dataloader.fixtures.parameterized.TestReactiveDataLoaderFactory; import org.dataloader.impl.CompletableFutureKit; -import org.junit.Test; +import org.dataloader.impl.DataLoaderAssertionException; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.Optional; +import java.util.*; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Supplier; import java.util.stream.Collectors; import static java.util.Arrays.asList; -import static java.util.Collections.emptyList; -import static java.util.Collections.singletonList; +import static java.util.Collections.*; +import static java.util.concurrent.CompletableFuture.*; import static org.awaitility.Awaitility.await; import static org.dataloader.DataLoaderFactory.newDataLoader; import static 
org.dataloader.DataLoaderOptions.newOptions; +import static org.dataloader.fixtures.TestKit.areAllDone; import static org.dataloader.fixtures.TestKit.listFrom; import static org.dataloader.impl.CompletableFutureKit.cause; -import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertThat; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.*; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; /** * Tests for {@link DataLoader}. @@ -65,7 +70,7 @@ public class DataLoaderTest { @Test public void should_Build_a_really_really_simple_data_loader() { AtomicBoolean success = new AtomicBoolean(); - DataLoader identityLoader = newDataLoader(keysAsValues()); + DataLoader identityLoader = newDataLoader(CompletableFuture::completedFuture); CompletionStage future1 = identityLoader.load(1); @@ -78,9 +83,42 @@ public void should_Build_a_really_really_simple_data_loader() { } @Test - public void should_Support_loading_multiple_keys_in_one_call() { + public void basic_map_batch_loading() { + MappedBatchLoader evensOnlyMappedBatchLoader = (keys) -> { + Map mapOfResults = new HashMap<>(); + + AtomicInteger index = new AtomicInteger(); + keys.forEach(k -> { + int i = index.getAndIncrement(); + if (i % 2 == 0) { + mapOfResults.put(k, k); + } + }); + return completedFuture(mapOfResults); + }; + DataLoader loader = DataLoaderFactory.newMappedDataLoader(evensOnlyMappedBatchLoader); + + final List keys = asList("C", "D"); + final Map keysAndContexts = new LinkedHashMap<>(); + keysAndContexts.put("E", null); + keysAndContexts.put("F", null); + + loader.load("A"); + loader.load("B"); + loader.loadMany(keys); + loader.loadMany(keysAndContexts); + + List results = loader.dispatchAndJoin(); + + assertThat(results.size(), equalTo(6)); + assertThat(results, equalTo(asList("A", null, "C", null, "E", null))); + } + + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_Support_loading_multiple_keys_in_one_call_via_list(TestDataLoaderFactory factory) { AtomicBoolean success = new AtomicBoolean(); - DataLoader identityLoader = newDataLoader(keysAsValues()); + DataLoader identityLoader = factory.idLoader(new DataLoaderOptions(), new ArrayList<>()); CompletionStage> futureAll = identityLoader.loadMany(asList(1, 2)); futureAll.thenAccept(promisedValues -> { @@ -92,10 +130,31 @@ public void should_Support_loading_multiple_keys_in_one_call() { assertThat(futureAll.toCompletableFuture().join(), equalTo(asList(1, 2))); } - @Test - public void should_Resolve_to_empty_list_when_no_keys_supplied() { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_Support_loading_multiple_keys_in_one_call_via_map(TestDataLoaderFactory factory) { AtomicBoolean success = new AtomicBoolean(); - DataLoader identityLoader = newDataLoader(keysAsValues()); + DataLoader identityLoader = factory.idLoader(new DataLoaderOptions(), new ArrayList<>()); + + final Map keysAndContexts = new LinkedHashMap<>(); + keysAndContexts.put(1, null); + keysAndContexts.put(2, null); + + CompletionStage> futureAll = identityLoader.loadMany(keysAndContexts); + futureAll.thenAccept(promisedValues -> { + assertThat(promisedValues.size(), is(2)); + success.set(true); + }); + 
identityLoader.dispatch(); + await().untilAtomic(success, is(true)); + assertThat(futureAll.toCompletableFuture().join(), equalTo(Map.of(1, 1, 2, 2))); + } + + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_Resolve_to_empty_list_when_no_keys_supplied(TestDataLoaderFactory factory) { + AtomicBoolean success = new AtomicBoolean(); + DataLoader identityLoader = factory.idLoader(new DataLoaderOptions(), new ArrayList<>()); CompletableFuture> futureEmpty = identityLoader.loadMany(emptyList()); futureEmpty.thenAccept(promisedValues -> { assertThat(promisedValues.size(), is(0)); @@ -106,10 +165,26 @@ public void should_Resolve_to_empty_list_when_no_keys_supplied() { assertThat(futureEmpty.join(), empty()); } - @Test - public void should_Return_zero_entries_dispatched_when_no_keys_supplied() { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_Resolve_to_empty_map_when_no_keys_supplied(TestDataLoaderFactory factory) { + AtomicBoolean success = new AtomicBoolean(); + DataLoader identityLoader = factory.idLoader(new DataLoaderOptions(), new ArrayList<>()); + CompletableFuture> futureEmpty = identityLoader.loadMany(emptyMap()); + futureEmpty.thenAccept(promisedValues -> { + assertThat(promisedValues.size(), is(0)); + success.set(true); + }); + identityLoader.dispatch(); + await().untilAtomic(success, is(true)); + assertThat(futureEmpty.join(), anEmptyMap()); + } + + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_Return_zero_entries_dispatched_when_no_keys_supplied_via_list(TestDataLoaderFactory factory) { AtomicBoolean success = new AtomicBoolean(); - DataLoader identityLoader = newDataLoader(keysAsValues()); + DataLoader identityLoader = factory.idLoader(new DataLoaderOptions(), new ArrayList<>()); CompletableFuture> futureEmpty = identityLoader.loadMany(emptyList()); futureEmpty.thenAccept(promisedValues -> { assertThat(promisedValues.size(), is(0)); @@ -120,10 +195,26 @@ public void should_Return_zero_entries_dispatched_when_no_keys_supplied() { assertThat(dispatchResult.getKeysCount(), equalTo(0)); } - @Test - public void should_Batch_multiple_requests() throws ExecutionException, InterruptedException { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_Return_zero_entries_dispatched_when_no_keys_supplied_via_map(TestDataLoaderFactory factory) { + AtomicBoolean success = new AtomicBoolean(); + DataLoader identityLoader = factory.idLoader(new DataLoaderOptions(), new ArrayList<>()); + CompletableFuture> futureEmpty = identityLoader.loadMany(emptyMap()); + futureEmpty.thenAccept(promisedValues -> { + assertThat(promisedValues.size(), is(0)); + success.set(true); + }); + DispatchResult dispatchResult = identityLoader.dispatchWithCounts(); + await().untilAtomic(success, is(true)); + assertThat(dispatchResult.getKeysCount(), equalTo(0)); + } + + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_Batch_multiple_requests(TestDataLoaderFactory factory) throws ExecutionException, InterruptedException { List> loadCalls = new ArrayList<>(); - DataLoader identityLoader = idLoader(new DataLoaderOptions(), loadCalls); + DataLoader identityLoader = factory.idLoader(new DataLoaderOptions(), loadCalls); CompletableFuture future1 = 
identityLoader.load(1); CompletableFuture future2 = identityLoader.load(2); @@ -135,10 +226,11 @@ public void should_Batch_multiple_requests() throws ExecutionException, Interrup assertThat(loadCalls, equalTo(singletonList(asList(1, 2)))); } - @Test - public void should_Return_number_of_batched_entries() { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_Return_number_of_batched_entries(TestDataLoaderFactory factory) { List> loadCalls = new ArrayList<>(); - DataLoader identityLoader = idLoader(new DataLoaderOptions(), loadCalls); + DataLoader identityLoader = factory.idLoader(new DataLoaderOptions(), loadCalls); CompletableFuture future1 = identityLoader.load(1); CompletableFuture future2 = identityLoader.load(2); @@ -149,10 +241,11 @@ public void should_Return_number_of_batched_entries() { assertThat(dispatchResult.getPromisedResults().isDone(), equalTo(true)); } - @Test - public void should_Coalesce_identical_requests() throws ExecutionException, InterruptedException { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_Coalesce_identical_requests(TestDataLoaderFactory factory) throws ExecutionException, InterruptedException { List> loadCalls = new ArrayList<>(); - DataLoader identityLoader = idLoader(new DataLoaderOptions(), loadCalls); + DataLoader identityLoader = factory.idLoader(new DataLoaderOptions(), loadCalls); CompletableFuture future1a = identityLoader.load(1); CompletableFuture future1b = identityLoader.load(1); @@ -165,10 +258,11 @@ public void should_Coalesce_identical_requests() throws ExecutionException, Inte assertThat(loadCalls, equalTo(singletonList(singletonList(1)))); } - @Test - public void should_Cache_repeated_requests() throws ExecutionException, InterruptedException { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_Cache_repeated_requests(TestDataLoaderFactory factory) throws ExecutionException, InterruptedException { List> loadCalls = new ArrayList<>(); - DataLoader identityLoader = idLoader(new DataLoaderOptions(), loadCalls); + DataLoader identityLoader = factory.idLoader(new DataLoaderOptions(), loadCalls); CompletableFuture future1 = identityLoader.load("A"); CompletableFuture future2 = identityLoader.load("B"); @@ -200,10 +294,11 @@ public void should_Cache_repeated_requests() throws ExecutionException, Interrup assertThat(loadCalls, equalTo(asList(asList("A", "B"), singletonList("C")))); } - @Test - public void should_Not_redispatch_previous_load() throws ExecutionException, InterruptedException { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_Not_redispatch_previous_load(TestDataLoaderFactory factory) throws ExecutionException, InterruptedException { List> loadCalls = new ArrayList<>(); - DataLoader identityLoader = idLoader(new DataLoaderOptions(), loadCalls); + DataLoader identityLoader = factory.idLoader(new DataLoaderOptions(), loadCalls); CompletableFuture future1 = identityLoader.load("A"); identityLoader.dispatch(); @@ -217,10 +312,11 @@ public void should_Not_redispatch_previous_load() throws ExecutionException, Int assertThat(loadCalls, equalTo(asList(singletonList("A"), singletonList("B")))); } - @Test - public void should_Cache_on_redispatch() throws ExecutionException, InterruptedException { + @ParameterizedTest + 
@MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_Cache_on_redispatch(TestDataLoaderFactory factory) throws ExecutionException, InterruptedException { List> loadCalls = new ArrayList<>(); - DataLoader identityLoader = idLoader(new DataLoaderOptions(), loadCalls); + DataLoader identityLoader = factory.idLoader(new DataLoaderOptions(), loadCalls); CompletableFuture future1 = identityLoader.load("A"); identityLoader.dispatch(); @@ -228,16 +324,24 @@ public void should_Cache_on_redispatch() throws ExecutionException, InterruptedE CompletableFuture> future2 = identityLoader.loadMany(asList("A", "B")); identityLoader.dispatch(); - await().until(() -> future1.isDone() && future2.isDone()); + Map keysAndContexts = new LinkedHashMap<>(); + keysAndContexts.put("A", null); + keysAndContexts.put("C", null); + CompletableFuture> future3 = identityLoader.loadMany(keysAndContexts); + identityLoader.dispatch(); + + await().until(() -> future1.isDone() && future2.isDone() && future3.isDone()); assertThat(future1.get(), equalTo("A")); assertThat(future2.get(), equalTo(asList("A", "B"))); - assertThat(loadCalls, equalTo(asList(singletonList("A"), singletonList("B")))); + assertThat(future3.get(), equalTo(keysAndContexts.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getKey)))); + assertThat(loadCalls, equalTo(asList(singletonList("A"), singletonList("B"), singletonList("C")))); } - @Test - public void should_Clear_single_value_in_loader() throws ExecutionException, InterruptedException { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_Clear_single_value_in_loader(TestDataLoaderFactory factory) throws ExecutionException, InterruptedException { List> loadCalls = new ArrayList<>(); - DataLoader identityLoader = idLoader(new DataLoaderOptions(), loadCalls); + DataLoader identityLoader = factory.idLoader(new DataLoaderOptions(), loadCalls); CompletableFuture future1 = identityLoader.load("A"); CompletableFuture future2 = identityLoader.load("B"); @@ -262,10 +366,11 @@ public void should_Clear_single_value_in_loader() throws ExecutionException, Int assertThat(loadCalls, equalTo(asList(asList("A", "B"), singletonList("A")))); } - @Test - public void should_Clear_all_values_in_loader() throws ExecutionException, InterruptedException { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_Clear_all_values_in_loader(TestDataLoaderFactory factory) throws ExecutionException, InterruptedException { List> loadCalls = new ArrayList<>(); - DataLoader identityLoader = idLoader(new DataLoaderOptions(), loadCalls); + DataLoader identityLoader = factory.idLoader(new DataLoaderOptions(), loadCalls); CompletableFuture future1 = identityLoader.load("A"); CompletableFuture future2 = identityLoader.load("B"); @@ -289,10 +394,11 @@ public void should_Clear_all_values_in_loader() throws ExecutionException, Inter assertThat(loadCalls, equalTo(asList(asList("A", "B"), asList("A", "B")))); } - @Test - public void should_Allow_priming_the_cache() throws ExecutionException, InterruptedException { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_Allow_priming_the_cache(TestDataLoaderFactory factory) throws ExecutionException, InterruptedException { List> loadCalls = new ArrayList<>(); - DataLoader identityLoader = idLoader(new 
DataLoaderOptions(), loadCalls); + DataLoader identityLoader = factory.idLoader(new DataLoaderOptions(), loadCalls); DataLoader dlFluency = identityLoader.prime("A", "A"); assertThat(dlFluency, equalTo(identityLoader)); @@ -307,10 +413,11 @@ public void should_Allow_priming_the_cache() throws ExecutionException, Interrup assertThat(loadCalls, equalTo(singletonList(singletonList("B")))); } - @Test - public void should_Not_prime_keys_that_already_exist() throws ExecutionException, InterruptedException { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_Not_prime_keys_that_already_exist(TestDataLoaderFactory factory) throws ExecutionException, InterruptedException { List> loadCalls = new ArrayList<>(); - DataLoader identityLoader = idLoader(new DataLoaderOptions(), loadCalls); + DataLoader identityLoader = factory.idLoader(new DataLoaderOptions(), loadCalls); identityLoader.prime("A", "X"); @@ -335,10 +442,11 @@ public void should_Not_prime_keys_that_already_exist() throws ExecutionException assertThat(loadCalls, equalTo(singletonList(singletonList("B")))); } - @Test - public void should_Allow_to_forcefully_prime_the_cache() throws ExecutionException, InterruptedException { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_Allow_to_forcefully_prime_the_cache(TestDataLoaderFactory factory) throws ExecutionException, InterruptedException { List> loadCalls = new ArrayList<>(); - DataLoader identityLoader = idLoader(new DataLoaderOptions(), loadCalls); + DataLoader identityLoader = factory.idLoader(new DataLoaderOptions(), loadCalls); identityLoader.prime("A", "X"); @@ -363,12 +471,13 @@ public void should_Allow_to_forcefully_prime_the_cache() throws ExecutionExcepti assertThat(loadCalls, equalTo(singletonList(singletonList("B")))); } - @Test - public void should_Allow_priming_the_cache_with_a_future() throws ExecutionException, InterruptedException { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_Allow_priming_the_cache_with_a_future(TestDataLoaderFactory factory) throws ExecutionException, InterruptedException { List> loadCalls = new ArrayList<>(); - DataLoader identityLoader = idLoader(new DataLoaderOptions(), loadCalls); + DataLoader identityLoader = factory.idLoader(new DataLoaderOptions(), loadCalls); - DataLoader dlFluency = identityLoader.prime("A", CompletableFuture.completedFuture("A")); + DataLoader dlFluency = identityLoader.prime("A", completedFuture("A")); assertThat(dlFluency, equalTo(identityLoader)); CompletableFuture future1 = identityLoader.load("A"); @@ -381,10 +490,11 @@ public void should_Allow_priming_the_cache_with_a_future() throws ExecutionExcep assertThat(loadCalls, equalTo(singletonList(singletonList("B")))); } - @Test - public void should_not_Cache_failed_fetches_on_complete_failure() { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_not_Cache_failed_fetches_on_complete_failure(TestDataLoaderFactory factory) { List> loadCalls = new ArrayList<>(); - DataLoader errorLoader = idLoaderBlowsUps(new DataLoaderOptions(), loadCalls); + DataLoader errorLoader = factory.idLoaderBlowsUps(new DataLoaderOptions(), loadCalls); CompletableFuture future1 = errorLoader.load(1); errorLoader.dispatch(); @@ -402,10 +512,11 @@ public void 
should_not_Cache_failed_fetches_on_complete_failure() { assertThat(loadCalls, equalTo(asList(singletonList(1), singletonList(1)))); } - @Test - public void should_Resolve_to_error_to_indicate_failure() throws ExecutionException, InterruptedException { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_Resolve_to_error_to_indicate_failure(TestDataLoaderFactory factory) throws ExecutionException, InterruptedException { List> loadCalls = new ArrayList<>(); - DataLoader evenLoader = idLoaderOddEvenExceptions(new DataLoaderOptions(), loadCalls); + DataLoader evenLoader = factory.idLoaderOddEvenExceptions(new DataLoaderOptions(), loadCalls); CompletableFuture future1 = evenLoader.load(1); evenLoader.dispatch(); @@ -424,11 +535,12 @@ public void should_Resolve_to_error_to_indicate_failure() throws ExecutionExcept // Accept any kind of key. - @Test - public void should_Represent_failures_and_successes_simultaneously() throws ExecutionException, InterruptedException { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_Represent_failures_and_successes_simultaneously(TestDataLoaderFactory factory) throws ExecutionException, InterruptedException { AtomicBoolean success = new AtomicBoolean(); List> loadCalls = new ArrayList<>(); - DataLoader evenLoader = idLoaderOddEvenExceptions(new DataLoaderOptions(), loadCalls); + DataLoader evenLoader = factory.idLoaderOddEvenExceptions(new DataLoaderOptions(), loadCalls); CompletableFuture future1 = evenLoader.load(1); CompletableFuture future2 = evenLoader.load(2); @@ -450,10 +562,11 @@ public void should_Represent_failures_and_successes_simultaneously() throws Exec // Accepts options - @Test - public void should_Cache_failed_fetches() { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_Cache_failed_fetches(TestDataLoaderFactory factory) { List> loadCalls = new ArrayList<>(); - DataLoader errorLoader = idLoaderAllExceptions(new DataLoaderOptions(), loadCalls); + DataLoader errorLoader = factory.idLoaderAllExceptions(new DataLoaderOptions(), loadCalls); CompletableFuture future1 = errorLoader.load(1); errorLoader.dispatch(); @@ -472,11 +585,12 @@ public void should_Cache_failed_fetches() { assertThat(loadCalls, equalTo(singletonList(singletonList(1)))); } - @Test - public void should_NOT_Cache_failed_fetches_if_told_not_too() { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_NOT_Cache_failed_fetches_if_told_not_too(TestDataLoaderFactory factory) { DataLoaderOptions options = DataLoaderOptions.newOptions().setCachingExceptionsEnabled(false); List> loadCalls = new ArrayList<>(); - DataLoader errorLoader = idLoaderAllExceptions(options, loadCalls); + DataLoader errorLoader = factory.idLoaderAllExceptions(options, loadCalls); CompletableFuture future1 = errorLoader.load(1); errorLoader.dispatch(); @@ -498,10 +612,11 @@ public void should_NOT_Cache_failed_fetches_if_told_not_too() { // Accepts object key in custom cacheKey function - @Test - public void should_Handle_priming_the_cache_with_an_error() { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_Handle_priming_the_cache_with_an_error(TestDataLoaderFactory factory) { List> loadCalls = new ArrayList<>(); - DataLoader identityLoader = 
idLoader(new DataLoaderOptions(), loadCalls); + DataLoader identityLoader = factory.idLoader(new DataLoaderOptions(), loadCalls); identityLoader.prime(1, new IllegalStateException("Error")); @@ -514,10 +629,11 @@ public void should_Handle_priming_the_cache_with_an_error() { assertThat(loadCalls, equalTo(emptyList())); } - @Test - public void should_Clear_values_from_cache_after_errors() { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_Clear_values_from_cache_after_errors(TestDataLoaderFactory factory) { List> loadCalls = new ArrayList<>(); - DataLoader errorLoader = idLoaderBlowsUps(new DataLoaderOptions(), loadCalls); + DataLoader errorLoader = factory.idLoaderBlowsUps(new DataLoaderOptions(), loadCalls); CompletableFuture future1 = errorLoader.load(1); future1.handle((value, t) -> { @@ -549,10 +665,11 @@ public void should_Clear_values_from_cache_after_errors() { assertThat(loadCalls, equalTo(asList(singletonList(1), singletonList(1)))); } - @Test - public void should_Propagate_error_to_all_loads() { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_Propagate_error_to_all_loads(TestDataLoaderFactory factory) { List> loadCalls = new ArrayList<>(); - DataLoader errorLoader = idLoaderBlowsUps(new DataLoaderOptions(), loadCalls); + DataLoader errorLoader = factory.idLoaderBlowsUps(new DataLoaderOptions(), loadCalls); CompletableFuture future1 = errorLoader.load(1); CompletableFuture future2 = errorLoader.load(2); @@ -572,10 +689,11 @@ public void should_Propagate_error_to_all_loads() { assertThat(loadCalls, equalTo(singletonList(asList(1, 2)))); } - @Test - public void should_Accept_objects_as_keys() { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_Accept_objects_as_keys(TestDataLoaderFactory factory) { List> loadCalls = new ArrayList<>(); - DataLoader identityLoader = idLoader(new DataLoaderOptions(), loadCalls); + DataLoader identityLoader = factory.idLoader(new DataLoaderOptions(), loadCalls); Object keyA = new Object(); Object keyB = new Object(); @@ -613,11 +731,12 @@ public void should_Accept_objects_as_keys() { assertThat(loadCalls.get(1).toArray()[0], equalTo(keyA)); } - @Test - public void should_Disable_caching() throws ExecutionException, InterruptedException { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_Disable_caching(TestDataLoaderFactory factory) throws ExecutionException, InterruptedException { List> loadCalls = new ArrayList<>(); DataLoader identityLoader = - idLoader(newOptions().setCachingEnabled(false), loadCalls); + factory.idLoader(newOptions().setCachingEnabled(false), loadCalls); CompletableFuture future1 = identityLoader.load("A"); CompletableFuture future2 = identityLoader.load("B"); @@ -650,11 +769,12 @@ public void should_Disable_caching() throws ExecutionException, InterruptedExcep asList("A", "C"), asList("A", "B", "C")))); } - @Test - public void should_work_with_duplicate_keys_when_caching_disabled() throws ExecutionException, InterruptedException { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_work_with_duplicate_keys_when_caching_disabled(TestDataLoaderFactory factory) throws ExecutionException, InterruptedException { List> loadCalls = new ArrayList<>(); DataLoader 
identityLoader = - idLoader(newOptions().setCachingEnabled(false), loadCalls); + factory.idLoader(newOptions().setCachingEnabled(false), loadCalls); CompletableFuture future1 = identityLoader.load("A"); CompletableFuture future2 = identityLoader.load("B"); @@ -665,14 +785,19 @@ public void should_work_with_duplicate_keys_when_caching_disabled() throws Execu assertThat(future1.get(), equalTo("A")); assertThat(future2.get(), equalTo("B")); assertThat(future3.get(), equalTo("A")); - assertThat(loadCalls, equalTo(singletonList(asList("A", "B", "A")))); + if (factory.unwrap() instanceof MappedDataLoaderFactory || factory.unwrap() instanceof MappedPublisherDataLoaderFactory) { + assertThat(loadCalls, equalTo(singletonList(asList("A", "B")))); + } else { + assertThat(loadCalls, equalTo(singletonList(asList("A", "B", "A")))); + } } - @Test - public void should_work_with_duplicate_keys_when_caching_enabled() throws ExecutionException, InterruptedException { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_work_with_duplicate_keys_when_caching_enabled(TestDataLoaderFactory factory) throws ExecutionException, InterruptedException { List> loadCalls = new ArrayList<>(); DataLoader identityLoader = - idLoader(newOptions().setCachingEnabled(true), loadCalls); + factory.idLoader(newOptions().setCachingEnabled(true), loadCalls); CompletableFuture future1 = identityLoader.load("A"); CompletableFuture future2 = identityLoader.load("B"); @@ -688,17 +813,18 @@ public void should_work_with_duplicate_keys_when_caching_enabled() throws Execut // It is resilient to job queue ordering - @Test - public void should_Accept_objects_with_a_complex_key() throws ExecutionException, InterruptedException { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_Accept_objects_with_a_complex_key(TestDataLoaderFactory factory) throws ExecutionException, InterruptedException { List> loadCalls = new ArrayList<>(); DataLoaderOptions options = newOptions().setCacheKeyFunction(getJsonObjectCacheMapFn()); - DataLoader identityLoader = idLoader(options, loadCalls); + DataLoader identityLoader = factory.idLoader(options, loadCalls); JsonObject key1 = new JsonObject().put("id", 123); JsonObject key2 = new JsonObject().put("id", 123); - CompletableFuture future1 = identityLoader.load(key1); - CompletableFuture future2 = identityLoader.load(key2); + CompletableFuture future1 = identityLoader.load(key1); + CompletableFuture future2 = identityLoader.load(key2); identityLoader.dispatch(); await().until(() -> future1.isDone() && future2.isDone()); @@ -709,22 +835,23 @@ public void should_Accept_objects_with_a_complex_key() throws ExecutionException // Helper methods - @Test - public void should_Clear_objects_with_complex_key() throws ExecutionException, InterruptedException { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_Clear_objects_with_complex_key(TestDataLoaderFactory factory) throws ExecutionException, InterruptedException { List> loadCalls = new ArrayList<>(); DataLoaderOptions options = newOptions().setCacheKeyFunction(getJsonObjectCacheMapFn()); - DataLoader identityLoader = idLoader(options, loadCalls); + DataLoader identityLoader = factory.idLoader(options, loadCalls); JsonObject key1 = new JsonObject().put("id", 123); JsonObject key2 = new JsonObject().put("id", 123); - CompletableFuture future1 = 
identityLoader.load(key1); + CompletableFuture future1 = identityLoader.load(key1); identityLoader.dispatch(); await().until(future1::isDone); identityLoader.clear(key2); // clear equivalent object key - CompletableFuture future2 = identityLoader.load(key1); + CompletableFuture future2 = identityLoader.load(key1); identityLoader.dispatch(); await().until(future2::isDone); @@ -733,33 +860,35 @@ public void should_Clear_objects_with_complex_key() throws ExecutionException, I assertThat(future2.get(), equalTo(key1)); } - @Test - public void should_Accept_objects_with_different_order_of_keys() throws ExecutionException, InterruptedException { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_Accept_objects_with_different_order_of_keys(TestDataLoaderFactory factory) throws ExecutionException, InterruptedException { List> loadCalls = new ArrayList<>(); DataLoaderOptions options = newOptions().setCacheKeyFunction(getJsonObjectCacheMapFn()); - DataLoader identityLoader = idLoader(options, loadCalls); + DataLoader identityLoader = factory.idLoader(options, loadCalls); JsonObject key1 = new JsonObject().put("a", 123).put("b", 321); JsonObject key2 = new JsonObject().put("b", 321).put("a", 123); // Fetches as expected - CompletableFuture future1 = identityLoader.load(key1); - CompletableFuture future2 = identityLoader.load(key2); + CompletableFuture future1 = identityLoader.load(key1); + CompletableFuture future2 = identityLoader.load(key2); identityLoader.dispatch(); await().until(() -> future1.isDone() && future2.isDone()); assertThat(loadCalls, equalTo(singletonList(singletonList(key1)))); assertThat(loadCalls.size(), equalTo(1)); assertThat(future1.get(), equalTo(key1)); - assertThat(future2.get(), equalTo(key1)); + assertThat(future2.get(), equalTo(key2)); } - @Test - public void should_Allow_priming_the_cache_with_an_object_key() throws ExecutionException, InterruptedException { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_Allow_priming_the_cache_with_an_object_key(TestDataLoaderFactory factory) throws ExecutionException, InterruptedException { List> loadCalls = new ArrayList<>(); DataLoaderOptions options = newOptions().setCacheKeyFunction(getJsonObjectCacheMapFn()); - DataLoader identityLoader = idLoader(options, loadCalls); + DataLoader identityLoader = factory.idLoader(options, loadCalls); JsonObject key1 = new JsonObject().put("id", 123); JsonObject key2 = new JsonObject().put("id", 123); @@ -776,12 +905,13 @@ public void should_Allow_priming_the_cache_with_an_object_key() throws Execution assertThat(future2.get(), equalTo(key1)); } - @Test - public void should_Accept_a_custom_cache_map_implementation() throws ExecutionException, InterruptedException { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_Accept_a_custom_cache_map_implementation(TestDataLoaderFactory factory) throws ExecutionException, InterruptedException { CustomCacheMap customMap = new CustomCacheMap(); List> loadCalls = new ArrayList<>(); DataLoaderOptions options = newOptions().setCacheMap(customMap); - DataLoader identityLoader = idLoader(options, loadCalls); + DataLoader identityLoader = factory.idLoader(options, loadCalls); // Fetches as expected @@ -827,12 +957,13 @@ public void should_Accept_a_custom_cache_map_implementation() throws ExecutionEx 
assertArrayEquals(customMap.stash.keySet().toArray(), emptyList().toArray()); } - @Test - public void should_degrade_gracefully_if_cache_get_throws() { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_degrade_gracefully_if_cache_get_throws(TestDataLoaderFactory factory) { CacheMap cache = new ThrowingCacheMap(); DataLoaderOptions options = newOptions().setCachingEnabled(true).setCacheMap(cache); List> loadCalls = new ArrayList<>(); - DataLoader identityLoader = idLoader(options, loadCalls); + DataLoader identityLoader = factory.idLoader(options, loadCalls); assertThat(identityLoader.getIfPresent("a"), equalTo(Optional.empty())); @@ -841,11 +972,12 @@ public void should_degrade_gracefully_if_cache_get_throws() { assertThat(future.join(), equalTo("a")); } - @Test - public void batching_disabled_should_dispatch_immediately() { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void batching_disabled_should_dispatch_immediately(TestDataLoaderFactory factory) { List> loadCalls = new ArrayList<>(); DataLoaderOptions options = newOptions().setBatchingEnabled(false); - DataLoader identityLoader = idLoader(options, loadCalls); + DataLoader identityLoader = factory.idLoader(options, loadCalls); CompletableFuture fa = identityLoader.load("A"); CompletableFuture fb = identityLoader.load("B"); @@ -869,11 +1001,12 @@ public void batching_disabled_should_dispatch_immediately() { } - @Test - public void batching_disabled_and_caching_disabled_should_dispatch_immediately_and_forget() { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void batching_disabled_and_caching_disabled_should_dispatch_immediately_and_forget(TestDataLoaderFactory factory) { List> loadCalls = new ArrayList<>(); DataLoaderOptions options = newOptions().setBatchingEnabled(false).setCachingEnabled(false); - DataLoader identityLoader = idLoader(options, loadCalls); + DataLoader identityLoader = factory.idLoader(options, loadCalls); CompletableFuture fa = identityLoader.load("A"); CompletableFuture fb = identityLoader.load("B"); @@ -900,10 +1033,11 @@ public void batching_disabled_and_caching_disabled_should_dispatch_immediately_a } - @Test - public void batches_multiple_requests_with_max_batch_size() { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void batches_multiple_requests_with_max_batch_size(TestDataLoaderFactory factory) { List> loadCalls = new ArrayList<>(); - DataLoader identityLoader = idLoader(newOptions().setMaxBatchSize(2), loadCalls); + DataLoader identityLoader = factory.idLoader(newOptions().setMaxBatchSize(2), loadCalls); CompletableFuture f1 = identityLoader.load(1); CompletableFuture f2 = identityLoader.load(2); @@ -911,7 +1045,7 @@ public void batches_multiple_requests_with_max_batch_size() { identityLoader.dispatch(); - CompletableFuture.allOf(f1, f2, f3).join(); + allOf(f1, f2, f3).join(); assertThat(f1.join(), equalTo(1)); assertThat(f2.join(), equalTo(2)); @@ -921,10 +1055,11 @@ public void batches_multiple_requests_with_max_batch_size() { } - @Test - public void can_split_max_batch_sizes_correctly() { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void can_split_max_batch_sizes_correctly(TestDataLoaderFactory factory) { List> loadCalls = new ArrayList<>(); - DataLoader 
identityLoader = idLoader(newOptions().setMaxBatchSize(5), loadCalls); + DataLoader identityLoader = factory.idLoader(newOptions().setMaxBatchSize(5), loadCalls); for (int i = 0; i < 21; i++) { identityLoader.load(i); @@ -943,22 +1078,23 @@ public void can_split_max_batch_sizes_correctly() { } - @Test - public void should_Batch_loads_occurring_within_futures() { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_Batch_loads_occurring_within_futures(TestDataLoaderFactory factory) { List> loadCalls = new ArrayList<>(); - DataLoader identityLoader = idLoader(newOptions(), loadCalls); + DataLoader identityLoader = factory.idLoader(newOptions(), loadCalls); Supplier nullValue = () -> null; AtomicBoolean v4Called = new AtomicBoolean(); - CompletableFuture.supplyAsync(nullValue).thenAccept(v1 -> { + supplyAsync(nullValue).thenAccept(v1 -> { identityLoader.load("a"); - CompletableFuture.supplyAsync(nullValue).thenAccept(v2 -> { + supplyAsync(nullValue).thenAccept(v2 -> { identityLoader.load("b"); - CompletableFuture.supplyAsync(nullValue).thenAccept(v3 -> { + supplyAsync(nullValue).thenAccept(v3 -> { identityLoader.load("c"); - CompletableFuture.supplyAsync(nullValue).thenAccept( + supplyAsync(nullValue).thenAccept( v4 -> { identityLoader.load("d"); v4Called.set(true); @@ -975,12 +1111,101 @@ public void should_Batch_loads_occurring_within_futures() { singletonList(asList("a", "b", "c", "d")))); } + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_blowup_after_N_keys(TestDataLoaderFactory factory) { + if (!(factory instanceof TestReactiveDataLoaderFactory)) { + return; + } + // + // if we blow up after emitting N keys, the N keys should work but the rest of the keys + // should be exceptional + DataLoader identityLoader = ((TestReactiveDataLoaderFactory) factory).idLoaderBlowsUpsAfterN(3, new DataLoaderOptions(), new ArrayList<>()); + CompletableFuture cf1 = identityLoader.load(1); + CompletableFuture cf2 = identityLoader.load(2); + CompletableFuture cf3 = identityLoader.load(3); + CompletableFuture cf4 = identityLoader.load(4); + CompletableFuture cf5 = identityLoader.load(5); + identityLoader.dispatch(); + await().until(cf5::isDone); + + assertThat(cf1.join(), equalTo(1)); + assertThat(cf2.join(), equalTo(2)); + assertThat(cf3.join(), equalTo(3)); + assertThat(cf4.isCompletedExceptionally(), is(true)); + assertThat(cf5.isCompletedExceptionally(), is(true)); + + } + + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void when_values_size_are_less_then_key_size(TestDataLoaderFactory factory) { + // + // what happens if we want 4 values but are only given 2 back say + // + DataLoader identityLoader = factory.onlyReturnsNValues(2, new DataLoaderOptions(), new ArrayList<>()); + CompletableFuture cf1 = identityLoader.load("A"); + CompletableFuture cf2 = identityLoader.load("B"); + CompletableFuture cf3 = identityLoader.load("C"); + CompletableFuture cf4 = identityLoader.load("D"); + identityLoader.dispatch(); + + await().atMost(Duration.FIVE_SECONDS).until(() -> areAllDone(cf1, cf2, cf3, cf4)); + + if (factory.unwrap() instanceof ListDataLoaderFactory) { + assertThat(cause(cf1), instanceOf(DataLoaderAssertionException.class)); + assertThat(cause(cf2), instanceOf(DataLoaderAssertionException.class)); + assertThat(cause(cf3), instanceOf(DataLoaderAssertionException.class)); + 
assertThat(cause(cf4), instanceOf(DataLoaderAssertionException.class)); + } else if (factory.unwrap() instanceof PublisherDataLoaderFactory) { + // some have completed progressively but the other never did + assertThat(cf1.join(), equalTo("A")); + assertThat(cf2.join(), equalTo("B")); + assertThat(cause(cf3), instanceOf(DataLoaderAssertionException.class)); + assertThat(cause(cf4), instanceOf(DataLoaderAssertionException.class)); + } else { + // with the maps it's ok to have fewer results + assertThat(cf1.join(), equalTo("A")); + assertThat(cf2.join(), equalTo("B")); + assertThat(cf3.join(), equalTo(null)); + assertThat(cf4.join(), equalTo(null)); + } + } + + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void when_values_size_are_more_then_key_size(TestDataLoaderFactory factory) { + // + // what happens if we want 4 values but only given 6 back say + // + DataLoader identityLoader = factory.idLoaderReturnsTooMany(2, new DataLoaderOptions(), new ArrayList<>()); + CompletableFuture cf1 = identityLoader.load("A"); + CompletableFuture cf2 = identityLoader.load("B"); + CompletableFuture cf3 = identityLoader.load("C"); + CompletableFuture cf4 = identityLoader.load("D"); + identityLoader.dispatch(); + await().atMost(Duration.FIVE_SECONDS).until(() -> areAllDone(cf1, cf2, cf3, cf4)); + + + if (factory.unwrap() instanceof ListDataLoaderFactory) { + assertThat(cause(cf1), instanceOf(DataLoaderAssertionException.class)); + assertThat(cause(cf2), instanceOf(DataLoaderAssertionException.class)); + assertThat(cause(cf3), instanceOf(DataLoaderAssertionException.class)); + assertThat(cause(cf4), instanceOf(DataLoaderAssertionException.class)); + } else { + assertThat(cf1.join(), equalTo("A")); + assertThat(cf2.join(), equalTo("B")); + assertThat(cf3.join(), equalTo("C")); + assertThat(cf4.join(), equalTo("D")); + } + } + @Test public void can_call_a_loader_from_a_loader() throws Exception { List> deepLoadCalls = new ArrayList<>(); DataLoader deepLoader = newDataLoader(keys -> { deepLoadCalls.add(keys); - return CompletableFuture.completedFuture(keys); + return completedFuture(keys); }); List> aLoadCalls = new ArrayList<>(); @@ -1000,7 +1225,7 @@ public void can_call_a_loader_from_a_loader() throws Exception { CompletableFuture b1 = bLoader.load("B1"); CompletableFuture b2 = bLoader.load("B2"); - CompletableFuture.allOf( + allOf( aLoader.dispatch(), deepLoader.dispatch(), bLoader.dispatch(), @@ -1026,11 +1251,10 @@ public void can_call_a_loader_from_a_loader() throws Exception { public void should_allow_composition_of_data_loader_calls() { UserManager userManager = new UserManager(); - BatchLoader userBatchLoader = userIds -> CompletableFuture - .supplyAsync(() -> userIds - .stream() - .map(userManager::loadUserById) - .collect(Collectors.toList())); + BatchLoader userBatchLoader = userIds -> supplyAsync(() -> userIds + .stream() + .map(userManager::loadUserById) + .collect(Collectors.toList())); DataLoader userLoader = newDataLoader(userBatchLoader); AtomicBoolean gandalfCalled = new AtomicBoolean(false); @@ -1066,57 +1290,8 @@ private static CacheKey getJsonObjectCacheMapFn() { .collect(Collectors.joining()); } - private static DataLoader idLoader(DataLoaderOptions options, List> loadCalls) { - return newDataLoader(keys -> { - loadCalls.add(new ArrayList<>(keys)); - @SuppressWarnings("unchecked") - List values = keys.stream() - .map(k -> (V) k) - .collect(Collectors.toList()); - return CompletableFuture.completedFuture(values); - }, 
options); - } - - private static DataLoader idLoaderBlowsUps( - DataLoaderOptions options, List> loadCalls) { - return newDataLoader(keys -> { - loadCalls.add(new ArrayList<>(keys)); - return TestKit.futureError(); - }, options); - } - - private static DataLoader idLoaderAllExceptions( - DataLoaderOptions options, List> loadCalls) { - return newDataLoader(keys -> { - loadCalls.add(new ArrayList<>(keys)); - - List errors = keys.stream().map(k -> new IllegalStateException("Error")).collect(Collectors.toList()); - return CompletableFuture.completedFuture(errors); - }, options); - } - - private static DataLoader idLoaderOddEvenExceptions( - DataLoaderOptions options, List> loadCalls) { - return newDataLoader(keys -> { - loadCalls.add(new ArrayList<>(keys)); - - List errors = new ArrayList<>(); - for (Integer key : keys) { - if (key % 2 == 0) { - errors.add(key); - } else { - errors.add(new IllegalStateException("Error")); - } - } - return CompletableFuture.completedFuture(errors); - }, options); - } - - private BatchLoader keysAsValues() { - return CompletableFuture::completedFuture; - } - private static class ThrowingCacheMap extends CustomCacheMap { + @Override public CompletableFuture get(String key) { throw new RuntimeException("Cache implementation failed."); diff --git a/src/test/java/org/dataloader/DataLoaderTimeTest.java b/src/test/java/org/dataloader/DataLoaderTimeTest.java index ee73d85..b4d645c 100644 --- a/src/test/java/org/dataloader/DataLoaderTimeTest.java +++ b/src/test/java/org/dataloader/DataLoaderTimeTest.java @@ -1,13 +1,13 @@ package org.dataloader; import org.dataloader.fixtures.TestingClock; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.time.Instant; import static org.dataloader.fixtures.TestKit.keysAsValues; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; -import static org.junit.Assert.assertThat; @SuppressWarnings("UnusedReturnValue") public class DataLoaderTimeTest { diff --git a/src/test/java/org/dataloader/DataLoaderValueCacheTest.java b/src/test/java/org/dataloader/DataLoaderValueCacheTest.java index 2716fae..732febe 100644 --- a/src/test/java/org/dataloader/DataLoaderValueCacheTest.java +++ b/src/test/java/org/dataloader/DataLoaderValueCacheTest.java @@ -4,10 +4,13 @@ import com.github.benmanes.caffeine.cache.Caffeine; import org.dataloader.fixtures.CaffeineValueCache; import org.dataloader.fixtures.CustomValueCache; +import org.dataloader.fixtures.parameterized.TestDataLoaderFactory; import org.dataloader.impl.DataLoaderAssertionException; -import org.junit.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; import java.util.ArrayList; +import java.util.Collection; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.concurrent.TimeUnit; @@ -18,23 +21,23 @@ import static java.util.Collections.singletonList; import static org.awaitility.Awaitility.await; import static org.dataloader.DataLoaderOptions.newOptions; -import static org.dataloader.fixtures.TestKit.idLoader; import static org.dataloader.fixtures.TestKit.snooze; import static org.dataloader.fixtures.TestKit.sort; import static org.dataloader.impl.CompletableFutureKit.failedFuture; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertThat; -import static 
org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; public class DataLoaderValueCacheTest { - @Test - public void test_by_default_we_have_no_value_caching() { - List> loadCalls = new ArrayList<>(); + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void test_by_default_we_have_no_value_caching(TestDataLoaderFactory factory) { + List> loadCalls = new ArrayList<>(); DataLoaderOptions options = newOptions(); - DataLoader identityLoader = idLoader(options, loadCalls); + DataLoader identityLoader = factory.idLoader(options, loadCalls); CompletableFuture fA = identityLoader.load("a"); CompletableFuture fB = identityLoader.load("b"); @@ -64,12 +67,13 @@ public void test_by_default_we_have_no_value_caching() { assertThat(loadCalls, equalTo(emptyList())); } - @Test - public void should_accept_a_remote_value_store_for_caching() { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_accept_a_remote_value_store_for_caching(TestDataLoaderFactory factory) { CustomValueCache customValueCache = new CustomValueCache(); - List> loadCalls = new ArrayList<>(); + List> loadCalls = new ArrayList<>(); DataLoaderOptions options = newOptions().setValueCache(customValueCache); - DataLoader identityLoader = idLoader(options, loadCalls); + DataLoader identityLoader = factory.idLoader(options, loadCalls); // Fetches as expected @@ -108,8 +112,9 @@ public void should_accept_a_remote_value_store_for_caching() { assertArrayEquals(customValueCache.store.keySet().toArray(), emptyList().toArray()); } - @Test - public void can_use_caffeine_for_caching() { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void can_use_caffeine_for_caching(TestDataLoaderFactory factory) { // // Mostly to prove that some other CACHE library could be used // as the backing value cache. Not really Caffeine specific. 
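The hunks above and below all repeat one conversion: each @Test becomes a @ParameterizedTest fed by TestDataLoaderFactories#get, and the file-local idLoader(...) helper is swapped for factory.idLoader(...). As a reading aid only (this class is not part of the patch; its name and the generic signatures are assumed, since type parameters were stripped in this copy of the diff), a minimal sketch of the resulting pattern looks like this:

package org.dataloader;

import org.dataloader.fixtures.parameterized.TestDataLoaderFactory;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.CompletableFuture;

import static java.util.Arrays.asList;
import static java.util.Collections.singletonList;
import static org.awaitility.Awaitility.await;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;

// Illustrative sketch only; the class and test method names are hypothetical.
public class ExampleParameterizedUsageTest {

    @ParameterizedTest
    @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get")
    public void batches_keys_for_every_factory(TestDataLoaderFactory factory) {
        // every factory variant records the batch calls it receives here
        List<Collection<String>> loadCalls = new ArrayList<>();
        DataLoader<String, String> identityLoader = factory.idLoader(DataLoaderOptions.newOptions(), loadCalls);

        CompletableFuture<String> fA = identityLoader.load("a");
        CompletableFuture<String> fB = identityLoader.load("b");
        identityLoader.dispatch();

        await().until(() -> fA.isDone() && fB.isDone());

        // a single batch containing both keys, whatever the underlying implementation
        assertThat(loadCalls, equalTo(singletonList(asList("a", "b"))));
        assertThat(fA.join(), equalTo("a"));
        assertThat(fB.join(), equalTo("b"));
    }
}

TestDataLoaderFactories#get supplies the list, mapped, publisher and mapped-publisher loaders plus their DelegatingDataLoader wrappers, so each converted test now exercises every implementation with the same assertions.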
@@ -121,9 +126,9 @@ public void can_use_caffeine_for_caching() { ValueCache caffeineValueCache = new CaffeineValueCache(caffeineCache); - List> loadCalls = new ArrayList<>(); + List> loadCalls = new ArrayList<>(); DataLoaderOptions options = newOptions().setValueCache(caffeineValueCache); - DataLoader identityLoader = idLoader(options, loadCalls); + DataLoader identityLoader = factory.idLoader(options, loadCalls); // Fetches as expected @@ -148,8 +153,9 @@ public void can_use_caffeine_for_caching() { assertArrayEquals(caffeineCache.asMap().keySet().toArray(), asList("a", "b", "c").toArray()); } - @Test - public void will_invoke_loader_if_CACHE_GET_call_throws_exception() { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void will_invoke_loader_if_CACHE_GET_call_throws_exception(TestDataLoaderFactory factory) { CustomValueCache customValueCache = new CustomValueCache() { @Override @@ -163,9 +169,9 @@ public CompletableFuture get(String key) { customValueCache.set("a", "Not From Cache"); customValueCache.set("b", "From Cache"); - List> loadCalls = new ArrayList<>(); + List> loadCalls = new ArrayList<>(); DataLoaderOptions options = newOptions().setValueCache(customValueCache); - DataLoader identityLoader = idLoader(options, loadCalls); + DataLoader identityLoader = factory.idLoader(options, loadCalls); CompletableFuture fA = identityLoader.load("a"); CompletableFuture fB = identityLoader.load("b"); @@ -178,8 +184,9 @@ public CompletableFuture get(String key) { assertThat(loadCalls, equalTo(singletonList(singletonList("a")))); } - @Test - public void will_still_work_if_CACHE_SET_call_throws_exception() { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void will_still_work_if_CACHE_SET_call_throws_exception(TestDataLoaderFactory factory) { CustomValueCache customValueCache = new CustomValueCache() { @Override public CompletableFuture set(String key, Object value) { @@ -190,9 +197,9 @@ public CompletableFuture set(String key, Object value) { } }; - List> loadCalls = new ArrayList<>(); + List> loadCalls = new ArrayList<>(); DataLoaderOptions options = newOptions().setValueCache(customValueCache); - DataLoader identityLoader = idLoader(options, loadCalls); + DataLoader identityLoader = factory.idLoader(options, loadCalls); CompletableFuture fA = identityLoader.load("a"); CompletableFuture fB = identityLoader.load("b"); @@ -206,8 +213,9 @@ public CompletableFuture set(String key, Object value) { assertArrayEquals(customValueCache.store.keySet().toArray(), singletonList("b").toArray()); } - @Test - public void caching_can_take_some_time_complete() { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void caching_can_take_some_time_complete(TestDataLoaderFactory factory) { CustomValueCache customValueCache = new CustomValueCache() { @Override @@ -228,9 +236,9 @@ public CompletableFuture get(String key) { }; - List> loadCalls = new ArrayList<>(); + List> loadCalls = new ArrayList<>(); DataLoaderOptions options = newOptions().setValueCache(customValueCache); - DataLoader identityLoader = idLoader(options, loadCalls); + DataLoader identityLoader = factory.idLoader(options, loadCalls); CompletableFuture fA = identityLoader.load("a"); CompletableFuture fB = identityLoader.load("b"); @@ -247,8 +255,9 @@ public CompletableFuture get(String key) { assertThat(loadCalls, equalTo(singletonList(asList("missC", 
"missD")))); } - @Test - public void batch_caching_works_as_expected() { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void batch_caching_works_as_expected(TestDataLoaderFactory factory) { CustomValueCache customValueCache = new CustomValueCache() { @Override @@ -269,9 +278,9 @@ public CompletableFuture>> getValues(List keys) { }; - List> loadCalls = new ArrayList<>(); + List> loadCalls = new ArrayList<>(); DataLoaderOptions options = newOptions().setValueCache(customValueCache); - DataLoader identityLoader = idLoader(options, loadCalls); + DataLoader identityLoader = factory.idLoader(options, loadCalls); CompletableFuture fA = identityLoader.load("a"); CompletableFuture fB = identityLoader.load("b"); @@ -293,8 +302,9 @@ public CompletableFuture>> getValues(List keys) { assertThat(values, equalTo(asList("missC", "missD"))); } - @Test - public void assertions_will_be_thrown_if_the_cache_does_not_follow_contract() { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void assertions_will_be_thrown_if_the_cache_does_not_follow_contract(TestDataLoaderFactory factory) { CustomValueCache customValueCache = new CustomValueCache() { @Override @@ -312,9 +322,9 @@ public CompletableFuture>> getValues(List keys) { } }; - List> loadCalls = new ArrayList<>(); + List> loadCalls = new ArrayList<>(); DataLoaderOptions options = newOptions().setValueCache(customValueCache); - DataLoader identityLoader = idLoader(options, loadCalls); + DataLoader identityLoader = factory.idLoader(options, loadCalls); CompletableFuture fA = identityLoader.load("a"); CompletableFuture fB = identityLoader.load("b"); @@ -335,8 +345,9 @@ private boolean isAssertionException(CompletableFuture fA) { } - @Test - public void if_caching_is_off_its_never_hit() { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void if_caching_is_off_its_never_hit(TestDataLoaderFactory factory) { AtomicInteger getCalls = new AtomicInteger(); CustomValueCache customValueCache = new CustomValueCache() { @@ -347,9 +358,9 @@ public CompletableFuture get(String key) { } }; - List> loadCalls = new ArrayList<>(); + List> loadCalls = new ArrayList<>(); DataLoaderOptions options = newOptions().setValueCache(customValueCache).setCachingEnabled(false); - DataLoader identityLoader = idLoader(options, loadCalls); + DataLoader identityLoader = factory.idLoader(options, loadCalls); CompletableFuture fA = identityLoader.load("a"); CompletableFuture fB = identityLoader.load("b"); @@ -368,8 +379,9 @@ public CompletableFuture get(String key) { assertTrue(customValueCache.asMap().isEmpty()); } - @Test - public void if_everything_is_cached_no_batching_happens() { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void if_everything_is_cached_no_batching_happens(TestDataLoaderFactory factory) { AtomicInteger getCalls = new AtomicInteger(); AtomicInteger setCalls = new AtomicInteger(); CustomValueCache customValueCache = new CustomValueCache() { @@ -390,9 +402,9 @@ public CompletableFuture> setValues(List keys, List customValueCache.asMap().put("b", "cachedB"); customValueCache.asMap().put("c", "cachedC"); - List> loadCalls = new ArrayList<>(); + List> loadCalls = new ArrayList<>(); DataLoaderOptions options = newOptions().setValueCache(customValueCache).setCachingEnabled(true); - DataLoader identityLoader = 
idLoader(options, loadCalls); + DataLoader identityLoader = factory.idLoader(options, loadCalls); CompletableFuture fA = identityLoader.load("a"); CompletableFuture fB = identityLoader.load("b"); @@ -410,8 +422,9 @@ public CompletableFuture> setValues(List keys, List } - @Test - public void if_batching_is_off_it_still_can_cache() { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void if_batching_is_off_it_still_can_cache(TestDataLoaderFactory factory) { AtomicInteger getCalls = new AtomicInteger(); AtomicInteger setCalls = new AtomicInteger(); CustomValueCache customValueCache = new CustomValueCache() { @@ -430,9 +443,9 @@ public CompletableFuture> setValues(List keys, List }; customValueCache.asMap().put("a", "cachedA"); - List> loadCalls = new ArrayList<>(); + List> loadCalls = new ArrayList<>(); DataLoaderOptions options = newOptions().setValueCache(customValueCache).setCachingEnabled(true).setBatchingEnabled(false); - DataLoader identityLoader = idLoader(options, loadCalls); + DataLoader identityLoader = factory.idLoader(options, loadCalls); CompletableFuture fA = identityLoader.load("a"); CompletableFuture fB = identityLoader.load("b"); diff --git a/src/test/java/org/dataloader/DataLoaderWithTryTest.java b/src/test/java/org/dataloader/DataLoaderWithTryTest.java index e9e8538..fda7bd4 100644 --- a/src/test/java/org/dataloader/DataLoaderWithTryTest.java +++ b/src/test/java/org/dataloader/DataLoaderWithTryTest.java @@ -1,7 +1,7 @@ package org.dataloader; import org.hamcrest.Matchers; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.ArrayList; import java.util.HashMap; @@ -12,9 +12,9 @@ import static java.util.Arrays.asList; import static java.util.Collections.singletonList; import static org.dataloader.DataLoaderFactory.*; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertThat; public class DataLoaderWithTryTest { diff --git a/src/test/java/org/dataloader/DelegatingDataLoaderTest.java b/src/test/java/org/dataloader/DelegatingDataLoaderTest.java new file mode 100644 index 0000000..9103eca --- /dev/null +++ b/src/test/java/org/dataloader/DelegatingDataLoaderTest.java @@ -0,0 +1,64 @@ +package org.dataloader; + +import org.dataloader.fixtures.TestKit; +import org.dataloader.fixtures.parameterized.DelegatingDataLoaderFactory; +import org.jspecify.annotations.NonNull; +import org.jspecify.annotations.Nullable; +import org.junit.jupiter.api.Test; + +import java.util.List; +import java.util.concurrent.CompletableFuture; + +import static org.awaitility.Awaitility.await; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; + +/** + * There are WAY more tests via the {@link DelegatingDataLoaderFactory} + * parameterized tests. 
All the basic {@link DataLoader} tests pass when wrapped in a {@link DelegatingDataLoader} + */ +public class DelegatingDataLoaderTest { + + @Test + void canUnwrapDataLoaders() { + DataLoader rawLoader = TestKit.idLoader(); + DataLoader delegateLoader = new DelegatingDataLoader<>(rawLoader); + + assertThat(DelegatingDataLoader.unwrap(rawLoader), is(rawLoader)); + assertThat(DelegatingDataLoader.unwrap(delegateLoader), is(rawLoader)); + } + + @Test + void canCreateAClassOk() { + DataLoader rawLoader = TestKit.idLoader(); + DelegatingDataLoader delegatingDataLoader = new DelegatingDataLoader<>(rawLoader) { + @Override + public CompletableFuture load(@NonNull String key, @Nullable Object keyContext) { + CompletableFuture cf = super.load(key, keyContext); + return cf.thenApply(v -> "|" + v + "|"); + } + }; + + assertThat(delegatingDataLoader.getDelegate(), is(rawLoader)); + + + CompletableFuture cfA = delegatingDataLoader.load("A"); + CompletableFuture cfB = delegatingDataLoader.load("B"); + CompletableFuture> cfCD = delegatingDataLoader.loadMany(List.of("C", "D")); + + CompletableFuture> dispatch = delegatingDataLoader.dispatch(); + + await().until(dispatch::isDone); + + assertThat(cfA.join(), equalTo("|A|")); + assertThat(cfB.join(), equalTo("|B|")); + assertThat(cfCD.join(), equalTo(List.of("|C|", "|D|"))); + + assertThat(delegatingDataLoader.getIfPresent("A").isEmpty(), equalTo(false)); + assertThat(delegatingDataLoader.getIfPresent("X").isEmpty(), equalTo(true)); + + assertThat(delegatingDataLoader.getIfCompleted("A").isEmpty(), equalTo(false)); + assertThat(delegatingDataLoader.getIfCompleted("X").isEmpty(), equalTo(true)); + } +} \ No newline at end of file diff --git a/src/test/java/org/dataloader/TryTest.java b/src/test/java/org/dataloader/TryTest.java index 4da7bca..1b237e2 100644 --- a/src/test/java/org/dataloader/TryTest.java +++ b/src/test/java/org/dataloader/TryTest.java @@ -1,7 +1,7 @@ package org.dataloader; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; import java.util.Optional; import java.util.concurrent.CompletableFuture; @@ -9,11 +9,11 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.function.Function; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; public class TryTest { @@ -29,7 +29,7 @@ private void expectThrowable(RunThatCanThrow runnable, Class BatchLoader keysAsValues(List> loadCalls) { }; } - public static BatchLoader keysAsValuesAsync(Duration delay) { - return keysAsValuesAsync(new ArrayList<>(), delay); - } - - public static BatchLoader keysAsValuesAsync(List> loadCalls, Duration delay) { - return keys -> CompletableFuture.supplyAsync(() -> { - snooze(delay.toMillis()); - List ks = new ArrayList<>(keys); - loadCalls.add(ks); - @SuppressWarnings("unchecked") - List values = keys.stream() - .map(k -> (V) k) - .collect(toList()); - return values; - }); - } - public static DataLoader idLoader() { return idLoader(null, new ArrayList<>()); } - public static DataLoader idLoader(List> loadCalls) { - return idLoader(null, loadCalls); - } - public static DataLoader idLoader(DataLoaderOptions options, List> loadCalls) { 
return DataLoaderFactory.newDataLoader(keysAsValues(loadCalls), options); } - public static DataLoader idLoaderAsync(Duration delay) { - return idLoaderAsync(null, new ArrayList<>(), delay); - } - - public static DataLoader idLoaderAsync(DataLoaderOptions options, List> loadCalls, Duration delay) { - return DataLoaderFactory.newDataLoader(keysAsValuesAsync(loadCalls, delay), options); - } - public static Collection listFrom(int i, int max) { List ints = new ArrayList<>(); for (int j = i; j < max; j++) { @@ -131,4 +101,13 @@ public static Set asSet(T... elements) { public static Set asSet(Collection elements) { return new LinkedHashSet<>(elements); } + + public static boolean areAllDone(CompletableFuture... cfs) { + for (CompletableFuture cf : cfs) { + if (! cf.isDone()) { + return false; + } + } + return true; + } } diff --git a/src/test/java/org/dataloader/fixtures/UserManager.java b/src/test/java/org/dataloader/fixtures/UserManager.java index 24fee0d..1d2ff1f 100644 --- a/src/test/java/org/dataloader/fixtures/UserManager.java +++ b/src/test/java/org/dataloader/fixtures/UserManager.java @@ -1,5 +1,8 @@ package org.dataloader.fixtures; +import org.reactivestreams.Publisher; +import reactor.core.publisher.Flux; + import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; @@ -52,6 +55,14 @@ public List loadUsersById(List userIds) { return userIds.stream().map(this::loadUserById).collect(Collectors.toList()); } + public Publisher streamUsersById(List userIds) { + return Flux.fromIterable(loadUsersById(userIds)); + } + + public Publisher> streamUsersById(Set userIds) { + return Flux.fromIterable(loadMapOfUsersByIds(null, userIds).entrySet()); + } + public Map loadMapOfUsersByIds(SecurityCtx callCtx, Set userIds) { Map map = new HashMap<>(); userIds.forEach(userId -> { diff --git a/src/test/java/org/dataloader/fixtures/parameterized/DelegatingDataLoaderFactory.java b/src/test/java/org/dataloader/fixtures/parameterized/DelegatingDataLoaderFactory.java new file mode 100644 index 0000000..0cbd3f3 --- /dev/null +++ b/src/test/java/org/dataloader/fixtures/parameterized/DelegatingDataLoaderFactory.java @@ -0,0 +1,71 @@ +package org.dataloader.fixtures.parameterized; + +import org.dataloader.DataLoader; +import org.dataloader.DataLoaderOptions; +import org.dataloader.DelegatingDataLoader; + +import java.time.Duration; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +public class DelegatingDataLoaderFactory implements TestDataLoaderFactory { + // its delegates all the way down to the turtles + private final TestDataLoaderFactory delegateFactory; + + public DelegatingDataLoaderFactory(TestDataLoaderFactory delegateFactory) { + this.delegateFactory = delegateFactory; + } + + @Override + public String toString() { + return "DelegatingDataLoaderFactory{" + + "delegateFactory=" + delegateFactory + + '}'; + } + + @Override + public TestDataLoaderFactory unwrap() { + return delegateFactory.unwrap(); + } + + private DataLoader mkDelegateDataLoader(DataLoader dataLoader) { + return new DelegatingDataLoader<>(dataLoader); + } + + @Override + public DataLoader idLoader(DataLoaderOptions options, List> loadCalls) { + return mkDelegateDataLoader(delegateFactory.idLoader(options, loadCalls)); + } + + @Override + public DataLoader idLoaderDelayed(DataLoaderOptions options, List> loadCalls, Duration delay) { + return mkDelegateDataLoader(delegateFactory.idLoaderDelayed(options, loadCalls, delay)); + } + + @Override + public DataLoader idLoaderBlowsUps( + 
DataLoaderOptions options, List> loadCalls) { + return mkDelegateDataLoader(delegateFactory.idLoaderBlowsUps(options, loadCalls)); + } + + @Override + public DataLoader idLoaderAllExceptions(DataLoaderOptions options, List> loadCalls) { + return mkDelegateDataLoader(delegateFactory.idLoaderAllExceptions(options, loadCalls)); + } + + @Override + public DataLoader idLoaderOddEvenExceptions(DataLoaderOptions options, List> loadCalls) { + return mkDelegateDataLoader(delegateFactory.idLoaderOddEvenExceptions(options, loadCalls)); + } + + @Override + public DataLoader onlyReturnsNValues(int N, DataLoaderOptions options, ArrayList loadCalls) { + return mkDelegateDataLoader(delegateFactory.onlyReturnsNValues(N, options, loadCalls)); + } + + @Override + public DataLoader idLoaderReturnsTooMany(int howManyMore, DataLoaderOptions options, ArrayList loadCalls) { + return mkDelegateDataLoader(delegateFactory.idLoaderReturnsTooMany(howManyMore, options, loadCalls)); + } +} diff --git a/src/test/java/org/dataloader/fixtures/parameterized/ListDataLoaderFactory.java b/src/test/java/org/dataloader/fixtures/parameterized/ListDataLoaderFactory.java new file mode 100644 index 0000000..0644d3c --- /dev/null +++ b/src/test/java/org/dataloader/fixtures/parameterized/ListDataLoaderFactory.java @@ -0,0 +1,90 @@ +package org.dataloader.fixtures.parameterized; + +import org.dataloader.DataLoader; +import org.dataloader.DataLoaderOptions; +import org.dataloader.fixtures.TestKit; + +import java.time.Duration; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; + +import static java.util.concurrent.CompletableFuture.completedFuture; +import static org.dataloader.DataLoaderFactory.newDataLoader; + +public class ListDataLoaderFactory implements TestDataLoaderFactory { + @Override + public DataLoader idLoader(DataLoaderOptions options, List> loadCalls) { + return newDataLoader(keys -> { + loadCalls.add(new ArrayList<>(keys)); + return completedFuture(keys); + }, options); + } + + @Override + public DataLoader idLoaderDelayed(DataLoaderOptions options, List> loadCalls, Duration delay) { + return newDataLoader(keys -> CompletableFuture.supplyAsync(() -> { + TestKit.snooze(delay.toMillis()); + loadCalls.add(new ArrayList<>(keys)); + return keys; + })); + } + + @Override + public DataLoader idLoaderBlowsUps( + DataLoaderOptions options, List> loadCalls) { + return newDataLoader(keys -> { + loadCalls.add(new ArrayList<>(keys)); + return TestKit.futureError(); + }, options); + } + + @Override + public DataLoader idLoaderAllExceptions(DataLoaderOptions options, List> loadCalls) { + return newDataLoader(keys -> { + loadCalls.add(new ArrayList<>(keys)); + + List errors = keys.stream().map(k -> new IllegalStateException("Error")).collect(Collectors.toList()); + return completedFuture(errors); + }, options); + } + + @Override + public DataLoader idLoaderOddEvenExceptions(DataLoaderOptions options, List> loadCalls) { + return newDataLoader(keys -> { + loadCalls.add(new ArrayList<>(keys)); + + List errors = new ArrayList<>(); + for (Integer key : keys) { + if (key % 2 == 0) { + errors.add(key); + } else { + errors.add(new IllegalStateException("Error")); + } + } + return completedFuture(errors); + }, options); + } + + @Override + public DataLoader onlyReturnsNValues(int N, DataLoaderOptions options, ArrayList loadCalls) { + return newDataLoader(keys -> { + loadCalls.add(new ArrayList<>(keys)); + return 
completedFuture(keys.subList(0, N)); + }, options); + } + + @Override + public DataLoader idLoaderReturnsTooMany(int howManyMore, DataLoaderOptions options, ArrayList loadCalls) { + return newDataLoader(keys -> { + loadCalls.add(new ArrayList<>(keys)); + List l = new ArrayList<>(keys); + for (int i = 0; i < howManyMore; i++) { + l.add("extra-" + i); + } + return completedFuture(l); + }, options); + } +} diff --git a/src/test/java/org/dataloader/fixtures/parameterized/MappedDataLoaderFactory.java b/src/test/java/org/dataloader/fixtures/parameterized/MappedDataLoaderFactory.java new file mode 100644 index 0000000..e7c47ec --- /dev/null +++ b/src/test/java/org/dataloader/fixtures/parameterized/MappedDataLoaderFactory.java @@ -0,0 +1,111 @@ +package org.dataloader.fixtures.parameterized; + +import org.dataloader.DataLoader; +import org.dataloader.DataLoaderOptions; +import org.dataloader.fixtures.TestKit; + +import java.time.Duration; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; + +import static java.util.concurrent.CompletableFuture.completedFuture; +import static org.dataloader.DataLoaderFactory.newDataLoader; +import static org.dataloader.DataLoaderFactory.newMappedDataLoader; +import static org.dataloader.fixtures.TestKit.futureError; + +public class MappedDataLoaderFactory implements TestDataLoaderFactory { + + @Override + public DataLoader idLoader( + DataLoaderOptions options, List> loadCalls) { + return newMappedDataLoader((keys) -> { + loadCalls.add(new ArrayList<>(keys)); + Map map = new HashMap<>(); + keys.forEach(k -> map.put(k, k)); + return completedFuture(map); + }, options); + } + + @Override + public DataLoader idLoaderDelayed( + DataLoaderOptions options, List> loadCalls, Duration delay) { + return newMappedDataLoader(keys -> CompletableFuture.supplyAsync(() -> { + TestKit.snooze(delay.toMillis()); + loadCalls.add(new ArrayList<>(keys)); + Map map = new HashMap<>(); + keys.forEach(k -> map.put(k, k)); + return map; + })); + } + + @Override + public DataLoader idLoaderBlowsUps(DataLoaderOptions options, List> loadCalls) { + return newMappedDataLoader((keys) -> { + loadCalls.add(new ArrayList<>(keys)); + return futureError(); + }, options); + } + + @Override + public DataLoader idLoaderAllExceptions( + DataLoaderOptions options, List> loadCalls) { + return newMappedDataLoader(keys -> { + loadCalls.add(new ArrayList<>(keys)); + Map errorByKey = new HashMap<>(); + keys.forEach(k -> errorByKey.put(k, new IllegalStateException("Error"))); + return completedFuture(errorByKey); + }, options); + } + + @Override + public DataLoader idLoaderOddEvenExceptions( + DataLoaderOptions options, List> loadCalls) { + return newMappedDataLoader(keys -> { + loadCalls.add(new ArrayList<>(keys)); + + Map errorByKey = new HashMap<>(); + for (Integer key : keys) { + if (key % 2 == 0) { + errorByKey.put(key, key); + } else { + errorByKey.put(key, new IllegalStateException("Error")); + } + } + return completedFuture(errorByKey); + }, options); + } + + @Override + public DataLoader onlyReturnsNValues(int N, DataLoaderOptions options, ArrayList loadCalls) { + return newMappedDataLoader(keys -> { + loadCalls.add(new ArrayList<>(keys)); + + Map collect = List.copyOf(keys).subList(0, N).stream().collect(Collectors.toMap( + k -> k, v -> v + )); + return completedFuture(collect); + }, options); + } + + @Override + public DataLoader 
idLoaderReturnsTooMany(int howManyMore, DataLoaderOptions options, ArrayList loadCalls) { + return newMappedDataLoader(keys -> { + loadCalls.add(new ArrayList<>(keys)); + + List l = new ArrayList<>(keys); + for (int i = 0; i < howManyMore; i++) { + l.add("extra-" + i); + } + + Map collect = l.stream().collect(Collectors.toMap( + k -> k, v -> v + )); + return completedFuture(collect); + }, options); + } +} diff --git a/src/test/java/org/dataloader/fixtures/parameterized/MappedPublisherDataLoaderFactory.java b/src/test/java/org/dataloader/fixtures/parameterized/MappedPublisherDataLoaderFactory.java new file mode 100644 index 0000000..fa920cf --- /dev/null +++ b/src/test/java/org/dataloader/fixtures/parameterized/MappedPublisherDataLoaderFactory.java @@ -0,0 +1,126 @@ +package org.dataloader.fixtures.parameterized; + +import org.dataloader.DataLoader; +import org.dataloader.DataLoaderOptions; +import org.dataloader.Try; +import org.dataloader.fixtures.TestKit; +import reactor.core.publisher.Flux; + +import java.time.Duration; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static java.util.stream.Collectors.toList; +import static java.util.stream.Collectors.toSet; +import static org.dataloader.DataLoaderFactory.newMappedPublisherDataLoader; +import static org.dataloader.DataLoaderFactory.newMappedPublisherDataLoaderWithTry; +import static org.dataloader.DataLoaderFactory.newPublisherDataLoader; + +public class MappedPublisherDataLoaderFactory implements TestDataLoaderFactory, TestReactiveDataLoaderFactory { + + @Override + public DataLoader idLoader( + DataLoaderOptions options, List> loadCalls) { + return newMappedPublisherDataLoader((keys, subscriber) -> { + loadCalls.add(new ArrayList<>(keys)); + Map map = new HashMap<>(); + keys.forEach(k -> map.put(k, k)); + Flux.fromIterable(map.entrySet()).subscribe(subscriber); + }, options); + } + + @Override + public DataLoader idLoaderDelayed( + DataLoaderOptions options, List> loadCalls, Duration delay) { + return newMappedPublisherDataLoader((keys, subscriber) -> { + CompletableFuture.runAsync(() -> { + TestKit.snooze(delay.toMillis()); + loadCalls.add(new ArrayList<>(keys)); + Map map = new HashMap<>(); + keys.forEach(k -> map.put(k, k)); + Flux.fromIterable(map.entrySet()).subscribe(subscriber); + }); + }, options); + } + + @Override + public DataLoader idLoaderBlowsUps(DataLoaderOptions options, List> loadCalls) { + return newMappedPublisherDataLoader((keys, subscriber) -> { + loadCalls.add(new ArrayList<>(keys)); + Flux.>error(new IllegalStateException("Error")).subscribe(subscriber); + }, options); + } + + @Override + public DataLoader idLoaderAllExceptions( + DataLoaderOptions options, List> loadCalls) { + return newMappedPublisherDataLoaderWithTry((keys, subscriber) -> { + loadCalls.add(new ArrayList<>(keys)); + Stream>> failures = keys.stream().map(k -> Map.entry(k, Try.failed(new IllegalStateException("Error")))); + Flux.fromStream(failures).subscribe(subscriber); + }, options); + } + + @Override + public DataLoader idLoaderOddEvenExceptions( + DataLoaderOptions options, List> loadCalls) { + return newMappedPublisherDataLoaderWithTry((keys, subscriber) -> { + loadCalls.add(new ArrayList<>(keys)); + + Map> errorByKey = new HashMap<>(); + for (Integer key : keys) { + if (key % 2 == 0) { + errorByKey.put(key, 
Try.succeeded(key)); + } else { + errorByKey.put(key, Try.failed(new IllegalStateException("Error"))); + } + } + Flux.fromIterable(errorByKey.entrySet()).subscribe(subscriber); + }, options); + } + + @Override + public DataLoader idLoaderBlowsUpsAfterN(int N, DataLoaderOptions options, List> loadCalls) { + return newMappedPublisherDataLoader((keys, subscriber) -> { + loadCalls.add(new ArrayList<>(keys)); + + List nKeys = keys.stream().limit(N).collect(toList()); + Flux> subFlux = Flux.fromIterable(nKeys).map(k -> Map.entry(k, k)); + subFlux.concatWith(Flux.error(new IllegalStateException("Error"))) + .subscribe(subscriber); + }, options); + } + + @Override + public DataLoader onlyReturnsNValues(int N, DataLoaderOptions options, ArrayList loadCalls) { + return newMappedPublisherDataLoader((keys, subscriber) -> { + loadCalls.add(new ArrayList<>(keys)); + + List nKeys = keys.stream().limit(N).collect(toList()); + Flux.fromIterable(nKeys).map(k -> Map.entry(k, k)) + .subscribe(subscriber); + }, options); + } + + @Override + public DataLoader idLoaderReturnsTooMany(int howManyMore, DataLoaderOptions options, ArrayList loadCalls) { + return newMappedPublisherDataLoader((keys, subscriber) -> { + loadCalls.add(new ArrayList<>(keys)); + + List l = new ArrayList<>(keys); + for (int i = 0; i < howManyMore; i++) { + l.add("extra-" + i); + } + + Flux.fromIterable(l).map(k -> Map.entry(k, k)) + .subscribe(subscriber); + }, options); + } +} diff --git a/src/test/java/org/dataloader/fixtures/parameterized/PublisherDataLoaderFactory.java b/src/test/java/org/dataloader/fixtures/parameterized/PublisherDataLoaderFactory.java new file mode 100644 index 0000000..2049719 --- /dev/null +++ b/src/test/java/org/dataloader/fixtures/parameterized/PublisherDataLoaderFactory.java @@ -0,0 +1,115 @@ +package org.dataloader.fixtures.parameterized; + +import org.dataloader.DataLoader; +import org.dataloader.DataLoaderOptions; +import org.dataloader.Try; +import org.dataloader.fixtures.TestKit; +import reactor.core.publisher.Flux; + +import java.time.Duration; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Stream; + +import static org.dataloader.DataLoaderFactory.newDataLoader; +import static org.dataloader.DataLoaderFactory.newPublisherDataLoader; +import static org.dataloader.DataLoaderFactory.newPublisherDataLoaderWithTry; + +public class PublisherDataLoaderFactory implements TestDataLoaderFactory, TestReactiveDataLoaderFactory { + + @Override + public DataLoader idLoader( + DataLoaderOptions options, List> loadCalls) { + return newPublisherDataLoader((keys, subscriber) -> { + loadCalls.add(new ArrayList<>(keys)); + Flux.fromIterable(keys).subscribe(subscriber); + }, options); + } + + @Override + public DataLoader idLoaderDelayed(DataLoaderOptions options, List> loadCalls, Duration delay) { + return newPublisherDataLoader((keys, subscriber) -> { + CompletableFuture.runAsync(() -> { + TestKit.snooze(delay.toMillis()); + loadCalls.add(new ArrayList<>(keys)); + Flux.fromIterable(keys).subscribe(subscriber); + }); + }, options); + } + + @Override + public DataLoader idLoaderBlowsUps(DataLoaderOptions options, List> loadCalls) { + return newPublisherDataLoader((keys, subscriber) -> { + loadCalls.add(new ArrayList<>(keys)); + Flux.error(new IllegalStateException("Error")).subscribe(subscriber); + }, options); + } + + @Override + public DataLoader idLoaderAllExceptions( + DataLoaderOptions options, List> loadCalls) { + 
return newPublisherDataLoaderWithTry((keys, subscriber) -> { + loadCalls.add(new ArrayList<>(keys)); + Stream> failures = keys.stream().map(k -> Try.failed(new IllegalStateException("Error"))); + Flux.fromStream(failures).subscribe(subscriber); + }, options); + } + + @Override + public DataLoader idLoaderOddEvenExceptions( + DataLoaderOptions options, List> loadCalls) { + return newPublisherDataLoaderWithTry((keys, subscriber) -> { + loadCalls.add(new ArrayList<>(keys)); + + List> errors = new ArrayList<>(); + for (Integer key : keys) { + if (key % 2 == 0) { + errors.add(Try.succeeded(key)); + } else { + errors.add(Try.failed(new IllegalStateException("Error"))); + } + } + Flux.fromIterable(errors).subscribe(subscriber); + }, options); + } + + @Override + public DataLoader idLoaderBlowsUpsAfterN(int N, DataLoaderOptions options, List> loadCalls) { + return newPublisherDataLoader((keys, subscriber) -> { + loadCalls.add(new ArrayList<>(keys)); + + List nKeys = keys.subList(0, N); + Flux subFlux = Flux.fromIterable(nKeys); + subFlux.concatWith(Flux.error(new IllegalStateException("Error"))) + .subscribe(subscriber); + }, options); + } + + @Override + public DataLoader onlyReturnsNValues(int N, DataLoaderOptions options, ArrayList loadCalls) { + return newPublisherDataLoader((keys, subscriber) -> { + loadCalls.add(new ArrayList<>(keys)); + + List nKeys = keys.subList(0, N); + Flux.fromIterable(nKeys) + .subscribe(subscriber); + }, options); + } + + @Override + public DataLoader idLoaderReturnsTooMany(int howManyMore, DataLoaderOptions options, ArrayList loadCalls) { + return newPublisherDataLoader((keys, subscriber) -> { + loadCalls.add(new ArrayList<>(keys)); + + List l = new ArrayList<>(keys); + for (int i = 0; i < howManyMore; i++) { + l.add("extra-" + i); + } + + Flux.fromIterable(l) + .subscribe(subscriber); + }, options); + } +} diff --git a/src/test/java/org/dataloader/fixtures/parameterized/TestDataLoaderFactories.java b/src/test/java/org/dataloader/fixtures/parameterized/TestDataLoaderFactories.java new file mode 100644 index 0000000..48678c4 --- /dev/null +++ b/src/test/java/org/dataloader/fixtures/parameterized/TestDataLoaderFactories.java @@ -0,0 +1,25 @@ +package org.dataloader.fixtures.parameterized; + +import org.junit.jupiter.api.Named; +import org.junit.jupiter.params.provider.Arguments; + +import java.util.stream.Stream; + +@SuppressWarnings("unused") +public class TestDataLoaderFactories { + + public static Stream get() { + return Stream.of( + Arguments.of(Named.of("List DataLoader", new ListDataLoaderFactory())), + Arguments.of(Named.of("Mapped DataLoader", new MappedDataLoaderFactory())), + Arguments.of(Named.of("Publisher DataLoader", new PublisherDataLoaderFactory())), + Arguments.of(Named.of("Mapped Publisher DataLoader", new MappedPublisherDataLoaderFactory())), + + // runs all the above via a DelegateDataLoader + Arguments.of(Named.of("Delegate List DataLoader", new DelegatingDataLoaderFactory(new ListDataLoaderFactory()))), + Arguments.of(Named.of("Delegate Mapped DataLoader", new DelegatingDataLoaderFactory(new MappedDataLoaderFactory()))), + Arguments.of(Named.of("Delegate Publisher DataLoader", new DelegatingDataLoaderFactory(new PublisherDataLoaderFactory()))), + Arguments.of(Named.of("Delegate Mapped Publisher DataLoader", new DelegatingDataLoaderFactory(new MappedPublisherDataLoaderFactory()))) + ); + } +} diff --git a/src/test/java/org/dataloader/fixtures/parameterized/TestDataLoaderFactory.java 
b/src/test/java/org/dataloader/fixtures/parameterized/TestDataLoaderFactory.java new file mode 100644 index 0000000..789b136 --- /dev/null +++ b/src/test/java/org/dataloader/fixtures/parameterized/TestDataLoaderFactory.java @@ -0,0 +1,46 @@ +package org.dataloader.fixtures.parameterized; + +import org.dataloader.DataLoader; +import org.dataloader.DataLoaderOptions; + +import java.time.Duration; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +public interface TestDataLoaderFactory { + DataLoader idLoader(DataLoaderOptions options, List> loadCalls); + + DataLoader idLoaderDelayed(DataLoaderOptions options, List> loadCalls, Duration delay); + + DataLoader idLoaderBlowsUps(DataLoaderOptions options, List> loadCalls); + + DataLoader idLoaderAllExceptions(DataLoaderOptions options, List> loadCalls); + + DataLoader idLoaderOddEvenExceptions(DataLoaderOptions options, List> loadCalls); + + DataLoader onlyReturnsNValues(int N, DataLoaderOptions options, ArrayList loadCalls); + + DataLoader idLoaderReturnsTooMany(int howManyMore, DataLoaderOptions options, ArrayList loadCalls); + + // Convenience methods + + default DataLoader idLoader(DataLoaderOptions options) { + return idLoader(options, new ArrayList<>()); + } + + default DataLoader idLoader(List> calls) { + return idLoader(null, calls); + } + default DataLoader idLoader() { + return idLoader(null, new ArrayList<>()); + } + + default DataLoader idLoaderDelayed(Duration delay) { + return idLoaderDelayed(null, new ArrayList<>(), delay); + } + + default TestDataLoaderFactory unwrap() { + return this; + } +} diff --git a/src/test/java/org/dataloader/fixtures/parameterized/TestReactiveDataLoaderFactory.java b/src/test/java/org/dataloader/fixtures/parameterized/TestReactiveDataLoaderFactory.java new file mode 100644 index 0000000..d45932c --- /dev/null +++ b/src/test/java/org/dataloader/fixtures/parameterized/TestReactiveDataLoaderFactory.java @@ -0,0 +1,11 @@ +package org.dataloader.fixtures.parameterized; + +import org.dataloader.DataLoader; +import org.dataloader.DataLoaderOptions; + +import java.util.Collection; +import java.util.List; + +public interface TestReactiveDataLoaderFactory { + DataLoader idLoaderBlowsUpsAfterN(int N, DataLoaderOptions options, List> loadCalls); +} diff --git a/src/test/java/org/dataloader/impl/PromisedValuesImplTest.java b/src/test/java/org/dataloader/impl/PromisedValuesImplTest.java index 3c9ce65..6073319 100644 --- a/src/test/java/org/dataloader/impl/PromisedValuesImplTest.java +++ b/src/test/java/org/dataloader/impl/PromisedValuesImplTest.java @@ -1,6 +1,6 @@ package org.dataloader.impl; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.Arrays; import java.util.Collections; @@ -12,12 +12,12 @@ import static java.util.Arrays.asList; import static java.util.concurrent.CompletableFuture.supplyAsync; import static org.awaitility.Awaitility.await; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; -import static org.junit.Assert.assertThat; public class PromisedValuesImplTest { diff --git a/src/test/java/org/dataloader/instrumentation/CapturingInstrumentation.java b/src/test/java/org/dataloader/instrumentation/CapturingInstrumentation.java new file mode 100644 index 0000000..b11bc27 --- /dev/null +++ 
b/src/test/java/org/dataloader/instrumentation/CapturingInstrumentation.java @@ -0,0 +1,83 @@ +package org.dataloader.instrumentation; + +import org.dataloader.BatchLoaderEnvironment; +import org.dataloader.DataLoader; +import org.dataloader.DispatchResult; + +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; + +class CapturingInstrumentation implements DataLoaderInstrumentation { + protected String name; + protected List methods = new ArrayList<>(); + + public CapturingInstrumentation(String name) { + this.name = name; + } + + public String getName() { + return name; + } + + public List methods() { + return methods; + } + + public List notLoads() { + return methods.stream().filter(method -> !method.contains("beginLoad")).collect(Collectors.toList()); + } + + public List onlyLoads() { + return methods.stream().filter(method -> method.contains("beginLoad")).collect(Collectors.toList()); + } + + + @Override + public DataLoaderInstrumentationContext beginLoad(DataLoader dataLoader, Object key, Object loadContext) { + methods.add(name + "_beginLoad" +"_k:" + key); + return new DataLoaderInstrumentationContext<>() { + @Override + public void onDispatched() { + methods.add(name + "_beginLoad_onDispatched"+"_k:" + key); + } + + @Override + public void onCompleted(Object result, Throwable t) { + methods.add(name + "_beginLoad_onCompleted"+"_k:" + key); + } + }; + } + + @Override + public DataLoaderInstrumentationContext> beginDispatch(DataLoader dataLoader) { + methods.add(name + "_beginDispatch"); + return new DataLoaderInstrumentationContext<>() { + @Override + public void onDispatched() { + methods.add(name + "_beginDispatch_onDispatched"); + } + + @Override + public void onCompleted(DispatchResult result, Throwable t) { + methods.add(name + "_beginDispatch_onCompleted"); + } + }; + } + + @Override + public DataLoaderInstrumentationContext> beginBatchLoader(DataLoader dataLoader, List keys, BatchLoaderEnvironment environment) { + methods.add(name + "_beginBatchLoader"); + return new DataLoaderInstrumentationContext<>() { + @Override + public void onDispatched() { + methods.add(name + "_beginBatchLoader_onDispatched"); + } + + @Override + public void onCompleted(List result, Throwable t) { + methods.add(name + "_beginBatchLoader_onCompleted"); + } + }; + } +} diff --git a/src/test/java/org/dataloader/instrumentation/CapturingInstrumentationReturnsNull.java b/src/test/java/org/dataloader/instrumentation/CapturingInstrumentationReturnsNull.java new file mode 100644 index 0000000..4d2f0f4 --- /dev/null +++ b/src/test/java/org/dataloader/instrumentation/CapturingInstrumentationReturnsNull.java @@ -0,0 +1,32 @@ +package org.dataloader.instrumentation; + +import org.dataloader.BatchLoaderEnvironment; +import org.dataloader.DataLoader; +import org.dataloader.DispatchResult; + +import java.util.List; + +class CapturingInstrumentationReturnsNull extends CapturingInstrumentation { + + public CapturingInstrumentationReturnsNull(String name) { + super(name); + } + + @Override + public DataLoaderInstrumentationContext beginLoad(DataLoader dataLoader, Object key, Object loadContext) { + methods.add(name + "_beginLoad" +"_k:" + key); + return null; + } + + @Override + public DataLoaderInstrumentationContext> beginDispatch(DataLoader dataLoader) { + methods.add(name + "_beginDispatch"); + return null; + } + + @Override + public DataLoaderInstrumentationContext> beginBatchLoader(DataLoader dataLoader, List keys, BatchLoaderEnvironment environment) { + methods.add(name + 
"_beginBatchLoader"); + return null; + } +} diff --git a/src/test/java/org/dataloader/instrumentation/ChainedDataLoaderInstrumentationTest.java b/src/test/java/org/dataloader/instrumentation/ChainedDataLoaderInstrumentationTest.java new file mode 100644 index 0000000..0d5ddb1 --- /dev/null +++ b/src/test/java/org/dataloader/instrumentation/ChainedDataLoaderInstrumentationTest.java @@ -0,0 +1,130 @@ +package org.dataloader.instrumentation; + +import org.dataloader.DataLoader; +import org.dataloader.DataLoaderFactory; +import org.dataloader.DataLoaderOptions; +import org.dataloader.fixtures.TestKit; +import org.dataloader.fixtures.parameterized.TestDataLoaderFactory; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; + +import java.util.List; +import java.util.concurrent.CompletableFuture; + +import static org.awaitility.Awaitility.await; +import static org.dataloader.DataLoaderOptions.newOptionsBuilder; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; + +public class ChainedDataLoaderInstrumentationTest { + + CapturingInstrumentation capturingA; + CapturingInstrumentation capturingB; + CapturingInstrumentation capturingButReturnsNull; + + + @BeforeEach + void setUp() { + capturingA = new CapturingInstrumentation("A"); + capturingB = new CapturingInstrumentation("B"); + capturingButReturnsNull = new CapturingInstrumentationReturnsNull("NULL"); + } + + @Test + void canChainTogetherZeroInstrumentation() { + // just to prove its useless but harmless + ChainedDataLoaderInstrumentation chainedItn = new ChainedDataLoaderInstrumentation(); + + DataLoaderOptions options = newOptionsBuilder().setInstrumentation(chainedItn).build(); + + DataLoader dl = DataLoaderFactory.newDataLoader(TestKit.keysAsValues(), options); + + dl.load("A"); + dl.load("B"); + + CompletableFuture> dispatch = dl.dispatch(); + + await().until(dispatch::isDone); + assertThat(dispatch.join(), equalTo(List.of("A", "B"))); + } + + @Test + void canChainTogetherOneInstrumentation() { + CapturingInstrumentation capturingA = new CapturingInstrumentation("A"); + + ChainedDataLoaderInstrumentation chainedItn = new ChainedDataLoaderInstrumentation() + .add(capturingA); + + DataLoaderOptions options = newOptionsBuilder().setInstrumentation(chainedItn).build(); + + DataLoader dl = DataLoaderFactory.newDataLoader(TestKit.keysAsValues(), options); + + dl.load("X"); + dl.load("Y"); + + CompletableFuture> dispatch = dl.dispatch(); + + await().until(dispatch::isDone); + + assertThat(capturingA.notLoads(), equalTo(List.of("A_beginDispatch", + "A_beginBatchLoader", "A_beginBatchLoader_onDispatched", "A_beginBatchLoader_onCompleted", + "A_beginDispatch_onDispatched", "A_beginDispatch_onCompleted"))); + + assertThat(capturingA.onlyLoads(), equalTo(List.of( + "A_beginLoad_k:X", "A_beginLoad_onDispatched_k:X", "A_beginLoad_k:Y", "A_beginLoad_onDispatched_k:Y", + "A_beginLoad_onCompleted_k:X", "A_beginLoad_onCompleted_k:Y" + ))); + } + + + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void canChainTogetherManyInstrumentationsWithDifferentBatchLoaders(TestDataLoaderFactory factory) { + + ChainedDataLoaderInstrumentation chainedItn = new ChainedDataLoaderInstrumentation() + .add(capturingA) + .add(capturingB) + .add(capturingButReturnsNull); + + DataLoaderOptions options = 
+
+        DataLoader<String, String> dl = factory.idLoader(options);
+
+        dl.load("X");
+        dl.load("Y");
+
+        CompletableFuture<List<String>> dispatch = dl.dispatch();
+
+        await().until(dispatch::isDone);
+
+        //
+        // A_beginBatchLoader happens before A_beginDispatch_onDispatched because these are sync
+        // and not async - a batch scheduler or async batch loader would change that
+        //
+        assertThat(capturingA.notLoads(), equalTo(List.of("A_beginDispatch",
+                "A_beginBatchLoader", "A_beginBatchLoader_onDispatched", "A_beginBatchLoader_onCompleted",
+                "A_beginDispatch_onDispatched", "A_beginDispatch_onCompleted")));
+
+        assertThat(capturingA.onlyLoads(), equalTo(List.of(
+                "A_beginLoad_k:X", "A_beginLoad_onDispatched_k:X", "A_beginLoad_k:Y", "A_beginLoad_onDispatched_k:Y",
+                "A_beginLoad_onCompleted_k:X", "A_beginLoad_onCompleted_k:Y"
+        )));
+
+        assertThat(capturingB.notLoads(), equalTo(List.of("B_beginDispatch",
+                "B_beginBatchLoader", "B_beginBatchLoader_onDispatched", "B_beginBatchLoader_onCompleted",
+                "B_beginDispatch_onDispatched", "B_beginDispatch_onCompleted")));
+
+        // it returned null on all its contexts - nothing to call back on
+        assertThat(capturingButReturnsNull.notLoads(), equalTo(List.of("NULL_beginDispatch", "NULL_beginBatchLoader")));
+    }
+
+    @Test
+    void addition_works() {
+        ChainedDataLoaderInstrumentation chainedItn = new ChainedDataLoaderInstrumentation()
+                .add(capturingA).prepend(capturingB).addAll(List.of(capturingButReturnsNull));
+
+        assertThat(chainedItn.getInstrumentations(), equalTo(List.of(capturingB, capturingA, capturingButReturnsNull)));
+    }
+}
\ No newline at end of file
diff --git a/src/test/java/org/dataloader/instrumentation/DataLoaderInstrumentationTest.java b/src/test/java/org/dataloader/instrumentation/DataLoaderInstrumentationTest.java
new file mode 100644
index 0000000..97f21d3
--- /dev/null
+++ b/src/test/java/org/dataloader/instrumentation/DataLoaderInstrumentationTest.java
@@ -0,0 +1,171 @@
+package org.dataloader.instrumentation;
+
+import org.dataloader.BatchLoader;
+import org.dataloader.BatchLoaderEnvironment;
+import org.dataloader.DataLoader;
+import org.dataloader.DataLoaderFactory;
+import org.dataloader.DataLoaderOptions;
+import org.dataloader.DispatchResult;
+import org.dataloader.fixtures.Stopwatch;
+import org.dataloader.fixtures.TestKit;
+import org.junit.jupiter.api.Test;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.atomic.AtomicReference;
+
+import static org.awaitility.Awaitility.await;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.is;
+
+public class DataLoaderInstrumentationTest {
+
+    BatchLoader<String, String> snoozingBatchLoader = keys -> CompletableFuture.supplyAsync(() -> {
+        TestKit.snooze(100);
+        return keys;
+    });
+
+    @Test
+    void canMonitorLoading() {
+        AtomicReference<DataLoader<?, ?>> dlRef = new AtomicReference<>();
+
+        CapturingInstrumentation instrumentation = new CapturingInstrumentation("x") {
+
+            @Override
+            public DataLoaderInstrumentationContext<Object> beginLoad(DataLoader<?, ?> dataLoader, Object key, Object loadContext) {
+                DataLoaderInstrumentationContext<Object> superCtx = super.beginLoad(dataLoader, key, loadContext);
+                dlRef.set(dataLoader);
+                return superCtx;
+            }
+
+            @Override
+            public DataLoaderInstrumentationContext<List<?>> beginBatchLoader(DataLoader<?, ?> dataLoader, List<?> keys, BatchLoaderEnvironment environment) {
+                return DataLoaderInstrumentationHelper.noOpCtx();
+            }
+        };
+
+        DataLoaderOptions options = new DataLoaderOptions()
+                .setInstrumentation(instrumentation)
+                .setMaxBatchSize(5);
+
+        DataLoader<String, String> dl = DataLoaderFactory.newDataLoader(snoozingBatchLoader, options);
+
+        List<String> keys = new ArrayList<>();
+        for (int i = 0; i < 3; i++) {
+            String key = "X" + i;
+            keys.add(key);
+            dl.load(key);
+        }
+
+        // load a key that is cached
+        dl.load("X0");
+
+        CompletableFuture<List<String>> dispatch = dl.dispatch();
+
+        await().until(dispatch::isDone);
+        assertThat(dlRef.get(), is(dl));
+        assertThat(dispatch.join(), equalTo(keys));
+
+        // the batch loading means they start and are instrumentation-dispatched before they all end up completing
+        assertThat(instrumentation.onlyLoads(),
+                equalTo(List.of(
+                        "x_beginLoad_k:X0", "x_beginLoad_onDispatched_k:X0",
+                        "x_beginLoad_k:X1", "x_beginLoad_onDispatched_k:X1",
+                        "x_beginLoad_k:X2", "x_beginLoad_onDispatched_k:X2",
+                        "x_beginLoad_k:X0", "x_beginLoad_onDispatched_k:X0", // second cached call counts
+                        "x_beginLoad_onCompleted_k:X0",
+                        "x_beginLoad_onCompleted_k:X0", // each load call counts
+                        "x_beginLoad_onCompleted_k:X1", "x_beginLoad_onCompleted_k:X2")));
+
+    }
+
+
+    @Test
+    void canMonitorDispatching() {
+        Stopwatch stopwatch = Stopwatch.stopwatchUnStarted();
+        AtomicReference<DataLoader<?, ?>> dlRef = new AtomicReference<>();
+
+        DataLoaderInstrumentation instrumentation = new DataLoaderInstrumentation() {
+
+            @Override
+            public DataLoaderInstrumentationContext<DispatchResult<?>> beginDispatch(DataLoader<?, ?> dataLoader) {
+                dlRef.set(dataLoader);
+                stopwatch.start();
+                return new DataLoaderInstrumentationContext<>() {
+                    @Override
+                    public void onCompleted(DispatchResult<?> result, Throwable t) {
+                        stopwatch.stop();
+                    }
+                };
+            }
+
+            @Override
+            public DataLoaderInstrumentationContext<List<?>> beginBatchLoader(DataLoader<?, ?> dataLoader, List<?> keys, BatchLoaderEnvironment environment) {
+                return DataLoaderInstrumentationHelper.noOpCtx();
+            }
+        };
+
+        DataLoaderOptions options = new DataLoaderOptions()
+                .setInstrumentation(instrumentation)
+                .setMaxBatchSize(5);
+
+        DataLoader<String, String> dl = DataLoaderFactory.newDataLoader(snoozingBatchLoader, options);
+
+        List<String> keys = new ArrayList<>();
+        for (int i = 0; i < 20; i++) {
+            String key = "X" + i;
+            keys.add(key);
+            dl.load(key);
+        }
+
+        CompletableFuture<List<String>> dispatch = dl.dispatch();
+
+        await().until(dispatch::isDone);
+        // we must have called batch load 4 times at 100ms snooze per call
+        // but it's in parallel via supplyAsync
+        assertThat(stopwatch.elapsed(), greaterThan(75L));
+        assertThat(dlRef.get(), is(dl));
+        assertThat(dispatch.join(), equalTo(keys));
+    }
+
+    @Test
+    void canMonitorBatchLoading() {
+        Stopwatch stopwatch = Stopwatch.stopwatchUnStarted();
+        AtomicReference<BatchLoaderEnvironment> beRef = new AtomicReference<>();
+        AtomicReference<DataLoader<?, ?>> dlRef = new AtomicReference<>();
+
+        DataLoaderInstrumentation instrumentation = new DataLoaderInstrumentation() {
+
+            @Override
+            public DataLoaderInstrumentationContext<List<?>> beginBatchLoader(DataLoader<?, ?> dataLoader, List<?> keys, BatchLoaderEnvironment environment) {
+                dlRef.set(dataLoader);
+                beRef.set(environment);
+
+                stopwatch.start();
+                return new DataLoaderInstrumentationContext<>() {
+                    @Override
+                    public void onCompleted(List<?> result, Throwable t) {
+                        stopwatch.stop();
+                    }
+                };
+            }
+        };
+
+        DataLoaderOptions options = new DataLoaderOptions().setInstrumentation(instrumentation);
+        DataLoader<String, String> dl = DataLoaderFactory.newDataLoader(snoozingBatchLoader, options);
+
+        dl.load("A", "kcA");
+        dl.load("B", "kcB");
+
+        CompletableFuture<List<String>> dispatch = dl.dispatch();
+
+        await().until(dispatch::isDone);
+        assertThat(stopwatch.elapsed(), greaterThan(50L));
+        assertThat(dlRef.get(), is(dl));
+        assertThat(beRef.get().getKeyContexts().keySet(), equalTo(Set.of("A", "B")));
+    }
+}
\ No newline at end of file
diff --git a/src/test/java/org/dataloader/instrumentation/DataLoaderRegistryInstrumentationTest.java b/src/test/java/org/dataloader/instrumentation/DataLoaderRegistryInstrumentationTest.java
new file mode 100644
index 0000000..49ccf0e
--- /dev/null
+++ b/src/test/java/org/dataloader/instrumentation/DataLoaderRegistryInstrumentationTest.java
@@ -0,0 +1,231 @@
+package org.dataloader.instrumentation;
+
+import org.dataloader.DataLoader;
+import org.dataloader.DataLoaderOptions;
+import org.dataloader.DataLoaderRegistry;
+import org.dataloader.fixtures.TestKit;
+import org.dataloader.fixtures.parameterized.TestDataLoaderFactory;
+import org.dataloader.registries.ScheduledDataLoaderRegistry;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.MethodSource;
+
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.CompletableFuture;
+
+import static org.awaitility.Awaitility.await;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
+
+public class DataLoaderRegistryInstrumentationTest {
+    DataLoader<String, String> dlX;
+    DataLoader<String, String> dlY;
+    DataLoader<String, String> dlZ;
+
+    CapturingInstrumentation instrA;
+    CapturingInstrumentation instrB;
+    ChainedDataLoaderInstrumentation chainedInstrA;
+    ChainedDataLoaderInstrumentation chainedInstrB;
+
+    @BeforeEach
+    void setUp() {
+        dlX = TestKit.idLoader();
+        dlY = TestKit.idLoader();
+        dlZ = TestKit.idLoader();
+        instrA = new CapturingInstrumentation("A");
+        instrB = new CapturingInstrumentation("B");
+        chainedInstrA = new ChainedDataLoaderInstrumentation().add(instrA);
+        chainedInstrB = new ChainedDataLoaderInstrumentation().add(instrB);
+    }
+
+    @Test
+    void canInstrumentRegisteredDLsViaBuilder() {
+
+        assertThat(dlX.getOptions().getInstrumentation(), equalTo(DataLoaderInstrumentationHelper.NOOP_INSTRUMENTATION));
+
+        DataLoaderRegistry registry = DataLoaderRegistry.newRegistry()
+                .instrumentation(chainedInstrA)
+                .register("X", dlX)
+                .register("Y", dlY)
+                .register("Z", dlZ)
+                .build();
+
+        assertThat(registry.getInstrumentation(), equalTo(chainedInstrA));
+
+        for (String key : List.of("X", "Y", "Z")) {
+            DataLoaderInstrumentation instrumentation = registry.getDataLoader(key).getOptions().getInstrumentation();
+            assertThat(instrumentation, instanceOf(ChainedDataLoaderInstrumentation.class));
+            List<DataLoaderInstrumentation> instrumentations = ((ChainedDataLoaderInstrumentation) instrumentation).getInstrumentations();
+            assertThat(instrumentations, equalTo(List.of(instrA)));
+        }
+    }
+
+    @Test
+    void canInstrumentRegisteredDLsViaBuilderCombined() {
+
+        DataLoaderRegistry registry1 = DataLoaderRegistry.newRegistry()
+                .register("X", dlX)
+                .register("Y", dlY)
+                .build();
+
+        DataLoaderRegistry registry = DataLoaderRegistry.newRegistry()
+                .instrumentation(chainedInstrA)
+                .register("Z", dlZ)
+                .registerAll(registry1)
+                .build();
+
+        for (String key : List.of("X", "Y", "Z")) {
+            DataLoaderInstrumentation instrumentation = registry.getDataLoader(key).getOptions().getInstrumentation();
+            assertThat(instrumentation, instanceOf(ChainedDataLoaderInstrumentation.class));
+            List<DataLoaderInstrumentation> instrumentations = ((ChainedDataLoaderInstrumentation)
instrumentation).getInstrumentations(); + assertThat(instrumentations, equalTo(List.of(instrA))); + } + } + + @Test + void canInstrumentViaMutativeRegistration() { + + DataLoaderRegistry registry = DataLoaderRegistry.newRegistry() + .instrumentation(chainedInstrA) + .build(); + + registry.register("X", dlX); + registry.computeIfAbsent("Y", l -> dlY); + registry.computeIfAbsent("Z", l -> dlZ); + + for (String key : List.of("X", "Y", "Z")) { + DataLoaderInstrumentation instrumentation = registry.getDataLoader(key).getOptions().getInstrumentation(); + assertThat(instrumentation, instanceOf(ChainedDataLoaderInstrumentation.class)); + List instrumentations = ((ChainedDataLoaderInstrumentation) instrumentation).getInstrumentations(); + assertThat(instrumentations, equalTo(List.of(instrA))); + } + } + + @Test + void wontDoAnyThingIfThereIsNoRegistryInstrumentation() { + DataLoaderRegistry registry = DataLoaderRegistry.newRegistry() + .register("X", dlX) + .register("Y", dlY) + .register("Z", dlZ) + .build(); + + for (String key : List.of("X", "Y", "Z")) { + DataLoaderInstrumentation instrumentation = registry.getDataLoader(key).getOptions().getInstrumentation(); + assertThat(instrumentation, equalTo(DataLoaderInstrumentationHelper.NOOP_INSTRUMENTATION)); + } + } + + @Test + void wontDoAnyThingIfThereTheyAreTheSameInstrumentationAlready() { + DataLoader newX = dlX.transform(builder -> builder.options(dlX.getOptions().setInstrumentation(instrA))); + DataLoader newY = dlX.transform(builder -> builder.options(dlY.getOptions().setInstrumentation(instrA))); + DataLoader newZ = dlX.transform(builder -> builder.options(dlZ.getOptions().setInstrumentation(instrA))); + DataLoaderRegistry registry = DataLoaderRegistry.newRegistry() + .instrumentation(instrA) + .register("X", newX) + .register("Y", newY) + .register("Z", newZ) + .build(); + + Map> dls = Map.of("X", newX, "Y", newY, "Z", newZ); + + assertThat(registry.getInstrumentation(), equalTo(instrA)); + + for (String key : List.of("X", "Y", "Z")) { + DataLoader dataLoader = registry.getDataLoader(key); + DataLoaderInstrumentation instrumentation = dataLoader.getOptions().getInstrumentation(); + assertThat(instrumentation, equalTo(instrA)); + // it's the same DL - it's not changed because it has the same instrumentation + assertThat(dls.get(key), equalTo(dataLoader)); + } + } + + @Test + void ifTheDLHasAInstrumentationThenItsTurnedIntoAChainedOne() { + DataLoaderOptions options = dlX.getOptions().setInstrumentation(instrA); + DataLoader newX = dlX.transform(builder -> builder.options(options)); + + DataLoaderRegistry registry = DataLoaderRegistry.newRegistry() + .instrumentation(instrB) + .register("X", newX) + .build(); + + DataLoader dataLoader = registry.getDataLoader("X"); + DataLoaderInstrumentation instrumentation = dataLoader.getOptions().getInstrumentation(); + assertThat(instrumentation, instanceOf(ChainedDataLoaderInstrumentation.class)); + + List instrumentations = ((ChainedDataLoaderInstrumentation) instrumentation).getInstrumentations(); + // it gets turned into a chained one and the registry one goes first + assertThat(instrumentations, equalTo(List.of(instrB, instrA))); + } + + @Test + void chainedInstrumentationsWillBeCombined() { + DataLoaderOptions options = dlX.getOptions().setInstrumentation(chainedInstrB); + DataLoader newX = dlX.transform(builder -> builder.options(options)); + + DataLoaderRegistry registry = DataLoaderRegistry.newRegistry() + .instrumentation(instrA) + .register("X", newX) + .build(); + + DataLoader dataLoader = 
registry.getDataLoader("X"); + DataLoaderInstrumentation instrumentation = dataLoader.getOptions().getInstrumentation(); + assertThat(instrumentation, instanceOf(ChainedDataLoaderInstrumentation.class)); + + List instrumentations = ((ChainedDataLoaderInstrumentation) instrumentation).getInstrumentations(); + // it gets turned into a chained one and the registry one goes first + assertThat(instrumentations, equalTo(List.of(instrA, instrB))); + } + + @SuppressWarnings("resource") + @Test + void canInstrumentScheduledRegistryViaBuilder() { + + assertThat(dlX.getOptions().getInstrumentation(), equalTo(DataLoaderInstrumentationHelper.NOOP_INSTRUMENTATION)); + + ScheduledDataLoaderRegistry registry = ScheduledDataLoaderRegistry.newScheduledRegistry() + .instrumentation(chainedInstrA) + .register("X", dlX) + .register("Y", dlY) + .register("Z", dlZ) + .build(); + + assertThat(registry.getInstrumentation(), equalTo(chainedInstrA)); + + for (String key : List.of("X", "Y", "Z")) { + DataLoaderInstrumentation instrumentation = registry.getDataLoader(key).getOptions().getInstrumentation(); + assertThat(instrumentation, instanceOf(ChainedDataLoaderInstrumentation.class)); + List instrumentations = ((ChainedDataLoaderInstrumentation) instrumentation).getInstrumentations(); + assertThat(instrumentations, equalTo(List.of(instrA))); + } + } + + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void endToEndIntegrationTest(TestDataLoaderFactory factory) { + DataLoader dl = factory.idLoader(); + + DataLoaderRegistry registry = DataLoaderRegistry.newRegistry() + .instrumentation(instrA) + .register("X", dl) + .build(); + + // since the data-loader changed when registered you MUST get the data loader from the registry + // not direct to the old one + DataLoader dataLoader = registry.getDataLoader("X"); + CompletableFuture loadA = dataLoader.load("A"); + + registry.dispatchAll(); + + await().until(loadA::isDone); + assertThat(loadA.join(), equalTo("A")); + + assertThat(instrA.notLoads(), equalTo(List.of("A_beginDispatch", + "A_beginBatchLoader", "A_beginBatchLoader_onDispatched", "A_beginBatchLoader_onCompleted", + "A_beginDispatch_onDispatched", "A_beginDispatch_onCompleted"))); + } +} \ No newline at end of file diff --git a/src/test/java/org/dataloader/instrumentation/SimpleDataLoaderInstrumentationContextTest.java b/src/test/java/org/dataloader/instrumentation/SimpleDataLoaderInstrumentationContextTest.java new file mode 100644 index 0000000..38328eb --- /dev/null +++ b/src/test/java/org/dataloader/instrumentation/SimpleDataLoaderInstrumentationContextTest.java @@ -0,0 +1,49 @@ +package org.dataloader.instrumentation; + +import org.hamcrest.Matchers; +import org.junit.jupiter.api.Test; + +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.nullValue; + +public class SimpleDataLoaderInstrumentationContextTest { + + @Test + void canRunCompletedCodeAsExpected() { + AtomicReference actual = new AtomicReference<>(); + AtomicReference actualErr = new AtomicReference<>(); + + DataLoaderInstrumentationContext ctx = DataLoaderInstrumentationHelper.whenCompleted((r, err) -> { + actualErr.set(err); + actual.set(r); + }); + + ctx.onDispatched(); // nothing happens + assertThat(actual.get(), nullValue()); + assertThat(actualErr.get(), nullValue()); + + ctx.onCompleted("X", null); + assertThat(actual.get(), 
Matchers.equalTo("X")); + assertThat(actualErr.get(), nullValue()); + + ctx.onCompleted(null, new RuntimeException()); + assertThat(actual.get(), nullValue()); + assertThat(actualErr.get(), Matchers.instanceOf(RuntimeException.class)); + } + + @Test + void canRunOnDispatchCodeAsExpected() { + AtomicBoolean dispatchedCalled = new AtomicBoolean(); + + DataLoaderInstrumentationContext ctx = DataLoaderInstrumentationHelper.whenDispatched(() -> dispatchedCalled.set(true)); + + ctx.onCompleted("X", null); // nothing happens + assertThat(dispatchedCalled.get(), Matchers.equalTo(false)); + + ctx.onDispatched(); + assertThat(dispatchedCalled.get(), Matchers.equalTo(true)); + } +} \ No newline at end of file diff --git a/src/test/java/org/dataloader/registries/DispatchPredicateTest.java b/src/test/java/org/dataloader/registries/DispatchPredicateTest.java index f241c2f..07a7416 100644 --- a/src/test/java/org/dataloader/registries/DispatchPredicateTest.java +++ b/src/test/java/org/dataloader/registries/DispatchPredicateTest.java @@ -4,12 +4,12 @@ import org.dataloader.DataLoader; import org.dataloader.fixtures.TestKit; import org.dataloader.fixtures.TestingClock; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.time.Duration; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; public class DispatchPredicateTest { diff --git a/src/test/java/org/dataloader/registries/ScheduledDataLoaderRegistryPredicateTest.java b/src/test/java/org/dataloader/registries/ScheduledDataLoaderRegistryPredicateTest.java index 4eab564..94f5cff 100644 --- a/src/test/java/org/dataloader/registries/ScheduledDataLoaderRegistryPredicateTest.java +++ b/src/test/java/org/dataloader/registries/ScheduledDataLoaderRegistryPredicateTest.java @@ -3,7 +3,7 @@ import org.dataloader.BatchLoader; import org.dataloader.DataLoader; import org.dataloader.DataLoaderRegistry; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.time.Duration; import java.util.concurrent.CompletableFuture; @@ -12,10 +12,9 @@ import static org.awaitility.Awaitility.await; import static org.dataloader.DataLoaderFactory.newDataLoader; import static org.dataloader.fixtures.TestKit.asSet; -import static org.dataloader.registries.DispatchPredicate.DISPATCH_NEVER; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; public class ScheduledDataLoaderRegistryPredicateTest { final BatchLoader identityBatchLoader = CompletableFuture::completedFuture; diff --git a/src/test/java/org/dataloader/registries/ScheduledDataLoaderRegistryTest.java b/src/test/java/org/dataloader/registries/ScheduledDataLoaderRegistryTest.java index 146c186..e89939c 100644 --- a/src/test/java/org/dataloader/registries/ScheduledDataLoaderRegistryTest.java +++ b/src/test/java/org/dataloader/registries/ScheduledDataLoaderRegistryTest.java @@ -1,14 +1,16 @@ package org.dataloader.registries; -import junit.framework.TestCase; import org.awaitility.core.ConditionTimeoutException; import org.dataloader.DataLoader; -import org.dataloader.DataLoaderFactory; import org.dataloader.DataLoaderRegistry; -import org.dataloader.fixtures.TestKit; +import org.dataloader.fixtures.parameterized.TestDataLoaderFactory; +import org.junit.jupiter.api.Test; +import 
org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; import java.time.Duration; import java.util.ArrayList; +import java.util.Collection; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.concurrent.Executors; @@ -20,28 +22,33 @@ import static java.util.Collections.singletonList; import static org.awaitility.Awaitility.await; import static org.awaitility.Duration.TWO_SECONDS; -import static org.dataloader.fixtures.TestKit.keysAsValues; import static org.dataloader.fixtures.TestKit.snooze; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; -import static org.junit.Assert.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; -public class ScheduledDataLoaderRegistryTest extends TestCase { +public class ScheduledDataLoaderRegistryTest { DispatchPredicate alwaysDispatch = (key, dl) -> true; DispatchPredicate neverDispatch = (key, dl) -> false; - public void test_basic_setup_works_like_a_normal_dlr() { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void basic_setup_works_like_a_normal_dlr(TestDataLoaderFactory factory) { - List> aCalls = new ArrayList<>(); - List> bCalls = new ArrayList<>(); + List> aCalls = new ArrayList<>(); + List> bCalls = new ArrayList<>(); - DataLoader dlA = TestKit.idLoader(aCalls); + DataLoader dlA = factory.idLoader(aCalls); dlA.load("AK1"); dlA.load("AK2"); - DataLoader dlB = TestKit.idLoader(bCalls); + DataLoader dlB = factory.idLoader(bCalls); dlB.load("BK1"); dlB.load("BK2"); @@ -63,10 +70,12 @@ public void test_basic_setup_works_like_a_normal_dlr() { assertThat(bCalls, equalTo(singletonList(asList("BK1", "BK2")))); } - public void test_predicate_always_false() { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void predicate_always_false(TestDataLoaderFactory factory) { - List> calls = new ArrayList<>(); - DataLoader dlA = DataLoaderFactory.newDataLoader(keysAsValues(calls)); + List> calls = new ArrayList<>(); + DataLoader dlA = factory.idLoader(calls); dlA.load("K1"); dlA.load("K2"); @@ -92,14 +101,16 @@ public void test_predicate_always_false() { assertThat(calls.size(), equalTo(0)); } - public void test_predicate_that_eventually_returns_true() { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void predicate_that_eventually_returns_true(TestDataLoaderFactory factory) { AtomicInteger counter = new AtomicInteger(); DispatchPredicate neverDispatch = (key, dl) -> counter.incrementAndGet() > 5; - List> calls = new ArrayList<>(); - DataLoader dlA = DataLoaderFactory.newDataLoader(keysAsValues(calls)); + List> calls = new ArrayList<>(); + DataLoader dlA = factory.idLoader(calls); CompletableFuture p1 = dlA.load("K1"); CompletableFuture p2 = dlA.load("K2"); @@ -123,9 +134,11 @@ public void test_predicate_that_eventually_returns_true() { assertTrue(p2.isDone()); } - public void test_dispatchAllWithCountImmediately() { - List> calls = new ArrayList<>(); - DataLoader dlA = DataLoaderFactory.newDataLoader(keysAsValues(calls)); + @ParameterizedTest + 
@MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void dispatchAllWithCountImmediately(TestDataLoaderFactory factory) { + List> calls = new ArrayList<>(); + DataLoader dlA = factory.idLoader(calls); dlA.load("K1"); dlA.load("K2"); @@ -140,9 +153,11 @@ public void test_dispatchAllWithCountImmediately() { assertThat(calls, equalTo(singletonList(asList("K1", "K2")))); } - public void test_dispatchAllImmediately() { - List> calls = new ArrayList<>(); - DataLoader dlA = DataLoaderFactory.newDataLoader(keysAsValues(calls)); + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void dispatchAllImmediately(TestDataLoaderFactory factory) { + List> calls = new ArrayList<>(); + DataLoader dlA = factory.idLoader(calls); dlA.load("K1"); dlA.load("K2"); @@ -156,12 +171,14 @@ public void test_dispatchAllImmediately() { assertThat(calls, equalTo(singletonList(asList("K1", "K2")))); } - public void test_rescheduleNow() { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void rescheduleNow(TestDataLoaderFactory factory) { AtomicInteger i = new AtomicInteger(); DispatchPredicate countingPredicate = (dataLoaderKey, dataLoader) -> i.incrementAndGet() > 5; - List> calls = new ArrayList<>(); - DataLoader dlA = DataLoaderFactory.newDataLoader(keysAsValues(calls)); + List> calls = new ArrayList<>(); + DataLoader dlA = factory.idLoader(calls); dlA.load("K1"); dlA.load("K2"); @@ -179,12 +196,14 @@ public void test_rescheduleNow() { assertThat(calls, equalTo(singletonList(asList("K1", "K2")))); } - public void test_it_will_take_out_the_schedule_once_it_dispatches() { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void it_will_take_out_the_schedule_once_it_dispatches(TestDataLoaderFactory factory) { AtomicInteger counter = new AtomicInteger(); DispatchPredicate countingPredicate = (dataLoaderKey, dataLoader) -> counter.incrementAndGet() > 5; - List> calls = new ArrayList<>(); - DataLoader dlA = DataLoaderFactory.newDataLoader(keysAsValues(calls)); + List> calls = new ArrayList<>(); + DataLoader dlA = factory.idLoader(calls); dlA.load("K1"); dlA.load("K2"); @@ -220,14 +239,16 @@ public void test_it_will_take_out_the_schedule_once_it_dispatches() { assertThat(calls, equalTo(asList(asList("K1", "K2"), asList("K3", "K4")))); } - public void test_close_is_a_one_way_door() { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void close_is_a_one_way_door(TestDataLoaderFactory factory) { AtomicInteger counter = new AtomicInteger(); DispatchPredicate countingPredicate = (dataLoaderKey, dataLoader) -> { counter.incrementAndGet(); return false; }; - DataLoader dlA = TestKit.idLoader(); + DataLoader dlA = factory.idLoader(); dlA.load("K1"); dlA.load("K2"); @@ -264,11 +285,13 @@ public void test_close_is_a_one_way_door() { assertEquals(counter.get(), countThen + 1); } - public void test_can_tick_after_first_dispatch_for_chain_data_loaders() { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void can_tick_after_first_dispatch_for_chain_data_loaders(TestDataLoaderFactory factory) { // delays much bigger than the tick rate will mean multiple calls to dispatch - DataLoader dlA = TestKit.idLoaderAsync(Duration.ofMillis(100)); - DataLoader dlB = 
TestKit.idLoaderAsync(Duration.ofMillis(200)); + DataLoader dlA = factory.idLoaderDelayed(Duration.ofMillis(100)); + DataLoader dlB = factory.idLoaderDelayed(Duration.ofMillis(200)); CompletableFuture chainedCF = dlA.load("AK1").thenCompose(dlB::load); @@ -293,11 +316,13 @@ public void test_can_tick_after_first_dispatch_for_chain_data_loaders() { registry.close(); } - public void test_chain_data_loaders_will_hang_if_not_in_ticker_mode() { + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void chain_data_loaders_will_hang_if_not_in_ticker_mode(TestDataLoaderFactory factory) { // delays much bigger than the tick rate will mean multiple calls to dispatch - DataLoader dlA = TestKit.idLoaderAsync(Duration.ofMillis(100)); - DataLoader dlB = TestKit.idLoaderAsync(Duration.ofMillis(200)); + DataLoader dlA = factory.idLoaderDelayed(Duration.ofMillis(100)); + DataLoader dlB = factory.idLoaderDelayed(Duration.ofMillis(200)); CompletableFuture chainedCF = dlA.load("AK1").thenCompose(dlB::load); @@ -325,7 +350,8 @@ public void test_chain_data_loaders_will_hang_if_not_in_ticker_mode() { registry.close(); } - public void test_executors_are_shutdown() { + @Test + public void executors_are_shutdown() { ScheduledDataLoaderRegistry registry = ScheduledDataLoaderRegistry.newScheduledRegistry().build(); ScheduledExecutorService executorService = registry.getScheduledExecutorService(); @@ -345,4 +371,4 @@ public void test_executors_are_shutdown() { } -} \ No newline at end of file +} diff --git a/src/test/java/org/dataloader/scheduler/BatchLoaderSchedulerTest.java b/src/test/java/org/dataloader/scheduler/BatchLoaderSchedulerTest.java index beb7c18..ff9ec8e 100644 --- a/src/test/java/org/dataloader/scheduler/BatchLoaderSchedulerTest.java +++ b/src/test/java/org/dataloader/scheduler/BatchLoaderSchedulerTest.java @@ -3,7 +3,7 @@ import org.dataloader.BatchLoaderEnvironment; import org.dataloader.DataLoader; import org.dataloader.DataLoaderOptions; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.List; import java.util.Map; @@ -20,8 +20,8 @@ import static org.dataloader.fixtures.TestKit.keysAsValues; import static org.dataloader.fixtures.TestKit.keysAsValuesWithContext; import static org.dataloader.fixtures.TestKit.snooze; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; -import static org.junit.Assert.assertThat; public class BatchLoaderSchedulerTest { @@ -36,6 +36,11 @@ public CompletionStage> scheduleBatchLoader(ScheduledBatchLoaderC public CompletionStage> scheduleMappedBatchLoader(ScheduledMappedBatchLoaderCall scheduledCall, List keys, BatchLoaderEnvironment environment) { return scheduledCall.invoke(); } + + @Override + public void scheduleBatchPublisher(ScheduledBatchPublisherCall scheduledCall, List keys, BatchLoaderEnvironment environment) { + scheduledCall.invoke(); + } }; private BatchLoaderScheduler delayedScheduling(int ms) { @@ -56,6 +61,12 @@ public CompletionStage> scheduleMappedBatchLoader(ScheduledMapp return scheduledCall.invoke(); }).thenCompose(Function.identity()); } + + @Override + public void scheduleBatchPublisher(ScheduledBatchPublisherCall scheduledCall, List keys, BatchLoaderEnvironment environment) { + snooze(ms); + scheduledCall.invoke(); + } }; } @@ -139,6 +150,15 @@ public CompletionStage> scheduleMappedBatchLoader(ScheduledMapp return scheduledCall.invoke(); }).thenCompose(Function.identity()); } + + @Override + public void 
scheduleBatchPublisher(ScheduledBatchPublisherCall scheduledCall, List keys, BatchLoaderEnvironment environment) { + CompletableFuture.supplyAsync(() -> { + snooze(10); + scheduledCall.invoke(); + return null; + }); + } }; DataLoaderOptions options = DataLoaderOptions.newOptions().setBatchLoaderScheduler(funkyScheduler); diff --git a/src/test/java/org/dataloader/stats/StatisticsCollectorTest.java b/src/test/java/org/dataloader/stats/StatisticsCollectorTest.java index fbfd5e2..f1cc8d8 100644 --- a/src/test/java/org/dataloader/stats/StatisticsCollectorTest.java +++ b/src/test/java/org/dataloader/stats/StatisticsCollectorTest.java @@ -5,13 +5,13 @@ import org.dataloader.stats.context.IncrementCacheHitCountStatisticsContext; import org.dataloader.stats.context.IncrementLoadCountStatisticsContext; import org.dataloader.stats.context.IncrementLoadErrorCountStatisticsContext; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.concurrent.CompletableFuture; import static java.util.Collections.singletonList; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; -import static org.junit.Assert.assertThat; public class StatisticsCollectorTest { diff --git a/src/test/java/org/dataloader/stats/StatisticsTest.java b/src/test/java/org/dataloader/stats/StatisticsTest.java index b900807..6c90907 100644 --- a/src/test/java/org/dataloader/stats/StatisticsTest.java +++ b/src/test/java/org/dataloader/stats/StatisticsTest.java @@ -1,11 +1,11 @@ package org.dataloader.stats; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.Map; +import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; -import static org.junit.Assert.assertThat; public class StatisticsTest {