diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..10ef831 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,10 @@ +version: 2 +updates: + - package-ecosystem: "gradle" + directory: "/" + schedule: + interval: "weekly" + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml index 01b89bc..3be85c9 100644 --- a/.github/workflows/master.yml +++ b/.github/workflows/master.yml @@ -10,22 +10,24 @@ jobs: env: MAVEN_CENTRAL_USER: ${{ secrets.MAVEN_CENTRAL_USER }} MAVEN_CENTRAL_PASSWORD: ${{ secrets.MAVEN_CENTRAL_PASSWORD }} + MAVEN_CENTRAL_USER_NEW: ${{ secrets.MAVEN_CENTRAL_USER_NEW }} + MAVEN_CENTRAL_PASSWORD_NEW: ${{ secrets.MAVEN_CENTRAL_PASSWORD_NEW }} MAVEN_CENTRAL_PGP_KEY: ${{ secrets.MAVEN_CENTRAL_PGP_KEY }} steps: - - uses: actions/checkout@v4 - - uses: gradle/actions/wrapper-validation@v3 - - name: Set up JDK 11 - uses: actions/setup-java@v4 + - uses: actions/checkout@v5 + - uses: gradle/actions/wrapper-validation@v5 + - name: Set up JDK 21 + uses: actions/setup-java@v5 with: - java-version: '11' - distribution: 'temurin' + java-version: '21' + distribution: 'corretto' check-latest: true # Configure Gradle for optimal use in GiHub Actions, including caching of downloaded dependencies. # See: https://github.com/gradle/actions/blob/main/setup-gradle/README.md - name: Setup Gradle - uses: gradle/actions/setup-gradle@v4 + uses: gradle/actions/setup-gradle@v5 - name: build test and publish - run: ./gradlew assemble && ./gradlew check --info && ./gradlew publishToSonatype closeAndReleaseSonatypeStagingRepository -x check --info --stacktrace + run: ./gradlew assemble && ./gradlew check --info && ./gradlew jcstress && ./gradlew publishToSonatype closeAndReleaseSonatypeStagingRepository -x check --info --stacktrace env: CI: true diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index f16bf96..41a1e88 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -13,19 +13,19 @@ jobs: buildAndTest: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 - - uses: gradle/actions/wrapper-validation@v3 - - name: Set up JDK 11 - uses: actions/setup-java@v4 + - uses: actions/checkout@v5 + - uses: gradle/actions/wrapper-validation@v5 + - name: Set up JDK 21 + uses: actions/setup-java@v5 with: - java-version: '11' - distribution: 'temurin' + java-version: '21' + distribution: 'corretto' check-latest: true # Configure Gradle for optimal use in GiHub Actions, including caching of downloaded dependencies. 
# See: https://github.com/gradle/actions/blob/main/setup-gradle/README.md - name: Setup Gradle - uses: gradle/actions/setup-gradle@v4 + uses: gradle/actions/setup-gradle@v5 - name: build and test - run: ./gradlew assemble && ./gradlew check --info --stacktrace + run: ./gradlew assemble && ./gradlew check --info --stacktrace && ./gradlew jcstress env: CI: true diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index a574a68..ba43744 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -14,21 +14,23 @@ jobs: MAVEN_CENTRAL_PGP_KEY: ${{ secrets.MAVEN_CENTRAL_PGP_KEY }} MAVEN_CENTRAL_USER: ${{ secrets.MAVEN_CENTRAL_USER }} MAVEN_CENTRAL_PASSWORD: ${{ secrets.MAVEN_CENTRAL_PASSWORD }} + MAVEN_CENTRAL_USER_NEW: ${{ secrets.MAVEN_CENTRAL_USER_NEW }} + MAVEN_CENTRAL_PASSWORD_NEW: ${{ secrets.MAVEN_CENTRAL_PASSWORD_NEW }} RELEASE_VERSION: ${{ github.event.inputs.version }} steps: - - uses: actions/checkout@v4 - - uses: gradle/actions/wrapper-validation@v3 - - name: Set up JDK 11 - uses: actions/setup-java@v4 + - uses: actions/checkout@v5 + - uses: gradle/actions/wrapper-validation@v5 + - name: Set up JDK 21 + uses: actions/setup-java@v5 with: - java-version: '11' - distribution: 'temurin' + java-version: '21' + distribution: 'corretto' check-latest: true # Configure Gradle for optimal use in GiHub Actions, including caching of downloaded dependencies. # See: https://github.com/gradle/actions/blob/main/setup-gradle/README.md - name: Setup Gradle - uses: gradle/actions/setup-gradle@v4 + uses: gradle/actions/setup-gradle@v5 - name: build test and publish run: ./gradlew assemble && ./gradlew check --info && ./gradlew publishToSonatype closeAndReleaseSonatypeStagingRepository -x check --info --stacktrace env: diff --git a/.github/workflows/stale-pr-issue.yml b/.github/workflows/stale-pr-issue.yml index d945402..0f385ee 100644 --- a/.github/workflows/stale-pr-issue.yml +++ b/.github/workflows/stale-pr-issue.yml @@ -16,7 +16,7 @@ jobs: close-pending: runs-on: ubuntu-latest steps: - - uses: actions/stale@v9 + - uses: actions/stale@v10 with: # GLOBAL ------------------------------------------------------------ # Exempt any PRs or issues already added to a milestone diff --git a/README.md b/README.md index 9f47b15..a53e766 100644 --- a/README.md +++ b/README.md @@ -64,7 +64,7 @@ Gradle users configure the `java-dataloader` dependency in `build.gradle`: ``` repositories { - jcenter() + mavenCentral() } dependencies { @@ -205,7 +205,7 @@ for the context object. 
```java DataLoaderOptions options = DataLoaderOptions.newOptions() - .setBatchLoaderContextProvider(() -> SecurityCtx.getCallingUserCtx()); + .setBatchLoaderContextProvider(() -> SecurityCtx.getCallingUserCtx()).build(); BatchLoaderWithContext batchLoader = new BatchLoaderWithContext() { @Override @@ -227,7 +227,7 @@ You can gain access to them as a map by key or as the original list of context o ```java DataLoaderOptions options = DataLoaderOptions.newOptions() - .setBatchLoaderContextProvider(() -> SecurityCtx.getCallingUserCtx()); + .setBatchLoaderContextProvider(() -> SecurityCtx.getCallingUserCtx()).build(); BatchLoaderWithContext batchLoader = new BatchLoaderWithContext() { @Override @@ -433,7 +433,7 @@ However, you can create your own custom future cache and supply it to the data l ```java MyCustomCache customCache = new MyCustomCache(); - DataLoaderOptions options = DataLoaderOptions.newOptions().setCacheMap(customCache); + DataLoaderOptions options = DataLoaderOptions.newOptions().setCacheMap(customCache).build(); DataLoaderFactory.newDataLoader(userBatchLoader, options); ``` @@ -467,7 +467,7 @@ The tests have an example based on [Caffeine](https://github.com/ben-manes/caffe In certain uncommon cases, a DataLoader which does not cache may be desirable. ```java - DataLoaderFactory.newDataLoader(userBatchLoader, DataLoaderOptions.newOptions().setCachingEnabled(false)); + DataLoaderFactory.newDataLoader(userBatchLoader, DataLoaderOptions.newOptions().setCachingEnabled(false).build()); ``` Calling the above will ensure that every call to `.load()` will produce a new promise, and requested keys will not be saved in memory. @@ -533,7 +533,7 @@ Knowing what the behaviour of your data is important for you to understand how e You can configure the statistics collector used when you build the data loader ```java - DataLoaderOptions options = DataLoaderOptions.newOptions().setStatisticsCollector(() -> new ThreadLocalStatisticsCollector()); + DataLoaderOptions options = DataLoaderOptions.newOptions().setStatisticsCollector(() -> new ThreadLocalStatisticsCollector()).build(); DataLoader userDataLoader = DataLoaderFactory.newDataLoader(userBatchLoader,options); ``` @@ -780,7 +780,7 @@ You set the `DataLoaderInstrumentation` into the `DataLoaderOptions` at build ti }); } }; - DataLoaderOptions options = DataLoaderOptions.newOptions().setInstrumentation(timingInstrumentation); + DataLoaderOptions options = DataLoaderOptions.newOptions().setInstrumentation(timingInstrumentation).build(); DataLoader userDataLoader = DataLoaderFactory.newDataLoader(userBatchLoader, options); ``` diff --git a/build.gradle b/build.gradle index 7d98bc3..7d2c806 100644 --- a/build.gradle +++ b/build.gradle @@ -1,3 +1,7 @@ +import net.ltgt.gradle.errorprone.CheckSeverity +import org.jetbrains.kotlin.gradle.dsl.JvmTarget +import org.jetbrains.kotlin.gradle.dsl.KotlinVersion + import java.text.SimpleDateFormat plugins { @@ -7,14 +11,33 @@ plugins { id 'maven-publish' id 'signing' id 'groovy' - id 'biz.aQute.bnd.builder' version '6.2.0' - id 'io.github.gradle-nexus.publish-plugin' version '1.0.0' - id 'com.github.ben-manes.versions' version '0.51.0' + id 'biz.aQute.bnd.builder' version '7.1.0' + id 'io.github.gradle-nexus.publish-plugin' version '2.0.0' + id 'com.github.ben-manes.versions' version '0.53.0' + id "me.champeau.jmh" version "0.7.3" + id "net.ltgt.errorprone" version '4.3.0' + id "io.github.reyerizo.gradle.jcstress" version "0.9.0" + + // Kotlin just for tests - not production code + id 
'org.jetbrains.kotlin.jvm' version '2.2.21' } java { toolchain { - languageVersion = JavaLanguageVersion.of(11) + languageVersion = JavaLanguageVersion.of(21) + } +} + +kotlin { + compilerOptions { + apiVersion = KotlinVersion.KOTLIN_2_0 + languageVersion = KotlinVersion.KOTLIN_2_0 + jvmTarget = JvmTarget.JVM_11 + javaParameters = true + freeCompilerArgs = [ + '-Xemit-jvm-type-annotations', + '-Xjspecify-annotations=strict', + ] } } @@ -58,17 +81,53 @@ repositories { jar { manifest { - attributes('Automatic-Module-Name': 'org.dataloader', - '-exportcontents': 'org.dataloader.*', - '-removeheaders': 'Private-Package') + attributes('Automatic-Module-Name': 'org.dataloader') + } + bundle { + bnd(''' +-exportcontents: org.dataloader.* +-removeheaders: Private-Package +Import-Package: org.jspecify.annotations;resolution:=optional,* +''') } } dependencies { api "org.reactivestreams:reactive-streams:$reactive_streams_version" api "org.jspecify:jspecify:1.0.0" + + // this is needed for the idea jmh plugin to work correctly + jmh 'org.openjdk.jmh:jmh-core:1.37' + jmh 'org.openjdk.jmh:jmh-generator-annprocess:1.37' + + errorprone 'com.uber.nullaway:nullaway:0.12.10' + errorprone 'com.google.errorprone:error_prone_core:2.43.0' + + // just tests + testImplementation 'org.jetbrains.kotlin:kotlin-stdlib-jdk8' } +tasks.withType(JavaCompile) { + options.release = 11 + options.errorprone { + disableAllChecks = true + check("NullAway", CheckSeverity.ERROR) + // + // end state has us with this config turned on - eg all classes + // + //option("NullAway:AnnotatedPackages", "org.dataloader") + option("NullAway:OnlyNullMarked", "true") + option("NullAway:JSpecifyMode", "true") + } + // Include to disable NullAway on test code + if (name.toLowerCase().contains("test")) { + options.errorprone { + disable("NullAway") + } + } +} + + task sourcesJar(type: Jar) { dependsOn classes archiveClassifier.set('sources') @@ -84,10 +143,7 @@ task javadocJar(type: Jar, dependsOn: javadoc) { from javadoc.destinationDir } -artifacts { - archives sourcesJar - archives javadocJar -} + testing { suites { @@ -120,9 +176,9 @@ publishing { publications { graphqlJava(MavenPublication) { from components.java - groupId 'com.graphql-java' - artifactId 'java-dataloader' - version project.version + groupId = 'com.graphql-java' + artifactId = 'java-dataloader' + version = project.version artifact sourcesJar artifact javadocJar @@ -169,14 +225,18 @@ publishing { nexusPublishing { repositories { sonatype { - username = System.env.MAVEN_CENTRAL_USER - password = System.env.MAVEN_CENTRAL_PASSWORD + username = System.env.MAVEN_CENTRAL_USER_NEW + password = System.env.MAVEN_CENTRAL_PASSWORD_NEW + // https://central.sonatype.org/publish/publish-portal-ossrh-staging-api/#configuration + nexusUrl.set(uri("https://ossrh-staging-api.central.sonatype.com/service/local/")) + // GraphQL Java does not publish snapshots, but adding this URL for completeness + snapshotRepositoryUrl.set(uri("https://central.sonatype.com/repository/maven-snapshots/")) } } } signing { - required { !project.hasProperty('publishToMavenLocal') } + required = { !project.hasProperty('publishToMavenLocal') } def signingKey = System.env.MAVEN_CENTRAL_PGP_KEY useInMemoryPgpKeys(signingKey, "") sign publishing.publications @@ -200,3 +260,7 @@ tasks.named("dependencyUpdates").configure { isNonStable(it.candidate.version) } } + +jcstress { +// verbose = true +} \ No newline at end of file diff --git a/gradle.properties b/gradle.properties index 428b6e2..db4b332 100644 --- 
a/gradle.properties +++ b/gradle.properties @@ -23,4 +23,8 @@ hamcrest_version=2.2 awaitility_version=2.0.0 reactor_core_version=3.6.6 caffeine_version=3.1.8 -reactive_streams_version=1.0.3 \ No newline at end of file +reactive_streams_version=1.0.3 +# Prevents the Kotlin stdlib being a POM dependency +# +# https://kotlinlang.org/docs/gradle-configure-project.html#dependency-on-the-standard-library +kotlin.stdlib.default.dependency=false \ No newline at end of file diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index 7454180..a4b76b9 100644 Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index e2847c8..bad7c24 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,6 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-8.11.1-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-9.2.0-bin.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/gradlew b/gradlew index 1b6c787..f5feea6 100755 --- a/gradlew +++ b/gradlew @@ -15,6 +15,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +# SPDX-License-Identifier: Apache-2.0 +# ############################################################################## # @@ -55,7 +57,7 @@ # Darwin, MinGW, and NonStop. # # (3) This script is generated from the Groovy template -# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt +# https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt # within the Gradle project. # # You can find Gradle at https://github.com/gradle/gradle/. @@ -80,13 +82,12 @@ do esac done -APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit - -APP_NAME="Gradle" +# This is normally unused +# shellcheck disable=SC2034 APP_BASE_NAME=${0##*/} - -# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' +# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036) +APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s +' "$PWD" ) || exit # Use the maximum available, or set MAX_FD != -1 to use that value. MAX_FD=maximum @@ -133,22 +134,29 @@ location of your Java installation." fi else JAVACMD=java - which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + if ! command -v java >/dev/null 2>&1 + then + die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. Please set the JAVA_HOME variable in your environment to match the location of your Java installation." + fi fi # Increase the maximum file descriptors if we can. if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then case $MAX_FD in #( max*) + # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked. + # shellcheck disable=SC2039,SC3045 MAX_FD=$( ulimit -H -n ) || warn "Could not query maximum file descriptor limit" esac case $MAX_FD in #( '' | soft) :;; #( *) + # In POSIX sh, ulimit -n is undefined. 
That's why the result is checked to see if it worked. + # shellcheck disable=SC2039,SC3045 ulimit -n "$MAX_FD" || warn "Could not set maximum file descriptor limit to $MAX_FD" esac @@ -193,11 +201,15 @@ if "$cygwin" || "$msys" ; then done fi -# Collect all arguments for the java command; -# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of -# shell script including quotes and variable substitutions, so put them in -# double quotes to make sure that they get re-expanded; and -# * put everything else in single quotes, so that it's not re-expanded. + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' + +# Collect all arguments for the java command: +# * DEFAULT_JVM_OPTS, JAVA_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments, +# and any embedded shellness will be escaped. +# * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be +# treated as '${Hostname}' itself on the command line. set -- \ "-Dorg.gradle.appname=$APP_BASE_NAME" \ @@ -205,6 +217,12 @@ set -- \ org.gradle.wrapper.GradleWrapperMain \ "$@" +# Stop when "xargs" is not available. +if ! command -v xargs >/dev/null 2>&1 +then + die "xargs is not available" +fi + # Use "xargs" to parse quoted args. # # With -n1 it outputs one arg per line, with the quotes and backslashes removed. diff --git a/gradlew.bat b/gradlew.bat index ac1b06f..9b42019 100644 --- a/gradlew.bat +++ b/gradlew.bat @@ -13,8 +13,10 @@ @rem See the License for the specific language governing permissions and @rem limitations under the License. @rem +@rem SPDX-License-Identifier: Apache-2.0 +@rem -@if "%DEBUG%" == "" @echo off +@if "%DEBUG%"=="" @echo off @rem ########################################################################## @rem @rem Gradle startup script for Windows @@ -25,7 +27,8 @@ if "%OS%"=="Windows_NT" setlocal set DIRNAME=%~dp0 -if "%DIRNAME%" == "" set DIRNAME=. +if "%DIRNAME%"=="" set DIRNAME=. +@rem This is normally unused set APP_BASE_NAME=%~n0 set APP_HOME=%DIRNAME% @@ -40,13 +43,13 @@ if defined JAVA_HOME goto findJavaFromJavaHome set JAVA_EXE=java.exe %JAVA_EXE% -version >NUL 2>&1 -if "%ERRORLEVEL%" == "0" goto execute +if %ERRORLEVEL% equ 0 goto execute -echo. -echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. -echo. -echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. +echo. 1>&2 +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2 +echo. 1>&2 +echo Please set the JAVA_HOME variable in your environment to match the 1>&2 +echo location of your Java installation. 1>&2 goto fail @@ -56,11 +59,11 @@ set JAVA_EXE=%JAVA_HOME%/bin/java.exe if exist "%JAVA_EXE%" goto execute -echo. -echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% -echo. -echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. +echo. 1>&2 +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2 +echo. 1>&2 +echo Please set the JAVA_HOME variable in your environment to match the 1>&2 +echo location of your Java installation. 
1>&2 goto fail @@ -75,13 +78,15 @@ set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar :end @rem End local scope for the variables with windows NT shell -if "%ERRORLEVEL%"=="0" goto mainEnd +if %ERRORLEVEL% equ 0 goto mainEnd :fail rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of rem the _cmd.exe /c_ return code! -if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 -exit /b 1 +set EXIT_CODE=%ERRORLEVEL% +if %EXIT_CODE% equ 0 set EXIT_CODE=1 +if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% +exit /b %EXIT_CODE% :mainEnd if "%OS%"=="Windows_NT" endlocal diff --git a/settings.gradle b/settings.gradle index 47404e7..0d61ab8 100644 --- a/settings.gradle +++ b/settings.gradle @@ -1,6 +1,6 @@ plugins { - id 'com.gradle.develocity' version '3.19' - id 'org.gradle.toolchains.foojay-resolver-convention' version '0.9.0' + id 'com.gradle.develocity' version '4.2.2' + id 'org.gradle.toolchains.foojay-resolver-convention' version '1.0.0' } develocity { diff --git a/src/jcstress/java/org/dataloader/DataLoader_Batching_Caching_JCStress.java b/src/jcstress/java/org/dataloader/DataLoader_Batching_Caching_JCStress.java new file mode 100644 index 0000000..f0d8263 --- /dev/null +++ b/src/jcstress/java/org/dataloader/DataLoader_Batching_Caching_JCStress.java @@ -0,0 +1,108 @@ +package org.dataloader; + +import org.openjdk.jcstress.annotations.Actor; +import org.openjdk.jcstress.annotations.Arbiter; +import org.openjdk.jcstress.annotations.JCStressTest; +import org.openjdk.jcstress.annotations.Outcome; +import org.openjdk.jcstress.annotations.State; +import org.openjdk.jcstress.infra.results.II_Result; + +import java.util.List; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.atomic.AtomicInteger; + +import static org.openjdk.jcstress.annotations.Expect.ACCEPTABLE; + +@JCStressTest +@State +@Outcome(id = "2000, 2000", expect = ACCEPTABLE, desc = "accepted") +public class DataLoader_Batching_Caching_JCStress { + + + AtomicInteger counter = new AtomicInteger(); + AtomicInteger batchLoaderCount = new AtomicInteger(); + volatile boolean finished1; + volatile boolean finished2; + + + BatchLoader batchLoader = keys -> { + return CompletableFuture.supplyAsync(() -> { + batchLoaderCount.getAndAdd(keys.size()); + return keys; + }); + }; + DataLoader dataLoader = DataLoaderFactory.newDataLoader(batchLoader); + + public DataLoader_Batching_Caching_JCStress() { + + } + + @Actor + public void load1() { + for (int i = 0; i < 1000; i++) { + dataLoader.load("load-1-" + i); + } + // we load the same keys again + for (int i = 0; i < 1000; i++) { + dataLoader.load("load-1-" + i); + } + finished1 = true; + } + + @Actor + public void load2() { + for (int i = 0; i < 1000; i++) { + dataLoader.load("load-2-" + i); + } + // we load the same keys again + for (int i = 0; i < 1000; i++) { + dataLoader.load("load-2-" + i); + } + finished2 = true; + } + + + @Actor + public void dispatch1() { + while (!finished1 || !finished2) { + try { + List dispatchedResult = dataLoader.dispatch().get(); + counter.getAndAdd(dispatchedResult.size()); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + try { + List dispatchedResult = dataLoader.dispatch().get(); + counter.getAndAdd(dispatchedResult.size()); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + @Actor + public void dispatch2() { + while (!finished1 || !finished2) { + try { + List dispatchedResult = dataLoader.dispatch().get(); + counter.getAndAdd(dispatchedResult.size()); + } catch (Exception 
e) { + throw new RuntimeException(e); + } + } + try { + List dispatchedResult = dataLoader.dispatch().get(); + counter.getAndAdd(dispatchedResult.size()); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + @Arbiter + public void arbiter(II_Result r) { + r.r1 = counter.get(); + r.r2 = batchLoaderCount.get(); + } + + +} diff --git a/src/jcstress/java/org/dataloader/DataLoader_NoBatching_Caching_JCStress.java b/src/jcstress/java/org/dataloader/DataLoader_NoBatching_Caching_JCStress.java new file mode 100644 index 0000000..6b46ed1 --- /dev/null +++ b/src/jcstress/java/org/dataloader/DataLoader_NoBatching_Caching_JCStress.java @@ -0,0 +1,54 @@ +package org.dataloader; + +import org.openjdk.jcstress.annotations.Actor; +import org.openjdk.jcstress.annotations.Arbiter; +import org.openjdk.jcstress.annotations.JCStressTest; +import org.openjdk.jcstress.annotations.Outcome; +import org.openjdk.jcstress.annotations.State; +import org.openjdk.jcstress.infra.results.II_Result; + +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.atomic.AtomicInteger; + +import static org.openjdk.jcstress.annotations.Expect.ACCEPTABLE; +import static org.openjdk.jcstress.annotations.Expect.ACCEPTABLE_INTERESTING; + +@JCStressTest +@State +@Outcome(id = "1000, 1000", expect = ACCEPTABLE, desc = "No keys loaded twice") +@Outcome(id = "1.*, 1000", expect = ACCEPTABLE_INTERESTING, desc = "Some keys loaded twice") +public class DataLoader_NoBatching_Caching_JCStress { + + + AtomicInteger batchLoaderCount = new AtomicInteger(); + + BatchLoader batchLoader = keys -> { + batchLoaderCount.getAndAdd(keys.size()); + return CompletableFuture.completedFuture(keys); + }; + + + DataLoader dataLoader = DataLoaderFactory.newDataLoader(batchLoader, DataLoaderOptions.newOptions().setBatchingEnabled(false).build()); + + @Actor + public void load1() { + for (int i = 0; i < 1000; i++) { + dataLoader.load("load-1-" + i); + } + } + + @Actor + public void load2() { + for (int i = 0; i < 1000; i++) { + dataLoader.load("load-1-" + i); + } + } + + + @Arbiter + public void arbiter(II_Result r) { + r.r1 = batchLoaderCount.get(); + r.r2 = dataLoader.getCacheMap().size(); + } + +} diff --git a/src/jmh/java/performance/DataLoaderDispatchPerformance.java b/src/jmh/java/performance/DataLoaderDispatchPerformance.java new file mode 100644 index 0000000..ad2060c --- /dev/null +++ b/src/jmh/java/performance/DataLoaderDispatchPerformance.java @@ -0,0 +1,315 @@ +package performance; + +import org.dataloader.BatchLoader; +import org.dataloader.DataLoader; +import org.dataloader.DataLoaderFactory; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Threads; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.infra.Blackhole; + +import java.util.List; +import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; + +@State(Scope.Benchmark) +@Warmup(iterations = 2, time = 5) +@Measurement(iterations = 4) +@Fork(1) +public class DataLoaderDispatchPerformance { + + static Owner o1 = new Owner("O-1", "Andi", List.of("P-1", "P-2", 
"P-3")); + static Owner o2 = new Owner("O-2", "George", List.of("P-4", "P-5", "P-6")); + static Owner o3 = new Owner("O-3", "Peppa", List.of("P-7", "P-8", "P-9", "P-10")); + static Owner o4 = new Owner("O-4", "Alice", List.of("P-11", "P-12")); + static Owner o5 = new Owner("O-5", "Bob", List.of("P-13")); + static Owner o6 = new Owner("O-6", "Catherine", List.of("P-14", "P-15", "P-16")); + static Owner o7 = new Owner("O-7", "David", List.of("P-17")); + static Owner o8 = new Owner("O-8", "Emma", List.of("P-18", "P-19", "P-20", "P-21")); + static Owner o9 = new Owner("O-9", "Frank", List.of("P-22")); + static Owner o10 = new Owner("O-10", "Grace", List.of("P-23", "P-24")); + static Owner o11 = new Owner("O-11", "Hannah", List.of("P-25", "P-26", "P-27")); + static Owner o12 = new Owner("O-12", "Ian", List.of("P-28")); + static Owner o13 = new Owner("O-13", "Jane", List.of("P-29", "P-30")); + static Owner o14 = new Owner("O-14", "Kevin", List.of("P-31", "P-32", "P-33")); + static Owner o15 = new Owner("O-15", "Laura", List.of("P-34")); + static Owner o16 = new Owner("O-16", "Michael", List.of("P-35", "P-36")); + static Owner o17 = new Owner("O-17", "Nina", List.of("P-37", "P-38", "P-39", "P-40")); + static Owner o18 = new Owner("O-18", "Oliver", List.of("P-41")); + static Owner o19 = new Owner("O-19", "Paula", List.of("P-42", "P-43")); + static Owner o20 = new Owner("O-20", "Quinn", List.of("P-44", "P-45", "P-46")); + static Owner o21 = new Owner("O-21", "Rachel", List.of("P-47")); + static Owner o22 = new Owner("O-22", "Steve", List.of("P-48", "P-49")); + static Owner o23 = new Owner("O-23", "Tina", List.of("P-50", "P-51", "P-52")); + static Owner o24 = new Owner("O-24", "Uma", List.of("P-53")); + static Owner o25 = new Owner("O-25", "Victor", List.of("P-54", "P-55")); + static Owner o26 = new Owner("O-26", "Wendy", List.of("P-56", "P-57", "P-58")); + static Owner o27 = new Owner("O-27", "Xander", List.of("P-59")); + static Owner o28 = new Owner("O-28", "Yvonne", List.of("P-60", "P-61")); + static Owner o29 = new Owner("O-29", "Zach", List.of("P-62", "P-63", "P-64")); + static Owner o30 = new Owner("O-30", "Willy", List.of("P-65", "P-66", "P-67")); + + + static Pet p1 = new Pet("P-1", "Bella", "O-1", List.of("P-2", "P-3", "P-4")); + static Pet p2 = new Pet("P-2", "Charlie", "O-2", List.of("P-1", "P-5", "P-6")); + static Pet p3 = new Pet("P-3", "Luna", "O-3", List.of("P-1", "P-2", "P-7", "P-8")); + static Pet p4 = new Pet("P-4", "Max", "O-1", List.of("P-1", "P-9", "P-10")); + static Pet p5 = new Pet("P-5", "Lucy", "O-2", List.of("P-2", "P-6")); + static Pet p6 = new Pet("P-6", "Cooper", "O-3", List.of("P-3", "P-5", "P-7")); + static Pet p7 = new Pet("P-7", "Daisy", "O-1", List.of("P-4", "P-6", "P-8")); + static Pet p8 = new Pet("P-8", "Milo", "O-2", List.of("P-3", "P-7", "P-9")); + static Pet p9 = new Pet("P-9", "Lola", "O-3", List.of("P-4", "P-8", "P-10")); + static Pet p10 = new Pet("P-10", "Rocky", "O-1", List.of("P-4", "P-9")); + static Pet p11 = new Pet("P-11", "Buddy", "O-4", List.of("P-12")); + static Pet p12 = new Pet("P-12", "Bailey", "O-4", List.of("P-11", "P-13")); + static Pet p13 = new Pet("P-13", "Sadie", "O-5", List.of("P-12")); + static Pet p14 = new Pet("P-14", "Maggie", "O-6", List.of("P-15")); + static Pet p15 = new Pet("P-15", "Sophie", "O-6", List.of("P-14", "P-16")); + static Pet p16 = new Pet("P-16", "Chloe", "O-6", List.of("P-15")); + static Pet p17 = new Pet("P-17", "Duke", "O-7", List.of("P-18")); + static Pet p18 = new Pet("P-18", "Riley", "O-8", List.of("P-17", 
"P-19")); + static Pet p19 = new Pet("P-19", "Lilly", "O-8", List.of("P-18", "P-20")); + static Pet p20 = new Pet("P-20", "Zoey", "O-8", List.of("P-19")); + static Pet p21 = new Pet("P-21", "Oscar", "O-8", List.of("P-22")); + static Pet p22 = new Pet("P-22", "Toby", "O-9", List.of("P-21", "P-23")); + static Pet p23 = new Pet("P-23", "Ruby", "O-10", List.of("P-22")); + static Pet p24 = new Pet("P-24", "Milo", "O-10", List.of("P-25")); + static Pet p25 = new Pet("P-25", "Finn", "O-11", List.of("P-24", "P-26")); + static Pet p26 = new Pet("P-26", "Luna", "O-11", List.of("P-25")); + static Pet p27 = new Pet("P-27", "Ellie", "O-11", List.of("P-28")); + static Pet p28 = new Pet("P-28", "Harley", "O-12", List.of("P-27", "P-29")); + static Pet p29 = new Pet("P-29", "Penny", "O-13", List.of("P-28")); + static Pet p30 = new Pet("P-30", "Hazel", "O-13", List.of("P-31")); + static Pet p31 = new Pet("P-31", "Gus", "O-14", List.of("P-30", "P-32")); + static Pet p32 = new Pet("P-32", "Dexter", "O-14", List.of("P-31")); + static Pet p33 = new Pet("P-33", "Winnie", "O-14", List.of("P-34")); + static Pet p34 = new Pet("P-34", "Murphy", "O-15", List.of("P-33", "P-35")); + static Pet p35 = new Pet("P-35", "Moose", "O-16", List.of("P-34")); + static Pet p36 = new Pet("P-36", "Scout", "O-16", List.of("P-37")); + static Pet p37 = new Pet("P-37", "Rex", "O-17", List.of("P-36", "P-38")); + static Pet p38 = new Pet("P-38", "Coco", "O-17", List.of("P-37")); + static Pet p39 = new Pet("P-39", "Maddie", "O-17", List.of("P-40")); + static Pet p40 = new Pet("P-40", "Archie", "O-17", List.of("P-39", "P-41")); + static Pet p41 = new Pet("P-41", "Buster", "O-18", List.of("P-40")); + static Pet p42 = new Pet("P-42", "Rosie", "O-19", List.of("P-43")); + static Pet p43 = new Pet("P-43", "Molly", "O-19", List.of("P-42", "P-44")); + static Pet p44 = new Pet("P-44", "Henry", "O-20", List.of("P-43")); + static Pet p45 = new Pet("P-45", "Leo", "O-20", List.of("P-46")); + static Pet p46 = new Pet("P-46", "Jack", "O-20", List.of("P-45", "P-47")); + static Pet p47 = new Pet("P-47", "Zoe", "O-21", List.of("P-46")); + static Pet p48 = new Pet("P-48", "Lulu", "O-22", List.of("P-49")); + static Pet p49 = new Pet("P-49", "Mimi", "O-22", List.of("P-48", "P-50")); + static Pet p50 = new Pet("P-50", "Nala", "O-23", List.of("P-49")); + static Pet p51 = new Pet("P-51", "Simba", "O-23", List.of("P-52")); + static Pet p52 = new Pet("P-52", "Teddy", "O-23", List.of("P-51", "P-53")); + static Pet p53 = new Pet("P-53", "Mochi", "O-24", List.of("P-52")); + static Pet p54 = new Pet("P-54", "Oreo", "O-25", List.of("P-55")); + static Pet p55 = new Pet("P-55", "Peanut", "O-25", List.of("P-54", "P-56")); + static Pet p56 = new Pet("P-56", "Pumpkin", "O-26", List.of("P-55")); + static Pet p57 = new Pet("P-57", "Shadow", "O-26", List.of("P-58")); + static Pet p58 = new Pet("P-58", "Sunny", "O-26", List.of("P-57", "P-59")); + static Pet p59 = new Pet("P-59", "Thor", "O-27", List.of("P-58")); + static Pet p60 = new Pet("P-60", "Willow", "O-28", List.of("P-61")); + static Pet p61 = new Pet("P-61", "Zeus", "O-28", List.of("P-60", "P-62")); + static Pet p62 = new Pet("P-62", "Ace", "O-29", List.of("P-61")); + static Pet p63 = new Pet("P-63", "Blue", "O-29", List.of("P-64")); + static Pet p64 = new Pet("P-64", "Cleo", "O-29", List.of("P-63", "P-65")); + static Pet p65 = new Pet("P-65", "Dolly", "O-30", List.of("P-64")); + static Pet p66 = new Pet("P-66", "Ella", "O-30", List.of("P-67")); + static Pet p67 = new Pet("P-67", "Freddy", "O-30", List.of("P-66")); + + 
+ static Map owners = Map.ofEntries( + Map.entry(o1.id, o1), + Map.entry(o2.id, o2), + Map.entry(o3.id, o3), + Map.entry(o4.id, o4), + Map.entry(o5.id, o5), + Map.entry(o6.id, o6), + Map.entry(o7.id, o7), + Map.entry(o8.id, o8), + Map.entry(o9.id, o9), + Map.entry(o10.id, o10), + Map.entry(o11.id, o11), + Map.entry(o12.id, o12), + Map.entry(o13.id, o13), + Map.entry(o14.id, o14), + Map.entry(o15.id, o15), + Map.entry(o16.id, o16), + Map.entry(o17.id, o17), + Map.entry(o18.id, o18), + Map.entry(o19.id, o19), + Map.entry(o20.id, o20), + Map.entry(o21.id, o21), + Map.entry(o22.id, o22), + Map.entry(o23.id, o23), + Map.entry(o24.id, o24), + Map.entry(o25.id, o25), + Map.entry(o26.id, o26), + Map.entry(o27.id, o27), + Map.entry(o28.id, o28), + Map.entry(o29.id, o29), + Map.entry(o30.id, o30) + ); + static Map pets = Map.ofEntries( + Map.entry(p1.id, p1), + Map.entry(p2.id, p2), + Map.entry(p3.id, p3), + Map.entry(p4.id, p4), + Map.entry(p5.id, p5), + Map.entry(p6.id, p6), + Map.entry(p7.id, p7), + Map.entry(p8.id, p8), + Map.entry(p9.id, p9), + Map.entry(p10.id, p10), + Map.entry(p11.id, p11), + Map.entry(p12.id, p12), + Map.entry(p13.id, p13), + Map.entry(p14.id, p14), + Map.entry(p15.id, p15), + Map.entry(p16.id, p16), + Map.entry(p17.id, p17), + Map.entry(p18.id, p18), + Map.entry(p19.id, p19), + Map.entry(p20.id, p20), + Map.entry(p21.id, p21), + Map.entry(p22.id, p22), + Map.entry(p23.id, p23), + Map.entry(p24.id, p24), + Map.entry(p25.id, p25), + Map.entry(p26.id, p26), + Map.entry(p27.id, p27), + Map.entry(p28.id, p28), + Map.entry(p29.id, p29), + Map.entry(p30.id, p30), + Map.entry(p31.id, p31), + Map.entry(p32.id, p32), + Map.entry(p33.id, p33), + Map.entry(p34.id, p34), + Map.entry(p35.id, p35), + Map.entry(p36.id, p36), + Map.entry(p37.id, p37), + Map.entry(p38.id, p38), + Map.entry(p39.id, p39), + Map.entry(p40.id, p40), + Map.entry(p41.id, p41), + Map.entry(p42.id, p42), + Map.entry(p43.id, p43), + Map.entry(p44.id, p44), + Map.entry(p45.id, p45), + Map.entry(p46.id, p46), + Map.entry(p47.id, p47), + Map.entry(p48.id, p48), + Map.entry(p49.id, p49), + Map.entry(p50.id, p50), + Map.entry(p51.id, p51), + Map.entry(p52.id, p52), + Map.entry(p53.id, p53), + Map.entry(p54.id, p54), + Map.entry(p55.id, p55), + Map.entry(p56.id, p56), + Map.entry(p57.id, p57), + Map.entry(p58.id, p58), + Map.entry(p59.id, p59), + Map.entry(p60.id, p60), + Map.entry(p61.id, p61), + Map.entry(p62.id, p62), + Map.entry(p63.id, p63), + Map.entry(p64.id, p64), + Map.entry(p65.id, p65), + Map.entry(p66.id, p66), + Map.entry(p67.id, p67) + ); + + static class Owner { + public Owner(String id, String name, List petIds) { + this.id = id; + this.name = name; + this.petIds = petIds; + } + + String id; + String name; + List petIds; + } + + static class Pet { + public Pet(String id, String name, String ownerId, List friendsIds) { + this.id = id; + this.name = name; + this.ownerId = ownerId; + this.friendsIds = friendsIds; + } + + String id; + String name; + String ownerId; + List friendsIds; + } + + + static BatchLoader ownerBatchLoader = list -> { + List collect = list.stream().map(key -> { + Owner owner = owners.get(key); + return owner; + }).collect(Collectors.toList()); + return CompletableFuture.completedFuture(collect); + }; + static BatchLoader petBatchLoader = list -> { + List collect = list.stream().map(key -> { + Pet owner = pets.get(key); + return owner; + }).collect(Collectors.toList()); + return CompletableFuture.completedFuture(collect); + }; + + + @State(Scope.Benchmark) + public static class MyState { 
+ @Setup + public void setup() { + + } + + DataLoader ownerDL = DataLoaderFactory.newDataLoader(ownerBatchLoader); + DataLoader petDL = DataLoaderFactory.newDataLoader(petBatchLoader); + + + } + + + @Benchmark + @BenchmarkMode(Mode.AverageTime) + @OutputTimeUnit(TimeUnit.NANOSECONDS) + @Threads(Threads.MAX) + public void loadAndDispatch(MyState myState, Blackhole blackhole) { + DataLoader ownerDL = myState.ownerDL; + DataLoader petDL = myState.petDL; + + for (Owner owner : owners.values()) { + ownerDL.load(owner.id); + for (String petId : owner.petIds) { + petDL.load(petId); + for (String friendId : pets.get(petId).friendsIds) { + petDL.load(friendId); + } + } + } + + CompletableFuture cf1 = ownerDL.dispatch(); + CompletableFuture cf2 = petDL.dispatch(); + blackhole.consume(CompletableFuture.allOf(cf1, cf2).join()); + } + + +} diff --git a/src/jmh/java/performance/PerformanceTestingUtils.java b/src/jmh/java/performance/PerformanceTestingUtils.java new file mode 100644 index 0000000..9e05fd6 --- /dev/null +++ b/src/jmh/java/performance/PerformanceTestingUtils.java @@ -0,0 +1,84 @@ +package performance; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.net.URL; +import java.nio.charset.Charset; +import java.time.LocalDateTime; +import java.time.format.DateTimeFormatter; +import java.util.concurrent.Callable; + +public class PerformanceTestingUtils { + + @SuppressWarnings("UnstableApiUsage") + static String loadResource(String name) { + return asRTE(() -> { + URL resource = PerformanceTestingUtils.class.getClassLoader().getResource(name); + if (resource == null) { + throw new IllegalArgumentException("missing resource: " + name); + } + byte[] bytes; + try (InputStream inputStream = resource.openStream()) { + bytes = inputStream.readAllBytes(); + } + return new String(bytes, Charset.defaultCharset()); + }); + } + + static T asRTE(Callable callable) { + try { + return callable.call(); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + public static void runInToolingForSomeTimeThenExit(Runnable setup, Runnable r, Runnable tearDown) { + int runForMillis = getRunForMillis(); + if (runForMillis <= 0) { + System.out.print("'runForMillis' environment var is not set - continuing \n"); + return; + } + System.out.printf("Running initial code in some tooling - runForMillis=%d \n", runForMillis); + System.out.print("Get your tooling in order and press enter..."); + readLine(); + System.out.print("Lets go...\n"); + setup.run(); + + DateTimeFormatter dtf = DateTimeFormatter.ofPattern("HH:mm:ss"); + long now, then = System.currentTimeMillis(); + do { + now = System.currentTimeMillis(); + long msLeft = runForMillis - (now - then); + System.out.printf("\t%s Running in loop... %s ms left\n", dtf.format(LocalDateTime.now()), msLeft); + r.run(); + now = System.currentTimeMillis(); + } while ((now - then) < runForMillis); + + tearDown.run(); + + System.out.printf("This ran for %d millis. 
Exiting...\n", System.currentTimeMillis() - then); + System.exit(0); + } + + private static void readLine() { + BufferedReader br = new BufferedReader(new InputStreamReader(System.in)); + try { + br.readLine(); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + private static int getRunForMillis() { + String runFor = System.getenv("runForMillis"); + try { + return Integer.parseInt(runFor); + } catch (NumberFormatException e) { + return -1; + } + } + +} diff --git a/src/main/java/org/dataloader/BatchLoader.java b/src/main/java/org/dataloader/BatchLoader.java index 2b0c3c5..df11f89 100644 --- a/src/main/java/org/dataloader/BatchLoader.java +++ b/src/main/java/org/dataloader/BatchLoader.java @@ -17,8 +17,8 @@ package org.dataloader; import org.dataloader.annotations.PublicSpi; -import org.jspecify.annotations.NonNull; import org.jspecify.annotations.NullMarked; +import org.jspecify.annotations.Nullable; import java.util.List; import java.util.concurrent.CompletionStage; @@ -40,7 +40,7 @@ * 2, 9, 6, 1 * ] * - * + *

* and loading from a back-end service returned this list of values: * *

@@ -50,7 +50,7 @@
  *      { id: 2, name: 'San Francisco' },
  *  ]
  * 
- *
+ *

* then the batch loader function contract has been broken. *

* The back-end service returned results in a different order than we requested, likely because it was more efficient for it to @@ -77,7 +77,7 @@ @FunctionalInterface @PublicSpi @NullMarked -public interface BatchLoader { +public interface BatchLoader { /** * Called to batch load the provided keys and return a promise to a list of values. @@ -85,7 +85,6 @@ public interface BatchLoader { * If you need calling context then implement {@link org.dataloader.BatchLoaderWithContext} * * @param keys the collection of keys to load - * * @return a promise of the values for those keys */ CompletionStage> load(List keys); diff --git a/src/main/java/org/dataloader/BatchLoaderEnvironment.java b/src/main/java/org/dataloader/BatchLoaderEnvironment.java index 6b84e70..c7a2ed8 100644 --- a/src/main/java/org/dataloader/BatchLoaderEnvironment.java +++ b/src/main/java/org/dataloader/BatchLoaderEnvironment.java @@ -19,11 +19,11 @@ @NullMarked public class BatchLoaderEnvironment { - private final Object context; + private final @Nullable Object context; private final Map keyContexts; private final List keyContextsList; - private BatchLoaderEnvironment(Object context, List keyContextsList, Map keyContexts) { + private BatchLoaderEnvironment(@Nullable Object context, List keyContextsList, Map keyContexts) { this.context = context; this.keyContexts = keyContexts; this.keyContextsList = keyContextsList; @@ -33,7 +33,6 @@ private BatchLoaderEnvironment(Object context, List keyContextsList, Map * Returns the overall context object provided by {@link org.dataloader.BatchLoaderContextProvider} * * @param the type you would like the object to be - * * @return a context object or null if there isn't one */ @SuppressWarnings("unchecked") @@ -68,7 +67,7 @@ public static Builder newBatchLoaderEnvironment() { } public static class Builder { - private Object context; + private @Nullable Object context; private Map keyContexts = Collections.emptyMap(); private List keyContextsList = Collections.emptyList(); diff --git a/src/main/java/org/dataloader/BatchLoaderWithContext.java b/src/main/java/org/dataloader/BatchLoaderWithContext.java index eba26e4..fb6ff71 100644 --- a/src/main/java/org/dataloader/BatchLoaderWithContext.java +++ b/src/main/java/org/dataloader/BatchLoaderWithContext.java @@ -2,6 +2,7 @@ import org.dataloader.annotations.PublicSpi; import org.jspecify.annotations.NullMarked; +import org.jspecify.annotations.Nullable; import java.util.List; import java.util.concurrent.CompletionStage; @@ -16,7 +17,7 @@ */ @PublicSpi @NullMarked -public interface BatchLoaderWithContext { +public interface BatchLoaderWithContext { /** * Called to batch load the provided keys and return a promise to a list of values. This default * version can be given an environment object to that maybe be useful during the call. A typical use case diff --git a/src/main/java/org/dataloader/CacheMap.java b/src/main/java/org/dataloader/CacheMap.java index 54b1b49..3e6d895 100644 --- a/src/main/java/org/dataloader/CacheMap.java +++ b/src/main/java/org/dataloader/CacheMap.java @@ -28,7 +28,13 @@ * CacheMap is used by data loaders that use caching promises to values aka {@link CompletableFuture}<V>. A better name for this * class might have been FutureCache but that is history now. *

- * The default implementation used by the data loader is based on a {@link java.util.LinkedHashMap}. + * The default implementation used by the data loader is based on a {@link java.util.concurrent.ConcurrentHashMap} because + * the data loader code requires the cache to provide atomic writes, especially the {@link #putIfAbsentAtomically(Object, CompletableFuture)} + * method. + *

+ * The data loader code uses a Compare and Swap (CAS) algorithm to decide if a cache entry is present or not. If you write your + * own {@link CacheMap} implementation then you MUST provide atomic writes in this method to ensure that the same promise for a key is + * always returned. + *

* This is really a cache of completed {@link CompletableFuture}<V> values in memory. It is used, when caching is enabled, to * give back the same future to any code that may call it. If you need a cache of the underlying values that is possible external to the JVM @@ -42,7 +48,7 @@ */ @PublicSpi @NullMarked -public interface CacheMap { +public interface CacheMap { /** * Creates a new cache map, using the default implementation that is based on a {@link java.util.LinkedHashMap}. @@ -74,23 +80,31 @@ static CacheMap simpleMap() { * * @return the cached value, or {@code null} if not found (depends on cache implementation) */ - @Nullable CompletableFuture get(K key); + @Nullable CompletableFuture get(K key); /** * Gets a collection of CompletableFutures from the cache map. + * * @return the collection of cached values */ Collection> getAll(); /** - * Creates a new cache map entry with the specified key and value, or updates the value if the key already exists. + * Atomically creates a new cache map entry with the specified key and value if the key is not already present; an existing entry is left unchanged. + *

+ * The data loader code uses a Compare and Swap (CAS) algorithm to decide if a cache entry is present or not. If you write your + * own {@link CacheMap} implementation then you MUST provide atomic writes in this method to ensure that the same promise for a key is + * always returned. + *

+ * The default implementation of this method uses {@link java.util.concurrent.ConcurrentHashMap} as its implementation so these CAS + * writes work as expected. * * @param key the key to cache * @param value the value to cache * - * @return the cache map for fluent coding + * @return the previous value for the key, or null if no value was present (the write is atomic). - CacheMap set(K key, CompletableFuture value); + @Nullable CompletableFuture putIfAbsentAtomically(K key, CompletableFuture value); /** * Deletes the entry with the specified key from the cache map, if it exists. @@ -107,4 +121,13 @@ static CacheMap simpleMap() { * @return the cache map for fluent coding */ CacheMap clear(); + + /** + * Returns the current size of the cache. This is not used by DataLoader directly + * and is intended for testing and debugging. + * If a cache doesn't support it, it can throw an Exception. + * + * @return the size of the cache + */ + int size(); } diff --git a/src/main/java/org/dataloader/DataLoader.java b/src/main/java/org/dataloader/DataLoader.java index d03e5ac..125efe1 100644 --- a/src/main/java/org/dataloader/DataLoader.java +++ b/src/main/java/org/dataloader/DataLoader.java @@ -58,18 +58,20 @@ *

* A call to the batch loader might result in individual exception failures for item with the returned list. if * you want to capture these specific item failures then use {@link org.dataloader.Try} as a return value and - * create the data loader with {@link #newDataLoaderWithTry(BatchLoader)} form. The Try values will be interpreted + * create the data loader with {@link DataLoaderFactory#newDataLoaderWithTry(BatchLoader)} form. The Try values will be interpreted * as either success values or cause the {@link #load(Object)} promise to complete exceptionally. * * @param type parameter indicating the type of the data load keys * @param type parameter indicating the type of the data that is returned + * * @author Arnold Schrijver * @author Brad Baker */ @PublicApi @NullMarked -public class DataLoader { +public class DataLoader { + private final @Nullable String name; private final DataLoaderHelper helper; private final StatisticsCollector stats; private final CacheMap futureCache; @@ -77,317 +79,13 @@ public class DataLoader { private final DataLoaderOptions options; private final Object batchLoadFunction; - /** - * Creates new DataLoader with the specified batch loader function and default options - * (batching, caching and unlimited batch size). - * - * @param batchLoadFunction the batch load function to use - * @param the key type - * @param the value type - * @return a new DataLoader - * @deprecated use {@link DataLoaderFactory} instead - */ - @Deprecated - public static DataLoader newDataLoader(BatchLoader batchLoadFunction) { - return newDataLoader(batchLoadFunction, null); - } - - /** - * Creates new DataLoader with the specified batch loader function with the provided options - * - * @param batchLoadFunction the batch load function to use - * @param options the options to use - * @param the key type - * @param the value type - * @return a new DataLoader - * @deprecated use {@link DataLoaderFactory} instead - */ - @Deprecated - public static DataLoader newDataLoader(BatchLoader batchLoadFunction, @Nullable DataLoaderOptions options) { - return DataLoaderFactory.mkDataLoader(batchLoadFunction, options); - } - - /** - * Creates new DataLoader with the specified batch loader function and default options - * (batching, caching and unlimited batch size) where the batch loader function returns a list of - * {@link org.dataloader.Try} objects. - *

- * If it's important you to know the exact status of each item in a batch call and whether it threw exceptions then - * you can use this form to create the data loader. - *

- * Using Try objects allows you to capture a value returned or an exception that might - * have occurred trying to get a value. . - * - * @param batchLoadFunction the batch load function to use that uses {@link org.dataloader.Try} objects - * @param the key type - * @param the value type - * @return a new DataLoader - * @deprecated use {@link DataLoaderFactory} instead - */ - @Deprecated - public static DataLoader newDataLoaderWithTry(BatchLoader> batchLoadFunction) { - return newDataLoaderWithTry(batchLoadFunction, null); - } - - /** - * Creates new DataLoader with the specified batch loader function and with the provided options - * where the batch loader function returns a list of - * {@link org.dataloader.Try} objects. - * - * @param batchLoadFunction the batch load function to use that uses {@link org.dataloader.Try} objects - * @param options the options to use - * @param the key type - * @param the value type - * @return a new DataLoader - * @see DataLoaderFactory#newDataLoaderWithTry(BatchLoader) - * @deprecated use {@link DataLoaderFactory} instead - */ - @Deprecated - public static DataLoader newDataLoaderWithTry(BatchLoader> batchLoadFunction, @Nullable DataLoaderOptions options) { - return DataLoaderFactory.mkDataLoader(batchLoadFunction, options); - } - - /** - * Creates new DataLoader with the specified batch loader function and default options - * (batching, caching and unlimited batch size). - * - * @param batchLoadFunction the batch load function to use - * @param the key type - * @param the value type - * @return a new DataLoader - * @deprecated use {@link DataLoaderFactory} instead - */ - @Deprecated - public static DataLoader newDataLoader(BatchLoaderWithContext batchLoadFunction) { - return newDataLoader(batchLoadFunction, null); - } - - /** - * Creates new DataLoader with the specified batch loader function with the provided options - * - * @param batchLoadFunction the batch load function to use - * @param options the options to use - * @param the key type - * @param the value type - * @return a new DataLoader - * @deprecated use {@link DataLoaderFactory} instead - */ - @Deprecated - public static DataLoader newDataLoader(BatchLoaderWithContext batchLoadFunction, @Nullable DataLoaderOptions options) { - return DataLoaderFactory.mkDataLoader(batchLoadFunction, options); - } - - /** - * Creates new DataLoader with the specified batch loader function and default options - * (batching, caching and unlimited batch size) where the batch loader function returns a list of - * {@link org.dataloader.Try} objects. - *

- * If it's important you to know the exact status of each item in a batch call and whether it threw exceptions then - * you can use this form to create the data loader. - *

- * Using Try objects allows you to capture a value returned or an exception that might - * have occurred trying to get a value. . - * - * @param batchLoadFunction the batch load function to use that uses {@link org.dataloader.Try} objects - * @param the key type - * @param the value type - * @return a new DataLoader - * @deprecated use {@link DataLoaderFactory} instead - */ - @Deprecated - public static DataLoader newDataLoaderWithTry(BatchLoaderWithContext> batchLoadFunction) { - return newDataLoaderWithTry(batchLoadFunction, null); - } - - /** - * Creates new DataLoader with the specified batch loader function and with the provided options - * where the batch loader function returns a list of - * {@link org.dataloader.Try} objects. - * - * @param batchLoadFunction the batch load function to use that uses {@link org.dataloader.Try} objects - * @param options the options to use - * @param the key type - * @param the value type - * @return a new DataLoader - * @see DataLoaderFactory#newDataLoaderWithTry(BatchLoader) - * @deprecated use {@link DataLoaderFactory} instead - */ - @Deprecated - public static DataLoader newDataLoaderWithTry(BatchLoaderWithContext> batchLoadFunction, @Nullable DataLoaderOptions options) { - return DataLoaderFactory.mkDataLoader(batchLoadFunction, options); - } - - /** - * Creates new DataLoader with the specified batch loader function and default options - * (batching, caching and unlimited batch size). - * - * @param batchLoadFunction the batch load function to use - * @param the key type - * @param the value type - * @return a new DataLoader - * @deprecated use {@link DataLoaderFactory} instead - */ - @Deprecated - public static DataLoader newMappedDataLoader(MappedBatchLoader batchLoadFunction) { - return newMappedDataLoader(batchLoadFunction, null); - } - - /** - * Creates new DataLoader with the specified batch loader function with the provided options - * - * @param batchLoadFunction the batch load function to use - * @param options the options to use - * @param the key type - * @param the value type - * @return a new DataLoader - * @deprecated use {@link DataLoaderFactory} instead - */ - @Deprecated - public static DataLoader newMappedDataLoader(MappedBatchLoader batchLoadFunction, @Nullable DataLoaderOptions options) { - return DataLoaderFactory.mkDataLoader(batchLoadFunction, options); - } - - /** - * Creates new DataLoader with the specified batch loader function and default options - * (batching, caching and unlimited batch size) where the batch loader function returns a list of - * {@link org.dataloader.Try} objects. - *

- * If it's important you to know the exact status of each item in a batch call and whether it threw exceptions then - * you can use this form to create the data loader. - *

- * Using Try objects allows you to capture a value returned or an exception that might - * have occurred trying to get a value. . - *

- * - * @param batchLoadFunction the batch load function to use that uses {@link org.dataloader.Try} objects - * @param the key type - * @param the value type - * @return a new DataLoader - * @deprecated use {@link DataLoaderFactory} instead - */ - @Deprecated - public static DataLoader newMappedDataLoaderWithTry(MappedBatchLoader> batchLoadFunction) { - return newMappedDataLoaderWithTry(batchLoadFunction, null); - } - - /** - * Creates new DataLoader with the specified batch loader function and with the provided options - * where the batch loader function returns a list of - * {@link org.dataloader.Try} objects. - * - * @param batchLoadFunction the batch load function to use that uses {@link org.dataloader.Try} objects - * @param options the options to use - * @param the key type - * @param the value type - * @return a new DataLoader - * @see DataLoaderFactory#newDataLoaderWithTry(BatchLoader) - * @deprecated use {@link DataLoaderFactory} instead - */ - @Deprecated - public static DataLoader newMappedDataLoaderWithTry(MappedBatchLoader> batchLoadFunction, @Nullable DataLoaderOptions options) { - return DataLoaderFactory.mkDataLoader(batchLoadFunction, options); - } - - /** - * Creates new DataLoader with the specified mapped batch loader function and default options - * (batching, caching and unlimited batch size). - * - * @param batchLoadFunction the batch load function to use - * @param the key type - * @param the value type - * @return a new DataLoader - * @deprecated use {@link DataLoaderFactory} instead - */ - @Deprecated - public static DataLoader newMappedDataLoader(MappedBatchLoaderWithContext batchLoadFunction) { - return newMappedDataLoader(batchLoadFunction, null); - } - - /** - * Creates new DataLoader with the specified batch loader function with the provided options - * - * @param batchLoadFunction the batch load function to use - * @param options the options to use - * @param the key type - * @param the value type - * @return a new DataLoader - * @deprecated use {@link DataLoaderFactory} instead - */ - @Deprecated - public static DataLoader newMappedDataLoader(MappedBatchLoaderWithContext batchLoadFunction, @Nullable DataLoaderOptions options) { - return DataLoaderFactory.mkDataLoader(batchLoadFunction, options); - } - - /** - * Creates new DataLoader with the specified batch loader function and default options - * (batching, caching and unlimited batch size) where the batch loader function returns a list of - * {@link org.dataloader.Try} objects. - *

-     * If it's important you to know the exact status of each item in a batch call and whether it threw exceptions then
-     * you can use this form to create the data loader.
-     *

- * Using Try objects allows you to capture a value returned or an exception that might - * have occurred trying to get a value. . - * - * @param batchLoadFunction the batch load function to use that uses {@link org.dataloader.Try} objects - * @param the key type - * @param the value type - * @return a new DataLoader - * @deprecated use {@link DataLoaderFactory} instead - */ - @Deprecated - public static DataLoader newMappedDataLoaderWithTry(MappedBatchLoaderWithContext> batchLoadFunction) { - return newMappedDataLoaderWithTry(batchLoadFunction, null); - } - - /** - * Creates new DataLoader with the specified batch loader function and with the provided options - * where the batch loader function returns a list of - * {@link org.dataloader.Try} objects. - * - * @param batchLoadFunction the batch load function to use that uses {@link org.dataloader.Try} objects - * @param options the options to use - * @param the key type - * @param the value type - * @return a new DataLoader - * @see DataLoaderFactory#newDataLoaderWithTry(BatchLoader) - * @deprecated use {@link DataLoaderFactory} instead - */ - @Deprecated - public static DataLoader newMappedDataLoaderWithTry(MappedBatchLoaderWithContext> batchLoadFunction, @Nullable DataLoaderOptions options) { - return DataLoaderFactory.mkDataLoader(batchLoadFunction, options); - } - - /** - * Creates a new data loader with the provided batch load function, and default options. - * - * @param batchLoadFunction the batch load function to use - * @deprecated use {@link DataLoaderFactory} instead - */ - @Deprecated - public DataLoader(BatchLoader batchLoadFunction) { - this((Object) batchLoadFunction, null); - } - - /** - * Creates a new data loader with the provided batch load function and options. - * - * @param batchLoadFunction the batch load function to use - * @param options the batch load options - * @deprecated use {@link DataLoaderFactory} instead - */ - @Deprecated - public DataLoader(BatchLoader batchLoadFunction, @Nullable DataLoaderOptions options) { - this((Object) batchLoadFunction, options); - } - @VisibleForTesting - DataLoader(Object batchLoadFunction, @Nullable DataLoaderOptions options) { - this(batchLoadFunction, options, Clock.systemUTC()); + DataLoader(@Nullable String name, Object batchLoadFunction, @Nullable DataLoaderOptions options) { + this(name, batchLoadFunction, options, Clock.systemUTC()); } @VisibleForTesting - DataLoader(Object batchLoadFunction, @Nullable DataLoaderOptions options, Clock clock) { + DataLoader(@Nullable String name, Object batchLoadFunction, @Nullable DataLoaderOptions options, Clock clock) { DataLoaderOptions loaderOptions = options == null ? 
new DataLoaderOptions() : options; this.futureCache = determineFutureCache(loaderOptions); this.valueCache = determineValueCache(loaderOptions); @@ -395,7 +93,7 @@ public DataLoader(BatchLoader batchLoadFunction, @Nullable DataLoaderOptio this.stats = nonNull(loaderOptions.getStatisticsCollector()); this.batchLoadFunction = nonNull(batchLoadFunction); this.options = loaderOptions; - + this.name = name; this.helper = new DataLoaderHelper<>(this, batchLoadFunction, loaderOptions, this.futureCache, this.valueCache, this.stats, clock); } @@ -410,6 +108,13 @@ private ValueCache determineValueCache(DataLoaderOptions loaderOptions) { return (ValueCache) loaderOptions.valueCache().orElseGet(ValueCache::defaultValueCache); } + /** + * @return the name of the DataLoader which can be null + */ + public @Nullable String getName() { + return name; + } + /** * @return the options used to build this {@link DataLoader} */ @@ -428,6 +133,7 @@ public Object getBatchLoadFunction() { * This allows you to change the current {@link DataLoader} and turn it into a new one * * @param builderConsumer the {@link DataLoaderFactory.Builder} consumer for changing the {@link DataLoader} + * * @return a newly built {@link DataLoader} instance */ public DataLoader transform(Consumer> builderConsumer) { @@ -454,20 +160,6 @@ public Duration getTimeSinceDispatch() { return Duration.between(helper.getLastDispatchTime(), helper.now()); } - /** - * Requests to load the data with the specified key asynchronously, and returns a future of the resulting value. - *

- * If batching is enabled (the default), you'll have to call {@link DataLoader#dispatch()} at a later stage to - * start batch execution. If you forget this call the future will never be completed (unless already completed, - * and returned from cache). - * - * @param key the key to load - * @return the future of the value - */ - public CompletableFuture load(K key) { - return load(key, null); - } - /** * This will return an optional promise to a value previously loaded via a {@link #load(Object)} call or empty if not call has been made for that key. *

@@ -479,6 +171,7 @@ public CompletableFuture load(K key) { * NOTE : This will NOT cause a data load to happen. You must call {@link #load(Object)} for that to happen. * * @param key the key to check + * * @return an Optional to the future of the value */ public Optional> getIfPresent(K key) { @@ -497,6 +190,7 @@ public Optional> getIfPresent(K key) { * NOTE : This will NOT cause a data load to happen. You must call {@link #load(Object)} for that to happen. * * @param key the key to check + * * @return an Optional to the future of the value */ public Optional> getIfCompleted(K key) { @@ -504,6 +198,24 @@ public Optional> getIfCompleted(K key) { } + private CompletableFuture loadImpl(@NonNull K key, @Nullable Object keyContext) { + return helper.load(nonNull(key), keyContext); + } + + /** + * Requests to load the data with the specified key asynchronously, and returns a future of the resulting value. + *

+ * If batching is enabled (the default), you'll have to call {@link DataLoader#dispatch()} at a later stage to + * start batch execution. If you forget this call the future will never be completed (unless already completed, + * and returned from cache). + * + * @param key the key to load + * @return the future of the value + */ + public CompletableFuture load(K key) { + return loadImpl(key, null); + } + /** * Requests to load the data with the specified key asynchronously, and returns a future of the resulting value. *

@@ -516,10 +228,11 @@ public Optional> getIfCompleted(K key) { * * @param key the key to load * @param keyContext a context object that is specific to this key + * * @return the future of the value */ public CompletableFuture load(@NonNull K key, @Nullable Object keyContext) { - return helper.load(nonNull(key), keyContext); + return loadImpl(key, keyContext); } /** @@ -531,6 +244,7 @@ public CompletableFuture load(@NonNull K key, @Nullable Object keyContext) { * and returned from cache). * * @param keys the list of keys to load + * * @return the composite future of the list of values */ public CompletableFuture> loadMany(List keys) { @@ -550,24 +264,23 @@ public CompletableFuture> loadMany(List keys) { * * @param keys the list of keys to load * @param keyContexts the list of key calling context objects + * * @return the composite future of the list of values */ public CompletableFuture> loadMany(List keys, List keyContexts) { nonNull(keys); nonNull(keyContexts); - synchronized (this) { - List> collect = new ArrayList<>(keys.size()); - for (int i = 0; i < keys.size(); i++) { - K key = keys.get(i); - Object keyContext = null; - if (i < keyContexts.size()) { - keyContext = keyContexts.get(i); - } - collect.add(load(key, keyContext)); + List> collect = new ArrayList<>(keys.size()); + for (int i = 0; i < keys.size(); i++) { + K key = keys.get(i); + Object keyContext = null; + if (i < keyContexts.size()) { + keyContext = keyContexts.get(i); } - return CompletableFutureKit.allOf(collect); + collect.add(loadImpl(key, keyContext)); } + return CompletableFutureKit.allOf(collect); } /** @@ -582,20 +295,19 @@ public CompletableFuture> loadMany(List keys, List keyContext * {@link org.dataloader.MappedBatchLoaderWithContext} to help retrieve data. * * @param keysAndContexts the map of keys to their respective contexts + * * @return the composite future of the map of keys and values */ public CompletableFuture> loadMany(Map keysAndContexts) { nonNull(keysAndContexts); - synchronized (this) { - Map> collect = new HashMap<>(keysAndContexts.size()); - for (Map.Entry entry : keysAndContexts.entrySet()) { - K key = entry.getKey(); - Object keyContext = entry.getValue(); - collect.put(key, load(key, keyContext)); - } - return CompletableFutureKit.allOf(collect); + Map> collect = new HashMap<>(keysAndContexts.size()); + for (Map.Entry entry : keysAndContexts.entrySet()) { + K key = entry.getKey(); + Object keyContext = entry.getValue(); + collect.put(key, loadImpl(key, keyContext)); } + return CompletableFutureKit.allOf(collect); } /** @@ -654,6 +366,7 @@ public int dispatchDepth() { * on the next load request. * * @param key the key to remove + * * @return the data loader for fluent coding */ public DataLoader clear(K key) { @@ -667,14 +380,13 @@ public DataLoader clear(K key) { * * @param key the key to remove * @param handler a handler that will be called after the async remote clear completes + * * @return the data loader for fluent coding */ public DataLoader clear(K key, BiConsumer handler) { Object cacheKey = getCacheKey(key); - synchronized (this) { - futureCache.delete(cacheKey); - valueCache.delete(key).whenComplete(handler); - } + futureCache.delete(cacheKey); + valueCache.delete(key).whenComplete(handler); return this; } @@ -692,13 +404,12 @@ public DataLoader clearAll() { * Clears the entire cache map of the loader, and of the cached value store. 
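Reviewer note: the load / loadMany / dispatch contract covered by the javadoc above is easiest to see in a small sketch. The user batch function and ids below are made up; only the DataLoader calls and the named newDataLoader overload introduced in this patch are assumed.

```
import org.dataloader.BatchLoader;
import org.dataloader.DataLoader;
import org.dataloader.DataLoaderFactory;

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;

public class LoadAndDispatchSketch {
    public static void main(String[] args) {
        // hypothetical batch function: resolves each id to a placeholder string
        BatchLoader<Long, String> userBatchLoader = userIds ->
                CompletableFuture.supplyAsync(() ->
                        userIds.stream().map(id -> "user-" + id).collect(Collectors.toList()));

        DataLoader<Long, String> userLoader =
                DataLoaderFactory.newDataLoader("users", userBatchLoader);

        // loads are only queued while batching is enabled (the default) ...
        CompletableFuture<String> one = userLoader.load(1L);
        CompletableFuture<List<String>> many = userLoader.loadMany(Arrays.asList(2L, 3L));

        // ... nothing completes until dispatch() is called
        userLoader.dispatch();

        System.out.println(one.join());   // user-1
        System.out.println(many.join());  // [user-2, user-3]

        // clearing a key means the next load(1L) goes back to the batch function
        userLoader.clear(1L);
    }
}
```

The same pattern applies to the new loadMany(Map) overload, with the per-key call contexts carried in the map values.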
* * @param handler a handler that will be called after the async remote clear all completes + * * @return the data loader for fluent coding */ public DataLoader clearAll(BiConsumer handler) { - synchronized (this) { - futureCache.clear(); - valueCache.clear().whenComplete(handler); - } + futureCache.clear(); + valueCache.clear().whenComplete(handler); return this; } @@ -709,6 +420,7 @@ public DataLoader clearAll(BiConsumer handler) { * * @param key the key * @param value the value + * * @return the data loader for fluent coding */ public DataLoader prime(K key, V value) { @@ -720,6 +432,7 @@ public DataLoader prime(K key, V value) { * * @param key the key * @param error the exception to prime instead of a value + * * @return the data loader for fluent coding */ public DataLoader prime(K key, Exception error) { @@ -733,15 +446,12 @@ public DataLoader prime(K key, Exception error) { * * @param key the key * @param value the value + * * @return the data loader for fluent coding */ public DataLoader prime(K key, CompletableFuture value) { Object cacheKey = getCacheKey(key); - synchronized (this) { - if (!futureCache.containsKey(cacheKey)) { - futureCache.set(cacheKey, value); - } - } + futureCache.putIfAbsentAtomically(cacheKey, value); return this; } @@ -752,6 +462,7 @@ public DataLoader prime(K key, CompletableFuture value) { * If no cache key function is present in {@link DataLoaderOptions}, then the returned value equals the input key. * * @param key the input key + * * @return the cache key after the input is transformed with the cache key function */ public Object getCacheKey(K key) { @@ -787,4 +498,11 @@ public ValueCache getValueCache() { return valueCache; } + @Override + public String toString() { + return "DataLoader{" + + "name='" + name + '\'' + + ", stats=" + stats + + '}'; + } } diff --git a/src/main/java/org/dataloader/DataLoaderFactory.java b/src/main/java/org/dataloader/DataLoaderFactory.java index ef1a287..509c16a 100644 --- a/src/main/java/org/dataloader/DataLoaderFactory.java +++ b/src/main/java/org/dataloader/DataLoaderFactory.java @@ -3,6 +3,8 @@ import org.dataloader.annotations.PublicApi; import org.jspecify.annotations.Nullable; +import static org.dataloader.impl.Assertions.nonNull; + /** * A factory class to create {@link DataLoader}s */ @@ -23,6 +25,20 @@ public static DataLoader newDataLoader(BatchLoader batchLoadF return newDataLoader(batchLoadFunction, null); } + /** + * Creates new DataLoader with the specified batch loader function and default options + * (batching, caching and unlimited batch size). 
+ * + * @param name the name to use + * @param batchLoadFunction the batch load function to use + * @param the key type + * @param the value type + * @return a new DataLoader + */ + public static DataLoader newDataLoader(String name, BatchLoader batchLoadFunction) { + return newDataLoader(name, batchLoadFunction, null); + } + /** * Creates new DataLoader with the specified batch loader function with the provided options * @@ -33,7 +49,21 @@ public static DataLoader newDataLoader(BatchLoader batchLoadF * @return a new DataLoader */ public static DataLoader newDataLoader(BatchLoader batchLoadFunction, DataLoaderOptions options) { - return mkDataLoader(batchLoadFunction, options); + return mkDataLoader(null, batchLoadFunction, options); + } + + /** + * Creates new DataLoader with the specified batch loader function with the provided options + * + * @param name the name to use + * @param batchLoadFunction the batch load function to use + * @param options the options to use + * @param the key type + * @param the value type + * @return a new DataLoader + */ + public static DataLoader newDataLoader(String name, BatchLoader batchLoadFunction, DataLoaderOptions options) { + return mkDataLoader(nonNull(name), batchLoadFunction, options); } /** @@ -69,7 +99,24 @@ public static DataLoader newDataLoaderWithTry(BatchLoader * @see #newDataLoaderWithTry(BatchLoader) */ public static DataLoader newDataLoaderWithTry(BatchLoader> batchLoadFunction, DataLoaderOptions options) { - return mkDataLoader(batchLoadFunction, options); + return mkDataLoader(null, batchLoadFunction, options); + } + + /** + * Creates new DataLoader with the specified batch loader function and with the provided options + * where the batch loader function returns a list of + * {@link org.dataloader.Try} objects. 
+ * + * @param name the name to use + * @param batchLoadFunction the batch load function to use that uses {@link org.dataloader.Try} objects + * @param options the options to use + * @param the key type + * @param the value type + * @return a new DataLoader + * @see #newDataLoaderWithTry(BatchLoader) + */ + public static DataLoader newDataLoaderWithTry(String name, BatchLoader> batchLoadFunction, DataLoaderOptions options) { + return mkDataLoader(nonNull(name), batchLoadFunction, options); } /** @@ -95,7 +142,21 @@ public static DataLoader newDataLoader(BatchLoaderWithContext * @return a new DataLoader */ public static DataLoader newDataLoader(BatchLoaderWithContext batchLoadFunction, DataLoaderOptions options) { - return mkDataLoader(batchLoadFunction, options); + return mkDataLoader(null, batchLoadFunction, options); + } + + /** + * Creates new DataLoader with the specified batch loader function with the provided options + * + * @param name the name to use + * @param batchLoadFunction the batch load function to use + * @param options the options to use + * @param the key type + * @param the value type + * @return a new DataLoader + */ + public static DataLoader newDataLoader(String name, BatchLoaderWithContext batchLoadFunction, DataLoaderOptions options) { + return mkDataLoader(nonNull(name), batchLoadFunction, options); } /** @@ -131,7 +192,24 @@ public static DataLoader newDataLoaderWithTry(BatchLoaderWithContex * @see #newDataLoaderWithTry(BatchLoader) */ public static DataLoader newDataLoaderWithTry(BatchLoaderWithContext> batchLoadFunction, DataLoaderOptions options) { - return mkDataLoader(batchLoadFunction, options); + return mkDataLoader(null, batchLoadFunction, options); + } + + /** + * Creates new DataLoader with the specified batch loader function and with the provided options + * where the batch loader function returns a list of + * {@link org.dataloader.Try} objects. 
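A sketch of the Try-returning variant described above, assuming the named newDataLoaderWithTry overload added in this patch plus the existing Try.succeeded / Try.failed helpers; fetchUser and its failure rule are invented for illustration. Each key succeeds or fails on its own instead of one bad key failing the whole batch.

```
import org.dataloader.BatchLoader;
import org.dataloader.DataLoader;
import org.dataloader.DataLoaderFactory;
import org.dataloader.DataLoaderOptions;
import org.dataloader.Try;

import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;

public class TryLoaderSketch {

    // hypothetical per-key lookup that can fail without failing the whole batch
    static String fetchUser(Long id) {
        if (id < 0) {
            throw new IllegalArgumentException("no such user " + id);
        }
        return "user-" + id;
    }

    static Try<String> fetchUserSafely(Long id) {
        try {
            return Try.succeeded(fetchUser(id));
        } catch (Exception e) {
            return Try.failed(e);
        }
    }

    public static void main(String[] args) {
        BatchLoader<Long, Try<String>> tryBatchLoader = ids ->
                CompletableFuture.completedFuture(ids.stream()
                        .map(TryLoaderSketch::fetchUserSafely)
                        .collect(Collectors.toList()));

        DataLoader<Long, String> loader = DataLoaderFactory.newDataLoaderWithTry(
                "usersWithTry", tryBatchLoader, DataLoaderOptions.newOptions().build());

        CompletableFuture<String> ok = loader.load(1L);
        CompletableFuture<String> bad = loader.load(-1L);
        loader.dispatch();

        System.out.println(ok.join());                      // user-1
        System.out.println(bad.isCompletedExceptionally()); // true, independently of ok
    }
}
```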
+ * + * @param name the name to use + * @param batchLoadFunction the batch load function to use that uses {@link org.dataloader.Try} objects + * @param options the options to use + * @param the key type + * @param the value type + * @return a new DataLoader + * @see #newDataLoaderWithTry(BatchLoader) + */ + public static DataLoader newDataLoaderWithTry(String name, BatchLoaderWithContext> batchLoadFunction, DataLoaderOptions options) { + return mkDataLoader(nonNull(name), batchLoadFunction, options); } /** @@ -157,7 +235,20 @@ public static DataLoader newMappedDataLoader(MappedBatchLoader DataLoader newMappedDataLoader(MappedBatchLoader batchLoadFunction, @Nullable DataLoaderOptions options) { - return mkDataLoader(batchLoadFunction, options); + return mkDataLoader(null, batchLoadFunction, options); + } + + /** + * Creates new DataLoader with the specified batch loader function with the provided options + * + * @param batchLoadFunction the batch load function to use + * @param options the options to use + * @param the key type + * @param the value type + * @return a new DataLoader + */ + public static DataLoader newMappedDataLoader(String name, MappedBatchLoader batchLoadFunction, @Nullable DataLoaderOptions options) { + return mkDataLoader(nonNull(name), batchLoadFunction, options); } /** @@ -194,7 +285,24 @@ public static DataLoader newMappedDataLoaderWithTry(MappedBatchLoad * @see #newDataLoaderWithTry(BatchLoader) */ public static DataLoader newMappedDataLoaderWithTry(MappedBatchLoader> batchLoadFunction, DataLoaderOptions options) { - return mkDataLoader(batchLoadFunction, options); + return mkDataLoader(null, batchLoadFunction, options); + } + + /** + * Creates new DataLoader with the specified batch loader function and with the provided options + * where the batch loader function returns a list of + * {@link org.dataloader.Try} objects. 
+ * + * @param name the name to use + * @param batchLoadFunction the batch load function to use that uses {@link org.dataloader.Try} objects + * @param options the options to use + * @param the key type + * @param the value type + * @return a new DataLoader + * @see #newDataLoaderWithTry(BatchLoader) + */ + public static DataLoader newMappedDataLoaderWithTry(String name, MappedBatchLoader> batchLoadFunction, DataLoaderOptions options) { + return mkDataLoader(nonNull(name), batchLoadFunction, options); } /** @@ -220,7 +328,21 @@ public static DataLoader newMappedDataLoader(MappedBatchLoaderWithC * @return a new DataLoader */ public static DataLoader newMappedDataLoader(MappedBatchLoaderWithContext batchLoadFunction, DataLoaderOptions options) { - return mkDataLoader(batchLoadFunction, options); + return mkDataLoader(null, batchLoadFunction, options); + } + + /** + * Creates new DataLoader with the specified batch loader function with the provided options + * + * @param name the name to use + * @param batchLoadFunction the batch load function to use + * @param options the options to use + * @param the key type + * @param the value type + * @return a new DataLoader + */ + public static DataLoader newMappedDataLoader(String name, MappedBatchLoaderWithContext batchLoadFunction, DataLoaderOptions options) { + return mkDataLoader(nonNull(name), batchLoadFunction, options); } /** @@ -256,7 +378,24 @@ public static DataLoader newMappedDataLoaderWithTry(MappedBatchLoad * @see #newDataLoaderWithTry(BatchLoader) */ public static DataLoader newMappedDataLoaderWithTry(MappedBatchLoaderWithContext> batchLoadFunction, DataLoaderOptions options) { - return mkDataLoader(batchLoadFunction, options); + return mkDataLoader(null, batchLoadFunction, options); + } + + /** + * Creates new DataLoader with the specified batch loader function and with the provided options + * where the batch loader function returns a list of + * {@link org.dataloader.Try} objects. 
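The mapped variants above take a MappedBatchLoader that returns a Map keyed by the requested ids, which is convenient when some keys legitimately have no value. A sketch with a made-up store where odd ids are simply absent:

```
import org.dataloader.DataLoader;
import org.dataloader.DataLoaderFactory;
import org.dataloader.MappedBatchLoader;

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CompletableFuture;

public class MappedLoaderSketch {
    public static void main(String[] args) {
        // hypothetical store lookup: even ids exist, odd ids are missing from the result map
        MappedBatchLoader<Long, String> mappedLoader = ids ->
                CompletableFuture.supplyAsync(() -> {
                    Map<Long, String> found = new HashMap<>();
                    for (Long id : ids) {
                        if (id % 2 == 0) {
                            found.put(id, "user-" + id);
                        }
                    }
                    return found;
                });

        DataLoader<Long, String> loader =
                DataLoaderFactory.newMappedDataLoader("mappedUsers", mappedLoader, null); // null => default options

        CompletableFuture<String> present = loader.load(2L);
        CompletableFuture<String> missing = loader.load(3L);
        loader.dispatch();

        System.out.println(present.join()); // user-2
        System.out.println(missing.join()); // null - an absent key does not fail the batch
    }
}
```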
+ * + * @param name the name to use + * @param batchLoadFunction the batch load function to use that uses {@link org.dataloader.Try} objects + * @param options the options to use + * @param the key type + * @param the value type + * @return a new DataLoader + * @see #newDataLoaderWithTry(BatchLoader) + */ + public static DataLoader newMappedDataLoaderWithTry(String name, MappedBatchLoaderWithContext> batchLoadFunction, DataLoaderOptions options) { + return mkDataLoader(nonNull(name), batchLoadFunction, options); } /** @@ -282,7 +421,21 @@ public static DataLoader newPublisherDataLoader(BatchPublisher DataLoader newPublisherDataLoader(BatchPublisher batchLoadFunction, DataLoaderOptions options) { - return mkDataLoader(batchLoadFunction, options); + return mkDataLoader(null, batchLoadFunction, options); + } + + /** + * Creates new DataLoader with the specified batch loader function with the provided options + * + * @param name the name to use + * @param batchLoadFunction the batch load function to use + * @param options the options to use + * @param the key type + * @param the value type + * @return a new DataLoader + */ + public static DataLoader newPublisherDataLoader(String name, BatchPublisher batchLoadFunction, DataLoaderOptions options) { + return mkDataLoader(nonNull(name), batchLoadFunction, options); } /** @@ -318,7 +471,24 @@ public static DataLoader newPublisherDataLoaderWithTry(BatchPublish * @see #newDataLoaderWithTry(BatchLoader) */ public static DataLoader newPublisherDataLoaderWithTry(BatchPublisher> batchLoadFunction, DataLoaderOptions options) { - return mkDataLoader(batchLoadFunction, options); + return mkDataLoader(null, batchLoadFunction, options); + } + + /** + * Creates new DataLoader with the specified batch loader function and with the provided options + * where the batch loader function returns a list of + * {@link org.dataloader.Try} objects. 
+ * + * @param name the name to use + * @param batchLoadFunction the batch load function to use that uses {@link org.dataloader.Try} objects + * @param options the options to use + * @param the key type + * @param the value type + * @return a new DataLoader + * @see #newDataLoaderWithTry(BatchLoader) + */ + public static DataLoader newPublisherDataLoaderWithTry(String name, BatchPublisher> batchLoadFunction, DataLoaderOptions options) { + return mkDataLoader(nonNull(name), batchLoadFunction, options); } /** @@ -344,7 +514,21 @@ public static DataLoader newPublisherDataLoader(BatchPublisherWithC * @return a new DataLoader */ public static DataLoader newPublisherDataLoader(BatchPublisherWithContext batchLoadFunction, DataLoaderOptions options) { - return mkDataLoader(batchLoadFunction, options); + return mkDataLoader(null, batchLoadFunction, options); + } + + /** + * Creates new DataLoader with the specified batch loader function with the provided options + * + * @param name the name to use + * @param batchLoadFunction the batch load function to use + * @param options the options to use + * @param the key type + * @param the value type + * @return a new DataLoader + */ + public static DataLoader newPublisherDataLoader(String name, BatchPublisherWithContext batchLoadFunction, DataLoaderOptions options) { + return mkDataLoader(nonNull(name), batchLoadFunction, options); } /** @@ -380,7 +564,24 @@ public static DataLoader newPublisherDataLoaderWithTry(BatchPublish * @see #newPublisherDataLoaderWithTry(BatchPublisher) */ public static DataLoader newPublisherDataLoaderWithTry(BatchPublisherWithContext> batchLoadFunction, DataLoaderOptions options) { - return mkDataLoader(batchLoadFunction, options); + return mkDataLoader(null, batchLoadFunction, options); + } + + /** + * Creates new DataLoader with the specified batch loader function and with the provided options + * where the batch loader function returns a list of + * {@link org.dataloader.Try} objects. 
+ * + * @param name the name to use + * @param batchLoadFunction the batch load function to use that uses {@link org.dataloader.Try} objects + * @param options the options to use + * @param the key type + * @param the value type + * @return a new DataLoader + * @see #newPublisherDataLoaderWithTry(BatchPublisher) + */ + public static DataLoader newPublisherDataLoaderWithTry(String name, BatchPublisherWithContext> batchLoadFunction, DataLoaderOptions options) { + return mkDataLoader(nonNull(name), batchLoadFunction, options); } /** @@ -406,7 +607,21 @@ public static DataLoader newMappedPublisherDataLoader(MappedBatchPu * @return a new DataLoader */ public static DataLoader newMappedPublisherDataLoader(MappedBatchPublisher batchLoadFunction, DataLoaderOptions options) { - return mkDataLoader(batchLoadFunction, options); + return mkDataLoader(null, batchLoadFunction, options); + } + + /** + * Creates new DataLoader with the specified batch loader function with the provided options + * + * @param name the name to use + * @param batchLoadFunction the batch load function to use + * @param options the options to use + * @param the key type + * @param the value type + * @return a new DataLoader + */ + public static DataLoader newMappedPublisherDataLoader(String name, MappedBatchPublisher batchLoadFunction, DataLoaderOptions options) { + return mkDataLoader(nonNull(name), batchLoadFunction, options); } /** @@ -442,7 +657,24 @@ public static DataLoader newMappedPublisherDataLoaderWithTry(Mapped * @see #newDataLoaderWithTry(BatchLoader) */ public static DataLoader newMappedPublisherDataLoaderWithTry(MappedBatchPublisher> batchLoadFunction, DataLoaderOptions options) { - return mkDataLoader(batchLoadFunction, options); + return mkDataLoader(null, batchLoadFunction, options); + } + + /** + * Creates new DataLoader with the specified batch loader function and with the provided options + * where the batch loader function returns a list of + * {@link org.dataloader.Try} objects. 
+ * + * @param name the name to use + * @param batchLoadFunction the batch load function to use that uses {@link org.dataloader.Try} objects + * @param options the options to use + * @param the key type + * @param the value type + * @return a new DataLoader + * @see #newDataLoaderWithTry(BatchLoader) + */ + public static DataLoader newMappedPublisherDataLoaderWithTry(String name, MappedBatchPublisher> batchLoadFunction, DataLoaderOptions options) { + return mkDataLoader(nonNull(name), batchLoadFunction, options); } /** @@ -468,7 +700,21 @@ public static DataLoader newMappedPublisherDataLoader(MappedBatchPu * @return a new DataLoader */ public static DataLoader newMappedPublisherDataLoader(MappedBatchPublisherWithContext batchLoadFunction, DataLoaderOptions options) { - return mkDataLoader(batchLoadFunction, options); + return mkDataLoader(null, batchLoadFunction, options); + } + + /** + * Creates new DataLoader with the specified batch loader function with the provided options + * + * @param name the name to use + * @param batchLoadFunction the batch load function to use + * @param options the options to use + * @param the key type + * @param the value type + * @return a new DataLoader + */ + public static DataLoader newMappedPublisherDataLoader(String name, MappedBatchPublisherWithContext batchLoadFunction, DataLoaderOptions options) { + return mkDataLoader(nonNull(name), batchLoadFunction, options); } /** @@ -504,11 +750,28 @@ public static DataLoader newMappedPublisherDataLoaderWithTry(Mapped * @see #newMappedPublisherDataLoaderWithTry(MappedBatchPublisher) */ public static DataLoader newMappedPublisherDataLoaderWithTry(MappedBatchPublisherWithContext> batchLoadFunction, DataLoaderOptions options) { - return mkDataLoader(batchLoadFunction, options); + return mkDataLoader(null, batchLoadFunction, options); } - static DataLoader mkDataLoader(Object batchLoadFunction, DataLoaderOptions options) { - return new DataLoader<>(batchLoadFunction, options); + /** + * Creates new DataLoader with the specified batch loader function and with the provided options + * where the batch loader function returns a list of + * {@link org.dataloader.Try} objects. 
+ * + * @param name the name to use + * @param batchLoadFunction the batch load function to use that uses {@link org.dataloader.Try} objects + * @param options the options to use + * @param the key type + * @param the value type + * @return a new DataLoader + * @see #newMappedPublisherDataLoaderWithTry(MappedBatchPublisher) + */ + public static DataLoader newMappedPublisherDataLoaderWithTry(String name, MappedBatchPublisherWithContext> batchLoadFunction, DataLoaderOptions options) { + return mkDataLoader(nonNull(name), batchLoadFunction, options); + } + + static DataLoader mkDataLoader(@Nullable String name, Object batchLoadFunction, @Nullable DataLoaderOptions options) { + return new DataLoader<>(name, batchLoadFunction, options); } /** @@ -541,19 +804,21 @@ public static Builder builder(DataLoader dataLoader) { * @param the value type */ public static class Builder { + String name; Object batchLoadFunction; - DataLoaderOptions options = DataLoaderOptions.newOptions(); + DataLoaderOptions options = DataLoaderOptions.newDefaultOptions(); Builder() { } Builder(DataLoader dataLoader) { + this.name = dataLoader.getName(); this.batchLoadFunction = dataLoader.getBatchLoadFunction(); this.options = dataLoader.getOptions(); } - public Builder batchLoadFunction(Object batchLoadFunction) { - this.batchLoadFunction = batchLoadFunction; + public Builder name(String name) { + this.name = name; return this; } @@ -562,8 +827,53 @@ public Builder options(DataLoaderOptions options) { return this; } + public Builder batchLoadFunction(Object batchLoadFunction) { + this.batchLoadFunction = batchLoadFunction; + return this; + } + + public Builder batchLoader(BatchLoader batchLoadFunction) { + this.batchLoadFunction = batchLoadFunction; + return this; + } + + public Builder batchLoader(BatchLoaderWithContext batchLoadFunction) { + this.batchLoadFunction = batchLoadFunction; + return this; + } + + public Builder mappedBatchLoader(MappedBatchLoader batchLoadFunction) { + this.batchLoadFunction = batchLoadFunction; + return this; + } + + public Builder mappedBatchLoader(MappedBatchLoaderWithContext batchLoadFunction) { + this.batchLoadFunction = batchLoadFunction; + return this; + } + + public Builder publisherBatchLoader(BatchPublisher batchLoadFunction) { + this.batchLoadFunction = batchLoadFunction; + return this; + } + + public Builder publisherBatchLoader(BatchPublisherWithContext batchLoadFunction) { + this.batchLoadFunction = batchLoadFunction; + return this; + } + + public Builder mappedPublisherBatchLoader(MappedBatchPublisher batchLoadFunction) { + this.batchLoadFunction = batchLoadFunction; + return this; + } + + public Builder mappedPublisherBatchLoader(MappedBatchPublisherWithContext batchLoadFunction) { + this.batchLoadFunction = batchLoadFunction; + return this; + } + public DataLoader build() { - return mkDataLoader(batchLoadFunction, options); + return mkDataLoader(name, batchLoadFunction, options); } } } diff --git a/src/main/java/org/dataloader/DataLoaderHelper.java b/src/main/java/org/dataloader/DataLoaderHelper.java index 7858780..feb6184 100644 --- a/src/main/java/org/dataloader/DataLoaderHelper.java +++ b/src/main/java/org/dataloader/DataLoaderHelper.java @@ -1,6 +1,5 @@ package org.dataloader; -import org.dataloader.annotations.GuardedBy; import org.dataloader.annotations.Internal; import org.dataloader.impl.CompletableFutureKit; import org.dataloader.instrumentation.DataLoaderInstrumentation; @@ -13,11 +12,13 @@ import org.dataloader.stats.context.IncrementCacheHitCountStatisticsContext; 
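Looking back at the DataLoaderFactory.Builder hunk just above: the new name(...) and typed batchLoader(...) setters are most useful through DataLoader.transform(...), which rebuilds a loader via that Builder. A sketch with two made-up batch functions:

```
import org.dataloader.BatchLoader;
import org.dataloader.DataLoader;
import org.dataloader.DataLoaderFactory;

import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;

public class TransformSketch {
    public static void main(String[] args) {
        BatchLoader<Long, String> v1 = ids -> CompletableFuture.completedFuture(
                ids.stream().map(id -> "v1-" + id).collect(Collectors.toList()));
        BatchLoader<Long, String> v2 = ids -> CompletableFuture.completedFuture(
                ids.stream().map(id -> "v2-" + id).collect(Collectors.toList()));

        DataLoader<Long, String> original = DataLoaderFactory.newDataLoader("users", v1);

        // rebuild the loader with a new name and a different batch function,
        // using the typed batchLoader(...) setter instead of the untyped batchLoadFunction(Object)
        DataLoader<Long, String> upgraded = original.transform(builder -> builder
                .name("usersV2")
                .batchLoader(v2));

        System.out.println(upgraded.getName()); // usersV2
    }
}
```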
import org.dataloader.stats.context.IncrementLoadCountStatisticsContext; import org.dataloader.stats.context.IncrementLoadErrorCountStatisticsContext; +import org.jspecify.annotations.Nullable; import org.reactivestreams.Subscriber; import java.time.Clock; import java.time.Instant; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.LinkedHashSet; import java.util.List; @@ -48,23 +49,27 @@ @Internal class DataLoaderHelper { - static class LoaderQueueEntry { + private static class LoaderQueueEntry { final K key; - final V value; + final CompletableFuture value; final Object callContext; + final LoaderQueueEntry prev; + final int queueSize; - public LoaderQueueEntry(K key, V value, Object callContext) { + public LoaderQueueEntry(K key, CompletableFuture value, Object callContext, LoaderQueueEntry prev, int queueSize) { this.key = key; this.value = value; this.callContext = callContext; + this.prev = prev; + this.queueSize = queueSize; } K getKey() { return key; } - V getValue() { + CompletableFuture getValue() { return value; } @@ -78,7 +83,8 @@ Object getCallContext() { private final DataLoaderOptions loaderOptions; private final CacheMap futureCache; private final ValueCache valueCache; - private final List>> loaderQueue; + // private final List>> loaderQueue; + private final AtomicReference<@Nullable LoaderQueueEntry> loaderQueue = new AtomicReference<>(); private final StatisticsCollector stats; private final Clock clock; private final AtomicReference lastDispatchTime; @@ -95,7 +101,6 @@ Object getCallContext() { this.loaderOptions = loaderOptions; this.futureCache = futureCache; this.valueCache = valueCache; - this.loaderQueue = new ArrayList<>(); this.stats = stats; this.clock = clock; this.lastDispatchTime = new AtomicReference<>(); @@ -111,31 +116,27 @@ public Instant getLastDispatchTime() { } Optional> getIfPresent(K key) { - synchronized (dataLoader) { - boolean cachingEnabled = loaderOptions.cachingEnabled(); - if (cachingEnabled) { - Object cacheKey = getCacheKey(nonNull(key)); - try { - CompletableFuture cacheValue = futureCache.get(cacheKey); - if (cacheValue != null) { - stats.incrementCacheHitCount(new IncrementCacheHitCountStatisticsContext<>(key)); - return Optional.of(cacheValue); - } - } catch (Exception ignored) { + boolean cachingEnabled = loaderOptions.cachingEnabled(); + if (cachingEnabled) { + Object cacheKey = getCacheKey(nonNull(key)); + try { + CompletableFuture cacheValue = futureCache.get(cacheKey); + if (cacheValue != null) { + stats.incrementCacheHitCount(new IncrementCacheHitCountStatisticsContext<>(key)); + return Optional.of(cacheValue); } + } catch (Exception ignored) { } } return Optional.empty(); } Optional> getIfCompleted(K key) { - synchronized (dataLoader) { - Optional> cachedPromise = getIfPresent(key); - if (cachedPromise.isPresent()) { - CompletableFuture promise = cachedPromise.get(); - if (promise.isDone()) { - return cachedPromise; - } + Optional> cachedPromise = getIfPresent(key); + if (cachedPromise.isPresent()) { + CompletableFuture promise = cachedPromise.get(); + if (promise.isDone()) { + return cachedPromise; } } return Optional.empty(); @@ -143,21 +144,68 @@ Optional> getIfCompleted(K key) { CompletableFuture load(K key, Object loadContext) { - synchronized (dataLoader) { - boolean batchingEnabled = loaderOptions.batchingEnabled(); - boolean cachingEnabled = loaderOptions.cachingEnabled(); - - stats.incrementLoadCount(new IncrementLoadCountStatisticsContext<>(key, loadContext)); - 
DataLoaderInstrumentationContext ctx = ctxOrNoopCtx(instrumentation().beginLoad(dataLoader, key,loadContext)); - CompletableFuture cf; - if (cachingEnabled) { - cf = loadFromCache(key, loadContext, batchingEnabled); - } else { - cf = queueOrInvokeLoader(key, loadContext, batchingEnabled, false); + boolean batchingEnabled = loaderOptions.batchingEnabled(); + boolean futureCachingEnabled = loaderOptions.cachingEnabled(); + + stats.incrementLoadCount(new IncrementLoadCountStatisticsContext<>(key, loadContext)); + DataLoaderInstrumentationContext ctx = ctxOrNoopCtx(instrumentation().beginLoad(dataLoader, key, loadContext)); + Object cacheKey = null; + if (futureCachingEnabled) { + cacheKey = loadContext == null ? getCacheKey(key) : getCacheKeyWithContext(key, loadContext); + try { + CompletableFuture cachedFuture = futureCache.get(cacheKey); + if (cachedFuture != null) { + // We already have a promise for this key, no need to check value cache or queue this load + return incrementCacheHitAndReturnCF(ctx, key, loadContext, cachedFuture); + } + } catch (Exception ignored) { + } + } + CompletableFuture loadCallFuture; + if (batchingEnabled) { + loadCallFuture = new CompletableFuture<>(); + if (futureCachingEnabled) { + CompletableFuture cachedFuture = futureCache.putIfAbsentAtomically(cacheKey, loadCallFuture); + if (cachedFuture != null) { + // another thread was faster and created a matching CF ... hence this is really a cache hit and we are done + return incrementCacheHitAndReturnCF(ctx, key, loadContext, cachedFuture); + } + } + addEntryToLoaderQueue(key, loadCallFuture, loadContext); + } else { + stats.incrementBatchLoadCountBy(1, new IncrementBatchLoadCountByStatisticsContext<>(key, loadContext)); + // immediate execution of batch function + loadCallFuture = invokeLoaderImmediately(key, loadContext, true); + if (futureCachingEnabled) { + CompletableFuture cachedFuture = futureCache.putIfAbsentAtomically(cacheKey, loadCallFuture); + if (cachedFuture != null) { + // another thread was faster and the loader was invoked twice with the same key + // we are disregarding the result of our dispatch call and use the already cached value + // meaning this is a cache hit and we are done + return incrementCacheHitAndReturnCF(ctx, key, loadContext, cachedFuture); + } + } + } + + ctx.onDispatched(); + loadCallFuture.whenComplete(ctx::onCompleted); + return loadCallFuture; + } + + private CompletableFuture incrementCacheHitAndReturnCF(DataLoaderInstrumentationContext ctx, K key, Object loadContext, CompletableFuture cachedFuture) { + stats.incrementCacheHitCount(new IncrementCacheHitCountStatisticsContext<>(key, loadContext)); + ctx.onDispatched(); + cachedFuture.whenComplete(ctx::onCompleted); + return cachedFuture; + } + + private void addEntryToLoaderQueue(K key, CompletableFuture future, Object loadContext) { + while (true) { + LoaderQueueEntry prev = loaderQueue.get(); + LoaderQueueEntry curr = new LoaderQueueEntry<>(key, future, loadContext, prev, prev != null ? 
prev.queueSize + 1 : 1); + if (loaderQueue.compareAndSet(prev, curr)) { + return; } - ctx.onDispatched(); - cf.whenComplete(ctx::onCompleted); - return cf; } } @@ -173,38 +221,49 @@ Object getCacheKeyWithContext(K key, Object context) { loaderOptions.cacheKeyFunction().get().getKeyWithContext(key, context) : key; } + @SuppressWarnings("unchecked") DispatchResult dispatch() { DataLoaderInstrumentationContext> instrCtx = ctxOrNoopCtx(instrumentation().beginDispatch(dataLoader)); boolean batchingEnabled = loaderOptions.batchingEnabled(); - final List keys; - final List callContexts; - final List> queuedFutures; - synchronized (dataLoader) { - int queueSize = loaderQueue.size(); - if (queueSize == 0) { - lastDispatchTime.set(now()); - instrCtx.onDispatched(); - return endDispatchCtx(instrCtx, emptyDispatchResult()); - } - // we copy the pre-loaded set of futures ready for dispatch - keys = new ArrayList<>(queueSize); - callContexts = new ArrayList<>(queueSize); - queuedFutures = new ArrayList<>(queueSize); - - loaderQueue.forEach(entry -> { - keys.add(entry.getKey()); - queuedFutures.add(entry.getValue()); - callContexts.add(entry.getCallContext()); - }); - loaderQueue.clear(); + LoaderQueueEntry loaderQueueEntryHead; + while (true) { + loaderQueueEntryHead = loaderQueue.get(); + if (loaderQueue.compareAndSet(loaderQueueEntryHead, null)) { + // one or more threads competed and this one won. This thread holds + // the loader queue root in the local variable loaderQueueEntryHead + break; + } + } + if (loaderQueueEntryHead == null) { lastDispatchTime.set(now()); + instrCtx.onDispatched(); + return endDispatchCtx(instrCtx, emptyDispatchResult()); + } + int queueSize = loaderQueueEntryHead.queueSize; + // we copy the pre-loaded set of futures ready for dispatch + Object[] keysArray = new Object[queueSize]; + CompletableFuture[] queuedFuturesArray = new CompletableFuture[queueSize]; + Object[] callContextsArray = new Object[queueSize]; + int index = queueSize - 1; + while (loaderQueueEntryHead != null) { + keysArray[index] = loaderQueueEntryHead.getKey(); + queuedFuturesArray[index] = loaderQueueEntryHead.getValue(); + callContextsArray[index] = loaderQueueEntryHead.getCallContext(); + loaderQueueEntryHead = loaderQueueEntryHead.prev; + index--; } + final List keys = (List) Arrays.asList(keysArray); + final List> queuedFutures = Arrays.asList(queuedFuturesArray); + final List callContexts = Arrays.asList(callContextsArray); + + lastDispatchTime.set(now()); if (!batchingEnabled) { instrCtx.onDispatched(); return endDispatchCtx(instrCtx, emptyDispatchResult()); } + final int totalEntriesHandled = keys.size(); // // order of keys -> values matter in data loader hence the use of linked hash map @@ -334,38 +393,6 @@ private void possiblyClearCacheEntriesOnExceptions(List keys) { } } - @GuardedBy("dataLoader") - private CompletableFuture loadFromCache(K key, Object loadContext, boolean batchingEnabled) { - final Object cacheKey = loadContext == null ? 
getCacheKey(key) : getCacheKeyWithContext(key, loadContext); - - try { - CompletableFuture cacheValue = futureCache.get(cacheKey); - if (cacheValue != null) { - // We already have a promise for this key, no need to check value cache or queue up load - stats.incrementCacheHitCount(new IncrementCacheHitCountStatisticsContext<>(key, loadContext)); - return cacheValue; - } - } catch (Exception ignored) { - } - - CompletableFuture loadCallFuture = queueOrInvokeLoader(key, loadContext, batchingEnabled, true); - futureCache.set(cacheKey, loadCallFuture); - return loadCallFuture; - } - - @GuardedBy("dataLoader") - private CompletableFuture queueOrInvokeLoader(K key, Object loadContext, boolean batchingEnabled, boolean cachingEnabled) { - if (batchingEnabled) { - CompletableFuture loadCallFuture = new CompletableFuture<>(); - loaderQueue.add(new LoaderQueueEntry<>(key, loadCallFuture, loadContext)); - return loadCallFuture; - } else { - stats.incrementBatchLoadCountBy(1, new IncrementBatchLoadCountByStatisticsContext<>(key, loadContext)); - // immediate execution of batch function - return invokeLoaderImmediately(key, loadContext, cachingEnabled); - } - } - CompletableFuture invokeLoaderImmediately(K key, Object keyContext, boolean cachingEnabled) { List keys = singletonList(key); List keyContexts = singletonList(keyContext); @@ -594,11 +621,11 @@ private boolean isMapLoader() { } private boolean isPublisher() { - return batchLoadFunction instanceof BatchPublisher; + return batchLoadFunction instanceof BatchPublisher || batchLoadFunction instanceof BatchPublisherWithContext; } private boolean isMappedPublisher() { - return batchLoadFunction instanceof MappedBatchPublisher; + return batchLoadFunction instanceof MappedBatchPublisher || batchLoadFunction instanceof MappedBatchPublisherWithContext; } private DataLoaderInstrumentation instrumentation() { @@ -606,11 +633,15 @@ private DataLoaderInstrumentation instrumentation() { } int dispatchDepth() { - synchronized (dataLoader) { - return loaderQueue.size(); + LoaderQueueEntry loaderQueueEntry = loaderQueue.get(); + if (loaderQueueEntry != null) { + return loaderQueueEntry.queueSize; + } else { + return 0; } } + private final List> NOT_SUPPORTED_LIST = emptyList(); private final CompletableFuture>> NOT_SUPPORTED = CompletableFuture.completedFuture(NOT_SUPPORTED_LIST); private final Try ALWAYS_FAILED = Try.alwaysFailed(); diff --git a/src/main/java/org/dataloader/DataLoaderOptions.java b/src/main/java/org/dataloader/DataLoaderOptions.java index 8667943..f7c006f 100644 --- a/src/main/java/org/dataloader/DataLoaderOptions.java +++ b/src/main/java/org/dataloader/DataLoaderOptions.java @@ -89,47 +89,27 @@ private DataLoaderOptions(Builder builder) { this.instrumentation = builder.instrumentation; } - /** - * Clones the provided data loader options. 
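For reviewers unfamiliar with the structure that replaces the synchronized ArrayList loader queue above: it is essentially an immutable linked node plus compare-and-set on an AtomicReference (a Treiber-stack style queue), with the running size stored in each node so dispatchDepth() stays O(1). The following is a standalone sketch of that pattern with invented names, not the library code itself; it drains with getAndSet(null) where the patch uses an equivalent CAS loop.

```
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.atomic.AtomicReference;

final class LockFreeQueueSketch<E> {

    private static final class Node<E> {
        final E value;
        final Node<E> prev;   // link to the previously enqueued node
        final int size;       // number of nodes up to and including this one

        Node(E value, Node<E> prev) {
            this.value = value;
            this.prev = prev;
            this.size = prev == null ? 1 : prev.size + 1;
        }
    }

    private final AtomicReference<Node<E>> head = new AtomicReference<>();

    /** Enqueue without locking: retry the CAS until this thread wins the race. */
    void add(E value) {
        while (true) {
            Node<E> prev = head.get();
            Node<E> next = new Node<>(value, prev);
            if (head.compareAndSet(prev, next)) {
                return;
            }
        }
    }

    /** Current depth in O(1), mirroring dispatchDepth(). */
    int size() {
        Node<E> h = head.get();
        return h == null ? 0 : h.size;
    }

    /** Atomically take everything queued so far, in insertion order. */
    List<E> drain() {
        Node<E> h = head.getAndSet(null);
        if (h == null) {
            return Collections.emptyList();
        }
        List<E> out = new ArrayList<>(h.size);
        for (Node<E> n = h; n != null; n = n.prev) {
            out.add(n.value);
        }
        Collections.reverse(out);   // nodes are linked newest-first
        return out;
    }
}
```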
- * - * @param other the other options instance - */ - public DataLoaderOptions(DataLoaderOptions other) { - nonNull(other); - this.batchingEnabled = other.batchingEnabled; - this.cachingEnabled = other.cachingEnabled; - this.cachingExceptionsEnabled = other.cachingExceptionsEnabled; - this.cacheKeyFunction = other.cacheKeyFunction; - this.cacheMap = other.cacheMap; - this.valueCache = other.valueCache; - this.maxBatchSize = other.maxBatchSize; - this.statisticsCollector = other.statisticsCollector; - this.environmentProvider = other.environmentProvider; - this.valueCacheOptions = other.valueCacheOptions; - this.batchLoaderScheduler = other.batchLoaderScheduler; - this.instrumentation = other.instrumentation; - } - /** * @return a new default data loader options that you can then customize */ - public static DataLoaderOptions newOptions() { + public static DataLoaderOptions newDefaultOptions() { return new DataLoaderOptions(); } /** - * @return a new default data loader options {@link Builder} that you can then customize + * @return a new default data loader options builder that you can then customize */ - public static DataLoaderOptions.Builder newOptionsBuilder() { - return new DataLoaderOptions.Builder(); + public static DataLoaderOptions.Builder newOptions() { + return new Builder(); } /** - * @param otherOptions the options to copy - * @return a new default data loader options {@link Builder} from the specified one that you can then customize + * Copies the options into a new builder + * + * @return a new default data loader options builder that you can then customize */ - public static DataLoaderOptions.Builder newDataLoaderOptions(DataLoaderOptions otherOptions) { - return new DataLoaderOptions.Builder(otherOptions); + public static DataLoaderOptions.Builder newOptions(DataLoaderOptions otherOptions) { + return new Builder(otherOptions); } /** @@ -139,7 +119,7 @@ public static DataLoaderOptions.Builder newDataLoaderOptions(DataLoaderOptions o * @return a new {@link DataLoaderOptions} object */ public DataLoaderOptions transform(Consumer builderConsumer) { - Builder builder = newDataLoaderOptions(this); + Builder builder = new Builder(this); builderConsumer.accept(builder); return builder.build(); } @@ -171,16 +151,6 @@ public boolean batchingEnabled() { return batchingEnabled; } - /** - * Sets the option that determines whether batch loading is enabled. - * - * @param batchingEnabled {@code true} to enable batch loading, {@code false} otherwise - * @return a new data loader options instance for fluent coding - */ - public DataLoaderOptions setBatchingEnabled(boolean batchingEnabled) { - return builder().setBatchingEnabled(batchingEnabled).build(); - } - /** * Option that determines whether to use caching of futures (the default), or not. * @@ -190,16 +160,6 @@ public boolean cachingEnabled() { return cachingEnabled; } - /** - * Sets the option that determines whether caching is enabled. - * - * @param cachingEnabled {@code true} to enable caching, {@code false} otherwise - * @return a new data loader options instance for fluent coding - */ - public DataLoaderOptions setCachingEnabled(boolean cachingEnabled) { - return builder().setCachingEnabled(cachingEnabled).build(); - } - /** * Option that determines whether to cache exceptional values (the default), or not. *
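With the copy-on-write setXxx(...) methods removed from DataLoaderOptions, callers migrate to the builder returned by the renamed newOptions() (or to transform(...) for deriving a copy). The option values below are arbitrary; the methods are the ones shown in this hunk.

```
import org.dataloader.DataLoaderOptions;

public class OptionsBuilderSketch {
    public static void main(String[] args) {
        // builder-style construction replacing the removed setXxx(...) copy-on-write methods
        DataLoaderOptions options = DataLoaderOptions.newOptions()
                .setMaxBatchSize(50)
                .setCachingEnabled(true)
                .setCachingExceptionsEnabled(false)
                .build();

        // deriving a tweaked copy of existing options
        DataLoaderOptions smallerBatches = options.transform(builder -> builder.setMaxBatchSize(10));

        System.out.println(options.maxBatchSize());        // 50
        System.out.println(smallerBatches.maxBatchSize()); // 10
    }
}
```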

@@ -214,16 +174,6 @@ public boolean cachingExceptionsEnabled() { return cachingExceptionsEnabled; } - /** - * Sets the option that determines whether exceptional values are cache enabled. - * - * @param cachingExceptionsEnabled {@code true} to enable caching exceptional values, {@code false} otherwise - * @return a new data loader options instance for fluent coding - */ - public DataLoaderOptions setCachingExceptionsEnabled(boolean cachingExceptionsEnabled) { - return builder().setCachingExceptionsEnabled(cachingExceptionsEnabled).build(); - } - /** * Gets an (optional) function to invoke for creation of the cache key, if caching is enabled. *

@@ -235,16 +185,6 @@ public Optional cacheKeyFunction() { return Optional.ofNullable(cacheKeyFunction); } - /** - * Sets the function to use for creating the cache key, if caching is enabled. - * - * @param cacheKeyFunction the cache key function to use - * @return a new data loader options instance for fluent coding - */ - public DataLoaderOptions setCacheKeyFunction(CacheKey cacheKeyFunction) { - return builder().setCacheKeyFunction(cacheKeyFunction).build(); - } - /** * Gets the (optional) cache map implementation that is used for caching, if caching is enabled. *

@@ -256,15 +196,6 @@ public DataLoaderOptions setCacheKeyFunction(CacheKey cacheKeyFunction) { return Optional.ofNullable(cacheMap); } - /** - * Sets the cache map implementation to use for caching, if caching is enabled. - * - * @param cacheMap the cache map instance - * @return a new data loader options instance for fluent coding - */ - public DataLoaderOptions setCacheMap(CacheMap cacheMap) { - return builder().setCacheMap(cacheMap).build(); - } /** * Gets the maximum number of keys that will be presented to the {@link BatchLoader} function @@ -276,17 +207,6 @@ public int maxBatchSize() { return maxBatchSize; } - /** - * Sets the maximum number of keys that will be presented to the {@link BatchLoader} function - * before they are split into multiple class - * - * @param maxBatchSize the maximum batch size - * @return a new data loader options instance for fluent coding - */ - public DataLoaderOptions setMaxBatchSize(int maxBatchSize) { - return builder().setMaxBatchSize(maxBatchSize).build(); - } - /** * @return the statistics collector to use with these options */ @@ -294,18 +214,6 @@ public StatisticsCollector getStatisticsCollector() { return nonNull(this.statisticsCollector.get()); } - /** - * Sets the statistics collector supplier that will be used with these data loader options. Since it uses - * the supplier pattern, you can create a new statistics collector on each call, or you can reuse - * a common value - * - * @param statisticsCollector the statistics collector to use - * @return a new data loader options instance for fluent coding - */ - public DataLoaderOptions setStatisticsCollector(Supplier statisticsCollector) { - return builder().setStatisticsCollector(nonNull(statisticsCollector)).build(); - } - /** * @return the batch environment provider that will be used to give context to batch load functions */ @@ -313,16 +221,6 @@ public BatchLoaderContextProvider getBatchLoaderContextProvider() { return environmentProvider; } - /** - * Sets the batch loader environment provider that will be used to give context to batch load functions - * - * @param contextProvider the batch loader context provider - * @return a new data loader options instance for fluent coding - */ - public DataLoaderOptions setBatchLoaderContextProvider(BatchLoaderContextProvider contextProvider) { - return builder().setBatchLoaderContextProvider(nonNull(contextProvider)).build(); - } - /** * Gets the (optional) cache store implementation that is used for value caching, if caching is enabled. *

@@ -334,15 +232,6 @@ public DataLoaderOptions setBatchLoaderContextProvider(BatchLoaderContextProvide return Optional.ofNullable(valueCache); } - /** - * Sets the value cache implementation to use for caching values, if caching is enabled. - * - * @param valueCache the value cache instance - * @return a new data loader options instance for fluent coding - */ - public DataLoaderOptions setValueCache(ValueCache valueCache) { - return builder().setValueCache(valueCache).build(); - } /** * @return the {@link ValueCacheOptions} that control how the {@link ValueCache} will be used @@ -351,16 +240,6 @@ public ValueCacheOptions getValueCacheOptions() { return valueCacheOptions; } - /** - * Sets the {@link ValueCacheOptions} that control how the {@link ValueCache} will be used - * - * @param valueCacheOptions the value cache options - * @return a new data loader options instance for fluent coding - */ - public DataLoaderOptions setValueCacheOptions(ValueCacheOptions valueCacheOptions) { - return builder().setValueCacheOptions(nonNull(valueCacheOptions)).build(); - } - /** * @return the {@link BatchLoaderScheduler} to use, which can be null */ @@ -368,17 +247,6 @@ public BatchLoaderScheduler getBatchLoaderScheduler() { return batchLoaderScheduler; } - /** - * Sets in a new {@link BatchLoaderScheduler} that allows the call to a {@link BatchLoader} function to be scheduled - * to some future time. - * - * @param batchLoaderScheduler the scheduler - * @return a new data loader options instance for fluent coding - */ - public DataLoaderOptions setBatchLoaderScheduler(BatchLoaderScheduler batchLoaderScheduler) { - return builder().setBatchLoaderScheduler(batchLoaderScheduler).build(); - } - /** * @return the {@link DataLoaderInstrumentation} to use */ @@ -386,20 +254,6 @@ public DataLoaderInstrumentation getInstrumentation() { return instrumentation; } - /** - * Sets in a new {@link DataLoaderInstrumentation} - * - * @param instrumentation the new {@link DataLoaderInstrumentation} - * @return a new data loader options instance for fluent coding - */ - public DataLoaderOptions setInstrumentation(DataLoaderInstrumentation instrumentation) { - return builder().setInstrumentation(instrumentation).build(); - } - - private Builder builder() { - return new Builder(this); - } - public static class Builder { private boolean batchingEnabled; private boolean cachingEnabled; @@ -433,61 +287,137 @@ public Builder() { this.instrumentation = other.instrumentation; } + /** + * Sets the option that determines whether batch loading is enabled. + * + * @param batchingEnabled {@code true} to enable batch loading, {@code false} otherwise + * @return this builder for fluent coding + */ public Builder setBatchingEnabled(boolean batchingEnabled) { this.batchingEnabled = batchingEnabled; return this; } + /** + * Sets the option that determines whether caching is enabled. + * + * @param cachingEnabled {@code true} to enable caching, {@code false} otherwise + * @return this builder for fluent coding + */ public Builder setCachingEnabled(boolean cachingEnabled) { this.cachingEnabled = cachingEnabled; return this; } + /** + * Sets the option that determines whether exceptional values are cache enabled. 
+ * + * @param cachingExceptionsEnabled {@code true} to enable caching exceptional values, {@code false} otherwise + * @return this builder for fluent coding + */ public Builder setCachingExceptionsEnabled(boolean cachingExceptionsEnabled) { this.cachingExceptionsEnabled = cachingExceptionsEnabled; return this; } + /** + * Sets the function to use for creating the cache key, if caching is enabled. + * + * @param cacheKeyFunction the cache key function to use + * @return this builder for fluent coding + */ public Builder setCacheKeyFunction(CacheKey cacheKeyFunction) { this.cacheKeyFunction = cacheKeyFunction; return this; } + /** + * Sets the cache map implementation to use for caching, if caching is enabled. + * + * @param cacheMap the cache map instance + * @return this builder for fluent coding + */ public Builder setCacheMap(CacheMap cacheMap) { this.cacheMap = cacheMap; return this; } + /** + * Sets the value cache implementation to use for caching values, if caching is enabled. + * + * @param valueCache the value cache instance + * @return this builder for fluent coding + */ public Builder setValueCache(ValueCache valueCache) { this.valueCache = valueCache; return this; } + /** + * Sets the maximum number of keys that will be presented to the {@link BatchLoader} function + * before they are split into multiple class + * + * @param maxBatchSize the maximum batch size + * @return this builder for fluent coding + */ public Builder setMaxBatchSize(int maxBatchSize) { this.maxBatchSize = maxBatchSize; return this; } + /** + * Sets the statistics collector supplier that will be used with these data loader options. Since it uses + * the supplier pattern, you can create a new statistics collector on each call, or you can reuse + * a common value + * + * @param statisticsCollector the statistics collector to use + * @return this builder for fluent coding + */ public Builder setStatisticsCollector(Supplier statisticsCollector) { this.statisticsCollector = statisticsCollector; return this; } + /** + * Sets the batch loader environment provider that will be used to give context to batch load functions + * + * @param environmentProvider the batch loader context provider + * @return this builder for fluent coding + */ public Builder setBatchLoaderContextProvider(BatchLoaderContextProvider environmentProvider) { this.environmentProvider = environmentProvider; return this; } + /** + * Sets the {@link ValueCacheOptions} that control how the {@link ValueCache} will be used + * + * @param valueCacheOptions the value cache options + * @return this builder for fluent coding + */ public Builder setValueCacheOptions(ValueCacheOptions valueCacheOptions) { this.valueCacheOptions = valueCacheOptions; return this; } + /** + * Sets in a new {@link BatchLoaderScheduler} that allows the call to a {@link BatchLoader} function to be scheduled + * to some future time. 
+ * + * @param batchLoaderScheduler the scheduler + * @return this builder for fluent coding + */ public Builder setBatchLoaderScheduler(BatchLoaderScheduler batchLoaderScheduler) { this.batchLoaderScheduler = batchLoaderScheduler; return this; } + /** + * Sets in a new {@link DataLoaderInstrumentation} + * + * @param instrumentation the new {@link DataLoaderInstrumentation} + * @return this builder for fluent coding + */ public Builder setInstrumentation(DataLoaderInstrumentation instrumentation) { this.instrumentation = nonNull(instrumentation); return this; diff --git a/src/main/java/org/dataloader/DataLoaderRegistry.java b/src/main/java/org/dataloader/DataLoaderRegistry.java index 06c93c4..6bc79f6 100644 --- a/src/main/java/org/dataloader/DataLoaderRegistry.java +++ b/src/main/java/org/dataloader/DataLoaderRegistry.java @@ -1,10 +1,13 @@ package org.dataloader; import org.dataloader.annotations.PublicApi; +import org.dataloader.impl.Assertions; import org.dataloader.instrumentation.ChainedDataLoaderInstrumentation; import org.dataloader.instrumentation.DataLoaderInstrumentation; import org.dataloader.instrumentation.DataLoaderInstrumentationHelper; import org.dataloader.stats.Statistics; +import org.jspecify.annotations.NullMarked; +import org.jspecify.annotations.Nullable; import java.util.ArrayList; import java.util.HashMap; @@ -12,10 +15,13 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.function.Function; +import static org.dataloader.impl.Assertions.assertState; + /** * This allows data loaders to be registered together into a single place, so * they can be dispatched as one. It also allows you to retrieve data loaders by @@ -35,9 +41,10 @@ * are the same object, then nothing is changed, since the same instrumentation code is being run. */ @PublicApi +@NullMarked public class DataLoaderRegistry { protected final Map> dataLoaders; - protected final DataLoaderInstrumentation instrumentation; + protected final @Nullable DataLoaderInstrumentation instrumentation; public DataLoaderRegistry() { @@ -48,15 +55,15 @@ private DataLoaderRegistry(Builder builder) { this(builder.dataLoaders, builder.instrumentation); } - protected DataLoaderRegistry(Map> dataLoaders, DataLoaderInstrumentation instrumentation) { + protected DataLoaderRegistry(Map> dataLoaders, @Nullable DataLoaderInstrumentation instrumentation) { this.dataLoaders = instrumentDLs(dataLoaders, instrumentation); this.instrumentation = instrumentation; } - private Map> instrumentDLs(Map> incomingDataLoaders, DataLoaderInstrumentation registryInstrumentation) { + private Map> instrumentDLs(Map> incomingDataLoaders, @Nullable DataLoaderInstrumentation registryInstrumentation) { Map> dataLoaders = new ConcurrentHashMap<>(incomingDataLoaders); if (registryInstrumentation != null) { - dataLoaders.replaceAll((k, existingDL) -> instrumentDL(registryInstrumentation, existingDL)); + dataLoaders.replaceAll((k, existingDL) -> nameAndInstrumentDL(k, registryInstrumentation, existingDL)); } return dataLoaders; } @@ -64,11 +71,14 @@ protected DataLoaderRegistry(Map> dataLoaders, DataLoad /** * Can be called to tweak a {@link DataLoader} so that it has the registry {@link DataLoaderInstrumentation} added as the first one. 
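Editor's note: the registry changes that follow route every registration through nameAndInstrumentDL, which applies the registry's DataLoaderInstrumentation and, via checkAndSetName, ties the loader's name to its registry key. A rough sketch of what that means for callers, using the DataLoaderFactory.builder() API exercised in the new DataLoaderFactoryTest below (the batch function and its types are hypothetical):

```
BatchLoaderWithContext<String, String> userBatchLoader =
        (keys, env) -> CompletableFuture.completedFuture(keys);   // hypothetical batch function

DataLoader<String, String> users = DataLoaderFactory.<String, String>builder()
        .name("users")                                // the name travels with the loader
        .batchLoader(userBatchLoader)
        .options(DataLoaderOptions.newOptions().build())
        .build();

DataLoaderRegistry registry = new DataLoaderRegistry();
registry.register(users);                             // key is taken from getName()
// registry.register("orders", users) would trip the name-vs-key assertion in checkAndSetName
```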
* + * @param key the key used to register the data loader * @param registryInstrumentation the common registry {@link DataLoaderInstrumentation} * @param existingDL the existing data loader * @return a new {@link DataLoader} or the same one if there is nothing to change */ - private static DataLoader instrumentDL(DataLoaderInstrumentation registryInstrumentation, DataLoader existingDL) { + private static DataLoader nameAndInstrumentDL(String key, @Nullable DataLoaderInstrumentation registryInstrumentation, DataLoader existingDL) { + existingDL = checkAndSetName(key, existingDL); + if (registryInstrumentation == null) { return existingDL; } @@ -97,6 +107,15 @@ protected DataLoaderRegistry(Map> dataLoaders, DataLoad } } + private static DataLoader checkAndSetName(String key, DataLoader dataLoader) { + if (dataLoader.getName() == null) { + return dataLoader.transform(b -> b.name(key)); + } + assertState(key.equals(dataLoader.getName()), + () -> String.format("Data loader name '%s' is not the same as registered key '%s'", dataLoader.getName(), key)); + return dataLoader; + } + private static DataLoader mkInstrumentedDataLoader(DataLoader existingDL, DataLoaderOptions options, DataLoaderInstrumentation newInstrumentation) { return existingDL.transform(builder -> builder.options(setInInstrumentation(options, newInstrumentation))); } @@ -108,28 +127,69 @@ private static DataLoaderOptions setInInstrumentation(DataLoaderOptions options, /** * @return the {@link DataLoaderInstrumentation} associated with this registry which can be null */ - public DataLoaderInstrumentation getInstrumentation() { + public @Nullable DataLoaderInstrumentation getInstrumentation() { return instrumentation; } /** - * This will register a new dataloader + * This will register a new named dataloader. The {@link DataLoader} must be named something and + * cannot have a null name. + *
<p>
+ * Note: Registration can change the data loader instance since it might get an {@link DataLoaderInstrumentation} applied to + * it. So the {@link DataLoader} instance your read via {@link DataLoaderRegistry#getDataLoader(String)} might not be the same + * object that was registered. + * + * @param dataLoader the named data loader to register + * @return this registry + */ + public DataLoaderRegistry register(DataLoader dataLoader) { + String name = Assertions.nonNull(dataLoader.getName(), () -> "The DataLoader must have a non null name"); + dataLoaders.put(name, nameAndInstrumentDL(name, instrumentation, dataLoader)); + return this; + } + + /** + * This will register a new {@link DataLoader} + *
<p>
+ * Note: Registration can change the data loader instance since it might get an {@link DataLoaderInstrumentation} applied to + * it. So the {@link DataLoader} instance your read via {@link DataLoaderRegistry#getDataLoader(String)} might not be the same + * object that was registered. * * @param key the key to put the data loader under * @param dataLoader the data loader to register * @return this registry */ public DataLoaderRegistry register(String key, DataLoader dataLoader) { - dataLoaders.put(key, instrumentDL(instrumentation, dataLoader)); + dataLoaders.put(key, nameAndInstrumentDL(key, instrumentation, dataLoader)); return this; } + /** + * This will register a new {@link DataLoader} and then return it. + *
<p>
+ * Note: Registration can change the data loader instance since it might get an {@link DataLoaderInstrumentation} applied to + * it. So the {@link DataLoader} instance your read via {@link DataLoaderRegistry#getDataLoader(String)} might not be the same + * object that was registered. + * + * @param key the key to put the data loader under + * @param dataLoader the data loader to register + * @return the data loader instance that was registered + */ + public DataLoader registerAndGet(String key, DataLoader dataLoader) { + dataLoaders.put(key, nameAndInstrumentDL(key, instrumentation, dataLoader)); + return Objects.requireNonNull(getDataLoader(key)); + } + /** * Computes a data loader if absent or return it if it was * already registered at that key. *
<p>
* Note: The entire method invocation is performed atomically, * so the function is applied at most once per key. + *
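Editor's note: given the at-most-once guarantee described in that comment, computeIfAbsent doubles as a thread-safe lazy registration point; a small hypothetical sketch (loader name and types are illustrative only):

```
BatchLoader<String, String> echoLoader = keys -> CompletableFuture.completedFuture(keys);   // hypothetical

// The mapping function runs at most once for "users", even if two threads race here.
DataLoader<String, String> users = registry.computeIfAbsent("users",
        key -> DataLoaderFactory.newDataLoader(echoLoader));
```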
<p>
+ * Note: Registration can change the data loader instance since it might get an {@link DataLoaderInstrumentation} applied to + * it. So the {@link DataLoader} instance your read via {@link DataLoaderRegistry#getDataLoader(String)} might not be the same + * object that was registered. * * @param key the key of the data loader * @param mappingFunction the function to compute a data loader @@ -142,7 +202,7 @@ public DataLoader computeIfAbsent(final String key, final Function> mappingFunction) { return (DataLoader) dataLoaders.computeIfAbsent(key, (k) -> { DataLoader dl = mappingFunction.apply(k); - return instrumentDL(instrumentation, dl); + return nameAndInstrumentDL(key, instrumentation, dl); }); } @@ -192,10 +252,10 @@ public DataLoaderRegistry unregister(String key) { * @param key the key of the data loader * @param the type of keys * @param the type of values - * @return a data loader or null if its not present + * @return a data loader or null if it's not present */ @SuppressWarnings("unchecked") - public DataLoader getDataLoader(String key) { + public @Nullable DataLoader getDataLoader(String key) { return (DataLoader) dataLoaders.get(key); } @@ -262,7 +322,7 @@ public static Builder newRegistry() { public static class Builder { private final Map> dataLoaders = new HashMap<>(); - private DataLoaderInstrumentation instrumentation; + private @Nullable DataLoaderInstrumentation instrumentation; /** * This will register a new dataloader diff --git a/src/main/java/org/dataloader/DelegatingDataLoader.java b/src/main/java/org/dataloader/DelegatingDataLoader.java index c54a731..0175059 100644 --- a/src/main/java/org/dataloader/DelegatingDataLoader.java +++ b/src/main/java/org/dataloader/DelegatingDataLoader.java @@ -9,6 +9,7 @@ import java.time.Duration; import java.time.Instant; import java.util.List; +import java.util.Map; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.function.BiConsumer; @@ -29,8 +30,8 @@ * CompletableFuture cf = super.load(key, keyContext); * return cf.thenApply(v -> "|" + v + "|"); * } - *}; - *} + * }; + * } * * @param type parameter indicating the type of the data load keys * @param type parameter indicating the type of the data that is returned @@ -58,7 +59,7 @@ public static DataLoader unwrap(DataLoader dataLoader) { } public DelegatingDataLoader(DataLoader delegate) { - super(delegate.getBatchLoadFunction(), delegate.getOptions()); + super(delegate.getName(), delegate.getBatchLoadFunction(), delegate.getOptions()); this.delegate = delegate; } @@ -66,19 +67,31 @@ public DataLoader getDelegate() { return delegate; } - /** - * The {@link DataLoader#load(Object)} and {@link DataLoader#loadMany(List)} type methods all call back - * to the {@link DataLoader#load(Object, Object)} and hence we don't override them. 
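Editor's note: that removed comment relied on the single-argument load calling back into load(key, keyContext); with every load and loadMany variant now forwarding straight to the delegate, a decorator has to override each variant it wants to intercept. A hedged sketch of such a wrapper (the delegate variable is assumed to exist):

```
DataLoader<String, String> wrapped = new DelegatingDataLoader<>(delegate) {
    @Override
    public CompletableFuture<String> load(String key) {
        return super.load(key).thenApply(v -> "|" + v + "|");
    }

    @Override
    public CompletableFuture<String> load(String key, Object keyContext) {
        return super.load(key, keyContext).thenApply(v -> "|" + v + "|");
    }
};
```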
- * - * @param key the key to load - * @param keyContext a context object that is specific to this key - * @return the future of the value - */ + @Override + public CompletableFuture load(K key) { + return delegate.load(key); + } + @Override public CompletableFuture load(@NonNull K key, @Nullable Object keyContext) { return delegate.load(key, keyContext); } + @Override + public CompletableFuture> loadMany(List keys) { + return delegate.loadMany(keys); + } + + @Override + public CompletableFuture> loadMany(List keys, List keyContexts) { + return delegate.loadMany(keys, keyContexts); + } + + @Override + public CompletableFuture> loadMany(Map keysAndContexts) { + return delegate.loadMany(keysAndContexts); + } + @Override public DataLoader transform(Consumer> builderConsumer) { return delegate.transform(builderConsumer); diff --git a/src/main/java/org/dataloader/MappedBatchLoader.java b/src/main/java/org/dataloader/MappedBatchLoader.java index 1ad4c79..179d6a2 100644 --- a/src/main/java/org/dataloader/MappedBatchLoader.java +++ b/src/main/java/org/dataloader/MappedBatchLoader.java @@ -18,6 +18,7 @@ import org.dataloader.annotations.PublicSpi; import org.jspecify.annotations.NullMarked; +import org.jspecify.annotations.Nullable; import java.util.Map; import java.util.Set; @@ -59,7 +60,7 @@ */ @PublicSpi @NullMarked -public interface MappedBatchLoader { +public interface MappedBatchLoader { /** * Called to batch load the provided keys and return a promise to a map of values. diff --git a/src/main/java/org/dataloader/MappedBatchLoaderWithContext.java b/src/main/java/org/dataloader/MappedBatchLoaderWithContext.java index 9559260..9f342d4 100644 --- a/src/main/java/org/dataloader/MappedBatchLoaderWithContext.java +++ b/src/main/java/org/dataloader/MappedBatchLoaderWithContext.java @@ -18,6 +18,7 @@ import org.dataloader.annotations.PublicSpi; import org.jspecify.annotations.NullMarked; +import org.jspecify.annotations.Nullable; import java.util.Map; import java.util.Set; @@ -33,7 +34,7 @@ */ @PublicSpi @NullMarked -public interface MappedBatchLoaderWithContext { +public interface MappedBatchLoaderWithContext { /** * Called to batch load the provided keys and return a promise to a map of values. * diff --git a/src/main/java/org/dataloader/MappedBatchPublisher.java b/src/main/java/org/dataloader/MappedBatchPublisher.java index 493401f..6637157 100644 --- a/src/main/java/org/dataloader/MappedBatchPublisher.java +++ b/src/main/java/org/dataloader/MappedBatchPublisher.java @@ -2,6 +2,7 @@ import org.dataloader.annotations.PublicSpi; import org.jspecify.annotations.NullMarked; +import org.jspecify.annotations.Nullable; import org.reactivestreams.Subscriber; import java.util.Map; @@ -20,7 +21,7 @@ */ @PublicSpi @NullMarked -public interface MappedBatchPublisher { +public interface MappedBatchPublisher { /** * Called to batch the provided keys into a stream of map entries of keys and values. *
<p>
diff --git a/src/main/java/org/dataloader/MappedBatchPublisherWithContext.java b/src/main/java/org/dataloader/MappedBatchPublisherWithContext.java index 7b862ca..dd8b5f9 100644 --- a/src/main/java/org/dataloader/MappedBatchPublisherWithContext.java +++ b/src/main/java/org/dataloader/MappedBatchPublisherWithContext.java @@ -2,6 +2,7 @@ import org.dataloader.annotations.PublicSpi; import org.jspecify.annotations.NullMarked; +import org.jspecify.annotations.Nullable; import org.reactivestreams.Subscriber; import java.util.List; @@ -17,7 +18,7 @@ */ @PublicSpi @NullMarked -public interface MappedBatchPublisherWithContext { +public interface MappedBatchPublisherWithContext { /** * Called to batch the provided keys into a stream of map entries of keys and values. diff --git a/src/main/java/org/dataloader/ValueCache.java b/src/main/java/org/dataloader/ValueCache.java index 80c8402..b06fdb8 100644 --- a/src/main/java/org/dataloader/ValueCache.java +++ b/src/main/java/org/dataloader/ValueCache.java @@ -3,6 +3,7 @@ import org.dataloader.annotations.PublicSpi; import org.dataloader.impl.CompletableFutureKit; import org.dataloader.impl.NoOpValueCache; +import org.jspecify.annotations.Nullable; import org.jspecify.annotations.NullMarked; import java.util.ArrayList; @@ -40,7 +41,7 @@ */ @PublicSpi @NullMarked -public interface ValueCache { +public interface ValueCache { /** * Creates a new value cache, using the default no-op implementation. diff --git a/src/main/java/org/dataloader/impl/DefaultCacheMap.java b/src/main/java/org/dataloader/impl/DefaultCacheMap.java index fa89bb0..e8db681 100644 --- a/src/main/java/org/dataloader/impl/DefaultCacheMap.java +++ b/src/main/java/org/dataloader/impl/DefaultCacheMap.java @@ -18,11 +18,12 @@ import org.dataloader.CacheMap; import org.dataloader.annotations.Internal; +import org.jspecify.annotations.NullMarked; +import org.jspecify.annotations.Nullable; import java.util.Collection; -import java.util.HashMap; -import java.util.Map; import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ConcurrentHashMap; /** * Default implementation of {@link CacheMap} that is based on a regular {@link java.util.HashMap}. 
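Editor's note: the DefaultCacheMap hunk that follows swaps the HashMap for a ConcurrentHashMap and replaces set(key, value) with putIfAbsentAtomically, which mirrors ConcurrentHashMap.putIfAbsent: it returns the future that was already cached, or null when the supplied one was stored. A contract sketch with hypothetical names:

```
CompletableFuture<String> existing = cacheMap.putIfAbsentAtomically(key, freshFuture);
// Use whichever future "won" the race; only one load is ever issued per key.
CompletableFuture<String> winner = existing != null ? existing : freshFuture;
```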
@@ -33,15 +34,16 @@ * @author Arnold Schrijver */ @Internal +@NullMarked public class DefaultCacheMap implements CacheMap { - private final Map> cache; + private final ConcurrentHashMap> cache; /** * Default constructor */ public DefaultCacheMap() { - cache = new HashMap<>(); + cache = new ConcurrentHashMap<>(); } /** @@ -57,7 +59,7 @@ public boolean containsKey(K key) { * {@inheritDoc} */ @Override - public CompletableFuture get(K key) { + public @Nullable CompletableFuture get(K key) { return cache.get(key); } @@ -73,9 +75,8 @@ public Collection> getAll() { * {@inheritDoc} */ @Override - public CacheMap set(K key, CompletableFuture value) { - cache.put(key, value); - return this; + public @Nullable CompletableFuture putIfAbsentAtomically(K key, CompletableFuture value) { + return cache.putIfAbsent(key, value); } /** @@ -95,4 +96,9 @@ public CacheMap clear() { cache.clear(); return this; } + + @Override + public int size() { + return cache.size(); + } } diff --git a/src/main/java/org/dataloader/reactive/AbstractBatchSubscriber.java b/src/main/java/org/dataloader/reactive/AbstractBatchSubscriber.java index c2f5438..76c94b3 100644 --- a/src/main/java/org/dataloader/reactive/AbstractBatchSubscriber.java +++ b/src/main/java/org/dataloader/reactive/AbstractBatchSubscriber.java @@ -10,6 +10,8 @@ import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionException; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantLock; import static org.dataloader.impl.Assertions.assertState; @@ -25,6 +27,7 @@ abstract class AbstractBatchSubscriber implements Subscriber { final List callContexts; final List> queuedFutures; final ReactiveSupport.HelperIntegration helperIntegration; + final Lock lock = new ReentrantLock(); List clearCacheKeys = new ArrayList<>(); List completedValues = new ArrayList<>(); diff --git a/src/main/java/org/dataloader/reactive/BatchSubscriberImpl.java b/src/main/java/org/dataloader/reactive/BatchSubscriberImpl.java index d0b8110..74a500a 100644 --- a/src/main/java/org/dataloader/reactive/BatchSubscriberImpl.java +++ b/src/main/java/org/dataloader/reactive/BatchSubscriberImpl.java @@ -29,58 +29,74 @@ class BatchSubscriberImpl extends AbstractBatchSubscriber { super(valuesFuture, keys, callContexts, queuedFutures, helperIntegration); } - // onNext may be called by multiple threads - for the time being, we pass 'synchronized' to guarantee + // onNext may be called by multiple threads - for the time being, we use a ReentrantLock to guarantee // correctness (at the cost of speed). 
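Editor's note on the lock handling used throughout these subscribers: the conventional ReentrantLock idiom acquires the lock just before the try block, so the finally clause can never call unlock() when lock() itself failed to acquire. A minimal sketch of that shape:

```
lock.lock();          // acquire outside the try block
try {
    // guarded bookkeeping (idx, completedValues, queued futures, ...)
} finally {
    lock.unlock();    // only runs once the lock is held
}
```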
@Override - public synchronized void onNext(V value) { - super.onNext(value); + public void onNext(V value) { + try { + lock.lock(); - if (idx >= keys.size()) { - // hang on they have given us more values than we asked for in keys - // we cant handle this - return; - } - K key = keys.get(idx); - Object callContext = callContexts.get(idx); - CompletableFuture future = queuedFutures.get(idx); - onNextValue(key, value, callContext, List.of(future)); + super.onNext(value); + + if (idx >= keys.size()) { + // hang on they have given us more values than we asked for in keys + // we cant handle this + return; + } + K key = keys.get(idx); + Object callContext = callContexts.get(idx); + CompletableFuture future = queuedFutures.get(idx); + onNextValue(key, value, callContext, List.of(future)); - completedValues.add(value); - idx++; + completedValues.add(value); + idx++; + } finally { + lock.unlock(); + } } @Override - public synchronized void onComplete() { - super.onComplete(); - if (keys.size() != completedValues.size()) { - // we have more or less values than promised - // we will go through all the outstanding promises and mark those that - // have not finished as failed - for (CompletableFuture queuedFuture : queuedFutures) { - if (!queuedFuture.isDone()) { - queuedFuture.completeExceptionally(new DataLoaderAssertionException("The size of the promised values MUST be the same size as the key list")); + public void onComplete() { + try { + lock.lock(); + super.onComplete(); + if (keys.size() != completedValues.size()) { + // we have more or less values than promised + // we will go through all the outstanding promises and mark those that + // have not finished as failed + for (CompletableFuture queuedFuture : queuedFutures) { + if (!queuedFuture.isDone()) { + queuedFuture.completeExceptionally(new DataLoaderAssertionException("The size of the promised values MUST be the same size as the key list")); + } } } + possiblyClearCacheEntriesOnExceptions(); + valuesFuture.complete(completedValues); + } finally { + lock.unlock(); } - possiblyClearCacheEntriesOnExceptions(); - valuesFuture.complete(completedValues); } @Override - public synchronized void onError(Throwable ex) { - super.onError(ex); - ex = unwrapThrowable(ex); - // Set the remaining keys to the exception. - for (int i = idx; i < queuedFutures.size(); i++) { - K key = keys.get(i); - CompletableFuture future = queuedFutures.get(i); - if (!future.isDone()) { - future.completeExceptionally(ex); - // clear any cached view of this key because it failed - helperIntegration.clearCacheView(key); + public void onError(Throwable ex) { + try { + lock.lock(); + super.onError(ex); + ex = unwrapThrowable(ex); + // Set the remaining keys to the exception. 
+ for (int i = idx; i < queuedFutures.size(); i++) { + K key = keys.get(i); + CompletableFuture future = queuedFutures.get(i); + if (!future.isDone()) { + future.completeExceptionally(ex); + // clear any cached view of this key because it failed + helperIntegration.clearCacheView(key); + } } + valuesFuture.completeExceptionally(ex); + } finally { + lock.unlock(); } - valuesFuture.completeExceptionally(ex); } } diff --git a/src/main/java/org/dataloader/reactive/MappedBatchSubscriberImpl.java b/src/main/java/org/dataloader/reactive/MappedBatchSubscriberImpl.java index d56efa0..3c937b0 100644 --- a/src/main/java/org/dataloader/reactive/MappedBatchSubscriberImpl.java +++ b/src/main/java/org/dataloader/reactive/MappedBatchSubscriberImpl.java @@ -43,61 +43,77 @@ class MappedBatchSubscriberImpl extends AbstractBatchSubscriber entry) { - super.onNext(entry); - K key = entry.getKey(); - V value = entry.getValue(); + public void onNext(Map.Entry entry) { + try { + lock.lock(); + super.onNext(entry); + K key = entry.getKey(); + V value = entry.getValue(); - Object callContext = callContextByKey.get(key); - List> futures = queuedFuturesByKey.getOrDefault(key, List.of()); + Object callContext = callContextByKey.get(key); + List> futures = queuedFuturesByKey.getOrDefault(key, List.of()); - onNextValue(key, value, callContext, futures); + onNextValue(key, value, callContext, futures); - // did we have an actual key for this value - ignore it if they send us one outside the key set - if (!futures.isEmpty()) { - completedValuesByKey.put(key, value); + // did we have an actual key for this value - ignore it if they send us one outside the key set + if (!futures.isEmpty()) { + completedValuesByKey.put(key, value); + } + } finally { + lock.unlock(); } + } @Override - public synchronized void onComplete() { - super.onComplete(); + public void onComplete() { + try { + lock.lock(); + super.onComplete(); - possiblyClearCacheEntriesOnExceptions(); - List values = new ArrayList<>(keys.size()); - for (K key : keys) { - V value = completedValuesByKey.get(key); - values.add(value); + possiblyClearCacheEntriesOnExceptions(); + List values = new ArrayList<>(keys.size()); + for (K key : keys) { + V value = completedValuesByKey.get(key); + values.add(value); - List> futures = queuedFuturesByKey.getOrDefault(key, List.of()); - for (CompletableFuture future : futures) { - if (!future.isDone()) { - // we have a future that never came back for that key - // but the publisher is done sending in data - it must be null - // e.g. for key X when found no value - future.complete(null); + List> futures = queuedFuturesByKey.getOrDefault(key, List.of()); + for (CompletableFuture future : futures) { + if (!future.isDone()) { + // we have a future that never came back for that key + // but the publisher is done sending in data - it must be null + // e.g. for key X when found no value + future.complete(null); + } } } + valuesFuture.complete(values); + } finally { + lock.unlock(); } - valuesFuture.complete(values); } @Override - public synchronized void onError(Throwable ex) { - super.onError(ex); - ex = unwrapThrowable(ex); - // Complete the futures for the remaining keys with the exception. 
- for (int idx = 0; idx < queuedFutures.size(); idx++) { - K key = keys.get(idx); - List> futures = queuedFuturesByKey.get(key); - if (!completedValuesByKey.containsKey(key)) { - for (CompletableFuture future : futures) { - future.completeExceptionally(ex); + public void onError(Throwable ex) { + try { + lock.lock(); + super.onError(ex); + ex = unwrapThrowable(ex); + // Complete the futures for the remaining keys with the exception. + for (int idx = 0; idx < queuedFutures.size(); idx++) { + K key = keys.get(idx); + List> futures = queuedFuturesByKey.get(key); + if (!completedValuesByKey.containsKey(key)) { + for (CompletableFuture future : futures) { + future.completeExceptionally(ex); + } + // clear any cached view of this key because they all failed + helperIntegration.clearCacheView(key); } - // clear any cached view of this key because they all failed - helperIntegration.clearCacheView(key); } + valuesFuture.completeExceptionally(ex); + } finally { + lock.unlock(); } - valuesFuture.completeExceptionally(ex); } } diff --git a/src/main/java/org/dataloader/registries/ScheduledDataLoaderRegistry.java b/src/main/java/org/dataloader/registries/ScheduledDataLoaderRegistry.java index b6bc257..4f62378 100644 --- a/src/main/java/org/dataloader/registries/ScheduledDataLoaderRegistry.java +++ b/src/main/java/org/dataloader/registries/ScheduledDataLoaderRegistry.java @@ -3,7 +3,10 @@ import org.dataloader.DataLoader; import org.dataloader.DataLoaderRegistry; import org.dataloader.annotations.ExperimentalApi; +import org.dataloader.impl.Assertions; import org.dataloader.instrumentation.DataLoaderInstrumentation; +import org.jspecify.annotations.NullMarked; +import org.jspecify.annotations.Nullable; import java.time.Duration; import java.util.LinkedHashMap; @@ -54,6 +57,7 @@ * This code is currently marked as {@link ExperimentalApi} */ @ExperimentalApi +@NullMarked public class ScheduledDataLoaderRegistry extends DataLoaderRegistry implements AutoCloseable { private final Map, DispatchPredicate> dataLoaderPredicates = new ConcurrentHashMap<>(); @@ -66,7 +70,7 @@ public class ScheduledDataLoaderRegistry extends DataLoaderRegistry implements A private ScheduledDataLoaderRegistry(Builder builder) { super(builder.dataLoaders, builder.instrumentation); - this.scheduledExecutorService = builder.scheduledExecutorService; + this.scheduledExecutorService = Assertions.nonNull(builder.scheduledExecutorService); this.defaultExecutorUsed = builder.defaultExecutorUsed; this.schedule = builder.schedule; this.tickerMode = builder.tickerMode; @@ -112,7 +116,6 @@ public boolean isTickerMode() { * and return a new combined registry * * @param registry the registry to combine into this registry - * * @return a new combined registry */ public ScheduledDataLoaderRegistry combine(DataLoaderRegistry registry) { @@ -128,7 +131,6 @@ public ScheduledDataLoaderRegistry combine(DataLoaderRegistry registry) { * This will unregister a new dataloader * * @param key the key of the data loader to unregister - * * @return this registry */ public ScheduledDataLoaderRegistry unregister(String key) { @@ -161,7 +163,6 @@ public DispatchPredicate getDispatchPredicate() { * @param key the key to put the data loader under * @param dataLoader the data loader to register * @param dispatchPredicate the dispatch predicate to associate with this data loader - * * @return this registry */ public ScheduledDataLoaderRegistry register(String key, DataLoader dataLoader, DispatchPredicate dispatchPredicate) { @@ -222,7 +223,6 @@ public void 
rescheduleNow() { * * @param dataLoaderKey the key in the dataloader map * @param dataLoader the dataloader - * * @return true if it should dispatch */ private boolean shouldDispatch(String dataLoaderKey, DataLoader dataLoader) { @@ -267,11 +267,11 @@ public static class Builder { private final Map> dataLoaders = new LinkedHashMap<>(); private final Map, DispatchPredicate> dataLoaderPredicates = new LinkedHashMap<>(); private DispatchPredicate dispatchPredicate = DispatchPredicate.DISPATCH_ALWAYS; - private ScheduledExecutorService scheduledExecutorService; + private @Nullable ScheduledExecutorService scheduledExecutorService; private boolean defaultExecutorUsed = false; private Duration schedule = Duration.ofMillis(10); private boolean tickerMode = false; - private DataLoaderInstrumentation instrumentation; + private @Nullable DataLoaderInstrumentation instrumentation; /** @@ -279,7 +279,6 @@ public static class Builder { * {@link ScheduledDataLoaderRegistry#close()} is called. This is left to the code that made this setup code * * @param executorService the executor service to run the ticker on - * * @return this builder for a fluent pattern */ public Builder scheduledExecutorService(ScheduledExecutorService executorService) { @@ -297,7 +296,6 @@ public Builder schedule(Duration schedule) { * * @param key the key to put the data loader under * @param dataLoader the data loader to register - * * @return this builder for a fluent pattern */ public Builder register(String key, DataLoader dataLoader) { @@ -312,7 +310,6 @@ public Builder register(String key, DataLoader dataLoader) { * @param key the key to put the data loader under * @param dataLoader the data loader to register * @param dispatchPredicate the dispatch predicate - * * @return this builder for a fluent pattern */ public Builder register(String key, DataLoader dataLoader, DispatchPredicate dispatchPredicate) { @@ -326,7 +323,6 @@ public Builder register(String key, DataLoader dataLoader, DispatchPredica * from a previous {@link DataLoaderRegistry} * * @param otherRegistry the previous {@link DataLoaderRegistry} - * * @return this builder for a fluent pattern */ public Builder registerAll(DataLoaderRegistry otherRegistry) { @@ -343,7 +339,6 @@ public Builder registerAll(DataLoaderRegistry otherRegistry) { * whether all {@link DataLoader}s in the {@link DataLoaderRegistry }should be dispatched. * * @param dispatchPredicate the predicate - * * @return this builder for a fluent pattern */ public Builder dispatchPredicate(DispatchPredicate dispatchPredicate) { @@ -357,7 +352,6 @@ public Builder dispatchPredicate(DispatchPredicate dispatchPredicate) { * to dispatchAll. 
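Editor's note: pulling the scheduled-registry builder methods in this hunk together, a typical setup might look like the following; newScheduledRegistry() and dispatchIfDepthGreaterThan() are taken from the library's documented API rather than from this hunk, and the user loader is hypothetical:

```
ScheduledDataLoaderRegistry registry = ScheduledDataLoaderRegistry.newScheduledRegistry()
        .register("users", userDataLoader)
        .dispatchPredicate(DispatchPredicate.dispatchIfDepthGreaterThan(10))
        .schedule(Duration.ofMillis(10))
        .tickerMode(true)        // keep re-checking the predicate on the executor's schedule
        .build();
```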
* * @param tickerMode true or false - * * @return this builder for a fluent pattern */ public Builder tickerMode(boolean tickerMode) { diff --git a/src/main/java/org/dataloader/stats/DelegatingStatisticsCollector.java b/src/main/java/org/dataloader/stats/DelegatingStatisticsCollector.java index 563d37b..36ed69a 100644 --- a/src/main/java/org/dataloader/stats/DelegatingStatisticsCollector.java +++ b/src/main/java/org/dataloader/stats/DelegatingStatisticsCollector.java @@ -26,63 +26,63 @@ public DelegatingStatisticsCollector(StatisticsCollector delegateCollector) { } @Override - public long incrementLoadCount(IncrementLoadCountStatisticsContext context) { + public void incrementLoadCount(IncrementLoadCountStatisticsContext context) { delegateCollector.incrementLoadCount(context); - return collector.incrementLoadCount(context); + collector.incrementLoadCount(context); } @Deprecated @Override - public long incrementLoadCount() { - return incrementLoadCount(null); + public void incrementLoadCount() { + incrementLoadCount(null); } @Override - public long incrementLoadErrorCount(IncrementLoadErrorCountStatisticsContext context) { + public void incrementLoadErrorCount(IncrementLoadErrorCountStatisticsContext context) { delegateCollector.incrementLoadErrorCount(context); - return collector.incrementLoadErrorCount(context); + collector.incrementLoadErrorCount(context); } @Deprecated @Override - public long incrementLoadErrorCount() { - return incrementLoadErrorCount(null); + public void incrementLoadErrorCount() { + incrementLoadErrorCount(null); } @Override - public long incrementBatchLoadCountBy(long delta, IncrementBatchLoadCountByStatisticsContext context) { + public void incrementBatchLoadCountBy(long delta, IncrementBatchLoadCountByStatisticsContext context) { delegateCollector.incrementBatchLoadCountBy(delta, context); - return collector.incrementBatchLoadCountBy(delta, context); + collector.incrementBatchLoadCountBy(delta, context); } @Deprecated @Override - public long incrementBatchLoadCountBy(long delta) { - return incrementBatchLoadCountBy(delta, null); + public void incrementBatchLoadCountBy(long delta) { + incrementBatchLoadCountBy(delta, null); } @Override - public long incrementBatchLoadExceptionCount(IncrementBatchLoadExceptionCountStatisticsContext context) { + public void incrementBatchLoadExceptionCount(IncrementBatchLoadExceptionCountStatisticsContext context) { delegateCollector.incrementBatchLoadExceptionCount(context); - return collector.incrementBatchLoadExceptionCount(context); + collector.incrementBatchLoadExceptionCount(context); } @Deprecated @Override - public long incrementBatchLoadExceptionCount() { - return incrementBatchLoadExceptionCount(null); + public void incrementBatchLoadExceptionCount() { + incrementBatchLoadExceptionCount(null); } @Override - public long incrementCacheHitCount(IncrementCacheHitCountStatisticsContext context) { + public void incrementCacheHitCount(IncrementCacheHitCountStatisticsContext context) { delegateCollector.incrementCacheHitCount(context); - return collector.incrementCacheHitCount(context); + collector.incrementCacheHitCount(context); } @Deprecated @Override - public long incrementCacheHitCount() { - return incrementCacheHitCount(null); + public void incrementCacheHitCount() { + incrementCacheHitCount(null); } /** diff --git a/src/main/java/org/dataloader/stats/NoOpStatisticsCollector.java b/src/main/java/org/dataloader/stats/NoOpStatisticsCollector.java index e7267b3..6397d8f 100644 --- 
a/src/main/java/org/dataloader/stats/NoOpStatisticsCollector.java +++ b/src/main/java/org/dataloader/stats/NoOpStatisticsCollector.java @@ -14,58 +14,54 @@ public class NoOpStatisticsCollector implements StatisticsCollector { private static final Statistics ZERO_STATS = new Statistics(); @Override - public long incrementLoadCount(IncrementLoadCountStatisticsContext context) { - return 0; + public void incrementLoadCount(IncrementLoadCountStatisticsContext context) { } @Deprecated @Override - public long incrementLoadCount() { - return incrementLoadCount(null); + public void incrementLoadCount() { + incrementLoadCount(null); } @Override - public long incrementLoadErrorCount(IncrementLoadErrorCountStatisticsContext context) { - return 0; + public void incrementLoadErrorCount(IncrementLoadErrorCountStatisticsContext context) { } @Deprecated @Override - public long incrementLoadErrorCount() { - return incrementLoadErrorCount(null); + public void incrementLoadErrorCount() { + incrementLoadErrorCount(null); } @Override - public long incrementBatchLoadCountBy(long delta, IncrementBatchLoadCountByStatisticsContext context) { - return 0; + public void incrementBatchLoadCountBy(long delta, IncrementBatchLoadCountByStatisticsContext context) { } @Deprecated @Override - public long incrementBatchLoadCountBy(long delta) { - return incrementBatchLoadCountBy(delta, null); + public void incrementBatchLoadCountBy(long delta) { + incrementBatchLoadCountBy(delta, null); } @Override - public long incrementBatchLoadExceptionCount(IncrementBatchLoadExceptionCountStatisticsContext context) { - return 0; + public void incrementBatchLoadExceptionCount(IncrementBatchLoadExceptionCountStatisticsContext context) { + } @Deprecated @Override - public long incrementBatchLoadExceptionCount() { - return incrementBatchLoadExceptionCount(null); + public void incrementBatchLoadExceptionCount() { + incrementBatchLoadExceptionCount(null); } @Override - public long incrementCacheHitCount(IncrementCacheHitCountStatisticsContext context) { - return 0; + public void incrementCacheHitCount(IncrementCacheHitCountStatisticsContext context) { } @Deprecated @Override - public long incrementCacheHitCount() { - return incrementCacheHitCount(null); + public void incrementCacheHitCount() { + incrementCacheHitCount(null); } @Override diff --git a/src/main/java/org/dataloader/stats/SimpleStatisticsCollector.java b/src/main/java/org/dataloader/stats/SimpleStatisticsCollector.java index 22b3662..2c2898a 100644 --- a/src/main/java/org/dataloader/stats/SimpleStatisticsCollector.java +++ b/src/main/java/org/dataloader/stats/SimpleStatisticsCollector.java @@ -6,7 +6,7 @@ import org.dataloader.stats.context.IncrementLoadCountStatisticsContext; import org.dataloader.stats.context.IncrementLoadErrorCountStatisticsContext; -import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.LongAdder; /** * This simple collector uses {@link java.util.concurrent.atomic.AtomicLong}s to collect @@ -15,72 +15,73 @@ * @see org.dataloader.stats.StatisticsCollector */ public class SimpleStatisticsCollector implements StatisticsCollector { - private final AtomicLong loadCount = new AtomicLong(); - private final AtomicLong batchInvokeCount = new AtomicLong(); - private final AtomicLong batchLoadCount = new AtomicLong(); - private final AtomicLong cacheHitCount = new AtomicLong(); - private final AtomicLong batchLoadExceptionCount = new AtomicLong(); - private final AtomicLong loadErrorCount = new AtomicLong(); + + private final LongAdder 
loadCount = new LongAdder(); + private final LongAdder batchInvokeCount = new LongAdder(); + private final LongAdder batchLoadCount = new LongAdder(); + private final LongAdder cacheHitCount = new LongAdder(); + private final LongAdder batchLoadExceptionCount = new LongAdder(); + private final LongAdder loadErrorCount = new LongAdder(); @Override - public long incrementLoadCount(IncrementLoadCountStatisticsContext context) { - return loadCount.incrementAndGet(); + public void incrementLoadCount(IncrementLoadCountStatisticsContext context) { + loadCount.increment(); } @Deprecated @Override - public long incrementLoadCount() { - return incrementLoadCount(null); + public void incrementLoadCount() { + incrementLoadCount(null); } @Override - public long incrementLoadErrorCount(IncrementLoadErrorCountStatisticsContext context) { - return loadErrorCount.incrementAndGet(); + public void incrementLoadErrorCount(IncrementLoadErrorCountStatisticsContext context) { + loadErrorCount.increment(); } @Deprecated @Override - public long incrementLoadErrorCount() { - return incrementLoadErrorCount(null); + public void incrementLoadErrorCount() { + incrementLoadErrorCount(null); } @Override - public long incrementBatchLoadCountBy(long delta, IncrementBatchLoadCountByStatisticsContext context) { - batchInvokeCount.incrementAndGet(); - return batchLoadCount.addAndGet(delta); + public void incrementBatchLoadCountBy(long delta, IncrementBatchLoadCountByStatisticsContext context) { + batchInvokeCount.increment(); + batchLoadCount.add(delta); } @Deprecated @Override - public long incrementBatchLoadCountBy(long delta) { - return incrementBatchLoadCountBy(delta, null); + public void incrementBatchLoadCountBy(long delta) { + incrementBatchLoadCountBy(delta, null); } @Override - public long incrementBatchLoadExceptionCount(IncrementBatchLoadExceptionCountStatisticsContext context) { - return batchLoadExceptionCount.incrementAndGet(); + public void incrementBatchLoadExceptionCount(IncrementBatchLoadExceptionCountStatisticsContext context) { + batchLoadExceptionCount.increment(); } @Deprecated @Override - public long incrementBatchLoadExceptionCount() { - return incrementBatchLoadExceptionCount(null); + public void incrementBatchLoadExceptionCount() { + incrementBatchLoadExceptionCount(null); } @Override - public long incrementCacheHitCount(IncrementCacheHitCountStatisticsContext context) { - return cacheHitCount.incrementAndGet(); + public void incrementCacheHitCount(IncrementCacheHitCountStatisticsContext context) { + cacheHitCount.increment(); } @Deprecated @Override - public long incrementCacheHitCount() { - return incrementCacheHitCount(null); + public void incrementCacheHitCount() { + incrementCacheHitCount(null); } @Override public Statistics getStatistics() { - return new Statistics(loadCount.get(), loadErrorCount.get(), batchInvokeCount.get(), batchLoadCount.get(), batchLoadExceptionCount.get(), cacheHitCount.get()); + return new Statistics(loadCount.sum(), loadErrorCount.sum(), batchInvokeCount.sum(), batchLoadCount.sum(), batchLoadExceptionCount.sum(), cacheHitCount.sum()); } @Override diff --git a/src/main/java/org/dataloader/stats/StatisticsCollector.java b/src/main/java/org/dataloader/stats/StatisticsCollector.java index 33e417f..7b14eab 100644 --- a/src/main/java/org/dataloader/stats/StatisticsCollector.java +++ b/src/main/java/org/dataloader/stats/StatisticsCollector.java @@ -18,21 +18,18 @@ public interface StatisticsCollector { * * @param the class of the key in the data loader * @param context the 
context containing metadata of the data loader invocation - * - * @return the current value after increment */ - default long incrementLoadCount(IncrementLoadCountStatisticsContext context) { - return incrementLoadCount(); + default void incrementLoadCount(IncrementLoadCountStatisticsContext context) { + incrementLoadCount(); } /** * Called to increment the number of loads * * @deprecated use {@link #incrementLoadCount(IncrementLoadCountStatisticsContext)} - * @return the current value after increment */ @Deprecated - long incrementLoadCount(); + void incrementLoadCount(); /** * Called to increment the number of loads that resulted in an object deemed in error @@ -40,20 +37,18 @@ default long incrementLoadCount(IncrementLoadCountStatisticsContext conte * @param the class of the key in the data loader * @param context the context containing metadata of the data loader invocation * - * @return the current value after increment */ - default long incrementLoadErrorCount(IncrementLoadErrorCountStatisticsContext context) { - return incrementLoadErrorCount(); + default void incrementLoadErrorCount(IncrementLoadErrorCountStatisticsContext context) { + incrementLoadErrorCount(); } /** * Called to increment the number of loads that resulted in an object deemed in error * * @deprecated use {@link #incrementLoadErrorCount(IncrementLoadErrorCountStatisticsContext)} - * @return the current value after increment */ @Deprecated - long incrementLoadErrorCount(); + void incrementLoadErrorCount(); /** * Called to increment the number of batch loads @@ -61,11 +56,9 @@ default long incrementLoadErrorCount(IncrementLoadErrorCountStatisticsContex * @param the class of the key in the data loader * @param delta how much to add to the count * @param context the context containing metadata of the data loader invocation - * - * @return the current value after increment */ - default long incrementBatchLoadCountBy(long delta, IncrementBatchLoadCountByStatisticsContext context) { - return incrementBatchLoadCountBy(delta); + default void incrementBatchLoadCountBy(long delta, IncrementBatchLoadCountByStatisticsContext context) { + incrementBatchLoadCountBy(delta); } /** @@ -74,52 +67,45 @@ default long incrementBatchLoadCountBy(long delta, IncrementBatchLoadCountBy * @param delta how much to add to the count * * @deprecated use {@link #incrementBatchLoadCountBy(long, IncrementBatchLoadCountByStatisticsContext)} - * @return the current value after increment */ @Deprecated - long incrementBatchLoadCountBy(long delta); + void incrementBatchLoadCountBy(long delta); /** * Called to increment the number of batch loads exceptions * * @param the class of the key in the data loader * @param context the context containing metadata of the data loader invocation - * - * @return the current value after increment */ - default long incrementBatchLoadExceptionCount(IncrementBatchLoadExceptionCountStatisticsContext context) { - return incrementBatchLoadExceptionCount(); + default void incrementBatchLoadExceptionCount(IncrementBatchLoadExceptionCountStatisticsContext context) { + incrementBatchLoadExceptionCount(); } /** * Called to increment the number of batch loads exceptions * * @deprecated use {@link #incrementBatchLoadExceptionCount(IncrementBatchLoadExceptionCountStatisticsContext)} - * @return the current value after increment */ @Deprecated - long incrementBatchLoadExceptionCount(); + void incrementBatchLoadExceptionCount(); /** * Called to increment the number of cache hits * * @param the class of the key in the data loader * 
@param context the context containing metadata of the data loader invocation - * - * @return the current value after increment */ - default long incrementCacheHitCount(IncrementCacheHitCountStatisticsContext context) { - return incrementCacheHitCount(); + default void incrementCacheHitCount(IncrementCacheHitCountStatisticsContext context) { + incrementCacheHitCount(); } /** * Called to increment the number of cache hits * * @deprecated use {@link #incrementCacheHitCount(IncrementCacheHitCountStatisticsContext)} - * @return the current value after increment */ @Deprecated - long incrementCacheHitCount(); + void incrementCacheHitCount(); /** * @return the statistics that have been gathered to this point in time diff --git a/src/main/java/org/dataloader/stats/ThreadLocalStatisticsCollector.java b/src/main/java/org/dataloader/stats/ThreadLocalStatisticsCollector.java index d091c5a..cab6d0d 100644 --- a/src/main/java/org/dataloader/stats/ThreadLocalStatisticsCollector.java +++ b/src/main/java/org/dataloader/stats/ThreadLocalStatisticsCollector.java @@ -35,63 +35,63 @@ public ThreadLocalStatisticsCollector resetThread() { } @Override - public long incrementLoadCount(IncrementLoadCountStatisticsContext context) { + public void incrementLoadCount(IncrementLoadCountStatisticsContext context) { overallCollector.incrementLoadCount(context); - return collector.get().incrementLoadCount(context); + collector.get().incrementLoadCount(context); } @Deprecated @Override - public long incrementLoadCount() { - return incrementLoadCount(null); + public void incrementLoadCount() { + incrementLoadCount(null); } @Override - public long incrementLoadErrorCount(IncrementLoadErrorCountStatisticsContext context) { + public void incrementLoadErrorCount(IncrementLoadErrorCountStatisticsContext context) { overallCollector.incrementLoadErrorCount(context); - return collector.get().incrementLoadErrorCount(context); + collector.get().incrementLoadErrorCount(context); } @Deprecated @Override - public long incrementLoadErrorCount() { - return incrementLoadErrorCount(null); + public void incrementLoadErrorCount() { + incrementLoadErrorCount(null); } @Override - public long incrementBatchLoadCountBy(long delta, IncrementBatchLoadCountByStatisticsContext context) { + public void incrementBatchLoadCountBy(long delta, IncrementBatchLoadCountByStatisticsContext context) { overallCollector.incrementBatchLoadCountBy(delta, context); - return collector.get().incrementBatchLoadCountBy(delta, context); + collector.get().incrementBatchLoadCountBy(delta, context); } @Deprecated @Override - public long incrementBatchLoadCountBy(long delta) { - return incrementBatchLoadCountBy(delta, null); + public void incrementBatchLoadCountBy(long delta) { + incrementBatchLoadCountBy(delta, null); } @Override - public long incrementBatchLoadExceptionCount(IncrementBatchLoadExceptionCountStatisticsContext context) { + public void incrementBatchLoadExceptionCount(IncrementBatchLoadExceptionCountStatisticsContext context) { overallCollector.incrementBatchLoadExceptionCount(context); - return collector.get().incrementBatchLoadExceptionCount(context); + collector.get().incrementBatchLoadExceptionCount(context); } @Deprecated @Override - public long incrementBatchLoadExceptionCount() { - return incrementBatchLoadExceptionCount(null); + public void incrementBatchLoadExceptionCount() { + incrementBatchLoadExceptionCount(null); } @Override - public long incrementCacheHitCount(IncrementCacheHitCountStatisticsContext context) { + public void 
incrementCacheHitCount(IncrementCacheHitCountStatisticsContext context) { overallCollector.incrementCacheHitCount(context); - return collector.get().incrementCacheHitCount(context); + collector.get().incrementCacheHitCount(context); } @Deprecated @Override - public long incrementCacheHitCount() { - return incrementCacheHitCount(null); + public void incrementCacheHitCount() { + incrementCacheHitCount(null); } /** diff --git a/src/test/java/ReadmeExamples.java b/src/test/java/ReadmeExamples.java index 1f718aa..6705cb8 100644 --- a/src/test/java/ReadmeExamples.java +++ b/src/test/java/ReadmeExamples.java @@ -105,7 +105,7 @@ public CompletionStage> load(List userIds) { private void callContextExample() { DataLoaderOptions options = DataLoaderOptions.newOptions() - .setBatchLoaderContextProvider(() -> SecurityCtx.getCallingUserCtx()); + .setBatchLoaderContextProvider(() -> SecurityCtx.getCallingUserCtx()).build(); BatchLoaderWithContext batchLoader = new BatchLoaderWithContext() { @Override @@ -120,7 +120,7 @@ public CompletionStage> load(List keys, BatchLoaderEnvironm private void keyContextExample() { DataLoaderOptions options = DataLoaderOptions.newOptions() - .setBatchLoaderContextProvider(() -> SecurityCtx.getCallingUserCtx()); + .setBatchLoaderContextProvider(() -> SecurityCtx.getCallingUserCtx()).build(); BatchLoaderWithContext batchLoader = new BatchLoaderWithContext() { @Override @@ -236,7 +236,7 @@ private void clearCacheOnError() { BatchLoader teamsBatchLoader; private void disableCache() { - DataLoaderFactory.newDataLoader(userBatchLoader, DataLoaderOptions.newOptions().setCachingEnabled(false)); + DataLoaderFactory.newDataLoader(userBatchLoader, DataLoaderOptions.newOptions().setCachingEnabled(false).build()); userDataLoader.load("A"); @@ -265,7 +265,7 @@ public Collection> getAll() { } @Override - public CacheMap set(Object key, CompletableFuture value) { + public CompletableFuture putIfAbsentAtomically(Object key, CompletableFuture value) { return null; } @@ -278,12 +278,17 @@ public CacheMap delete(Object key) { public CacheMap clear() { return null; } + + @Override + public int size() { + return 0; + } } private void customCache() { MyCustomCache customCache = new MyCustomCache(); - DataLoaderOptions options = DataLoaderOptions.newOptions().setCacheMap(customCache); + DataLoaderOptions options = DataLoaderOptions.newOptions().setCacheMap(customCache).build(); DataLoaderFactory.newDataLoader(userBatchLoader, options); } @@ -311,7 +316,7 @@ private void statsExample() { private void statsConfigExample() { - DataLoaderOptions options = DataLoaderOptions.newOptions().setStatisticsCollector(() -> new ThreadLocalStatisticsCollector()); + DataLoaderOptions options = DataLoaderOptions.newOptions().setStatisticsCollector(() -> new ThreadLocalStatisticsCollector()).build(); DataLoader userDataLoader = DataLoaderFactory.newDataLoader(userBatchLoader, options); } @@ -410,7 +415,7 @@ public DataLoaderInstrumentationContext> beginBatchLoader(DataLoader userDataLoader = DataLoaderFactory.newDataLoader(userBatchLoader, options); } diff --git a/src/test/java/org/dataloader/ClockDataLoader.java b/src/test/java/org/dataloader/ClockDataLoader.java index 21faeea..0c83316 100644 --- a/src/test/java/org/dataloader/ClockDataLoader.java +++ b/src/test/java/org/dataloader/ClockDataLoader.java @@ -9,7 +9,7 @@ public ClockDataLoader(Object batchLoadFunction, Clock clock) { } public ClockDataLoader(Object batchLoadFunction, DataLoaderOptions options, Clock clock) { - super(batchLoadFunction, options, 
clock); + super(null, batchLoadFunction, options, clock); } } diff --git a/src/test/java/org/dataloader/DataLoaderBatchLoaderEnvironmentTest.java b/src/test/java/org/dataloader/DataLoaderBatchLoaderEnvironmentTest.java index 90adbc5..274820d 100644 --- a/src/test/java/org/dataloader/DataLoaderBatchLoaderEnvironmentTest.java +++ b/src/test/java/org/dataloader/DataLoaderBatchLoaderEnvironmentTest.java @@ -41,7 +41,7 @@ public void context_is_passed_to_batch_loader_function() { return CompletableFuture.completedFuture(list); }; DataLoaderOptions options = DataLoaderOptions.newOptions() - .setBatchLoaderContextProvider(() -> "ctx"); + .setBatchLoaderContextProvider(() -> "ctx").build(); DataLoader loader = newDataLoader(batchLoader, options); loader.load("A"); @@ -61,7 +61,7 @@ public void context_is_passed_to_batch_loader_function() { public void key_contexts_are_passed_to_batch_loader_function() { BatchLoaderWithContext batchLoader = contextBatchLoader(); DataLoaderOptions options = DataLoaderOptions.newOptions() - .setBatchLoaderContextProvider(() -> "ctx"); + .setBatchLoaderContextProvider(() -> "ctx").build(); DataLoader loader = newDataLoader(batchLoader, options); loader.load("A", "aCtx"); @@ -82,7 +82,8 @@ public void key_contexts_are_passed_to_batch_loader_function_when_batching_disab BatchLoaderWithContext batchLoader = contextBatchLoader(); DataLoaderOptions options = DataLoaderOptions.newOptions() .setBatchingEnabled(false) - .setBatchLoaderContextProvider(() -> "ctx"); + .setBatchLoaderContextProvider(() -> "ctx") + .build(); DataLoader loader = newDataLoader(batchLoader, options); CompletableFuture aLoad = loader.load("A", "aCtx"); @@ -104,7 +105,8 @@ public void key_contexts_are_passed_to_batch_loader_function_when_batching_disab public void missing_key_contexts_are_passed_to_batch_loader_function() { BatchLoaderWithContext batchLoader = contextBatchLoader(); DataLoaderOptions options = DataLoaderOptions.newOptions() - .setBatchLoaderContextProvider(() -> "ctx"); + .setBatchLoaderContextProvider(() -> "ctx") + .build(); DataLoader loader = newDataLoader(batchLoader, options); loader.load("A", "aCtx"); @@ -133,7 +135,8 @@ public void context_is_passed_to_map_batch_loader_function() { return CompletableFuture.completedFuture(map); }; DataLoaderOptions options = DataLoaderOptions.newOptions() - .setBatchLoaderContextProvider(() -> "ctx"); + .setBatchLoaderContextProvider(() -> "ctx") + .build(); DataLoader loader = newMappedDataLoader(mapBatchLoader, options); loader.load("A", "aCtx"); @@ -200,7 +203,8 @@ public void mmap_semantics_apply_to_batch_loader_context() { BatchLoaderWithContext batchLoader = contextBatchLoader(); DataLoaderOptions options = DataLoaderOptions.newOptions() .setBatchLoaderContextProvider(() -> "ctx") - .setCachingEnabled(false); + .setCachingEnabled(false) + .build(); DataLoader loader = newDataLoader(batchLoader, options); loader.load("A", "aCtx"); diff --git a/src/test/java/org/dataloader/DataLoaderBuilderTest.java b/src/test/java/org/dataloader/DataLoaderBuilderTest.java index f38ff82..bf8b762 100644 --- a/src/test/java/org/dataloader/DataLoaderBuilderTest.java +++ b/src/test/java/org/dataloader/DataLoaderBuilderTest.java @@ -12,8 +12,8 @@ public class DataLoaderBuilderTest { BatchLoader batchLoader2 = keys -> null; - DataLoaderOptions defaultOptions = DataLoaderOptions.newOptions(); - DataLoaderOptions differentOptions = DataLoaderOptions.newOptions().setCachingEnabled(false); + DataLoaderOptions defaultOptions = DataLoaderOptions.newOptions().build(); + 
DataLoaderOptions differentOptions = DataLoaderOptions.newOptions().setCachingEnabled(false).build(); @Test void canBuildNewDataLoaders() { diff --git a/src/test/java/org/dataloader/DataLoaderCacheMapTest.java b/src/test/java/org/dataloader/DataLoaderCacheMapTest.java index df364a2..d3de4aa 100644 --- a/src/test/java/org/dataloader/DataLoaderCacheMapTest.java +++ b/src/test/java/org/dataloader/DataLoaderCacheMapTest.java @@ -14,6 +14,7 @@ /** * Tests for cacheMap functionality.. */ +@SuppressWarnings("NullableProblems") public class DataLoaderCacheMapTest { private BatchLoader keysAsValues() { @@ -24,12 +25,33 @@ private BatchLoader keysAsValues() { public void should_provide_all_futures_from_cache() { DataLoader dataLoader = newDataLoader(keysAsValues()); - dataLoader.load(1); - dataLoader.load(2); - dataLoader.load(1); + CompletableFuture cf1 = dataLoader.load(1); + CompletableFuture cf2 = dataLoader.load(2); + CompletableFuture cf3 = dataLoader.load(3); + + CacheMap cacheMap = dataLoader.getCacheMap(); + Collection> futures = cacheMap.getAll(); + assertThat(futures.size(), equalTo(3)); + + + assertThat(cacheMap.get(1), equalTo(cf1)); + assertThat(cacheMap.get(2), equalTo(cf2)); + assertThat(cacheMap.get(3), equalTo(cf3)); + assertThat(cacheMap.containsKey(1), equalTo(true)); + assertThat(cacheMap.containsKey(2), equalTo(true)); + assertThat(cacheMap.containsKey(3), equalTo(true)); + assertThat(cacheMap.containsKey(4), equalTo(false)); + + cacheMap.delete(1); + assertThat(cacheMap.containsKey(1), equalTo(false)); + assertThat(cacheMap.containsKey(2), equalTo(true)); + + cacheMap.clear(); + assertThat(cacheMap.containsKey(1), equalTo(false)); + assertThat(cacheMap.containsKey(2), equalTo(false)); + assertThat(cacheMap.containsKey(3), equalTo(false)); + assertThat(cacheMap.containsKey(4), equalTo(false)); - Collection> futures = dataLoader.getCacheMap().getAll(); - assertThat(futures.size(), equalTo(2)); } @Test diff --git a/src/test/java/org/dataloader/DataLoaderFactoryTest.java b/src/test/java/org/dataloader/DataLoaderFactoryTest.java new file mode 100644 index 0000000..3b3f368 --- /dev/null +++ b/src/test/java/org/dataloader/DataLoaderFactoryTest.java @@ -0,0 +1,51 @@ +package org.dataloader; + +import org.junit.jupiter.api.Test; + +import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +class DataLoaderFactoryTest { + + @Test + void can_create_via_builder() { + BatchLoaderWithContext loader = (keys, environment) -> CompletableFuture.completedFuture(keys); + DataLoaderOptions options = DataLoaderOptions.newOptions().setBatchingEnabled(true).build(); + + DataLoader dl = DataLoaderFactory.builder() + .name("x").batchLoader(loader).options(options).build(); + + assertNotNull(dl.getName()); + assertThat(dl.getName(), equalTo("x")); + assertThat(dl.getBatchLoadFunction(), equalTo(loader)); + assertThat(dl.getOptions(), equalTo(options)); + + BatchLoaderWithContext> loaderTry = (keys, environment) + -> CompletableFuture.completedFuture(keys.stream().map(Try::succeeded).collect(Collectors.toList())); + + DataLoader> dlTry = DataLoaderFactory.>builder() + .name("try").batchLoader(loaderTry).options(options).build(); + + assertNotNull(dlTry.getName()); + assertThat(dlTry.getName(), equalTo("try")); + assertThat(dlTry.getBatchLoadFunction(), equalTo(loaderTry)); + assertThat(dlTry.getOptions(), 
equalTo(options)); + + MappedBatchLoader> mappedLoaderTry = (keys) + -> CompletableFuture.completedFuture( + keys.stream().collect(Collectors.toMap(k -> k, Try::succeeded)) + ); + + DataLoader> dlTry2 = DataLoaderFactory.>builder() + .name("try2").mappedBatchLoader(mappedLoaderTry).options(options).build(); + + assertNotNull(dlTry2.getName()); + assertThat(dlTry2.getName(), equalTo("try2")); + assertThat(dlTry2.getBatchLoadFunction(), equalTo(mappedLoaderTry)); + assertThat(dlTry2.getOptions(), equalTo(options)); + } +} \ No newline at end of file diff --git a/src/test/java/org/dataloader/DataLoaderOptionsTest.java b/src/test/java/org/dataloader/DataLoaderOptionsTest.java index b4ebb9e..81a7126 100644 --- a/src/test/java/org/dataloader/DataLoaderOptionsTest.java +++ b/src/test/java/org/dataloader/DataLoaderOptionsTest.java @@ -31,7 +31,7 @@ void canCreateDefaultOptions() { assertThat(optionsDefault.maxBatchSize(), equalTo(-1)); assertThat(optionsDefault.getBatchLoaderScheduler(), equalTo(null)); - DataLoaderOptions builtOptions = DataLoaderOptions.newOptionsBuilder().build(); + DataLoaderOptions builtOptions = DataLoaderOptions.newDefaultOptions(); assertThat(builtOptions, equalTo(optionsDefault)); assertThat(builtOptions == optionsDefault, equalTo(false)); @@ -43,11 +43,7 @@ void canCreateDefaultOptions() { @Test void canCopyOk() { - DataLoaderOptions optionsNext = new DataLoaderOptions(optionsDefault); - assertThat(optionsNext, equalTo(optionsDefault)); - assertThat(optionsNext == optionsDefault, equalTo(false)); - - optionsNext = DataLoaderOptions.newDataLoaderOptions(optionsDefault).build(); + DataLoaderOptions optionsNext = DataLoaderOptions.newOptions(optionsDefault).build(); assertThat(optionsNext, equalTo(optionsDefault)); assertThat(optionsNext == optionsDefault, equalTo(false)); } @@ -89,25 +85,25 @@ public Object getKey(Object input) { @Test void canBuildOk() { - assertThat(optionsDefault.setBatchingEnabled(false).batchingEnabled(), + assertThat(optionsDefault.transform(b -> b.setBatchingEnabled(false)).batchingEnabled(), equalTo(false)); - assertThat(optionsDefault.setBatchLoaderScheduler(testBatchLoaderScheduler).getBatchLoaderScheduler(), + assertThat(optionsDefault.transform(b -> b.setBatchLoaderScheduler(testBatchLoaderScheduler)).getBatchLoaderScheduler(), equalTo(testBatchLoaderScheduler)); - assertThat(optionsDefault.setBatchLoaderContextProvider(testBatchLoaderContextProvider).getBatchLoaderContextProvider(), + assertThat(optionsDefault.transform(b -> b.setBatchLoaderContextProvider(testBatchLoaderContextProvider)).getBatchLoaderContextProvider(), equalTo(testBatchLoaderContextProvider)); - assertThat(optionsDefault.setCacheMap(testCacheMap).cacheMap().get(), + assertThat(optionsDefault.transform(b -> b.setCacheMap(testCacheMap)).cacheMap().get(), equalTo(testCacheMap)); - assertThat(optionsDefault.setCachingEnabled(false).cachingEnabled(), + assertThat(optionsDefault.transform(b -> b.setCachingEnabled(false)).cachingEnabled(), equalTo(false)); - assertThat(optionsDefault.setValueCacheOptions(testValueCacheOptions).getValueCacheOptions(), + assertThat(optionsDefault.transform(b -> b.setValueCacheOptions(testValueCacheOptions)).getValueCacheOptions(), equalTo(testValueCacheOptions)); - assertThat(optionsDefault.setCacheKeyFunction(testCacheKey).cacheKeyFunction().get(), + assertThat(optionsDefault.transform(b -> b.setCacheKeyFunction(testCacheKey)).cacheKeyFunction().get(), equalTo(testCacheKey)); - 
assertThat(optionsDefault.setValueCache(testValueCache).valueCache().get(), + assertThat(optionsDefault.transform(b -> b.setValueCache(testValueCache)).valueCache().get(), equalTo(testValueCache)); - assertThat(optionsDefault.setMaxBatchSize(10).maxBatchSize(), + assertThat(optionsDefault.transform(b -> b.setMaxBatchSize(10)).maxBatchSize(), equalTo(10)); - assertThat(optionsDefault.setStatisticsCollector(testStatisticsCollectorSupplier).getStatisticsCollector(), + assertThat(optionsDefault.transform(b -> b.setStatisticsCollector(testStatisticsCollectorSupplier)).getStatisticsCollector(), equalTo(testStatisticsCollectorSupplier.get())); DataLoaderOptions builtOptions = optionsDefault.transform(builder -> { @@ -150,7 +146,7 @@ void canBuildOk() { @Test void canBuildViaBuilderOk() { - DataLoaderOptions.Builder builder = DataLoaderOptions.newOptionsBuilder(); + DataLoaderOptions.Builder builder = DataLoaderOptions.newOptions(); builder.setBatchingEnabled(false); builder.setCachingExceptionsEnabled(false); builder.setCachingEnabled(false); @@ -196,7 +192,7 @@ void canCopyExistingOptionValuesOnTransform() { }; BatchLoaderContextProvider contextProvider1 = () -> null; - DataLoaderOptions startingOptions = DataLoaderOptions.newOptionsBuilder().setBatchingEnabled(false) + DataLoaderOptions startingOptions = DataLoaderOptions.newOptions().setBatchingEnabled(false) .setCachingEnabled(false) .setInstrumentation(instrumentation1) .setBatchLoaderContextProvider(contextProvider1) diff --git a/src/test/java/org/dataloader/DataLoaderRegistryTest.java b/src/test/java/org/dataloader/DataLoaderRegistryTest.java index bd1534d..89624d7 100644 --- a/src/test/java/org/dataloader/DataLoaderRegistryTest.java +++ b/src/test/java/org/dataloader/DataLoaderRegistryTest.java @@ -2,8 +2,10 @@ import org.dataloader.stats.SimpleStatisticsCollector; import org.dataloader.stats.Statistics; +import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; +import java.util.List; import java.util.concurrent.CompletableFuture; import static java.util.Arrays.asList; @@ -18,9 +20,10 @@ public class DataLoaderRegistryTest { @Test public void registration_works() { - DataLoader dlA = newDataLoader(identityBatchLoader); - DataLoader dlB = newDataLoader(identityBatchLoader); - DataLoader dlC = newDataLoader(identityBatchLoader); + DataLoader dlA = newDataLoader("a", identityBatchLoader); + DataLoader dlB = newDataLoader("b", identityBatchLoader); + DataLoader dlC = newDataLoader("c", identityBatchLoader); + DataLoader dlUnnamed = newDataLoader(identityBatchLoader); DataLoaderRegistry registry = new DataLoaderRegistry(); @@ -40,7 +43,7 @@ public void registration_works() { // and unregister (fluently) DataLoaderRegistry dlR = registry.unregister("c"); - assertThat(dlR,equalTo(registry)); + assertThat(dlR, equalTo(registry)); assertThat(registry.getDataLoaders(), equalTo(asList(dlA, dlB))); @@ -49,15 +52,27 @@ public void registration_works() { assertThat(readDL, sameInstance(dlA)); assertThat(registry.getKeys(), hasItems("a", "b")); + + + // named registry + registry = new DataLoaderRegistry(); + registry.register(dlA); + assertThat(registry.getDataLoaders(), equalTo(List.of(dlA))); + + try { + registry.register(dlUnnamed); + Assertions.fail("Should have thrown an exception"); + } catch (NullPointerException ignored) { + } } @Test public void registries_can_be_combined() { - DataLoader dlA = newDataLoader(identityBatchLoader); - DataLoader dlB = newDataLoader(identityBatchLoader); - DataLoader dlC = 
newDataLoader(identityBatchLoader); - DataLoader dlD = newDataLoader(identityBatchLoader); + DataLoader dlA = newDataLoader("a", identityBatchLoader); + DataLoader dlB = newDataLoader("b", identityBatchLoader); + DataLoader dlC = newDataLoader("c", identityBatchLoader); + DataLoader dlD = newDataLoader("d", identityBatchLoader); DataLoaderRegistry registry1 = new DataLoaderRegistry(); @@ -79,17 +94,21 @@ public void stats_can_be_collected() { DataLoaderRegistry registry = new DataLoaderRegistry(); DataLoader dlA = newDataLoader(identityBatchLoader, - DataLoaderOptions.newOptions().setStatisticsCollector(SimpleStatisticsCollector::new) + DataLoaderOptions.newOptions().setStatisticsCollector(SimpleStatisticsCollector::new).build() ); DataLoader dlB = newDataLoader(identityBatchLoader, - DataLoaderOptions.newOptions().setStatisticsCollector(SimpleStatisticsCollector::new) + DataLoaderOptions.newOptions().setStatisticsCollector(SimpleStatisticsCollector::new).build() ); DataLoader dlC = newDataLoader(identityBatchLoader, - DataLoaderOptions.newOptions().setStatisticsCollector(SimpleStatisticsCollector::new) + DataLoaderOptions.newOptions().setStatisticsCollector(SimpleStatisticsCollector::new).build() ); registry.register("a", dlA).register("b", dlB).register("c", dlC); + dlA = registry.getDataLoader("a"); + dlB = registry.getDataLoader("b"); + dlC = registry.getDataLoader("b"); + dlA.load("X"); dlB.load("Y"); dlC.load("Z"); @@ -116,7 +135,7 @@ public void computeIfAbsent_creates_a_data_loader_if_there_was_no_value_at_key() DataLoaderRegistry registry = new DataLoaderRegistry(); - DataLoader dlA = newDataLoader(identityBatchLoader); + DataLoader dlA = newDataLoader("a", identityBatchLoader); DataLoader registered = registry.computeIfAbsent("a", (key) -> dlA); assertThat(registered, equalTo(dlA)); @@ -129,11 +148,11 @@ public void computeIfAbsent_returns_an_existing_data_loader_if_there_was_a_value DataLoaderRegistry registry = new DataLoaderRegistry(); - DataLoader dlA = newDataLoader(identityBatchLoader); + DataLoader dlA = newDataLoader("a", identityBatchLoader); registry.computeIfAbsent("a", (key) -> dlA); // register again at same key - DataLoader dlA2 = newDataLoader(identityBatchLoader); + DataLoader dlA2 = newDataLoader("a", identityBatchLoader); DataLoader registered = registry.computeIfAbsent("a", (key) -> dlA2); assertThat(registered, equalTo(dlA)); @@ -149,8 +168,8 @@ public void dispatch_counts_are_maintained() { DataLoader dlA = newDataLoader(identityBatchLoader); DataLoader dlB = newDataLoader(identityBatchLoader); - registry.register("a", dlA); - registry.register("b", dlB); + dlA = registry.registerAndGet("a", dlA); + dlB = registry.registerAndGet("b", dlB); dlA.load("av1"); dlA.load("av2"); diff --git a/src/test/java/org/dataloader/DataLoaderStatsTest.java b/src/test/java/org/dataloader/DataLoaderStatsTest.java index b8393e6..04ea2e7 100644 --- a/src/test/java/org/dataloader/DataLoaderStatsTest.java +++ b/src/test/java/org/dataloader/DataLoaderStatsTest.java @@ -33,7 +33,7 @@ public class DataLoaderStatsTest { public void stats_are_collected_by_default() { BatchLoader batchLoader = CompletableFuture::completedFuture; DataLoader loader = newDataLoader(batchLoader, - DataLoaderOptions.newOptions().setStatisticsCollector(SimpleStatisticsCollector::new) + DataLoaderOptions.newOptions().setStatisticsCollector(SimpleStatisticsCollector::new).build() ); loader.load("A"); @@ -75,7 +75,7 @@ public void stats_are_collected_with_specified_collector() { 
collector.incrementBatchLoadCountBy(1, new IncrementBatchLoadCountByStatisticsContext<>(1, null)); BatchLoader batchLoader = CompletableFuture::completedFuture; - DataLoaderOptions loaderOptions = DataLoaderOptions.newOptions().setStatisticsCollector(() -> collector); + DataLoaderOptions loaderOptions = DataLoaderOptions.newOptions().setStatisticsCollector(() -> collector).build(); DataLoader loader = newDataLoader(batchLoader, loaderOptions); loader.load("A"); @@ -113,7 +113,7 @@ public void stats_are_collected_with_caching_disabled() { StatisticsCollector collector = new SimpleStatisticsCollector(); BatchLoader batchLoader = CompletableFuture::completedFuture; - DataLoaderOptions loaderOptions = DataLoaderOptions.newOptions().setStatisticsCollector(() -> collector).setCachingEnabled(false); + DataLoaderOptions loaderOptions = DataLoaderOptions.newOptions().setStatisticsCollector(() -> collector).setCachingEnabled(false).build(); DataLoader loader = newDataLoader(batchLoader, loaderOptions); loader.load("A"); @@ -166,7 +166,7 @@ public void stats_are_collected_with_caching_disabled() { @Test public void stats_are_collected_on_exceptions() { DataLoader loader = DataLoaderFactory.newDataLoaderWithTry(batchLoaderThatBlows, - DataLoaderOptions.newOptions().setStatisticsCollector(SimpleStatisticsCollector::new) + DataLoaderOptions.newOptions().setStatisticsCollector(SimpleStatisticsCollector::new).build() ); loader.load("A"); @@ -221,63 +221,55 @@ private static class ContextPassingStatisticsCollector implements StatisticsColl public List> incrementCacheHitCountStatisticsContexts = new ArrayList<>(); @Override - public long incrementLoadCount(IncrementLoadCountStatisticsContext context) { + public void incrementLoadCount(IncrementLoadCountStatisticsContext context) { incrementLoadCountStatisticsContexts.add(context); - return 0; } @Deprecated @Override - public long incrementLoadCount() { - return 0; + public void incrementLoadCount() { + } @Override - public long incrementLoadErrorCount(IncrementLoadErrorCountStatisticsContext context) { + public void incrementLoadErrorCount(IncrementLoadErrorCountStatisticsContext context) { incrementLoadErrorCountStatisticsContexts.add(context); - return 0; } @Deprecated @Override - public long incrementLoadErrorCount() { - return 0; + public void incrementLoadErrorCount() { + } @Override - public long incrementBatchLoadCountBy(long delta, IncrementBatchLoadCountByStatisticsContext context) { + public void incrementBatchLoadCountBy(long delta, IncrementBatchLoadCountByStatisticsContext context) { incrementBatchLoadCountByStatisticsContexts.add(context); - return 0; } @Deprecated @Override - public long incrementBatchLoadCountBy(long delta) { - return 0; + public void incrementBatchLoadCountBy(long delta) { } @Override - public long incrementBatchLoadExceptionCount(IncrementBatchLoadExceptionCountStatisticsContext context) { + public void incrementBatchLoadExceptionCount(IncrementBatchLoadExceptionCountStatisticsContext context) { incrementBatchLoadExceptionCountStatisticsContexts.add(context); - return 0; } @Deprecated @Override - public long incrementBatchLoadExceptionCount() { - return 0; + public void incrementBatchLoadExceptionCount() { } @Override - public long incrementCacheHitCount(IncrementCacheHitCountStatisticsContext context) { + public void incrementCacheHitCount(IncrementCacheHitCountStatisticsContext context) { incrementCacheHitCountStatisticsContexts.add(context); - return 0; } @Deprecated @Override - public long incrementCacheHitCount() { - 
return 0; + public void incrementCacheHitCount() { } @Override @@ -290,7 +282,7 @@ public Statistics getStatistics() { public void context_is_passed_through_to_collector() { ContextPassingStatisticsCollector statisticsCollector = new ContextPassingStatisticsCollector(); DataLoader> loader = newDataLoader(batchLoaderThatBlows, - DataLoaderOptions.newOptions().setStatisticsCollector(() -> statisticsCollector) + DataLoaderOptions.newOptions().setStatisticsCollector(() -> statisticsCollector).build() ); loader.load("key", "keyContext"); diff --git a/src/test/java/org/dataloader/DataLoaderTest.java b/src/test/java/org/dataloader/DataLoaderTest.java index 069d390..6ec548a 100644 --- a/src/test/java/org/dataloader/DataLoaderTest.java +++ b/src/test/java/org/dataloader/DataLoaderTest.java @@ -33,27 +33,46 @@ import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.MethodSource; -import java.util.*; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; import java.util.function.Supplier; import java.util.stream.Collectors; import static java.util.Arrays.asList; -import static java.util.Collections.*; -import static java.util.concurrent.CompletableFuture.*; +import static java.util.Collections.emptyList; +import static java.util.Collections.emptyMap; +import static java.util.Collections.singletonList; +import static java.util.concurrent.CompletableFuture.allOf; +import static java.util.concurrent.CompletableFuture.completedFuture; +import static java.util.concurrent.CompletableFuture.supplyAsync; import static org.awaitility.Awaitility.await; import static org.dataloader.DataLoaderFactory.newDataLoader; +import static org.dataloader.DataLoaderOptions.newDefaultOptions; import static org.dataloader.DataLoaderOptions.newOptions; import static org.dataloader.fixtures.TestKit.areAllDone; +import static org.dataloader.fixtures.TestKit.asSet; import static org.dataloader.fixtures.TestKit.listFrom; import static org.dataloader.impl.CompletableFutureKit.cause; import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.*; +import static org.hamcrest.Matchers.anEmptyMap; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; /** * Tests for {@link DataLoader}. 
@@ -82,6 +101,20 @@ public void should_Build_a_really_really_simple_data_loader() { await().untilAtomic(success, is(true)); } + @Test + public void should_Build_a_named_data_loader() { + BatchLoader loadFunction = CompletableFuture::completedFuture; + DataLoader dl = newDataLoader("name", loadFunction, DataLoaderOptions.newDefaultOptions()); + + assertNotNull(dl.getName()); + assertThat(dl.getName(), equalTo("name")); + + DataLoader dl2 = DataLoaderFactory.builder().name("name2").batchLoader(loadFunction).build(); + + assertNotNull(dl2.getName()); + assertThat(dl2.getName(), equalTo("name2")); + } + @Test public void basic_map_batch_loading() { MappedBatchLoader evensOnlyMappedBatchLoader = (keys) -> { @@ -588,7 +621,7 @@ public void should_Cache_failed_fetches(TestDataLoaderFactory factory) { @ParameterizedTest @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") public void should_NOT_Cache_failed_fetches_if_told_not_too(TestDataLoaderFactory factory) { - DataLoaderOptions options = DataLoaderOptions.newOptions().setCachingExceptionsEnabled(false); + DataLoaderOptions options = DataLoaderOptions.newOptions().setCachingExceptionsEnabled(false).build(); List> loadCalls = new ArrayList<>(); DataLoader errorLoader = factory.idLoaderAllExceptions(options, loadCalls); @@ -736,7 +769,7 @@ public void should_Accept_objects_as_keys(TestDataLoaderFactory factory) { public void should_Disable_caching(TestDataLoaderFactory factory) throws ExecutionException, InterruptedException { List> loadCalls = new ArrayList<>(); DataLoader identityLoader = - factory.idLoader(newOptions().setCachingEnabled(false), loadCalls); + factory.idLoader(newOptions().setCachingEnabled(false).build(), loadCalls); CompletableFuture future1 = identityLoader.load("A"); CompletableFuture future2 = identityLoader.load("B"); @@ -774,7 +807,7 @@ public void should_Disable_caching(TestDataLoaderFactory factory) throws Executi public void should_work_with_duplicate_keys_when_caching_disabled(TestDataLoaderFactory factory) throws ExecutionException, InterruptedException { List> loadCalls = new ArrayList<>(); DataLoader identityLoader = - factory.idLoader(newOptions().setCachingEnabled(false), loadCalls); + factory.idLoader(newOptions().setCachingEnabled(false).build(), loadCalls); CompletableFuture future1 = identityLoader.load("A"); CompletableFuture future2 = identityLoader.load("B"); @@ -797,7 +830,7 @@ public void should_work_with_duplicate_keys_when_caching_disabled(TestDataLoader public void should_work_with_duplicate_keys_when_caching_enabled(TestDataLoaderFactory factory) throws ExecutionException, InterruptedException { List> loadCalls = new ArrayList<>(); DataLoader identityLoader = - factory.idLoader(newOptions().setCachingEnabled(true), loadCalls); + factory.idLoader(newOptions().setCachingEnabled(true).build(), loadCalls); CompletableFuture future1 = identityLoader.load("A"); CompletableFuture future2 = identityLoader.load("B"); @@ -817,7 +850,7 @@ public void should_work_with_duplicate_keys_when_caching_enabled(TestDataLoaderF @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") public void should_Accept_objects_with_a_complex_key(TestDataLoaderFactory factory) throws ExecutionException, InterruptedException { List> loadCalls = new ArrayList<>(); - DataLoaderOptions options = newOptions().setCacheKeyFunction(getJsonObjectCacheMapFn()); + DataLoaderOptions options = newOptions().setCacheKeyFunction(getJsonObjectCacheMapFn()).build(); DataLoader identityLoader = 
factory.idLoader(options, loadCalls); JsonObject key1 = new JsonObject().put("id", 123); @@ -839,7 +872,7 @@ public void should_Accept_objects_with_a_complex_key(TestDataLoaderFactory facto @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") public void should_Clear_objects_with_complex_key(TestDataLoaderFactory factory) throws ExecutionException, InterruptedException { List> loadCalls = new ArrayList<>(); - DataLoaderOptions options = newOptions().setCacheKeyFunction(getJsonObjectCacheMapFn()); + DataLoaderOptions options = newOptions().setCacheKeyFunction(getJsonObjectCacheMapFn()).build(); DataLoader identityLoader = factory.idLoader(options, loadCalls); JsonObject key1 = new JsonObject().put("id", 123); @@ -864,7 +897,7 @@ public void should_Clear_objects_with_complex_key(TestDataLoaderFactory factory) @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") public void should_Accept_objects_with_different_order_of_keys(TestDataLoaderFactory factory) throws ExecutionException, InterruptedException { List> loadCalls = new ArrayList<>(); - DataLoaderOptions options = newOptions().setCacheKeyFunction(getJsonObjectCacheMapFn()); + DataLoaderOptions options = newOptions().setCacheKeyFunction(getJsonObjectCacheMapFn()).build(); DataLoader identityLoader = factory.idLoader(options, loadCalls); JsonObject key1 = new JsonObject().put("a", 123).put("b", 321); @@ -887,7 +920,7 @@ public void should_Accept_objects_with_different_order_of_keys(TestDataLoaderFac @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") public void should_Allow_priming_the_cache_with_an_object_key(TestDataLoaderFactory factory) throws ExecutionException, InterruptedException { List> loadCalls = new ArrayList<>(); - DataLoaderOptions options = newOptions().setCacheKeyFunction(getJsonObjectCacheMapFn()); + DataLoaderOptions options = newOptions().setCacheKeyFunction(getJsonObjectCacheMapFn()).build(); DataLoader identityLoader = factory.idLoader(options, loadCalls); JsonObject key1 = new JsonObject().put("id", 123); @@ -910,7 +943,7 @@ public void should_Allow_priming_the_cache_with_an_object_key(TestDataLoaderFact public void should_Accept_a_custom_cache_map_implementation(TestDataLoaderFactory factory) throws ExecutionException, InterruptedException { CustomCacheMap customMap = new CustomCacheMap(); List> loadCalls = new ArrayList<>(); - DataLoaderOptions options = newOptions().setCacheMap(customMap); + DataLoaderOptions options = newOptions().setCacheMap(customMap).build(); DataLoader identityLoader = factory.idLoader(options, loadCalls); // Fetches as expected @@ -949,7 +982,7 @@ public void should_Accept_a_custom_cache_map_implementation(TestDataLoaderFactor assertThat(future2b.get(), equalTo("b")); assertThat(loadCalls, equalTo(asList(asList("a", "b"), singletonList("c"), singletonList("b")))); - assertArrayEquals(customMap.stash.keySet().toArray(), asList("a", "c", "b").toArray()); + assertEquals(customMap.stash.keySet(), asSet("a", "c", "b")); // Supports clear all @@ -961,7 +994,7 @@ public void should_Accept_a_custom_cache_map_implementation(TestDataLoaderFactor @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") public void should_degrade_gracefully_if_cache_get_throws(TestDataLoaderFactory factory) { CacheMap cache = new ThrowingCacheMap(); - DataLoaderOptions options = newOptions().setCachingEnabled(true).setCacheMap(cache); + DataLoaderOptions options = 
newOptions().setCachingEnabled(true).setCacheMap(cache).build(); List> loadCalls = new ArrayList<>(); DataLoader identityLoader = factory.idLoader(options, loadCalls); @@ -976,7 +1009,7 @@ public void should_degrade_gracefully_if_cache_get_throws(TestDataLoaderFactory @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") public void batching_disabled_should_dispatch_immediately(TestDataLoaderFactory factory) { List> loadCalls = new ArrayList<>(); - DataLoaderOptions options = newOptions().setBatchingEnabled(false); + DataLoaderOptions options = newOptions().setBatchingEnabled(false).build(); DataLoader identityLoader = factory.idLoader(options, loadCalls); CompletableFuture fa = identityLoader.load("A"); @@ -1005,7 +1038,7 @@ public void batching_disabled_should_dispatch_immediately(TestDataLoaderFactory @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") public void batching_disabled_and_caching_disabled_should_dispatch_immediately_and_forget(TestDataLoaderFactory factory) { List> loadCalls = new ArrayList<>(); - DataLoaderOptions options = newOptions().setBatchingEnabled(false).setCachingEnabled(false); + DataLoaderOptions options = newOptions().setBatchingEnabled(false).setCachingEnabled(false).build(); DataLoader identityLoader = factory.idLoader(options, loadCalls); CompletableFuture fa = identityLoader.load("A"); @@ -1037,7 +1070,7 @@ public void batching_disabled_and_caching_disabled_should_dispatch_immediately_a @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") public void batches_multiple_requests_with_max_batch_size(TestDataLoaderFactory factory) { List> loadCalls = new ArrayList<>(); - DataLoader identityLoader = factory.idLoader(newOptions().setMaxBatchSize(2), loadCalls); + DataLoader identityLoader = factory.idLoader(newOptions().setMaxBatchSize(2).build(), loadCalls); CompletableFuture f1 = identityLoader.load(1); CompletableFuture f2 = identityLoader.load(2); @@ -1059,7 +1092,7 @@ public void batches_multiple_requests_with_max_batch_size(TestDataLoaderFactory @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") public void can_split_max_batch_sizes_correctly(TestDataLoaderFactory factory) { List> loadCalls = new ArrayList<>(); - DataLoader identityLoader = factory.idLoader(newOptions().setMaxBatchSize(5), loadCalls); + DataLoader identityLoader = factory.idLoader(newOptions().setMaxBatchSize(5).build(), loadCalls); for (int i = 0; i < 21; i++) { identityLoader.load(i); @@ -1082,7 +1115,7 @@ public void can_split_max_batch_sizes_correctly(TestDataLoaderFactory factory) { @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") public void should_Batch_loads_occurring_within_futures(TestDataLoaderFactory factory) { List> loadCalls = new ArrayList<>(); - DataLoader identityLoader = factory.idLoader(newOptions(), loadCalls); + DataLoader identityLoader = factory.idLoader(newDefaultOptions(), loadCalls); Supplier nullValue = () -> null; @@ -1200,6 +1233,31 @@ public void when_values_size_are_more_then_key_size(TestDataLoaderFactory factor } } + @ParameterizedTest + @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") + public void should_Support_loading_values_with_context(TestDataLoaderFactory factory) { + AtomicReference environmentREF = new AtomicReference<>(); + DataLoader identityLoader = factory.idLoaderWithContext(new DataLoaderOptions(), new ArrayList<>(), environmentREF); + + 
identityLoader.load(1, "ctx1"); + identityLoader.load(2, "ctx2"); + identityLoader.loadMany(List.of(3, 4), List.of("ctx3", "ctx4")); + + CompletableFuture> cf = identityLoader.dispatch(); + await().atMost(Duration.FIVE_SECONDS).until(cf::isDone); + + assertThat(cf.toCompletableFuture().join(), equalTo(asList(1, 2, 3, 4))); + + Map keyContexts = environmentREF.get().getKeyContexts(); + assertThat(keyContexts, equalTo(Map.of( + 1, "ctx1", + 2, "ctx2", + 3, "ctx3", + 4, "ctx4" + ))); + } + + @Test public void can_call_a_loader_from_a_loader() throws Exception { List> deepLoadCalls = new ArrayList<>(); diff --git a/src/test/java/org/dataloader/DataLoaderValueCacheTest.java b/src/test/java/org/dataloader/DataLoaderValueCacheTest.java index 732febe..24d111b 100644 --- a/src/test/java/org/dataloader/DataLoaderValueCacheTest.java +++ b/src/test/java/org/dataloader/DataLoaderValueCacheTest.java @@ -36,7 +36,7 @@ public class DataLoaderValueCacheTest { @MethodSource("org.dataloader.fixtures.parameterized.TestDataLoaderFactories#get") public void test_by_default_we_have_no_value_caching(TestDataLoaderFactory factory) { List> loadCalls = new ArrayList<>(); - DataLoaderOptions options = newOptions(); + DataLoaderOptions options = newOptions().build(); DataLoader identityLoader = factory.idLoader(options, loadCalls); CompletableFuture fA = identityLoader.load("a"); @@ -72,7 +72,7 @@ public void test_by_default_we_have_no_value_caching(TestDataLoaderFactory facto public void should_accept_a_remote_value_store_for_caching(TestDataLoaderFactory factory) { CustomValueCache customValueCache = new CustomValueCache(); List> loadCalls = new ArrayList<>(); - DataLoaderOptions options = newOptions().setValueCache(customValueCache); + DataLoaderOptions options = newOptions().setValueCache(customValueCache).build(); DataLoader identityLoader = factory.idLoader(options, loadCalls); // Fetches as expected @@ -127,7 +127,7 @@ public void can_use_caffeine_for_caching(TestDataLoaderFactory factory) { ValueCache caffeineValueCache = new CaffeineValueCache(caffeineCache); List> loadCalls = new ArrayList<>(); - DataLoaderOptions options = newOptions().setValueCache(caffeineValueCache); + DataLoaderOptions options = newOptions().setValueCache(caffeineValueCache).build(); DataLoader identityLoader = factory.idLoader(options, loadCalls); // Fetches as expected @@ -170,7 +170,7 @@ public CompletableFuture get(String key) { customValueCache.set("b", "From Cache"); List> loadCalls = new ArrayList<>(); - DataLoaderOptions options = newOptions().setValueCache(customValueCache); + DataLoaderOptions options = newOptions().setValueCache(customValueCache).build(); DataLoader identityLoader = factory.idLoader(options, loadCalls); CompletableFuture fA = identityLoader.load("a"); @@ -198,7 +198,7 @@ public CompletableFuture set(String key, Object value) { }; List> loadCalls = new ArrayList<>(); - DataLoaderOptions options = newOptions().setValueCache(customValueCache); + DataLoaderOptions options = newOptions().setValueCache(customValueCache).build(); DataLoader identityLoader = factory.idLoader(options, loadCalls); CompletableFuture fA = identityLoader.load("a"); @@ -237,7 +237,7 @@ public CompletableFuture get(String key) { List> loadCalls = new ArrayList<>(); - DataLoaderOptions options = newOptions().setValueCache(customValueCache); + DataLoaderOptions options = newOptions().setValueCache(customValueCache).build(); DataLoader identityLoader = factory.idLoader(options, loadCalls); CompletableFuture fA = identityLoader.load("a"); @@ 
-279,7 +279,7 @@ public CompletableFuture>> getValues(List keys) { List> loadCalls = new ArrayList<>(); - DataLoaderOptions options = newOptions().setValueCache(customValueCache); + DataLoaderOptions options = newOptions().setValueCache(customValueCache).build(); DataLoader identityLoader = factory.idLoader(options, loadCalls); CompletableFuture fA = identityLoader.load("a"); @@ -323,7 +323,7 @@ public CompletableFuture>> getValues(List keys) { }; List> loadCalls = new ArrayList<>(); - DataLoaderOptions options = newOptions().setValueCache(customValueCache); + DataLoaderOptions options = newOptions().setValueCache(customValueCache).build(); DataLoader identityLoader = factory.idLoader(options, loadCalls); CompletableFuture fA = identityLoader.load("a"); @@ -359,7 +359,7 @@ public CompletableFuture get(String key) { }; List> loadCalls = new ArrayList<>(); - DataLoaderOptions options = newOptions().setValueCache(customValueCache).setCachingEnabled(false); + DataLoaderOptions options = newOptions().setValueCache(customValueCache).setCachingEnabled(false).build(); DataLoader identityLoader = factory.idLoader(options, loadCalls); CompletableFuture fA = identityLoader.load("a"); @@ -403,7 +403,7 @@ public CompletableFuture> setValues(List keys, List customValueCache.asMap().put("c", "cachedC"); List> loadCalls = new ArrayList<>(); - DataLoaderOptions options = newOptions().setValueCache(customValueCache).setCachingEnabled(true); + DataLoaderOptions options = newOptions().setValueCache(customValueCache).setCachingEnabled(true).build(); DataLoader identityLoader = factory.idLoader(options, loadCalls); CompletableFuture fA = identityLoader.load("a"); @@ -444,7 +444,7 @@ public CompletableFuture> setValues(List keys, List customValueCache.asMap().put("a", "cachedA"); List> loadCalls = new ArrayList<>(); - DataLoaderOptions options = newOptions().setValueCache(customValueCache).setCachingEnabled(true).setBatchingEnabled(false); + DataLoaderOptions options = newOptions().setValueCache(customValueCache).setCachingEnabled(true).setBatchingEnabled(false).build(); DataLoader identityLoader = factory.idLoader(options, loadCalls); CompletableFuture fA = identityLoader.load("a"); diff --git a/src/test/java/org/dataloader/DelegatingDataLoaderTest.java b/src/test/java/org/dataloader/DelegatingDataLoaderTest.java index 9103eca..0f51b5f 100644 --- a/src/test/java/org/dataloader/DelegatingDataLoaderTest.java +++ b/src/test/java/org/dataloader/DelegatingDataLoaderTest.java @@ -2,17 +2,21 @@ import org.dataloader.fixtures.TestKit; import org.dataloader.fixtures.parameterized.DelegatingDataLoaderFactory; -import org.jspecify.annotations.NonNull; +import org.jspecify.annotations.NullMarked; import org.jspecify.annotations.Nullable; import org.junit.jupiter.api.Test; import java.util.List; +import java.util.Map; import java.util.concurrent.CompletableFuture; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Collectors; import static org.awaitility.Awaitility.await; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.jupiter.api.Assertions.assertNotNull; /** * There are WAY more tests via the {@link DelegatingDataLoaderFactory} @@ -30,14 +34,37 @@ void canUnwrapDataLoaders() { } @Test + @NullMarked void canCreateAClassOk() { DataLoader rawLoader = TestKit.idLoader(); DelegatingDataLoader delegatingDataLoader = new DelegatingDataLoader<>(rawLoader) { - @Override - public 
CompletableFuture load(@NonNull String key, @Nullable Object keyContext) { - CompletableFuture cf = super.load(key, keyContext); + private CompletableFuture enhance(CompletableFuture cf) { return cf.thenApply(v -> "|" + v + "|"); } + + private CompletableFuture> enhanceList(CompletableFuture> cf) { + return cf.thenApply(v -> v.stream().map(s -> "|" + s + "|").collect(Collectors.toList())); + } + + @Override + public CompletableFuture load(String key, @Nullable Object keyContext) { + return enhance(super.load(key, keyContext)); + } + + @Override + public CompletableFuture load(String key) { + return enhance(super.load(key)); + } + + @Override + public CompletableFuture> loadMany(List keys) { + return enhanceList(super.loadMany(keys)); + } + + @Override + public CompletableFuture> loadMany(List keys, List keyContexts) { + return enhanceList(super.loadMany(keys, keyContexts)); + } }; assertThat(delegatingDataLoader.getDelegate(), is(rawLoader)); @@ -61,4 +88,79 @@ public CompletableFuture load(@NonNull String key, @Nullable Object keyC assertThat(delegatingDataLoader.getIfCompleted("A").isEmpty(), equalTo(false)); assertThat(delegatingDataLoader.getIfCompleted("X").isEmpty(), equalTo(true)); } + + @Test + void can_delegate_simple_properties() { + DataLoaderOptions options = DataLoaderOptions.newOptions().build(); + BatchLoader loadFunction = CompletableFuture::completedFuture; + + DataLoader rawLoader = DataLoaderFactory.newDataLoader("name", loadFunction, options); + DelegatingDataLoader delegate = new DelegatingDataLoader<>(rawLoader); + + assertNotNull(delegate.getName()); + assertThat(delegate.getName(), equalTo("name")); + assertThat(delegate.getOptions(), equalTo(options)); + assertThat(delegate.getBatchLoadFunction(), equalTo(loadFunction)); + } + + @NullMarked + @Test + void can_create_a_delegate_class_that_has_post_side_effects() { + DataLoaderOptions options = DataLoaderOptions.newOptions().build(); + BatchLoader loadFunction = CompletableFuture::completedFuture; + DataLoader rawLoader = DataLoaderFactory.newDataLoader("name", loadFunction, options); + + AtomicInteger loadCalled = new AtomicInteger(0); + AtomicInteger loadManyCalled = new AtomicInteger(0); + AtomicInteger loadManyMapCalled = new AtomicInteger(0); + DelegatingDataLoader delegate = new DelegatingDataLoader<>(rawLoader) { + + @Override + public CompletableFuture load(String key) { + CompletableFuture cf = super.load(key); + loadCalled.incrementAndGet(); + return cf; + } + + @Override + public CompletableFuture load(String key, @Nullable Object keyContext) { + CompletableFuture cf = super.load(key, keyContext); + loadCalled.incrementAndGet(); + return cf; + } + + @Override + public CompletableFuture> loadMany(List keys, List keyContexts) { + CompletableFuture> cf = super.loadMany(keys, keyContexts); + loadManyCalled.incrementAndGet(); + return cf; + } + + @Override + public CompletableFuture> loadMany(List keys) { + CompletableFuture> cf = super.loadMany(keys); + loadManyCalled.incrementAndGet(); + return cf; + } + + @Override + public CompletableFuture> loadMany(Map keysAndContexts) { + CompletableFuture> cf = super.loadMany(keysAndContexts); + loadManyMapCalled.incrementAndGet(); + return cf; + } + }; + + + delegate.load("L1"); + delegate.load("L2", null); + delegate.loadMany(List.of("LM1", "LM2"), List.of()); + delegate.loadMany(List.of("LM3")); + delegate.loadMany(Map.of("LMM1", "kc1", "LMM2", "kc2")); + + assertNotNull(delegate.getDelegate()); + assertThat(loadCalled.get(), equalTo(2)); + 
assertThat(loadManyCalled.get(), equalTo(2)); + assertThat(loadManyMapCalled.get(), equalTo(1)); + } } \ No newline at end of file diff --git a/src/test/java/org/dataloader/fixtures/CustomCacheMap.java b/src/test/java/org/dataloader/fixtures/CustomCacheMap.java index 695da5e..6e20f68 100644 --- a/src/test/java/org/dataloader/fixtures/CustomCacheMap.java +++ b/src/test/java/org/dataloader/fixtures/CustomCacheMap.java @@ -3,16 +3,15 @@ import org.dataloader.CacheMap; import java.util.Collection; -import java.util.LinkedHashMap; -import java.util.Map; import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ConcurrentHashMap; public class CustomCacheMap implements CacheMap { - public Map> stash; + public ConcurrentHashMap> stash; public CustomCacheMap() { - stash = new LinkedHashMap<>(); + stash = new ConcurrentHashMap<>(); } @Override @@ -31,9 +30,8 @@ public Collection> getAll() { } @Override - public CacheMap set(String key, CompletableFuture value) { - stash.put(key, value); - return this; + public CompletableFuture putIfAbsentAtomically(String key, CompletableFuture value) { + return stash.putIfAbsent(key, value); } @Override @@ -47,4 +45,9 @@ public CacheMap clear() { stash.clear(); return this; } + + @Override + public int size() { + return stash.size(); + } } diff --git a/src/test/java/org/dataloader/fixtures/TestKit.java b/src/test/java/org/dataloader/fixtures/TestKit.java index 04ec5e5..e6ba319 100644 --- a/src/test/java/org/dataloader/fixtures/TestKit.java +++ b/src/test/java/org/dataloader/fixtures/TestKit.java @@ -11,8 +11,8 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.LinkedHashSet; import java.util.HashMap; +import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -64,10 +64,18 @@ public static DataLoader idLoader() { return idLoader(null, new ArrayList<>()); } + public static DataLoader idLoader(String name) { + return idLoader(name, null, new ArrayList<>()); + } + public static DataLoader idLoader(DataLoaderOptions options, List> loadCalls) { return DataLoaderFactory.newDataLoader(keysAsValues(loadCalls), options); } + public static DataLoader idLoader(String name, DataLoaderOptions options, List> loadCalls) { + return DataLoaderFactory.newDataLoader(name, keysAsValues(loadCalls), options); + } + public static Collection listFrom(int i, int max) { List ints = new ArrayList<>(); for (int j = i; j < max; j++) { @@ -104,7 +112,7 @@ public static Set asSet(Collection elements) { public static boolean areAllDone(CompletableFuture... cfs) { for (CompletableFuture cf : cfs) { - if (! 
cf.isDone()) { + if (!cf.isDone()) { return false; } } diff --git a/src/test/java/org/dataloader/fixtures/parameterized/DelegatingDataLoaderFactory.java b/src/test/java/org/dataloader/fixtures/parameterized/DelegatingDataLoaderFactory.java index 0cbd3f3..8d1f815 100644 --- a/src/test/java/org/dataloader/fixtures/parameterized/DelegatingDataLoaderFactory.java +++ b/src/test/java/org/dataloader/fixtures/parameterized/DelegatingDataLoaderFactory.java @@ -1,5 +1,6 @@ package org.dataloader.fixtures.parameterized; +import org.dataloader.BatchLoaderEnvironment; import org.dataloader.DataLoader; import org.dataloader.DataLoaderOptions; import org.dataloader.DelegatingDataLoader; @@ -8,6 +9,7 @@ import java.util.ArrayList; import java.util.Collection; import java.util.List; +import java.util.concurrent.atomic.AtomicReference; public class DelegatingDataLoaderFactory implements TestDataLoaderFactory { // its delegates all the way down to the turtles @@ -38,6 +40,11 @@ public DataLoader idLoader(DataLoaderOptions options, List DataLoader idLoaderWithContext(DataLoaderOptions options, List> loadCalls, AtomicReference environmentREF) { + return mkDelegateDataLoader(delegateFactory.idLoaderWithContext(options, loadCalls, environmentREF)); + } + @Override public DataLoader idLoaderDelayed(DataLoaderOptions options, List> loadCalls, Duration delay) { return mkDelegateDataLoader(delegateFactory.idLoaderDelayed(options, loadCalls, delay)); diff --git a/src/test/java/org/dataloader/fixtures/parameterized/ListDataLoaderFactory.java b/src/test/java/org/dataloader/fixtures/parameterized/ListDataLoaderFactory.java index 0644d3c..8ec69d7 100644 --- a/src/test/java/org/dataloader/fixtures/parameterized/ListDataLoaderFactory.java +++ b/src/test/java/org/dataloader/fixtures/parameterized/ListDataLoaderFactory.java @@ -1,5 +1,6 @@ package org.dataloader.fixtures.parameterized; +import org.dataloader.BatchLoaderEnvironment; import org.dataloader.DataLoader; import org.dataloader.DataLoaderOptions; import org.dataloader.fixtures.TestKit; @@ -9,6 +10,7 @@ import java.util.Collection; import java.util.List; import java.util.concurrent.CompletableFuture; +import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; import static java.util.concurrent.CompletableFuture.completedFuture; @@ -23,6 +25,15 @@ public DataLoader idLoader(DataLoaderOptions options, List DataLoader idLoaderWithContext(DataLoaderOptions options, List> loadCalls, AtomicReference environmentREF) { + return newDataLoader((keys, env) -> { + environmentREF.set(env); + loadCalls.add(new ArrayList<>(keys)); + return completedFuture(keys); + }, options); + } + @Override public DataLoader idLoaderDelayed(DataLoaderOptions options, List> loadCalls, Duration delay) { return newDataLoader(keys -> CompletableFuture.supplyAsync(() -> { diff --git a/src/test/java/org/dataloader/fixtures/parameterized/MappedDataLoaderFactory.java b/src/test/java/org/dataloader/fixtures/parameterized/MappedDataLoaderFactory.java index e7c47ec..f1c548e 100644 --- a/src/test/java/org/dataloader/fixtures/parameterized/MappedDataLoaderFactory.java +++ b/src/test/java/org/dataloader/fixtures/parameterized/MappedDataLoaderFactory.java @@ -1,5 +1,6 @@ package org.dataloader.fixtures.parameterized; +import org.dataloader.BatchLoaderEnvironment; import org.dataloader.DataLoader; import org.dataloader.DataLoaderOptions; import org.dataloader.fixtures.TestKit; @@ -11,10 +12,10 @@ import java.util.List; import java.util.Map; import 
java.util.concurrent.CompletableFuture; +import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; import static java.util.concurrent.CompletableFuture.completedFuture; -import static org.dataloader.DataLoaderFactory.newDataLoader; import static org.dataloader.DataLoaderFactory.newMappedDataLoader; import static org.dataloader.fixtures.TestKit.futureError; @@ -31,6 +32,17 @@ public DataLoader idLoader( }, options); } + @Override + public DataLoader idLoaderWithContext(DataLoaderOptions options, List> loadCalls, AtomicReference environmentREF) { + return newMappedDataLoader((keys, environment) -> { + environmentREF.set(environment); + loadCalls.add(new ArrayList<>(keys)); + Map map = new HashMap<>(); + keys.forEach(k -> map.put(k, k)); + return completedFuture(map); + }, options); + } + @Override public DataLoader idLoaderDelayed( DataLoaderOptions options, List> loadCalls, Duration delay) { diff --git a/src/test/java/org/dataloader/fixtures/parameterized/MappedPublisherDataLoaderFactory.java b/src/test/java/org/dataloader/fixtures/parameterized/MappedPublisherDataLoaderFactory.java index fa920cf..3a0f54e 100644 --- a/src/test/java/org/dataloader/fixtures/parameterized/MappedPublisherDataLoaderFactory.java +++ b/src/test/java/org/dataloader/fixtures/parameterized/MappedPublisherDataLoaderFactory.java @@ -1,5 +1,6 @@ package org.dataloader.fixtures.parameterized; +import org.dataloader.BatchLoaderEnvironment; import org.dataloader.DataLoader; import org.dataloader.DataLoaderOptions; import org.dataloader.Try; @@ -12,16 +13,13 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.concurrent.CompletableFuture; -import java.util.stream.Collectors; +import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Stream; import static java.util.stream.Collectors.toList; -import static java.util.stream.Collectors.toSet; import static org.dataloader.DataLoaderFactory.newMappedPublisherDataLoader; import static org.dataloader.DataLoaderFactory.newMappedPublisherDataLoaderWithTry; -import static org.dataloader.DataLoaderFactory.newPublisherDataLoader; public class MappedPublisherDataLoaderFactory implements TestDataLoaderFactory, TestReactiveDataLoaderFactory { @@ -36,6 +34,18 @@ public DataLoader idLoader( }, options); } + @Override + public DataLoader idLoaderWithContext(DataLoaderOptions options, List> loadCalls, AtomicReference environmentREF) { + return newMappedPublisherDataLoader((keys, subscriber, environment) -> { + environmentREF.set(environment); + + loadCalls.add(new ArrayList<>(keys)); + Map map = new HashMap<>(); + keys.forEach(k -> map.put(k, k)); + Flux.fromIterable(map.entrySet()).subscribe(subscriber); + }, options); + } + @Override public DataLoader idLoaderDelayed( DataLoaderOptions options, List> loadCalls, Duration delay) { diff --git a/src/test/java/org/dataloader/fixtures/parameterized/PublisherDataLoaderFactory.java b/src/test/java/org/dataloader/fixtures/parameterized/PublisherDataLoaderFactory.java index 2049719..c8e8b67 100644 --- a/src/test/java/org/dataloader/fixtures/parameterized/PublisherDataLoaderFactory.java +++ b/src/test/java/org/dataloader/fixtures/parameterized/PublisherDataLoaderFactory.java @@ -1,5 +1,6 @@ package org.dataloader.fixtures.parameterized; +import org.dataloader.BatchLoaderEnvironment; import org.dataloader.DataLoader; import org.dataloader.DataLoaderOptions; import org.dataloader.Try; @@ -11,9 +12,9 @@ import java.util.Collection; import 
java.util.List; import java.util.concurrent.CompletableFuture; +import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Stream; -import static org.dataloader.DataLoaderFactory.newDataLoader; import static org.dataloader.DataLoaderFactory.newPublisherDataLoader; import static org.dataloader.DataLoaderFactory.newPublisherDataLoaderWithTry; @@ -28,6 +29,15 @@ public DataLoader idLoader( }, options); } + @Override + public DataLoader idLoaderWithContext(DataLoaderOptions options, List> loadCalls, AtomicReference environmentREF) { + return newPublisherDataLoader((keys, subscriber, environment) -> { + environmentREF.set(environment); + loadCalls.add(new ArrayList<>(keys)); + Flux.fromIterable(keys).subscribe(subscriber); + }, options); + } + @Override public DataLoader idLoaderDelayed(DataLoaderOptions options, List> loadCalls, Duration delay) { return newPublisherDataLoader((keys, subscriber) -> { diff --git a/src/test/java/org/dataloader/fixtures/parameterized/TestDataLoaderFactory.java b/src/test/java/org/dataloader/fixtures/parameterized/TestDataLoaderFactory.java index 789b136..3c584fd 100644 --- a/src/test/java/org/dataloader/fixtures/parameterized/TestDataLoaderFactory.java +++ b/src/test/java/org/dataloader/fixtures/parameterized/TestDataLoaderFactory.java @@ -1,5 +1,6 @@ package org.dataloader.fixtures.parameterized; +import org.dataloader.BatchLoaderEnvironment; import org.dataloader.DataLoader; import org.dataloader.DataLoaderOptions; @@ -7,6 +8,7 @@ import java.util.ArrayList; import java.util.Collection; import java.util.List; +import java.util.concurrent.atomic.AtomicReference; public interface TestDataLoaderFactory { DataLoader idLoader(DataLoaderOptions options, List> loadCalls); @@ -23,6 +25,11 @@ public interface TestDataLoaderFactory { DataLoader idLoaderReturnsTooMany(int howManyMore, DataLoaderOptions options, ArrayList loadCalls); + // similar to above but batch loaders with context + + DataLoader idLoaderWithContext(DataLoaderOptions options, List> loadCalls, AtomicReference environmentREF); + + // Convenience methods default DataLoader idLoader(DataLoaderOptions options) { diff --git a/src/test/java/org/dataloader/instrumentation/ChainedDataLoaderInstrumentationTest.java b/src/test/java/org/dataloader/instrumentation/ChainedDataLoaderInstrumentationTest.java index 0d5ddb1..28cef70 100644 --- a/src/test/java/org/dataloader/instrumentation/ChainedDataLoaderInstrumentationTest.java +++ b/src/test/java/org/dataloader/instrumentation/ChainedDataLoaderInstrumentationTest.java @@ -14,7 +14,7 @@ import java.util.concurrent.CompletableFuture; import static org.awaitility.Awaitility.await; -import static org.dataloader.DataLoaderOptions.newOptionsBuilder; +import static org.dataloader.DataLoaderOptions.newOptions; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; @@ -37,7 +37,7 @@ void canChainTogetherZeroInstrumentation() { // just to prove its useless but harmless ChainedDataLoaderInstrumentation chainedItn = new ChainedDataLoaderInstrumentation(); - DataLoaderOptions options = newOptionsBuilder().setInstrumentation(chainedItn).build(); + DataLoaderOptions options = newOptions().setInstrumentation(chainedItn).build(); DataLoader dl = DataLoaderFactory.newDataLoader(TestKit.keysAsValues(), options); @@ -57,7 +57,7 @@ void canChainTogetherOneInstrumentation() { ChainedDataLoaderInstrumentation chainedItn = new ChainedDataLoaderInstrumentation() .add(capturingA); - DataLoaderOptions options = 
newOptionsBuilder().setInstrumentation(chainedItn).build(); + DataLoaderOptions options = newOptions().setInstrumentation(chainedItn).build(); DataLoader dl = DataLoaderFactory.newDataLoader(TestKit.keysAsValues(), options); @@ -88,7 +88,7 @@ public void canChainTogetherManyInstrumentationsWithDifferentBatchLoaders(TestDa .add(capturingB) .add(capturingButReturnsNull); - DataLoaderOptions options = newOptionsBuilder().setInstrumentation(chainedItn).build(); + DataLoaderOptions options = newOptions().setInstrumentation(chainedItn).build(); DataLoader dl = factory.idLoader(options); diff --git a/src/test/java/org/dataloader/instrumentation/DataLoaderInstrumentationTest.java b/src/test/java/org/dataloader/instrumentation/DataLoaderInstrumentationTest.java index 97f21d3..3e2b94f 100644 --- a/src/test/java/org/dataloader/instrumentation/DataLoaderInstrumentationTest.java +++ b/src/test/java/org/dataloader/instrumentation/DataLoaderInstrumentationTest.java @@ -48,9 +48,10 @@ public DataLoaderInstrumentationContext> beginBatchLoader(DataLoader dl = DataLoaderFactory.newDataLoader(snoozingBatchLoader, options); @@ -109,9 +110,10 @@ public DataLoaderInstrumentationContext> beginBatchLoader(DataLoader dl = DataLoaderFactory.newDataLoader(snoozingBatchLoader, options); @@ -155,7 +157,7 @@ public void onCompleted(List result, Throwable t) { } }; - DataLoaderOptions options = new DataLoaderOptions().setInstrumentation(instrumentation); + DataLoaderOptions options = DataLoaderOptions.newOptions().setInstrumentation(instrumentation).build(); DataLoader dl = DataLoaderFactory.newDataLoader(snoozingBatchLoader, options); dl.load("A", "kcA"); diff --git a/src/test/java/org/dataloader/instrumentation/DataLoaderRegistryInstrumentationTest.java b/src/test/java/org/dataloader/instrumentation/DataLoaderRegistryInstrumentationTest.java index 49ccf0e..e672d80 100644 --- a/src/test/java/org/dataloader/instrumentation/DataLoaderRegistryInstrumentationTest.java +++ b/src/test/java/org/dataloader/instrumentation/DataLoaderRegistryInstrumentationTest.java @@ -32,9 +32,9 @@ public class DataLoaderRegistryInstrumentationTest { @BeforeEach void setUp() { - dlX = TestKit.idLoader(); - dlY = TestKit.idLoader(); - dlZ = TestKit.idLoader(); + dlX = TestKit.idLoader("X"); + dlY = TestKit.idLoader("Y"); + dlZ = TestKit.idLoader("Z"); instrA = new CapturingInstrumentation("A"); instrB = new CapturingInstrumentation("B"); chainedInstrA = new ChainedDataLoaderInstrumentation().add(instrA); @@ -120,9 +120,9 @@ void wontDoAnyThingIfThereIsNoRegistryInstrumentation() { @Test void wontDoAnyThingIfThereTheyAreTheSameInstrumentationAlready() { - DataLoader newX = dlX.transform(builder -> builder.options(dlX.getOptions().setInstrumentation(instrA))); - DataLoader newY = dlX.transform(builder -> builder.options(dlY.getOptions().setInstrumentation(instrA))); - DataLoader newZ = dlX.transform(builder -> builder.options(dlZ.getOptions().setInstrumentation(instrA))); + DataLoader newX = dlX.transform(builder -> builder.options(dlX.getOptions().transform(b-> b.setInstrumentation(instrA)))); + DataLoader newY = dlY.transform(builder -> builder.options(dlY.getOptions().transform(b-> b.setInstrumentation(instrA)))); + DataLoader newZ = dlZ.transform(builder -> builder.options(dlZ.getOptions().transform(b-> b.setInstrumentation(instrA)))); DataLoaderRegistry registry = DataLoaderRegistry.newRegistry() .instrumentation(instrA) .register("X", newX) @@ -145,7 +145,7 @@ void wontDoAnyThingIfThereTheyAreTheSameInstrumentationAlready() { @Test void 
ifTheDLHasAInstrumentationThenItsTurnedIntoAChainedOne() { - DataLoaderOptions options = dlX.getOptions().setInstrumentation(instrA); + DataLoaderOptions options = dlX.getOptions().transform(b -> b.setInstrumentation(instrA)); DataLoader newX = dlX.transform(builder -> builder.options(options)); DataLoaderRegistry registry = DataLoaderRegistry.newRegistry() @@ -164,7 +164,7 @@ void ifTheDLHasAInstrumentationThenItsTurnedIntoAChainedOne() { @Test void chainedInstrumentationsWillBeCombined() { - DataLoaderOptions options = dlX.getOptions().setInstrumentation(chainedInstrB); + DataLoaderOptions options = dlX.getOptions().transform(b -> b.setInstrumentation(chainedInstrB)); DataLoader newX = dlX.transform(builder -> builder.options(options)); DataLoaderRegistry registry = DataLoaderRegistry.newRegistry() diff --git a/src/test/java/org/dataloader/performance/AtomicVsAdder.java b/src/test/java/org/dataloader/performance/AtomicVsAdder.java new file mode 100644 index 0000000..ee5e02a --- /dev/null +++ b/src/test/java/org/dataloader/performance/AtomicVsAdder.java @@ -0,0 +1,111 @@ +package org.dataloader.performance; + +import java.time.Duration; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.LongAdder; + +public class AtomicVsAdder { + + private static final ExecutorService EXECUTOR = Executors.newCachedThreadPool(); + + public static void main(final String[] args) throws Exception { + // knobs + final var iterationsList = List.of(1 << 20L, 1 << 24L); + final var numberOfThreadsList = List.of(1, 2, 4, 8, 16); + final var strategies = List.of(new LongAdderStrategy(), new AtomicLongStrategy()); + + // test + System.out.println("testing with #cpu=" + Runtime.getRuntime().availableProcessors()); + for (int iterations : iterationsList) { + for (int numberOfThreads : numberOfThreadsList) { + for (Strategy strategy : strategies) { + performTest(iterations, numberOfThreads, strategy); + } + } + } + + EXECUTOR.shutdownNow(); + + } + + private static void performTest(final long iterations, final int numberOfThreads, Strategy strategy) throws Exception { + final List> futures = new ArrayList<>(); + System.out.println("start test with " + iterations + " iterations using " + numberOfThreads + " threads and strategy " + strategy.getClass().getSimpleName()); + final long start = System.nanoTime(); + + for (int i = 0; i < numberOfThreads; i++) { + Future submit = EXECUTOR.submit(() -> concurrentWork(strategy, iterations)); + futures.add(submit); + } + for (final Future future : futures) { + future.get(); // wait for all + } + final long end = System.nanoTime(); + System.out.println("done in " + Duration.ofNanos(end - start).toMillis() + "ms => result " + strategy.get()); + System.out.println("----"); + strategy.reset(); + } + + @SuppressWarnings("SameParameterValue") + private static void concurrentWork(final Strategy strategy, final long iterations) { + long work = iterations; + while (work-- > 0) { + strategy.increment(); + } + } + + interface Strategy { + void increment(); + + long get(); + + void reset(); + } + + static class LongAdderStrategy implements Strategy { + + private LongAdder longAdder = new LongAdder(); + + @Override + public void increment() { + longAdder.increment(); + } + + @Override + public long get() { + return longAdder.sum(); + } + 
+        @Override
+        public void reset() {
+            longAdder = new LongAdder();
+        }
+    }
+
+    static class AtomicLongStrategy implements Strategy {
+
+        private final AtomicLong atomicLong = new AtomicLong(0);
+
+        @Override
+        public void increment() {
+            atomicLong.incrementAndGet();
+        }
+
+        @Override
+        public long get() {
+            return atomicLong.get();
+        }
+
+        @Override
+        public void reset() {
+            atomicLong.set(0);
+        }
+    }
+
+}
\ No newline at end of file
diff --git a/src/test/java/org/dataloader/scheduler/BatchLoaderSchedulerTest.java b/src/test/java/org/dataloader/scheduler/BatchLoaderSchedulerTest.java
index ff9ec8e..b9a7c01 100644
--- a/src/test/java/org/dataloader/scheduler/BatchLoaderSchedulerTest.java
+++ b/src/test/java/org/dataloader/scheduler/BatchLoaderSchedulerTest.java
@@ -83,7 +83,7 @@ private static void commonSetupAndSimpleAsserts(DataLoader ide
     @Test
     public void can_allow_a_simple_scheduler() {
-        DataLoaderOptions options = DataLoaderOptions.newOptions().setBatchLoaderScheduler(immediateScheduling);
+        DataLoaderOptions options = DataLoaderOptions.newOptions().setBatchLoaderScheduler(immediateScheduling).build();
         DataLoader identityLoader = newDataLoader(keysAsValues(), options);
@@ -92,7 +92,7 @@ public void can_allow_a_simple_scheduler() {
     @Test
     public void can_allow_a_simple_scheduler_with_context() {
-        DataLoaderOptions options = DataLoaderOptions.newOptions().setBatchLoaderScheduler(immediateScheduling);
+        DataLoaderOptions options = DataLoaderOptions.newOptions().setBatchLoaderScheduler(immediateScheduling).build();
         DataLoader identityLoader = newDataLoader(keysAsValuesWithContext(), options);
@@ -101,7 +101,7 @@ public void can_allow_a_simple_scheduler_with_context() {
     @Test
     public void can_allow_a_simple_scheduler_with_mapped_batch_load() {
-        DataLoaderOptions options = DataLoaderOptions.newOptions().setBatchLoaderScheduler(immediateScheduling);
+        DataLoaderOptions options = DataLoaderOptions.newOptions().setBatchLoaderScheduler(immediateScheduling).build();
         DataLoader identityLoader = newMappedDataLoader(keysAsMapOfValues(), options);
@@ -110,7 +110,7 @@ public void can_allow_a_simple_scheduler_with_mapped_batch_load() {
     @Test
     public void can_allow_a_simple_scheduler_with_mapped_batch_load_with_context() {
-        DataLoaderOptions options = DataLoaderOptions.newOptions().setBatchLoaderScheduler(immediateScheduling);
+        DataLoaderOptions options = DataLoaderOptions.newOptions().setBatchLoaderScheduler(immediateScheduling).build();
         DataLoader identityLoader = newMappedDataLoader(keysAsMapOfValuesWithContext(), options);
@@ -119,7 +119,7 @@ public void can_allow_a_simple_scheduler_with_mapped_batch_load_with_context() {
     @Test
     public void can_allow_an_async_scheduler() {
-        DataLoaderOptions options = DataLoaderOptions.newOptions().setBatchLoaderScheduler(delayedScheduling(50));
+        DataLoaderOptions options = DataLoaderOptions.newOptions().setBatchLoaderScheduler(delayedScheduling(50)).build();
         DataLoader identityLoader = newDataLoader(keysAsValues(), options);
@@ -160,7 +160,7 @@ public void scheduleBatchPublisher(ScheduledBatchPublisherCall scheduledCall
             });
         }
     };
-        DataLoaderOptions options = DataLoaderOptions.newOptions().setBatchLoaderScheduler(funkyScheduler);
+        DataLoaderOptions options = DataLoaderOptions.newOptions().setBatchLoaderScheduler(funkyScheduler).build();
         DataLoader identityLoader = newDataLoader(keysAsValues(), options);
diff --git a/src/test/kotlin/org/dataloader/KotlinExamples.kt b/src/test/kotlin/org/dataloader/KotlinExamples.kt
new file mode 100644
index 0000000..f53faf4
--- /dev/null
+++ b/src/test/kotlin/org/dataloader/KotlinExamples.kt
@@ -0,0 +1,110 @@
+package org.dataloader
+
+import java.util.concurrent.CompletableFuture
+import org.junit.jupiter.api.Test
+import reactor.core.publisher.Flux
+import java.util.concurrent.CompletableFuture.completedFuture
+import org.dataloader.impl.NoOpValueCache
+
+/**
+ * Some Kotlin code to prove that our JSpecify annotations work here
+ * as expected in Kotlin land. We don't intend to use Kotlin in our tests
+ * or to deliver Kotlin code in the Java artifact.
+ */
+class KotlinExamples {
+
+    @Test
+    fun `basic kotlin test of non nullable value types`() {
+        val batchLoadFunction = BatchLoader<String, String>
+        { keys -> completedFuture(keys.toList()) }
+        val dataLoader: DataLoader<String, String> =
+            DataLoaderFactory.newDataLoader(batchLoadFunction)
+
+        val cfA = dataLoader.load("A")
+        val cfB = dataLoader.load("B")
+
+        dataLoader.dispatch()
+
+        assert(cfA.join().equals("A"))
+        assert(cfB.join().equals("B"))
+    }
+
+    @Test
+    fun `basic kotlin test of nullable value types`() {
+        val batchLoadFunction: BatchLoader<String, String?> = BatchLoader { keys -> completedFuture(keys.toList()) }
+        val dataLoader: DataLoader<String, String?> = DataLoaderFactory.newDataLoader(batchLoadFunction)
+
+        standardNullableAsserts(dataLoader)
+    }
+
+    @Test
+    fun `basic kotlin test of nullable value types in mapped batch loader`() {
+        val batchLoadFunction = MappedBatchLoader<String, String?>
+        { keys -> completedFuture(keys.associateBy({ it })) }
+
+        val dataLoader: DataLoader<String, String?> = DataLoaderFactory.newMappedDataLoader(batchLoadFunction)
+
+        standardNullableAsserts(dataLoader)
+    }
+
+    @Test
+    fun `basic kotlin test of nullable value types in mapped batch loader with context`() {
+        val batchLoadFunction = MappedBatchLoaderWithContext<String, String?>
+        { keys, env -> completedFuture(keys.associateBy({ it })) }
+
+        val dataLoader: DataLoader<String, String?> = DataLoaderFactory.newMappedDataLoader(batchLoadFunction)
+
+        standardNullableAsserts(dataLoader)
+    }
+
+    @Test
+    fun `basic kotlin test of nullable value types in mapped batch publisher`() {
+        val batchLoadFunction = MappedBatchPublisher<String, String?>
+        { keys, subscriber ->
+            val map: Map<String, String?> = keys.associateBy({ it })
+            Flux.fromIterable(map.entries).subscribe(subscriber);
+        }
+
+        val dataLoader: DataLoader<String, String?> = DataLoaderFactory.newMappedPublisherDataLoader(batchLoadFunction)
+
+        standardNullableAsserts(dataLoader)
+    }
+
+    @Test
+    fun `basic kotlin test of nullable value types in mapped batch publisher with context`() {
+        val batchLoadFunction = MappedBatchPublisherWithContext<String, String?>
+        { keys, subscriber, env ->
+            val map: Map<String, String?> = keys.associateBy({ it })
+            Flux.fromIterable(map.entries).subscribe(subscriber);
+        }
+
+        val dataLoader: DataLoader<String, String?> = DataLoaderFactory.newMappedPublisherDataLoader(batchLoadFunction)
+
+        standardNullableAsserts(dataLoader)
+    }
+
+    @Test
+    fun `basic kotlin test of nullable value types in value cache`() {
+        val valueCache = object : ValueCache<String, String?> by NoOpValueCache<String, String?>() {
+            override fun get(key: String): CompletableFuture<String?> = if (key == "null")
+                completedFuture(null)
+            else
+                completedFuture(key)
+        }
+
+        assert(valueCache["key"].get() == "key")
+        assert(valueCache["null"].get() == null)
+    }
+
+    private fun standardNullableAsserts(dataLoader: DataLoader<String, String?>) {
+        val cfA = dataLoader.load("A")
+        val cfB = dataLoader.load("B")
+
+        dataLoader.dispatch()
+
+        assert(cfA.join().equals("A"))
+        assert(cfB.join().equals("B"))
+    }
+
+
+}
\ No newline at end of file
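
Reviewer note: the test changes above move from mutating `DataLoaderOptions` in place to a builder style (`DataLoaderOptions.newOptions()...build()`) plus `options.transform(builder -> ...)` for deriving a changed copy. The following is a minimal, illustrative sketch of that usage pattern, not code from this PR; `setMaxBatchSize(...)` on the builder is an assumed setter used purely for illustration, and only `newOptions()`, `build()`, `transform(...)` and `DataLoaderFactory.newDataLoader(...)` are taken from the diff itself.

```java
import org.dataloader.BatchLoader;
import org.dataloader.DataLoader;
import org.dataloader.DataLoaderFactory;
import org.dataloader.DataLoaderOptions;

import java.util.concurrent.CompletableFuture;

public class OptionsBuilderSketch {

    public static void main(String[] args) {
        // A trivial identity batch loader, in the spirit of the test helpers above.
        BatchLoader<String, String> identityBatchLoader =
                keys -> CompletableFuture.completedFuture(keys);

        // newOptions() now yields a builder that is finished with build(),
        // mirroring the ".build()" calls added in BatchLoaderSchedulerTest.
        // setMaxBatchSize(...) is an assumed builder method, shown only as an example setter.
        DataLoaderOptions options = DataLoaderOptions.newOptions()
                .setMaxBatchSize(50)
                .build();

        // transform(...) derives a new options instance by applying changes to a builder,
        // as in dlX.getOptions().transform(b -> b.setInstrumentation(instrA)) above.
        DataLoaderOptions tweaked = options.transform(builder -> builder.setMaxBatchSize(10));

        DataLoader<String, String> loader = DataLoaderFactory.newDataLoader(identityBatchLoader, tweaked);

        CompletableFuture<String> valueA = loader.load("A");
        loader.dispatch();
        System.out.println(valueA.join()); // expected to print "A"
    }
}
```

Under these assumptions an options instance is treated as immutable once built, and per-loader adjustments go through `transform(...)`, which appears to be why the registry tests no longer call setters directly on the value returned by `getOptions()`.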