diff --git a/.forbiddenapis/banned-signatures-common.txt b/.forbiddenapis/banned-signatures-common.txt index 23203f1247cfc..8e773e90bd467 100644 --- a/.forbiddenapis/banned-signatures-common.txt +++ b/.forbiddenapis/banned-signatures-common.txt @@ -6,3 +6,13 @@ org.jboss.jandex.AnnotationValue#toString() @defaultMessage Replace this by using InputStream.transferTo(OutputStream) org.apache.commons.io.IOUtils#copy(java.io.InputStream,java.io.OutputStream) org.apache.commons.compress.utils.IOUtils#copy(java.io.InputStream,java.io.OutputStream) + +@defaultMessage We should avoid using WildFly Common APIs +org.wildfly.common.Assert +org.wildfly.common.net.** +org.wildfly.common.os.** + +@defaultMessage Use JUnit asserts +io.smallrye.common.Assert.assertTrue(**) +io.smallrye.common.Assert.assertFalse(**) +io.smallrye.common.Assert.assertNotNull(**) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index c066b9b432ddf..a356ecdfc9426 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -36,6 +36,7 @@ updates: - dependency-name: com.google.cloud.tools:jib-core - dependency-name: org.jboss.threads:jboss-threads - dependency-name: org.jboss.marshalling:* + - dependency-name: org.wildfly.common:* # Quarkus - dependency-name: io.quarkus.*:* - dependency-name: io.quarkus:* diff --git a/.github/matrix-jvm-tests.json b/.github/matrix-jvm-tests.json index 4f2654e282c37..cdbb871a3b757 100644 --- a/.github/matrix-jvm-tests.json +++ b/.github/matrix-jvm-tests.json @@ -1,20 +1,46 @@ -[ { - "name": "17", +[ + { + "name": "JVM Tests - JDK 17", + "java-version": 17, + "maven_args": "$JVM_TEST_MAVEN_ARGS", + "maven_opts": "-Xmx2g -XX:MaxMetaspaceSize=1g", + "os-name": "ubuntu-22.04" + }, + { + "name": "JVM Tests - JDK 21", + "java-version": 21, + "java-version-gradle": 20, + "maven_args": "$JVM_TEST_MAVEN_ARGS", + "maven_opts": "-Xmx3g -XX:MaxMetaspaceSize=1g", + "os-name": "ubuntu-latest" + }, + { + "name": "JVM Tests - JDK 17 Windows", + "java-version": 17, + "maven_args": "-DskipDocs -Dformat.skip", + "maven_opts": "-Xmx2g -XX:MaxMetaspaceSize=1g", + "os-name": "windows-latest" + }, + { + "name": "JVM Integration Tests - JDK 17", + "category": "Integration", "java-version": 17, "maven_args": "$JVM_TEST_MAVEN_ARGS", "maven_opts": "-Xmx2g -XX:MaxMetaspaceSize=1g", "os-name": "ubuntu-latest" -} -, { - "name": "21", + }, + { + "name": "JVM Integration Tests - JDK 21", + "category": "Integration", "java-version": 21, "java-version-gradle": 20, "maven_args": "$JVM_TEST_MAVEN_ARGS", "maven_opts": "-Xmx3g -XX:MaxMetaspaceSize=1g", "os-name": "ubuntu-latest" -} -, { - "name": "17 Windows", + }, + { + "name": "JVM Integration Tests - JDK 17 Windows", + "category": "Integration", "java-version": 17, "maven_args": "-DskipDocs -Dformat.skip", "maven_opts": "-Xmx2g -XX:MaxMetaspaceSize=1g", diff --git a/.github/native-tests.json b/.github/native-tests.json index ee6584b2b45f4..3e8c8cf1b02b5 100644 --- a/.github/native-tests.json +++ b/.github/native-tests.json @@ -22,13 +22,13 @@ "category": "Data3", "timeout": 75, "test-modules": "flyway, hibernate-orm-panache, hibernate-orm-panache-kotlin, hibernate-orm-envers, liquibase, liquibase-mongodb", - "os-name": "ubuntu-latest" + "os-name": "ubuntu-latest" }, { "category": "Data4", "timeout": 60, "test-modules": "mongodb-client, mongodb-devservices, mongodb-panache, mongodb-rest-data-panache, mongodb-panache-kotlin, redis-client, hibernate-orm-rest-data-panache", - "os-name": "ubuntu-latest" + "os-name": "ubuntu-latest" }, { "category": "Data5", @@ -44,8 
+44,14 @@ }, { "category": "Data7", - "timeout": 85, - "test-modules": "reactive-oracle-client, reactive-mysql-client, reactive-db2-client, hibernate-reactive-db2, hibernate-reactive-mariadb, hibernate-reactive-mysql, hibernate-reactive-mysql-agroal-flyway, hibernate-reactive-panache, hibernate-reactive-panache-kotlin", + "timeout": 90, + "test-modules": "reactive-oracle-client, reactive-mysql-client, reactive-db2-client, hibernate-reactive-db2, hibernate-reactive-mariadb, hibernate-reactive-mssql, hibernate-reactive-mysql, hibernate-reactive-mysql-agroal-flyway, hibernate-reactive-panache, hibernate-reactive-panache-kotlin", + "os-name": "ubuntu-latest" + }, + { + "category": "Build tools and DevTools", + "timeout": 75, + "test-modules": "maven, gradle, devtools-registry-client", "os-name": "ubuntu-latest" }, { @@ -76,48 +82,48 @@ "category": "Security2", "timeout": 75, "test-modules": "oidc, oidc-code-flow, oidc-tenancy, oidc-client, oidc-client-reactive, oidc-token-propagation, oidc-wiremock, oidc-client-wiremock, oidc-wiremock-providers, oidc-dev-services", - "os-name": "ubuntu-latest" - }, - { - "category": "Security3", - "timeout": 55, - "test-modules": "keycloak-authorization, smallrye-jwt-token-propagation, security-webauthn", - "os-name": "ubuntu-latest" - }, - { - "category": "Cache", - "timeout": 75, - "test-modules": "infinispan-cache-jpa, infinispan-client, cache, redis-cache, infinispan-cache", - "os-name": "ubuntu-latest" - }, - { - "category": "HTTP", - "timeout": 130, - "test-modules": "elytron-resteasy, resteasy-jackson, elytron-resteasy-reactive, resteasy-mutiny, resteasy-reactive-kotlin/standard, vertx, vertx-http, vertx-web, vertx-http-compressors/all, vertx-http-compressors/some, vertx-web-jackson, vertx-graphql, virtual-http, rest-client, rest-client-reactive, rest-client-reactive-stork, rest-client-reactive-multipart, websockets, websockets-next, management-interface, management-interface-auth, mutiny-native-jctools", - "os-name": "ubuntu-latest" - }, - { - "category": "Misc1", - "timeout": 70, - "test-modules": "maven, jackson, jsonb, kotlin, rest-client-reactive-kotlin-serialization, quartz, qute, logging-min-level-unset, logging-min-level-set, simple with space", + "os-name": "ubuntu-latest" + }, + { + "category": "Security3", + "timeout": 55, + "test-modules": "keycloak-authorization, smallrye-jwt-token-propagation, security-webauthn", + "os-name": "ubuntu-latest" + }, + { + "category": "Cache", + "timeout": 75, + "test-modules": "infinispan-cache-jpa, infinispan-client, cache, redis-cache, infinispan-cache", + "os-name": "ubuntu-latest" + }, + { + "category": "HTTP", + "timeout": 130, + "test-modules": "elytron-resteasy, resteasy-jackson, elytron-resteasy-reactive, resteasy-mutiny, resteasy-reactive-kotlin/standard, vertx, vertx-http, vertx-web, vertx-http-compressors/all, vertx-http-compressors/some, vertx-web-jackson, vertx-graphql, virtual-http, rest-client, rest-client-reactive, rest-client-reactive-stork, rest-client-reactive-multipart, websockets, websockets-next, management-interface, management-interface-auth, mutiny-native-jctools", + "os-name": "ubuntu-latest" + }, + { + "category": "Misc1", + "timeout": 70, + "test-modules": "jackson, jsonb, kotlin, rest-client-reactive-kotlin-serialization, quartz, qute, logging-min-level-unset, logging-min-level-set, simple with space, web-dependency-locator", "os-name": "ubuntu-latest" }, { "category": "Misc2", "timeout": 75, - "test-modules": "hibernate-validator, test-extension/tests, logging-gelf, mailer, 
native-config-profile, locales/all, locales/some, locales/default", + "test-modules": "hibernate-validator, test-extension/tests, logging-gelf, mailer, native-config-profile, locales/all, locales/some, locales/default, jaxp, jaxb", "os-name": "ubuntu-latest" }, { "category": "Misc3", "timeout": 80, - "test-modules": "kubernetes-client, openshift-client, kubernetes-service-binding-jdbc, smallrye-config, smallrye-graphql, smallrye-graphql-client, smallrye-graphql-client-keycloak, smallrye-metrics", + "test-modules": "kubernetes-client, openshift-client, kubernetes-service-binding-jdbc, smallrye-config, smallrye-graphql, smallrye-graphql-client, smallrye-graphql-client-keycloak, picocli-native", "os-name": "ubuntu-latest" }, { - "category": "Misc4", + "category": "Observability", "timeout": 130, - "test-modules": "picocli-native, gradle, micrometer-mp-metrics, micrometer-prometheus, logging-json, jaxp, jaxb, observability-lgtm, opentelemetry, opentelemetry-jdbc-instrumentation, opentelemetry-mongodb-client-instrumentation, opentelemetry-redis-instrumentation, web-dependency-locator", + "test-modules": "smallrye-metrics, micrometer-mp-metrics, micrometer-prometheus, logging-json, observability-lgtm, opentelemetry, opentelemetry-jdbc-instrumentation, opentelemetry-mongodb-client-instrumentation, opentelemetry-redis-instrumentation, micrometer-opentelemetry", "os-name": "ubuntu-latest" }, { @@ -138,12 +144,6 @@ "test-modules": "resteasy-jackson, qute, liquibase", "os-name": "windows-latest" }, - { - "category": "DevTools Integration Tests", - "timeout": 75, - "test-modules": "devtools-registry-client", - "os-name": "ubuntu-latest" - }, { "category": "AWT, ImageIO and Java2D, Packaging .so files", "timeout": 40, diff --git a/.github/quarkus-github-bot.yml b/.github/quarkus-github-bot.yml index baee30aab0814..161c666ea22b8 100644 --- a/.github/quarkus-github-bot.yml +++ b/.github/quarkus-github-bot.yml @@ -37,6 +37,11 @@ projects: triage: discussions: monitoredCategories: [33575230] + guardedBranches: + - ref: 3.15 + notify: [jmartisk, gsmet, gastaldi, rsvoboda, aloubyansky] + - ref: 3.8 + notify: [jmartisk, gsmet, gastaldi, rsvoboda, aloubyansky] rules: - id: amazon-lambda labels: [area/amazon-lambda] @@ -470,7 +475,7 @@ triage: notify: [radcortez] - id: core labels: [area/core] - notify: [aloubyansky, gsmet, geoand, radcortez, Sanne, stuartwdouglas] + notify: [aloubyansky, gsmet, geoand, radcortez, Sanne] directories: - core/ - id: dependencies @@ -624,7 +629,7 @@ triage: - id: rest labels: [area/rest] title: resteasy.reactive - notify: [geoand, FroMage, stuartwdouglas] + notify: [geoand, FroMage] directories: - extensions/resteasy-reactive/ - id: scala @@ -719,11 +724,11 @@ triage: - id: continuous-testing labels: [area/continuous-testing] title: "continuous.testing" - notify: [stuartwdouglas] + notify: [holly-cummins, geoand] - id: devservices labels: [area/devservices] title: "dev.?services?" 
- notify: [stuartwdouglas, geoand] + notify: [geoand] - id: jdbc labels: [area/jdbc] title: "jdbc" diff --git a/.github/quarkus-github-lottery.yml b/.github/quarkus-github-lottery.yml index 656c638938e6e..99bb505e8ae7e 100644 --- a/.github/quarkus-github-lottery.yml +++ b/.github/quarkus-github-lottery.yml @@ -29,6 +29,23 @@ buckets: timeout: P14D ignoreLabels: ["triage/on-ice"] participants: + - username: "DavideD" + timezone: "Europe/Vienna" + triage: + days: ["MONDAY", "TUESDAY", "WEDNESDAY", "THURSDAY", "FRIDAY"] + maxIssues: 3 + maintenance: + labels: ["area/hibernate-orm", "area/hibernate-reactive", "area/hibernate-validator", "area/hibernate-search", "area/jdbc"] + days: ["WEDNESDAY", "THURSDAY", "FRIDAY"] # Count me out on Monday and Tuesday. + created: + maxIssues: 3 + feedback: + needed: + maxIssues: 3 + provided: + maxIssues: 3 + stale: + maxIssues: 1 - username: "yrodiere" timezone: "Europe/Paris" triage: diff --git a/.github/workflows/ci-actions-incremental.yml b/.github/workflows/ci-actions-incremental.yml index b1198cd0cd23a..1d787e1cea6d5 100644 --- a/.github/workflows/ci-actions-incremental.yml +++ b/.github/workflows/ci-actions-incremental.yml @@ -53,7 +53,11 @@ env: COMMON_MAVEN_ARGS: "-e -B --settings .github/mvn-settings.xml --fail-at-end" COMMON_TEST_MAVEN_ARGS: "-Dformat.skip -Denforcer.skip -DskipDocs -Dforbiddenapis.skip -DskipExtensionValidation -DskipCodestartValidation" NATIVE_TEST_MAVEN_ARGS: "-Dtest-containers -Dstart-containers -Dquarkus.native.native-image-xmx=6g -Dnative -Dnative.surefire.skip -Dno-descriptor-tests clean install -DskipDocs" - JVM_TEST_MAVEN_ARGS: "-Dtest-containers -Dstart-containers -Dquarkus.test.hang-detection-timeout=60" + JVM_TEST_MAVEN_ARGS: "-Dtest-containers -Dstart-containers -Dquarkus.test.hang-detection-timeout=300" + # Important: keep these selectors in sync with the grep commands in the calc_run_flags job! 
+ # This may be a lot better with maven 4, but with maven 3, excluding a project does not exclude its children, and it's not possible to include a project and explicitly exclude some children; compensate by doing excludes the low-tech way, at the shell level + JVM_TEST_INTEGRATION_TESTS_SELECTOR: "-f integration-tests -pl !gradle -pl !maven -pl !devmode -pl !devtools" + JVM_TEST_NORMAL_TESTS_SELECTOR: "-pl !docs -Dno-test-modules" PTS_MAVEN_ARGS: "-Ddevelocity.pts.enabled=${{ github.event_name == 'pull_request' && github.base_ref == 'main' && 'true' || 'false' }}" DB_USER: hibernate_orm_test DB_PASSWORD: hibernate_orm_test @@ -336,7 +340,7 @@ jobs: echo "run_tcks=${run_tcks}" >> $GITHUB_OUTPUT jvm-tests: - name: JVM Tests - JDK ${{matrix.java.name}} + name: ${{ matrix.java.name }} runs-on: ${{ matrix.java.os-name }} needs: [build-jdk17, calculate-test-jobs] # Skip main in forks @@ -352,8 +356,8 @@ jobs: steps: - name: Gradle Enterprise environment run: | - echo "GE_TAGS=jdk-${{matrix.java.name}}" >> "$GITHUB_ENV" - echo "GE_CUSTOM_VALUES=gh-job-name=JVM Tests - JDK ${{matrix.java.name}}" >> "$GITHUB_ENV" + echo "GE_TAGS=jdk-${{matrix.java.java-version}}" >> "$GITHUB_ENV" + echo "GE_CUSTOM_VALUES=gh-job-name=${{ matrix.java.name }}" >> "$GITHUB_ENV" - name: Stop mysql if: "!startsWith(matrix.java.os-name, 'windows') && !startsWith(matrix.java.os-name, 'macos')" run: | @@ -419,14 +423,14 @@ jobs: if: github.event_name == 'pull_request' with: path: ~/.m2/.develocity/build-cache - key: develocity-cache-JVM Tests - JDK ${{matrix.java.name}}-${{ github.event.pull_request.number }}-${{ github.event.pull_request.head.sha }} + key: develocity-cache-${{matrix.java.name}}-${{ github.event.pull_request.number }}-${{ github.event.pull_request.head.sha }} restore-keys: | - develocity-cache-JVM Tests - JDK ${{matrix.java.name}}-${{ github.event.pull_request.number }}- + develocity-cache-${{matrix.java.name}}-${{ github.event.pull_request.number }}- - name: Setup Develocity Build Scan capture uses: gradle/develocity-actions/setup-maven@v1.3 with: capture-strategy: ON_DEMAND - job-name: "JVM Tests - JDK ${{matrix.java.name}}" + job-name: "${{ matrix.java.name }}" add-pr-comment: false add-job-summary: false develocity-access-key: ${{ secrets.GRADLE_ENTERPRISE_ACCESS_KEY }} @@ -434,8 +438,14 @@ jobs: - name: Build env: CAPTURE_BUILD_SCAN: true - # Despite the pre-calculated run_jvm flag, GIB has to be re-run here to figure out the exact submodules to build. - run: ./mvnw $COMMON_MAVEN_ARGS $COMMON_TEST_MAVEN_ARGS $PTS_MAVEN_ARGS clean install -Dsurefire.timeout=1200 -pl !integration-tests/gradle -pl !integration-tests/maven -pl !integration-tests/devmode -pl !integration-tests/devtools -Dno-test-kubernetes -pl !docs ${{ matrix.java.maven_args }} ${{ needs.build-jdk17.outputs.gib_args }} + run: | + if [[ "${{ matrix.java.category }}" == *"Integration"* ]]; then + PL=$JVM_TEST_INTEGRATION_TESTS_SELECTOR + else + PL=$JVM_TEST_NORMAL_TESTS_SELECTOR + fi + # Despite the pre-calculated run_jvm flag, GIB has to be re-run here to figure out the exact submodules to build. 
+ ./mvnw $COMMON_MAVEN_ARGS $COMMON_TEST_MAVEN_ARGS $PTS_MAVEN_ARGS $PL clean install -Dsurefire.timeout=1200 -Dno-test-kubernetes ${{ matrix.java.maven_args }} ${{ needs.build-jdk17.outputs.gib_args }} - name: Clean Gradle temp directory if: always() run: devtools/gradle/gradlew --stop && rm -rf devtools/gradle/gradle-extension-plugin/build/tmp @@ -449,7 +459,7 @@ jobs: uses: actions/upload-artifact@v4 if: failure() with: - name: test-reports-jvm${{matrix.java.name}} + name: test-reports-${{matrix.java.name}} path: 'test-reports.tgz' retention-days: 7 - name: Prepare build reports archive @@ -464,7 +474,7 @@ jobs: uses: actions/upload-artifact@v4 if: always() with: - name: "build-reports-${{ github.run_attempt }}-JVM Tests - JDK ${{matrix.java.name}}" + name: "build-reports-${{ github.run_attempt }}-${{ matrix.java.name }}" path: | build-reports.zip retention-days: 7 @@ -474,7 +484,7 @@ jobs: # -- even in case of success, as some flaky tests won't fail the build if: always() with: - name: "debug-${{ github.run_attempt }}-JVM Tests - JDK ${{matrix.java.name}}" + name: "debug-${{ github.run_attempt }}-${{ matrix.java.name }}" path: "**/target/debug/**" if-no-files-found: ignore # If we're not currently debugging any test, it's fine. retention-days: 28 # We don't get notified for flaky tests, so let's give maintainers time to get back to it @@ -482,7 +492,7 @@ jobs: uses: actions/upload-artifact@v4 if: ${{ failure() || cancelled() }} with: - name: "build-logs-JVM Tests - JDK ${{matrix.java.name}}" + name: "build-logs-${{ matrix.java.name }}" path: | **/build.log retention-days: 7 diff --git a/.github/workflows/doc-build.yml b/.github/workflows/doc-build.yml index 137e9b99695e7..7e18386ef2c8e 100644 --- a/.github/workflows/doc-build.yml +++ b/.github/workflows/doc-build.yml @@ -50,7 +50,7 @@ jobs: timeout-minutes: 60 runs-on: ubuntu-latest # Skip main in forks - if: "github.repository == 'quarkusio/quarkus' || !endsWith(github.ref, '/main')" + if: github.repository == 'quarkusio/quarkus' || !endsWith(github.ref, '/main') steps: - uses: actions/checkout@v4 - name: Set up JDK 17 @@ -104,3 +104,19 @@ jobs: docs/ target/asciidoc/generated/config/ retention-days: 1 + + - name: Prepare build reports archive + if: always() + run: | + 7z a -tzip build-reports.zip -r \ + '**/target/*-reports/TEST-*.xml' \ + 'target/build-report.json' \ + LICENSE + - name: Upload build reports + uses: actions/upload-artifact@v4 + if: always() + with: + name: "build-reports-${{ github.run_attempt }}-Documentation Build" + path: | + build-reports.zip + retention-days: 7 diff --git a/.github/workflows/native-cron-build.yml.disabled b/.github/workflows/native-cron-build.yml.disabled index 0440990e0f744..84b3be4fee21d 100644 --- a/.github/workflows/native-cron-build.yml.disabled +++ b/.github/workflows/native-cron-build.yml.disabled @@ -20,7 +20,7 @@ jobs: run: sudo systemctl stop mysql - name: Pull docker image - run: docker pull quay.io/quarkus/ubi-quarkus-graalvmce-builder-image:22.3-java${{ matrix.java }} + run: docker pull quay.io/quarkus/ubi9-quarkus-graalvmce-builder-image:22.3-java${{ matrix.java }} - name: Set up JDK ${{ matrix.java }} uses: actions/setup-java@v2 @@ -55,7 +55,7 @@ jobs: run: ./mvnw -B install -DskipTests -DskipITs -Dformat.skip - name: Run integration tests in native - run: ./mvnw -B --settings .github/mvn-settings.xml verify -f integration-tests/pom.xml --fail-at-end -Dno-format -Dtest-containers -Dstart-containers -Dnative -Dquarkus.native.container-build=true 
-Dquarkus.native.builder-image=quay.io/quarkus/ubi-quarkus-graalvmce-builder-image:22.3-java${{ matrix.java }} -pl '!io.quarkus:quarkus-integration-test-google-cloud-functions-http,!io.quarkus:quarkus-integration-test-google-cloud-functions,!io.quarkus:quarkus-integration-test-funqy-google-cloud-functions' + run: ./mvnw -B --settings .github/mvn-settings.xml verify -f integration-tests/pom.xml --fail-at-end -Dno-format -Dtest-containers -Dstart-containers -Dnative -Dquarkus.native.container-build=true -Dquarkus.native.builder-image=quay.io/quarkus/ubi9-quarkus-graalvmce-builder-image:22.3-java${{ matrix.java }} -pl '!io.quarkus:quarkus-integration-test-google-cloud-functions-http,!io.quarkus:quarkus-integration-test-google-cloud-functions,!io.quarkus:quarkus-integration-test-funqy-google-cloud-functions' - name: Report if: always() diff --git a/.mvn/extensions.xml b/.mvn/extensions.xml index b8529bff74821..79afa0aa0acb0 100644 --- a/.mvn/extensions.xml +++ b/.mvn/extensions.xml @@ -2,7 +2,7 @@ com.gradle develocity-maven-extension - 1.23 + 1.23.1 com.gradle diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 3fdd79b643051..6da739594c516 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -139,7 +139,14 @@ You can check the last publication date here: [!NOTE] +> It is recommended to build Quarkus with Java 17 as it is the minimum requirement for Quarkus. +> +> You can however build Quarkus with more recent JDKs (such as Java 21) but some Gradle-related modules need to be able to find a Java 17 toolchain so you will need to have Java 17 around. +> +> The easiest way to achieve that is to use [SDKMAN!](https://sdkman.io/) to install Java 17 alongside your preferred JDK: it will be automatically detected by Gradle when building the Gradle modules. + +You can build Quarkus using the following commands: ```sh git clone git@github.com:quarkusio/quarkus.git @@ -252,7 +259,6 @@ If you have not done so on this machine, you need to: * Windows: * enable longpaths: `git config --global core.longpaths true` * avoid CRLF breaks: `git config --global core.autocrlf false` - * enable symlinks: `git config --global core.symlinks true` * Install Java SDK 17+ (OpenJDK recommended) * Install [GraalVM](https://quarkus.io/guides/building-native-image) * Install platform C developer tools: @@ -421,7 +427,7 @@ productive. The following Maven tips can vastly speed up development when workin [mvnd](https://github.com/apache/maven-mvnd) is a daemon for Maven providing faster builds. It parallelizes your builds by default and makes sure the output is consistent even for a parallelized build. -You can https://github.com/apache/maven-mvnd?tab=readme-ov-file#how-to-install-mvnd[install mvnd] with SDKMAN!, Homebrew... +You can [install mvnd](https://github.com/apache/maven-mvnd?tab=readme-ov-file#how-to-install-mvnd) with SDKMAN!, Homebrew... mvnd is a good companion for your Quarkus builds. 
diff --git a/bom/application/pom.xml b/bom/application/pom.xml index f4d18e095bd1a..db3e57c4ff8a2 100644 --- a/bom/application/pom.xml +++ b/bom/application/pom.xml @@ -22,7 +22,7 @@ 9.0.5 5.0.0 3.0.2 - 3.2.3 + 3.2.4 1.3.2 1 1.1.7 @@ -47,18 +47,18 @@ 2.0 4.0.2 2.9.0 - 3.11.1 - 4.1.1 + 3.11.2 + 4.2.0 4.0.0 - 4.0.6 - 2.12.0 - 6.7.3 + 4.0.8 + 2.12.1 + 6.8.0 4.6.1 2.2.0 1.0.13 3.0.1 - 3.17.1 - 4.26.0 + 3.18.1 + 4.27.0 2.7.0 2.1.3 3.0.0 @@ -82,7 +82,7 @@ 4.0.5 9.7.1 2.18.0 - 16.0.0.Final + 16.1.0.Final 3.0-alpha-2 2.1.0 @@ -91,7 +91,7 @@ 2.18.2 1.0.0.Final 3.17.0 - 1.17.2 + 1.18.0 1.7.0 @@ -99,18 +99,18 @@ 0.0.9.Final 2.5 8.0.0.Final - 8.17.0 + 8.17.1 2.2.21 2.2.5.Final 2.2.2.SP01 3.0.3.Final 2.0.0.Final - 1.7.0.Final + 2.0.1 1.0.1.Final 2.6.0.Final 2.2.2.Final 3.8.0.Final - 4.5.11 + 4.5.12 4.5.14 4.4.16 4.1.5 @@ -123,22 +123,22 @@ 8.3.0 12.8.1.jre11 1.6.7 - 23.5.0.24.07 + 23.6.0.24.10 10.16.1.1 12.1.0.0 1.2.6 2.2 5.10.5 - 15.0.11.Final - 5.0.12.Final - 3.1.8 - 4.1.115.Final + 15.0.13.Final + 5.0.13.Final + 3.2.0 + 4.1.117.Final 1.16.0 1.0.4 3.6.1.Final - 2.7.0 + 2.8.0 4.0.5 - 3.7.2 + 3.9.0 1.8.0 1.1.10.5 0.109.1 @@ -158,14 +158,14 @@ 3.2.0 4.2.2 3.1.1.Final - 11.2.0 + 11.3.1 3.0.4 4.29.1 4.29.1 2.3 6.0.0 - 5.3.0 + 5.3.1 0.34.1 3.26.3 0.3.0 @@ -179,7 +179,7 @@ 2.2.0 26.0.3 1.15.1 - 3.48.4 + 3.49.0 2.36.0 0.27.2 1.45.3 @@ -188,12 +188,12 @@ 1.1.4 1.27.1 1.13.0 - 2.11.0 + 2.12.1 2.0.1.Final 2.24.3 1.3.1.Final 1.12.0 - 2.6.6.Final + 2.6.8.Final 0.1.18.Final 1.20.4 3.4.0 @@ -212,7 +212,7 @@ 0.1.3 2.12.1 0.8.11 - 1.1.0 + 1.1.1 3.3.0 2.12.4 @@ -248,7 +248,7 @@ import - + io.quarkus quarkus-bom-dev-ui @@ -502,7 +502,7 @@ org.jetbrains annotations - 26.0.1 + 26.0.2 @@ -1004,6 +1004,16 @@ quarkus-oidc-client-graphql-deployment ${project.version} + + io.quarkus + quarkus-oidc-token-propagation-common-deployment + ${project.version} + + + io.quarkus + quarkus-oidc-token-propagation-common + ${project.version} + io.quarkus quarkus-resteasy-client-oidc-token-propagation @@ -3199,6 +3209,16 @@ quarkus-micrometer ${project.version} + + io.quarkus + quarkus-micrometer-opentelemetry-deployment + ${project.version} + + + io.quarkus + quarkus-micrometer-opentelemetry + ${project.version} + io.quarkus quarkus-micrometer-registry-prometheus-deployment diff --git a/bom/dev-ui/pom.xml b/bom/dev-ui/pom.xml index 9cf2ab7296347..90bde85a3b092 100644 --- a/bom/dev-ui/pom.xml +++ b/bom/dev-ui/pom.xml @@ -13,7 +13,7 @@ Dependency management for dev-ui. Importable by third party extension developers. 
- 24.6.1 + 24.6.2 3.2.1 4.1.1 3.2.1 @@ -28,7 +28,7 @@ 1.7.5 1.7.0 5.6.0 - 2.0.5 + 2.0.9 2.4.0 1.0.17 1.0.1 diff --git a/build-parent/pom.xml b/build-parent/pom.xml index 0470ec3298fed..1b31434ccfeda 100644 --- a/build-parent/pom.xml +++ b/build-parent/pom.xml @@ -29,11 +29,11 @@ 1.6.Final - 3.2.3 + 3.2.4 1.0.0 2.5.13 - 4.8.0 + 4.9.0 3.26.3 2.0.3.Final 6.0.1 @@ -104,7 +104,7 @@ 3.27.3 - 3.10.0 + 3.11.0 7.3.0 @@ -597,7 +597,7 @@ org.apache.groovy groovy - 4.0.24 + 4.0.25 diff --git a/core/builder/pom.xml b/core/builder/pom.xml index bcb4f3bcfd078..450e83a0ac9ec 100644 --- a/core/builder/pom.xml +++ b/core/builder/pom.xml @@ -25,6 +25,10 @@ org.wildfly.common wildfly-common + + io.smallrye.common + smallrye-common-constraint + org.jboss.logging jboss-logging diff --git a/core/builder/src/main/java/io/quarkus/builder/BuildChain.java b/core/builder/src/main/java/io/quarkus/builder/BuildChain.java index 4e68cfd2d9e53..84d12ad767757 100644 --- a/core/builder/src/main/java/io/quarkus/builder/BuildChain.java +++ b/core/builder/src/main/java/io/quarkus/builder/BuildChain.java @@ -6,7 +6,7 @@ import java.util.ServiceLoader; import java.util.Set; -import org.wildfly.common.Assert; +import io.smallrye.common.constraint.Assert; /** * A build chain. diff --git a/core/builder/src/main/java/io/quarkus/builder/BuildChainBuilder.java b/core/builder/src/main/java/io/quarkus/builder/BuildChainBuilder.java index 7d72d220502cc..aa242d9647553 100644 --- a/core/builder/src/main/java/io/quarkus/builder/BuildChainBuilder.java +++ b/core/builder/src/main/java/io/quarkus/builder/BuildChainBuilder.java @@ -19,9 +19,8 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.wildfly.common.Assert; - import io.quarkus.builder.item.BuildItem; +import io.smallrye.common.constraint.Assert; /** * A build chain builder. @@ -179,7 +178,7 @@ private Map> wireDependencies(Set entry : stepBuilder.getConsumes().entrySet()) { final Consume consume = entry.getValue(); final ItemId id = entry.getKey(); - if (!consume.getFlags().contains(ConsumeFlag.OPTIONAL) && !id.isMulti()) { + if (!consume.flags().contains(ConsumeFlag.OPTIONAL) && !id.isMulti()) { if (!initialIds.contains(id) && !allProduces.containsKey(id)) { throw new ChainBuildException("No producers for required item " + id); } diff --git a/core/builder/src/main/java/io/quarkus/builder/BuildContext.java b/core/builder/src/main/java/io/quarkus/builder/BuildContext.java index 4d2c23ee429f2..e6bd5a91d6177 100644 --- a/core/builder/src/main/java/io/quarkus/builder/BuildContext.java +++ b/core/builder/src/main/java/io/quarkus/builder/BuildContext.java @@ -12,13 +12,12 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; -import org.wildfly.common.Assert; - import io.quarkus.builder.diag.Diagnostic; import io.quarkus.builder.item.BuildItem; import io.quarkus.builder.item.MultiBuildItem; import io.quarkus.builder.item.SimpleBuildItem; import io.quarkus.builder.location.Location; +import io.smallrye.common.constraint.Assert; /** * The context passed to a deployer's operation. 
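Context for the import swaps above: the diff replaces `org.wildfly.common.Assert` with `io.smallrye.common.constraint.Assert` across the builder classes (and the new forbidden-apis signatures at the top of the diff ban the WildFly Common variants). Below is a minimal sketch of what a migrated build item looks like, assuming the SmallRye class keeps the same `checkNotNullParam(...)` shape as its WildFly Common ancestor; the class and field names are hypothetical, not part of this change.

```java
import io.quarkus.builder.item.MultiBuildItem;
import io.smallrye.common.constraint.Assert;

/**
 * Hypothetical build item mirroring the pattern in the files touched above:
 * only the Assert import changes, call sites stay as they were.
 */
public final class ExampleNameBuildItem extends MultiBuildItem {

    private final String name;

    public ExampleNameBuildItem(String name) {
        // previously org.wildfly.common.Assert.checkNotNullParam("name", name)
        this.name = Assert.checkNotNullParam("name", name);
    }

    public String getName() {
        return name;
    }
}
```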
diff --git a/core/builder/src/main/java/io/quarkus/builder/BuildException.java b/core/builder/src/main/java/io/quarkus/builder/BuildException.java index cef749bdd724c..4bd8cdd0bcd30 100644 --- a/core/builder/src/main/java/io/quarkus/builder/BuildException.java +++ b/core/builder/src/main/java/io/quarkus/builder/BuildException.java @@ -3,9 +3,8 @@ import java.util.Collections; import java.util.List; -import org.wildfly.common.Assert; - import io.quarkus.builder.diag.Diagnostic; +import io.smallrye.common.constraint.Assert; /** * @author David M. Lloyd diff --git a/core/builder/src/main/java/io/quarkus/builder/BuildExecutionBuilder.java b/core/builder/src/main/java/io/quarkus/builder/BuildExecutionBuilder.java index de947a13fb98d..90a4d0b57df0c 100644 --- a/core/builder/src/main/java/io/quarkus/builder/BuildExecutionBuilder.java +++ b/core/builder/src/main/java/io/quarkus/builder/BuildExecutionBuilder.java @@ -6,9 +6,8 @@ import java.util.List; import java.util.Map; -import org.wildfly.common.Assert; - import io.quarkus.builder.item.BuildItem; +import io.smallrye.common.constraint.Assert; /** * A builder for a deployer execution. diff --git a/core/builder/src/main/java/io/quarkus/builder/BuildStepBuilder.java b/core/builder/src/main/java/io/quarkus/builder/BuildStepBuilder.java index a873488c897c6..75431f9b6e694 100644 --- a/core/builder/src/main/java/io/quarkus/builder/BuildStepBuilder.java +++ b/core/builder/src/main/java/io/quarkus/builder/BuildStepBuilder.java @@ -6,9 +6,8 @@ import java.util.Set; import java.util.function.BooleanSupplier; -import org.wildfly.common.Assert; - import io.quarkus.builder.item.BuildItem; +import io.smallrye.common.constraint.Assert; /** * A builder for build step instances within a chain. A build step can consume and produce items. It may also register @@ -236,7 +235,7 @@ Map getProduces() { Set getRealConsumes() { final HashMap map = new HashMap<>(consumes); - map.entrySet().removeIf(e -> e.getValue().getConstraint() == Constraint.ORDER_ONLY); + map.entrySet().removeIf(e -> e.getValue().constraint() == Constraint.ORDER_ONLY); return map.keySet(); } diff --git a/core/builder/src/main/java/io/quarkus/builder/Consume.java b/core/builder/src/main/java/io/quarkus/builder/Consume.java index d64980fa102de..806bf8bdcb18a 100644 --- a/core/builder/src/main/java/io/quarkus/builder/Consume.java +++ b/core/builder/src/main/java/io/quarkus/builder/Consume.java @@ -1,42 +1,20 @@ package io.quarkus.builder; -final class Consume { - private final BuildStepBuilder buildStepBuilder; - private final ItemId itemId; - private final Constraint constraint; - private final ConsumeFlags flags; +import static io.quarkus.builder.Constraint.ORDER_ONLY; +import static io.quarkus.builder.Constraint.REAL; +import static io.quarkus.builder.ConsumeFlag.OPTIONAL; - Consume(final BuildStepBuilder buildStepBuilder, final ItemId itemId, final Constraint constraint, - final ConsumeFlags flags) { - this.buildStepBuilder = buildStepBuilder; - this.itemId = itemId; - this.constraint = constraint; - this.flags = flags; - } - - BuildStepBuilder getBuildStepBuilder() { - return buildStepBuilder; - } - - ItemId getItemId() { - return itemId; - } - - ConsumeFlags getFlags() { - return flags; - } +record Consume(BuildStepBuilder buildStepBuilder, ItemId itemId, Constraint constraint, ConsumeFlags flags) { Consume combine(final Constraint constraint, final ConsumeFlags flags) { - final Constraint outputConstraint = constraint == Constraint.REAL || this.constraint == Constraint.REAL - ? 
Constraint.REAL - : Constraint.ORDER_ONLY; - final ConsumeFlags outputFlags = !flags.contains(ConsumeFlag.OPTIONAL) || !this.flags.contains(ConsumeFlag.OPTIONAL) - ? flags.with(this.flags).without(ConsumeFlag.OPTIONAL) - : flags.with(this.flags); - return new Consume(buildStepBuilder, itemId, outputConstraint, outputFlags); - } - - Constraint getConstraint() { - return constraint; + return new Consume( + buildStepBuilder, + itemId, + constraint == REAL || this.constraint == REAL + ? REAL + : ORDER_ONLY, + !flags.contains(OPTIONAL) || !this.flags.contains(OPTIONAL) + ? flags.with(this.flags).without(OPTIONAL) + : flags.with(this.flags)); } } diff --git a/core/builder/src/main/java/io/quarkus/builder/ConsumeFlags.java b/core/builder/src/main/java/io/quarkus/builder/ConsumeFlags.java index 4ab63939cf045..290547df67d62 100644 --- a/core/builder/src/main/java/io/quarkus/builder/ConsumeFlags.java +++ b/core/builder/src/main/java/io/quarkus/builder/ConsumeFlags.java @@ -1,8 +1,9 @@ package io.quarkus.builder; -import org.wildfly.common.Assert; import org.wildfly.common.flags.Flags; +import io.smallrye.common.constraint.Assert; + /** * Flags which can be set on consume declarations. */ diff --git a/core/builder/src/main/java/io/quarkus/builder/ItemId.java b/core/builder/src/main/java/io/quarkus/builder/ItemId.java index 2e934ccefcacd..bbcfed998cad7 100644 --- a/core/builder/src/main/java/io/quarkus/builder/ItemId.java +++ b/core/builder/src/main/java/io/quarkus/builder/ItemId.java @@ -2,10 +2,9 @@ import java.util.Objects; -import org.wildfly.common.Assert; - import io.quarkus.builder.item.BuildItem; import io.quarkus.builder.item.MultiBuildItem; +import io.smallrye.common.constraint.Assert; final class ItemId { private final Class itemType; diff --git a/core/builder/src/main/java/io/quarkus/builder/ProduceFlags.java b/core/builder/src/main/java/io/quarkus/builder/ProduceFlags.java index 8f8a8cf4e3488..c0a1ba046f2f1 100644 --- a/core/builder/src/main/java/io/quarkus/builder/ProduceFlags.java +++ b/core/builder/src/main/java/io/quarkus/builder/ProduceFlags.java @@ -1,8 +1,9 @@ package io.quarkus.builder; -import org.wildfly.common.Assert; import org.wildfly.common.flags.Flags; +import io.smallrye.common.constraint.Assert; + /** * Flags which can be set on consume declarations. 
*/ diff --git a/core/builder/src/main/java/io/quarkus/builder/diag/Diagnostic.java b/core/builder/src/main/java/io/quarkus/builder/diag/Diagnostic.java index bc14469557768..34ff41eeea9bd 100644 --- a/core/builder/src/main/java/io/quarkus/builder/diag/Diagnostic.java +++ b/core/builder/src/main/java/io/quarkus/builder/diag/Diagnostic.java @@ -5,9 +5,8 @@ import java.io.StringWriter; import java.io.Writer; -import org.wildfly.common.Assert; - import io.quarkus.builder.location.Location; +import io.smallrye.common.constraint.Assert; public final class Diagnostic { private final Level level; diff --git a/core/deployment/src/main/java/io/quarkus/deployment/BootstrapConfig.java b/core/deployment/src/main/java/io/quarkus/deployment/BootstrapConfig.java index 2ccf7e384f420..9f34b56ad7005 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/BootstrapConfig.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/BootstrapConfig.java @@ -1,5 +1,6 @@ package io.quarkus.deployment; +import io.quarkus.bootstrap.model.ApplicationModel; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; import io.smallrye.config.ConfigMapping; @@ -47,13 +48,13 @@ public interface BootstrapConfig { boolean disableJarCache(); /** - * A temporary option introduced to avoid a logging warning when {@code -Dquarkus.bootstrap.incubating-model-resolver} + * A temporary option introduced to avoid a logging warning when {@code -Dquarkus.bootstrap.legacy-model-resolver} * is added to the build command line. - * This option enables an incubating implementation of the Quarkus Application Model resolver. - * This option will be removed as soon as the incubating implementation becomes the default one. + * This option enables the legacy implementation of the Quarkus Application Model resolver. + * This option will be removed once the legacy {@link ApplicationModel} resolver implementation gets removed. */ @WithDefault("false") - boolean incubatingModelResolver(); + boolean legacyModelResolver(); /** * Whether to throw an error, warn or silently ignore misaligned platform BOM imports diff --git a/core/deployment/src/main/java/io/quarkus/deployment/Capabilities.java b/core/deployment/src/main/java/io/quarkus/deployment/Capabilities.java index 1780bb3010b8a..f80dccea5d0a5 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/Capabilities.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/Capabilities.java @@ -35,12 +35,6 @@ public Set getCapabilities() { return capabilities; } - // @deprecated in 1.14.0.Final - @Deprecated - public boolean isCapabilityPresent(String capability) { - return isPresent(capability); - } - /** * Checks whether a given capability is present during the build. 
* diff --git a/core/deployment/src/main/java/io/quarkus/deployment/CodeGenProvider.java b/core/deployment/src/main/java/io/quarkus/deployment/CodeGenProvider.java index c06f0277f2031..8fbb056302d81 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/CodeGenProvider.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/CodeGenProvider.java @@ -6,7 +6,6 @@ import java.util.Objects; import org.eclipse.microprofile.config.Config; -import org.wildfly.common.annotation.NotNull; import io.quarkus.bootstrap.model.ApplicationModel; import io.quarkus.bootstrap.prebuild.CodeGenException; @@ -18,7 +17,6 @@ public interface CodeGenProvider { /** * @return unique name of the code gen provider, will correspond to the directory in generated-sources */ - @NotNull String providerId(); /** @@ -37,7 +35,6 @@ default String inputExtension() { * * @return file extensions */ - @NotNull default String[] inputExtensions() { if (inputExtension() != null) { return new String[] { inputExtension() }; @@ -53,7 +50,6 @@ default String[] inputExtensions() { * * @return the input directory */ - @NotNull String inputDirectory(); /** diff --git a/core/deployment/src/main/java/io/quarkus/deployment/builditem/ConfigurationTypeBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/builditem/ConfigurationTypeBuildItem.java index 0820564b4af8d..85a162ee56faa 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/builditem/ConfigurationTypeBuildItem.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/builditem/ConfigurationTypeBuildItem.java @@ -1,8 +1,7 @@ package io.quarkus.deployment.builditem; -import org.wildfly.common.Assert; - import io.quarkus.builder.item.MultiBuildItem; +import io.smallrye.common.constraint.Assert; /** * The configuration type build item. Every configuration type should be registered using this build item diff --git a/core/deployment/src/main/java/io/quarkus/deployment/builditem/LogCategoryBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/builditem/LogCategoryBuildItem.java index 327a92d44be2c..5f97c5efcc86d 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/builditem/LogCategoryBuildItem.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/builditem/LogCategoryBuildItem.java @@ -2,9 +2,8 @@ import java.util.logging.Level; -import org.wildfly.common.Assert; - import io.quarkus.builder.item.MultiBuildItem; +import io.smallrye.common.constraint.Assert; /** * Establish the default log level of a log category. diff --git a/core/deployment/src/main/java/io/quarkus/deployment/builditem/LogConsoleFormatBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/builditem/LogConsoleFormatBuildItem.java index a30522477735a..1ded7eb6ade63 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/builditem/LogConsoleFormatBuildItem.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/builditem/LogConsoleFormatBuildItem.java @@ -3,10 +3,9 @@ import java.util.Optional; import java.util.logging.Formatter; -import org.wildfly.common.Assert; - import io.quarkus.builder.item.MultiBuildItem; import io.quarkus.runtime.RuntimeValue; +import io.smallrye.common.constraint.Assert; /** * The log console format build item. 
Producing this item will cause the logging subsystem to disregard its diff --git a/core/deployment/src/main/java/io/quarkus/deployment/builditem/LogFileFormatBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/builditem/LogFileFormatBuildItem.java index 2547b409e8984..cdb79e2b0401b 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/builditem/LogFileFormatBuildItem.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/builditem/LogFileFormatBuildItem.java @@ -3,10 +3,9 @@ import java.util.Optional; import java.util.logging.Formatter; -import org.wildfly.common.Assert; - import io.quarkus.builder.item.MultiBuildItem; import io.quarkus.runtime.RuntimeValue; +import io.smallrye.common.constraint.Assert; /** * The log file format build item. Producing this item will cause the logging subsystem to disregard its diff --git a/core/deployment/src/main/java/io/quarkus/deployment/builditem/LogHandlerBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/builditem/LogHandlerBuildItem.java index 963c17912dcc8..4cecbea94c652 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/builditem/LogHandlerBuildItem.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/builditem/LogHandlerBuildItem.java @@ -3,10 +3,9 @@ import java.util.Optional; import java.util.logging.Handler; -import org.wildfly.common.Assert; - import io.quarkus.builder.item.MultiBuildItem; import io.quarkus.runtime.RuntimeValue; +import io.smallrye.common.constraint.Assert; /** * A build item for adding additional logging handlers. diff --git a/core/deployment/src/main/java/io/quarkus/deployment/builditem/LogSocketFormatBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/builditem/LogSocketFormatBuildItem.java index 23aeaf109b955..519849e0d1400 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/builditem/LogSocketFormatBuildItem.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/builditem/LogSocketFormatBuildItem.java @@ -3,10 +3,9 @@ import java.util.Optional; import java.util.logging.Formatter; -import org.wildfly.common.Assert; - import io.quarkus.builder.item.MultiBuildItem; import io.quarkus.runtime.RuntimeValue; +import io.smallrye.common.constraint.Assert; /** * The socket format build item. Producing this item will cause the logging subsystem to disregard its diff --git a/core/deployment/src/main/java/io/quarkus/deployment/builditem/LogSyslogFormatBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/builditem/LogSyslogFormatBuildItem.java index f20812149a246..fb23437d10471 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/builditem/LogSyslogFormatBuildItem.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/builditem/LogSyslogFormatBuildItem.java @@ -3,10 +3,9 @@ import java.util.Optional; import java.util.logging.Formatter; -import org.wildfly.common.Assert; - import io.quarkus.builder.item.MultiBuildItem; import io.quarkus.runtime.RuntimeValue; +import io.smallrye.common.constraint.Assert; /** * The syslog format build item. 
Producing this item will cause the logging subsystem to disregard its diff --git a/core/deployment/src/main/java/io/quarkus/deployment/builditem/NamedLogHandlersBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/builditem/NamedLogHandlersBuildItem.java index 6764f740726de..e6f7b7e3dca93 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/builditem/NamedLogHandlersBuildItem.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/builditem/NamedLogHandlersBuildItem.java @@ -3,10 +3,9 @@ import java.util.Map; import java.util.logging.Handler; -import org.wildfly.common.Assert; - import io.quarkus.builder.item.MultiBuildItem; import io.quarkus.runtime.RuntimeValue; +import io.smallrye.common.constraint.Assert; /** * A build item for adding additional named logging handlers. diff --git a/core/deployment/src/main/java/io/quarkus/deployment/builditem/RunTimeConfigurationDefaultBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/builditem/RunTimeConfigurationDefaultBuildItem.java index aecb5c9186fbf..24e7f142a1a21 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/builditem/RunTimeConfigurationDefaultBuildItem.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/builditem/RunTimeConfigurationDefaultBuildItem.java @@ -1,9 +1,8 @@ package io.quarkus.deployment.builditem; -import org.wildfly.common.Assert; - import io.quarkus.builder.item.MultiBuildItem; import io.quarkus.runtime.annotations.ConfigItem; +import io.smallrye.common.constraint.Assert; /** * A build item which specifies a configuration default value for run time, which is used to establish a default other diff --git a/core/deployment/src/main/java/io/quarkus/deployment/builditem/StreamingLogHandlerBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/builditem/StreamingLogHandlerBuildItem.java index 5271f0e8f11de..68688e60f973c 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/builditem/StreamingLogHandlerBuildItem.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/builditem/StreamingLogHandlerBuildItem.java @@ -3,10 +3,9 @@ import java.util.Optional; import java.util.logging.Handler; -import org.wildfly.common.Assert; - import io.quarkus.builder.item.SimpleBuildItem; import io.quarkus.runtime.RuntimeValue; +import io.smallrye.common.constraint.Assert; /** * A build item for adding the dev stream log via mutiny. 
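Related to the `Capabilities` change earlier in this diff, which drops the deprecated `isCapabilityPresent(String)`: a minimal sketch of a build step using the surviving `isPresent(...)` API. The processor class, capability string, and feature name are made up for illustration only.

```java
import io.quarkus.deployment.Capabilities;
import io.quarkus.deployment.annotations.BuildProducer;
import io.quarkus.deployment.annotations.BuildStep;
import io.quarkus.deployment.builditem.FeatureBuildItem;

public class ExampleCapabilityProcessor {

    @BuildStep
    void activateIfPresent(Capabilities capabilities, BuildProducer<FeatureBuildItem> feature) {
        // isPresent(...) is the remaining API; the deprecated isCapabilityPresent(...) is removed above.
        // "io.quarkus.example.capability" and "example-feature" are hypothetical names.
        if (capabilities.isPresent("io.quarkus.example.capability")) {
            feature.produce(new FeatureBuildItem("example-feature"));
        }
    }
}
```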
diff --git a/core/deployment/src/main/java/io/quarkus/deployment/configuration/BuildTimeConfigurationReader.java b/core/deployment/src/main/java/io/quarkus/deployment/configuration/BuildTimeConfigurationReader.java index 1d3af46be5a0a..c319892a9da0f 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/configuration/BuildTimeConfigurationReader.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/configuration/BuildTimeConfigurationReader.java @@ -37,7 +37,6 @@ import org.eclipse.microprofile.config.spi.ConfigSource; import org.eclipse.microprofile.config.spi.Converter; import org.jboss.logging.Logger; -import org.wildfly.common.Assert; import io.quarkus.deployment.configuration.definition.ClassDefinition; import io.quarkus.deployment.configuration.definition.ClassDefinition.ClassMember; @@ -72,6 +71,7 @@ import io.quarkus.runtime.configuration.HyphenateEnumConverter; import io.quarkus.runtime.configuration.NameIterator; import io.quarkus.runtime.configuration.PropertiesUtil; +import io.smallrye.common.constraint.Assert; import io.smallrye.config.ConfigMapping; import io.smallrye.config.ConfigMappings; import io.smallrye.config.ConfigMappings.ConfigClass; @@ -651,7 +651,7 @@ ReadResult run() { } if (runTimeNames.contains(name)) { unknownBuildProperties.remove(property); - ConfigValue value = runtimeConfig.getConfigValue(property); + ConfigValue value = withoutExpansion(() -> runtimeConfig.getConfigValue(property)); if (value.getRawValue() != null) { runTimeValues.put(value.getNameProfiled(), value.noProblems().withValue(value.getRawValue())); } diff --git a/core/deployment/src/main/java/io/quarkus/deployment/configuration/ClassLoadingConfig.java b/core/deployment/src/main/java/io/quarkus/deployment/configuration/ClassLoadingConfig.java index 3eb7b693f46ec..ae14536df79d6 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/configuration/ClassLoadingConfig.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/configuration/ClassLoadingConfig.java @@ -6,9 +6,9 @@ import java.util.Set; import io.quarkus.runtime.annotations.ConfigDocMapKey; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; /** * Class loading @@ -18,7 +18,8 @@ * This is because it is needed before any of the config infrastructure is set up. */ @ConfigRoot(phase = ConfigPhase.BUILD_TIME) -public class ClassLoadingConfig { +@ConfigMapping(prefix = "quarkus.class-loading") +public interface ClassLoadingConfig { /** * Artifacts that are loaded in a parent first manner. This can be used to work around issues where a given @@ -30,8 +31,7 @@ public class ClassLoadingConfig { *

* WARNING: This config property can only be set in application.properties */ - @ConfigItem(defaultValue = "") - public Optional> parentFirstArtifacts; + Optional> parentFirstArtifacts(); /** * Artifacts that are loaded in the runtime ClassLoader in dev mode, so they will be dropped @@ -48,15 +48,13 @@ public class ClassLoadingConfig { *

* WARNING: This config property can only be set in application.properties */ - @ConfigItem(defaultValue = "") - public Optional reloadableArtifacts; + Optional reloadableArtifacts(); /** * Artifacts that will never be loaded by the class loader, and will not be packed into the final application. This allows * you to explicitly remove artifacts from your application even though they may be present on the class path. */ - @ConfigItem(defaultValue = "") - public Optional> removedArtifacts; + Optional> removedArtifacts(); /** * Resources that should be removed/hidden from dependencies. @@ -73,8 +71,7 @@ public class ClassLoadingConfig { *

* Note that for technical reasons this is not supported when running with JBang. */ - @ConfigItem @ConfigDocMapKey("group-id:artifact-id") - public Map> removedResources; + Map> removedResources(); } diff --git a/core/deployment/src/main/java/io/quarkus/deployment/configuration/RunTimeConfigurationGenerator.java b/core/deployment/src/main/java/io/quarkus/deployment/configuration/RunTimeConfigurationGenerator.java index 5c7089a68ec5e..9288e8f7127f0 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/configuration/RunTimeConfigurationGenerator.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/configuration/RunTimeConfigurationGenerator.java @@ -24,7 +24,6 @@ import org.eclipse.microprofile.config.spi.ConfigBuilder; import org.eclipse.microprofile.config.spi.Converter; import org.objectweb.asm.Opcodes; -import org.wildfly.common.Assert; import io.quarkus.deployment.AccessorFinder; import io.quarkus.deployment.configuration.definition.ClassDefinition; @@ -64,6 +63,7 @@ import io.quarkus.runtime.configuration.NameIterator; import io.quarkus.runtime.configuration.PropertiesUtil; import io.quarkus.runtime.configuration.QuarkusConfigFactory; +import io.smallrye.common.constraint.Assert; import io.smallrye.config.ConfigMappings; import io.smallrye.config.ConfigMappings.ConfigClass; import io.smallrye.config.Converters; @@ -99,10 +99,10 @@ public final class RunTimeConfigurationGenerator { static final MethodDescriptor CD_MISSING_VALUE = MethodDescriptor.ofMethod(ConfigDiagnostic.class, "missingValue", void.class, String.class, NoSuchElementException.class); static final MethodDescriptor CD_RESET_ERROR = MethodDescriptor.ofMethod(ConfigDiagnostic.class, "resetError", void.class); - static final MethodDescriptor CD_UNKNOWN_PROPERTIES = MethodDescriptor.ofMethod(ConfigDiagnostic.class, "unknownProperties", + static final MethodDescriptor CD_REPORT_UNKNOWN = MethodDescriptor.ofMethod(ConfigDiagnostic.class, "reportUnknown", void.class, Set.class); - static final MethodDescriptor CD_UNKNOWN_PROPERTIES_RT = MethodDescriptor.ofMethod(ConfigDiagnostic.class, - "unknownPropertiesRuntime", void.class, Set.class); + static final MethodDescriptor CD_REPORT_UNKNOWN_RUNTIME = MethodDescriptor.ofMethod(ConfigDiagnostic.class, + "reportUnknownRuntime", void.class, Set.class); static final MethodDescriptor CONVS_NEW_ARRAY_CONVERTER = MethodDescriptor.ofMethod(Converters.class, "newArrayConverter", Converter.class, Converter.class, Class.class); @@ -449,14 +449,14 @@ public void run() { // generate sweep for clinit configSweepLoop(siParserBody, clinit, clinitConfig, getRegisteredRoots(BUILD_AND_RUN_TIME_FIXED), Type.BUILD_TIME); - clinit.invokeStaticMethod(CD_UNKNOWN_PROPERTIES, clinit.readStaticField(C_UNKNOWN)); + clinit.invokeStaticMethod(CD_REPORT_UNKNOWN, clinit.readStaticField(C_UNKNOWN)); if (liveReloadPossible) { configSweepLoop(siParserBody, readConfig, runTimeConfig, getRegisteredRoots(RUN_TIME), Type.RUNTIME); } // generate sweep for run time configSweepLoop(rtParserBody, readConfig, runTimeConfig, getRegisteredRoots(RUN_TIME), Type.RUNTIME); - readConfig.invokeStaticMethod(CD_UNKNOWN_PROPERTIES_RT, readConfig.readStaticField(C_UNKNOWN_RUNTIME)); + readConfig.invokeStaticMethod(CD_REPORT_UNKNOWN_RUNTIME, readConfig.readStaticField(C_UNKNOWN_RUNTIME)); // generate ensure-initialized method // the point of this method is simply to initialize the Config class diff --git a/core/deployment/src/main/java/io/quarkus/deployment/configuration/definition/ClassDefinition.java 
b/core/deployment/src/main/java/io/quarkus/deployment/configuration/definition/ClassDefinition.java index 33d1713e78f6a..45d7e802a2ffe 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/configuration/definition/ClassDefinition.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/configuration/definition/ClassDefinition.java @@ -5,11 +5,10 @@ import java.util.LinkedHashMap; import java.util.Map; -import org.wildfly.common.Assert; - import io.quarkus.gizmo.FieldDescriptor; import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.util.StringUtil; +import io.smallrye.common.constraint.Assert; /** * diff --git a/core/deployment/src/main/java/io/quarkus/deployment/configuration/definition/RootDefinition.java b/core/deployment/src/main/java/io/quarkus/deployment/configuration/definition/RootDefinition.java index bd0122ab96e69..e5207265f6478 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/configuration/definition/RootDefinition.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/configuration/definition/RootDefinition.java @@ -9,11 +9,10 @@ import java.util.List; -import org.wildfly.common.Assert; - import io.quarkus.gizmo.FieldDescriptor; import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; +import io.smallrye.common.constraint.Assert; /** * diff --git a/core/deployment/src/main/java/io/quarkus/deployment/configuration/matching/ConfigPatternMap.java b/core/deployment/src/main/java/io/quarkus/deployment/configuration/matching/ConfigPatternMap.java index fbf525fc1c6bf..b0d941ad66d02 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/configuration/matching/ConfigPatternMap.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/configuration/matching/ConfigPatternMap.java @@ -6,9 +6,8 @@ import java.util.TreeMap; import java.util.function.BiFunction; -import org.wildfly.common.Assert; - import io.quarkus.runtime.configuration.NameIterator; +import io.smallrye.common.constraint.Assert; /** * A pattern-matching mapping of configuration key pattern to value. 
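For reference on the `ClassLoadingConfig` migration shown above (legacy `@ConfigRoot` class with `@ConfigItem` fields converted to a `@ConfigMapping` interface), here is a minimal sketch of the same pattern. The prefix and property names are hypothetical and only illustrate the interface style; they are not part of this change.

```java
import java.util.List;
import java.util.Optional;

import io.quarkus.runtime.annotations.ConfigPhase;
import io.quarkus.runtime.annotations.ConfigRoot;
import io.smallrye.config.ConfigMapping;
import io.smallrye.config.WithDefault;

/**
 * Hypothetical build-time config root in the interface-based @ConfigMapping style.
 */
@ConfigRoot(phase = ConfigPhase.BUILD_TIME)
@ConfigMapping(prefix = "quarkus.example-loader")
public interface ExampleLoaderConfig {

    /**
     * Artifacts to treat specially, as "groupId:artifactId" coordinates.
     * Replaces a public Optional field annotated with @ConfigItem in the old class-based style.
     */
    Optional<List<String>> specialArtifacts();

    /**
     * Whether the special handling is applied at all.
     */
    @WithDefault("true")
    boolean enabled();
}
```

Defaults move from `@ConfigItem(defaultValue = ...)` to `@WithDefault`, and optional collection properties simply omit the default, as in the migrated `parentFirstArtifacts()` and `removedArtifacts()` methods above.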
diff --git a/core/deployment/src/main/java/io/quarkus/deployment/configuration/matching/FieldContainer.java b/core/deployment/src/main/java/io/quarkus/deployment/configuration/matching/FieldContainer.java index 83235050cf98b..2b306bbf6510c 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/configuration/matching/FieldContainer.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/configuration/matching/FieldContainer.java @@ -1,9 +1,8 @@ package io.quarkus.deployment.configuration.matching; -import org.wildfly.common.Assert; - import io.quarkus.deployment.configuration.definition.ClassDefinition; import io.quarkus.deployment.configuration.definition.RootDefinition; +import io.smallrye.common.constraint.Assert; /** * diff --git a/core/deployment/src/main/java/io/quarkus/deployment/configuration/matching/MapContainer.java b/core/deployment/src/main/java/io/quarkus/deployment/configuration/matching/MapContainer.java index 9360e81635e07..126b6dae51415 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/configuration/matching/MapContainer.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/configuration/matching/MapContainer.java @@ -1,8 +1,7 @@ package io.quarkus.deployment.configuration.matching; -import org.wildfly.common.Assert; - import io.quarkus.deployment.configuration.definition.ClassDefinition; +import io.smallrye.common.constraint.Assert; /** * A map container. diff --git a/core/deployment/src/main/java/io/quarkus/deployment/dev/testing/AppMakerHelper.java b/core/deployment/src/main/java/io/quarkus/deployment/dev/testing/AppMakerHelper.java new file mode 100644 index 0000000000000..04b666f3335b8 --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/dev/testing/AppMakerHelper.java @@ -0,0 +1,362 @@ +package io.quarkus.deployment.dev.testing; + +import static io.quarkus.test.common.PathTestHelper.getAppClassLocationForTestLocation; +import static io.quarkus.test.common.PathTestHelper.getTestClassesLocation; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.function.Consumer; +import java.util.stream.Collectors; + +import jakarta.enterprise.inject.Alternative; + +import org.jboss.jandex.Index; + +import io.quarkus.bootstrap.BootstrapConstants; +import io.quarkus.bootstrap.BootstrapException; +import io.quarkus.bootstrap.app.AugmentAction; +import io.quarkus.bootstrap.app.CuratedApplication; +import io.quarkus.bootstrap.app.QuarkusBootstrap; +import io.quarkus.bootstrap.app.StartupAction; +import io.quarkus.bootstrap.model.ApplicationModel; +import io.quarkus.bootstrap.resolver.AppModelResolverException; +import io.quarkus.bootstrap.runner.Timing; +import io.quarkus.bootstrap.utils.BuildToolHelper; +import io.quarkus.bootstrap.workspace.ArtifactSources; +import io.quarkus.bootstrap.workspace.SourceDir; +import io.quarkus.bootstrap.workspace.WorkspaceModule; +import io.quarkus.commons.classloading.ClassLoaderHelper; +import io.quarkus.paths.PathList; +import io.quarkus.runtime.LaunchMode; +import io.quarkus.test.common.PathTestHelper; +import io.quarkus.test.common.RestorableSystemProperties; +import io.quarkus.test.junit.QuarkusTestProfile; +import io.quarkus.test.junit.TestBuildChainFunction; + +public class AppMakerHelper { + + // Copied from superclass of thing we copied + protected static final String 
TEST_LOCATION = "test-location"; + protected static final String TEST_CLASS = "test-class"; + protected static final String TEST_PROFILE = "test-profile"; + /// end copied + + private static Class quarkusTestMethodContextClass; + private static boolean hasPerTestResources; + + private static List testMethodInvokers; + private Runnable configCleanup; + + public static class PrepareResult { + protected final AugmentAction augmentAction; + public final QuarkusTestProfile profileInstance; + protected final CuratedApplication curatedApplication; + public final Path testClassLocation; + + public PrepareResult(AugmentAction augmentAction, QuarkusTestProfile profileInstance, + CuratedApplication curatedApplication, Path testClassLocation) { + System.out.println("PrepareResult" + augmentAction + ": " + profileInstance + " test class " + testClassLocation); + + this.augmentAction = augmentAction; + this.profileInstance = profileInstance; + this.curatedApplication = curatedApplication; + this.testClassLocation = testClassLocation; + } + } + + // TODO Re-used from AbstractJvmQuarkusTestExtension + protected ApplicationModel getGradleAppModelForIDE(Path projectRoot) throws IOException, AppModelResolverException { + return System.getProperty(BootstrapConstants.SERIALIZED_TEST_APP_MODEL) == null + ? BuildToolHelper.enableGradleAppModelForTest(projectRoot) + : null; + } + + private PrepareResult createAugmentor(final Class requiredTestClass, String displayName, boolean isContinuousTesting, + CuratedApplication curatedApplication, + Class profile, + Collection shutdownTasks) throws Exception { + + System.out.println("HOLLY WAHOO creating augmentor for " + requiredTestClass); + + if (curatedApplication == null) { + curatedApplication = makeCuratedApplication(requiredTestClass, displayName, isContinuousTesting, shutdownTasks); + } + Path testClassLocation = getTestClassLocationIncludingPossibilityOfGradleModel(requiredTestClass); + + // clear the test.url system property as the value leaks into the run when using different profiles + System.clearProperty("test.url"); + Map additional = new HashMap<>(); + + QuarkusTestProfile profileInstance = null; + if (profile != null) { + profileInstance = profile.getConstructor() + .newInstance(); + // TODO we make this twice, also in abstractjvmextension can we streamline that? + // TODO We can't get rid of the one here because config needs to be set before augmentation, but maybe we can get rid of it on the test side? 
+ additional.putAll(profileInstance.getConfigOverrides()); + if (!profileInstance.getEnabledAlternatives() + .isEmpty()) { + additional.put("quarkus.arc.selected-alternatives", profileInstance.getEnabledAlternatives() + .stream() + .peek((c) -> { + if (!c.isAnnotationPresent(Alternative.class)) { + throw new RuntimeException( + "Enabled alternative " + c + " is not annotated with @Alternative"); + } + }) + .map(Class::getName) + .collect(Collectors.joining(","))); + } + if (profileInstance.disableApplicationLifecycleObservers()) { + additional.put("quarkus.arc.test.disable-application-lifecycle-observers", "true"); + } + if (profileInstance.getConfigProfile() != null) { + additional.put(LaunchMode.TEST.getProfileKey(), profileInstance.getConfigProfile()); + } + //we just use system properties for now + //it's a lot simpler + // TODO this is really ugly, set proper config on the app + // Sadly, I don't think #42715 helps, because it kicks in after this code + configCleanup = RestorableSystemProperties.setProperties(additional)::close; + } + + if (curatedApplication + .getApplicationModel().getRuntimeDependencies().isEmpty()) { + throw new RuntimeException( + "The tests were run against a directory that does not contain a Quarkus project. Please ensure that the test is configured to use the proper working directory."); + } + + // TODO should we do this here, or when we prepare the curated application? + // Or is it needed at all? + Index testClassesIndex = TestClassIndexer.indexTestClasses(testClassLocation); + // we need to write the Index to make it reusable from other parts of the testing infrastructure that run in different ClassLoaders + TestClassIndexer.writeIndex(testClassesIndex, testClassLocation, requiredTestClass); + + Timing.staticInitStarted(curatedApplication + .getOrCreateBaseRuntimeClassLoader(), + curatedApplication + .getQuarkusBootstrap() + .isAuxiliaryApplication()); + final Map props = new HashMap<>(); + props.put(TEST_LOCATION, testClassLocation); + props.put(TEST_CLASS, requiredTestClass); + if (profile != null) { + props.put(TEST_PROFILE, profile.getName()); + } + return new PrepareResult(curatedApplication + .createAugmentor(TestBuildChainFunction.class.getName(), props), profileInstance, + curatedApplication, testClassLocation); + } + + public CuratedApplication makeCuratedApplication(Class requiredTestClass, String displayName, + boolean isContinuousTesting, + Collection shutdownTasks) throws IOException, AppModelResolverException, BootstrapException { + final PathList.Builder rootBuilder = PathList.builder(); + Consumer addToBuilderIfConditionMet = path -> { + if (path != null && Files.exists(path) && !rootBuilder.contains(path)) { + rootBuilder.add(path); + } + }; + + final Path testClassLocation; + final Path appClassLocation; + final Path projectRoot = Paths.get("").normalize().toAbsolutePath(); + + final ApplicationModel gradleAppModel = getGradleAppModelForIDE(projectRoot); + // If gradle project running directly with IDE + if (gradleAppModel != null && gradleAppModel.getApplicationModule() != null) { + final WorkspaceModule module = gradleAppModel.getApplicationModule(); + final String testClassFileName = ClassLoaderHelper + .fromClassNameToResourceName(requiredTestClass.getName()); + Path testClassesDir = null; + for (String classifier : module.getSourceClassifiers()) { + final ArtifactSources sources = module.getSources(classifier); + if (sources.isOutputAvailable() && sources.getOutputTree().contains(testClassFileName)) { + for (SourceDir src : 
sources.getSourceDirs()) { + addToBuilderIfConditionMet.accept(src.getOutputDir()); + if (Files.exists(src.getOutputDir().resolve(testClassFileName))) { + testClassesDir = src.getOutputDir(); + } + } + for (SourceDir src : sources.getResourceDirs()) { + addToBuilderIfConditionMet.accept(src.getOutputDir()); + } + for (SourceDir src : module.getMainSources().getSourceDirs()) { + addToBuilderIfConditionMet.accept(src.getOutputDir()); + } + for (SourceDir src : module.getMainSources().getResourceDirs()) { + addToBuilderIfConditionMet.accept(src.getOutputDir()); + } + break; + } + } + validateTestDir(requiredTestClass, testClassesDir, module); + testClassLocation = testClassesDir; + + } else { + if (System.getProperty(BootstrapConstants.OUTPUT_SOURCES_DIR) != null) { + final String[] sourceDirectories = System.getProperty(BootstrapConstants.OUTPUT_SOURCES_DIR).split(","); + for (String sourceDirectory : sourceDirectories) { + final Path directory = Paths.get(sourceDirectory); + addToBuilderIfConditionMet.accept(directory); + } + } + + testClassLocation = getTestClassesLocation(requiredTestClass); + appClassLocation = getAppClassLocationForTestLocation(testClassLocation); + if (!appClassLocation.equals(testClassLocation)) { + addToBuilderIfConditionMet.accept(testClassLocation); + // if test classes is a dir, we should also check whether test resources dir exists as a separate dir (gradle) + // TODO: this whole app/test path resolution logic is pretty dumb, it needs be re-worked using proper workspace discovery + final Path testResourcesLocation = PathTestHelper.getResourcesForClassesDirOrNull(testClassLocation, "test"); + addToBuilderIfConditionMet.accept(testResourcesLocation); + } + + addToBuilderIfConditionMet.accept(appClassLocation); + final Path appResourcesLocation = PathTestHelper.getResourcesForClassesDirOrNull(appClassLocation, "main"); + addToBuilderIfConditionMet.accept(appResourcesLocation); + } + + CuratedApplication curatedApplication = QuarkusBootstrap.builder() + //.setExistingModel(gradleAppModel) unfortunately this model is not re-usable due to PathTree serialization by Gradle + .setBaseName(displayName + " (QuarkusTest)") + .setIsolateDeployment(true) + .setMode(QuarkusBootstrap.Mode.TEST) + .setTest(true) + .setAuxiliaryApplication(isContinuousTesting) + + .setTargetDirectory(PathTestHelper.getProjectBuildDir(projectRoot, testClassLocation)) + .setProjectRoot(projectRoot) + .setApplicationRoot(rootBuilder.build()) + .build() + .bootstrap(); + shutdownTasks.add(curatedApplication::close); + + // TODO can we consolidate some of this with TestSupport? 
The code over there is + // final QuarkusBootstrap.Builder bootstrapConfig = curatedApplication.getQuarkusBootstrap().clonedBuilder() + // .setMode(QuarkusBootstrap.Mode.TEST) + // .setAssertionsEnabled(true) + // .setDisableClasspathCache(false) + // .setIsolateDeployment(true) + // .setExistingModel(null) + // .setBaseClassLoader(getClass().getClassLoader().getParent()) + // .setTest(true) + // .setAuxiliaryApplication(true) + // .setHostApplicationIsTestOnly(devModeType == DevModeType.TEST_ONLY) + // .setProjectRoot(projectDir) + // .setApplicationRoot(getRootPaths(module, mainModule)) + // .clearLocalArtifacts(); + + return curatedApplication; + } + + private Path getTestClassLocationIncludingPossibilityOfGradleModel(Class requiredTestClass) + throws IOException, AppModelResolverException, BootstrapException { + + final Path projectRoot = Paths.get("").normalize().toAbsolutePath(); + + final Path testClassLocation; + + final ApplicationModel gradleAppModel = getGradleAppModelForIDE(projectRoot); + // If gradle project running directly with IDE + if (gradleAppModel != null && gradleAppModel.getApplicationModule() != null) { + final WorkspaceModule module = gradleAppModel.getApplicationModule(); + final String testClassFileName = ClassLoaderHelper + .fromClassNameToResourceName(requiredTestClass.getName()); + Path testClassesDir = null; + for (String classifier : module.getSourceClassifiers()) { + final ArtifactSources sources = module.getSources(classifier); + if (sources.isOutputAvailable() && sources.getOutputTree().contains(testClassFileName)) { + for (SourceDir src : sources.getSourceDirs()) { + if (Files.exists(src.getOutputDir().resolve(testClassFileName))) { + testClassesDir = src.getOutputDir(); + } + } + + break; + } + } + validateTestDir(requiredTestClass, testClassesDir, module); + testClassLocation = testClassesDir; + + } else { + testClassLocation = getTestClassesLocation(requiredTestClass); + } + + return testClassLocation; + } + + private static void validateTestDir(Class requiredTestClass, Path testClassesDir, WorkspaceModule module) { + if (testClassesDir == null) { + final StringBuilder sb = new StringBuilder(); + sb.append("Failed to locate ").append(requiredTestClass.getName()).append(" in "); + for (String classifier : module.getSourceClassifiers()) { + final ArtifactSources sources = module.getSources(classifier); + if (sources.isOutputAvailable()) { + for (SourceDir d : sources.getSourceDirs()) { + if (Files.exists(d.getOutputDir())) { + sb.append(System.lineSeparator()).append(d.getOutputDir()); + } + } + } + } + throw new RuntimeException(sb.toString()); + } + } + + // TODO surely there's a cleaner way to see if it's continuous testing? + // TODO should we be doing something with these unused arguments? + // Note that curated application cannot be re-used between restarts, so this application + // should have been freshly created + // TODO maybe don't even accept one? + public StartupAction getStartupAction(Class testClass, CuratedApplication curatedApplication, + boolean isContinuousTesting, Class ignoredProfile) + throws Exception { + + Class profile = ignoredProfile; + // TODO do we want any of these? 
+ Collection shutdownTasks = new HashSet(); + // TODO work out a good display name + PrepareResult result = createAugmentor(testClass, "(QuarkusTest)", isContinuousTesting, curatedApplication, profile, + shutdownTasks); + AugmentAction augmentAction = result.augmentAction; + + try { + System.out.println("HOLLY about to make app for " + testClass); + StartupAction startupAction = augmentAction.createInitialRuntimeApplication(); + + // TODO this seems to be safe to do because the classloaders are the same + // TODO not doing it startupAction.store(); + System.out.println("HOLLY did store " + startupAction); + + // TODO this is ugly, there must be a better way? + // TODO tests to run to check changes here are integration-tests/elytron-resteasy-reactive and SharedProfileTestCase in integration-tests/main + + return startupAction; + } catch (Throwable e) { + // Errors at this point just get reported as org.junit.platform.commons.JUnitException: TestEngine with ID 'junit-jupiter' failed to discover tests + // Give a little help to debuggers + System.out.println("HOLLY IT ALL WENT WRONG + + e" + e); + e.printStackTrace(); + throw e; + + } finally { + if (configCleanup != null) { + configCleanup.run(); + } + } + + } + + // TODO prepareResult is no longer used, so we can get rid of this whole record + public record DumbHolder(StartupAction startupAction, PrepareResult prepareResult) { + } + +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/dev/testing/CollaboratingClassLoader.java b/core/deployment/src/main/java/io/quarkus/deployment/dev/testing/CollaboratingClassLoader.java new file mode 100644 index 0000000000000..68f73e136a385 --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/dev/testing/CollaboratingClassLoader.java @@ -0,0 +1,66 @@ +package io.quarkus.deployment.dev.testing; + +import java.io.Closeable; +import java.lang.reflect.InvocationTargetException; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + +/** + * Gives JUnitTestRunner visibility of the FacadeClassLoader if it's in a different test-framework module. + * Unfortunately, this technique isn't working for multimodule projects. + * TODO reinstate this, and get DevMojoIT tests passing with it + */ +public abstract class CollaboratingClassLoader extends ClassLoader implements Closeable { + private static Map cls = new HashMap<>(); + + public CollaboratingClassLoader(ClassLoader parent) { + + super(parent); + } + + public static CollaboratingClassLoader construct(ClassLoader parent) { + // TODO what happens when it's not available, becuse there's no JUnit 5? Callers need to just not set a TCCL, I guess? 
+ // TODO tidy this up + if (cls.get(parent) == null) { + System.out.println("HOLLY constryucting collaborating classloader "); + try { + System.out.println( + "CollaboratingClassLoader.construct using class " + CollaboratingClassLoader.class.getClassLoader()); + + CollaboratingClassLoader cl = (CollaboratingClassLoader) Class + .forName("io.quarkus.test.junit.classloading.FacadeClassLoader") + .getConstructor(ClassLoader.class) + .newInstance(parent); + cls.put(parent, cl); + return cl; + } catch (InstantiationException e) { + throw new RuntimeException(e); + } catch (IllegalAccessException e) { + throw new RuntimeException(e); + } catch (InvocationTargetException e) { + throw new RuntimeException(e); + } catch (NoSuchMethodException e) { + throw new RuntimeException(e); + } catch (ClassNotFoundException e) { + throw new RuntimeException(e); + } + } else { + return cls.get(parent); + } + } + + public abstract void setAuxiliaryApplication(boolean b); + + public void setProfiles(Map profiles) { + } + + public void setClassPath(String classesPath) { + } + + public void setQuarkusTestClasses(Set quarkusTestClasses) { + } + + public void setQuarkusMainTestClasses(Set quarkusMainTestClasses) { + } +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/dev/testing/CurrentTestApplication.java b/core/deployment/src/main/java/io/quarkus/deployment/dev/testing/CurrentTestApplication.java index 67c405779b8f3..169d124879501 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/dev/testing/CurrentTestApplication.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/dev/testing/CurrentTestApplication.java @@ -6,6 +6,8 @@ /** * This class is a bit of a hack, it provides a way to pass in the current curratedApplication into the TestExtension + * TODO It is only needed for QuarkusMainTest, so we may be able to find a better way. + * For example, what about JUnit state? 
*/ public class CurrentTestApplication implements Consumer { public static volatile CuratedApplication curatedApplication; diff --git a/core/deployment/src/main/java/io/quarkus/deployment/dev/testing/DotNames.java b/core/deployment/src/main/java/io/quarkus/deployment/dev/testing/DotNames.java new file mode 100644 index 0000000000000..bf7cbefa1041b --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/deployment/dev/testing/DotNames.java @@ -0,0 +1,16 @@ +package io.quarkus.deployment.dev.testing; + +import org.jboss.jandex.DotName; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.api.extension.RegisterExtension; + +public final class DotNames { + + private DotNames() { + } + + public static final DotName EXTEND_WITH = DotName.createSimple(ExtendWith.class.getName()); + public static final DotName REGISTER_EXTENSION = DotName.createSimple(RegisterExtension.class.getName()); + // TODO this leaks knowledge of the junit5 module into this module + public static final DotName QUARKUS_TEST_EXTENSION = DotName.createSimple("io.quarkus.test.junit.QuarkusTestExtension"); +} diff --git a/core/deployment/src/main/java/io/quarkus/deployment/dev/testing/JunitTestRunner.java b/core/deployment/src/main/java/io/quarkus/deployment/dev/testing/JunitTestRunner.java index 6f0f4bce6ea97..3826813c93613 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/dev/testing/JunitTestRunner.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/dev/testing/JunitTestRunner.java @@ -12,6 +12,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.Comparator; +import java.util.Date; import java.util.Deque; import java.util.HashMap; import java.util.HashSet; @@ -22,7 +23,6 @@ import java.util.Set; import java.util.concurrent.LinkedBlockingDeque; import java.util.concurrent.atomic.AtomicReference; -import java.util.function.Consumer; import java.util.function.Function; import java.util.regex.Pattern; import java.util.stream.Collectors; @@ -74,6 +74,7 @@ import io.quarkus.deployment.util.IoUtil; import io.quarkus.dev.console.QuarkusConsole; import io.quarkus.dev.testing.TracingHandler; +import io.quarkus.test.junit.classloading.FacadeClassLoader; /** * This class is responsible for running a single run of JUnit tests. @@ -112,6 +113,7 @@ public class JunitTestRunner { private volatile boolean testsRunning = false; private volatile boolean aborted; + private QuarkusClassLoader deploymentClassLoader; public JunitTestRunner(Builder builder) { this.runId = builder.runId; @@ -135,22 +137,34 @@ public JunitTestRunner(Builder builder) { public Runnable prepare() { try { long start = System.currentTimeMillis(); - ClassLoader old = Thread.currentThread().getContextClassLoader(); + ClassLoader old = Thread.currentThread() + .getContextClassLoader(); + System.out.println("HOLLU junit prepare, old TCCL is " + old); + QuarkusClassLoader tcl = testApplication.createDeploymentClassLoader(); + deploymentClassLoader = tcl; LogCapturingOutputFilter logHandler = new LogCapturingOutputFilter(testApplication, true, true, - TestSupport.instance().get()::isDisplayTestOutput); - Thread.currentThread().setContextClassLoader(tcl); - Consumer currentTestAppConsumer = (Consumer) tcl.loadClass(CurrentTestApplication.class.getName()) - .getDeclaredConstructor().newInstance(); - currentTestAppConsumer.accept(testApplication); + TestSupport.instance() + .get()::isDisplayTestOutput); + // TODO do we want to do this setting of the TCCL? I think it just makes problems? 
+ Thread.currentThread() + .setContextClassLoader(tcl); + System.out.println("139 HOLLY junit runner set classloader to deployment TCCL" + tcl); Set allDiscoveredIds = new HashSet<>(); Set dynamicIds = new HashSet<>(); DiscoveryResult quarkusTestClasses = discoverTestClasses(); - Launcher launcher = LauncherFactory.create(LauncherConfig.builder().build()); + Launcher launcher = LauncherFactory.create(LauncherConfig.builder() + .build()); LauncherDiscoveryRequestBuilder launchBuilder = LauncherDiscoveryRequestBuilder.request() - .selectors(quarkusTestClasses.testClasses.stream().map(DiscoverySelectors::selectClass) + .selectors(quarkusTestClasses.testClasses.stream() + .map(DiscoverySelectors::selectClass) + .collect(Collectors.toList())); + + System.out.println("HOLLY in prepare, launch is " + + quarkusTestClasses.testClasses.stream() + .map(DiscoverySelectors::selectClass) .collect(Collectors.toList())); launchBuilder.filters(new PostDiscoveryFilter() { @Override @@ -188,13 +202,15 @@ public FilterResult apply(TestDescriptor testDescriptor) { .build(); TestPlan testPlan = launcher.discover(request); long toRun = testPlan.countTestIdentifiers(TestIdentifier::isTest); + System.out.println("HOLLY to run is " + toRun); for (TestRunListener listener : listeners) { listener.runStarted(toRun); } return new Runnable() { @Override public void run() { - final ClassLoader origCl = Thread.currentThread().getContextClassLoader(); + final ClassLoader origCl = Thread.currentThread() + .getContextClassLoader(); try { synchronized (JunitTestRunner.this) { testsRunning = true; @@ -223,7 +239,9 @@ public void quarkusStarting() { Map> resultsByClass = new HashMap<>(); AtomicReference currentNonDynamicTest = new AtomicReference<>(); - Thread.currentThread().setContextClassLoader(tcl); + Thread.currentThread() + .setContextClassLoader(tcl); + System.out.println("224 HOLLY junit runner set classloader to " + tcl); launcher.execute(testPlan, new TestExecutionListener() { @Override @@ -244,6 +262,7 @@ public void executionStarted(TestIdentifier testIdentifier) { for (TestRunListener listener : listeners) { listener.testStarted(testIdentifier, testClassName); } + System.out.println("HOLLY runner pushing onto touched "); touchedClasses.push(Collections.synchronizedSet(new HashSet<>())); } @@ -255,6 +274,7 @@ public void executionSkipped(TestIdentifier testIdentifier, String reason) { touchedClasses.pop(); Class testClass = getTestClassFromSource(testIdentifier.getSource()); String displayName = getDisplayNameFromIdentifier(testIdentifier, testClass); + System.out.println("HOLLY skipping " + displayName); UniqueId id = UniqueId.parse(testIdentifier.getUniqueId()); if (testClass != null) { Map results = resultsByClass.computeIfAbsent(testClass.getName(), @@ -284,6 +304,13 @@ public void dynamicTestRegistered(TestIdentifier testIdentifier) { @Override public void executionFinished(TestIdentifier testIdentifier, TestExecutionResult testExecutionResult) { + System.out.println("execution finished, " + testExecutionResult); + if (testExecutionResult.getThrowable() + .isPresent()) { + testExecutionResult.getThrowable() + .get() + .printStackTrace(); + } if (aborted) { return; } @@ -291,6 +318,7 @@ public void executionFinished(TestIdentifier testIdentifier, Set touched = touchedClasses.pop(); Class testClass = getTestClassFromSource(testIdentifier.getSource()); String displayName = getDisplayNameFromIdentifier(testIdentifier, testClass); + System.out.println("execution finished display name was " + displayName); UniqueId 
id = UniqueId.parse(testIdentifier.getUniqueId()); if (testClass == null) { @@ -306,7 +334,9 @@ public void executionFinished(TestIdentifier testIdentifier, if (startupClasses.get() != null) { touched.addAll(startupClasses.get()); } - if (testIdentifier.getSource().map(ClassSource.class::isInstance).orElse(false)) { + if (testIdentifier.getSource() + .map(ClassSource.class::isInstance) + .orElse(false)) { testClassUsages.updateTestData(testClassName, touched); } else { testClassUsages.updateTestData(testClassName, id, touched); @@ -333,15 +363,18 @@ public void executionFinished(TestIdentifier testIdentifier, RuntimeException failure = new RuntimeException("A child test failed"); failure.setStackTrace(new StackTraceElement[0]); results.put(id, - new TestResult(currentNonDynamicTest.get().getDisplayName(), + new TestResult(currentNonDynamicTest.get() + .getDisplayName(), result.getTestClass(), toTagList(testIdentifier), currentNonDynamicTest.get().getUniqueIdObject(), TestExecutionResult.failed(failure), List.of(), false, runId, 0, false)); - results.put(UniqueId.parse(currentNonDynamicTest.get().getUniqueId()), result); + results.put(UniqueId.parse(currentNonDynamicTest.get() + .getUniqueId()), result); } else if (testExecutionResult.getStatus() == TestExecutionResult.Status.FAILED) { - Throwable throwable = testExecutionResult.getThrowable().get(); + Throwable throwable = testExecutionResult.getThrowable() + .get(); trimStackTrace(testClass, throwable); for (var i : throwable.getSuppressed()) { trimStackTrace(testClass, i); @@ -367,7 +400,8 @@ public void executionFinished(TestIdentifier testIdentifier, } } - Throwable throwable = testExecutionResult.getThrowable().get(); + Throwable throwable = testExecutionResult.getThrowable() + .get(); trimStackTrace(testClass, throwable); for (var i : throwable.getSuppressed()) { trimStackTrace(testClass, i); @@ -396,10 +430,11 @@ public void reportingEntryPublished(TestIdentifier testIdentifier, ReportEntry e } } finally { try { - currentTestAppConsumer.accept(null); TracingHandler.setTracingHandler(null); QuarkusConsole.removeOutputFilter(logHandler); - Thread.currentThread().setContextClassLoader(old); + Thread.currentThread() + .setContextClassLoader(old); + System.out.println("398 HOLLY junit runner set classloader to old " + old); tcl.close(); try { quarkusTestClasses.close(); @@ -407,7 +442,9 @@ public void reportingEntryPublished(TestIdentifier testIdentifier, ReportEntry e throw new RuntimeException(e); } } finally { - Thread.currentThread().setContextClassLoader(origCl); + Thread.currentThread() + .setContextClassLoader(origCl); + System.out.println("406 HOLLY junit runner TCCL set classloader to orig " + origCl); synchronized (JunitTestRunner.this) { testsRunning = false; if (aborted) { @@ -440,9 +477,13 @@ private Class getTestClassFromSource(Optional optionalTestSource) return ((ClassSource) testSource).getJavaClass(); } else if (testSource instanceof MethodSource) { return ((MethodSource) testSource).getJavaClass(); - } else if (testSource.getClass().getName().equals(ARCHUNIT_FIELDSOURCE_FQCN)) { + } else if (testSource.getClass() + .getName() + .equals(ARCHUNIT_FIELDSOURCE_FQCN)) { try { - return (Class) testSource.getClass().getMethod("getJavaClass").invoke(testSource); + return (Class) testSource.getClass() + .getMethod("getJavaClass") + .invoke(testSource); } catch (ReflectiveOperationException e) { log.warnf(e, "Failed to read javaClass reflectively from %s. 
ArchUnit >= 0.23.0 is required.", testSource); } @@ -452,12 +493,16 @@ private Class getTestClassFromSource(Optional optionalTestSource) } private String getDisplayNameFromIdentifier(TestIdentifier testIdentifier, Class testClass) { - if (testIdentifier.getSource().isPresent() && testClass != null) { - var testSource = testIdentifier.getSource().get(); + if (testIdentifier.getSource() + .isPresent() && testClass != null) { + var testSource = testIdentifier.getSource() + .get(); if (testSource instanceof ClassSource) { return testIdentifier.getDisplayName(); } else if (testSource instanceof MethodSource - || testSource.getClass().getName().equals(ARCHUNIT_FIELDSOURCE_FQCN)) { + || testSource.getClass() + .getName() + .equals(ARCHUNIT_FIELDSOURCE_FQCN)) { return testClass.getSimpleName() + "#" + testIdentifier.getDisplayName(); } } @@ -472,7 +517,8 @@ private void trimStackTrace(Class testClass, Throwable throwable) { StackTraceElement[] st = cause.getStackTrace(); for (int i = st.length - 1; i >= 0; --i) { StackTraceElement elem = st[i]; - if (elem.getClassName().equals(testClass.getName())) { + if (elem.getClassName() + .equals(testClass.getName())) { StackTraceElement[] newst = new StackTraceElement[i + 1]; System.arraycopy(st, 0, newst, 0, i + 1); st = newst; @@ -484,7 +530,8 @@ private void trimStackTrace(Class testClass, Throwable throwable) { //TODO: this should be pluggable for (int i = st.length - 1; i >= 0; --i) { StackTraceElement elem = st[i]; - if (elem.getClassName().startsWith("io.restassured")) { + if (elem.getClassName() + .startsWith("io.restassured")) { StackTraceElement[] newst = new StackTraceElement[st.length - i]; System.arraycopy(st, i, newst, 0, st.length - i); st = newst; @@ -524,15 +571,22 @@ private Map toResultsMap( List failing = new ArrayList<>(); List skipped = new ArrayList<>(); long time = 0; - for (TestResult i : Optional.ofNullable(resultsByClass.get(clazz)).orElse(Collections.emptyMap()).values()) { - if (i.getTestExecutionResult().getStatus() == TestExecutionResult.Status.FAILED) { + for (TestResult i : Optional.ofNullable(resultsByClass.get(clazz)) + .orElse(Collections.emptyMap()) + .values()) { + if (i.getTestExecutionResult() + .getStatus() == TestExecutionResult.Status.FAILED) { failing.add(i); - } else if (i.getTestExecutionResult().getStatus() == TestExecutionResult.Status.ABORTED) { + } else if (i.getTestExecutionResult() + .getStatus() == TestExecutionResult.Status.ABORTED) { skipped.add(i); } else { passing.add(i); } - if (i.getUniqueId().getLastSegment().getType().equals("class")) { + if (i.getUniqueId() + .getLastSegment() + .getType() + .equals("class")) { time = i.time; } } @@ -542,45 +596,83 @@ private Map toResultsMap( } private DiscoveryResult discoverTestClasses() { + System.out.println(new Date() + "533 HOLLY doing discovery"); //maven has a lot of rules around this and is configurable //for now this is out of scope, we are just going to do annotation based discovery //we will need to fix this sooner rather than later though //we also only run tests from the current module, which we can also revisit later + + // TODO consolidate logic here with facadeclassloader, which is trying to solve similar problems; maybe even share the canary loader class? 
Indexer indexer = new Indexer(); - moduleInfo.getTest().ifPresent(test -> { - try (Stream files = Files.walk(Paths.get(test.getClassesPath()))) { - files.filter(s -> s.getFileName().toString().endsWith(".class")).forEach(s -> { - try (InputStream in = Files.newInputStream(s)) { - indexer.index(in); + moduleInfo.getTest() + .ifPresent(test -> { + System.out.println("HOLLY classes path is " + test.getClassesPath()); + try (Stream files = Files.walk(Paths.get(test.getClassesPath()))) { + files.filter(s -> s.getFileName() + .toString() + .endsWith(".class")) + .forEach(s -> { + try (InputStream in = Files.newInputStream(s)) { + indexer.index(in); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); } catch (IOException e) { throw new RuntimeException(e); } }); - } catch (IOException e) { - throw new RuntimeException(e); - } - }); Index index = indexer.complete(); + + System.out.println("HOLLY index found known classes " + Arrays.toString(index.getKnownClasses() + .toArray())); //we now have all the classes by name //these tests we never run Set integrationTestClasses = new HashSet<>(); for (AnnotationInstance i : index.getAnnotations(QUARKUS_INTEGRATION_TEST)) { - DotName name = i.target().asClass().name(); + DotName name = i.target() + .asClass() + .name(); integrationTestClasses.add(name.toString()); for (ClassInfo clazz : index.getAllKnownSubclasses(name)) { - integrationTestClasses.add(clazz.name().toString()); + integrationTestClasses.add(clazz.name() + .toString()); } } Set quarkusTestClasses = new HashSet<>(); for (var a : Arrays.asList(QUARKUS_TEST, QUARKUS_MAIN_TEST)) { for (AnnotationInstance i : index.getAnnotations(a)) { - DotName name = i.target().asClass().name(); + + DotName name = i.target() + .asClass() + .name(); quarkusTestClasses.add(name.toString()); for (ClassInfo clazz : index.getAllKnownSubclasses(name)) { - if (!integrationTestClasses.contains(clazz.name().toString())) { - quarkusTestClasses.add(clazz.name().toString()); + if (!integrationTestClasses.contains(clazz.name() + .toString())) { + quarkusTestClasses.add(clazz.name() + .toString()); + } + } + } + } + + Set quarkusMainTestClasses = new HashSet<>(); + // TODO looping over these twice is silly + for (var a : Arrays.asList(QUARKUS_MAIN_TEST)) { + for (AnnotationInstance i : index.getAnnotations(a)) { + + DotName name = i.target() + .asClass() + .name(); + quarkusTestClasses.add(name.toString()); + for (ClassInfo clazz : index.getAllKnownSubclasses(name)) { + if (!integrationTestClasses.contains(clazz.name() + .toString())) { + quarkusMainTestClasses.add(clazz.name() + .toString()); } } } @@ -589,10 +681,14 @@ private DiscoveryResult discoverTestClasses() { Set allTestAnnotations = collectTestAnnotations(index); Set allTestClasses = new HashSet<>(); Map enclosingClasses = new HashMap<>(); + Map profiles = new HashMap<>(); for (DotName annotation : allTestAnnotations) { for (AnnotationInstance instance : index.getAnnotations(annotation)) { - if (instance.target().kind() == AnnotationTarget.Kind.METHOD) { - ClassInfo classInfo = instance.target().asMethod().declaringClass(); + if (instance.target() + .kind() == AnnotationTarget.Kind.METHOD) { + ClassInfo classInfo = instance.target() + .asMethod() + .declaringClass(); allTestClasses.add(classInfo.name()); if (classInfo.declaredAnnotation(NESTED) != null) { var enclosing = classInfo.enclosingClass(); @@ -600,8 +696,11 @@ private DiscoveryResult discoverTestClasses() { enclosingClasses.put(classInfo.name(), enclosing); } } - } else if 
(instance.target().kind() == AnnotationTarget.Kind.FIELD) { - ClassInfo classInfo = instance.target().asField().declaringClass(); + } else if (instance.target() + .kind() == AnnotationTarget.Kind.FIELD) { + ClassInfo classInfo = instance.target() + .asField() + .declaringClass(); allTestClasses.add(classInfo.name()); if (classInfo.declaredAnnotation(NESTED) != null) { var enclosing = classInfo.enclosingClass(); @@ -612,6 +711,9 @@ private DiscoveryResult discoverTestClasses() { } } } + System.out.println("HOLLY all test classes is " + Arrays.toString(allTestClasses.toArray())); + System.out.println("HOLLY quarkus test classes is " + Arrays.toString(quarkusTestClasses.toArray())); + System.out.println("HOLLY integration classes is " + Arrays.toString(integrationTestClasses.toArray())); //now we have all the classes with @Test //figure out which ones we want to actually run Set unitTestClasses = new HashSet<>(); @@ -635,18 +737,88 @@ private DiscoveryResult discoverTestClasses() { if (Modifier.isAbstract(clazz.flags())) { continue; } + + // TODO do we also need to go up the tree? + AnnotationInstance testProfile = clazz.declaredAnnotation(TEST_PROFILE); + // TODO is there a cleaner way to do this? probably! + // It would be nice to pass the value of the annotation, but jandex just gives us a Type, and I don't see how to get a class from it + if (testProfile != null) { + + System.out.println( + "HOLLY profile is " + testProfile + testProfile.value() + .asString() + + testProfile.value() + .asClass() + + testProfile.value() + .name()); + try { + Class clazzy = Class.forName(testProfile.value() + .asString()); + profiles.put(name, + clazzy); + } catch (ClassNotFoundException e) { + throw new RuntimeException(e); + } + + } unitTestClasses.add(name); } List> itClasses = new ArrayList<>(); List> utClasses = new ArrayList<>(); + + // TODO batch by profile and start once for each profile + + // TODO guard to only do this once? is this guard sufficient? see "wrongprofile" in QuarkusTestExtension + + System.out.println( + "HOLLY after the re-add or whatever? quarkus test classes is " + Arrays.toString(quarkusTestClasses.toArray())); + System.out.println("classload thread is " + Thread.currentThread()); + + // TODO what is the right parent? this way of getting an app classloader is super-fragile + // ... but is the system one the one we want? 
surefire sometimes uses an isolated classloader, other launchers might too, but in dev mode we shoud be safe + // ClassLoader parent = this.getClass() + // .getClassLoader() + // .getParent(); + // TODO this seems logical, but DOES NOT makes integration-test/test-extension/tests fail + ClassLoader parent = ClassLoader.getSystemClassLoader(); + System.out.println("HOLLY using parent for facade loader " + parent); + FacadeClassLoader.clearSingleton(); + FacadeClassLoader facadeClassLoader = FacadeClassLoader.instance(parent); // TODO ideally it would be in a different module, but that is hard CollaboratingClassLoader.construct(parent); + + facadeClassLoader.setAuxiliaryApplication(true); + // TODO clumsy hack, consolidate logic properly; this path is nicer if we can do it, because it has the moduleinfo + facadeClassLoader.setProfiles(profiles); + facadeClassLoader.setClassPath(moduleInfo.getMain() + .getClassesPath(), + moduleInfo.getTest() + .get() + .getClassesPath()); + + // TODO this is annoyingly necessary because in dev mode getAnnotations() on the class returns an empty array + facadeClassLoader.setQuarkusTestClasses(quarkusTestClasses); + facadeClassLoader.setQuarkusMainTestClasses(quarkusMainTestClasses); + + Thread.currentThread() + .setContextClassLoader(facadeClassLoader); + for (String i : quarkusTestClasses) { + + // TODO get rid of all the profile stuff, we are doing it twice try { - itClasses.add(Thread.currentThread().getContextClassLoader().loadClass(i)); - } catch (ClassNotFoundException e) { + // We could load these classes directly, since we know the profile and we have a handy interception point; + // but we need to signal to the downstream interceptor that it shouldn't interfere with the classloading + // While we're doing that, we may as well share the classloading logic + itClasses.add(facadeClassLoader.loadClass(i)); + } catch (Exception e) { + e.printStackTrace(); + // TODO handle this exception + System.out.println("HOLLY BAD BAD" + e); log.warnf( "Failed to load test class %s (possibly as it was added after the test run started), it will not be executed this run.", i); + } finally { + // TODO should we do this? 
Thread.currentThread().setContextClassLoader(old); } } itClasses.sort(Comparator.comparing(new Function, String>() { @@ -657,16 +829,24 @@ public String apply(Class aClass) { if (testProfile == null) { return "$$" + aClass.getName(); } - return testProfile.value().asClass().name().toString() + "$$" + aClass.getName(); + return testProfile.value() + .asClass() + .name() + .toString() + "$$" + aClass.getName(); } })); QuarkusClassLoader cl = null; + System.out.println("HOLLY made unit test classes " + Arrays.toString(unitTestClasses.toArray())); if (!unitTestClasses.isEmpty()) { //we need to work the unit test magic //this is a lot more complex //we need to transform the classes to make the tracing magic work - QuarkusClassLoader deploymentClassLoader = (QuarkusClassLoader) Thread.currentThread().getContextClassLoader(); + Set classesToTransform = new HashSet<>(deploymentClassLoader.getReloadableClassNames()); + // this won't be the right classloader for some profiles, but that is ok because it's only for vanilla tests + + System.out.println("HOLLY asking classloader " + deploymentClassLoader); + System.out.println("HOLLY to transform is " + Arrays.toString(classesToTransform.toArray())); Map transformedClasses = new HashMap<>(); for (String i : classesToTransform) { try { @@ -683,9 +863,11 @@ public String apply(Class aClass) { } } cl = testApplication.createDeploymentClassLoader(); + deploymentClassLoader = cl; cl.reset(Collections.emptyMap(), transformedClasses); for (String i : unitTestClasses) { try { + System.out.println("678 HOLLY loaded " + i + " with loader " + cl); utClasses.add(cl.loadClass(i)); } catch (ClassNotFoundException exception) { log.warnf( @@ -727,21 +909,27 @@ private static Set collectTestAnnotations(Index index) { //so we take this into account for (DotName an : metaAnnotations) { for (AnnotationInstance instance : index.getAnnotations(an)) { - if (instance.target().kind() == AnnotationTarget.Kind.CLASS) { - ret.add(instance.target().asClass().name()); + if (instance.target() + .kind() == AnnotationTarget.Kind.CLASS) { + ret.add(instance.target() + .asClass() + .name()); } } } Set processed = new HashSet<>(); processed.addAll(ret); for (ClassInfo clazz : index.getKnownClasses()) { - for (DotName annotation : clazz.annotationsMap().keySet()) { + for (DotName annotation : clazz.annotationsMap() + .keySet()) { if (processed.contains(annotation)) { continue; } processed.add(annotation); try { - Class loadedAnnotation = Thread.currentThread().getContextClassLoader().loadClass(annotation.toString()); + Class loadedAnnotation = Thread.currentThread() + .getContextClassLoader() + .loadClass(annotation.toString()); if (loadedAnnotation.isAnnotationPresent(Testable.class)) { ret.add(annotation); } @@ -794,6 +982,7 @@ public Builder setTestType(TestType testType) { return this; } + // TODO we now ignore what gets set here and make our own, how to handle that? 
public Builder setTestApplication(CuratedApplication testApplication) { this.testApplication = testApplication; return this; @@ -879,11 +1068,16 @@ private RegexFilter(boolean exclude, Pattern pattern) { @Override public FilterResult apply(TestDescriptor testDescriptor) { - if (testDescriptor.getSource().isPresent()) { - if (testDescriptor.getSource().get() instanceof MethodSource) { - MethodSource methodSource = (MethodSource) testDescriptor.getSource().get(); - String name = methodSource.getJavaClass().getName(); - if (pattern.matcher(name).matches()) { + if (testDescriptor.getSource() + .isPresent()) { + if (testDescriptor.getSource() + .get() instanceof MethodSource) { + MethodSource methodSource = (MethodSource) testDescriptor.getSource() + .get(); + String name = methodSource.getJavaClass() + .getName(); + if (pattern.matcher(name) + .matches()) { return FilterResult.includedIf(!exclude); } return FilterResult.includedIf(exclude); @@ -903,12 +1097,17 @@ private class CurrentlyFailingFilter implements PostDiscoveryFilter { @Override public FilterResult apply(TestDescriptor testDescriptor) { - if (testDescriptor.getSource().isPresent()) { - if (testDescriptor.getSource().get() instanceof MethodSource) { - MethodSource methodSource = (MethodSource) testDescriptor.getSource().get(); - - String name = methodSource.getJavaClass().getName(); - Map results = testState.getCurrentResults().get(name); + if (testDescriptor.getSource() + .isPresent()) { + if (testDescriptor.getSource() + .get() instanceof MethodSource) { + MethodSource methodSource = (MethodSource) testDescriptor.getSource() + .get(); + + String name = methodSource.getJavaClass() + .getName(); + Map results = testState.getCurrentResults() + .get(name); if (results == null) { return FilterResult.included("new test"); } @@ -917,7 +1116,8 @@ public FilterResult apply(TestDescriptor testDescriptor) { return FilterResult.included("new test"); } return FilterResult - .includedIf(testResult.getTestExecutionResult().getStatus() == TestExecutionResult.Status.FAILED); + .includedIf(testResult.getTestExecutionResult() + .getStatus() == TestExecutionResult.Status.FAILED); } } return FilterResult.included("not a method"); diff --git a/test-framework/common/src/main/java/io/quarkus/test/common/TestClassIndexer.java b/core/deployment/src/main/java/io/quarkus/deployment/dev/testing/TestClassIndexer.java similarity index 98% rename from test-framework/common/src/main/java/io/quarkus/test/common/TestClassIndexer.java rename to core/deployment/src/main/java/io/quarkus/deployment/dev/testing/TestClassIndexer.java index 59eaac4c89322..ed486855e8c7b 100644 --- a/test-framework/common/src/main/java/io/quarkus/test/common/TestClassIndexer.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/dev/testing/TestClassIndexer.java @@ -1,4 +1,4 @@ -package io.quarkus.test.common; +package io.quarkus.deployment.dev.testing; import static io.quarkus.test.common.PathTestHelper.getTestClassesLocation; @@ -22,6 +22,7 @@ import org.jboss.jandex.UnsupportedVersion; import io.quarkus.fs.util.ZipUtils; +import io.quarkus.test.common.PathTestHelper; public final class TestClassIndexer { diff --git a/core/deployment/src/main/java/io/quarkus/deployment/dev/testing/TestConfig.java b/core/deployment/src/main/java/io/quarkus/deployment/dev/testing/TestConfig.java index f93894a7a1e72..02abd53733599 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/dev/testing/TestConfig.java +++ 
b/core/deployment/src/main/java/io/quarkus/deployment/dev/testing/TestConfig.java @@ -185,7 +185,7 @@ public interface TestConfig { * When the artifact is a {@code container}, this string is passed right after the {@code docker run} command. * When the artifact is a {@code native binary}, this string is passed right after the native binary name. */ - Optional<@WithConverter(TrimmedStringConverter.class) List> argLine(); + Optional<@WithConverter(TrimmedStringConverter.class) String> argLine(); /** * Additional environment variables to be set in the process that {@code @QuarkusIntegrationTest} launches. diff --git a/test-framework/common/src/main/java/io/quarkus/test/common/TestStatus.java b/core/deployment/src/main/java/io/quarkus/deployment/dev/testing/TestStatus.java similarity index 92% rename from test-framework/common/src/main/java/io/quarkus/test/common/TestStatus.java rename to core/deployment/src/main/java/io/quarkus/deployment/dev/testing/TestStatus.java index ebebe30eed078..2c677bb6de63d 100644 --- a/test-framework/common/src/main/java/io/quarkus/test/common/TestStatus.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/dev/testing/TestStatus.java @@ -1,4 +1,4 @@ -package io.quarkus.test.common; +package io.quarkus.deployment.dev.testing; public class TestStatus { diff --git a/core/deployment/src/main/java/io/quarkus/deployment/dev/testing/TestSupport.java b/core/deployment/src/main/java/io/quarkus/deployment/dev/testing/TestSupport.java index da05af9d0b217..f0f71975c1bf0 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/dev/testing/TestSupport.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/dev/testing/TestSupport.java @@ -212,6 +212,8 @@ public void init() { final ApplicationModel testModel = appModelFactory.resolveAppModel().getApplicationModel(); bootstrapConfig.setExistingModel(testModel); + // TODO I don't think we should have both this and AppMakerHelper, doing apparently the same thing? + QuarkusClassLoader.Builder clBuilder = null; var currentParentFirst = curatedApplication.getApplicationModel().getParentFirst(); for (ResolvedDependency d : testModel.getDependencies()) { diff --git a/core/deployment/src/main/java/io/quarkus/deployment/images/ContainerImages.java b/core/deployment/src/main/java/io/quarkus/deployment/images/ContainerImages.java index 7d1be878aa593..5abe63b65654c 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/images/ContainerImages.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/images/ContainerImages.java @@ -7,9 +7,9 @@ *

* For each image, the image name and version are defined as constants:

- * - {@code x_IMAGE_NAME} - the name of the image without the version (e.g. {@code registry.access.redhat.com/ubi8/ubi-minimal}) - * - {@code x_VERSION} - the version of the image (e.g. {@code 8.10}) - * - {@code x} - the full image name (e.g. {@code registry.access.redhat.com/ubi8/ubi-minimal:8.10}) + * - {@code x_IMAGE_NAME} - the name of the image without the version (e.g. {@code registry.access.redhat.com/ubi9/ubi-minimal}) + * - {@code x_VERSION} - the version of the image (e.g. {@code 9.5}) + * - {@code x} - the full image name (e.g. {@code registry.access.redhat.com/ubi9/ubi-minimal:9.5}) */ public class ContainerImages { @@ -21,14 +21,19 @@ public class ContainerImages { public static final String UBI8_VERSION = "8.10"; /** - * UBI 8 version + * UBI 9 version + */ + public static final String UBI9_VERSION = "9.5"; + + /** + * Version used for more UBI8 Java images. */ - public static final String UBI9_VERSION = "9.4"; + public static final String UBI8_JAVA_VERSION = "1.21"; /** - * Version used for more UBI Java images. + * Version used for more UBI9 Java images. */ - public static final String UBI8_JAVA_VERSION = "1.20"; + public static final String UBI9_JAVA_VERSION = "1.21"; /** * Version uses for the native builder image. @@ -66,12 +71,12 @@ public class ContainerImages { // UBI 9 OpenJDK 17 Runtime - https://catalog.redhat.com/software/containers/ubi9/openjdk-17-runtime/61ee7d45384a3eb331996bee public static final String UBI9_JAVA_17_IMAGE_NAME = "registry.access.redhat.com/ubi9/openjdk-17-runtime"; - public static final String UBI9_JAVA_17_VERSION = UBI8_JAVA_VERSION; + public static final String UBI9_JAVA_17_VERSION = UBI9_JAVA_VERSION; public static final String UBI9_JAVA_17 = UBI9_JAVA_17_IMAGE_NAME + ":" + UBI9_JAVA_17_VERSION; // UBI 9 OpenJDK 21 Runtime - https://catalog.redhat.com/software/containers/ubi9/openjdk-21-runtime/6501ce769a0d86945c422d5f public static final String UBI9_JAVA_21_IMAGE_NAME = "registry.access.redhat.com/ubi9/openjdk-21-runtime"; - public static final String UBI9_JAVA_21_VERSION = UBI8_JAVA_VERSION; + public static final String UBI9_JAVA_21_VERSION = UBI9_JAVA_VERSION; public static final String UBI9_JAVA_21 = UBI9_JAVA_21_IMAGE_NAME + ":" + UBI9_JAVA_21_VERSION; // === Source To Image images @@ -81,34 +86,44 @@ public class ContainerImages { public static final String QUARKUS_BINARY_S2I_VERSION = "2.0"; public static final String QUARKUS_BINARY_S2I = QUARKUS_BINARY_S2I_IMAGE_NAME + ":" + QUARKUS_BINARY_S2I_VERSION; - // Java 17 Source To Image - https://catalog.redhat.com/software/containers/ubi8/openjdk-17/618bdbf34ae3739687568813 - public static final String S2I_JAVA_17_IMAGE_NAME = "registry.access.redhat.com/ubi8/openjdk-17"; - public static final String S2I_JAVA_17_VERSION = UBI8_JAVA_VERSION; + // Java 17 Source To Image - https://catalog.redhat.com/software/containers/ubi9/openjdk-17/61ee7c26ed74b2ffb22b07f6 + public static final String S2I_JAVA_17_IMAGE_NAME = "registry.access.redhat.com/ubi9/openjdk-17"; + public static final String S2I_JAVA_17_VERSION = UBI9_JAVA_VERSION; public static final String S2I_JAVA_17 = S2I_JAVA_17_IMAGE_NAME + ":" + S2I_JAVA_17_VERSION; - // Java Source To Image - https://catalog.redhat.com/software/containers/ubi8/openjdk-21/653fb7e21b2ec10f7dfc10d0?q=openjdk%2021&architecture=amd64&image=66bcc007a3857fbc34f4dce1 - public static final String S2I_JAVA_21_IMAGE_NAME = "registry.access.redhat.com/ubi8/openjdk-21"; - public static final String S2I_JAVA_21_VERSION = UBI8_JAVA_VERSION; + // Java Source To Image 
- https://catalog.redhat.com/software/containers/ubi9/openjdk-21/6501cdb5c34ae048c44f7814 + public static final String S2I_JAVA_21_IMAGE_NAME = "registry.access.redhat.com/ubi9/openjdk-21"; + public static final String S2I_JAVA_21_VERSION = UBI9_JAVA_VERSION; public static final String S2I_JAVA_21 = S2I_JAVA_21_IMAGE_NAME + ":" + S2I_JAVA_21_VERSION; // === Native Builder images // Mandrel Builder Image - https://quay.io/repository/quarkus/ubi-quarkus-mandrel-builder-image?tab=tags - public static final String MANDREL_BUILDER_IMAGE_NAME = "quay.io/quarkus/ubi-quarkus-mandrel-builder-image"; - public static final String MANDREL_BUILDER_VERSION = NATIVE_BUILDER_VERSION; - public static final String MANDREL_BUILDER = MANDREL_BUILDER_IMAGE_NAME + ":" + MANDREL_BUILDER_VERSION; + public static final String UBI8_MANDREL_BUILDER_IMAGE_NAME = "quay.io/quarkus/ubi-quarkus-mandrel-builder-image"; + public static final String UBI8_MANDREL_BUILDER_VERSION = NATIVE_BUILDER_VERSION; + public static final String UBI8_MANDREL_BUILDER = UBI8_MANDREL_BUILDER_IMAGE_NAME + ":" + UBI8_MANDREL_BUILDER_VERSION; + + // Mandrel Builder Image - https://quay.io/repository/quarkus/ubi9-quarkus-mandrel-builder-image?tab=tags + public static final String UBI9_MANDREL_BUILDER_IMAGE_NAME = "quay.io/quarkus/ubi9-quarkus-mandrel-builder-image"; + public static final String UBI9_MANDREL_BUILDER_VERSION = NATIVE_BUILDER_VERSION; + public static final String UBI9_MANDREL_BUILDER = UBI9_MANDREL_BUILDER_IMAGE_NAME + ":" + UBI9_MANDREL_BUILDER_VERSION; // GraalVM CE Builder Image - https://quay.io/repository/quarkus/ubi-quarkus-graalvmce-builder-image?tab=tags - public static final String GRAALVM_BUILDER_IMAGE_NAME = "quay.io/quarkus/ubi-quarkus-graalvmce-builder-image"; - public static final String GRAALVM_BUILDER_VERSION = NATIVE_BUILDER_VERSION; - public static final String GRAALVM_BUILDER = GRAALVM_BUILDER_IMAGE_NAME + ":" + GRAALVM_BUILDER_VERSION; + public static final String UBI8_GRAALVM_BUILDER_IMAGE_NAME = "quay.io/quarkus/ubi-quarkus-graalvmce-builder-image"; + public static final String UBI8_GRAALVM_BUILDER_VERSION = NATIVE_BUILDER_VERSION; + public static final String UBI8_GRAALVM_BUILDER = UBI8_GRAALVM_BUILDER_IMAGE_NAME + ":" + UBI8_GRAALVM_BUILDER_VERSION; + + // GraalVM CE Builder Image - https://quay.io/repository/quarkus/ubi9-quarkus-graalvmce-builder-image?tab=tags + public static final String UBI9_GRAALVM_BUILDER_IMAGE_NAME = "quay.io/quarkus/ubi9-quarkus-graalvmce-builder-image"; + public static final String UBI9_GRAALVM_BUILDER_VERSION = NATIVE_BUILDER_VERSION; + public static final String UBI9_GRAALVM_BUILDER = UBI9_GRAALVM_BUILDER_IMAGE_NAME + ":" + UBI9_GRAALVM_BUILDER_VERSION; public static String getDefaultJvmImage(CompiledJavaVersionBuildItem.JavaVersion version) { switch (version.isJava21OrHigher()) { case TRUE: - return UBI8_JAVA_21; + return UBI9_JAVA_21; default: - return UBI8_JAVA_17; + return UBI9_JAVA_17; } } } diff --git a/core/deployment/src/main/java/io/quarkus/deployment/index/ApplicationArchiveBuildStep.java b/core/deployment/src/main/java/io/quarkus/deployment/index/ApplicationArchiveBuildStep.java index 4efa94db2d4ca..0eb9f56003939 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/index/ApplicationArchiveBuildStep.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/index/ApplicationArchiveBuildStep.java @@ -117,7 +117,7 @@ ApplicationArchivesBuildItem build( } Map> removedResources = new HashMap<>(); - for (Map.Entry> entry : 
classLoadingConfig.removedResources.entrySet()) { + for (Map.Entry> entry : classLoadingConfig.removedResources().entrySet()) { removedResources.put(new GACT(entry.getKey().split(":")), entry.getValue()); } diff --git a/core/deployment/src/main/java/io/quarkus/deployment/index/IndexDependencyConfig.java b/core/deployment/src/main/java/io/quarkus/deployment/index/IndexDependencyConfig.java index 5671b68d6c5e6..4e8fa2cd55c3a 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/index/IndexDependencyConfig.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/index/IndexDependencyConfig.java @@ -3,27 +3,23 @@ import java.util.Optional; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; @ConfigGroup -public class IndexDependencyConfig { +public interface IndexDependencyConfig { /** * The maven groupId of the artifact. */ - @ConfigItem - public String groupId; + String groupId(); /** * The maven artifactId of the artifact (optional). */ - @ConfigItem - public Optional artifactId; + Optional artifactId(); /** * The maven classifier of the artifact (optional). */ - @ConfigItem - public Optional classifier; + Optional classifier(); } diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/NativeConfig.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/NativeConfig.java index 89d40eb1db431..b340fa6884c2e 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/pkg/NativeConfig.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/pkg/NativeConfig.java @@ -114,7 +114,7 @@ public interface NativeConfig { /** * Defines the file encoding as in {@code -Dfile.encoding=...}. - * + *

* Native image runtime uses the host's (i.e. build time) value of {@code file.encoding} * system property. We intentionally default this to UTF-8 to avoid platform specific * defaults to be picked up which can then result in inconsistent behavior in the @@ -253,7 +253,16 @@ default boolean isExplicitContainerBuild() { interface BuilderImageConfig { /** * The docker image to use to do the image build. It can be one of `graalvm`, `mandrel`, or the full image path, e.g. - * {@code quay.io/quarkus/ubi-quarkus-mandrel-builder-image:jdk-21}. + * {@code quay.io/quarkus/ubi9-quarkus-mandrel-builder-image:jdk-21}. + *

+ * Note: Builder images are available using UBI 8 and UBI 9 base images, for example:
+ * <ul>
+ * <li>UBI 8: {@code quay.io/quarkus/ubi-quarkus-mandrel-builder-image:jdk-21}</li>
+ * <li>UBI 9: {@code quay.io/quarkus/ubi9-quarkus-mandrel-builder-image:jdk-21}</li>
+ * </ul>
+ * <p>
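+ * For example (an illustrative sketch, assuming the standard {@code quarkus.native.builder-image} property), the UBI 9
+ * Mandrel builder image listed above could be selected explicitly with:
+ *
+ * <pre>
+ * quarkus.native.builder-image=quay.io/quarkus/ubi9-quarkus-mandrel-builder-image:jdk-21
+ * </pre>
+ * <p>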
+ * Note that if you use a UBI 9 builder image and plan to build a container, you must + * ensure that the base image used for the container is also based on UBI 9. */ @WithParentName @WithDefault("${platform.quarkus.native.builder-image}") @@ -278,9 +287,9 @@ interface BuilderImageConfig { default String getEffectiveImage() { final String builderImageName = this.image().toUpperCase(); if (builderImageName.equals(BuilderImageProvider.GRAALVM.name())) { - return ContainerImages.GRAALVM_BUILDER; + return ContainerImages.UBI9_GRAALVM_BUILDER; } else if (builderImageName.equals(BuilderImageProvider.MANDREL.name())) { - return ContainerImages.MANDREL_BUILDER; + return ContainerImages.UBI9_MANDREL_BUILDER; } else { return this.image(); } @@ -346,7 +355,7 @@ default String getEffectiveImage() { * If errors should be reported at runtime. This is a more relaxed setting; however, it is not recommended, as it * means * your application may fail at runtime if an unsupported feature is used by accident. - * +
* <p>
* Note that the use of this flag may result in build time failures due to {@code ClassNotFoundException}s. * The most likely reason is that the Quarkus extension already optimized it away or does not actually need it. * In such cases you should explicitly add the corresponding dependency providing the missing classes as a * @@ -357,9 +366,9 @@ default String getEffectiveImage() { /** * Don't build a native image if it already exists. - * +
* <p>
* This is useful if you have already built an image and you want to use Quarkus to deploy it somewhere. - * + *
* <p>
* Note that this is not able to detect if the existing image is outdated; if you have modified source * or config and want a new image, you must not use this flag. */ @@ -387,7 +396,7 @@ interface ResourcesConfig { *

          * quarkus.native.resources.includes = foo/**,bar/**/*.txt
          * 
- * + *
* <p>
* the files {@code src/main/resources/foo/selected.png} and {@code bar/some.txt} will be included in the native * image, while {@code src/main/resources/ignored.png} will not be included. *

@@ -467,7 +476,7 @@ interface ResourcesConfig { * quarkus.native.resources.includes = **/*.png * quarkus.native.resources.excludes = foo/**,**/green.png * - * + *
* <p>
* the resource {@code red.png} will be available in the native image while the resources {@code foo/green.png} * and {@code bar/blue.png} will not be available in the native image. */ @@ -532,7 +541,7 @@ interface Compression { /** * Allows passing extra arguments to the UPX command line (like --brute). * The arguments are comma-separated. - * + *
* <p>
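* For example (an illustrative sketch, assuming the standard {@code quarkus.native.compression.additional-args} property),
* the {@code --brute} argument mentioned above could be passed as:
*
* <pre>
* quarkus.native.compression.additional-args=--brute
* </pre>
* <p>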
* The exhaustive list of parameters can be found in * https://github.com/upx/upx/blob/devel/doc/upx.pod. */ diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/GraalVM.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/GraalVM.java index 89c620d6f7c45..efb2f3cc50c11 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/GraalVM.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/GraalVM.java @@ -186,8 +186,6 @@ public static final class Version extends io.quarkus.runtime.graal.GraalVM.Versi static final Version VERSION_21_3_0 = new Version("GraalVM 21.3.0", "21.3.0", Distribution.GRAALVM); public static final Version VERSION_23_0_0 = new Version("GraalVM 23.0.0", "23.0.0", "17", Distribution.GRAALVM); public static final Version VERSION_23_1_0 = new Version("GraalVM 23.1.0", "23.1.0", "21", Distribution.GRAALVM); - public static final Version VERSION_23_1_2 = new Version("GraalVM 23.1.2", "23.1.2", "21", Distribution.GRAALVM); - public static final Version VERSION_23_1_3 = new Version("GraalVM 23.1.3", "23.1.3", "21", Distribution.GRAALVM); public static final Version VERSION_24_0_0 = new Version("GraalVM 24.0.0", "24.0.0", "22", Distribution.GRAALVM); public static final Version VERSION_24_0_999 = new Version("GraalVM 24.0.999", "24.0.999", "22", Distribution.GRAALVM); public static final Version VERSION_24_1_0 = new Version("GraalVM 24.1.0", "24.1.0", "23", Distribution.GRAALVM); diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/JarResultBuildStep.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/JarResultBuildStep.java index bd36f8ba78ae9..b9d5b25fb68ae 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/JarResultBuildStep.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/JarResultBuildStep.java @@ -1,7 +1,6 @@ package io.quarkus.deployment.pkg.steps; import static io.quarkus.commons.classloading.ClassLoaderHelper.fromClassNameToResourceName; -import static io.quarkus.deployment.pkg.PackageConfig.JarConfig.JarType.LEGACY_JAR; import static io.quarkus.deployment.pkg.PackageConfig.JarConfig.JarType.MUTABLE_JAR; import static io.quarkus.deployment.pkg.PackageConfig.JarConfig.JarType.UBER_JAR; @@ -358,7 +357,7 @@ private void buildUberJar0(CurateOutcomeBuildItem curateOutcomeBuildItem, final Set mergeResourcePaths = mergedResources.stream() .map(UberJarMergedResourceBuildItem::getPath) .collect(Collectors.toSet()); - final Set removed = getRemovedKeys(classLoadingConfig); + final Set removed = getRemovedArtifactKeys(classLoadingConfig); Set ignoredEntries = new HashSet<>(); packageConfig.jar().userConfiguredIgnoredEntries().ifPresent(ignoredEntries::addAll); @@ -554,7 +553,7 @@ private JarBuildItem buildLegacyThinJar(CurateOutcomeBuildItem curateOutcomeBuil doLegacyThinJarGeneration(curateOutcomeBuildItem, outputTargetBuildItem, transformedClasses, applicationArchivesBuildItem, applicationInfo, packageConfig, generatedResources, libDir, generatedClasses, runnerZipFs, mainClassBuildItem, - classLoadingConfig); + getRemovedArtifactKeys(classLoadingConfig)); } runnerJar.toFile().setReadable(true, false); @@ -707,7 +706,7 @@ private JarBuildItem buildThinJar(CurateOutcomeBuildItem curateOutcomeBuildItem, } final Set parentFirstKeys = getParentFirstKeys(curateOutcomeBuildItem, classLoadingConfig); final StringBuilder classPath = new StringBuilder(); - final Set removed = getRemovedKeys(classLoadingConfig); + final Set removed 
= getRemovedArtifactKeys(classLoadingConfig); final Map> copiedArtifacts = new HashMap<>(); for (ResolvedDependency appDep : curateOutcomeBuildItem.getApplicationModel().getRuntimeDependencies()) { if (!rebuild) { @@ -882,7 +881,7 @@ private Set getParentFirstKeys(CurateOutcomeBuildItem curateOutcome parentFirstKeys.add(d.getKey()); } }); - classLoadingConfig.parentFirstArtifacts.ifPresent( + classLoadingConfig.parentFirstArtifacts().ifPresent( parentFirstArtifacts -> { for (String artifact : parentFirstArtifacts) { parentFirstKeys.add(new GACT(artifact.split(":"))); @@ -891,18 +890,16 @@ private Set getParentFirstKeys(CurateOutcomeBuildItem curateOutcome return parentFirstKeys; } - /** - * @return a {@code Set} containing the key of the artifacts to load from the parent ClassLoader first. - */ - private Set getRemovedKeys(ClassLoadingConfig classLoadingConfig) { - final Set removed = new HashSet<>(); - classLoadingConfig.removedArtifacts.ifPresent( - removedArtifacts -> { - for (String artifact : removedArtifacts) { - removed.add(new GACT(artifact.split(":"))); - } - }); - return removed; + private Set getRemovedArtifactKeys(ClassLoadingConfig classLoadingConfig) { + if (classLoadingConfig.removedArtifacts().isEmpty()) { + return Set.of(); + } + + Set removedArtifacts = new HashSet<>(); + for (String artifact : classLoadingConfig.removedArtifacts().get()) { + removedArtifacts.add(GACT.fromString(artifact)); + } + return Collections.unmodifiableSet(removedArtifacts); } private void copyDependency(Set parentFirstArtifacts, OutputTargetBuildItem outputTargetBuildItem, @@ -1044,16 +1041,13 @@ private NativeImageSourceJarBuildItem buildNativeImageThinJar(CurateOutcomeBuild log.info("Building native image source jar: " + runnerJar); + final Set removedArtifacts = new HashSet<>(getRemovedArtifactKeys(classLoadingConfig)); // Remove svm and graal-sdk artifacts as they are provided by GraalVM itself - if (classLoadingConfig.removedArtifacts.isEmpty()) { - classLoadingConfig.removedArtifacts = Optional.of(new ArrayList<>(6)); - } - List removedArtifacts = classLoadingConfig.removedArtifacts.get(); - removedArtifacts.add("org.graalvm.nativeimage:svm"); - removedArtifacts.add("org.graalvm.sdk:graal-sdk"); - removedArtifacts.add("org.graalvm.sdk:nativeimage"); - removedArtifacts.add("org.graalvm.sdk:word"); - removedArtifacts.add("org.graalvm.sdk:collections"); + removedArtifacts.add(GACT.fromString("org.graalvm.nativeimage:svm")); + removedArtifacts.add(GACT.fromString("org.graalvm.sdk:graal-sdk")); + removedArtifacts.add(GACT.fromString("org.graalvm.sdk:nativeimage")); + removedArtifacts.add(GACT.fromString("org.graalvm.sdk:word")); + removedArtifacts.add(GACT.fromString("org.graalvm.sdk:collections")); // complain if graal-sdk is present as a dependency as nativeimage should be preferred if (curateOutcomeBuildItem.getApplicationModel().getDependencies().stream() @@ -1065,7 +1059,7 @@ private NativeImageSourceJarBuildItem buildNativeImageThinJar(CurateOutcomeBuild doLegacyThinJarGeneration(curateOutcomeBuildItem, outputTargetBuildItem, transformedClasses, applicationArchivesBuildItem, applicationInfo, packageConfig, generatedResources, libDir, allClasses, - runnerZipFs, mainClassBuildItem, classLoadingConfig); + runnerZipFs, mainClassBuildItem, removedArtifacts); } runnerJar.toFile().setReadable(true, false); return new NativeImageSourceJarBuildItem(runnerJar, libDir); @@ -1109,7 +1103,7 @@ private void doLegacyThinJarGeneration(CurateOutcomeBuildItem curateOutcomeBuild List allClasses, FileSystem 
runnerZipFs, MainClassBuildItem mainClassBuildItem, - ClassLoadingConfig classLoadingConfig) + Set removedArtifacts) throws IOException { final Map seen = new HashMap<>(); final StringBuilder classPath = new StringBuilder(); @@ -1120,9 +1114,8 @@ private void doLegacyThinJarGeneration(CurateOutcomeBuildItem curateOutcomeBuild Predicate ignoredEntriesPredicate = getThinJarIgnoredEntriesPredicate(packageConfig); - final Set removed = getRemovedKeys(classLoadingConfig); copyLibraryJars(runnerZipFs, outputTargetBuildItem, transformedClasses, libDir, classPath, appDeps, services, - ignoredEntriesPredicate, removed); + ignoredEntriesPredicate, removedArtifacts); ResolvedDependency appArtifact = curateOutcomeBuildItem.getApplicationModel().getAppArtifact(); // the manifest needs to be the first entry in the jar, otherwise JarInputStream does not work properly diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/NativeImageBuildContainerRunner.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/NativeImageBuildContainerRunner.java index 9f04816dc6244..ee7fe5a908c89 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/NativeImageBuildContainerRunner.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/NativeImageBuildContainerRunner.java @@ -119,7 +119,16 @@ private void pull(String effectiveBuilderImage, boolean processInheritIODisabled @Override protected String[] getGraalVMVersionCommand(List args) { - return buildCommand("run", Collections.singletonList("--rm"), args); + List containerRuntimeArgs; + if (nativeConfig.containerRuntimeOptions().isPresent()) { + List runtimeOptions = nativeConfig.containerRuntimeOptions().get(); + containerRuntimeArgs = new ArrayList<>(runtimeOptions.size() + 1); + containerRuntimeArgs.addAll(runtimeOptions); + containerRuntimeArgs.add("--rm"); + } else { + containerRuntimeArgs = Collections.singletonList("--rm"); + } + return buildCommand("run", containerRuntimeArgs, args); } @Override diff --git a/core/deployment/src/main/java/io/quarkus/deployment/recording/BytecodeRecorderImpl.java b/core/deployment/src/main/java/io/quarkus/deployment/recording/BytecodeRecorderImpl.java index fe7de16a3e408..bdfe74a0a7931 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/recording/BytecodeRecorderImpl.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/recording/BytecodeRecorderImpl.java @@ -51,7 +51,6 @@ import org.jboss.jandex.ClassInfo; import org.jboss.jandex.MethodInfo; import org.jboss.jandex.Type; -import org.wildfly.common.Assert; import io.quarkus.deployment.proxy.ProxyConfiguration; import io.quarkus.deployment.proxy.ProxyFactory; @@ -76,6 +75,7 @@ import io.quarkus.runtime.types.GenericArrayTypeImpl; import io.quarkus.runtime.types.ParameterizedTypeImpl; import io.quarkus.runtime.types.WildcardTypeImpl; +import io.smallrye.common.constraint.Assert; /** * A class that can be used to record invocations to bytecode so they can be replayed later. 
This is done through the diff --git a/core/deployment/src/main/java/io/quarkus/deployment/steps/ApplicationIndexBuildStep.java b/core/deployment/src/main/java/io/quarkus/deployment/steps/ApplicationIndexBuildStep.java index b90e9592f69e4..bff7b73344d81 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/steps/ApplicationIndexBuildStep.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/steps/ApplicationIndexBuildStep.java @@ -84,7 +84,7 @@ public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOEx private Set removedApplicationClasses(CurateOutcomeBuildItem curation, ClassLoadingConfig classLoadingConfig) { ResolvedDependency appArtifact = curation.getApplicationModel().getAppArtifact(); - Set entry = classLoadingConfig.removedResources + Set entry = classLoadingConfig.removedResources() .get(appArtifact.getGroupId() + ":" + appArtifact.getArtifactId()); return entry != null ? entry : Collections.emptySet(); } diff --git a/core/deployment/src/main/java/io/quarkus/deployment/steps/ClassTransformingBuildStep.java b/core/deployment/src/main/java/io/quarkus/deployment/steps/ClassTransformingBuildStep.java index 921cb5fc8993e..fe0ed1f052263 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/steps/ClassTransformingBuildStep.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/steps/ClassTransformingBuildStep.java @@ -88,7 +88,7 @@ TransformedClassesBuildItem handleClassTransformation(List removedResourceBuildItems) { //a little bit of a hack, but we use an empty transformed class to represent removed resources, as transforming a class removes it from the original archive Map> removed = new HashMap<>(); - for (Map.Entry> entry : classLoadingConfig.removedResources.entrySet()) { + for (Map.Entry> entry : classLoadingConfig.removedResources().entrySet()) { removed.put(new GACT(entry.getKey().split(":")), entry.getValue()); } for (RemovedResourceBuildItem i : removedResourceBuildItems) { diff --git a/core/deployment/src/main/java/io/quarkus/deployment/steps/ConfigGenerationBuildStep.java b/core/deployment/src/main/java/io/quarkus/deployment/steps/ConfigGenerationBuildStep.java index a8d4b305e2fc4..83f2dc3d964f8 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/steps/ConfigGenerationBuildStep.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/steps/ConfigGenerationBuildStep.java @@ -65,6 +65,7 @@ import io.quarkus.deployment.builditem.SuppressNonRuntimeConfigChangedWarningBuildItem; import io.quarkus.deployment.builditem.nativeimage.ReflectiveClassBuildItem; import io.quarkus.deployment.builditem.nativeimage.ReflectiveMethodBuildItem; +import io.quarkus.deployment.builditem.nativeimage.RuntimeInitializedClassBuildItem; import io.quarkus.deployment.configuration.BuildTimeConfigurationReader; import io.quarkus.deployment.configuration.RunTimeConfigurationGenerator; import io.quarkus.deployment.configuration.tracker.ConfigTrackingConfig; @@ -114,6 +115,12 @@ public class ConfigGenerationBuildStep { SmallRyeConfigBuilder.class, "withSources", SmallRyeConfigBuilder.class, ConfigSource[].class); + @BuildStep + void nativeSupport(BuildProducer runtimeInitializedClassProducer) { + runtimeInitializedClassProducer.produce(new RuntimeInitializedClassBuildItem( + "io.quarkus.runtime.configuration.RuntimeConfigBuilder$UuidConfigSource$Holder")); + } + @BuildStep void buildTimeRunTimeConfig( ConfigurationBuildItem configItem, diff --git 
a/core/deployment/src/main/java/io/quarkus/runner/bootstrap/RunningQuarkusApplicationImpl.java b/core/deployment/src/main/java/io/quarkus/runner/bootstrap/RunningQuarkusApplicationImpl.java index 58d9740842aa2..5a57e7e21ef90 100644 --- a/core/deployment/src/main/java/io/quarkus/runner/bootstrap/RunningQuarkusApplicationImpl.java +++ b/core/deployment/src/main/java/io/quarkus/runner/bootstrap/RunningQuarkusApplicationImpl.java @@ -41,17 +41,27 @@ public void close() throws Exception { @Override public Optional getConfigValue(String key, Class type) { - //the config is in an isolated CL - //we need to extract it via reflection - //this is pretty yuck, but I don't really see a solution - ClassLoader old = Thread.currentThread().getContextClassLoader(); + + ClassLoader old = Thread.currentThread() + .getContextClassLoader(); try { - Class configProviderClass = classLoader.loadClass(ConfigProvider.class.getName()); - Method getConfig = configProviderClass.getMethod("getConfig", ClassLoader.class); - Thread.currentThread().setContextClassLoader(classLoader); - Object config = getConfig.invoke(null, classLoader); - return (Optional) getConfig.getReturnType().getMethod("getOptionalValue", String.class, Class.class) - .invoke(config, key, type); + // we are assuming here that the the classloader has been initialised with some kind of different provider that does not infinite loop. + Thread.currentThread() + .setContextClassLoader(classLoader); + if (classLoader == ConfigProvider.class.getClassLoader()) { + return ConfigProvider.getConfig(classLoader) + .getOptionalValue(key, type); + } else { + //the config is in an isolated CL + //we need to extract it via reflection + //this is pretty yuck, but I don't really see a solution + Class configProviderClass = classLoader.loadClass(ConfigProvider.class.getName()); + Method getConfig = configProviderClass.getMethod("getConfig", ClassLoader.class); + Object config = getConfig.invoke(null, classLoader); + return (Optional) getConfig.getReturnType() + .getMethod("getOptionalValue", String.class, Class.class) + .invoke(config, key, type); + } } catch (Exception e) { throw new RuntimeException(e); } finally { @@ -79,8 +89,16 @@ public Iterable getConfigKeys() { @Override public Object instance(Class clazz, Annotation... qualifiers) { try { - Class actualClass = Class.forName(clazz.getName(), true, - classLoader); + // TODO can we drop the class forname entirely? 
+ Class actualClass; + if (classLoader == clazz.getClassLoader()) { + actualClass = clazz; + } else { + // TODO this should never happen + actualClass = Class.forName(clazz.getName(), true, + classLoader); + } + Class cdi = classLoader.loadClass("jakarta.enterprise.inject.spi.CDI"); Object instance = cdi.getMethod("current").invoke(null); Method selectMethod = cdi.getMethod("select", Class.class, Annotation[].class); diff --git a/core/deployment/src/main/java/io/quarkus/runner/bootstrap/StartupActionImpl.java b/core/deployment/src/main/java/io/quarkus/runner/bootstrap/StartupActionImpl.java index 8287573c246e8..372ce06c63fcf 100644 --- a/core/deployment/src/main/java/io/quarkus/runner/bootstrap/StartupActionImpl.java +++ b/core/deployment/src/main/java/io/quarkus/runner/bootstrap/StartupActionImpl.java @@ -79,10 +79,12 @@ public StartupActionImpl(CuratedApplication curatedApplication, BuildResult buil } else { baseClassLoader.reset(extractGeneratedResources(buildResult, false), transformedClasses); + // TODO Need to do recreations in JUnitTestRunner for dev mode case runtimeClassLoader = curatedApplication.createRuntimeClassLoader( resources, transformedClasses); } this.runtimeClassLoader = runtimeClassLoader; + runtimeClassLoader.setStartupAction(this); } /** @@ -184,6 +186,7 @@ public void addRuntimeCloseTask(Closeable closeTask) { } private void doClose() { + curatedApplication.tidy(); try { runtimeClassLoader.loadClass(Quarkus.class.getName()).getMethod("blockingExit").invoke(null); } catch (InvocationTargetException | NoSuchMethodException | IllegalAccessException @@ -280,6 +283,7 @@ public RunningQuarkusApplication run(String... args) throws Exception { //we have our class loaders ClassLoader old = Thread.currentThread().getContextClassLoader(); + System.out.println("HOLLY running about to trigger SC " + runtimeClassLoader); try { Thread.currentThread().setContextClassLoader(runtimeClassLoader); final String className = applicationClassName; @@ -356,7 +360,7 @@ public void close() throws IOException { } @Override - public ClassLoader getClassLoader() { + public QuarkusClassLoader getClassLoader() { return runtimeClassLoader; } diff --git a/test-framework/common/src/main/java/io/quarkus/test/common/PathTestHelper.java b/core/deployment/src/main/java/io/quarkus/test/common/PathTestHelper.java similarity index 80% rename from test-framework/common/src/main/java/io/quarkus/test/common/PathTestHelper.java rename to core/deployment/src/main/java/io/quarkus/test/common/PathTestHelper.java index 17d6c874fe54d..972e493582680 100644 --- a/test-framework/common/src/main/java/io/quarkus/test/common/PathTestHelper.java +++ b/core/deployment/src/main/java/io/quarkus/test/common/PathTestHelper.java @@ -144,8 +144,22 @@ public static Path getTestClassesLocation(Class testClass) { } catch (MalformedURLException e) { throw new RuntimeException("Failed to resolve the location of the JAR containing " + testClass, e); } + } else if (resource.getProtocol().equals("quarkus")) { + // resources loaded in memory in the runtime classloader may have a quarkus: prefix + // TODO terrible hack, why was this not needed in earlier prototypes? maybe it only happens second time round? 
+ Path projectRoot = Paths.get("") + .normalize() + .toAbsolutePath(); + Path applicationRoot = getTestClassLocationForRootLocation(projectRoot.toString()); + System.out.println("HOLLY dealinh with " + resource); + Path path = applicationRoot.resolve(classFileName); + System.out.println("HOLLY so made " + path); + path = path.getRoot().resolve(path.subpath(0, path.getNameCount() - Path.of(classFileName).getNameCount())); + // TODO should we check existence in the test dir-ness, like we do on the other path? + return path; } Path path = toPath(resource); + path = path.getRoot().resolve(path.subpath(0, path.getNameCount() - Path.of(classFileName).getNameCount())); if (!isInTestDir(resource) && !path.getParent().getFileName().toString().equals(TARGET)) { @@ -168,16 +182,17 @@ public static Path getTestClassesLocation(Class testClass) { * @return directory or JAR containing the application being tested by the test class */ public static Path getAppClassLocation(Class testClass) { - return getAppClassLocationForTestLocation(getTestClassesLocation(testClass).toString()); + return getAppClassLocationForTestLocation(getTestClassesLocation(testClass)); } /** * Resolves the directory or the JAR file containing the application being tested by a test from the given location. * - * @param testClassLocation the test class location + * @param testClassLocationPath the test class location * @return directory or JAR containing the application being tested by a test from the given location */ - public static Path getAppClassLocationForTestLocation(String testClassLocation) { + public static Path getAppClassLocationForTestLocation(Path testClassLocationPath) { + String testClassLocation = testClassLocationPath.toString(); if (testClassLocation.endsWith(".jar")) { if (testClassLocation.endsWith("-tests.jar")) { return Paths.get(new StringBuilder() @@ -300,4 +315,52 @@ public static Path getProjectBuildDir(Path projectRoot, Path testClassLocation) } return projectRoot.resolve(projectRoot.relativize(testClassLocation).getName(0)); } + + public static Path getTestClassLocationForRootLocation(String rootLocation) { + if (rootLocation.endsWith(".jar")) { + if (rootLocation.endsWith("-tests.jar")) { + return Paths.get(new StringBuilder() + .append(rootLocation, 0, rootLocation.length() - "-tests.jar".length()) + .append(".jar") + .toString()); + } + return Path.of(rootLocation); + } + Optional mainClassesDir = TEST_TO_MAIN_DIR_FRAGMENTS.keySet() + .stream() + .map(s -> Path.of( + (rootLocation + File.separator + s).replaceAll("//", "/")).normalize()) + .filter(path -> Files.exists(path)) + .findFirst(); + if (mainClassesDir.isPresent()) { + return mainClassesDir.get(); + } + + // TODO reduce duplicated code, check if we can get rid of some of the regexes + + mainClassesDir = TEST_TO_MAIN_DIR_FRAGMENTS.keySet() + .stream() + .map(s -> Path.of( + (rootLocation + File.separator + "target" + File.separator + s).replaceAll("//", "/")).normalize()) + .filter(path -> Files.exists(path)) + .findFirst(); + if (mainClassesDir.isPresent()) { + return mainClassesDir.get(); + } + + // Try the gradle build dir + mainClassesDir = TEST_TO_MAIN_DIR_FRAGMENTS.keySet() + .stream() + .map(s -> Path.of( + (rootLocation + File.separator + "build" + File.separator + s).replaceAll("//", "/")).normalize()) + .filter(path -> Files.exists(path)) + .findFirst(); + if (mainClassesDir.isPresent()) { + return mainClassesDir.get(); + } + + // TODO is it safe to throw or return null? are there other build systems we should be considering? 
+ // throw new IllegalStateException("Unable to find any application content in " + rootLocation); + return null; + } } diff --git a/test-framework/common/src/main/java/io/quarkus/test/common/RestorableSystemProperties.java b/core/deployment/src/main/java/io/quarkus/test/common/RestorableSystemProperties.java similarity index 100% rename from test-framework/common/src/main/java/io/quarkus/test/common/RestorableSystemProperties.java rename to core/deployment/src/main/java/io/quarkus/test/common/RestorableSystemProperties.java diff --git a/test-framework/junit5/src/main/java/io/quarkus/test/junit/QuarkusTestProfile.java b/core/deployment/src/main/java/io/quarkus/test/junit/QuarkusTestProfile.java similarity index 79% rename from test-framework/junit5/src/main/java/io/quarkus/test/junit/QuarkusTestProfile.java rename to core/deployment/src/main/java/io/quarkus/test/junit/QuarkusTestProfile.java index 6b64d87733a4b..19e36b9a8fdba 100644 --- a/test-framework/junit5/src/main/java/io/quarkus/test/junit/QuarkusTestProfile.java +++ b/core/deployment/src/main/java/io/quarkus/test/junit/QuarkusTestProfile.java @@ -5,8 +5,6 @@ import java.util.Map; import java.util.Set; -import io.quarkus.test.common.QuarkusTestResourceLifecycleManager; - /** * Defines a 'test profile'. Tests run under a test profile * will have different configuration options to other tests. @@ -47,7 +45,7 @@ default String getConfigProfile() { } /** - * Additional {@link QuarkusTestResourceLifecycleManager} classes (along with their init params) to be used from this + * Additional { QuarkusTestResourceLifecycleManager} classes (along with their init params) to be used from this * specific test profile. * * If this method is not overridden, then only the {@link QuarkusTestResourceLifecycleManager} classes enabled via the @@ -60,7 +58,7 @@ default List testResources() { } /** - * If this returns true then only the test resources returned from {@link #testResources()} will be started, + * If this returns true then only the test resources returned from { #testResources()} will be started, * global annotated test resources will be ignored. */ default boolean disableGlobalTestResources() { @@ -80,7 +78,7 @@ default Set tags() { /** * The command line parameters that are passed to the main method on startup. * - * This is ignored for {@link io.quarkus.test.junit.main.QuarkusMainTest}, which has its own way of passing parameters. + * This is ignored for { io.quarkus.test.junit.main.QuarkusMainTest}, which has its own way of passing parameters. */ default String[] commandLineParameters() { return new String[0]; @@ -89,7 +87,7 @@ default String[] commandLineParameters() { /** * If the main method should be run. * - * This is ignored for {@link io.quarkus.test.junit.main.QuarkusMainTest}, where the main method is always run. + * This is ignored for { io.quarkus.test.junit.main.QuarkusMainTest}, where the main method is always run. 
*/ default boolean runMainMethod() { return false; @@ -104,26 +102,26 @@ default boolean disableApplicationLifecycleObservers() { } final class TestResourceEntry { - private final Class clazz; + private final Class clazz; //TODO was extends QuarkusTestResourceLifecycleManager but that class is in the wrong module now private final Map args; private final boolean parallel; - public TestResourceEntry(Class clazz) { + public TestResourceEntry(Class clazz) { this(clazz, Collections.emptyMap()); } - public TestResourceEntry(Class clazz, Map args) { + public TestResourceEntry(Class clazz, Map args) { this(clazz, args, false); } - public TestResourceEntry(Class clazz, Map args, + public TestResourceEntry(Class clazz, Map args, boolean parallel) { this.clazz = clazz; this.args = args; this.parallel = parallel; } - public Class getClazz() { + public Class getClazz() { return clazz; } diff --git a/test-framework/junit5/src/main/java/io/quarkus/test/junit/TestBuildChainFunction.java b/core/deployment/src/main/java/io/quarkus/test/junit/TestBuildChainFunction.java similarity index 89% rename from test-framework/junit5/src/main/java/io/quarkus/test/junit/TestBuildChainFunction.java rename to core/deployment/src/main/java/io/quarkus/test/junit/TestBuildChainFunction.java index b75ac09a25761..5494171f639cd 100644 --- a/test-framework/junit5/src/main/java/io/quarkus/test/junit/TestBuildChainFunction.java +++ b/core/deployment/src/main/java/io/quarkus/test/junit/TestBuildChainFunction.java @@ -25,18 +25,24 @@ import io.quarkus.deployment.builditem.TestClassBeanBuildItem; import io.quarkus.deployment.builditem.TestClassPredicateBuildItem; import io.quarkus.deployment.builditem.TestProfileBuildItem; +import io.quarkus.deployment.dev.testing.DotNames; +import io.quarkus.deployment.dev.testing.TestClassIndexer; import io.quarkus.test.common.PathTestHelper; -import io.quarkus.test.common.TestClassIndexer; import io.quarkus.test.junit.buildchain.TestBuildChainCustomizerProducer; +// TODO ideally this would live in the test-framework modules, but that needs the FacadeClassLoader to be over there, which needs the JUnitRunner to be over there. public class TestBuildChainFunction implements Function, List>> { + protected static final String TEST_LOCATION = "test-location"; + protected static final String TEST_CLASS = "test-class"; + protected static final String TEST_PROFILE = "test-profile"; + @Override public List> apply(Map stringObjectMap) { - Path testLocation = (Path) stringObjectMap.get(AbstractJvmQuarkusTestExtension.TEST_LOCATION); + Path testLocation = (Path) stringObjectMap.get(TEST_LOCATION); // the index was written by the extension Index testClassesIndex = TestClassIndexer.readIndex(testLocation, - (Class) stringObjectMap.get(AbstractJvmQuarkusTestExtension.TEST_CLASS)); + (Class) stringObjectMap.get(TEST_CLASS)); List> allCustomizers = new ArrayList<>(1); Consumer defaultCustomizer = new Consumer() { @@ -76,7 +82,9 @@ public boolean test(String className) { buildChainBuilder.addBuildStep(new BuildStep() { @Override public void execute(BuildContext context) { - context.produce(new TestAnnotationBuildItem(QuarkusTest.class.getName())); + // TODO ideally we would use the .class object, but we can't if we're in core + // TODO should this be a dot name? 
+ context.produce(new TestAnnotationBuildItem("io.quarkus.test.junit.QuarkusTest")); // QuarkusTest.class.getName())); } }) .produces(TestAnnotationBuildItem.class) @@ -138,7 +146,7 @@ public void execute(BuildContext context) { buildChainBuilder.addBuildStep(new BuildStep() { @Override public void execute(BuildContext context) { - Object testProfile = stringObjectMap.get(AbstractJvmQuarkusTestExtension.TEST_PROFILE); + Object testProfile = stringObjectMap.get(TEST_PROFILE); if (testProfile != null) { context.produce(new TestProfileBuildItem(testProfile.toString())); } diff --git a/test-framework/junit5/src/main/java/io/quarkus/test/junit/buildchain/TestBuildChainCustomizerProducer.java b/core/deployment/src/main/java/io/quarkus/test/junit/buildchain/TestBuildChainCustomizerProducer.java similarity index 84% rename from test-framework/junit5/src/main/java/io/quarkus/test/junit/buildchain/TestBuildChainCustomizerProducer.java rename to core/deployment/src/main/java/io/quarkus/test/junit/buildchain/TestBuildChainCustomizerProducer.java index c3577ac222770..8b153f44db063 100644 --- a/test-framework/junit5/src/main/java/io/quarkus/test/junit/buildchain/TestBuildChainCustomizerProducer.java +++ b/core/deployment/src/main/java/io/quarkus/test/junit/buildchain/TestBuildChainCustomizerProducer.java @@ -8,6 +8,7 @@ /** * Implementation of this class have the ability to add build items + * // TODO move this back to junit5 when we move FacadeClassLoader */ public interface TestBuildChainCustomizerProducer { diff --git a/core/deployment/src/main/java/io/quarkus/test/junit/classloading/FacadeClassLoader.java b/core/deployment/src/main/java/io/quarkus/test/junit/classloading/FacadeClassLoader.java new file mode 100644 index 0000000000000..404d02541d727 --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/test/junit/classloading/FacadeClassLoader.java @@ -0,0 +1,540 @@ +package io.quarkus.test.junit.classloading; + +import java.io.Closeable; +import java.io.File; +import java.io.IOException; +import java.lang.annotation.Annotation; +import java.lang.reflect.Field; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.net.MalformedURLException; +import java.net.URL; +import java.net.URLClassLoader; +import java.nio.file.Path; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; + +import org.eclipse.microprofile.config.spi.ConfigProviderResolver; +import org.jboss.logging.Logger; +import org.junit.jupiter.api.extension.RegisterExtension; +import org.junit.platform.commons.support.AnnotationSupport; + +import io.quarkus.bootstrap.app.CuratedApplication; +import io.quarkus.bootstrap.app.StartupAction; +import io.quarkus.bootstrap.classloading.QuarkusClassLoader; +import io.quarkus.deployment.dev.testing.AppMakerHelper; + +/** + * JUnit has many interceptors and listeners, but it does not allow us to intercept test discovery in a fine-grained way that + * would allow us to swap the thread context classloader. + * Since we can't intercept with a JUnit hook, we hijack from inside the classloader. + *
* <p>
+ * We need to load all our test classes in one go, during the discovery phase, before we start the applications. + * We may need several applications and therefore, several classloaders, depending on what profiles are set. + * To solve that, we prepare the applications, to get classloaders, and file them here. + */ +public class FacadeClassLoader extends ClassLoader implements Closeable { + private static final Logger log = Logger.getLogger(FacadeClassLoader.class); + protected static final String JAVA = "java."; + + private static final String NAME = "FacadeLoader"; + // TODO it would be nice, and maybe theoretically possible, to re-use the curated application? + // TODO and if we don't, how do we get a re-usable deployment classloader? + + // TODO does this need to be a thread safe maps? + private final Map curatedApplications = new HashMap<>(); + private final Map runtimeClassLoaders = new HashMap<>(); + private final ClassLoader parent; + + /* + * It seems kind of wasteful to load every class twice; that's true, but it's been the case (by a different mechanism) + * ever since Quarkus 1.2 and the move to isolated classloaders, because the test extension would reload classes into the + * runtime classloader. + * In the future, https://openjdk.org/jeps/466 would allow us to avoid inspecting the classes to avoid a double load in the + * delegating + * classloader + * The solution referenced by + * https://github.com/junit-team/junit5/discussions/4203,https://github.com/marcphilipp/gradle-sandbox/blob/ + * baaa1972e939f5817f54a3d287611cef0601a58d/classloader-per-test-class/src/test/java/org/example/ + * ClassLoaderReplacingLauncherSessionListener.java#L23-L44 + * does use a similar approach, although they have a default loader rather than a canary loader. + * // TODO should we use the canary loader, or the parent loader? + * // TODO we need to close this when we're done + * //If we use the parent loader, does that stop the quarkus classloaders getting a crack at some classes? + */ + private final ClassLoader canaryLoader; + + private Map profiles; + private String classesPath; + private ClassLoader otherLoader; + private Set quarkusTestClasses; + private Set quarkusMainTestClasses; + private boolean isAuxiliaryApplication; + private QuarkusClassLoader keyMakerClassLoader; + + private static volatile FacadeClassLoader instance; + + public static void clearSingleton() { + if (instance != null) { + instance.close(); + } + instance = null; + } + + // TODO does it make sense to have a parent here when it is sometimes ignored? + // We don't ever want more than one FacadeClassLoader active, especially since config gets initialised on it. + // The gradle test execution can make more than one, perhaps because of its threading model. + public static FacadeClassLoader instance(ClassLoader parent) { + if (instance == null) { + instance = new FacadeClassLoader(parent); + } + return instance; + } + + public FacadeClassLoader(ClassLoader parent) { + // We need to set the super or things don't work on paths which use the maven isolated classloader, such as google cloud functions tests + // It seems something in that path is using a method other than loadClass(), and so the inherited method can't do the right thing without a parent + super(parent); + // TODO in dev mode, sometimes this is the deployment classloader, which doesn't seem right? 
+ this.parent = parent; + // TODO if this is launched with a launcher, java.class.path may not be correct - see https://maven.apache.org/surefire/maven-surefire-plugin/examples/class-loading.html + // TODO paths with spaces in them break this - and at the moment, no test catches that + String classPath = System.getProperty("java.class.path"); + // This manipulation is needed to work in IDEs + URL[] urls = Arrays.stream(classPath.split(File.pathSeparator)) + .map(spec -> { + try { + // TODO is this adjustment even needed? + if (!spec.endsWith("jar") && !spec.endsWith(File.separator)) { + spec = spec + File.separator; + } + + return Path.of(spec) + .toUri() + .toURL(); + + } catch (IOException e) { + throw new RuntimeException(e); + } + }) + .toArray(URL[]::new); + + canaryLoader = new URLClassLoader(urls, null); + } + + @Override + public Class loadClass(String name) throws ClassNotFoundException { + // try { + System.out.println("HOLLY facade classloader loading " + name); + boolean isQuarkusTest = false; + boolean isMainTest = false; + boolean isIntegrationTest = false; + + // TODO we can almost get away with using a string, except for type safety - maybe a dotname? + // TODO since of course avoiding the classload would be ideal + // Lots of downstream logic uses the class to work back to the classpath, so we can't just get rid of it (yet) + // ... but of course at this stage we don't know anything more about the classpath than anyone else, and are just using the system property + // ... so anything using this to get the right information will be disappointed + // TODO we should just pass through the moduleInfo, right? + Class fromCanary = null; + + try { + if (otherLoader != null) { + try { + // TODO this is dumb, we are only loading it so that other stuff can discover a classpath from it + fromCanary = otherLoader + .loadClass(name); + } catch (ClassNotFoundException e) { + System.out.println("Could not load with the OTHER loader " + name); + System.out.println("Used class path " + classesPath); + return super.loadClass(name); + } + } else { + try { + fromCanary = canaryLoader.loadClass(name); + } catch (ClassNotFoundException e) { + return parent.loadClass(name); + } + } + + // TODO should we use JUnit's AnnotationSupport? It searches class hierarchies. Unless we have a good reason not to use it, perhaps we should? + // See, for example, https://github.com/marcphilipp/gradle-sandbox/blob/baaa1972e939f5817f54a3d287611cef0601a58d/classloader-per-test-class/src/test/java/org/example/ClassLoaderReplacingLauncherSessionListener.java#L23-L44 + // One reason not to use it is that it needs an annotation class, but we can load one with the canary + // It looks up the hierarchy which our current logic doesn't, which is risky + + Class profile = null; + if (profiles != null) { + // TODO the good is that we're re-using what JUnitRunner already worked out, the bad is that this is seriously clunky with multiple code paths, brittle information sharing ... + // TODO at the very least, should we have a test landscape holder class? + // TODO and what if JUnitRunner wasn't invoked, because this wasn't dev mode?! + isMainTest = quarkusMainTestClasses.contains(name); + // The JUnitRunner counts main tests as quarkus tests + isQuarkusTest = quarkusTestClasses.contains(name) && !isMainTest; + + profile = profiles.get(name); + + } else { + // TODO JUnitRunner already worked all this out for the dev mode case, could we share some logic? 
+ + // TODO make this test cleaner + more rigorous + // A Quarkus Test could be annotated with @QuarkusTest or with @ExtendWith[... QuarkusTestExtension.class ] or @RegisterExtension + // An @interface isn't a quarkus test, and doesn't want its own application; to detect it, just check if it has a superclass - except that fails for things whose superclass isn't on the classpath, like javax.tools subclasses + // TODO we probably need to walk the class hierarchy for the annotations, too? or do they get added to getAnnotations? + isQuarkusTest = !fromCanary.isAnnotation() && Arrays.stream(fromCanary.getAnnotations()) + .anyMatch(annotation -> annotation.annotationType() + .getName() + .endsWith("QuarkusTest")) + || Arrays.stream(fromCanary.getAnnotations()) + .anyMatch(annotation -> annotation.annotationType() + .getName() + .endsWith("org.junit.jupiter.api.extension.ExtendWith") + && annotation.toString() + .contains( + "io.quarkus.test.junit.QuarkusTestExtension")) // TODO should this be an equals(), for performance? Probably can do a better check than toString, which adds an @ and a .class + // (I think) + || registersQuarkusTestExtension(fromCanary); + + // TODO knowledge of test annotations leaking in to here, although JUnitTestRunner also has the same leak - should we have a superclass that lives in this package that we check for? + // TODO be tighter with the names we check for + // TODO this would be way easier if this was in the same module as the profile, could just do clazz.getAnnotation(TestProfile.class) + + isIntegrationTest = Arrays.stream(fromCanary.getAnnotations()) + .anyMatch(annotation -> annotation.annotationType() + .getName() + .endsWith("QuarkusIntegrationTest")); + + Optional profileAnnotation = Arrays.stream(fromCanary.getAnnotations()) + .filter(annotation -> annotation.annotationType() + .getName() + .endsWith("TestProfile")) + .findFirst(); + if (profileAnnotation.isPresent()) { + + // TODO could do getAnnotationsByType if we were in the same module + Method m = profileAnnotation.get() + .getClass() + .getMethod("value"); + profile = (Class) m.invoke(profileAnnotation.get()); // TODO extends quarkustestprofile + } + } + + String profileDescriptor = profile == null ? "no-profile" : profile.getName(); + String profileKey = "QuarkusTest" + "-" + profileDescriptor; + // TODO do we need to do extra work to make sure all of the quarkus app is in the cp? We'll return versions from the parent otherwise + // TODO think we need to make a 'first' runtime cl, and then switch for each new test? + // TODO how do we decide what to load with our classloader - everything? 
+ // Doing it just for the test loads too little, doing it for everything gives java.lang.ClassCircularityError: io/quarkus/runtime/configuration/QuarkusConfigFactory + // Anything loaded by JUnit will come through this classloader + + // TODO clunky, loading it many times, especially since JUnitRunner only had a string; but if we don't do this, we are in the wrong classloader + if (profile != null) { + profile = Class.forName(profile.getName()); + } + + if (isQuarkusTest && !isIntegrationTest) { + + preloadTestResourceClasses(fromCanary); + QuarkusClassLoader runtimeClassLoader = getQuarkusClassLoader(profileKey, fromCanary, profile); + Class thing = runtimeClassLoader.loadClass(name); + System.out.println("HOLLY did load " + thing + " using CL " + thing.getClassLoader()); + + return thing; + } else { + return super.loadClass(name); + } + + } catch (NoSuchMethodException e) { + System.out.println("Could get method " + e); + throw new RuntimeException(e); + } catch (InvocationTargetException e) { + System.out.println("Could not invoke " + e); + throw new RuntimeException(e); + } catch (IllegalAccessException e) { + System.out.println("Could not access " + e); + throw new RuntimeException(e); + } + + } + + /* + * What's this for? + * TODO Hopefully, once https://github.com/quarkusio/quarkus/issues/45785 is done, it will not be needed. + * Some tests, especially in kubernetes-client and openshift-client, check config to decide whether to start a dev service. + * That happens at augmentation, which happens before test execution. + * In the old model, the test class would have already been loaded by JUnit first, and it would have had a chance to write + * config to the system properties. + * That config would influence whether dev services were started. + * TODO even without 45785 it might be nice to find a better way, perhaps rewriting the AbstractKubernetesTestResource test + * resource to work differently? + * + */ + private void preloadTestResourceClasses(Class fromCanary) { + try { + Class ca = (Class) canaryLoader.loadClass("io.quarkus.test.common.QuarkusTestResource"); + List ans = AnnotationSupport.findRepeatableAnnotations(fromCanary, ca); + for (Annotation a : ans) { + Method m = a + .getClass() + .getMethod("value"); + Class resourceClass = (Class) m.invoke(a); + // Only do this hack for the resources we know need it, since it can cause failures in other areas + if (resourceClass.getName().contains("Kubernetes")) { + parent.loadClass(resourceClass.getName()); + } + } + } catch (ClassNotFoundException | InvocationTargetException | NoSuchMethodException | IllegalAccessException e) { + // In some projects, these classes are not on the canary classpath. That's fine, we know there's nothing to preload. + log.debug("Canary classloader could not preload test resources:" + e); + } + } + + private boolean registersQuarkusTestExtension(Class fromCanary) { + Class clazz = fromCanary; + try { + while (clazz != null) { + // TODO this call is not safe in our nobbled classloader, which is sort of surprising since I thought declared meant 'on this class' but I guess it needs to be able to access the parent + for (Field field : clazz.getDeclaredFields()) { + // We can't use isAnnotationPresent because the classloader of the JUnit classes will be wrong (the canary classloader rather than our classloader) + // TODO will all this searching be dreadfully slow? + // TODO redo the canary loader to load JUnit classes with the same classloader as this? 
+ + if (Arrays.stream(field.getAnnotations()) + .anyMatch(annotation -> annotation.annotationType() + .getName() + .equals(RegisterExtension.class.getName()))) { + if (field.getType() + .getName() + .equals("io.quarkus.test.junit.QuarkusTestExtension")) { + return true; + } + } + } + + clazz = clazz.getSuperclass(); + } + } catch (NoClassDefFoundError e) { + // Because the canary loader doesn't have a parent, this is possible + // We also see this error in getDeclaredFields(), which is more surprising to me + // If it happens, assume it's ok + // It's very unlikely something on the app classloader will extend quarkus test + // TODO suppress error once we know this is safe + System.out.println("HOLLY could not get parent of " + clazz.getName() + " got error " + e); + } + + return false; + } + + private QuarkusClassLoader getQuarkusClassLoader(String profileKey, Class requiredTestClass, Class profile) { + try { + StartupAction holder; + String key; + + // We cannot directly access TestResourceUtil as long as we're in the core module, but the app classloaders can. + // But, chicken-and-egg, we may not have an app classloader yet. However, if we don't, we won't need to worry about restarts, but this instance clearly cannot need a restart + if (keyMakerClassLoader == null) { + // Making a classloader uses the profile key to look up a curated application + holder = makeClassLoader(profileKey, requiredTestClass, profile); + keyMakerClassLoader = holder.getClassLoader(); + + // Now make sure to get the right key, so that the next test along can compare to see if it needs a restart + final String resourceKey = getResourceKey(requiredTestClass, profile); + key = profileKey + resourceKey; + } else { + final String resourceKey = getResourceKey(requiredTestClass, profile); + + // The resource key might be null, and that's ok + key = profileKey + resourceKey; + System.out.println("HOLLY With resources, key is " + key); + + holder = runtimeClassLoaders.get(key); + System.out.println("HOLLY seen this key before " + holder); + + if (holder == null) { + // TODO can we make this less confusing? + + // Making a classloader uses the profile key to look up a curated application + holder = makeClassLoader(profileKey, requiredTestClass, profile); + } + } + + // If we didn't have a classloader and didn't get a resource key + + runtimeClassLoaders.put(key, holder); + + return holder.getClassLoader(); + } catch (Exception e) { + // Exceptions here get swallowed by the JUnit framework and we don't get any debug information unless we print it ourself + // TODO what's the best way to do this? + e.printStackTrace(); + throw new RuntimeException(e); + } + } + + private String getResourceKey(Class requiredTestClass, Class profile) + throws NoSuchMethodException, ClassNotFoundException, IllegalAccessException, InvocationTargetException { + String resourceKey; + Method method = Class + .forName("io.quarkus.test.junit.TestResourceUtil", true, keyMakerClassLoader) // TODO use class, not string, but that would need us to be in a different module + .getMethod("getReloadGroupIdentifier", Class.class, Class.class); + + // TODO this is kind of annoying, can we find a nicer way? 
+ // The resource checks assume that there's a useful TCCL and load the class with that TCCL to do reference equality checks and casting against resource classes + // That does mean we potentially load the test class three times, if there's resources + ClassLoader original = Thread.currentThread() + .getContextClassLoader(); + try { + Thread.currentThread() + .setContextClassLoader(keyMakerClassLoader); + + // we reload the test resources (and thus the application) if we changed test class and the new test class is not a nested class, and if we had or will have per-test test resources + resourceKey = (String) method.invoke(null, requiredTestClass, profile); // TestResourceUtil.getResourcesKey(requiredTestClass); + } finally { + Thread.currentThread() + .setContextClassLoader(original); + } + return resourceKey; + } + + private StartupAction makeClassLoader(String key, Class requiredTestClass, Class profile) throws Exception { + + // This interception is only actually needed in limited circumstances; when + // - running in normal mode + // - *and* there is a @QuarkusTest to run + + // This class sets a Thead Context Classloader, which JUnit uses to load classes. + // However, in continuous testing mode, setting a TCCL here isn't sufficient for the + // tests to come in with our desired classloader; + // downstream code sets the classloader to the deployment classloader, so we then need + // to come in *after* that code. + + // TODO sometimes this is called in dev mode and sometimes it isn't? Ah, it's only not + // called if we die early, before we get to this + + // In continuous testing mode, the runner code will have executed before this + // interceptor, so + // this interceptor doesn't need to do anything. + // TODO what if we removed the changes in the runner code? + + // TODO I think all these comments are wrong? Bypass all this in continuous testing mode, where the custom runner will have already initialised things before we hit this class; the startup action holder is our best way + // of detecting it + + // TODO alternate way of detecting it ? Needs the build item, though + // TODO could the extension pass this through to us? no, I think we're invoked before anything quarkusy, and junit5 isn't even an extension + // DevModeType devModeType = launchModeBuildItem.getDevModeType().orElse(null); + // if (devModeType == null || !devModeType.isContinuousTestingSupported()) { + // return; + // } + + // TODO Some places do this, but that assumes we already have a classloader! boolean isContinuousTesting = testClassClassLoader instanceof QuarkusClassLoader; + + Thread currentThread = Thread.currentThread(); + + AppMakerHelper appMakerHelper = new AppMakerHelper(); + + CuratedApplication curatedApplication = curatedApplications.get(key); + + if (curatedApplication == null) { + Collection shutdownTasks = new HashSet(); + + String displayName = "JUnit" + key; // TODO come up with a good display name + curatedApplication = appMakerHelper.makeCuratedApplication(requiredTestClass, displayName, isAuxiliaryApplication, + shutdownTasks); + curatedApplications.put(key, curatedApplication); + + } + + // TODO are all these args used? + // TODO we are hardcoding is continuous testing to the wrong value! 
+ StartupAction startupAction = appMakerHelper.getStartupAction(requiredTestClass, + curatedApplication, isAuxiliaryApplication, profile); + + ClassLoader original = Thread.currentThread() + .getContextClassLoader(); + try { + // See comments on AbstractJVMTestExtension#evaluateExecutionCondition for why this is the system classloader + Thread.currentThread() + .setContextClassLoader(ClassLoader.getSystemClassLoader()); + + QuarkusClassLoader loader = (QuarkusClassLoader) startupAction + .getClassLoader(); + + Class configProviderResolverClass = loader.loadClass(ConfigProviderResolver.class.getName()); + Object configProviderResolver = configProviderResolverClass.getMethod("instance") + .invoke(null); + + Class testConfigProviderResolverClass = loader.loadClass(QuarkusTestConfigProviderResolver.class.getName()); + Object testConfigProviderResolver = testConfigProviderResolverClass.getDeclaredConstructor(ClassLoader.class) + .newInstance(loader); + + configProviderResolverClass.getDeclaredMethod("setInstance", configProviderResolverClass) + .invoke(null, + testConfigProviderResolver); + } finally { + Thread.currentThread() + .setContextClassLoader(original); + } + + System.out.println("HOLLY at end of classload TCCL is " + currentThread.getContextClassLoader()); + return startupAction; + + } + + @Override + public String getName() { + return NAME; + } + + @Override + public void close() { + for (CuratedApplication curatedApplication : curatedApplications.values()) { + curatedApplication.close(); + } + } + + public void setProfiles(Map profiles) { + this.profiles = profiles; + } + + public void setClassPath(String... classPaths) { + + this.classesPath = String.join(File.pathSeparator, classPaths); + System.out.println("HOLLY setting other classpath to " + classesPath); + URL[] urls = Arrays.stream(classesPath.split(File.pathSeparator)) + .map(spec -> { + try { + if (!spec.endsWith("jar") && !spec.endsWith(File.separator)) { + spec = spec + File.separator; + } + + return Path.of(spec) + .toUri() + .toURL(); + } catch (MalformedURLException e) { + throw new RuntimeException(e); + } catch (IOException e) { + throw new RuntimeException(e); + } + }) + .toArray(URL[]::new); + System.out.println("HOLLY urls is " + Arrays.toString(urls)); + otherLoader = new URLClassLoader(urls, null); + } + + public void setQuarkusTestClasses(Set quarkusTestClasses) { + this.quarkusTestClasses = quarkusTestClasses; + } + + public void setQuarkusMainTestClasses(Set quarkusMainTestClasses) { + this.quarkusMainTestClasses = quarkusMainTestClasses; + } + + public void setAuxiliaryApplication(boolean b) { + this.isAuxiliaryApplication = b; + } +} diff --git a/core/deployment/src/main/java/io/quarkus/test/junit/classloading/QuarkusTestConfigProviderResolver.java b/core/deployment/src/main/java/io/quarkus/test/junit/classloading/QuarkusTestConfigProviderResolver.java new file mode 100644 index 0000000000000..8e1fef6a41355 --- /dev/null +++ b/core/deployment/src/main/java/io/quarkus/test/junit/classloading/QuarkusTestConfigProviderResolver.java @@ -0,0 +1,23 @@ +package io.quarkus.test.junit.classloading; + +import io.quarkus.deployment.dev.testing.TestConfig; +import io.quarkus.runtime.LaunchMode; +import io.quarkus.runtime.configuration.ConfigUtils; +import io.smallrye.config.SmallRyeConfig; +import io.smallrye.config.SmallRyeConfigProviderResolver; + +public class QuarkusTestConfigProviderResolver extends SmallRyeConfigProviderResolver { + private final SmallRyeConfigProviderResolver resolver; + + public 
QuarkusTestConfigProviderResolver(final ClassLoader classLoader) { + this.resolver = (SmallRyeConfigProviderResolver) SmallRyeConfigProviderResolver.instance(); + + SmallRyeConfig config = ConfigUtils.configBuilder(false, true, LaunchMode.TEST) + .withProfile(LaunchMode.TEST.getDefaultProfile()) + .withMapping(TestConfig.class, "quarkus.test") + .forClassLoader(classLoader) + .build(); + + this.registerConfig(config, Thread.currentThread().getContextClassLoader()); + } +} diff --git a/core/deployment/src/test/java/io/quarkus/deployment/conditionaldeps/DependencyConditionMatchesConditionalDependencyTest.java b/core/deployment/src/test/java/io/quarkus/deployment/conditionaldeps/DependencyConditionMatchesConditionalDependencyTest.java index 51aa5660bdc3a..e38b26313ad72 100644 --- a/core/deployment/src/test/java/io/quarkus/deployment/conditionaldeps/DependencyConditionMatchesConditionalDependencyTest.java +++ b/core/deployment/src/test/java/io/quarkus/deployment/conditionaldeps/DependencyConditionMatchesConditionalDependencyTest.java @@ -8,20 +8,11 @@ import io.quarkus.bootstrap.resolver.BootstrapAppModelResolver; import io.quarkus.bootstrap.resolver.TsArtifact; import io.quarkus.bootstrap.resolver.TsQuarkusExt; -import io.quarkus.bootstrap.resolver.maven.IncubatingApplicationModelResolver; -import io.quarkus.bootstrap.resolver.maven.workspace.LocalProject; import io.quarkus.deployment.runnerjar.BootstrapFromOriginalJarTestBase; import io.quarkus.maven.dependency.ResolvedDependency; public class DependencyConditionMatchesConditionalDependencyTest extends BootstrapFromOriginalJarTestBase { - @Override - protected BootstrapAppModelResolver newAppModelResolver(LocalProject currentProject) throws Exception { - var resolver = super.newAppModelResolver(currentProject); - // resolver.setIncubatingModelResolver(true); - return resolver; - } - @Override protected TsArtifact composeApplication() { @@ -62,7 +53,7 @@ protected void assertAppModel(ApplicationModel appModel) { } assertThat(extensions).hasSize(8); - if (IncubatingApplicationModelResolver.isIncubatingEnabled(null)) { + if (!BootstrapAppModelResolver.isLegacyModelResolver(null)) { var extA = extensions.get("ext-a"); assertThat(extA.getDependencies()).isEmpty(); var extADeployment = extensions.get("ext-a-deployment"); diff --git a/core/deployment/src/test/java/io/quarkus/deployment/pkg/NativeConfigTest.java b/core/deployment/src/test/java/io/quarkus/deployment/pkg/NativeConfigTest.java index 654655f32234e..84ee03064d044 100644 --- a/core/deployment/src/test/java/io/quarkus/deployment/pkg/NativeConfigTest.java +++ b/core/deployment/src/test/java/io/quarkus/deployment/pkg/NativeConfigTest.java @@ -8,20 +8,20 @@ class NativeConfigTest { @Test public void testBuilderImageProperlyDetected() { - assertThat(createConfig("graalvm").builderImage().getEffectiveImage()).contains("ubi-quarkus-graalvmce-builder-image") + assertThat(createConfig("graalvm").builderImage().getEffectiveImage()).contains("ubi9-quarkus-graalvmce-builder-image") .contains("jdk-21"); - assertThat(createConfig("GraalVM").builderImage().getEffectiveImage()).contains("ubi-quarkus-graalvmce-builder-image") + assertThat(createConfig("GraalVM").builderImage().getEffectiveImage()).contains("ubi9-quarkus-graalvmce-builder-image") .contains("jdk-21"); - assertThat(createConfig("GraalVM").builderImage().getEffectiveImage()).contains("ubi-quarkus-graalvmce-builder-image") + assertThat(createConfig("GraalVM").builderImage().getEffectiveImage()).contains("ubi9-quarkus-graalvmce-builder-image") 
.contains("jdk-21"); - assertThat(createConfig("GRAALVM").builderImage().getEffectiveImage()).contains("ubi-quarkus-graalvmce-builder-image") + assertThat(createConfig("GRAALVM").builderImage().getEffectiveImage()).contains("ubi9-quarkus-graalvmce-builder-image") .contains("jdk-21"); - assertThat(createConfig("mandrel").builderImage().getEffectiveImage()).contains("ubi-quarkus-mandrel-builder-image") + assertThat(createConfig("mandrel").builderImage().getEffectiveImage()).contains("ubi9-quarkus-mandrel-builder-image") .contains("jdk-21"); - assertThat(createConfig("Mandrel").builderImage().getEffectiveImage()).contains("ubi-quarkus-mandrel-builder-image") + assertThat(createConfig("Mandrel").builderImage().getEffectiveImage()).contains("ubi9-quarkus-mandrel-builder-image") .contains("jdk-21"); - assertThat(createConfig("MANDREL").builderImage().getEffectiveImage()).contains("ubi-quarkus-mandrel-builder-image") + assertThat(createConfig("MANDREL").builderImage().getEffectiveImage()).contains("ubi9-quarkus-mandrel-builder-image") .contains("jdk-21"); assertThat(createConfig("aRandomString").builderImage().getEffectiveImage()).isEqualTo("aRandomString"); diff --git a/core/deployment/src/test/java/io/quarkus/deployment/pkg/steps/NativeImageBuildContainerRunnerTest.java b/core/deployment/src/test/java/io/quarkus/deployment/pkg/steps/NativeImageBuildContainerRunnerTest.java index 695ab0b45af48..e63b52f110094 100644 --- a/core/deployment/src/test/java/io/quarkus/deployment/pkg/steps/NativeImageBuildContainerRunnerTest.java +++ b/core/deployment/src/test/java/io/quarkus/deployment/pkg/steps/NativeImageBuildContainerRunnerTest.java @@ -29,7 +29,7 @@ void testBuilderImageBeingPickedUp() { Collections.emptyList()); found = false; for (String part : command) { - if (part.contains("ubi-quarkus-graalvmce-builder-image")) { + if (part.contains("ubi9-quarkus-graalvmce-builder-image")) { found = true; break; } @@ -42,7 +42,7 @@ void testBuilderImageBeingPickedUp() { Collections.emptyList()); found = false; for (String part : command) { - if (part.contains("ubi-quarkus-mandrel-builder-image")) { + if (part.contains("ubi9-quarkus-mandrel-builder-image")) { found = true; break; } diff --git a/core/processor/src/main/java/io/quarkus/annotation/processor/documentation/config/model/Extension.java b/core/processor/src/main/java/io/quarkus/annotation/processor/documentation/config/model/Extension.java index 17d5a68498a98..cc658227244e9 100644 --- a/core/processor/src/main/java/io/quarkus/annotation/processor/documentation/config/model/Extension.java +++ b/core/processor/src/main/java/io/quarkus/annotation/processor/documentation/config/model/Extension.java @@ -59,7 +59,7 @@ public boolean equals(Object obj) { @Deprecated(forRemoval = true) @JsonIgnore public boolean isMixedModule() { - return "io.quarkus".equals(groupId) && ("quarkus-core".equals(artifactId) || "quarkus-messaging".equals(artifactId)); + return "io.quarkus".equals(groupId) && ("quarkus-core".equals(artifactId) || "quarkus-vertx-http".equals(artifactId)); } @JsonIgnore diff --git a/core/runtime/pom.xml b/core/runtime/pom.xml index 4c92590c0b6b4..52742455423b0 100644 --- a/core/runtime/pom.xml +++ b/core/runtime/pom.xml @@ -303,6 +303,15 @@ + + de.thetaphi + forbiddenapis + + + **/Target_org_wildfly_common_net* + + + diff --git a/core/runtime/src/main/java/io/quarkus/runtime/Application.java b/core/runtime/src/main/java/io/quarkus/runtime/Application.java index e235f12eb7db9..6bea203e59c1b 100644 --- 
a/core/runtime/src/main/java/io/quarkus/runtime/Application.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/Application.java @@ -6,12 +6,12 @@ import org.eclipse.microprofile.config.spi.ConfigProviderResolver; import org.jboss.logging.Logger; -import org.wildfly.common.Assert; import org.wildfly.common.lock.Locks; import io.quarkus.bootstrap.runner.Timing; import io.quarkus.dev.appstate.ApplicationStateNotification; import io.quarkus.runtime.shutdown.ShutdownRecorder; +import io.smallrye.common.constraint.Assert; /** * The application base class, which is extended and implemented by a generated class which implements the application @@ -68,7 +68,18 @@ protected Application(boolean auxiliaryApplication) { * letting the user hook into it. */ public final void start(String[] args) { - if (!auxiliaryApplication) { + /* + * We can't make assumptions about the order that the main and auxiliary application get created. + * Because the test application gets created quite early in the test lifecycle, it usually beats the main application. + * In order to avoid returning null from getCurrentApplication and causing catastrophe in all the lambda tests, just use + * the auxiliary application if it's all we have + * TODO this comment is wrong, delete it + */ + // TODO this is all still a bit brittle and fragile; can we do better? maybe even formally linked pairs of applications? + // TODO check if this is still needed after fixing the over-eager setting of applications as auxiliary + // Or do something else in the calling code so that it does the bootstrap if current is null? some tests do that anyway, like FunqyCloudEventsFunction + // TODO are there any negative consequences to using the auxiliary application as the current one? + if (!auxiliaryApplication) {// TODO || currentApplication == null) { currentApplication = this; } final Lock stateLock = this.stateLock; diff --git a/core/runtime/src/main/java/io/quarkus/runtime/annotations/ConfigItem.java b/core/runtime/src/main/java/io/quarkus/runtime/annotations/ConfigItem.java index 02f0e65d1e04f..b068d3299a144 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/annotations/ConfigItem.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/annotations/ConfigItem.java @@ -11,10 +11,13 @@ /** * A single container configuration item. + * + * @deprecated Use interface-based {@code @ConfigMapping} instead. */ @Retention(RUNTIME) @Target({ FIELD, PARAMETER }) @Documented +@Deprecated(since = "3.19", forRemoval = true) public @interface ConfigItem { /** diff --git a/core/runtime/src/main/java/io/quarkus/runtime/annotations/ConfigRoot.java b/core/runtime/src/main/java/io/quarkus/runtime/annotations/ConfigRoot.java index 5124cb4d812ad..aec805336f35d 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/annotations/ConfigRoot.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/annotations/ConfigRoot.java @@ -21,7 +21,10 @@ * Determine the prefix key of the configuration root. * * @return the prefix key name + * @deprecated Use interface-based {@code @ConfigMapping} instead. When moving to {@code @ConfigMapping}, the prefix has to + * be included in the {@code @ConfigMapping#prefix} together with the name. */ + @Deprecated(since = "3.19", forRemoval = true) String prefix() default "quarkus"; /** @@ -35,6 +38,10 @@ * Determine the base key of the configuration root. * * @return the base key name + * @deprecated Use interface-based {@code @ConfigMapping} instead. 
Be careful, {@code @ConfigRoot(name = "extension")} may + * be migrated to {@code @ConfigMapping(prefix = "quarkus.extension")}. If no name was defined, make sure to + * define a prefix in {@code @ConfigMapping} as it's mandatory. */ + @Deprecated(since = "3.19", forRemoval = true) String name() default ConfigItem.HYPHENATED_ELEMENT_NAME; } diff --git a/core/runtime/src/main/java/io/quarkus/runtime/configuration/Aliased.java b/core/runtime/src/main/java/io/quarkus/runtime/configuration/Aliased.java deleted file mode 100644 index 0f7c3049d0dba..0000000000000 --- a/core/runtime/src/main/java/io/quarkus/runtime/configuration/Aliased.java +++ /dev/null @@ -1,17 +0,0 @@ -package io.quarkus.runtime.configuration; - -import java.util.Collection; - -/** - * An enum that has multiple possible textual representations. The representation used for output - * will always be the result of {@link Object#toString()}, but these additional aliases will be allowed - * on input as alternative spellings of the enum that implements the method. - */ -public interface Aliased { - /** - * Get the aliases for this value. - * - * @return the collection of aliases (must not be {@code null}) - */ - Collection getAliases(); -} diff --git a/core/runtime/src/main/java/io/quarkus/runtime/configuration/CidrAddressConverter.java b/core/runtime/src/main/java/io/quarkus/runtime/configuration/CidrAddressConverter.java index 7dfaefabf0118..ea633a388771c 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/configuration/CidrAddressConverter.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/configuration/CidrAddressConverter.java @@ -7,8 +7,9 @@ import jakarta.annotation.Priority; import org.eclipse.microprofile.config.spi.Converter; -import org.wildfly.common.net.CidrAddress; -import org.wildfly.common.net.Inet; + +import io.smallrye.common.net.CidrAddress; +import io.smallrye.common.net.Inet; /** * A converter which converts a CIDR address into an instance of {@link CidrAddress}. 
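As a rough illustration of the @ConfigItem/@ConfigRoot deprecation notes above, the sketch below shows what an interface-based replacement tends to look like. It is a minimal, hypothetical example assuming SmallRye Config's @ConfigMapping and @WithDefault annotations; the names are illustrative only and extension-specific details (such as config phases) are left out. The point the javadoc stresses is that the former root name must be folded into the @ConfigMapping prefix, because the interface-based model has no separate name attribute.

import io.smallrye.config.ConfigMapping;
import io.smallrye.config.WithDefault;

// Legacy class-based root (deprecated above), shown for comparison only:
//
//     @ConfigRoot(name = "extension")
//     public class ExtensionConfig {
//         /** Whether the extension is enabled. */
//         @ConfigItem(defaultValue = "true")
//         public boolean enabled;
//     }
//
// Interface-based replacement; "quarkus" plus the old root name become the prefix:
@ConfigMapping(prefix = "quarkus.extension")
public interface ExtensionConfig {

    /** Whether the extension is enabled; read from quarkus.extension.enabled. */
    @WithDefault("true")
    boolean enabled();
}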
diff --git a/core/runtime/src/main/java/io/quarkus/runtime/configuration/ConfigDiagnostic.java b/core/runtime/src/main/java/io/quarkus/runtime/configuration/ConfigDiagnostic.java index ef6de308b86c7..c3b93c1f49f22 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/configuration/ConfigDiagnostic.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/configuration/ConfigDiagnostic.java @@ -142,21 +142,14 @@ public static void unknownProperties(Set properties) { } } - public static void unknownRunTime(String name) { - if (ImageMode.current() == ImageMode.NATIVE_RUN) { - // only warn at run time for native images, otherwise the user will get warned twice for every property - unknown(name); + public static void reportUnknown(Set properties) { + if (ImageMode.current() == ImageMode.NATIVE_BUILD) { + unknownProperties(properties); } } - public static void unknownRunTime(NameIterator name) { - unknownRunTime(name.getName()); - } - - public static void unknownPropertiesRuntime(Set properties) { - if (ImageMode.current() == ImageMode.NATIVE_RUN) { - unknownProperties(properties); - } + public static void reportUnknownRuntime(Set properties) { + unknownProperties(properties); } /** diff --git a/core/runtime/src/main/java/io/quarkus/runtime/configuration/ConfigRecorder.java b/core/runtime/src/main/java/io/quarkus/runtime/configuration/ConfigRecorder.java index fbade91a5891f..f90d12727db5c 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/configuration/ConfigRecorder.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/configuration/ConfigRecorder.java @@ -104,6 +104,11 @@ public void releaseConfig(ShutdownContext shutdownContext) { // While this may seem to duplicate code in IsolatedDevModeMain, // it actually does not because it operates on a different instance // of QuarkusConfigFactory from a different classloader. + + if (shutdownContext == null) { + throw new RuntimeException( + "Internal error: shutdownContext is null. This probably happened because Quarkus failed to start properly in an earlier step, or because tests were run on a Quarkus instance that had already been shut down."); + } shutdownContext.addLastShutdownTask(QuarkusConfigFactory::releaseTCCLConfig); } } diff --git a/core/runtime/src/main/java/io/quarkus/runtime/configuration/HyphenateEnumConverter.java b/core/runtime/src/main/java/io/quarkus/runtime/configuration/HyphenateEnumConverter.java index 5bcae8611ce13..10b7a90a16ec8 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/configuration/HyphenateEnumConverter.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/configuration/HyphenateEnumConverter.java @@ -13,9 +13,10 @@ /** * A converter for hyphenated enums. * - * @deprecated Use {@link io.smallrye.config.Converters#getImplicitConverter(Class)} instead. + * @deprecated Use {@link io.smallrye.config.Converters#getImplicitConverter(Class)} instead. Best to keep it around until we + * entirely drop the config class support in favor of ConfigMapping.
*/ -@Deprecated(forRemoval = true) +@Deprecated(forRemoval = true, since = "3.9") public final class HyphenateEnumConverter<E extends Enum<E>> implements Converter<E>, Serializable { private static final String HYPHEN = "-"; private static final Pattern PATTERN = Pattern.compile("([-_]+)"); diff --git a/core/runtime/src/main/java/io/quarkus/runtime/configuration/NameIterator.java b/core/runtime/src/main/java/io/quarkus/runtime/configuration/NameIterator.java index f78ad5352a65e..fa03d4a76ee18 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/configuration/NameIterator.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/configuration/NameIterator.java @@ -2,7 +2,7 @@ import java.util.NoSuchElementException; -import org.wildfly.common.Assert; +import io.smallrye.common.constraint.Assert; public final class NameIterator { /** diff --git a/core/runtime/src/main/java/io/quarkus/runtime/configuration/QuarkusConfigFactory.java b/core/runtime/src/main/java/io/quarkus/runtime/configuration/QuarkusConfigFactory.java index 85f778217b946..2f65017fb3944 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/configuration/QuarkusConfigFactory.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/configuration/QuarkusConfigFactory.java @@ -1,7 +1,5 @@ package io.quarkus.runtime.configuration; -import org.eclipse.microprofile.config.spi.ConfigProviderResolver; - import io.quarkus.runtime.LaunchMode; import io.smallrye.config.SmallRyeConfig; import io.smallrye.config.SmallRyeConfigFactory; @@ -25,7 +23,8 @@ public SmallRyeConfig getConfigFor(final SmallRyeConfigProviderResolver configPr } public static void setConfig(SmallRyeConfig config) { - ConfigProviderResolver configProviderResolver = ConfigProviderResolver.instance(); + SmallRyeConfigProviderResolver configProviderResolver = (SmallRyeConfigProviderResolver) SmallRyeConfigProviderResolver + .instance(); // Uninstall previous config if (QuarkusConfigFactory.config != null) { configProviderResolver.releaseConfig(QuarkusConfigFactory.config); @@ -34,9 +33,8 @@ public static void setConfig(SmallRyeConfig config) { // Install new config if (config != null) { QuarkusConfigFactory.config = config; - // Register the new config for the TCCL, - // just in case the TCCL was using a different config - // than the one we uninstalled above.
+ // Someone may have called ConfigProvider.getConfig which automatically registers a Config in the TCCL + configProviderResolver.releaseConfig(Thread.currentThread().getContextClassLoader()); configProviderResolver.registerConfig(config, Thread.currentThread().getContextClassLoader()); } } diff --git a/core/runtime/src/main/java/io/quarkus/runtime/configuration/RuntimeConfigBuilder.java b/core/runtime/src/main/java/io/quarkus/runtime/configuration/RuntimeConfigBuilder.java index df8075d63bfa7..64279e30a9388 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/configuration/RuntimeConfigBuilder.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/configuration/RuntimeConfigBuilder.java @@ -15,7 +15,7 @@ public class RuntimeConfigBuilder implements SmallRyeConfigBuilderCustomizer { @Override public void configBuilder(final SmallRyeConfigBuilder builder) { new QuarkusConfigBuilderCustomizer().configBuilder(builder); - builder.withSources(new UuiConfigSource()); + builder.withSources(new UuidConfigSource()); builder.forClassLoader(Thread.currentThread().getContextClassLoader()) .addDefaultInterceptors() @@ -27,7 +27,7 @@ public int priority() { return Integer.MIN_VALUE; } - private static class UuiConfigSource implements ConfigSource { + private static class UuidConfigSource implements ConfigSource { private static final String QUARKUS_UUID = "quarkus.uuid"; diff --git a/core/runtime/src/main/java/io/quarkus/runtime/configuration/Substitutions.java b/core/runtime/src/main/java/io/quarkus/runtime/configuration/Substitutions.java index 78d04d1ccb5a7..9be7583257ad5 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/configuration/Substitutions.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/configuration/Substitutions.java @@ -1,5 +1,24 @@ package io.quarkus.runtime.configuration; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.io.PrintStream; +import java.io.PrintWriter; +import java.io.Reader; +import java.io.Writer; +import java.nio.charset.Charset; +import java.util.Collection; +import java.util.Enumeration; +import java.util.InvalidPropertiesFormatException; +import java.util.Map; +import java.util.Properties; +import java.util.Set; +import java.util.function.BiConsumer; +import java.util.function.BiFunction; +import java.util.function.BooleanSupplier; +import java.util.function.Function; + import org.eclipse.microprofile.config.spi.ConfigProviderResolver; import com.oracle.svm.core.annotate.Alias; @@ -97,4 +116,680 @@ public byte[] getClassBytes() { return null; } } + + /** + * GraalVM provides a lazy implementation for accessing system properties that are expensive to calculate. Still, it + * ends up calculating all the properties anyway when {@link System#getProperties()} is called, which is a common + * call: it is used, for instance, to get the list of names in Quarkus configuration, and + * GetPropertyAction#privilegedGetProperties() is used by many JVM APIs, for instance when determining the default + * timezone. Such initialization may cost a few milliseconds of the native image startup time (measured at 5-6 ms, + * depending on the system). + *

+ * This Substitution provides a delegate to the GraalVM lazy implementation, expanding the lazy check to each + * individual method of {@link Properties}. + */ + @TargetClass(className = "com.oracle.svm.core.jdk.SystemPropertiesSupport", onlyWith = Target_SystemPropertiesSupport.SystemPropertiesSupportGetPropertiesPresent.class) + static final class Target_SystemPropertiesSupport { + @Alias + private Properties properties; + + @Alias + private void ensureFullyInitialized() { + } + + @Alias + private void initializeLazyValue(String key) { + } + + @Substitute + public Properties getProperties() { + return new Properties() { + @Override + public synchronized Object setProperty(final String key, final String value) { + initializeLazyValue(key); + return properties.setProperty(key, value); + } + + @Override + public synchronized void load(final Reader reader) throws IOException { + properties.load(reader); + } + + @Override + public synchronized void load(final InputStream inStream) throws IOException { + properties.load(inStream); + } + + @Override + public void save(final OutputStream out, final String comments) { + ensureFullyInitialized(); + properties.save(out, comments); + } + + @Override + public void store(final Writer writer, final String comments) throws IOException { + ensureFullyInitialized(); + properties.store(writer, comments); + } + + @Override + public void store(final OutputStream out, final String comments) throws IOException { + ensureFullyInitialized(); + properties.store(out, comments); + } + + @Override + public synchronized void loadFromXML(final InputStream in) + throws IOException, InvalidPropertiesFormatException { + properties.loadFromXML(in); + } + + @Override + public void storeToXML(final OutputStream os, final String comment) throws IOException { + ensureFullyInitialized(); + properties.storeToXML(os, comment); + } + + @Override + public void storeToXML(final OutputStream os, final String comment, final String encoding) + throws IOException { + ensureFullyInitialized(); + properties.storeToXML(os, comment, encoding); + } + + @Override + public void storeToXML(final OutputStream os, final String comment, final Charset charset) + throws IOException { + ensureFullyInitialized(); + properties.storeToXML(os, comment, charset); + } + + @Override + public String getProperty(final String key) { + initializeLazyValue(key); + return properties.getProperty(key); + } + + @Override + public String getProperty(final String key, final String defaultValue) { + initializeLazyValue(key); + return properties.getProperty(key, defaultValue); + } + + @Override + public Enumeration propertyNames() { + return properties.propertyNames(); + } + + @Override + public Set stringPropertyNames() { + return properties.stringPropertyNames(); + } + + @Override + public void list(final PrintStream out) { + ensureFullyInitialized(); + properties.list(out); + } + + @Override + public void list(final PrintWriter out) { + ensureFullyInitialized(); + properties.list(out); + } + + @Override + public int size() { + return properties.size(); + } + + @Override + public boolean isEmpty() { + return properties.isEmpty(); + } + + @Override + public Enumeration keys() { + return properties.keys(); + } + + @Override + public Enumeration elements() { + ensureFullyInitialized(); + return properties.elements(); + } + + @Override + public boolean contains(final Object value) { + ensureFullyInitialized(); + return properties.contains(value); + } + + @Override + public boolean containsValue(final Object value) { + 
ensureFullyInitialized(); + return properties.containsValue(value); + } + + @Override + public boolean containsKey(final Object key) { + return properties.containsKey(key); + } + + @Override + public Object get(final Object key) { + if (key instanceof String) { + initializeLazyValue((String) key); + } + return properties.get(key); + } + + @Override + public synchronized Object put(final Object key, final Object value) { + if (key instanceof String) { + initializeLazyValue((String) key); + } + return properties.put(key, value); + } + + @Override + public synchronized Object remove(final Object key) { + if (key instanceof String) { + initializeLazyValue((String) key); + } + return properties.remove(key); + } + + @Override + public synchronized void putAll(final Map t) { + properties.putAll(t); + } + + @Override + public synchronized void clear() { + properties.clear(); + } + + @Override + public synchronized String toString() { + ensureFullyInitialized(); + return properties.toString(); + } + + @Override + public Set keySet() { + return properties.keySet(); + } + + @Override + public Collection values() { + ensureFullyInitialized(); + return properties.values(); + } + + @Override + public Set> entrySet() { + ensureFullyInitialized(); + return properties.entrySet(); + } + + @Override + public synchronized boolean equals(final Object o) { + ensureFullyInitialized(); + return properties.equals(o); + } + + @Override + public synchronized int hashCode() { + ensureFullyInitialized(); + return properties.hashCode(); + } + + @Override + public Object getOrDefault(final Object key, final Object defaultValue) { + if (key instanceof String) { + initializeLazyValue((String) key); + } + return properties.getOrDefault(key, defaultValue); + } + + @Override + public synchronized void forEach(final BiConsumer action) { + ensureFullyInitialized(); + properties.forEach(action); + } + + @Override + public synchronized void replaceAll(final BiFunction function) { + ensureFullyInitialized(); + properties.replaceAll(function); + } + + @Override + public synchronized Object putIfAbsent(final Object key, final Object value) { + if (key instanceof String) { + initializeLazyValue((String) key); + } + return properties.putIfAbsent(key, value); + } + + @Override + public synchronized boolean remove(final Object key, final Object value) { + if (key instanceof String) { + initializeLazyValue((String) key); + } + return properties.remove(key, value); + } + + @Override + public synchronized boolean replace(final Object key, final Object oldValue, final Object newValue) { + if (key instanceof String) { + initializeLazyValue((String) key); + } + return properties.replace(key, oldValue, newValue); + } + + @Override + public synchronized Object replace(final Object key, final Object value) { + if (key instanceof String) { + initializeLazyValue((String) key); + } + return properties.replace(key, value); + } + + @Override + public synchronized Object computeIfAbsent( + final Object key, + final Function mappingFunction) { + if (key instanceof String) { + initializeLazyValue((String) key); + } + return properties.computeIfAbsent(key, mappingFunction); + } + + @Override + public synchronized Object computeIfPresent( + final Object key, + final BiFunction remappingFunction) { + if (key instanceof String) { + initializeLazyValue((String) key); + } + return properties.computeIfPresent(key, remappingFunction); + } + + @Override + public synchronized Object compute( + final Object key, + final BiFunction remappingFunction) { + if (key 
instanceof String) { + initializeLazyValue((String) key); + } + return properties.compute(key, remappingFunction); + } + + @Override + public synchronized Object merge( + final Object key, + final Object value, + final BiFunction remappingFunction) { + if (key instanceof String) { + initializeLazyValue((String) key); + } + return properties.merge(key, value, remappingFunction); + } + + @Override + public synchronized Object clone() { + ensureFullyInitialized(); + return properties.clone(); + } + }; + } + + private static final class SystemPropertiesSupportGetPropertiesPresent implements BooleanSupplier { + @Override + public boolean getAsBoolean() { + try { + Class klass = Class.forName("com.oracle.svm.core.jdk.SystemPropertiesSupport"); + klass.getDeclaredMethod("getProperties"); + return true; + } catch (ClassNotFoundException | NoSuchMethodException e) { + return false; + } + } + } + } + + @TargetClass(className = "com.oracle.svm.core.jdk.SystemPropertiesSupport", onlyWith = Target_SystemPropertiesSupport_post_21.SystemPropertiesSupportGetCurrentPropertiesPresent.class) + static final class Target_SystemPropertiesSupport_post_21 { + + @Alias + private Properties currentProperties; + + @Alias + private void ensureAllPropertiesInitialized() { + } + + @Alias + private void ensurePropertyInitialized(String key) { + } + + @Substitute + public Properties getCurrentProperties() { + return new Properties() { + @Override + public synchronized Object setProperty(final String key, final String value) { + ensurePropertyInitialized(key); + return currentProperties.setProperty(key, value); + } + + @Override + public synchronized void load(final Reader reader) throws IOException { + currentProperties.load(reader); + } + + @Override + public synchronized void load(final InputStream inStream) throws IOException { + currentProperties.load(inStream); + } + + @Override + public void save(final OutputStream out, final String comments) { + ensureAllPropertiesInitialized(); + currentProperties.save(out, comments); + } + + @Override + public void store(final Writer writer, final String comments) throws IOException { + ensureAllPropertiesInitialized(); + currentProperties.store(writer, comments); + } + + @Override + public void store(final OutputStream out, final String comments) throws IOException { + ensureAllPropertiesInitialized(); + currentProperties.store(out, comments); + } + + @Override + public synchronized void loadFromXML(final InputStream in) + throws IOException, InvalidPropertiesFormatException { + currentProperties.loadFromXML(in); + } + + @Override + public void storeToXML(final OutputStream os, final String comment) throws IOException { + ensureAllPropertiesInitialized(); + currentProperties.storeToXML(os, comment); + } + + @Override + public void storeToXML(final OutputStream os, final String comment, final String encoding) + throws IOException { + ensureAllPropertiesInitialized(); + currentProperties.storeToXML(os, comment, encoding); + } + + @Override + public void storeToXML(final OutputStream os, final String comment, final Charset charset) + throws IOException { + ensureAllPropertiesInitialized(); + currentProperties.storeToXML(os, comment, charset); + } + + @Override + public String getProperty(final String key) { + ensurePropertyInitialized(key); + return currentProperties.getProperty(key); + } + + @Override + public String getProperty(final String key, final String defaultValue) { + ensurePropertyInitialized(key); + return currentProperties.getProperty(key, defaultValue); + } + + 
@Override + public Enumeration propertyNames() { + return currentProperties.propertyNames(); + } + + @Override + public Set stringPropertyNames() { + return currentProperties.stringPropertyNames(); + } + + @Override + public void list(final PrintStream out) { + ensureAllPropertiesInitialized(); + currentProperties.list(out); + } + + @Override + public void list(final PrintWriter out) { + ensureAllPropertiesInitialized(); + currentProperties.list(out); + } + + @Override + public int size() { + return currentProperties.size(); + } + + @Override + public boolean isEmpty() { + return currentProperties.isEmpty(); + } + + @Override + public Enumeration keys() { + return currentProperties.keys(); + } + + @Override + public Enumeration elements() { + ensureAllPropertiesInitialized(); + return currentProperties.elements(); + } + + @Override + public boolean contains(final Object value) { + ensureAllPropertiesInitialized(); + return currentProperties.contains(value); + } + + @Override + public boolean containsValue(final Object value) { + ensureAllPropertiesInitialized(); + return currentProperties.containsValue(value); + } + + @Override + public boolean containsKey(final Object key) { + return currentProperties.containsKey(key); + } + + @Override + public Object get(final Object key) { + if (key instanceof String) { + ensurePropertyInitialized((String) key); + } + return currentProperties.get(key); + } + + @Override + public synchronized Object put(final Object key, final Object value) { + if (key instanceof String) { + ensurePropertyInitialized((String) key); + } + return currentProperties.put(key, value); + } + + @Override + public synchronized Object remove(final Object key) { + if (key instanceof String) { + ensurePropertyInitialized((String) key); + } + return currentProperties.remove(key); + } + + @Override + public synchronized void putAll(final Map t) { + currentProperties.putAll(t); + } + + @Override + public synchronized void clear() { + currentProperties.clear(); + } + + @Override + public synchronized String toString() { + ensureAllPropertiesInitialized(); + return currentProperties.toString(); + } + + @Override + public Set keySet() { + return currentProperties.keySet(); + } + + @Override + public Collection values() { + ensureAllPropertiesInitialized(); + return currentProperties.values(); + } + + @Override + public Set> entrySet() { + ensureAllPropertiesInitialized(); + return currentProperties.entrySet(); + } + + @Override + public synchronized boolean equals(final Object o) { + ensureAllPropertiesInitialized(); + return currentProperties.equals(o); + } + + @Override + public synchronized int hashCode() { + ensureAllPropertiesInitialized(); + return currentProperties.hashCode(); + } + + @Override + public Object getOrDefault(final Object key, final Object defaultValue) { + if (key instanceof String) { + ensurePropertyInitialized((String) key); + } + return currentProperties.getOrDefault(key, defaultValue); + } + + @Override + public synchronized void forEach(final BiConsumer action) { + ensureAllPropertiesInitialized(); + currentProperties.forEach(action); + } + + @Override + public synchronized void replaceAll(final BiFunction function) { + ensureAllPropertiesInitialized(); + currentProperties.replaceAll(function); + } + + @Override + public synchronized Object putIfAbsent(final Object key, final Object value) { + if (key instanceof String) { + ensurePropertyInitialized((String) key); + } + return currentProperties.putIfAbsent(key, value); + } + + @Override + public synchronized 
boolean remove(final Object key, final Object value) { + if (key instanceof String) { + ensurePropertyInitialized((String) key); + } + return currentProperties.remove(key, value); + } + + @Override + public synchronized boolean replace(final Object key, final Object oldValue, final Object newValue) { + if (key instanceof String) { + ensurePropertyInitialized((String) key); + } + return currentProperties.replace(key, oldValue, newValue); + } + + @Override + public synchronized Object replace(final Object key, final Object value) { + if (key instanceof String) { + ensurePropertyInitialized((String) key); + } + return currentProperties.replace(key, value); + } + + @Override + public synchronized Object computeIfAbsent( + final Object key, + final Function mappingFunction) { + if (key instanceof String) { + ensurePropertyInitialized((String) key); + } + return currentProperties.computeIfAbsent(key, mappingFunction); + } + + @Override + public synchronized Object computeIfPresent( + final Object key, + final BiFunction remappingFunction) { + if (key instanceof String) { + ensurePropertyInitialized((String) key); + } + return currentProperties.computeIfPresent(key, remappingFunction); + } + + @Override + public synchronized Object compute( + final Object key, + final BiFunction remappingFunction) { + if (key instanceof String) { + ensurePropertyInitialized((String) key); + } + return currentProperties.compute(key, remappingFunction); + } + + @Override + public synchronized Object merge( + final Object key, + final Object value, + final BiFunction remappingFunction) { + if (key instanceof String) { + ensurePropertyInitialized((String) key); + } + return currentProperties.merge(key, value, remappingFunction); + } + + @Override + public synchronized Object clone() { + ensureAllPropertiesInitialized(); + return currentProperties.clone(); + } + }; + } + + private static final class SystemPropertiesSupportGetCurrentPropertiesPresent implements BooleanSupplier { + @Override + public boolean getAsBoolean() { + try { + Class klass = Class.forName("com.oracle.svm.core.jdk.SystemPropertiesSupport"); + klass.getDeclaredMethod("getCurrentProperties"); + return true; + } catch (ClassNotFoundException | NoSuchMethodException e) { + return false; + } + } + } + } } diff --git a/core/runtime/src/main/java/io/quarkus/runtime/graal/CidrAddressSubstitutions.java b/core/runtime/src/main/java/io/quarkus/runtime/graal/CidrAddressSubstitutions.java deleted file mode 100644 index cef12dc3b548f..0000000000000 --- a/core/runtime/src/main/java/io/quarkus/runtime/graal/CidrAddressSubstitutions.java +++ /dev/null @@ -1,89 +0,0 @@ -package io.quarkus.runtime.graal; - -import java.net.InetAddress; - -import org.wildfly.common.net.CidrAddress; -import org.wildfly.common.net.Inet; - -import com.oracle.svm.core.annotate.Alias; -import com.oracle.svm.core.annotate.InjectAccessors; -import com.oracle.svm.core.annotate.TargetClass; - -import io.quarkus.runtime.graal.Target_org_wildfly_common_net_CidrAddress.CidrAddressUtil; - -/* - * The following substitutions are required because of a new restriction in GraalVM 19.3.0 that prohibits the presence of - * java.net.Inet4Address and java.net.Inet6Address in the image heap. Each field annotated with @InjectAccessors is lazily - * recomputed at runtime on first access while CidrAddress.class can still be initialized during the native image build. 
- */ -@TargetClass(CidrAddress.class) -final class Target_org_wildfly_common_net_CidrAddress { - - @Alias - private Target_org_wildfly_common_net_CidrAddress(InetAddress networkAddress, int netmaskBits) { - } - - @Alias - @InjectAccessors(Inet4AnyCidrAccessor.class) - public static CidrAddress INET4_ANY_CIDR; - - @Alias - @InjectAccessors(Inet6AnyCidrAccessor.class) - public static CidrAddress INET6_ANY_CIDR; - - static class CidrAddressUtil { - static Target_org_wildfly_common_net_CidrAddress newInstance(InetAddress networkAddress, int netmaskBits) { - return new Target_org_wildfly_common_net_CidrAddress(networkAddress, netmaskBits); - } - } -} - -class Inet4AnyCidrAccessor { - - private static volatile Target_org_wildfly_common_net_CidrAddress INET4_ANY_CIDR; - - static Target_org_wildfly_common_net_CidrAddress get() { - Target_org_wildfly_common_net_CidrAddress result = INET4_ANY_CIDR; - if (result == null) { - // Lazy initialization on first access. - result = initializeOnce(); - } - return result; - } - - private static synchronized Target_org_wildfly_common_net_CidrAddress initializeOnce() { - Target_org_wildfly_common_net_CidrAddress result = INET4_ANY_CIDR; - if (result != null) { - // Double-checked locking is OK because INSTANCE is volatile. - return result; - } - result = CidrAddressUtil.newInstance(Inet.INET4_ANY, 0); - INET4_ANY_CIDR = result; - return result; - } -} - -class Inet6AnyCidrAccessor { - - private static volatile Target_org_wildfly_common_net_CidrAddress INET6_ANY_CIDR; - - static Target_org_wildfly_common_net_CidrAddress get() { - Target_org_wildfly_common_net_CidrAddress result = INET6_ANY_CIDR; - if (result == null) { - // Lazy initialization on first access. - result = initializeOnce(); - } - return result; - } - - private static synchronized Target_org_wildfly_common_net_CidrAddress initializeOnce() { - Target_org_wildfly_common_net_CidrAddress result = INET6_ANY_CIDR; - if (result != null) { - // Double-checked locking is OK because INSTANCE is volatile. 
- return result; - } - result = CidrAddressUtil.newInstance(Inet.INET6_ANY, 0); - INET6_ANY_CIDR = result; - return result; - } -} diff --git a/core/runtime/src/main/java/io/quarkus/runtime/graal/InetRunTime.java b/core/runtime/src/main/java/io/quarkus/runtime/graal/InetRunTime.java index 2c1b3ad592945..fa1ee1105412d 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/graal/InetRunTime.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/graal/InetRunTime.java @@ -3,7 +3,8 @@ import java.net.Inet4Address; import java.net.Inet6Address; -import org.wildfly.common.net.Inet; +import io.quarkus.runtime.graal.Target_io_smallrye_common_net_CidrAddress.CidrAddressUtil; +import io.smallrye.common.net.Inet; public class InetRunTime { public static final Inet4Address INET4_ANY = Inet.getInet4Address(0, 0, 0, 0); @@ -12,3 +13,83 @@ public class InetRunTime { public static final Inet6Address INET6_ANY = Inet.getInet6Address(0, 0, 0, 0, 0, 0, 0, 0); public static final Inet6Address INET6_LOOPBACK = Inet.getInet6Address(0, 0, 0, 0, 0, 0, 0, 1); } + +final class Inet4AnyAccessor { + static Inet4Address get() { + return InetRunTime.INET4_ANY; + } +} + +final class Inet4LoopbackAccessor { + static Inet4Address get() { + return InetRunTime.INET4_LOOPBACK; + } +} + +final class Inet4BroadcastAccessor { + static Inet4Address get() { + return InetRunTime.INET4_BROADCAST; + } +} + +final class Inet6AnyAccessor { + static Inet6Address get() { + return InetRunTime.INET6_ANY; + } +} + +final class Inet6LoopbackAccessor { + static Inet6Address get() { + return InetRunTime.INET6_LOOPBACK; + } +} + +class Inet4AnyCidrAccessor { + + private static volatile Target_io_smallrye_common_net_CidrAddress INET4_ANY_CIDR; + + static Target_io_smallrye_common_net_CidrAddress get() { + Target_io_smallrye_common_net_CidrAddress result = INET4_ANY_CIDR; + if (result == null) { + // Lazy initialization on first access. + result = initializeOnce(); + } + return result; + } + + private static synchronized Target_io_smallrye_common_net_CidrAddress initializeOnce() { + Target_io_smallrye_common_net_CidrAddress result = INET4_ANY_CIDR; + if (result != null) { + // Double-checked locking is OK because INSTANCE is volatile. + return result; + } + result = CidrAddressUtil.newInstance(Inet.INET4_ANY, 0); + INET4_ANY_CIDR = result; + return result; + } +} + +class Inet6AnyCidrAccessor { + + private static volatile Target_io_smallrye_common_net_CidrAddress INET6_ANY_CIDR; + + static Target_io_smallrye_common_net_CidrAddress get() { + Target_io_smallrye_common_net_CidrAddress result = INET6_ANY_CIDR; + if (result == null) { + // Lazy initialization on first access. + result = initializeOnce(); + } + return result; + } + + private static synchronized Target_io_smallrye_common_net_CidrAddress initializeOnce() { + Target_io_smallrye_common_net_CidrAddress result = INET6_ANY_CIDR; + if (result != null) { + // Double-checked locking is OK because INSTANCE is volatile. 
+ return result; + } + result = CidrAddressUtil.newInstance(Inet.INET6_ANY, 0); + INET6_ANY_CIDR = result; + return result; + } +} diff --git a/core/runtime/src/main/java/io/quarkus/runtime/graal/Target_io_smallrye_common_net_CidrAddress.java b/core/runtime/src/main/java/io/quarkus/runtime/graal/Target_io_smallrye_common_net_CidrAddress.java new file mode 100644 index 0000000000000..2b633cbc3a338 --- /dev/null +++ b/core/runtime/src/main/java/io/quarkus/runtime/graal/Target_io_smallrye_common_net_CidrAddress.java @@ -0,0 +1,36 @@ +package io.quarkus.runtime.graal; + +import java.net.InetAddress; + +import com.oracle.svm.core.annotate.Alias; +import com.oracle.svm.core.annotate.InjectAccessors; +import com.oracle.svm.core.annotate.TargetClass; + +import io.smallrye.common.net.CidrAddress; + +/* + * The following substitutions are required because of a new restriction in GraalVM 19.3.0 that prohibits the presence of + * java.net.Inet4Address and java.net.Inet6Address in the image heap. Each field annotated with @InjectAccessors is lazily + * recomputed at runtime on first access while CidrAddress.class can still be initialized during the native image build. + */ +@TargetClass(CidrAddress.class) +final class Target_io_smallrye_common_net_CidrAddress { + + @Alias + private Target_io_smallrye_common_net_CidrAddress(InetAddress networkAddress, int netmaskBits) { + } + + @Alias + @InjectAccessors(Inet4AnyCidrAccessor.class) + public static CidrAddress INET4_ANY_CIDR; + + @Alias + @InjectAccessors(Inet6AnyCidrAccessor.class) + public static CidrAddress INET6_ANY_CIDR; + + static class CidrAddressUtil { + static Target_io_smallrye_common_net_CidrAddress newInstance(InetAddress networkAddress, int netmaskBits) { + return new Target_io_smallrye_common_net_CidrAddress(networkAddress, netmaskBits); + } + } +} diff --git a/core/runtime/src/main/java/io/quarkus/runtime/graal/Target_io_smallrye_common_net_Inet.java b/core/runtime/src/main/java/io/quarkus/runtime/graal/Target_io_smallrye_common_net_Inet.java new file mode 100644 index 0000000000000..41c3794a0832c --- /dev/null +++ b/core/runtime/src/main/java/io/quarkus/runtime/graal/Target_io_smallrye_common_net_Inet.java @@ -0,0 +1,39 @@ +package io.quarkus.runtime.graal; + +import java.net.Inet4Address; +import java.net.Inet6Address; + +import com.oracle.svm.core.annotate.Alias; +import com.oracle.svm.core.annotate.InjectAccessors; +import com.oracle.svm.core.annotate.TargetClass; + +import io.smallrye.common.net.Inet; + +/* + * The following substitutions are required because of a new restriction in GraalVM 19.3.0 that prohibits the presence of + * java.net.Inet4Address and java.net.Inet6Address in the image heap. Each field annotated with @InjectAccessors is lazily + * recomputed at runtime on first access while Inet.class can still be initialized during the native image build. 
+ */ +@TargetClass(Inet.class) +final class Target_io_smallrye_common_net_Inet { + + @Alias + @InjectAccessors(Inet4AnyAccessor.class) + public static Inet4Address INET4_ANY; + + @Alias + @InjectAccessors(Inet4LoopbackAccessor.class) + public static Inet4Address INET4_LOOPBACK; + + @Alias + @InjectAccessors(Inet4BroadcastAccessor.class) + public static Inet4Address INET4_BROADCAST; + + @Alias + @InjectAccessors(Inet6AnyAccessor.class) + public static Inet6Address INET6_ANY; + + @Alias + @InjectAccessors(Inet6LoopbackAccessor.class) + public static Inet6Address INET6_LOOPBACK; +} diff --git a/core/runtime/src/main/java/io/quarkus/runtime/graal/Target_org_wildfly_common_net_CidrAddress.java b/core/runtime/src/main/java/io/quarkus/runtime/graal/Target_org_wildfly_common_net_CidrAddress.java new file mode 100644 index 0000000000000..fd97a3771f793 --- /dev/null +++ b/core/runtime/src/main/java/io/quarkus/runtime/graal/Target_org_wildfly_common_net_CidrAddress.java @@ -0,0 +1,37 @@ +package io.quarkus.runtime.graal; + +import java.net.InetAddress; + +import org.wildfly.common.net.CidrAddress; + +import com.oracle.svm.core.annotate.Alias; +import com.oracle.svm.core.annotate.InjectAccessors; +import com.oracle.svm.core.annotate.TargetClass; + +/* + * The following substitutions are required because of a new restriction in GraalVM 19.3.0 that prohibits the presence of + * java.net.Inet4Address and java.net.Inet6Address in the image heap. Each field annotated with @InjectAccessors is lazily + * recomputed at runtime on first access while CidrAddress.class can still be initialized during the native image build. + */ +@TargetClass(CidrAddress.class) +final class Target_org_wildfly_common_net_CidrAddress { + + @Alias + public static Target_org_wildfly_common_net_CidrAddress create(InetAddress networkAddress, int netmaskBits) { + return null; + } + + @Alias + @InjectAccessors(Inet4AnyCidrAccessor.class) + public static CidrAddress INET4_ANY_CIDR; + + @Alias + @InjectAccessors(Inet6AnyCidrAccessor.class) + public static CidrAddress INET6_ANY_CIDR; + + static class CidrAddressUtil { + static Target_org_wildfly_common_net_CidrAddress newInstance(InetAddress networkAddress, int netmaskBits) { + return Target_org_wildfly_common_net_CidrAddress.create(networkAddress, netmaskBits); + } + } +} diff --git a/core/runtime/src/main/java/io/quarkus/runtime/graal/InetSubstitutions.java b/core/runtime/src/main/java/io/quarkus/runtime/graal/Target_org_wildfly_common_net_Inet.java similarity index 68% rename from core/runtime/src/main/java/io/quarkus/runtime/graal/InetSubstitutions.java rename to core/runtime/src/main/java/io/quarkus/runtime/graal/Target_org_wildfly_common_net_Inet.java index 22e9ec376b93c..f8d5a1fab8982 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/graal/InetSubstitutions.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/graal/Target_org_wildfly_common_net_Inet.java @@ -37,33 +37,3 @@ final class Target_org_wildfly_common_net_Inet { @InjectAccessors(Inet6LoopbackAccessor.class) public static Inet6Address INET6_LOOPBACK; } - -final class Inet4AnyAccessor { - static Inet4Address get() { - return InetRunTime.INET4_ANY; - } -} - -final class Inet4LoopbackAccessor { - static Inet4Address get() { - return InetRunTime.INET4_LOOPBACK; - } -} - -final class Inet4BroadcastAccessor { - static Inet4Address get() { - return InetRunTime.INET4_BROADCAST; - } -} - -final class Inet6AnyAccessor { - static Inet6Address get() { - return InetRunTime.INET6_ANY; - } -} - -final class 
Inet6LoopbackAccessor { - static Inet6Address get() { - return InetRunTime.INET6_LOOPBACK; - } -} diff --git a/core/runtime/src/main/java/io/quarkus/runtime/logging/LoggingSetupRecorder.java b/core/runtime/src/main/java/io/quarkus/runtime/logging/LoggingSetupRecorder.java index d6ea4794e6b20..f35a464a7ad73 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/logging/LoggingSetupRecorder.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/logging/LoggingSetupRecorder.java @@ -1,9 +1,9 @@ package io.quarkus.runtime.logging; +import static io.smallrye.common.net.HostName.getQualifiedHostName; +import static io.smallrye.common.os.Process.getProcessName; import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; -import static org.wildfly.common.net.HostName.getQualifiedHostName; -import static org.wildfly.common.os.Process.getProcessName; import java.io.FileNotFoundException; import java.io.IOException; diff --git a/core/runtime/src/main/java/io/quarkus/runtime/test/TestHttpEndpointProvider.java b/core/runtime/src/main/java/io/quarkus/runtime/test/TestHttpEndpointProvider.java index e6c1bd6d07be9..b1196f18ea8a3 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/test/TestHttpEndpointProvider.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/test/TestHttpEndpointProvider.java @@ -1,5 +1,7 @@ package io.quarkus.runtime.test; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; import java.util.ArrayList; import java.util.List; import java.util.ServiceLoader; @@ -13,10 +15,33 @@ public interface TestHttpEndpointProvider { Function, String> endpointProvider(); static List, String>> load() { + + // TODO sometimes - always - this is loaded with the system classloader, but if we can fix that, we can drop the reflection + // TODO why does this have to be different than it was in the pre-WG-30 world? + // TODO does fixing the loading of QTE with the system classloader fix this? + // TODO add a bypass path if the classloader is the same? 
- not worth it, it never is + // TODO #store + List, String>> ret = new ArrayList<>(); - for (TestHttpEndpointProvider i : ServiceLoader.load(TestHttpEndpointProvider.class, - Thread.currentThread().getContextClassLoader())) { - ret.add(i.endpointProvider()); + System.out.println("HOLLY wull load " + TestHttpEndpointProvider.class.getClassLoader() + " and tccl " + + Thread.currentThread().getContextClassLoader()); + try { + + ClassLoader targetclassloader = Thread.currentThread().getContextClassLoader(); // TODO why did I have this as TestHttpEndpointProvider.class.getClassLoader(); // + Class target = targetclassloader.loadClass(TestHttpEndpointProvider.class.getName()); + Method method = target.getMethod("endpointProvider"); + + for (Object i : ServiceLoader.load(target, + targetclassloader)) { + ret.add((Function, String>) method.invoke(i)); + + } + } catch (IllegalAccessException | ClassNotFoundException e) { + throw new RuntimeException(e); + } catch (InvocationTargetException e) { + throw new RuntimeException(e); + } catch (NoSuchMethodException e) { + throw new RuntimeException(e); } return ret; } diff --git a/core/runtime/src/main/java/io/quarkus/runtime/util/StringUtil.java b/core/runtime/src/main/java/io/quarkus/runtime/util/StringUtil.java index e3bb032289536..52e33c76b2d50 100644 --- a/core/runtime/src/main/java/io/quarkus/runtime/util/StringUtil.java +++ b/core/runtime/src/main/java/io/quarkus/runtime/util/StringUtil.java @@ -112,13 +112,13 @@ public String next() { } /** - * @deprecated Use {@link String#join} instead. + * Dropped for public usage, kept in place for usage by hyphenate method to avoid need for lambda + * * @param delim delimiter * @param it iterator * @return the joined string */ - @Deprecated - public static String join(String delim, Iterator it) { + private static String join(String delim, Iterator it) { final StringBuilder b = new StringBuilder(); if (it.hasNext()) { b.append(it.next()); diff --git a/core/runtime/src/test/java/io/quarkus/runtime/configuration/ConverterTestCase.java b/core/runtime/src/test/java/io/quarkus/runtime/configuration/ConverterTestCase.java index fabe058dfdcb2..f524d57b9665d 100644 --- a/core/runtime/src/test/java/io/quarkus/runtime/configuration/ConverterTestCase.java +++ b/core/runtime/src/test/java/io/quarkus/runtime/configuration/ConverterTestCase.java @@ -8,8 +8,9 @@ import org.junit.jupiter.api.Test; import org.opentest4j.TestAbortedException; -import org.wildfly.common.net.CidrAddress; -import org.wildfly.common.net.Inet; + +import io.smallrye.common.net.CidrAddress; +import io.smallrye.common.net.Inet; public class ConverterTestCase { diff --git a/devtools/bom-descriptor-json/pom.xml b/devtools/bom-descriptor-json/pom.xml index d93441834129b..6d982ba55e511 100644 --- a/devtools/bom-descriptor-json/pom.xml +++ b/devtools/bom-descriptor-json/pom.xml @@ -1591,6 +1591,19 @@ + + io.quarkus + quarkus-micrometer-opentelemetry + ${project.version} + pom + test + + + * + * + + + io.quarkus quarkus-micrometer-registry-prometheus @@ -1864,6 +1877,19 @@ + + io.quarkus + quarkus-oidc-token-propagation-common + ${project.version} + pom + test + + + * + * + + + io.quarkus quarkus-openshift diff --git a/devtools/gradle/gradle/libs.versions.toml b/devtools/gradle/gradle/libs.versions.toml index c9af6e44aa83f..d5d736e288b29 100644 --- a/devtools/gradle/gradle/libs.versions.toml +++ b/devtools/gradle/gradle/libs.versions.toml @@ -1,9 +1,9 @@ [versions] -plugin-publish = "1.3.0" +plugin-publish = "1.3.1" # updating Kotlin here makes 
QuarkusPluginTest > shouldNotFailOnProjectDependenciesWithoutMain(Path) fail kotlin = "2.0.21" -smallrye-config = "3.11.1" +smallrye-config = "3.11.2" junit5 = "5.10.5" assertj = "3.27.3" diff --git a/devtools/gradle/settings.gradle.kts b/devtools/gradle/settings.gradle.kts index 6532acd5a053f..da5eaa3e276e4 100644 --- a/devtools/gradle/settings.gradle.kts +++ b/devtools/gradle/settings.gradle.kts @@ -1,5 +1,5 @@ plugins { - id("com.gradle.develocity") version "3.19" + id("com.gradle.develocity") version "3.19.1" } develocity { diff --git a/devtools/maven/src/main/java/io/quarkus/maven/DependencySbomMojo.java b/devtools/maven/src/main/java/io/quarkus/maven/DependencySbomMojo.java index eec9de1fabb90..049be7d61d1e8 100644 --- a/devtools/maven/src/main/java/io/quarkus/maven/DependencySbomMojo.java +++ b/devtools/maven/src/main/java/io/quarkus/maven/DependencySbomMojo.java @@ -20,7 +20,6 @@ import io.quarkus.bootstrap.resolver.BootstrapAppModelResolver; import io.quarkus.bootstrap.resolver.maven.BootstrapMavenContext; import io.quarkus.bootstrap.resolver.maven.EffectiveModelResolver; -import io.quarkus.bootstrap.resolver.maven.IncubatingApplicationModelResolver; import io.quarkus.bootstrap.resolver.maven.MavenArtifactResolver; import io.quarkus.cyclonedx.generator.CycloneDxSbomGenerator; import io.quarkus.maven.components.QuarkusWorkspaceProvider; @@ -131,9 +130,7 @@ private ApplicationModel resolveApplicationModel() "Parameter 'mode' was set to '" + mode + "' while expected one of 'dev', 'test' or 'prod'"); } } - // enable the incubating model resolver impl by default for this mojo - modelResolver.setIncubatingModelResolver( - !IncubatingApplicationModelResolver.isIncubatingModelResolverProperty(project.getProperties(), "false")); + modelResolver.setLegacyModelResolver(BootstrapAppModelResolver.isLegacyModelResolver(project.getProperties())); return modelResolver.resolveModel(appArtifact); } catch (Exception e) { throw new MojoExecutionException("Failed to resolve application model " + appArtifact + " dependencies", e); diff --git a/devtools/maven/src/main/java/io/quarkus/maven/DependencyTreeMojo.java b/devtools/maven/src/main/java/io/quarkus/maven/DependencyTreeMojo.java index 7de487b87c0de..89f2223b14afb 100644 --- a/devtools/maven/src/main/java/io/quarkus/maven/DependencyTreeMojo.java +++ b/devtools/maven/src/main/java/io/quarkus/maven/DependencyTreeMojo.java @@ -24,7 +24,6 @@ import io.quarkus.bootstrap.resolver.BootstrapAppModelResolver; import io.quarkus.bootstrap.resolver.maven.BootstrapMavenContext; import io.quarkus.bootstrap.resolver.maven.DependencyLoggingConfig; -import io.quarkus.bootstrap.resolver.maven.IncubatingApplicationModelResolver; import io.quarkus.bootstrap.resolver.maven.MavenArtifactResolver; import io.quarkus.maven.components.QuarkusWorkspaceProvider; import io.quarkus.maven.dependency.ArtifactCoords; @@ -55,7 +54,7 @@ public class DependencyTreeMojo extends AbstractMojo { String mode; /** - * INCUBATING option, enabled with @{code -Dquarkus.bootstrap.incubating-model-resolver} system or project property. + * INCUBATING option, enabled with {@code -Dquarkus.bootstrap.incubating-model-resolver} system or project property. *

* Whether to log dependency properties, such as on which classpath they belong, whether they are hot-reloadable in dev * mode, etc. @@ -64,7 +63,7 @@ public class DependencyTreeMojo extends AbstractMojo { boolean verbose; /** - * INCUBATING option, enabled with @{code -Dquarkus.bootstrap.incubating-model-resolver} system or project property. + * INCUBATING option, enabled with {@code -Dquarkus.bootstrap.incubating-model-resolver} system or project property. *

* Whether to log all dependencies of each dependency node in a tree, adding {@code [+]} suffix * to those whose dependencies are not expanded. @@ -154,9 +153,7 @@ private void logTree(final Consumer log) throws MojoExecutionException { "Parameter 'mode' was set to '" + mode + "' while expected one of 'dev', 'test' or 'prod'"); } } - // enable the incubating model resolver impl by default for this mojo - modelResolver.setIncubatingModelResolver( - !IncubatingApplicationModelResolver.isIncubatingModelResolverProperty(project.getProperties(), "false")); + modelResolver.setLegacyModelResolver(BootstrapAppModelResolver.isLegacyModelResolver(project.getProperties())); modelResolver.setDepLogConfig(DependencyLoggingConfig.builder() .setMessageConsumer(log) .setVerbose(verbose) diff --git a/devtools/maven/src/main/java/io/quarkus/maven/DevMojo.java b/devtools/maven/src/main/java/io/quarkus/maven/DevMojo.java index b4c61c4377408..99da1770f7cf1 100644 --- a/devtools/maven/src/main/java/io/quarkus/maven/DevMojo.java +++ b/devtools/maven/src/main/java/io/quarkus/maven/DevMojo.java @@ -97,7 +97,6 @@ import io.quarkus.bootstrap.resolver.BootstrapAppModelResolver; import io.quarkus.bootstrap.resolver.maven.BootstrapMavenContext; import io.quarkus.bootstrap.resolver.maven.BootstrapMavenContextConfig; -import io.quarkus.bootstrap.resolver.maven.IncubatingApplicationModelResolver; import io.quarkus.bootstrap.resolver.maven.MavenArtifactResolver; import io.quarkus.bootstrap.util.BootstrapUtils; import io.quarkus.bootstrap.workspace.ArtifactSources; @@ -151,6 +150,7 @@ public class DevMojo extends AbstractMojo { private static final String ORG_APACHE_MAVEN_PLUGINS = "org.apache.maven.plugins"; private static final String MAVEN_COMPILER_PLUGIN = "maven-compiler-plugin"; private static final String MAVEN_RESOURCES_PLUGIN = "maven-resources-plugin"; + private static final String MAVEN_SUREFIRE_PLUGIN = "maven-surefire-plugin"; private static final String MAVEN_TOOLCHAINS_PLUGIN = "maven-toolchains-plugin"; private static final String ORG_JETBRAINS_KOTLIN = "org.jetbrains.kotlin"; @@ -263,6 +263,22 @@ public class DevMojo extends AbstractMojo { @Parameter private Map systemProperties = Map.of(); + /** + * When enabled, the {@code } and {@code } + * elements of the Maven Surefire plugin are copied to environment variables and system + * properties defined by this plugin. Note that no other Surefire configuration is used + * (notably {@code }), only the 2 elements mentioned above. + *

+ * This plugin's {@code <environmentVariables>} and {@code <systemProperties>} have + * priority, so duplicate keys are not copied. *

+ * Since environment variables and system properties are global to the entire process, + * this also affects dev mode (when executed as {@code quarkus:dev}). Because of that, + * this copying action is disabled by default and requires opt-in. + */ + @Parameter(defaultValue = "false") + private boolean copySurefireVariables; + @Parameter(defaultValue = "${session}") private MavenSession session; @@ -485,6 +501,8 @@ public void close() throws IOException { } }); + copySurefireVariables(); + try { DevModeRunner runner = new DevModeRunner(bootstrapId); Map pomFiles = readPomFileTimestamps(runner); @@ -1406,9 +1424,7 @@ private DevModeCommandLine newLauncher(String actualDebugPort, String bootstrapI .setDevMode(true) .setTest(LaunchMode.TEST.equals(getLaunchModeClasspath())) .setCollectReloadableDependencies(!noDeps) - // enabled the incubating model resolver for in dev mode - .setIncubatingModelResolver(!IncubatingApplicationModelResolver - .isIncubatingModelResolverProperty(project.getProperties(), "false")) + .setLegacyModelResolver(BootstrapAppModelResolver.isLegacyModelResolver(project.getProperties())) .resolveModel(mvnCtx.getCurrentProject().getAppArtifact()); } @@ -1495,6 +1511,35 @@ private void setJvmArgs(DevModeCommandLineBuilder builder) throws Exception { } + private void copySurefireVariables() { + if (!copySurefireVariables) { + return; + } + + Plugin surefireMavenPlugin = getConfiguredPluginOrNull(ORG_APACHE_MAVEN_PLUGINS, MAVEN_SUREFIRE_PLUGIN); + if (surefireMavenPlugin == null) { + return; + } + + Xpp3Dom config = (Xpp3Dom) surefireMavenPlugin.getConfiguration(); + if (config != null) { + // we copy the maps because they can be unmodifiable + environmentVariables = new HashMap<>(environmentVariables); + copyConfiguration(config.getChild("environmentVariables"), environmentVariables); + systemProperties = new HashMap<>(systemProperties); + copyConfiguration(config.getChild("systemPropertyVariables"), systemProperties); + } + } + + private void copyConfiguration(Xpp3Dom config, Map targetMap) { + if (config == null) { + return; + } + for (Xpp3Dom child : config.getChildren()) { + targetMap.putIfAbsent(child.getName(), child.getValue()); + } + } + private void applyCompilerFlag(Optional compilerPluginConfiguration, String flagName, Consumer builderCall) { compilerPluginConfiguration @@ -1599,14 +1644,7 @@ private List getProjectAetherDependencyMana } private void setKotlinSpecificFlags(DevModeCommandLineBuilder builder) { - Plugin kotlinMavenPlugin = null; - for (Plugin plugin : project.getBuildPlugins()) { - if (plugin.getArtifactId().equals(KOTLIN_MAVEN_PLUGIN) && plugin.getGroupId().equals(ORG_JETBRAINS_KOTLIN)) { - kotlinMavenPlugin = plugin; - break; - } - } - + Plugin kotlinMavenPlugin = getConfiguredPluginOrNull(ORG_JETBRAINS_KOTLIN, KOTLIN_MAVEN_PLUGIN); if (kotlinMavenPlugin == null) { return; } @@ -1645,14 +1683,7 @@ private void setKotlinSpecificFlags(DevModeCommandLineBuilder builder) { } private void setAnnotationProcessorFlags(DevModeCommandLineBuilder builder) { - Plugin compilerMavenPlugin = null; - for (Plugin plugin : project.getBuildPlugins()) { - if (plugin.getArtifactId().equals("maven-compiler-plugin") - && plugin.getGroupId().equals("org.apache.maven.plugins")) { - compilerMavenPlugin = plugin; - break; - } - } + Plugin compilerMavenPlugin = getConfiguredPluginOrNull(ORG_APACHE_MAVEN_PLUGINS, MAVEN_COMPILER_PLUGIN); if (compilerMavenPlugin == null) { return; } @@ -1683,13 +1714,9 @@ protected void modifyDevModeContext(DevModeCommandLineBuilder builder) { 
} private Optional findCompilerPluginConfiguration() { - for (final Plugin plugin : project.getBuildPlugins()) { - if (plugin.getArtifactId().equals(MAVEN_COMPILER_PLUGIN) && plugin.getGroupId().equals(ORG_APACHE_MAVEN_PLUGINS)) { - final Xpp3Dom compilerPluginConfiguration = (Xpp3Dom) plugin.getConfiguration(); - if (compilerPluginConfiguration != null) { - return Optional.of(compilerPluginConfiguration); - } - } + Plugin plugin = getConfiguredPluginOrNull(ORG_APACHE_MAVEN_PLUGINS, MAVEN_COMPILER_PLUGIN); + if (plugin != null) { + return Optional.ofNullable((Xpp3Dom) plugin.getConfiguration()); } return Optional.empty(); } diff --git a/devtools/maven/src/main/java/io/quarkus/maven/MojoLogger.java b/devtools/maven/src/main/java/io/quarkus/maven/MojoLogger.java index c1297fa658e09..423fdc27f231a 100644 --- a/devtools/maven/src/main/java/io/quarkus/maven/MojoLogger.java +++ b/devtools/maven/src/main/java/io/quarkus/maven/MojoLogger.java @@ -9,7 +9,8 @@ import org.apache.maven.shared.utils.logging.MessageUtils; import org.jboss.logging.Logger; import org.jboss.logging.LoggerProvider; -import org.wildfly.common.Assert; + +import io.smallrye.common.constraint.Assert; public class MojoLogger implements LoggerProvider { static final Object[] NO_PARAMS = new Object[0]; diff --git a/devtools/maven/src/main/java/io/quarkus/maven/QuarkusBootstrapProvider.java b/devtools/maven/src/main/java/io/quarkus/maven/QuarkusBootstrapProvider.java index 1341a7d14c9eb..0440aac927820 100644 --- a/devtools/maven/src/main/java/io/quarkus/maven/QuarkusBootstrapProvider.java +++ b/devtools/maven/src/main/java/io/quarkus/maven/QuarkusBootstrapProvider.java @@ -43,7 +43,6 @@ import io.quarkus.bootstrap.resolver.maven.BootstrapMavenContextConfig; import io.quarkus.bootstrap.resolver.maven.BootstrapMavenException; import io.quarkus.bootstrap.resolver.maven.EffectiveModelResolver; -import io.quarkus.bootstrap.resolver.maven.IncubatingApplicationModelResolver; import io.quarkus.bootstrap.resolver.maven.MavenArtifactResolver; import io.quarkus.bootstrap.resolver.maven.workspace.LocalProject; import io.quarkus.maven.components.ManifestSection; @@ -239,11 +238,8 @@ private CuratedApplication doBootstrap(QuarkusBootstrapMojo mojo, LaunchMode mod Consumer builderCustomizer) throws MojoExecutionException { final BootstrapAppModelResolver modelResolver = new BootstrapAppModelResolver(artifactResolver(mojo, mode)) - .setIncubatingModelResolver( - IncubatingApplicationModelResolver.isIncubatingEnabled(mojo.mavenProject().getProperties()) - || mode == LaunchMode.DEVELOPMENT - && !IncubatingApplicationModelResolver.isIncubatingModelResolverProperty( - mojo.mavenProject().getProperties(), "false")) + .setLegacyModelResolver( + BootstrapAppModelResolver.isLegacyModelResolver(mojo.mavenProject().getProperties())) .setDevMode(mode == LaunchMode.DEVELOPMENT) .setTest(mode == LaunchMode.TEST) .setCollectReloadableDependencies(mode == LaunchMode.DEVELOPMENT || mode == LaunchMode.TEST); diff --git a/devtools/maven/src/main/java/io/quarkus/maven/RunMojo.java b/devtools/maven/src/main/java/io/quarkus/maven/RunMojo.java index 8f246c035e48b..2bbea14df481d 100644 --- a/devtools/maven/src/main/java/io/quarkus/maven/RunMojo.java +++ b/devtools/maven/src/main/java/io/quarkus/maven/RunMojo.java @@ -88,7 +88,9 @@ public void accept(Map cmds) { throw new RuntimeException("Should never reach this!"); } List args = (List) cmd.get(0); - System.out.println("Executing \"" + String.join(" ", args) + "\""); + if (getLog().isInfoEnabled()) { + 
getLog().info("Executing \"" + String.join(" ", args) + "\""); + } Path workingDirectory = (Path) cmd.get(1); try { ProcessBuilder builder = new ProcessBuilder() diff --git a/devtools/maven/src/test/java/io/quarkus/maven/ConditionalDependencyGraphMojoTest.java b/devtools/maven/src/test/java/io/quarkus/maven/ConditionalDependencyGraphMojoTest.java index 26509fd19828f..b04acb9a54f43 100644 --- a/devtools/maven/src/test/java/io/quarkus/maven/ConditionalDependencyGraphMojoTest.java +++ b/devtools/maven/src/test/java/io/quarkus/maven/ConditionalDependencyGraphMojoTest.java @@ -14,11 +14,6 @@ protected boolean isGraph() { return true; } - @Override - protected boolean isIncubatingModelResolver() { - return true; - } - @Override protected void initRepo() { diff --git a/devtools/maven/src/test/java/io/quarkus/maven/ConditionalDependencyTreeMojoRuntimeOnlyTest.java b/devtools/maven/src/test/java/io/quarkus/maven/ConditionalDependencyTreeMojoRuntimeOnlyTest.java index 196e12c015826..0b9f510719be5 100644 --- a/devtools/maven/src/test/java/io/quarkus/maven/ConditionalDependencyTreeMojoRuntimeOnlyTest.java +++ b/devtools/maven/src/test/java/io/quarkus/maven/ConditionalDependencyTreeMojoRuntimeOnlyTest.java @@ -9,11 +9,6 @@ protected String mode() { return "prod"; } - @Override - protected boolean isIncubatingModelResolver() { - return true; - } - @Override protected boolean isRuntimeOnly() { return true; diff --git a/devtools/maven/src/test/java/io/quarkus/maven/ConditionalDependencyTreeMojoTest.java b/devtools/maven/src/test/java/io/quarkus/maven/ConditionalDependencyTreeMojoTest.java index 45d10a0417b62..50fcbe07c0332 100644 --- a/devtools/maven/src/test/java/io/quarkus/maven/ConditionalDependencyTreeMojoTest.java +++ b/devtools/maven/src/test/java/io/quarkus/maven/ConditionalDependencyTreeMojoTest.java @@ -9,11 +9,6 @@ protected String mode() { return "prod"; } - @Override - protected boolean isIncubatingModelResolver() { - return true; - } - @Override protected void initRepo() { diff --git a/devtools/maven/src/test/java/io/quarkus/maven/DependencyTreeMojoTestBase.java b/devtools/maven/src/test/java/io/quarkus/maven/DependencyTreeMojoTestBase.java index 40acadbe762e2..c4c081a1b63fc 100644 --- a/devtools/maven/src/test/java/io/quarkus/maven/DependencyTreeMojoTestBase.java +++ b/devtools/maven/src/test/java/io/quarkus/maven/DependencyTreeMojoTestBase.java @@ -61,10 +61,6 @@ protected boolean isGraph() { return false; } - protected boolean isIncubatingModelResolver() { - return false; - } - protected boolean isRuntimeOnly() { return false; } @@ -79,9 +75,6 @@ public void test() throws Exception { new DefaultArtifactHandler(ArtifactCoords.TYPE_JAR))); mojo.project.setModel(appModel); mojo.project.setOriginalModel(appModel); - if (isIncubatingModelResolver()) { - mojo.project.getProperties().setProperty("quarkus.bootstrap.incubating-model-resolver", "true"); - } mojo.resolver = mvnResolver; mojo.mode = mode(); mojo.graph = isGraph(); diff --git a/docs/pom.xml b/docs/pom.xml index 8dd5d654a0a7c..54f8ecb22aeec 100644 --- a/docs/pom.xml +++ b/docs/pom.xml @@ -1602,6 +1602,19 @@ + + io.quarkus + quarkus-micrometer-opentelemetry-deployment + ${project.version} + pom + test + + + * + * + + + io.quarkus quarkus-micrometer-registry-prometheus-deployment @@ -1875,6 +1888,19 @@ + + io.quarkus + quarkus-oidc-token-propagation-common-deployment + ${project.version} + pom + test + + + * + * + + + io.quarkus quarkus-openshift-deployment diff --git a/docs/src/main/asciidoc/_includes/snip-note-derby.adoc 
b/docs/src/main/asciidoc/_includes/snip-note-derby.adoc index e69de29bb2d1d..4fdf7f13d66e0 100644 --- a/docs/src/main/asciidoc/_includes/snip-note-derby.adoc +++ b/docs/src/main/asciidoc/_includes/snip-note-derby.adoc @@ -0,0 +1,4 @@ +//// +This file is intentionally empty. +It is used for compatibility with downstream systems, where this empty snippet is replaced with an equivalent that carries a required note. +//// \ No newline at end of file diff --git a/docs/src/main/asciidoc/_includes/snip-note-encrypted-pem-tech-prev.adoc b/docs/src/main/asciidoc/_includes/snip-note-encrypted-pem-tech-prev.adoc new file mode 100644 index 0000000000000..4fdf7f13d66e0 --- /dev/null +++ b/docs/src/main/asciidoc/_includes/snip-note-encrypted-pem-tech-prev.adoc @@ -0,0 +1,4 @@ +//// +This file is intentionally empty. +It is used for compatibility with downstream systems, where this empty snippet is replaced with an equivalent that carries a required note. +//// \ No newline at end of file diff --git a/docs/src/main/asciidoc/aws-lambda.adoc b/docs/src/main/asciidoc/aws-lambda.adoc index ec05de3639b8b..7fe1087152132 100644 --- a/docs/src/main/asciidoc/aws-lambda.adoc +++ b/docs/src/main/asciidoc/aws-lambda.adoc @@ -586,7 +586,7 @@ To extract the required ssl, you must start up a Docker container in the backgro First, let's start the GraalVM container, noting the container id output. [source,bash,subs=attributes+] ---- -docker run -it -d --entrypoint bash quay.io/quarkus/ubi-quarkus-mandrel-builder-image:{mandrel-flavor} +docker run -it -d --entrypoint bash quay.io/quarkus/ubi9-quarkus-mandrel-builder-image:{mandrel-flavor} # This will output a container id, like 6304eea6179522aff69acb38eca90bedfd4b970a5475aa37ccda3585bc2abdde # Note this value as we will need it for the commands below diff --git a/docs/src/main/asciidoc/building-native-image.adoc b/docs/src/main/asciidoc/building-native-image.adoc index baa9be1b69fb9..523539ae17cd8 100644 --- a/docs/src/main/asciidoc/building-native-image.adoc +++ b/docs/src/main/asciidoc/building-native-image.adoc @@ -255,7 +255,7 @@ To see the `GreetingResourceIT` run against the native executable, use `./mvnw v $ ./mvnw verify -Dnative ... Finished generating 'getting-started-1.0.0-SNAPSHOT-runner' in 22.0s. -[INFO] [io.quarkus.deployment.pkg.steps.NativeImageBuildRunner] docker run --env LANG=C --rm --user 1000:1000 -v /home/zakkak/code/quarkus-quickstarts/getting-started/target/getting-started-1.0.0-SNAPSHOT-native-image-source-jar:/project:z --entrypoint /bin/bash quay.io/quarkus/ubi-quarkus-mandrel-builder-image:{mandrel-flavor} -c objcopy --strip-debug getting-started-1.0.0-SNAPSHOT-runner +[INFO] [io.quarkus.deployment.pkg.steps.NativeImageBuildRunner] docker run --env LANG=C --rm --user 1000:1000 -v /home/zakkak/code/quarkus-quickstarts/getting-started/target/getting-started-1.0.0-SNAPSHOT-native-image-source-jar:/project:z --entrypoint /bin/bash quay.io/quarkus/ubi9-quarkus-mandrel-builder-image:{mandrel-flavor} -c objcopy --strip-debug getting-started-1.0.0-SNAPSHOT-runner [INFO] [io.quarkus.deployment.QuarkusAugmentor] Quarkus augmentation completed in 70686ms [INFO] [INFO] --- maven-failsafe-plugin:3.0.0-M7:integration-test (default) @ getting-started --- @@ -421,6 +421,25 @@ These are regular Quarkus config properties, so if you always want to build in a it is recommended you add these to your `application.properties` in order to avoid specifying them every time. 
==== +Executable built that way with the container runtime will be a 64-bit Linux executable, so depending on your operating system it may no longer be runnable. + +[IMPORTANT] +==== +Starting with Quarkus 3.19+, the _builder_ image used to build the native executable is based on UBI 9. +It means that the native executable produced by the container build will be based on UBI 9 as well. +So, if you plan to build a container, make sure that the base image in your `Dockerfile` is compatible with UBI 9. +The native executable will not run on UBI 8 base images. + +You can configure the builder image used for the container build by setting the `quarkus.native.builder-image` property. +For example to switch back to an UBI8 _builder image_ you can use: + +`quarkus.native.builder-image=quay.io/quarkus/ubi-quarkus-mandrel-builder-image:{mandrel-flavor}` + +You can see the available tags for UBI8 https://quay.io/repository/quarkus/ubi-quarkus-mandrel-builder-image?tab=tags[here] +and for UBI9 https://quay.io/repository/quarkus/ubi9-quarkus-mandrel-builder-image?tab=tags[here (UBI 9)]) + +==== + [[tip-quarkus-native-remote-container-build]] [TIP] ==== @@ -434,6 +453,7 @@ In this case, use the parameter `-Dquarkus.native.remote-container-build=true` i The reason for this is that the local build driver invoked through `-Dquarkus.native.container-build=true` uses volume mounts to make the JAR available in the build container, but volume mounts do not work with remote daemons. The remote container build driver copies the necessary files instead of mounting them. Note that even though the remote driver also works with local daemons, the local driver should be preferred in the local case because mounting is usually more performant than copying. ==== + [TIP] ==== Building with GraalVM instead of Mandrel requires a custom builder image parameter to be passed additionally: @@ -446,7 +466,8 @@ Please note that the above command points to a floating tag. It is highly recommended to use the floating tag, so that your builder image remains up-to-date and secure. If you absolutely must, you may hard-code to a specific tag -(see https://quay.io/repository/quarkus/ubi-quarkus-mandrel-builder-image?tab=tags[here] for available tags), +(see https://quay.io/repository/quarkus/ubi-quarkus-mandrel-builder-image?tab=tags[here (UBI 8)] +and https://quay.io/repository/quarkus/ubi9-quarkus-mandrel-builder-image?tab=tags[here (UBI 9)] for available tags), but be aware that you won't get security updates that way and it's unsupported. ==== @@ -493,25 +514,30 @@ The project generation has provided a `Dockerfile.native-micro` in the `src/main [source,dockerfile] ---- -FROM quay.io/quarkus/quarkus-micro-image:2.0 +FROM quay.io/quarkus/ubi9-quarkus-micro-image:2.0 WORKDIR /work/ -COPY target/*-runner /work/application -RUN chmod 775 /work +RUN chown 1001 /work \ + && chmod "g+rwX" /work \ + && chown 1001:root /work +COPY --chown=1001:root --chmod=755 target/*-runner /work/application + EXPOSE 8080 -CMD ["./application", "-Dquarkus.http.host=0.0.0.0"] +USER 1001 + +ENTRYPOINT ["./application", "-Dquarkus.http.host=0.0.0.0"] ---- [NOTE] .Quarkus Micro Image? ==== The Quarkus Micro Image is a small container image providing the right set of dependencies to run your native application. -It is based on https://catalog.redhat.com/software/containers/ubi8-micro/601a84aadd19c7786c47c8ea?container-tabs=overview[UBI Micro]. 
+It is based on https://catalog.redhat.com/software/containers/ubi9-micro/61832b36dd607bfc82e66399?container-tabs=overview[UBI Micro]. This base image has been tailored to work perfectly in containers. You can read more about UBI images on: * https://www.redhat.com/en/blog/introducing-red-hat-universal-base-image[Introduction to Universal Base Image] -* https://catalog.redhat.com/software/container-stacks/detail/5ec53f50ef29fd35586d9a56[Red Hat Universal Base Image 8] +* https://catalog.redhat.com/software/containers/ubi9/ubi/615bcf606feffc5384e8452e[Red Hat Universal Base Image 9] UBI images can be used without any limitations. @@ -538,12 +564,12 @@ The project generation has also provided a `Dockerfile.native` in the `src/main/ [source,dockerfile] ---- -FROM registry.access.redhat.com/ubi8/ubi-minimal:8.10 +FROM registry.access.redhat.com/ubi9/ubi-minimal:9.5 WORKDIR /work/ RUN chown 1001 /work \ && chmod "g+rwX" /work \ && chown 1001:root /work -COPY --chown=1001:root target/*-runner /work/application +COPY --chown=1001:root --chmod=0755 target/*-runner /work/application EXPOSE 8080 USER 1001 @@ -578,18 +604,18 @@ Sample Dockerfile for building with Maven: [source,dockerfile,subs=attributes+] ---- ## Stage 1 : build with maven builder image with native capabilities -FROM quay.io/quarkus/ubi-quarkus-mandrel-builder-image:{mandrel-flavor} AS build +FROM quay.io/quarkus/ubi9-quarkus-mandrel-builder-image:{mandrel-flavor} AS build COPY --chown=quarkus:quarkus --chmod=0755 mvnw /code/mvnw COPY --chown=quarkus:quarkus .mvn /code/.mvn COPY --chown=quarkus:quarkus pom.xml /code/ USER quarkus WORKDIR /code -RUN ./mvnw -B org.apache.maven.plugins:maven-dependency-plugin:3.1.2:go-offline +RUN ./mvnw -B org.apache.maven.plugins:maven-dependency-plugin:3.8.1:go-offline COPY src /code/src RUN ./mvnw package -Dnative ## Stage 2 : create the docker final image -FROM quay.io/quarkus/quarkus-micro-image:2.0 +FROM quay.io/quarkus/ubi9-quarkus-micro-image:2.0 WORKDIR /work/ COPY --from=build /code/target/*-runner /work/application @@ -616,7 +642,7 @@ Sample Dockerfile for building with Gradle: [source,dockerfile,subs=attributes+] ---- ## Stage 1 : build with maven builder image with native capabilities -FROM quay.io/quarkus/ubi-quarkus-mandrel-builder-image:{mandrel-flavor} AS build +FROM quay.io/quarkus/ubi9-quarkus-mandrel-builder-image:{mandrel-flavor} AS build USER root RUN microdnf install findutils COPY --chown=quarkus:quarkus gradlew /code/gradlew @@ -630,7 +656,7 @@ COPY src /code/src RUN ./gradlew build -Dquarkus.native.enabled=true ## Stage 2 : create the docker final image -FROM quay.io/quarkus/quarkus-micro-image:2.0 +FROM quay.io/quarkus/ubi9-quarkus-micro-image:2.0 WORKDIR /work/ COPY --from=build /code/build/*-runner /work/application RUN chmod 775 /work @@ -661,7 +687,7 @@ Please see xref:native-and-ssl.adoc#working-with-containers[our Using SSL With N [NOTE,subs=attributes+] ==== -To use GraalVM CE instead of Mandrel, update the `FROM` clause to: `FROM quay.io/quarkus/ubi-quarkus-graalvmce-builder-image:{graalvm-flavor} AS build`. +To use GraalVM CE instead of Mandrel, update the `FROM` clause to: `FROM quay.io/quarkus/ubi9-quarkus-graalvmce-builder-image:{graalvm-flavor} AS build`. 
==== === Using a Distroless base image @@ -702,7 +728,7 @@ Sample multistage Dockerfile for building an image from `scratch`: [source,dockerfile,subs=attributes+] ---- ## Stage 1 : build with maven builder image with native capabilities -FROM quay.io/quarkus/ubi-quarkus-graalvmce-builder-image:{graalvm-flavor} AS build +FROM quay.io/quarkus/ubi9-quarkus-graalvmce-builder-image:{graalvm-flavor} AS build USER root RUN microdnf install make gcc COPY --chown=quarkus:quarkus mvnw /code/mvnw @@ -719,7 +745,7 @@ RUN mkdir /musl && \ ENV PATH="/musl/bin:${PATH}" USER quarkus WORKDIR /code -RUN ./mvnw -B org.apache.maven.plugins:maven-dependency-plugin:3.1.2:go-offline +RUN ./mvnw -B org.apache.maven.plugins:maven-dependency-plugin:3.8.1:go-offline COPY src /code/src RUN ./mvnw package -Dnative -DskipTests -Dquarkus.native.additional-build-args="--static","--libc=musl" diff --git a/docs/src/main/asciidoc/cdi-reference.adoc b/docs/src/main/asciidoc/cdi-reference.adoc index cd714fcf13433..365e5069ababc 100644 --- a/docs/src/main/asciidoc/cdi-reference.adoc +++ b/docs/src/main/asciidoc/cdi-reference.adoc @@ -616,9 +616,9 @@ TIP: It is also possible to use `@IfBuildProfile` and `@UnlessBuildProfile` on s [[enable_build_properties]] === Enabling Beans for Quarkus Build Properties -Quarkus adds a capability that CDI currently does not support which is to conditionally enable a bean when a Quarkus build time property has/has not a specific value, +Quarkus adds a capability that CDI currently does not support which is to conditionally enable a bean when a Quarkus build time property has or does not have a specific value, via the `@io.quarkus.arc.properties.IfBuildProperty` and `@io.quarkus.arc.properties.UnlessBuildProperty` annotations. -When used in conjunction with `@io.quarkus.arc.DefaultBean`, this annotation allow for the creation of different bean configurations for different build properties. +When used in conjunction with `@io.quarkus.arc.DefaultBean`, these annotations allow for the creation of different bean configurations for different build properties. The scenario we mentioned above with `Tracer` could also be implemented in the following way: diff --git a/docs/src/main/asciidoc/centralized-log-management.adoc b/docs/src/main/asciidoc/centralized-log-management.adoc index f0127b3d339ce..6885fd2f5e7fd 100644 --- a/docs/src/main/asciidoc/centralized-log-management.adoc +++ b/docs/src/main/asciidoc/centralized-log-management.adoc @@ -6,18 +6,15 @@ https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc = Centralized log management (Graylog, Logstash, Fluentd) include::_attributes.adoc[] :categories: observability -:summary: This guide explains how to centralize your logs with Logstash or Fluentd using the Graylog Extended Log Format (GELF). +:summary: This guide explains how to centralize your logs with Graylog, Logstash or Fluentd. :topics: observability,logging -:extensions: io.quarkus:quarkus-logging-gelf +:extensions: io.quarkus:quarkus-logging-gelf,io.quarkus:quarkus-opentelemetry This guide explains how you can send your logs to a centralized log management system like Graylog, Logstash (inside the Elastic Stack or ELK - Elasticsearch, Logstash, Kibana) or Fluentd (inside EFK - Elasticsearch, Fluentd, Kibana). -There are a lot of different ways to centralize your logs (if you are using Kubernetes, the simplest way is to log to the console and ask you cluster administrator to integrate a central log manager inside your cluster). 
-In this guide, we will expose how to send them to an external tool using the `quarkus-logging-gelf` extension that can use TCP or UDP to send logs in the Graylog Extended Log Format (GELF). - -The `quarkus-logging-gelf` extension will add a GELF log handler to the underlying logging backend that Quarkus uses (jboss-logmanager). -By default, it is disabled, if you enable it but still use another handler (by default the console handler is enabled), your logs will be sent to both handlers. +There are a lot of different ways to centralize your logs (if you are using Kubernetes, the simplest way is to log to the console and ask your cluster administrator to integrate a central log manager inside your cluster). +In this guide, we will expose how to send them to an external tool using supported Quarkus extensions in supported standard formats like Graylog Extended Log Format (GELF), Elastic Common Schema (ECS) or the OpenTelemetry Log signal. == Prerequisites @@ -28,35 +25,12 @@ include::{includes}/prerequisites.adoc[] The following examples will all be based on the same example application that you can create with the following steps. -Create an application with the `quarkus-logging-gelf` extension. You can use the following command to create it: +Create an application with the REST extension. You can use the following command to create it: -:create-app-artifact-id: gelf-logging -:create-app-extensions: rest,logging-gelf +:create-app-artifact-id: centralized-logging +:create-app-extensions: rest include::{includes}/devtools/create-app.adoc[] -If you already have your Quarkus project configured, you can add the `logging-gelf` extension -to your project by running the following command in your project base directory: - -:add-extension-extensions: logging-gelf -include::{includes}/devtools/extension-add.adoc[] - -This will add the following dependency to your build file: - -[source,xml,role="primary asciidoc-tabs-target-sync-cli asciidoc-tabs-target-sync-maven"] -.pom.xml ----- - - io.quarkus - quarkus-logging-gelf - ----- - -[source,gradle,role="secondary asciidoc-tabs-target-sync-gradle"] -.build.gradle ----- -implementation("io.quarkus:quarkus-logging-gelf") ----- - For demonstration purposes, we create an endpoint that does nothing but log a sentence. You don't need to do this inside your application. [source,java] @@ -67,10 +41,10 @@ import jakarta.ws.rs.Path; import org.jboss.logging.Logger; -@Path("/gelf-logging") +@Path("/logging") @ApplicationScoped -public class GelfLoggingResource { - private static final Logger LOG = Logger.getLogger(GelfLoggingResource.class); +public class LoggingResource { + private static final Logger LOG = Logger.getLogger(LoggingResource.class); @GET public void log() { @@ -80,27 +54,85 @@ public class GelfLoggingResource { } ---- -Configure the GELF log handler to send logs to an external UDP endpoint on the port 12201: +== Send logs to the Elastic Stack (ELK) in the ECS (Elastic Common Schema) format with the Socket handler -[source,properties] +You can send your logs to Logstash using a TCP input in the https://www.elastic.co/guide/en/ecs-logging/overview/current/intro.html[ECS] format. +To achieve this, we will use the `quarkus-logging-json` extension to format the logs in JSON format and the socket handler to send them to Logstash. 
+ +Create the following file in `$HOME/pipelines/ecs.conf`: + +[source] ---- -quarkus.log.handler.gelf.enabled=true -quarkus.log.handler.gelf.host=localhost -quarkus.log.handler.gelf.port=12201 +input { + tcp { + port => 4560 + codec => json + } +} + +filter { + if ![span][id] and [mdc][spanId] { + mutate { rename => { "[mdc][spanId]" => "[span][id]" } } + } + if ![trace][id] and [mdc][traceId] { + mutate { rename => {"[mdc][traceId]" => "[trace][id]"} } + } +} + +output { + stdout {} + elasticsearch { + hosts => ["http://elasticsearch:9200"] + } +} ---- -== Send logs to Graylog +Then configure your application to log in JSON format -To send logs to Graylog, you first need to launch the components that compose the Graylog stack: +[source,xml,role="primary asciidoc-tabs-target-sync-cli asciidoc-tabs-target-sync-maven"] +.pom.xml +---- + + io.quarkus + quarkus-logging-json + +---- + +[source,gradle,role="secondary asciidoc-tabs-target-sync-gradle"] +.build.gradle +---- +implementation("io.quarkus:quarkus-logging-json") +---- + +and specify the host and port of your Logstash endpoint. To be ECS compliant, specify the log format. + +[source, properties] +---- +# to keep the logs in the usual format in the console +quarkus.log.console.json=false + +quarkus.log.socket.enable=true +quarkus.log.socket.json=true +quarkus.log.socket.endpoint=localhost:4560 + +# to have the exception serialized into a single text element +quarkus.log.socket.json.exception-output-type=formatted + +# specify the format of the produced JSON log +quarkus.log.socket.json.log-format=ECS +---- + +Finally, launch the components that compose the Elastic Stack: -- MongoDB - Elasticsearch -- Graylog +- Logstash +- Kibana You can do this via the following `docker-compose.yml` file that you can launch via `docker-compose up -d`: [source,yaml,subs="attributes"] ---- +# Launch Elasticsearch version: '3.2' services: @@ -108,87 +140,90 @@ services: image: {elasticsearch-image} ports: - "9200:9200" + - "9300:9300" environment: ES_JAVA_OPTS: "-Xms512m -Xmx512m" discovery.type: "single-node" cluster.routing.allocation.disk.threshold_enabled: false networks: - - graylog + - elk - mongo: - image: mongo:4.0 + logstash: + image: {logstash-image} + volumes: + - source: $HOME/pipelines + target: /usr/share/logstash/pipeline + type: bind + ports: + - "12201:12201/udp" + - "5000:5000" + - "9600:9600" networks: - - graylog + - elk + depends_on: + - elasticsearch - graylog: - image: graylog/graylog:4.3.0 + kibana: + image: {kibana-image} ports: - - "9000:9000" - - "12201:12201/udp" - - "1514:1514" - environment: - GRAYLOG_HTTP_EXTERNAL_URI: "http://127.0.0.1:9000/" - # CHANGE ME (must be at least 16 characters)! - GRAYLOG_PASSWORD_SECRET: "forpasswordencryption" - # Password: admin - GRAYLOG_ROOT_PASSWORD_SHA2: "8c6976e5b5410415bde908bd4dee15dfb167a9c873fc4bb8a81f6f2ab448a918" + - "5601:5601" networks: - - graylog + - elk depends_on: - elasticsearch - - mongo networks: - graylog: + elk: driver: bridge + ---- -Then, you need to create a UDP input in Graylog. -You can do it from the Graylog web console (System -> Input -> Select GELF UDP) available at http://localhost:9000 or via the API. +Launch your application, you should see your logs arriving inside the Elastic Stack; you can use Kibana available at http://localhost:5601/ to access them. -This curl example will create a new Input of type GELF UDP, it uses the default login from Graylog (admin/admin). 
+== Send logs to Fluentd with the Syslog handler -[source,bash] ----- -curl -H "Content-Type: application/json" -H "Authorization: Basic YWRtaW46YWRtaW4=" -H "X-Requested-By: curl" -X POST -v -d \ -'{"title":"udp input","configuration":{"recv_buffer_size":262144,"bind_address":"0.0.0.0","port":12201,"decompress_size_limit":8388608},"type":"org.graylog2.inputs.gelf.udp.GELFUDPInput","global":true}' \ -http://localhost:9000/api/system/inputs ----- - -Launch your application, you should see your logs arriving inside Graylog. +You can send your logs to Fluentd using a Syslog input. +As opposed to the GELF input, the Syslog input will not render multiline logs in one event. -== Send logs to Logstash / the Elastic Stack (ELK) +First, you need to create a Fluentd image with the Elasticsearch plugin. +You can use the following Dockerfile that should be created inside a `fluentd` directory. -Logstash comes by default with an Input plugin that can understand the GELF format, we will first create a pipeline that enables this plugin. +[source,dockerfile] +---- +FROM fluent/fluentd:v1.3-debian +RUN ["gem", "install", "fluent-plugin-elasticsearch", "--version", "3.7.0"] +---- -Create the following file in `$HOME/pipelines/gelf.conf`: +Then, you need to create a Fluentd configuration file inside `$HOME/fluentd/fluent.conf` [source] ---- -input { - gelf { - port => 12201 - } -} -output { - stdout {} - elasticsearch { - hosts => ["http://elasticsearch:9200"] - } -} + + @type syslog + port 5140 + bind 0.0.0.0 + message_format rfc5424 + tag system + + + + @type elasticsearch + host elasticsearch + port 9200 + logstash_format true + ---- -Finally, launch the components that compose the Elastic Stack: +Then, launch the components that compose the EFK Stack: - Elasticsearch -- Logstash +- Fluentd - Kibana You can do this via the following `docker-compose.yml` file that you can launch via `docker-compose up -d`: [source,yaml,subs="attributes"] ---- -# Launch Elasticsearch version: '3.2' services: @@ -202,20 +237,18 @@ services: discovery.type: "single-node" cluster.routing.allocation.disk.threshold_enabled: false networks: - - elk + - efk - logstash: - image: {logstash-image} + fluentd: + build: fluentd + ports: + - "5140:5140/udp" volumes: - - source: $HOME/pipelines - target: /usr/share/logstash/pipeline + - source: $HOME/fluentd + target: /fluentd/etc type: bind - ports: - - "12201:12201/udp" - - "5000:5000" - - "9600:9600" networks: - - elk + - efk depends_on: - elasticsearch @@ -224,132 +257,174 @@ services: ports: - "5601:5601" networks: - - elk + - efk depends_on: - elasticsearch networks: - elk: + efk: driver: bridge +---- + +Finally, configure your application to send logs to EFK using Syslog: +[source,properties] +---- +quarkus.log.syslog.enable=true +quarkus.log.syslog.endpoint=localhost:5140 +quarkus.log.syslog.protocol=udp +quarkus.log.syslog.app-name=quarkus +quarkus.log.syslog.hostname=quarkus-test ---- -Launch your application, you should see your logs arriving inside the Elastic Stack; you can use Kibana available at http://localhost:5601/ to access them. +Launch your application, you should see your logs arriving inside EFK: you can use Kibana available at http://localhost:5601/ to access them. +== Send logs with OpenTelemetry Logging -[[logstash_ecs]] -== GELF alternative: Send logs to Logstash in the ECS (Elastic Common Schema) format +OpenTelemetry Logging is able to send logs to a compatible OpenTelemetry collector. 
Its usage is described in the guide xref:opentelemetry-logging.adoc[Using OpenTelemetry Logging]. -You can also send your logs to Logstash using a TCP input in the https://www.elastic.co/guide/en/ecs-logging/overview/current/intro.html[ECS] format. -To achieve this we will use the `quarkus-logging-json` extension to format the logs in JSON format and the socket handler to send them to Logstash. +== Send logs with the `logging-gelf` extension -For this you can use the same `docker-compose.yml` file as above but with a different Logstash pipeline configuration. +WARNING: This extension is deprecated, we advise considering the alternatives described above in this guide. -[source] ----- -input { - tcp { - port => 4560 - codec => json - } -} +The `quarkus-logging-gelf` extension will add a GELF log handler to the underlying logging backend that Quarkus uses (jboss-logmanager). +By default, it is disabled, if you enable it but still use another handler (by default the console handler is enabled), your logs will be sent to both handlers. -filter { - if ![span][id] and [mdc][spanId] { - mutate { rename => { "[mdc][spanId]" => "[span][id]" } } - } - if ![trace][id] and [mdc][traceId] { - mutate { rename => {"[mdc][traceId]" => "[trace][id]"} } - } -} +You can add the `logging-gelf` extension to your project by running the following command in your project base directory: -output { - stdout {} - elasticsearch { - hosts => ["http://elasticsearch:9200"] - } -} ----- +:add-extension-extensions: logging-gelf +include::{includes}/devtools/extension-add.adoc[] -Then configure your application to log in JSON format instead of GELF +This will add the following dependency to your build file: [source,xml,role="primary asciidoc-tabs-target-sync-cli asciidoc-tabs-target-sync-maven"] .pom.xml ---- io.quarkus - quarkus-logging-json + quarkus-logging-gelf ---- [source,gradle,role="secondary asciidoc-tabs-target-sync-gradle"] .build.gradle ---- -implementation("io.quarkus:quarkus-logging-json") +implementation("io.quarkus:quarkus-logging-gelf") ---- -and specify the host and port of your Logstash endpoint. To be ECS compliant, specify the log format. +Configure the GELF log handler to send logs to an external UDP endpoint on port 12201: -[source, properties] +[source,properties] +---- +quarkus.log.handler.gelf.enabled=true +quarkus.log.handler.gelf.host=localhost +quarkus.log.handler.gelf.port=12201 ---- -# to keep the logs in the usual format in the console -quarkus.log.console.json=false -quarkus.log.socket.enable=true -quarkus.log.socket.json=true -quarkus.log.socket.endpoint=localhost:4560 +=== Send logs to Graylog -# to have the exception serialized into a single text element -quarkus.log.socket.json.exception-output-type=formatted +NOTE: It is advised to use the Syslog handler instead. 
-# specify the format of the produced JSON log -quarkus.log.socket.json.log-format=ECS +To send logs to Graylog, you first need to launch the components that compose the Graylog stack: + +- MongoDB +- Elasticsearch +- Graylog + +You can do this via the following `docker-compose.yml` file that you can launch via `docker-compose up -d`: + +[source,yaml,subs="attributes"] ---- +version: '3.2' +services: + elasticsearch: + image: {elasticsearch-image} + ports: + - "9200:9200" + environment: + ES_JAVA_OPTS: "-Xms512m -Xmx512m" + discovery.type: "single-node" + cluster.routing.allocation.disk.threshold_enabled: false + networks: + - graylog -== Send logs to Fluentd (EFK) + mongo: + image: mongo:4.0 + networks: + - graylog -First, you need to create a Fluentd image with the needed plugins: elasticsearch and input-gelf. -You can use the following Dockerfile that should be created inside a `fluentd` directory. + graylog: + image: graylog/graylog:4.3.0 + ports: + - "9000:9000" + - "12201:12201/udp" + - "1514:1514" + environment: + GRAYLOG_HTTP_EXTERNAL_URI: "http://127.0.0.1:9000/" + # CHANGE ME (must be at least 16 characters)! + GRAYLOG_PASSWORD_SECRET: "forpasswordencryption" + # Password: admin + GRAYLOG_ROOT_PASSWORD_SHA2: "8c6976e5b5410415bde908bd4dee15dfb167a9c873fc4bb8a81f6f2ab448a918" + networks: + - graylog + depends_on: + - elasticsearch + - mongo -[source,dockerfile] +networks: + graylog: + driver: bridge ---- -FROM fluent/fluentd:v1.3-debian -RUN ["gem", "install", "fluent-plugin-elasticsearch", "--version", "3.7.0"] -RUN ["gem", "install", "fluent-plugin-input-gelf", "--version", "0.3.1"] + +Then, you need to create a UDP input in Graylog. +You can do it from the Graylog web console (System -> Input -> Select GELF UDP) available at http://localhost:9000 or via the API. + +This curl example will create a new Input of type GELF UDP, it uses the default login from Graylog (admin/admin). + +[source,bash] +---- +curl -H "Content-Type: application/json" -H "Authorization: Basic YWRtaW46YWRtaW4=" -H "X-Requested-By: curl" -X POST -v -d \ +'{"title":"udp input","configuration":{"recv_buffer_size":262144,"bind_address":"0.0.0.0","port":12201,"decompress_size_limit":8388608},"type":"org.graylog2.inputs.gelf.udp.GELFUDPInput","global":true}' \ +http://localhost:9000/api/system/inputs ---- -You can build the image or let docker-compose build it for you. +Launch your application, you should see your logs arriving inside Graylog. -Then you need to create a fluentd configuration file inside `$HOME/fluentd/fluent.conf` +=== Send logs to Logstash / the Elastic Stack (ELK) + +NOTE: It is advised to use xref:opentelemetry-logging.adoc[OpenTelemetry Logging] or the Socket handler instead. + +Logstash comes by default with an Input plugin that can understand the GELF format, we will first create a pipeline that enables this plugin. 
+ +Create the following file in `$HOME/pipelines/gelf.conf`: [source] ---- - - type gelf - tag example.gelf - bind 0.0.0.0 - port 12201 - - - - @type elasticsearch - host elasticsearch - port 9200 - logstash_format true - +input { + gelf { + port => 12201 + } +} +output { + stdout {} + elasticsearch { + hosts => ["http://elasticsearch:9200"] + } +} ---- -Finally, launch the components that compose the EFK Stack: +Finally, launch the components that compose the Elastic Stack: - Elasticsearch -- Fluentd +- Logstash - Kibana You can do this via the following `docker-compose.yml` file that you can launch via `docker-compose up -d`: [source,yaml,subs="attributes"] ---- +# Launch Elasticsearch version: '3.2' services: @@ -363,18 +438,20 @@ services: discovery.type: "single-node" cluster.routing.allocation.disk.threshold_enabled: false networks: - - efk + - elk - fluentd: - build: fluentd - ports: - - "12201:12201/udp" + logstash: + image: {logstash-image} volumes: - - source: $HOME/fluentd - target: /fluentd/etc + - source: $HOME/pipelines + target: /usr/share/logstash/pipeline type: bind + ports: + - "12201:12201/udp" + - "5000:5000" + - "9600:9600" networks: - - efk + - elk depends_on: - elasticsearch @@ -383,44 +460,46 @@ services: ports: - "5601:5601" networks: - - efk + - elk depends_on: - elasticsearch networks: - efk: + elk: driver: bridge + ---- -Launch your application, you should see your logs arriving inside EFK: you can use Kibana available at http://localhost:5601/ to access them. +Launch your application, you should see your logs arriving inside the Elastic Stack; you can use Kibana available at http://localhost:5601/ to access them. -== GELF alternative: use Syslog +=== Send logs to Fluentd (EFK) -You can also send your logs to Fluentd using a Syslog input. -As opposed to the GELF input, the Syslog input will not render multiline logs in one event, that's why we advise to use the GELF input that we implement in Quarkus. +NOTE: It is advised to use xref:opentelemetry-logging.adoc[OpenTelemetry Logging] or the Socket handler instead. -First, you need to create a Fluentd image with the elasticsearch plugin. +First, you need to create a Fluentd image with the needed plugins: elasticsearch and input-gelf. You can use the following Dockerfile that should be created inside a `fluentd` directory. [source,dockerfile] ---- FROM fluent/fluentd:v1.3-debian RUN ["gem", "install", "fluent-plugin-elasticsearch", "--version", "3.7.0"] +RUN ["gem", "install", "fluent-plugin-input-gelf", "--version", "0.3.1"] ---- -Then, you need to create a fluentd configuration file inside `$HOME/fluentd/fluent.conf` +You can build the image or let docker-compose build it for you. 
+ +Then you need to create a fluentd configuration file inside `$HOME/fluentd/fluent.conf` [source] ---- - @type syslog - port 5140 + type gelf + tag example.gelf bind 0.0.0.0 - message_format rfc5424 - tag system + port 12201 - + @type elasticsearch host elasticsearch port 9200 @@ -428,7 +507,7 @@ Then, you need to create a fluentd configuration file inside `$HOME/fluentd/flue ---- -Then, launch the components that compose the EFK Stack: +Finally, launch the components that compose the EFK Stack: - Elasticsearch - Fluentd @@ -456,7 +535,7 @@ services: fluentd: build: fluentd ports: - - "5140:5140/udp" + - "12201:12201/udp" volumes: - source: $HOME/fluentd target: /fluentd/etc @@ -480,21 +559,9 @@ networks: driver: bridge ---- -Finally, configure your application to send logs to EFK using Syslog: - -[source,properties] ----- -quarkus.log.syslog.enable=true -quarkus.log.syslog.endpoint=localhost:5140 -quarkus.log.syslog.protocol=udp -quarkus.log.syslog.app-name=quarkus -quarkus.log.syslog.hostname=quarkus-test ----- - Launch your application, you should see your logs arriving inside EFK: you can use Kibana available at http://localhost:5601/ to access them. - -== Elasticsearch indexing consideration +=== Elasticsearch indexing consideration Be careful that, by default, Elasticsearch will automatically map unknown fields (if not disabled in the index settings) by detecting their type. This can become tricky if you use log parameters (which are included by default), or if you enable MDC inclusion (disabled by default), @@ -518,11 +585,11 @@ or you can configure your Elasticsearch index to store those fields as text or k See the following documentation for Graylog (but the same issue exists for the other central logging stacks): link:https://docs.graylog.org/en/3.2/pages/configuration/elasticsearch.html#custom-index-mappings[Custom Index Mappings]. [[configuration-reference]] -== Configuration Reference +=== Configuration Reference Configuration is done through the usual `application.properties` file. include::{generated-dir}/config/quarkus-logging-gelf.adoc[opts=optional, leveloffset=+1] This extension uses the `logstash-gelf` library that allow more configuration options via system properties, -you can access its documentation here: https://logging.paluch.biz/ . +you can access its documentation here: https://logging.paluch.biz/. diff --git a/docs/src/main/asciidoc/container-image.adoc b/docs/src/main/asciidoc/container-image.adoc index 8fd7785a8e19b..6a2cbb17e4e97 100644 --- a/docs/src/main/asciidoc/container-image.adoc +++ b/docs/src/main/asciidoc/container-image.adoc @@ -48,7 +48,7 @@ For example, the presence of `src/main/jib/foo/bar` would result in `/foo/bar` There are cases where the built container image may need to have Java debugging conditionally enabled at runtime. -When the base image has not been changed (and therefore `ubi8/openjdk-11-runtime`, `ubi8/openjdk-17-runtime`, or `ubi8/openjdk-21-runtime` is used), then the `quarkus.jib.jvm-additional-arguments` configuration property can be used in order to +When the base image has not been changed (and therefore `ubi9/openjdk-17-runtime`, or `ubi9/openjdk-21-runtime` is used), then the `quarkus.jib.jvm-additional-arguments` configuration property can be used in order to make the JVM listen on the debug port at startup. 
The exact configuration is: @@ -64,7 +64,7 @@ Other base images might provide launch scripts that enable debugging when an env The `quarkus.jib.jvm-entrypoint` configuration property can be used to completely override the container entry point and can thus be used to either hard code the JVM debug configuration or point to a script that handles the details. -For example, if the base images `ubi8/openjdk-11-runtime`, `ubi8/openjdk-17-runtime` or `ubi8/openjdk-21-runtime` are used to build the container, the entry point can be hard-coded on the application properties file. +For example, if the base images `ubi9/openjdk-17-runtime` or `ubi9/openjdk-21-runtime` are used to build the container, the entry point can be hard-coded on the application properties file. .Example application.properties [source,properties] @@ -89,7 +89,7 @@ java \ -jar quarkus-run.jar ---- -NOTE: `/home/jboss` is the WORKDIR for all quarkus binaries in the base images `ubi8/openjdk-11-runtime`, `ubi8/openjdk-17-runtime` and `ubi8/openjdk-21-runtime` (https://catalog.redhat.com/software/containers/ubi8/openjdk-17/618bdbf34ae3739687568813?container-tabs=dockerfile[Dockerfile for ubi8/openjdk-17-runtime, window="_blank"]) +NOTE: `/home/jboss` is the WORKDIR for all quarkus binaries in the base images `ubi9/openjdk-17-runtime` and `ubi9/openjdk-21-runtime` (https://catalog.redhat.com/software/containers/ubi9/openjdk-21-runtime/6501ce769a0d86945c422d5f?container-tabs=dockerfile[Dockerfile for ubi9/openjdk-17-runtime, window="_blank"]) ==== Multi-module projects and layering diff --git a/docs/src/main/asciidoc/databases-dev-services.adoc b/docs/src/main/asciidoc/databases-dev-services.adoc index 5d371c224cdcf..819a98d4c8eaf 100644 --- a/docs/src/main/asciidoc/databases-dev-services.adoc +++ b/docs/src/main/asciidoc/databases-dev-services.adoc @@ -212,9 +212,9 @@ Login credentials are the same for most databases, except when the database requ |Database |Username |Password |Database name |PostgreSQL, MariaDB, MySQL, IBM Db2, H2 -|`quarkus` for the default datasource or name of the datasource |`quarkus` |`quarkus` +|`quarkus` for the default datasource or name of the datasource |Microsoft SQL Server |`SA` diff --git a/docs/src/main/asciidoc/gradle-tooling.adoc b/docs/src/main/asciidoc/gradle-tooling.adoc index 89251c264e359..276f6498fa2ad 100644 --- a/docs/src/main/asciidoc/gradle-tooling.adoc +++ b/docs/src/main/asciidoc/gradle-tooling.adoc @@ -518,13 +518,13 @@ Configuring the `quarkusBuild` task can be done as following: quarkusBuild { nativeArgs { containerBuild = true <1> - builderImage = "quay.io/quarkus/ubi-quarkus-mandrel-builder-image:{mandrel-flavor}" <2> + builderImage = "quay.io/quarkus/ubi9-quarkus-mandrel-builder-image:{mandrel-flavor}" <2> } } ---- <1> Set `quarkus.native.container-build` property to `true` -<2> Set `quarkus.native.builder-image` property to `quay.io/quarkus/ubi-quarkus-mandrel-builder-image:{mandrel-flavor}` +<2> Set `quarkus.native.builder-image` property to `quay.io/quarkus/ubi9-quarkus-mandrel-builder-image:{mandrel-flavor}` **** [role="secondary asciidoc-tabs-sync-kotlin"] @@ -535,13 +535,13 @@ quarkusBuild { tasks.quarkusBuild { nativeArgs { "container-build" to true <1> - "builder-image" to "quay.io/quarkus/ubi-quarkus-mandrel-builder-image:{mandrel-flavor}" <2> + "builder-image" to "quay.io/quarkus/ubi9-quarkus-mandrel-builder-image:{mandrel-flavor}" <2> } } ---- <1> Set `quarkus.native.container-build` property to `true` -<2> Set `quarkus.native.builder-image` property to 
`quay.io/quarkus/ubi-quarkus-mandrel-builder-image:{mandrel-flavor}` +<2> Set `quarkus.native.builder-image` property to `quay.io/quarkus/ubi9-quarkus-mandrel-builder-image:{mandrel-flavor}` **** [WARNING] @@ -564,13 +564,15 @@ Note that in this case the build itself runs in a Docker container too, so you d [TIP] ==== -By default, the native executable will be generated using the `quay.io/quarkus/ubi-quarkus-mandrel-builder-image:{mandrel-flavor}` Docker image. +By default, the native executable will be generated using the `quay.io/quarkus/ubi9-quarkus-mandrel-builder-image:{mandrel-flavor}` Docker image. If you want to build a native executable with a different Docker image (for instance to use a different GraalVM version), use the `-Dquarkus.native.builder-image=` build argument. -The list of the available Docker images can be found on https://quay.io/repository/quarkus/ubi-quarkus-mandrel-builder-image?tab=tags[quay.io]. +The list of the available Docker images can be found on https://quay.io/repository/quarkus/ubi9-quarkus-mandrel-builder-image?tab=tags[quay.io]. Be aware that a given Quarkus version might not be compatible with all the images available. + +Note also that starting Quarkus 3.19, the default _builder_ images are based on UBI 9. To use the previous UBI 8 based images, you can use the pick an image from the https://quay.io/repository/quarkus/ubi-quarkus-mandrel-builder-image?tab=tags[quay.io repository]. ==== == Running native tests diff --git a/docs/src/main/asciidoc/grpc-service-consumption.adoc b/docs/src/main/asciidoc/grpc-service-consumption.adoc index 177d8c14a5db5..f15213bc7a645 100644 --- a/docs/src/main/asciidoc/grpc-service-consumption.adoc +++ b/docs/src/main/asciidoc/grpc-service-consumption.adoc @@ -204,8 +204,12 @@ public class StreamingEndpoint { For each gRPC service you inject in your application, you can configure the following attributes: +=== Global configuration include::{generated-dir}/config/quarkus-grpc_quarkus.grpc-client.adoc[opts=optional, leveloffset=+1] +=== Per-client configuration +include::{generated-dir}/config/quarkus-grpc_quarkus.grpc.clients.adoc[opts=optional, leveloffset=+1] + The `client-name` is the name set in the `@GrpcClient` or derived from the injection point if not explicitly defined. The following examples uses _hello_ as the client name. @@ -213,6 +217,50 @@ Don't forget to replace it with the name you used in the `@GrpcClient` annotatio IMPORTANT: When you enable `quarkus.grpc.clients."client-name".xds.enabled`, it's the xDS that should handle most of the configuration above. +=== Custom Channel building + +When Quarkus builds a gRPC Channel instance (the way gRPC clients communicate with gRPC services on a lower network level), users can apply their own Channel(Builder) customizers. The customizers are applied by `priority`, the higher the number the later customizer is applied. The customizers are applied before Quarkus applies user's client configuration; e.g. ideal for some initial defaults per all clients. + +There are two `customize` methods, the first one uses gRPC's `ManagedChannelBuilder` as a parameter - to be used with Quarkus' legacy gRPC support, where the other uses `GrpcClientOptions` - to be used with the new Vert.x gRPC support. User should implement the right `customize` method per gRPC support type usage, or both if the customizer is gRPC type neutral. + +[source, java] +---- +public interface ChannelBuilderCustomizer> { + + /** + * Customize a ManagedChannelBuilder instance. 
+ * + * @param name gRPC client name + * @param config client's configuration + * @param builder Channel builder instance + * @return map of config properties to be used as default service config against the builder + */ + default Map customize(String name, GrpcClientConfiguration config, T builder) { + return Map.of(); + } + + /** + * Customize a GrpcClientOptions instance. + * + * @param name gRPC client name + * @param config client's configuration + * @param options GrpcClientOptions instance + */ + default void customize(String name, GrpcClientConfiguration config, GrpcClientOptions options) { + } + + /** + * Priority by which the customizers are applied. + * Higher priority is applied later. + * + * @return the priority + */ + default int priority() { + return 0; + } +} +---- + === Enabling TLS To enable TLS, use the following configuration. diff --git a/docs/src/main/asciidoc/hibernate-orm.adoc b/docs/src/main/asciidoc/hibernate-orm.adoc index e369e8ca015c2..751c6dd823cf1 100644 --- a/docs/src/main/asciidoc/hibernate-orm.adoc +++ b/docs/src/main/asciidoc/hibernate-orm.adoc @@ -1082,7 +1082,7 @@ Jump over to xref:datasource.adoc[Quarkus - Datasources] for all details. Quarkus currently supports the link:{hibernate-orm-docs-url}#multitenacy-separate-database[separate database] approach, the link:{hibernate-orm-docs-url}#multitenacy-separate-schema[separate schema] approach and the link:{hibernate-orm-docs-url}#multitenacy-discriminator[discriminator] approach. -To see multitenancy in action, you can check out the link:{quickstarts-tree-url}/hibernate-orm-multi-tenancy-quickstart[hibernate-orm-multi-tenancy-quickstart] quickstart. +To see multitenancy in action, you can check out the link:{quickstarts-tree-url}/hibernate-orm-multi-tenancy-schema-quickstart[hibernate-orm-multi-tenancy-schema-quickstart] or link:{quickstarts-tree-url}/hibernate-orm-multi-tenancy-database-quickstart[hibernate-orm-multi-tenancy-database-quickstart]. 
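For orientation, a schema-based tenant resolver along the lines described in this guide might look roughly like the sketch below. It is not part of the change above: the class name and the request-path convention are hypothetical, and the "Writing the application" section that follows covers the real steps; the `base` and `mycompany` tenant identifiers simply mirror the schemas used in the configuration shown afterwards.

[source, java]
----
import jakarta.enterprise.context.RequestScoped;
import jakarta.inject.Inject;

import io.quarkus.hibernate.orm.PersistenceUnitExtension;
import io.quarkus.hibernate.orm.runtime.tenant.TenantResolver;
import io.vertx.ext.web.RoutingContext;

@PersistenceUnitExtension // attaches the resolver to the default persistence unit
@RequestScoped
public class CustomTenantResolver implements TenantResolver {

    @Inject
    RoutingContext context;

    @Override
    public String getDefaultTenantId() {
        return "base"; // matches the default schema used in the configuration below
    }

    @Override
    public String resolveTenantId() {
        // hypothetical convention: requests under /mycompany use the mycompany schema
        if (context.request().path().startsWith("/mycompany")) {
            return "mycompany";
        }
        return getDefaultTenantId();
    }
}
----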
=== Writing the application @@ -1210,7 +1210,7 @@ quarkus.hibernate-orm.multitenant=SCHEMA <2> quarkus.datasource.db-kind=postgresql <3> quarkus.datasource.username=quarkus_test quarkus.datasource.password=quarkus_test -quarkus.datasource.jdbc.url=jdbc:postgresql://localhost:5432/quarkus_test +%prod.quarkus.datasource.jdbc.url=jdbc:postgresql://localhost:5432/quarkus_test quarkus.flyway.schemas=base,mycompany <4> quarkus.flyway.locations=classpath:schema @@ -1278,7 +1278,7 @@ quarkus.hibernate-orm.datasource=base <3> quarkus.datasource.base.db-kind=postgresql <4> quarkus.datasource.base.username=quarkus_test quarkus.datasource.base.password=quarkus_test -quarkus.datasource.base.jdbc.url=jdbc:postgresql://localhost:5432/quarkus_test +%prod.quarkus.datasource.base.jdbc.url=jdbc:postgresql://localhost:5432/quarkus_test quarkus.flyway.base.locations=classpath:database/base <5> quarkus.flyway.base.migrate-at-start=true @@ -1286,7 +1286,7 @@ quarkus.flyway.base.migrate-at-start=true quarkus.datasource.mycompany.db-kind=postgresql <6> quarkus.datasource.mycompany.username=mycompany quarkus.datasource.mycompany.password=mycompany -quarkus.datasource.mycompany.jdbc.url=jdbc:postgresql://localhost:5433/mycompany +%prod.quarkus.datasource.mycompany.jdbc.url=jdbc:postgresql://localhost:5433/mycompany quarkus.flyway.mycompany.locations=classpath:database/mycompany <7> quarkus.flyway.mycompany.migrate-at-start=true ---- diff --git a/docs/src/main/asciidoc/images/observability-grafana-dashboards.png b/docs/src/main/asciidoc/images/observability-grafana-dashboards.png new file mode 100644 index 0000000000000..76039ff24ee13 Binary files /dev/null and b/docs/src/main/asciidoc/images/observability-grafana-dashboards.png differ diff --git a/docs/src/main/asciidoc/images/observability-grafana-loki.png b/docs/src/main/asciidoc/images/observability-grafana-loki.png new file mode 100644 index 0000000000000..1e33c42e31e89 Binary files /dev/null and b/docs/src/main/asciidoc/images/observability-grafana-loki.png differ diff --git a/docs/src/main/asciidoc/images/observability-grafana-tempo.png b/docs/src/main/asciidoc/images/observability-grafana-tempo.png new file mode 100644 index 0000000000000..09a180f66c326 Binary files /dev/null and b/docs/src/main/asciidoc/images/observability-grafana-tempo.png differ diff --git a/docs/src/main/asciidoc/infinispan-client-reference.adoc b/docs/src/main/asciidoc/infinispan-client-reference.adoc index 7084c2e4ae3d9..77fdc84bccdf2 100644 --- a/docs/src/main/asciidoc/infinispan-client-reference.adoc +++ b/docs/src/main/asciidoc/infinispan-client-reference.adoc @@ -299,6 +299,7 @@ distributedCache: You can use the following authentication mechanisms with the Infinispan client: +* DIGEST-SHA-512 (default) * DIGEST-MD5 * PLAIN (recommended only in combination with TLS encryption) * EXTERNAL diff --git a/docs/src/main/asciidoc/kafka.adoc b/docs/src/main/asciidoc/kafka.adoc index ac01aac591db6..147f0b6dc30e7 100644 --- a/docs/src/main/asciidoc/kafka.adoc +++ b/docs/src/main/asciidoc/kafka.adoc @@ -2956,9 +2956,7 @@ NOTE: If you use Hibernate Reactive, look at < emitter; + @Channel("kafka") MutinyEmitter emitter; @POST @Path("/fruits") - @Transactional // <1> - public CompletionStage storeAndSendToKafka(Fruit fruit) { // <2> + @Transactional // <1> + public void storeAndSendToKafka(Fruit fruit) { // <2> fruit.persist(); - return emitter.send(new FruitDto(fruit)); // <3> + emitter.sendAndAwait(new FruitDto(fruit)); // <3> } } ---- <1> As we are writing to the database, make sure we run 
inside a transaction -<2> The method receives the fruit instance to persist. It returns a `CompletionStage` which is used for the transaction demarcation. The transaction is committed when the return `CompletionStage` completes. In our case, it's when the message is written to Kafka. +<2> The method receives the fruit instance to persist. <3> Wrap the managed entity inside a Data transfer object and send it to Kafka. This makes sure that managed entity is not impacted by the Kafka serialization. +Then await the completion of the operation before returning. + +NOTE: You should not return a `CompletionStage` or `Uni` when using `@Transactional`, as all transaction commits will happen on a single thread, which impacts performance. [[writing-entities-managed-by-hibernate-reactive-to-kafka]] === Writing entities managed by Hibernate Reactive to Kafka @@ -3191,23 +3192,104 @@ public class FruitProducer { @Consumes(MediaType.APPLICATION_JSON) @Bulkhead(1) public Uni post(Fruit fruit) { - Context context = Vertx.currentContext(); // <2> - return sf.withTransaction(session -> // <3> - kafkaTx.withTransaction(emitter -> // <4> - session.persist(fruit).invoke(() -> emitter.send(fruit)) // <5> - ).emitOn(context::runOnContext) // <6> - ); + return sf.withTransaction(session -> // <2> + kafkaTx.withTransaction(emitter -> // <3> + session.persist(fruit).invoke(() -> emitter.send(fruit)) // <4> + )); } } ---- <1> Inject the Hibernate Reactive `SessionFactory`. -<2> Capture the caller Vert.x context. -<3> Begin a Hibernate Reactive transaction. -<4> Begin a Kafka transaction. -<5> Persist the payload and send the entity to Kafka. -<6> The Kafka transaction terminates on the Kafka producer sender thread. -We need to switch to the Vert.x context previously captured in order to terminate the Hibernate Reactive transaction on the same context we started it. +<2> Begin a Hibernate Reactive transaction. +<3> Begin a Kafka transaction. +<4> Persist the payload and send the entity to Kafka. + +Alternatively, you can use the `@WithTransaction` annotation to start a transaction and commit it when the method returns: + +[source, java] +---- +import jakarta.inject.Inject; +import jakarta.ws.rs.Consumes; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.core.MediaType; + +import org.eclipse.microprofile.faulttolerance.Bulkhead; +import org.eclipse.microprofile.reactive.messaging.Channel; + +import io.quarkus.hibernate.reactive.panache.common.WithTransaction; +import io.smallrye.mutiny.Uni; +import io.smallrye.reactive.messaging.kafka.transactions.KafkaTransactions; + +@Path("/") +public class FruitProducer { + + @Channel("kafka") KafkaTransactions kafkaTx; + + @POST + @Path("/fruits") + @Consumes(MediaType.APPLICATION_JSON) + @Bulkhead(1) + @WithTransaction // <1> + public Uni post(Fruit fruit) { + return kafkaTx.withTransaction(emitter -> // <2> + fruit.persist().invoke(() -> emitter.send(fruit)) // <3> + ); + } +} +---- + +<1> Start a Hibernate Reactive transaction and commit it when the method returns. +<2> Begin a Kafka transaction. +<3> Persist the payload and send the entity to Kafka. + +[[chaining-kafka-transactions-with-hibernate-orm-transactions]] +=== Chaining Kafka Transactions with Hibernate ORM transactions + +While `KafkaTransactions` provide a reactive API on top of Mutiny to manage Kafka transactions, +you can still chain Kafka transactions with blocking Hibernate ORM transactions. 
+ +[source, java] +---- +import jakarta.transaction.Transactional; +import jakarta.ws.rs.Consumes; +import jakarta.ws.rs.POST; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.core.MediaType; + +import org.eclipse.microprofile.faulttolerance.Bulkhead; +import org.eclipse.microprofile.reactive.messaging.Channel; + +import io.quarkus.logging.Log; +import io.smallrye.mutiny.Uni; +import io.smallrye.reactive.messaging.kafka.transactions.KafkaTransactions; + +@Path("/") +public class FruitProducer { + + @Channel("kafka") KafkaTransactions<Fruit> emitter; + + @POST + @Path("/fruits") + @Consumes(MediaType.APPLICATION_JSON) + @Bulkhead(1) + @Transactional // <1> + public void post(Fruit fruit) { + emitter.withTransaction(e -> { // <2> + // if the id is attributed by the database, you will need to flush to get it + // fruit.persistAndFlush(); + fruit.persist(); // <3> + Log.infov("Persisted fruit {0}", fruit); + e.send(fruit); // <4> + return Uni.createFrom().voidItem(); + }).await().indefinitely(); // <5> + } +} +---- + +<1> Start a Hibernate ORM transaction. The transaction is committed when the method returns. +<2> Begin a Kafka transaction. +<3> Persist the payload. +<4> Send the entity to Kafka inside the Kafka transaction. +<5> Wait on the returned `Uni` for the Kafka transaction to complete. == Logging diff --git a/docs/src/main/asciidoc/logging.adoc index 1a50a862618b0..ca4362f0bd7c7 100644 --- a/docs/src/main/asciidoc/logging.adoc +++ b/docs/src/main/asciidoc/logging.adoc @@ -169,9 +169,11 @@ class SimpleBean { <1> The fully qualified class name (FQCN) of the declaring class is used as a logger name, for example, `org.jboss.logging.Logger.getLogger(SimpleBean.class)` will be used. <2> In this case, the name _foo_ is used as a logger name, for example, `org.jboss.logging.Logger.getLogger("foo")` will be used. -NOTE: The logger instances are cached internally. +[NOTE] +==== +The logger instances are cached internally. Therefore, when a logger is injected, for example, into a `@RequestScoped` bean, it is shared for all bean instances to avoid possible performance penalties associated with logger instantiation. - +==== == Use log levels @@ -530,12 +532,9 @@ For details about its configuration, see the xref:#quarkus-core_section_quarkus- === Socket log handler -This handler will send the logs to a socket. -It is disabled by default, so you must first enable it. -When enabled, it sends all log events to a socket, for instance to a Logstash server. - -This will typically be used in conjunction with the `quarkus-logging-json` extension so send logs in ECS format to an Elasticsearch instance. -An example configuration can be found in the xref:centralized-log-management.adoc[Centralized log management] guide. +This handler sends logs to a socket. +The socket log handler is disabled by default; enable it before use. +When enabled, it sends all log events to a socket, such as a Logstash server. * A global configuration example: + [source,properties] ---- @@ -545,6 +544,9 @@ quarkus.log.socket.enable=true quarkus.log.socket.endpoint=localhost:4560 ---- +Typically, this handler is used with the `quarkus-logging-json` extension to send logs in ECS format to an Elasticsearch instance. +For an example configuration, see the xref:centralized-log-management.adoc[Centralized log management] guide. + == Add a logging filter to your log handler @@ -610,8 +612,11 @@ quarkus.console.color=false quarkus.log.category."io.quarkus".level=INFO ---- -NOTE: If you add these properties in the command line, ensure `"` is escaped. +[NOTE] +==== +If you add these properties in the command line, ensure `"` is escaped.
For example, `-Dquarkus.log.category.\"io.quarkus\".level=DEBUG`. +==== [[category-example]] .File TRACE logging configuration @@ -667,7 +672,7 @@ To send logs to a centralized tool such as Graylog, Logstash, or Fluentd, see th === OpenTelemetry logging -Logging entries from all appenders can be sent using OpenTelemetry Logging. +Logging entries from all appenders can be sent by using OpenTelemetry Logging. For details, see the Quarkus xref:opentelemetry-logging.adoc[OpenTelemetry Logging] guide. @@ -677,7 +682,7 @@ Enable proper logging for `@QuarkusTest` by setting the `java.util.logging.manag The system property must be set early on to be effective, so it is recommended to configure it in the build system. -.Setting the `java.util.logging.manager` system property in the Maven Surefire plugin configuration +.Setting the `java.util.logging.manager` system property in the Maven Surefire plugin configuration: [source, xml] ---- @@ -822,7 +827,7 @@ To add data to the MDC and extract it in your log output: . Use the `MDC` class to set the data. .. Add `import org.jboss.logmanager.MDC;` -.. Set `MDC.put(...)` as shown in the example below: +.. Set `MDC.put(...)` as shown in the example below: + [source,java] .An example with JBoss Logging and `io.quarkus.logging.Log` diff --git a/docs/src/main/asciidoc/maven-tooling.adoc b/docs/src/main/asciidoc/maven-tooling.adoc index d334224fc0665..8ac4e7ddf821e 100644 --- a/docs/src/main/asciidoc/maven-tooling.adoc +++ b/docs/src/main/asciidoc/maven-tooling.adoc @@ -509,13 +509,18 @@ Note that in this case the build itself runs in a Docker container too, so you d [TIP] ==== -By default, the native executable will be generated using the `quay.io/quarkus/ubi-quarkus-mandrel-builder-image:{mandrel-flavor}` Docker image. +By default, the native executable will be generated using the `quay.io/quarkus/ubi9-quarkus-mandrel-builder-image:{mandrel-flavor}` Docker image. If you want to build a native executable with a different Docker image (for instance to use a different GraalVM version), use the `-Dquarkus.native.builder-image=` build argument. -The list of the available Docker images can be found on https://quay.io/repository/quarkus/ubi-quarkus-mandrel-builder-image?tab=tags[quay.io]. +The list of the available Docker images can be found on https://quay.io/repository/quarkus/ubi9-quarkus-mandrel-builder-image?tab=tags[quay.io]. Be aware that a given Quarkus version might not be compatible with all the images available. + +Starting from Quarkus 3.19, the _builder_ image is based on UBI 9, and thus requires an UBI 9 base image if you want to run the native executable in a container. +You can switch back to UBI 8, by setting the `quarkus.native.builder-image` property to one of the available image from the https://quay.io/repository/quarkus/ubi-quarkus-mandrel-builder-image?tab=tags[quay.io repository]. +For example ``quay.io/quarkus/ubi-quarkus-mandrel-builder-image:{mandrel-flavor}` is using UBI 8, and so the resulting native executable will be compatible with UBI 8 base images. + ==== You can follow the xref:building-native-image.adoc[Build a native executable guide] as well as xref:deploying-to-kubernetes.adoc[Deploying Application to Kubernetes and OpenShift] for more information. @@ -1057,6 +1062,9 @@ Here is a list of system properties the Quarkus bootstrap Maven resolver checks | `false` | By default, the Quarkus Maven resolver is reading project's POMs directly when discovering the project's layout. 
While in most cases it works well enough and relatively fast, reading raw POMs has its limitation. E.g. if a POM includes modules in a profile, these modules will not be discovered. This system property enables project's layout discovery based on the effective POM models, that are properly interpolated, instead of the raw ones. The reason this option is not enabled by default is it may appear to be significantly more time-consuming that could increase, e.g. CI testing times. Until there is a better approach found that could be used by default, projects that require it should enable this option. +| `quarkus.bootstrap.legacy-model-resolver` +| `false` +| This *system* or *POM* property can be used to enable the legacy `ApplicationModel` resolver implementation. The property was introduced in Quarkus 3.19.0 and will be removed once the legacy implementation is known to be not in demand. |=== These system properties above could be added to, e.g., a `surefire` and/or `failsafe` plugin configuration as diff --git a/docs/src/main/asciidoc/messaging.adoc b/docs/src/main/asciidoc/messaging.adoc index e7cc35d6d2b36..478123e5f3a0a 100644 --- a/docs/src/main/asciidoc/messaging.adoc +++ b/docs/src/main/asciidoc/messaging.adoc @@ -356,6 +356,80 @@ public class MyProfileBean { } ---- +==== Pausable Channels + +Injected `@Channel` streams are not subscribed to by default, so the flow of messages is controlled by the application code using reactive streams and Mutiny APIs. +But for `@Incoming` methods, the flow of messages is controlled by the runtime. + +Pausable channels provide a mechanism to control message flow programmatically. +This is useful in scenarios where producers or consumers need to stop temporarily due to managing the lifecycle or performing maintenance operations. + +To use pausable channels, you need to activate it with the configuration property `pausable` set to `true`. + +[source, properties] +---- +mp.messaging.incoming.my-channel.pausable=true +# optional, by default the channel is NOT paused initially +mp.messaging.outgoing.my-channel.initially-paused=true +---- + +If a channel is configured to be pausable, you can get the `PausableChannel` by channel name from the `ChannelRegistry` programmatically, and pause or resume the channel as needed: + +[source, java] +---- +import jakarta.annotation.PostConstruct; +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.inject.Inject; + +import org.eclipse.microprofile.reactive.messaging.Incoming; + +import io.smallrye.reactive.messaging.ChannelRegistry; +import io.smallrye.reactive.messaging.PausableChannel; + +@ApplicationScoped +public class PausableController { + + @Inject + ChannelRegistry registry; + + @PostConstruct + public void resume() { + // Wait for the application to be ready + // Retrieve the pausable channel + PausableChannel pausable = registry.getPausable("my-channel"); + // Pause the processing of the messages + pausable.resume(); + } + + public void pause() { + // Retrieve the pausable channel + PausableChannel pausable = registry.getPausable("my-channel"); + // Pause the processing of the messages + pausable.pause(); + } + + @Incoming("my-channel") + void process(String message) { + // Process the message + } + +} +---- + +This feature is independent of connectors and can be in theory used with channels backed by any connector. +Note that pausing message consumption applies back-pressure on the underlying consumer which receives messages from the remote broker. 
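+
+For example, pausing and resuming can also be triggered from outside the messaging code, such as from a management endpoint used during maintenance windows.
+The following sketch relies only on the `ChannelRegistry` API shown above; the endpoint path and the channel name are illustrative:
+
+[source,java]
+----
+import jakarta.inject.Inject;
+import jakarta.ws.rs.POST;
+import jakarta.ws.rs.Path;
+
+import io.smallrye.reactive.messaging.ChannelRegistry;
+import io.smallrye.reactive.messaging.PausableChannel;
+
+@Path("/channels/my-channel")
+public class ChannelMaintenanceResource {
+
+    @Inject
+    ChannelRegistry registry;
+
+    @POST
+    @Path("/pause")
+    public void pause() {
+        PausableChannel pausable = registry.getPausable("my-channel");
+        pausable.pause(); // stop pulling new messages from the broker for this channel
+    }
+
+    @POST
+    @Path("/resume")
+    public void resume() {
+        PausableChannel pausable = registry.getPausable("my-channel");
+        pausable.resume(); // start pulling messages again
+    }
+}
+----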
+ +[NOTE] +==== +Kafka consumers provide a similar feature to pause and resume the consumption of messages from topic-partitions. +The Quarkus Kafka connector allows xref:kafka.adoc#kafka-bare-clients[access to the underlying client] to pause/resume the consumption. + +However, by default, with the `pause-if-no-requests=true` configuration, +the connector handles automatically the back-pressure, +by the pausing and resuming the Kafka consumer based on downstream requests. +It is therefore recommended to use pausable channels with the default `pause-if-no-requests=true` configuration. +==== + ==== Multiple Outgoings and `@Broadcast` By default, messages transmitted in a channel are only dispatched to a single consumer. diff --git a/docs/src/main/asciidoc/native-reference.adoc b/docs/src/main/asciidoc/native-reference.adoc index 57dd02b740429..d3751eb901087 100644 --- a/docs/src/main/asciidoc/native-reference.adoc +++ b/docs/src/main/asciidoc/native-reference.adoc @@ -608,7 +608,7 @@ $ ./mvnw verify -DskipITs=false -Dquarkus.test.integration-test-profile=test-wit [INFO] T E S T S [INFO] ------------------------------------------------------- [INFO] Running org.acme.GreetingResourceIT -2024-05-14 16:29:53,941 INFO [io.qua.tes.com.DefaultDockerContainerLauncher] (main) Executing "podman run --name quarkus-integration-test-PodgW -i --rm --user 501:20 -p 8081:8081 -p 8444:8444 --entrypoint java -v /tmp/new-project/target:/project --env QUARKUS_LOG_CATEGORY__IO_QUARKUS__LEVEL=INFO --env QUARKUS_HTTP_PORT=8081 --env QUARKUS_HTTP_SSL_PORT=8444 --env TEST_URL=http://localhost:8081 --env QUARKUS_PROFILE=test-with-native-agent --env QUARKUS_TEST_INTEGRATION_TEST_PROFILE=test-with-native-agent quay.io/quarkus/ubi-quarkus-mandrel-builder-image:jdk-21 -agentlib:native-image-agent=access-filter-file=quarkus-access-filter.json,caller-filter-file=quarkus-caller-filter.json,config-output-dir=native-image-agent-base-config, -jar quarkus-app/quarkus-run.jar" +2024-05-14 16:29:53,941 INFO [io.qua.tes.com.DefaultDockerContainerLauncher] (main) Executing "podman run --name quarkus-integration-test-PodgW -i --rm --user 501:20 -p 8081:8081 -p 8444:8444 --entrypoint java -v /tmp/new-project/target:/project --env QUARKUS_LOG_CATEGORY__IO_QUARKUS__LEVEL=INFO --env QUARKUS_HTTP_PORT=8081 --env QUARKUS_HTTP_SSL_PORT=8444 --env TEST_URL=http://localhost:8081 --env QUARKUS_PROFILE=test-with-native-agent --env QUARKUS_TEST_INTEGRATION_TEST_PROFILE=test-with-native-agent quay.io/quarkus/ubi9-quarkus-mandrel-builder-image:jdk-21 -agentlib:native-image-agent=access-filter-file=quarkus-access-filter.json,caller-filter-file=quarkus-caller-filter.json,config-output-dir=native-image-agent-base-config, -jar quarkus-app/quarkus-run.jar" ... 
[INFO] [INFO] --- quarkus:{quarkus-version}:native-image-agent (default) @ new-project --- @@ -862,7 +862,7 @@ So, go ahead and add the following options to that file: [source,properties,subs=attributes+] ---- quarkus.native.container-build=true -quarkus.native.builder-image=quay.io/quarkus/ubi-quarkus-mandrel-builder-image:{mandrel-flavor} +quarkus.native.builder-image=quay.io/quarkus/ubi9-quarkus-mandrel-builder-image:{mandrel-flavor} quarkus.container-image.build=true quarkus.container-image.group=test ---- @@ -1282,7 +1282,7 @@ These are called expert options and you can learn more about them by running: [source,bash,subs=attributes+] ---- -docker run quay.io/quarkus/ubi-quarkus-mandrel-builder-image:{mandrel-flavor} --expert-options-all +docker run quay.io/quarkus/ubi9-quarkus-mandrel-builder-image:{mandrel-flavor} --expert-options-all ---- [WARNING] @@ -2478,7 +2478,7 @@ E.g. [source,bash,subs=attributes+] ---- ./mvnw package -DskipTests -Dnative -Dquarkus.native.container-build=true \ - -Dquarkus.native.builder-image=quay.io/quarkus/ubi-quarkus-mandrel-builder-image:{mandrel-flavor} \ + -Dquarkus.native.builder-image=quay.io/quarkus/ubi9-quarkus-mandrel-builder-image:{mandrel-flavor} \ -Dquarkus.native.monitoring=jfr ---- diff --git a/docs/src/main/asciidoc/observability-devservices-lgtm.adoc b/docs/src/main/asciidoc/observability-devservices-lgtm.adoc index 5d207a4791ecc..37fc37fa7cb65 100644 --- a/docs/src/main/asciidoc/observability-devservices-lgtm.adoc +++ b/docs/src/main/asciidoc/observability-devservices-lgtm.adoc @@ -4,14 +4,14 @@ and pull requests should be submitted there: https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// = Observability Dev Services with Grafana OTel LGTM - include::_attributes.adoc[] :categories: observability,devservices,telemetry,metrics,tracing,logging, opentelemetry, micrometer, prometheus, tempo, loki, grafana :summary: Instructions on how to use Grafana Otel LGTM :topics: observability,grafana,lgtm,otlp,opentelemetry,devservices,micrometer :extensions: io.quarkus:quarkus-observability-devservices -https://github.com/grafana/docker-otel-lgtm[OTel-LGTM] is `all-in-one` Docker image containing OpenTelemetry's https://github.com/open-telemetry/opentelemetry-proto/blob/main/docs/README.md[OTLP] as the protocol to transport metrics, tracing and logging data to an https://opentelemetry.io/docs/collector[OpenTelemetry Collector] which then stores signals data into https://prometheus.io/[Prometheus] (metrics), https://github.com/grafana/tempo[Tempo] (traces) and https://github.com/grafana/loki[Loki] (logs), only to have it visualized by https://github.com/grafana/grafana[Grafana]. It's used by Quarkus Observability to provide the Grafana OTel LGTM Dev Resource. +This Dev Service provides the https://github.com/grafana/docker-otel-lgtm[Grafana OTel-LGTM], an `all-in-one` Docker image containing an https://opentelemetry.io/docs/collector[OpenTelemetry Collector] receiving and then forwarding telemetry data to https://prometheus.io/[Prometheus] (metrics), https://github.com/grafana/tempo[Tempo] (traces) and https://github.com/grafana/loki[Loki] (logs). +This data can then be visualized by https://github.com/grafana/grafana[Grafana]. 
== Configuring your project @@ -33,9 +33,35 @@ Add the Quarkus Grafana OTel LGTM sink (where data goes) extension to your build implementation("io.quarkus:quarkus-observability-devservices-lgtm") ---- -=== Metrics +=== Micrometer + +The https://quarkus.io/guides/telemetry-micrometer[Micrometer Quarkus extension] provides metrics from automatic instrumentation implemented in Quarkus and its extensions. + +There are multiple ways to output Micrometer metrics. Next there are some examples: + +==== Using the Micrometer Prometheus registry + +This is the most common way to output metrics from Micrometer and the default way in Quarkus. The Micrometer Prometheus registry will publish data in the `/q/metrics` endpoint and a scraper inside the Grafana LGTM Dev Service will grab it (*pull* data from the service). + + +[source,xml,role="primary asciidoc-tabs-target-sync-cli asciidoc-tabs-target-sync-maven"] +.pom.xml +---- + + io.quarkiverse.micrometer.registry + quarkus-micrometer-registry-prometheus + +---- + +[source,gradle,role="secondary asciidoc-tabs-target-sync-gradle"] +.build.gradle +---- +implementation("io.quarkiverse.micrometer.registry:quarkus-micrometer-registry-prometheus") +---- + +==== Using the Micrometer OTLP registry -If you need metrics, add the Micrometer OTLP registry to your build file: +The https://docs.quarkiverse.io/quarkus-micrometer-registry/dev/micrometer-registry-otlp.html[Quarkiverse Micrometer OTLP registry] will output data using the OpenTelemetry OTLP protocol to the Grafana LGTM Dev Service. This will *push* data out of the service: [source,xml,role="primary asciidoc-tabs-target-sync-cli asciidoc-tabs-target-sync-maven"] .pom.xml @@ -52,11 +78,15 @@ If you need metrics, add the Micrometer OTLP registry to your build file: implementation("io.quarkiverse.micrometer.registry:quarkus-micrometer-registry-otlp") ---- -When using the https://micrometer.io/[MicroMeter's] Quarkiverse OTLP registry to push metrics to Grafana OTel LGTM, the `quarkus.micrometer.export.otlp.url` property is automatically set to OTel collector endpoint as seen from the outside of the docker container. +When using the https://micrometer.io/[Micrometer's] Quarkiverse OTLP registry to push metrics to Grafana OTel LGTM, the `quarkus.micrometer.export.otlp.url` property is automatically set to OTel collector endpoint as seen from the outside of the Docker container. + +=== OpenTelemetry -=== Tracing +With OpenTelemetry, metrics, traces and logs can be created and sent to the Grafana LGTM Dev Service. -For Tracing add the `quarkus-opentelemetry` extension to your build file: +By default, the https://quarkus.io/guides/opentelemetry[OpenTelemetry extension] will produce https://quarkus.io/guides/opentelemetry-tracing[traces]. https://quarkus.io/guides/opentelemetry-metrics[Metrics] and https://quarkus.io/guides/opentelemetry-logging[logs] must be enabled separately. + +The `quarkus-opentelemetry` extension can be added to your build file like this: [source,xml,role="primary asciidoc-tabs-target-sync-cli asciidoc-tabs-target-sync-maven"] .pom.xml ---- @@ -72,11 +102,36 @@ For Tracing add the `quarkus-opentelemetry` extension to your build file: implementation("io.quarkus:quarkus-opentelemetry") ---- -The `quarkus.otel.exporter.otlp.endpoint` property is automatically set to OTel collector endpoint as seen from the outside of the docker container. +The `quarkus.otel.exporter.otlp.endpoint` property is automatically set to the OTel collector endpoint as seen from the outside of the Docker container. 
The `quarkus.otel.exporter.otlp.protocol` is set to `http/protobuf`. -=== Access Grafana +=== Micrometer to OpenTelemetry bridge + +This extension provides Micrometer metrics and OpenTelemetry metrics, traces and logs. All data is managed and sent out by the OpenTelemetry extension. + +**All signals are enabled by default.** + +The extension can be added to your build file like this: + +[source,xml,role="primary asciidoc-tabs-target-sync-cli asciidoc-tabs-target-sync-maven"] +.pom.xml +---- + + io.quarkus + quarkus-micrometer-opentelemetry + +---- + +[source,gradle,role="secondary asciidoc-tabs-target-sync-gradle"] +.build.gradle +---- +implementation("io.quarkus:quarkus-micrometer-opentelemetry") +---- + +== Grafana + +=== Grafana UI access Once you start your app in dev mode: @@ -87,19 +142,43 @@ You will see a log entry like this: [source, log] ---- [io.qu.ob.de.ObservabilityDevServiceProcessor] (build-35) Dev Service Lgtm started, config: {grafana.endpoint=http://localhost:42797, quarkus.otel.exporter.otlp.endpoint=http://localhost:34711, otel-collector.url=localhost:34711, quarkus.micrometer.export.otlp.url=http://localhost:34711/v1/metrics, quarkus.otel.exporter.otlp.protocol=http/protobuf} - ---- Remember that Grafana is accessible in an ephemeral port, so you need to check the logs to see which port is being used. In this example, the grafana endpoint is `grafana.endpoint=http://localhost:42797`. -If you miss the message you can always check the port with this Docker command: -[source, bash] ----- -docker ps | grep grafana ----- +Another option is to use the *Dev UI* (http://localhost:8080/q/dev-ui/extensions) as the Grafana URL link will be available and if selected it will open a new browser tab directly to the running Grafana instance: + +image::dev-ui-observability-card.png[alt=Dev UI LGTM, align=center,width=50%] + +=== Explore + +In the explore section, you can query the data for all the data sources. + +To see traces, select the `tempo` data source and query for data: + +image::observability-grafana-tempo.png[alt=Dev UI LGTM, align=center,width=90%] + +For logs, select the `loki` data source and query for data: + +image::observability-grafana-loki.png[alt=Dev UI LGTM, align=center,width=90%] + +=== The dashboards + +The Dev Service includes a set of dashboards. + +image::observability-grafana-dashboards.png[alt=Dev UI LGTM, align=center,width=90%] + +Each dashboard is tuned for the specific application setup. The available dashboards are: + +* *Quarkus Micrometer OpenTelemetry*: to be used with the Micrometer and OpenTelemetry extension. +* *Quarkus Micrometer OTLP registry*: to be used with the Micrometer OTLP registry extension. +* *Quarkus Micrometer Prometheus registry*: to be used with the Micrometer Prometheus registry extension. +* *Quarkus OpenTelemetry Logging*: to view logs coming from the OpenTelemetry extension. -Another option is to use the Dev UI as the Grafana URL link will be available and if selected will open a new browser tab directly to the running Grafana instance: -image::dev-ui-observability-card.png[alt=Dev UI LGTM, align=center,width=80%] +[NOTE] +==== +Some panels in the dashboards might take a few minutes to show accurate data when their values are calculated over a sliding time window. 
+==== === Additional configuration diff --git a/docs/src/main/asciidoc/openapi-swaggerui.adoc b/docs/src/main/asciidoc/openapi-swaggerui.adoc index 356bb5156445d..e69f2935bc41d 100644 --- a/docs/src/main/asciidoc/openapi-swaggerui.adoc +++ b/docs/src/main/asciidoc/openapi-swaggerui.adoc @@ -165,7 +165,7 @@ public class FruitResourceTest { Quarkus provides the https://github.com/smallrye/smallrye-open-api/[SmallRye OpenAPI] extension compliant with the https://github.com/eclipse/microprofile-open-api/[MicroProfile OpenAPI] specification in order to generate your API -https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.0.md[OpenAPI v3 specification]. +https://spec.openapis.org/oas/v3.1.0.html[OpenAPI v3 specification]. You just need to add the `openapi` extension to your Quarkus application: @@ -198,7 +198,7 @@ Once your application is started, you can make a request to the default `/q/open [source,shell] ---- $ curl http://localhost:8080/q/openapi -openapi: 3.0.3 +openapi: 3.1.0 info: title: Generated API version: "1.0" @@ -359,7 +359,7 @@ Remember that setting fields on the schema will override what has been generated === Runtime filters -Runtime filters by default runs on startup (when the final OpenAPI document gets created). You can change runtime filters to run on every request, making the openapi document dynamic. +Runtime filters by default runs on startup (when the final OpenAPI document gets created). You can change runtime filters to run on every request, making the OpenAPI document dynamic. To do this you need to set this propery: `quarkus.smallrye-openapi.always-run-filter=true`. == Loading OpenAPI Schema From Static Files @@ -373,7 +373,7 @@ Quarkus also supports alternative <> [source,yaml] ---- -openapi: 3.0.1 +openapi: 3.1.0 info: title: Static OpenAPI document of fruits resource description: Fruit resources Open API documentation @@ -446,16 +446,23 @@ Live reload of static OpenAPI document is supported during development. A modifi == Changing the OpenAPI version -By default, when the document is generated, the OpenAPI version used will be `3.0.3`. If you use a static file as mentioned above, the version in the file +By default, when the document is generated, the OpenAPI version used will be `3.1.0`. If you use a static file as mentioned above, the version in the file will be used. You can also define the version in SmallRye using the following configuration: [source, properties] ---- -mp.openapi.extensions.smallrye.openapi=3.0.2 +mp.openapi.extensions.smallrye.openapi=3.0.4 ---- This might be useful if your API goes through a Gateway that needs a certain version. +[NOTE] +==== +Changing the OpenAPI version between `3.0.x` and `3.1.x` versions will result in changes to the rendered document to satisfy the requirements +of the chosen version. A good starting point to learn about the differences between OpenAPI 3.0 and 3.1 is the +https://www.openapis.org/blog/2021/02/16/migrating-from-openapi-3-0-to-3-1-0[OpenAPI Initiative]. +==== + == Auto-generation of Operation Id The https://swagger.io/docs/specification/paths-and-operations/[Operation Id] can be set using the `@Operation` annotation, and is in many cases useful when using a tool to generate a client stub from the schema. 
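+
+For example, a stable operation id can be set explicitly on a resource method; a minimal sketch (the resource, path, and id names are illustrative):
+
+[source,java]
+----
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.Path;
+
+import org.eclipse.microprofile.openapi.annotations.Operation;
+
+@Path("/fruits")
+public class FruitResource {
+
+    @GET
+    @Operation(operationId = "listFruits", summary = "List all fruits") // many client generators use the operation id as the method name
+    public String list() {
+        return "[]";
+    }
+}
+----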
diff --git a/docs/src/main/asciidoc/platform.adoc b/docs/src/main/asciidoc/platform.adoc index 6b14645a13c3e..5b40093988e0a 100644 --- a/docs/src/main/asciidoc/platform.adoc +++ b/docs/src/main/asciidoc/platform.adoc @@ -129,9 +129,25 @@ A platform properties file for the example above would contain: [source,text,subs=attributes+] ---- -platform.quarkus.native.builder-image=quay.io/quarkus/ubi-quarkus-mandrel-builder-image:{mandrel-flavor} +platform.quarkus.native.builder-image=quay.io/quarkus/ubi9-quarkus-mandrel-builder-image:{mandrel-flavor} ---- +[IMPORTANT] +==== +Starting with Quarkus 3.19+, the _builder_ image used to build the native executable is based on UBI 9. +It means that the native executable produced by the container build will be based on UBI 9 as well. +So, if you plan to build a container, make sure that the base image in your `Dockerfile` is compatible with UBI 9. +The native executable will not run on UBI 8 base images. + +For example to switch back to an UBI8 _builder image_ you can use: + +`platform.quarkus.native.builder-image=quay.io/quarkus/ubi-quarkus-mandrel-builder-image:{mandrel-flavor}` + +You can see the available tags for UBI8 https://quay.io/repository/quarkus/ubi-quarkus-mandrel-builder-image?tab=tags[here] +and for UBI9 https://quay.io/repository/quarkus/ubi9-quarkus-mandrel-builder-image?tab=tags[here (UBI 9)]) +==== + + There is also a Maven plugin goal that validates the platform properties content and its artifact coordinates and also checks whether the platform properties artifact is present in the platform's BOM. Here is a sample plugin configuration: [source,xml] diff --git a/docs/src/main/asciidoc/quarkus-runtime-base-image.adoc b/docs/src/main/asciidoc/quarkus-runtime-base-image.adoc index c9640697f42ac..d2ebb7802c7db 100644 --- a/docs/src/main/asciidoc/quarkus-runtime-base-image.adoc +++ b/docs/src/main/asciidoc/quarkus-runtime-base-image.adoc @@ -22,10 +22,9 @@ In your `Dockerfile`, just use: [source, dockerfile] ---- -FROM quay.io/quarkus/quarkus-micro-image:2.0 +FROM quay.io/quarkus/ubi9-quarkus-micro-image:2.0 WORKDIR /work/ -COPY target/*-runner /work/application -RUN chmod 775 /work +COPY --chmod=0755 target/*-runner /work/application EXPOSE 8080 CMD ["./application", "-Dquarkus.http.host=0.0.0.0"] ---- @@ -39,11 +38,11 @@ In this case, you need to use a multi-stage `dockerfile` to copy the required li [source, dockerfile] ---- # First stage - install the dependencies in an intermediate container -FROM registry.access.redhat.com/ubi8/ubi-minimal:8.10 as BUILD +FROM registry.access.redhat.com/ubi9/ubi-minimal:9.5 as BUILD RUN microdnf install freetype # Second stage - copy the dependencies -FROM quay.io/quarkus/quarkus-micro-image:2.0 +FROM quay.io/quarkus/ubi9-quarkus-micro-image:2.0 COPY --from=BUILD \ /lib64/libfreetype.so.6 \ /lib64/libbz2.so.1 \ @@ -51,8 +50,7 @@ COPY --from=BUILD \ /lib64/ WORKDIR /work/ -COPY target/*-runner /work/application -RUN chmod 775 /work +COPY --chmod=0755 target/*-runner /work/application EXPOSE 8080 CMD ["./application", "-Dquarkus.http.host=0.0.0.0"] ---- @@ -62,11 +60,11 @@ If you need to have access to the full AWT support, you need more than just `lib [source, dockerfile] ---- # First stage - install the dependencies in an intermediate container -FROM registry.access.redhat.com/ubi8/ubi-minimal:8.10 as BUILD +FROM registry.access.redhat.com/ubi9/ubi-minimal:9.5 as BUILD RUN microdnf install freetype fontconfig # Second stage - copy the dependencies -FROM quay.io/quarkus/quarkus-micro-image:2.0 +FROM 
quay.io/quarkus/ubi9-quarkus-micro-image:2.0 COPY --from=BUILD \ /lib64/libfreetype.so.6 \ /lib64/libgcc_s.so.1 \ @@ -95,8 +93,7 @@ COPY --from=BUILD \ /etc/fonts /etc/fonts WORKDIR /work/ -COPY target/*-runner /work/application -RUN chmod 775 /work +COPY --chmod=0755 target/*-runner /work/application EXPOSE 8080 CMD ["./application", "-Dquarkus.http.host=0.0.0.0"] ---- @@ -104,7 +101,8 @@ CMD ["./application", "-Dquarkus.http.host=0.0.0.0"] == Alternative - Using ubi-minimal -If the micro image does not suit your requirements, you can use https://catalog.redhat.com/software/containers/ubi8/ubi-minimal/5c359a62bed8bd75a2c3fba8[ubi8/ubi-minimal]. + +If the micro image does not suit your requirements, you can use https://catalog.redhat.com/software/containers/ubi9-minimal/61832888c0d15aff4912fe0d[ubi9-minimal]. It's a bigger image, but contains more utilities and is closer to a full Linux distribution. Typically, it contains a package manager (`microdnf`), so you can install packages more easily. @@ -112,12 +110,12 @@ To use this base image, use the following `Dockerfile`: [source, dockerfile] ---- -FROM registry.access.redhat.com/ubi8/ubi-minimal:8.10 +FROM registry.access.redhat.com/ubi9/ubi-minimal:9.5 WORKDIR /work/ RUN chown 1001 /work \ && chmod "g+rwX" /work \ && chown 1001:root /work -COPY --chown=1001:root target/*-runner /work/application +COPY --chown=1001:root --chmod=0755 target/*-runner /work/application EXPOSE 8080 USER 1001 diff --git a/docs/src/main/asciidoc/scheduler-reference.adoc b/docs/src/main/asciidoc/scheduler-reference.adoc index bc15e9117e74f..72531ad0abf01 100644 --- a/docs/src/main/asciidoc/scheduler-reference.adoc +++ b/docs/src/main/asciidoc/scheduler-reference.adoc @@ -442,6 +442,57 @@ class MyService { NOTE: A CDI event is fired synchronously and asynchronously when the scheduler or a scheduled job is paused/resumed. The payload is `io.quarkus.scheduler.SchedulerPaused`, `io.quarkus.scheduler.SchedulerResumed`, `io.quarkus.scheduler.ScheduledJobPaused` and `io.quarkus.scheduler.ScheduledJobResumed` respectively. +[[scheduling_long_running_tasks]] +== Scheduling Long-Running Tasks + +Executing a long-running task might yield a warning message similar to the following: + +[source,java] +---- +WARN [io.ver.cor.imp.BlockedThreadChecker] (vertx-blocked-thread-checker) Thread Thread[vert.x-worker-thread-1,5,main] has been blocked for 81879 ms, time limit is 60000 ms: io.vertx.core.VertxException: Thread blocked +---- + +This is happening because the default worker thread pool is coming from Vert.x which guards against threads being blocked for far too long. + +NOTE: The amount of time for which a Vert.x worker thread can be blocked is also https://quarkus.io/guides/all-config#quarkus-vertx_quarkus-vertx-max-worker-execute-time[configurable]. + +Therefore, a proper way to execute long tasks is to offload them from the scheduled method to a custom executor service. 
+Here's an example of such setup for a long-running task that we do not expect to execute often: + +[source,java] +---- +@ApplicationScoped +public class LongRunner implements Runnable { + + private ExecutorService executorService; + + @PostConstruct + void init() { + executorService = Executors.newThreadPerTaskExecutor(Executors.defaultThreadFactory()); <1> + } + + @PreDestroy + void destroy() { + executorService.shutdown(); <2> + } + + + @Scheduled(cron = "{my.schedule}") + public void update() { + executorService.execute(this); <3> + } + + @Override + public void run() { <4> + // perform the actual task here + } +} +---- +<1> Create a fitting executor. In this case, a new thread is created per scheduled task and stopped once the task finishes. +<2> `@PreDestroy` callback is used to shut down the executor service. +<3> Scheduled method only delegates the job to the custom executor - this prevent Vert.x thread from being blocked. +<4> The bean implements `Runnable`, a format we can directly pass to the executor service as a parameter. + [[programmatic_scheduling]] == Programmatic Scheduling diff --git a/docs/src/main/asciidoc/security-getting-started-tutorial.adoc b/docs/src/main/asciidoc/security-getting-started-tutorial.adoc index 72fee1f4e9401..d4d214b6fa9fb 100644 --- a/docs/src/main/asciidoc/security-getting-started-tutorial.adoc +++ b/docs/src/main/asciidoc/security-getting-started-tutorial.adoc @@ -4,7 +4,7 @@ and pull requests should be submitted there: https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// [id="security-getting-started-tutorial"] -= Getting started with Security by using Basic authentication and Jakarta Persistence += Getting started with security by using Basic authentication and Jakarta Persistence include::_attributes.adoc[] :diataxis-type: tutorial :categories: security,getting-started @@ -158,6 +158,7 @@ endif::no-quarkus-security-jpa-reactive[] Add a regular Jakarta REST resource to your Java source code, as shown in the following code snippet: + ==== +.`src/main/java/org/acme/security/jpa/PublicResource.java` [source,java] ---- package org.acme.security.jpa; @@ -185,6 +186,7 @@ The source code for the `/api/admin` endpoint is similar, but instead, you use a Add a Jakarta REST resource with the following `@RolesAllowed` annotation: + ==== +.`src/main/java/org/acme/security/jpa/AdminResource.java` [source,java] ---- package org.acme.security.jpa; @@ -211,12 +213,12 @@ public class AdminResource { Use `SecurityContext` to get access to the currently authenticated `Principal` user and to return their username, all of which is retrieved from the database. 
+ ==== +.`src/main/java/org/acme/security/jpa/UserResource.java` [source,java] ---- package org.acme.security.jpa; import jakarta.annotation.security.RolesAllowed; -import jakarta.inject.Inject; import jakarta.ws.rs.GET; import jakarta.ws.rs.Path; import jakarta.ws.rs.core.Context; @@ -238,8 +240,9 @@ public class UserResource { [[define-the-user-entity]] == Define the user entity -* You can now describe how you want security information to be stored in the model by adding annotations to the `user` entity, as outlined in the following code snippet: +Specify how security information is stored in the model by adding the following annotations to the `user` entity: +.`src/main/java/org/acme/security/jpa/User.java` [source,java] ---- package org.acme.security.jpa; @@ -316,10 +319,11 @@ When secure access is required, and no other authentication mechanisms are enabl Therefore, in this tutorial, you do not need to set the property `quarkus.http.auth.basic` to `true`. ==== + -. Configure at least one data source in the `application.properties` file so the `quarkus-security-jpa` extension can access your database. +. Configure at least one data source in the `src/main/resources/application.properties` file so the `quarkus-security-jpa` extension can access your database. For example: + ==== +.src/main/resources/application.properties [source,properties] ---- quarkus.http.auth.basic=true @@ -327,7 +331,7 @@ quarkus.http.auth.basic=true %prod.quarkus.datasource.db-kind=postgresql %prod.quarkus.datasource.username=quarkus %prod.quarkus.datasource.password=quarkus -%prod.quarkus.datasource.jdbc.url=jdbc:postgresql:security_jpa +%prod.quarkus.datasource.jdbc.url=jdbc:postgresql:quarkus quarkus.hibernate-orm.database.generation=drop-and-create ---- @@ -344,6 +348,7 @@ ifndef::no-quarkus-security-jpa-reactive[] * The URLs of Reactive datasources that are used by the `quarkus-security-jpa-reactive` extension are set with the `quarkus.datasource.reactive.url` configuration property and not the `quarkus.datasource.jdbc.url` configuration property typically used by JDBC datasources. + +.src/main/resources/application.properties [source,properties] ---- %prod.quarkus.datasource.reactive.url=vertx-reactive:postgresql://localhost:5431/security_jpa @@ -356,6 +361,7 @@ Therefore, adjustments are needed in a production environment. ==== endif::no-quarkus-security-jpa-reactive[] +.`src/main/java/org/acme/security/jpa/Startup.java` [source,java] ---- package org.acme.security.jpa; @@ -415,9 +421,11 @@ include::{includes}/devtools/dev.adoc[] In this scenario, `Dev Services for PostgreSQL` launches and configures a `PostgreSQL` test container. +Make sure that either `Podman` or `Docker` is installed on your computer. To write the integration test, use the following code sample: +.`src/test/java/org/acme/security/jpa/JpaSecurityRealmTest.java` [source,java] ---- package org.acme.security.jpa; @@ -486,6 +494,8 @@ public class JpaSecurityRealmTest { As you can see in this code sample, you do not need to start the test container from the test code. +To run these tests, choose `Press [r] to resume testing` option which is shown in the console after you started your application in dev mode. + [NOTE] ==== When you start your application in dev mode, Dev Services for PostgreSQL launches a PostgreSQL dev mode container so that you can start developing your application. 
@@ -493,9 +503,17 @@ While developing your application, you can add and run tests individually by usi Dev Services for PostgreSQL supports testing while you develop by providing a separate PostgreSQL test container that does not conflict with the dev mode container. ==== +Alternatively, you can run these tests using Maven: + +[source,bash,subs=attributes+] +---- +./mvnw test +---- + == Test your application in production mode by using Curl or browser -To test your application using Curl or the browser, you must first start a PostgreSQL server, then compile and run your application either in JVM or native mode. +To test your application using Curl or a browser start a PostgreSQL server first. +Then, compile and run your application in either JVM or native mode. === Start the PostgreSQL server diff --git a/docs/src/main/asciidoc/security-jwt-build.adoc b/docs/src/main/asciidoc/security-jwt-build.adoc index 931ebdf339139..591788711d305 100644 --- a/docs/src/main/asciidoc/security-jwt-build.adoc +++ b/docs/src/main/asciidoc/security-jwt-build.adoc @@ -3,6 +3,7 @@ This guide is maintained in the main Quarkus repository and pull requests should be submitted there: https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// +[id="security-jwt-build"] = Build, sign, and encrypt JSON Web Tokens include::_attributes.adoc[] :categories: security @@ -79,11 +80,13 @@ JwtClaimsBuilder builder5 = Jwt.claims(token); The API is fluent so you can initialize the builder as part of a fluent sequence. The builder automatically sets the following claims if they are not explicitly configured: + - `iat` (issued at): Current time - `exp` (expires at): Five minutes from the current time (customizable with the `smallrye.jwt.new-token.lifespan` property) - `jti` (unique token identifier) You can configure the following properties globally to avoid setting them directly in the builder: + - `smallrye.jwt.new-token.issuer`: Specifies the default issuer. - `smallrye.jwt.new-token.audience`: Specifies the default audience. diff --git a/docs/src/main/asciidoc/security-jwt.adoc b/docs/src/main/asciidoc/security-jwt.adoc index 539c5961d57a5..d5f3ae002865a 100644 --- a/docs/src/main/asciidoc/security-jwt.adoc +++ b/docs/src/main/asciidoc/security-jwt.adoc @@ -14,7 +14,7 @@ include::_attributes.adoc[] :extensions: io.quarkus:quarkus-smallrye-jwt This guide explains how to integrate link:https://github.com/smallrye/smallrye-jwt/[SmallRye JWT] into your Quarkus application to implement link:https://tools.ietf.org/html/rfc7519[JSON Web Token (JWT)] security in compliance with the MicroProfile JWT specification. -You’ll learn how to verify JWTs, represent them as MicroProfile JWT org.eclipse.microprofile.jwt.JsonWebToken, and secure Quarkus HTTP endpoints using bearer token authorization and link:https://en.wikipedia.org/wiki/Role-based_access_control[Role-Based Access Control]. +You’ll learn how to verify JWTs, represent them as MicroProfile JWT `org.eclipse.microprofile.jwt.JsonWebToken`, and secure Quarkus HTTP endpoints using bearer token authorization and link:https://en.wikipedia.org/wiki/Role-based_access_control[Role-Based Access Control]. [NOTE] ==== @@ -145,9 +145,9 @@ public class TokenSecuredResource { <7> Builds a response containing the caller's name, the `isSecure()` and `getAuthenticationScheme()` states of the request `SecurityContext`, and whether a non-null `JsonWebToken` was injected. 
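+
+For reference, the endpoint described by these callouts looks roughly like the following simplified sketch; the quickstart contains the complete version, so treat this only as an orientation aid:
+
+[source,java]
+----
+import jakarta.annotation.security.PermitAll;
+import jakarta.inject.Inject;
+import jakarta.ws.rs.GET;
+import jakarta.ws.rs.Path;
+import jakarta.ws.rs.Produces;
+import jakarta.ws.rs.core.Context;
+import jakarta.ws.rs.core.MediaType;
+import jakarta.ws.rs.core.SecurityContext;
+
+import org.eclipse.microprofile.jwt.JsonWebToken;
+
+@Path("/secured")
+public class TokenSecuredResource {
+
+    @Inject
+    JsonWebToken jwt; // injected even for anonymous requests, but then carries no claims
+
+    @GET
+    @Path("permit-all")
+    @PermitAll
+    @Produces(MediaType.TEXT_PLAIN)
+    public String hello(@Context SecurityContext ctx) {
+        String name = ctx.getUserPrincipal() == null ? "anonymous" : ctx.getUserPrincipal().getName();
+        return String.format("hello %s, isHttps: %s, authScheme: %s, hasJWT: %s",
+                name, ctx.isSecure(), ctx.getAuthenticationScheme(), jwt.getClaimNames() != null);
+    }
+}
+----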
[[run-application]] -=== Run the application +=== Run the application in dev mode -Now you are ready to run our application. Use: +Now, you are ready to run the application in dev mode by using one of the following commands: include::{includes}/devtools/dev.adoc[] @@ -174,6 +174,12 @@ Now that the REST endpoint is running, you can access it by using a command line [source,shell] ---- $ curl http://127.0.0.1:8080/secured/permit-all; echo +---- + +This command returns the following response: + +[source,shell] +---- hello anonymous, isHttps: false, authScheme: null, hasJWT: false ---- @@ -255,9 +261,6 @@ public class TokenSecuredResource { <3> The `@RolesAllowed` annotation restricts access to users with either the "User" or "Admin" role. <4> The response is constructed similarly to the `hello` method, with the addition of the `birthdate` claim retrieved directly from the injected `JsonWebToken`. - - - After you make this addition to your `TokenSecuredResource`, rerun the `./mvnw quarkus:dev` command, and then try `curl -v http://127.0.0.1:8080/secured/roles-allowed; echo` to attempt to access the new endpoint. Your output should be as follows: @@ -266,6 +269,12 @@ Your output should be as follows: [source,shell] ---- $ curl -v http://127.0.0.1:8080/secured/roles-allowed; echo +---- + +This command returns the following response: + +[source,shell] +---- * Trying 127.0.0.1... * TCP_NODELAY set * Connected to 127.0.0.1 (127.0.0.1) port 8080 (#0) @@ -284,10 +293,13 @@ $ curl -v http://127.0.0.1:8080/secured/roles-allowed; echo ---- Excellent. -You have not provided any JWT in the request, so you should not be able to access the endpoint, and you were not able to. +You did not provide a JWT in the request, so access to the endpoint was correctly denied. Instead, you received an HTTP 401 Unauthorized error. -You need to obtain and pass in a valid JWT to access that endpoint. -There are two steps to this, 1) configuring our {extension-name} extension with information on how to validate a JWT, and 2) generating a matching JWT with the appropriate claims. + +To access the endpoint, you must obtain and include a valid JWT in your request. This involves two steps: + +. Configuring the {extension-name} extension with the necessary information to validate a JWT. +. Generating a JWT with the appropriate claims to match the configuration. === Configuring the {extension-name} extension security information @@ -347,31 +359,35 @@ import java.util.Arrays; import java.util.HashSet; import org.eclipse.microprofile.jwt.Claims; - import io.smallrye.jwt.build.Jwt; +/** + * A utility class to generate and print a JWT token string to stdout. + */ public class GenerateToken { + /** - * Generate JWT token + * Generates and prints a JWT token. */ public static void main(String[] args) { - String token = - Jwt.issuer("https://example.com/issuer") // <1> - .upn("jdoe@quarkus.io") // <2> - .groups(new HashSet<>(Arrays.asList("User", "Admin"))) // <3> - .claim(Claims.birthdate.name(), "2001-07-13") // <4> - .sign(); + String token = Jwt.issuer("https://example.com/issuer") // <1> + .upn("jdoe@quarkus.io") // <2> + .groups(new HashSet<>(Arrays.asList("User", "Admin"))) // <3> + .claim(Claims.birthdate.name(), "2001-07-13") // <4> + .sign(); + System.out.println(token); + System.exit(0); } } ---- -<1> Set JWT issuer as an `iss` claim value. -This must match the server side `mp.jwt.verify.issuer` for the token to be accepted as valid. 
-<2> The `upn` claim is defined by the {mp-jwt} spec as the preferred claim to use for the `Principal` seen by the container security APIs. -<3> The `group` claim provides the groups and top-level roles associated with the JWT bearer. -<4> The `birthday` claim. -It can be considered a sensitive claim, so consider encrypting the claims, as described in xref:security-jwt-build.adoc[Generate JWT tokens with SmallRye JWT]. +<1> Sets the `iss` (issuer) claim in the JWT. + This value must match the server-side `mp.jwt.verify.issuer` configuration for the token to be considered valid. +<2> Specifies the `upn` (User Principal Name) claim, which the {mp-jwt} specification defines as the preferred claim for identifying the `Principal` in container security APIs. +<3> Defines the `groups` claim, which provides the group memberships and top-level roles assigned to the JWT bearer. +<4> Adds a `birthdate` claim. + Because this can be considered sensitive information, consider encrypting claims as described in xref:security-jwt-build.adoc[Generate JWT tokens with SmallRye JWT]. Note that for this code to work, you need the content of the RSA private key corresponding to the public key you have in the `TokenSecuredResource` application. Take the following PEM content and place it into `security-jwt-quickstart/src/test/resources/privateKey.pem`: @@ -444,27 +460,32 @@ Next, use the following command to generate the JWT: [source,shell] ---- $ mvn exec:java -Dexec.mainClass=org.acme.security.jwt.GenerateToken -Dexec.classpathScope=test -Dsmallrye.jwt.sign.key.location=privateKey.pem - -eyJraWQiOiJcL3ByaXZhdGVLZXkucGVtIiwidHlwIjoiSldUIiwiYWxnIjoiUlMyNTYifQ.eyJzdWIiOiJqZG9lLXVzaW5nLWp3dC1yYmFjIiwiYXVkIjoidXNpbmctand0LXJiYWMiLCJ1cG4iOiJqZG9lQHF1YXJrdXMuaW8iLCJiaXJ0aGRhdGUiOiIyMDAxLTA3LTEzIiwiYXV0aF90aW1lIjoxNTUxNjU5Njc2LCJpc3MiOiJodHRwczpcL1wvcXVhcmt1cy5pb1wvdXNpbmctand0LXJiYWMiLCJyb2xlTWFwcGluZ3MiOnsiZ3JvdXAyIjoiR3JvdXAyTWFwcGVkUm9sZSIsImdyb3VwMSI6Ikdyb3VwMU1hcHBlZFJvbGUifSwiZ3JvdXBzIjpbIkVjaG9lciIsIlRlc3RlciIsIlN1YnNjcmliZXIiLCJncm91cDIiXSwicHJlZmVycmVkX3VzZXJuYW1lIjoiamRvZSIsImV4cCI6MTU1MTY1OTk3NiwiaWF0IjoxNTUxNjU5Njc2LCJqdGkiOiJhLTEyMyJ9.O9tx_wNNS4qdpFhxeD1e7v4aBNWz1FCq0UV8qmXd7dW9xM4hA5TO-ZREk3ApMrL7_rnX8z81qGPIo_R8IfHDyNaI1SLD56gVX-NaOLS2OjfcbO3zOWJPKR_BoZkYACtMoqlWgIwIRC-wJKUJU025dHZiNL0FWO4PjwuCz8hpZYXIuRscfFhXKrDX1fh3jDhTsOEFfu67ACd85f3BdX9pe-ayKSVLh_RSbTbBPeyoYPE59FW7H5-i8IE-Gqu838Hz0i38ksEJFI25eR-AJ6_PSUD0_-TV3NjXhF3bFIeT4VSaIZcpibekoJg0cQm-4ApPEcPLdgTejYHA-mupb8hSwg ---- -The JWT string is a Base64 URL encoded string with three parts separated by '.' characters. -First part - JWT headers, second part - JWT claims, third part - JWT signature. +The JWT string is a Base64 URL-encoded string consisting of three parts, separated by `.` characters: + +. The header, which contains metadata about the token, such as the signing algorithm. +. The payload, also called "claims", which includes the token's claims or data. +. The signature, which verifies the token's integrity. === Finally, secured access to `/secured/roles-allowed` Now, let's use this to make a secured request to the `/secured/roles-allowed` endpoint. 
Make sure you have the Quarkus server still running in dev mode, and then run the following command, making sure to use your version of the generated JWT from the previous step: -[source,bash] ----- -curl -H "Authorization: Bearer eyJraWQiOiJcL3ByaXZhdGVLZXkucGVtIiwidHlwIjoiSldUIiwiYWxnIjoiUlMyNTYifQ.eyJzdWIiOiJqZG9lLXVzaW5nLWp3dC1yYmFjIiwiYXVkIjoidXNpbmctand0LXJiYWMiLCJ1cG4iOiJqZG9lQHF1YXJrdXMuaW8iLCJiaXJ0aGRhdGUiOiIyMDAxLTA3LTEzIiwiYXV0aF90aW1lIjoxNTUxNjUyMDkxLCJpc3MiOiJodHRwczpcL1wvcXVhcmt1cy5pb1wvdXNpbmctand0LXJiYWMiLCJyb2xlTWFwcGluZ3MiOnsiZ3JvdXAyIjoiR3JvdXAyTWFwcGVkUm9sZSIsImdyb3VwMSI6Ikdyb3VwMU1hcHBlZFJvbGUifSwiZ3JvdXBzIjpbIkVjaG9lciIsIlRlc3RlciIsIlN1YnNjcmliZXIiLCJncm91cDIiXSwicHJlZmVycmVkX3VzZXJuYW1lIjoiamRvZSIsImV4cCI6MTU1MTY1MjM5MSwiaWF0IjoxNTUxNjUyMDkxLCJqdGkiOiJhLTEyMyJ9.aPA4Rlc4kw7n_OZZRRk25xZydJy_J_3BRR8ryYLyHTO1o68_aNWWQCgpnAuOW64svPhPnLYYnQzK-l2vHX34B64JySyBD4y_vRObGmdwH_SEufBAWZV7mkG3Y4mTKT3_4EWNu4VH92IhdnkGI4GJB6yHAEzlQI6EdSOa4Nq8Gp4uPGqHsUZTJrA3uIW0TbNshFBm47-oVM3ZUrBz57JKtr0e9jv0HjPQWyvbzx1HuxZd6eA8ow8xzvooKXFxoSFCMnxotd3wagvYQ9ysBa89bgzL-lhjWtusuMFDUVYwFqADE7oOSOD4Vtclgq8svznBQ-YpfTHfb9QEcofMlpyjNA" http://127.0.0.1:8080/secured/roles-allowed; echo ----- .`curl` command for `/secured/roles-allowed` with JWT [source,shell] ---- $ curl -H "Authorization: Bearer eyJraWQ..." http://127.0.0.1:8080/secured/roles-allowed; echo +---- + +Make sure to use the generated token as the HTTP Authorization Bearer scheme value. + +This command returns the following response: + +[source,shell] +---- hello jdoe@quarkus.io, isHttps: false, authScheme: Bearer, hasJWT: true, birthdate: 2001-07-13 ---- @@ -569,65 +590,64 @@ public class TokenSecuredResource { Now generate the token again and run: -[source,bash] +[source,shell] ---- -curl -H "Authorization: Bearer eyJraWQiOiJcL3ByaXZhdGVLZXkucGVtIiwidHlwIjoiSldUIiwiYWxnIjoiUlMyNTYifQ.eyJzdWIiOiJqZG9lLXVzaW5nLWp3dC1yYmFjIiwiYXVkIjoidXNpbmctand0LXJiYWMiLCJ1cG4iOiJqZG9lQHF1YXJrdXMuaW8iLCJiaXJ0aGRhdGUiOiIyMDAxLTA3LTEzIiwiYXV0aF90aW1lIjoxNTUxNjUyMDkxLCJpc3MiOiJodHRwczpcL1wvcXVhcmt1cy5pb1wvdXNpbmctand0LXJiYWMiLCJyb2xlTWFwcGluZ3MiOnsiZ3JvdXAyIjoiR3JvdXAyTWFwcGVkUm9sZSIsImdyb3VwMSI6Ikdyb3VwMU1hcHBlZFJvbGUifSwiZ3JvdXBzIjpbIkVjaG9lciIsIlRlc3RlciIsIlN1YnNjcmliZXIiLCJncm91cDIiXSwicHJlZmVycmVkX3VzZXJuYW1lIjoiamRvZSIsImV4cCI6MTU1MTY1MjM5MSwiaWF0IjoxNTUxNjUyMDkxLCJqdGkiOiJhLTEyMyJ9.aPA4Rlc4kw7n_OZZRRk25xZydJy_J_3BRR8ryYLyHTO1o68_aNWWQCgpnAuOW64svPhPnLYYnQzK-l2vHX34B64JySyBD4y_vRObGmdwH_SEufBAWZV7mkG3Y4mTKT3_4EWNu4VH92IhdnkGI4GJB6yHAEzlQI6EdSOa4Nq8Gp4uPGqHsUZTJrA3uIW0TbNshFBm47-oVM3ZUrBz57JKtr0e9jv0HjPQWyvbzx1HuxZd6eA8ow8xzvooKXFxoSFCMnxotd3wagvYQ9ysBa89bgzL-lhjWtusuMFDUVYwFqADE7oOSOD4Vtclgq8svznBQ-YpfTHfb9QEcofMlpyjNA" http://127.0.0.1:8080/secured/roles-allowed-admin; echo +$ curl -H "Authorization: Bearer eyJraWQ..." http://127.0.0.1:8080/secured/roles-allowed-admin; echo ---- +Make sure to use the generated token as the HTTP Authorization Bearer scheme value. + +This command returns the following response: + [source,shell] ---- -$ curl -H "Authorization: Bearer eyJraWQ..." http://127.0.0.1:8080/secured/roles-allowed-admin; echo hello jdoe@quarkus.io, isHttps: false, authScheme: Bearer, hasJWT: true, birthdate: 2001-07-13 ---- -=== Package and run the application +=== Run the application in JVM mode -As usual, the application can be packaged by using: +You can run the application as a standard Java application. +. 
Compile the application: ++ +==== include::{includes}/devtools/build.adoc[] - -And executed by using `java -jar target/quarkus-app/quarkus-run.jar`: - -.Runner jar example -[source,shell,subs=attributes+] +==== +. Run the application: ++ +==== +[source,bash] ---- -$ java -jar target/quarkus-app/quarkus-run.jar -2019-03-28 14:27:48,839 INFO [io.quarkus] (main) Quarkus {quarkus-version} started in 0.796s. Listening on: http://[::]:8080 -2019-03-28 14:27:48,841 INFO [io.quarkus] (main) Installed features: [cdi, rest, rest-jackson, security, smallrye-jwt] +java -jar target/quarkus-app/quarkus-run.jar ---- +==== -You can also generate the native executable with: +=== Run the application in native mode -include::{includes}/devtools/build-native.adoc[] +You can compile this same demo into native mode without any modifications. +This implies that you no longer need to install a JVM on your production environment. +The runtime technology is included in the produced binary and optimized to run with minimal resources required. -.Native executable example -[source,shell] +Compilation takes a bit longer, so this step is disabled by default. + +. Build your application again by enabling the `native` profile: ++ +==== + +include::{includes}/devtools/build-native.adoc[] +==== +. Run the following binary directly: ++ +==== +[source,bash] ---- -[INFO] Scanning for projects... -... -[security-jwt-quickstart-runner:25602] universe: 493.17 ms -[security-jwt-quickstart-runner:25602] (parse): 660.41 ms -[security-jwt-quickstart-runner:25602] (inline): 1,431.10 ms -[security-jwt-quickstart-runner:25602] (compile): 7,301.78 ms -[security-jwt-quickstart-runner:25602] compile: 10,542.16 ms -[security-jwt-quickstart-runner:25602] image: 2,797.62 ms -[security-jwt-quickstart-runner:25602] write: 988.24 ms -[security-jwt-quickstart-runner:25602] [total]: 43,778.16 ms -[INFO] ------------------------------------------------------------------------ -[INFO] BUILD SUCCESS -[INFO] ------------------------------------------------------------------------ -[INFO] Total time: 51.500 s -[INFO] Finished at: 2019-03-28T14:30:56-07:00 -[INFO] ------------------------------------------------------------------------ - -$ ./target/security-jwt-quickstart-runner -2019-03-28 14:31:37,315 INFO [io.quarkus] (main) Quarkus 0.12.0 started in 0.006s. Listening on: http://[::]:8080 -2019-03-28 14:31:37,316 INFO [io.quarkus] (main) Installed features: [cdi, rest, rest-jackson, security, smallrye-jwt] +./target/security-jwt-quickstart-1.0.0-SNAPSHOT-runner ---- +==== === Explore the solution -The `security-jwt-quickstart` link:{quickstarts-tree-url}/security-jwt-quickstart[directory] repository contains all the versions covered in this quickstart guide, along with additional endpoints that demonstrate subresources using injected `JsonWebToken`s and their claims via CDI APIs. +The `security-jwt-quickstart` link:{quickstarts-tree-url}/security-jwt-quickstart[directory] repository contains all the versions covered in this quickstart guide, along with additional endpoints that demonstrate subresources using injected `JsonWebToken` tokens and their claims via CDI APIs. We encourage you to explore the `security-jwt-quickstart` directory and review the quickstart solutions to learn more about the features of the {extension-name} extension. @@ -1095,7 +1115,7 @@ SmallRye JWT provides more properties that can be used to customize the token pr |`smallrye.jwt.token.kid`|none|Key identifier. 
The verification JWK key and every JWT token must have a matching `kid` header if it is set. |`smallrye.jwt.time-to-live`|none|The maximum number of seconds a JWT can be issued for use. Effectively, the difference between the expiration date of the JWT and the issued at date must not exceed this value. Setting this property to a non-positive value relaxes the requirement for the token to have a valid 'iat' (issued at) claim. |`smallrye.jwt.require.named-principal`|`true`|If an application relies on `java.security.Principal` returning a name, then a token must have a `upn` or `preferred_username` or `sub` claim set. Setting this property results in SmallRye JWT throwing an exception if none of these claims is available for the application code to deal with a non-null `Principal` name reliably. -|`smallrye.jwt.path.sub`|none|Path to the claim containing the subject name. It starts from the top-level JSON object and can contain multiple segments where each segment only represents a JSON object name, for example, ' realms/subject`. This property can be used if a token has no 'sub' claim but has the subject set in a different claim. Use double quotes with the namespace-qualified claims. +|`smallrye.jwt.path.sub`|none|Path to the claim containing the subject name. It starts from the top-level JSON object and can contain multiple segments where each segment only represents a JSON object name, for example, `realms/subject`. This property can be used if a token has no 'sub' claim but has the subject set in a different claim. Use double quotes with the namespace-qualified claims. |`smallrye.jwt.claims.sub`|none| This property can set a default sub claim value when the current token has no standard or custom `sub` claim available. Effectively, this property can be used to customize the `java.security.Principal` name if no `upn` or `preferred_username` or `sub` claim is set. |`smallrye.jwt.path.groups`|none|Path to the claim containing the groups. It starts from the top-level JSON object and can contain multiple segments where each segment represents a JSON object name only, for example: `realm/groups`. This property can be used if a token has no 'groups' claim but has the groups set in a different claim. Use double quotes with the namespace-qualified claims. |`smallrye.jwt.groups-separator`|space|Separator for splitting a string which might contain multiple group values. It is only used if the `smallrye.jwt.path.groups` property points to a custom claim with a string value. The default value is a single space because a standard OAuth2 `scope` claim might contain a space-separated sequence. @@ -1114,7 +1134,7 @@ SmallRye JWT provides more properties that can be used to customize the token pr |`smallrye.jwt.client.tls.hosts`|none|Set of trusted hostnames. If the keys have to be fetched over `HTTPS` and `smallrye.jwt.client.tls.trust-all` is set to `false` then this property can be used to configure the trusted hostnames. |`smallrye.jwt.http.proxy.host`|none|HTTP proxy host. |`smallrye.jwt.http.proxy.port`|80|HTTP proxy port. -|`smallrye.jwt.keystore.type`|`JKS`|This property can be used to customize a keystore type if either `mp.jwt.verify.publickey.location` or mp.jwt.decrypt.key.location` points to a `KeyStore` file. If it is not set, the file name is checked to determine the keystore type before defaulting to `JKS`. 
+|`smallrye.jwt.keystore.type`|`JKS`|This property can be used to customize a keystore type if either `mp.jwt.verify.publickey.location` or `mp.jwt.decrypt.key.location` points to a `KeyStore` file. If it is not set, the file name is checked to determine the keystore type before defaulting to `JKS`. |`smallrye.jwt.keystore.provider`||This property can be used to customize a `KeyStore` provider if `mp.jwt.verify.publickey.location` or `mp.jwt.decrypt.key.location` points to a `KeyStore` file. |`smallrye.jwt.keystore.password`||Keystore password. If `mp.jwt.verify.publickey.location` or `mp.jwt.decrypt.key.location`, this property must be set. |`smallrye.jwt.keystore.verify.key.alias`||This property has to be set to identify a public verification key which is extracted from `KeyStore` from a matching certificate if `mp.jwt.verify.publickey.location` points to a `KeyStore` file. diff --git a/docs/src/main/asciidoc/security-keycloak-authorization.adoc b/docs/src/main/asciidoc/security-keycloak-authorization.adoc index 12a3687e8ee71..58f8d5cdf6f9f 100644 --- a/docs/src/main/asciidoc/security-keycloak-authorization.adoc +++ b/docs/src/main/asciidoc/security-keycloak-authorization.adoc @@ -3,6 +3,7 @@ This guide is maintained in the main Quarkus repository. To contribute, submit a pull request here: https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc //// +[id="security-keycloak-authorization"] = Using OpenID Connect (OIDC) and Keycloak to centralize authorization include::_attributes.adoc[] :diataxis-type: howto @@ -15,7 +16,9 @@ Learn how to enable bearer token authorization in your Quarkus application by us == Overview -The `quarkus-keycloak-authorization` extension builds on the `quarkus-oidc` extension to offer advanced authorization capabilities. It includes a policy enforcer that dynamically regulates access to secured resources. Access is governed by permissions defined in Keycloak, supporting flexible and dynamic Resource-Based Access Control (RBAC). +The Keycloak Authorization extension, `quarkus-keycloak-authorization`, extends the OpenID Connect extension, `quarkus-oidc`, to provide advanced authorization capabilities. +It features a policy enforcer that dynamically manages access to secured resources. +Access is governed by permissions defined in Keycloak, supporting flexible and dynamic Resource-Based Access Control (RBAC). Use the `quarkus-keycloak-authorization` extension only if you are using Keycloak and Keycloak Authorization Services is enabled in your environment to handle authorization decisions. @@ -23,11 +26,11 @@ If you are not using Keycloak, or if Keycloak is configured without Keycloak Aut .How it works -The `quarkus-keycloak-authorization` extension centralizes authorization responsibilities in Keycloak, enhancing security and simplifying application maintenance. The extension: +The `quarkus-keycloak-authorization` extension centralizes authorization responsibilities in Keycloak, enhancing security and simplifying application maintenance: -1. Uses the `quarkus-oidc` extension to verify bearer tokens. -2. Sends verified tokens to Keycloak Authorization Services. -3. Allows Keycloak to evaluate resource-based permissions dynamically, by using attributes such as resource name, identifier, or URI. +- It uses the `quarkus-oidc` extension to verify bearer tokens. +- It sends verified tokens to Keycloak Authorization Services. 
+- It allows Keycloak to evaluate resource-based permissions dynamically by using attributes such as resource name, identifier, or URI. By externalizing authorization decisions, you can: @@ -36,7 +39,8 @@ By externalizing authorization decisions, you can: .Compatibility -This extension is compatible only with Quarkus xref:security-oidc-bearer-token-authentication.adoc[OIDC service applications]. It complements explicit mechanisms such as role-based access control with dynamic authorization policies. +This extension is compatible only with Quarkus xref:security-oidc-bearer-token-authentication.adoc[OIDC service applications]. +It complements explicit mechanisms such as role-based access control with dynamic authorization policies. .Key Features @@ -48,8 +52,8 @@ This extension is compatible only with Quarkus xref:security-oidc-bearer-token-a Before using this extension, ensure the following: -1. Keycloak Authorization Services is enabled in your Keycloak instance. -2. Your Quarkus application includes the `quarkus-keycloak-authorization` extension. +. Keycloak Authorization Services is enabled in your Keycloak instance. +. Your Quarkus application includes the `quarkus-keycloak-authorization` extension. For detailed steps, see the xref:security-oidc-bearer-token-authentication.adoc[OIDC Bearer Token Authentication] guide. @@ -75,7 +79,8 @@ This example demonstrates a simple microservice setup with two protected endpoin .Token-based access control -Access to these endpoints is controlled by using bearer tokens. To gain access, the following conditions must be met: +Access to these endpoints is controlled by using bearer tokens. +To gain access, the following conditions must be met: - **Valid token**: The token must have a correct signature, a valid expiration date, and the appropriate audience. - **Trust**: The microservice must trust the issuing Keycloak server. @@ -91,8 +96,8 @@ For `/api/users/me`: - **Access policy**: Open to users with a valid bearer token and the `user` role. - **Response**: Returns user details as a JSON object derived from the token. - -Example response: ++ +.Example response [source,json] ---- { @@ -104,13 +109,15 @@ Example response: } ---- + For `/api/admin`: - *Access policy*: Restricted to users with a valid bearer token and the `admin` role. .Decoupled authorization -This example highlights the use of role-based access control (RBAC) policies to protect resources. Key points include: +This example highlights the use of role-based access control (RBAC) policies to protect resources. +Key points include: - *Policy flexibility*: Keycloak supports various policy types, such as attribute-based and custom policies, enabling fine-grained control. - *Decoupled application logic*: Authorization policies are managed entirely by Keycloak, allowing your application to focus on its core functionality. @@ -132,11 +139,12 @@ To get started, create a new project by using the following command: :create-app-extensions: oidc,keycloak-authorization,rest-jackson include::{includes}/devtools/create-app.adoc[] -This command generates a new project with the `keycloak-authorization` extension. The extension integrates a Keycloak Adapter into your Quarkus application, providing the necessary capabilities to interact with a Keycloak server and perform bearer token authorization. +This command generates a new project with the `keycloak-authorization` extension. 
+The extension integrates a Keycloak Adapter into your Quarkus application, providing the necessary capabilities to interact with a Keycloak server and perform bearer token authorization. .Adding extensions to an existing project -If you already have an existing Quarkus project, you can add the `oidc` and `keycloak-authorization` extensions by running the following command in your project’s base directory: +If you already have an existing Quarkus project, you can add the `oidc` and `keycloak-authorization` extensions by running the following command in your project's base directory: :add-extension-extensions: oidc,keycloak-authorization include::{includes}/devtools/extension-add.adoc[] @@ -165,7 +173,8 @@ implementation("io.quarkus:quarkus-keycloak-authorization") .Implementing the `/api/users/me` endpoint -Start by implementing the `/api/users/me` endpoint. The following code defines a Jakarta REST resource that provides user details: +Start by implementing the `/api/users/me` endpoint. +The following code defines a Jakarta REST resource that provides user details: [source,java] ---- @@ -209,7 +218,8 @@ public class UsersResource { .Implementing the `/api/admin` endpoint -Next, define the `/api/admin` endpoint. The following code represents a simple Jakarta REST resource protected with authentication: +Next, define the `/api/admin` endpoint. +The following code represents a simple Jakarta REST resource protected with authentication: [source,java] ---- @@ -236,7 +246,8 @@ public class AdminResource { .Role-based access control with Keycloak -Notice that explicit annotations such as `@RolesAllowed` are not defined to enforce access control for the resources. Instead, the `keycloak-authorization` extension dynamically maps the URIs of protected resources in Keycloak. +Notice that explicit annotations such as `@RolesAllowed` are not defined to enforce access control for the resources. +Instead, the `keycloak-authorization` extension dynamically maps the URIs of protected resources in Keycloak. Access control is managed as follows: @@ -247,7 +258,8 @@ This decouples access control logic from the application code, making it easier == Configuring the application -You can use the OpenID Connect extension to configure the adapter settings through the `application.properties` file, typically located in the `src/main/resources` directory. Below is an example configuration: +You can use the OpenID Connect extension to configure the adapter settings through the `application.properties` file, typically located in the `src/main/resources` directory. +For example: [source,properties] ---- @@ -267,18 +279,20 @@ quarkus.keycloak.devservices.realm-path=quarkus-realm.json <6> <1> Specifies the URL of the Keycloak server and the realm used for authentication. <2> Identifies the client application within the Keycloak realm. <3> Defines the client secret for authentication with the Keycloak server. -<4> Disables TLS verification for development purposes. Not recommended for production. +<4> Disables TLS verification for development purposes, not recommended for production. <5> Enables the Keycloak policy enforcer to manage access control based on defined permissions. <6> Configures Dev Services to import a specified realm file, effective only in dev mode and not in JVM or native modes. [NOTE] ==== -Adding the `%prod.` profile prefix to `quarkus.oidc.auth-server-url` ensures that Dev Services for Keycloak automatically launches a container in development mode. For more details, see the <> section. 
+Adding the `%prod.` profile prefix to `quarkus.oidc.auth-server-url` ensures that Dev Services for Keycloak automatically launches a container in development mode. +For more details, see the <> section. ==== [NOTE] ==== -By default, applications using the `quarkus-oidc` extension are treated as `service` type applications. However, the extension also supports `web-app` type applications under the following conditions: +By default, applications using the `quarkus-oidc` extension are treated as `service` type applications. +However, the extension also supports `web-app` type applications under the following conditions: - The access token returned during the authorization code grant flow must be the source of roles (`quarkus.oidc.roles.source=accesstoken`). - Note: For `web-app` type applications, ID token roles are checked by default. @@ -309,21 +323,6 @@ docker run --name keycloak \ <1> For `keycloak.version`, ensure the version is `26.0.7` or later. <2> For Keycloak keystore, use the `keycloak-keystore.jks` file located at https://github.com/quarkusio/quarkus-quickstarts/blob/main/security-keycloak-authorization-quickstart/config/keycloak-keystore.jks[quarkus-quickstarts/security-keycloak-authorization-quickstart/config]. - -Try to access your Keycloak server at https://localhost:8543[localhost:8543]. - -To access the Keycloak Administration Console, log in as the `admin` user. -The username and password are both `admin`. - -Import the link:{quickstarts-tree-url}/security-keycloak-authorization-quickstart/config/quarkus-realm.json[realm configuration file] to create a new realm. -For more details, see the Keycloak documentation about how to https://www.keycloak.org/docs/latest/server_admin/index.html#_create-realm[create a new realm]. - -After importing the realm, you can see the resource permissions: - -image::keycloak-authorization-permissions.png[alt=Keycloak Authorization Permissions,role="center"] - -It explains why the endpoint has no `@RolesAllowed` annotations - the resource access permissions are set directly in Keycloak. - .Accessing the Keycloak server . Open your browser and navigate to https://localhost:8543[https://localhost:8543]. @@ -333,7 +332,8 @@ It explains why the endpoint has no `@RolesAllowed` annotations - the resource a .Importing the realm configuration -To create a new realm, import the link:{quickstarts-tree-url}/security-keycloak-authorization-quickstart/config/quarkus-realm.json[realm configuration file]. For detailed steps on creating realms, refer to the Keycloak documentation: https://www.keycloak.org/docs/latest/server_admin/index.html#_create-realm[Create a new realm]. +To create a new realm, import the link:{quickstarts-tree-url}/security-keycloak-authorization-quickstart/config/quarkus-realm.json[realm configuration file]. +For detailed steps on creating realms, refer to the Keycloak documentation: https://www.keycloak.org/docs/latest/server_admin/index.html#_create-realm[Create a new realm]. After importing the realm, you can review the resource permissions: @@ -341,7 +341,8 @@ image::keycloak-authorization-permissions.png[alt=Keycloak Authorization Permiss .Role of Keycloak in resource permissions -The resource access permissions are configured directly in Keycloak, which eliminates the need for `@RolesAllowed` annotations in your application code. This approach centralizes access control management within Keycloak, simplifying application maintenance and security updates. 
+The resource access permissions are configured directly in Keycloak, which eliminates the need for `@RolesAllowed` annotations in your application code. +This approach centralizes access control management within Keycloak, simplifying application maintenance and security updates. [[keycloak-dev-mode]] == Running the application in dev mode @@ -352,12 +353,12 @@ include::{includes}/devtools/dev.adoc[] xref:security-openid-connect-dev-services.adoc[Dev Services for Keycloak] starts a Keycloak container and imports the `quarkus-realm.json` configuration file. -Open a xref:dev-ui.adoc[Dev UI] available at http://localhost:8080/q/dev-ui[/q/dev-ui] and click a `Provider: Keycloak` link in an `OpenID Connect` `Dev UI` card. +Open a xref:dev-ui.adoc[Dev UI] available at http://localhost:8080/q/dev-ui[/q/dev-ui] and click a **Provider: Keycloak** link on an **OpenID Connect** card in the Dev UI. .Interacting with Dev UI . Open the xref:dev-ui.adoc[Dev UI] at http://localhost:8080/q/dev-ui[/q/dev-ui]. -. Click the `Provider: Keycloak` link within the `OpenID Connect` Dev UI card. +. Click the **Provider: Keycloak** link within the **OpenID Connect** card in the Dev UI. .Testing user permissions @@ -374,17 +375,19 @@ When prompted to log in to a `Single Page Application` provided by `OpenID Conne If you started Dev Services for Keycloak without importing a realm file such as link:{quickstarts-tree-url}/security-keycloak-authorization-quickstart/config/quarkus-realm.json[quarkus-realm.json], create a default `quarkus` realm without Keycloak authorization policies: -. Select the `Keycloak Admin` link from the `OpenID Connect` Dev UI card. -. Log in to the Keycloak admin console. The username and password are both `admin`. +. Select the **Keycloak Admin** link from the **OpenID Connect** card in the Dev UI. +. Log in to the Keycloak admin console. +The username and password are both `admin`. . Follow the instructions at link:https://www.keycloak.org/docs/latest/authorization_services/index.html[Keycloak Authorization Services documentation] to enable authorization policies in the `quarkus` realm. -The `Keycloak Admin` link is easy to find in Dev UI: +The **Keycloak Admin** link is easy to find in Dev UI: image::dev-ui-oidc-keycloak-card.png[alt=Dev UI OpenID Connect Card,role="center"] .Adding custom JavaScript policies -If your application uses Keycloak authorization configured with link:https://www.keycloak.org/docs/latest/authorization_services/index.html#_policy_js[JavaScript policies] that are deployed in a JAR archive, Dev Services for Keycloak can transfer this archive to the Keycloak container. Use the following properties in `application.properties` to configure the transfer: +If your application uses Keycloak authorization configured with link:https://www.keycloak.org/docs/latest/authorization_services/index.html#_policy_js[JavaScript policies] that are deployed in a JAR archive, Dev Services for Keycloak can transfer this archive to the Keycloak container. +Use the following properties in `application.properties` to configure the transfer: [source,properties] ---- @@ -393,18 +396,19 @@ quarkus.keycloak.devservices.resource-aliases.policies=/policies.jar <1> # Map the policies archive to a specific location in the container quarkus.keycloak.devservices.resource-mappings.policies=/opt/keycloak/providers/policies.jar <2> ---- -<1> Creates a `policies` alias for the `/policies.jar` classpath resource. The policies archive can also be located on the file system. 
+<1> Creates a `policies` alias for the `/policies.jar` classpath resource. +The policies archive can also be located on the file system. <2> Maps the policies archive to the `/opt/keycloak/providers/policies.jar` location inside the Keycloak container. == Running the application in JVM mode -After exploring the application in dev mode, you can run it as a standard Java application. +After exploring the application in dev mode, you can run it as a standard Java application in JVM mode. -First compile it: +Compile the application: include::{includes}/devtools/build.adoc[] -Then run it: +Run the application: [source,bash] ---- @@ -413,15 +417,18 @@ java -jar target/quarkus-app/quarkus-run.jar == Running the application in native mode -This same demo can be compiled into native code; no modifications are required. +You can compile this demo into native code; no modifications are required. + +Native compilation eliminates the need for a JVM in the production environment because the produced binary includes the runtime and is optimized for minimal resource usage. -This implies that you no longer need to install a JVM on your production environment because the runtime technology is included in the produced binary and optimized to run with minimal resources. +Compilation takes longer and is disabled by default. +To build the application, enable the `native` profile. -Compilation takes a bit longer, so this step is turned off by default; let's build again by enabling the `native` profile: +Build the native binary: include::{includes}/devtools/build-native.adoc[] -After a while, you can run this binary directly: +After a while, run the native binary: [source,bash] ---- @@ -431,11 +438,14 @@ After a while, you can run this binary directly: [[testing]] == Testing the application -See the preceding <> section about testing your application in a dev mode. +See the preceding <> section for instructions on testing your application in development mode. -You can test the application launched in JVM or native modes with `curl`. +You can test the application running in JVM or native modes by using `curl`. -The application uses bearer token authorization, and the first thing to do is obtain an access token from the Keycloak server to access the application resources: +.Obtaining an access token + +The application uses bearer token authorization. +To access its resources, first obtain an access token from the Keycloak server: [source,bash] ---- @@ -449,7 +459,8 @@ export access_token=$(\ [NOTE] ==== -When the `quarkus.oidc.authentication.user-info-required` property is set to `true` to require that an access token is used to request `UserInfo`, you must add a `scope=openid` query parameter to the token grant request command, for example: +If the `quarkus.oidc.authentication.user-info-required` property is set to `true`, the application requires that an access token is used to request `UserInfo`. +In that case, you must add the `scope=openid` query parameter to the token grant request; for example: [source,bash] ---- @@ -462,11 +473,11 @@ export access_token=$(\ ---- ==== -The preceding example obtains an access token for user `alice`. +The preceding example obtains an access token for the user `alice`. + +.Accessing the `/api/users/me` endpoint -Any user is allowed to access the -`http://localhost:8080/api/users/me` endpoint, -which returns a JSON payload with details about the user. 
+Any user with a valid access token can access the `http://localhost:8080/api/users/me` endpoint, which returns a JSON payload with user details:

[source,bash]
----
@@ -475,17 +486,21 @@ curl -v -X GET \
   -H "Authorization: Bearer "$access_token
----

-The `http://localhost:8080/api/admin` endpoint can only be accessed by users with the `admin` role.
-If you try to access this endpoint with the previously issued access token, you get a `403` response from the server.
+.Accessing the `/api/admin` endpoint
+
+The `http://localhost:8080/api/admin` endpoint is restricted to users with the `admin` role.
+If you try to access this endpoint with the previously issued access token, the server returns a `403 Forbidden` response:

[source,bash]
----
- curl -v -X GET \
-   http://localhost:8080/api/admin \
-   -H "Authorization: Bearer "$access_token
+curl -v -X GET \
+  http://localhost:8080/api/admin \
+  -H "Authorization: Bearer "$access_token
----

-To access the admin endpoint, get a token for the `admin` user:
+.Getting an admin access token
+
+To access the admin endpoint, get an access token for the `admin` user:

[source,bash]
----
@@ -499,8 +514,8 @@ export access_token=$(\

== Injecting the authorization client

-In some cases, using the link:https://www.keycloak.org/docs/latest/authorization_services/#_service_client_api[Keycloak Authorization Client Java API] is beneficial for tasks such as managing resources and obtaining permissions directly from Keycloak.
-For this purpose, you can inject an `AuthzClient` instance into your beans as follows:
+You can use the link:https://www.keycloak.org/docs/latest/authorization_services/#_service_client_api[Keycloak Authorization Client Java API] for advanced tasks, such as managing resources and getting permissions directly from Keycloak.
+To enable this functionality, inject an `AuthzClient` instance into your beans:

[source,java]
----
@@ -512,18 +527,24 @@ public class ProtectedResource {

-NOTE: If you want to use the `AuthzClient` directly, set `quarkus.keycloak.policy-enforcer.enable=true`; otherwise, no bean is available for injection.

+[NOTE]
+====
+To use the `AuthzClient` directly, set `quarkus.keycloak.policy-enforcer.enable=true`.
+Otherwise, no bean is available for injection.
+====
+
== Mapping protected resources

-By default, the extension fetches resources on-demand from Keycloak, using their URI to identify and map the resources in your application that need to be protected.
+By default, the extension fetches resources from Keycloak on demand, using their URI to identify and map the application resources that require protection.

-To disable this on-demand fetching and instead pre-load resources at startup, apply the following configuration setting:
+To disable on-demand fetching and instead pre-load resources at startup, configure the following property:

[source,properties]
----
quarkus.keycloak.policy-enforcer.lazy-load-paths=false
----

-The time required to pre-load resources from Keycloak at startup varies based on their quantity, potentially affecting your application's initial load time."
+The time required to pre-load resources from Keycloak during startup depends on the number of resources, which might impact your application's initial load time.
== More about configuring protected resources @@ -534,10 +555,10 @@ For more details, check the xref:security-overview.adoc[Quarkus Security overvie == Access to public resources -To enable access to a public resource without the `quarkus-keycloak-authorization` applying its policies, create a `permit` HTTP Policy configuration in `application.properties`. +To allow access to a public resource without applying `quarkus-keycloak-authorization` policies, define a `permit` HTTP policy in the `application.properties` file. For more information, see the xref:security-authorize-web-endpoints-reference.adoc[Authorization of web endpoints] guide. -There's no need to deactivate policy checks for a Keycloak Authorization Policy with settings such as these: +You do not need to disable policy checks for a Keycloak Authorization Policy when using configurations like the following: [source,properties] ---- @@ -545,7 +566,7 @@ quarkus.keycloak.policy-enforcer.paths.1.paths=/api/public quarkus.keycloak.policy-enforcer.paths.1.enforcement-mode=DISABLED ---- -To block access to the public resource to anonymous users, you can create an enforcing Keycloak Authorization Policy: +To restrict access to public resources for anonymous users, define an enforcing Keycloak Authorization Policy: [source,properties] ---- @@ -557,8 +578,8 @@ Only the default tenant configuration applies when controlling anonymous access == Checking permission scopes programmatically -In addition to resource permissions, you can specify method scopes. -The scope usually represents an action that can be performed on a resource. +In addition to resource permissions, you can define method scopes. +A scope typically represents an action performed on a resource. You can create an enforcing Keycloak Authorization Policy with a method scope. For example: @@ -574,10 +595,11 @@ quarkus.keycloak.policy-enforcer.paths.1.methods.get.scopes=read <1> quarkus.keycloak.policy-enforcer.paths.2.name=Scope Permission Resource quarkus.keycloak.policy-enforcer.paths.2.paths=/api/protected/programmatic-way,/api/protected/annotation-way ---- -<1> User must have resource permission 'Scope Permission Resource' and scope 'read' +<1> User must have resource permission `Scope Permission Resource` and scope `read` + +The Keycloak Policy Enforcer secures the `/api/protected/standard-way` request path, removing the need for annotations such as `@RolesAllowed`. +However, in some scenarios, you may need to perform a programmatic check. -The Keycloak Policy Enforcer now secures the `/api/protected/standard-way` request path, eliminating the need for additional annotations such as `@RolesAllowed`. -However, in certain scenarios, a programmatic check is necessary. You can achieve this by injecting a `SecurityIdentity` instance into your beans, as shown in the following example. Or, you can get the same result by annotating the resource method with `@PermissionsAllowed`. The following example demonstrates three resource methods, each requiring the same `read` scope: @@ -636,15 +658,14 @@ public class ProtectedResource { } } ---- -<1> Request sub-path `/standard-way` requires both resource permission and scope `read` according to the configuration properties we previously set in the `application.properties`. -<2> Request sub-path `/programmatic-way` only requires permission `Scope Permission Resource`, but we can enforce scope with `SecurityIdentity#checkPermission`. 
-<3> The `@PermissionsAllowed` annotation only grants access to the requests with permission `Scope Permission Resource` and scope `read`.
+<1> The `/standard-way` sub-path requires both the resource permission and the `read` scope, based on the configuration set in the `application.properties` file.
+<2> The `/programmatic-way` sub-path checks only for the `Scope Permission Resource` permission by default. However, you can enforce additional constraints, such as scope requirements, by using `SecurityIdentity#checkPermission`.
+<3> The `@PermissionsAllowed` annotation at `/annotation-way` restricts access to requests that have the `Scope Permission Resource` permission along with the `read` scope.

For more information, see the section xref:security-authorize-web-endpoints-reference.adoc#standard-security-annotations[Authorization using annotations] of the Security Authorization guide.

== Multi-tenancy

You can set up policy enforcer configurations for each tenant, similar to how it is done with xref:security-openid-connect-multitenancy.adoc[OpenID Connect (OIDC) multi-tenancy].
-
For example:

[source,properties]
----
@@ -689,10 +710,7 @@ quarkus.keycloak.webapp-tenant.policy-enforcer.paths.1.claim-information-point.c

== Dynamic tenant configuration resolution

-If you need a more dynamic configuration for the different tenants you want to support and don’t want to end up
-with multiple entries in your configuration file, you can use the `io.quarkus.keycloak.pep.TenantPolicyConfigResolver`.
-
-This interface allows you to dynamically create tenant configurations at runtime:
+To create configurations for multiple tenants while avoiding excessive entries in your configuration file, you can use the `io.quarkus.keycloak.pep.TenantPolicyConfigResolver` interface to define them programmatically at runtime.

[source,java]
----
@@ -742,9 +760,9 @@ public class CustomTenantPolicyConfigResolver implements TenantPolicyConfigResol
 }
 }
 ----
-<1> Create or update the `/enhanced-config` path in the default tenant config.
-<2> Add `/new-config` path into tenant config populated with documented configuration default values.
-<3> Use default static tenant configuration resolution based on the `application.properties` file and other SmallRye Config configuration sources.
+<1> Define or update the `/enhanced-config` path in the default tenant configuration.
+<2> Add the `/new-config` path to the tenant configuration, including custom claims and values that are populated programmatically.
+<3> Fall back to the default static tenant configuration resolution defined in the `application.properties` file or other SmallRye Config sources.
== Configuration reference diff --git a/docs/src/main/asciidoc/security-oidc-auth0-tutorial.adoc b/docs/src/main/asciidoc/security-oidc-auth0-tutorial.adoc index 66bdaa41cc42a..9d8bf00278459 100644 --- a/docs/src/main/asciidoc/security-oidc-auth0-tutorial.adoc +++ b/docs/src/main/asciidoc/security-oidc-auth0-tutorial.adoc @@ -708,7 +708,7 @@ import jakarta.ws.rs.Produces; import jakarta.ws.rs.core.MediaType; import org.eclipse.microprofile.rest.client.inject.RegisterRestClient; -import io.quarkus.oidc.token.propagation.AccessToken; +import io.quarkus.oidc.token.propagation.common.AccessToken; @RegisterRestClient @AccessToken <1> diff --git a/docs/src/main/asciidoc/security-oidc-bearer-token-authentication-tutorial.adoc b/docs/src/main/asciidoc/security-oidc-bearer-token-authentication-tutorial.adoc index fbcb7fd5fbd91..7d8f81e56e933 100644 --- a/docs/src/main/asciidoc/security-oidc-bearer-token-authentication-tutorial.adoc +++ b/docs/src/main/asciidoc/security-oidc-bearer-token-authentication-tutorial.adoc @@ -231,7 +231,16 @@ For more information, see the Keycloak documentation about link:https://www.keyc ifndef::no-quarkus-keycloak-admin-client[] [NOTE] ==== -If you want to use the Keycloak Admin Client to configure your server from your application, you need to include either the `quarkus-keycloak-admin-rest-client` or the `quarkus-keycloak-admin-resteasy-client` (if the application uses `quarkus-rest-client`) extension. +To configure the Keycloak server from your application by using the Keycloak Admin Client, include one of the following extensions based on your setup: + +- *For Quarkus REST*: If you are using `quarkus-rest`, `quarkus-rest-client`, or both, include the `quarkus-keycloak-admin-rest-client` extension. + +- *For RESTEasy Classic*: If you are using `quarkus-resteasy`, `quarkus-resteasy-client`, or both, include the `quarkus-keycloak-admin-resteasy-client` extension. + +- *If no REST layer is explicitly used*: It is recommended to include the `quarkus-keycloak-admin-rest-client` extension. + +These guidelines ensure seamless integration of the Keycloak Admin Client with your REST framework, whether you are working with a REST server, a REST client, or both. + For more information, see the xref:security-keycloak-admin-client.adoc[Quarkus Keycloak Admin Client] guide. ==== endif::no-quarkus-keycloak-admin-client[] diff --git a/docs/src/main/asciidoc/security-oidc-bearer-token-authentication.adoc b/docs/src/main/asciidoc/security-oidc-bearer-token-authentication.adoc index 0e3c6a1bfe588..ab1ee4e5c4024 100644 --- a/docs/src/main/asciidoc/security-oidc-bearer-token-authentication.adoc +++ b/docs/src/main/asciidoc/security-oidc-bearer-token-authentication.adoc @@ -411,14 +411,28 @@ For example, if you work with Keycloak, you can use `keycloak.js` to authenticat keycloak-spa - - + - + ---- +[NOTE] +==== +To enable authentication for this SPA Keycloak example, disable *Client authentication* and set *Web origins* to `http://localhost:8080`. These settings allow Keycloak's CORS policy to communicate with your Quarkus application. +The code provides an example of building Quarkus single-page applications integrated with Keycloak. For more details about creating single-page applications integrating Keycloak, refer to the official link:https://www.keycloak.org/securing-apps/javascript-adapter[Keycloak JavaScript adapter documentation]. 
+==== + === Cross-origin resource sharing If you plan to use your OIDC `service` application from a single-page application running on a different domain, you must configure cross-origin resource sharing (CORS). diff --git a/docs/src/main/asciidoc/security-oidc-code-flow-authentication.adoc b/docs/src/main/asciidoc/security-oidc-code-flow-authentication.adoc index 79b5fb51a0387..8cb5fb41e6606 100644 --- a/docs/src/main/asciidoc/security-oidc-code-flow-authentication.adoc +++ b/docs/src/main/asciidoc/security-oidc-code-flow-authentication.adoc @@ -50,6 +50,14 @@ For information about how to support multiple tenants, see xref:security-openid- == Using the authorization code flow mechanism +=== Configuring Quarkus to support authorization code flow + +To enable an authorization code flow authentication, the `quarkus.oidc.application-type` property must be set to `web-app`. +Usually, the Quarkus OIDC `web-app` application type must be set when your Quarkus application is a frontend application which serves HTML pages and requires an OIDC single sign-on login. +For the Quarkus OIDC `web-app` application, the authorization code flow is defined as the preferred method for authenticating users. +When your application serves HTML pages and provides REST API at the same time, and requires both the authorization code flow authentication and xref:security-oidc-bearer-token-authentication.adoc[the bearer access token authentication], the `quarkus.oidc.application-type` property can be set to `hybrid` instead. +In this case, the authorization code flow is only triggered when an HTTP `Authorization` request header with a `Bearer` authorization scheme containing a bearer access token is not set. + === Configuring access to the OIDC provider endpoint The OIDC `web-app` application requires URLs of the OIDC provider's authorization, token, `JsonWebKey` (JWK) set, and possibly the `UserInfo`, introspection and end-session (RP-initiated logout) endpoints. @@ -789,7 +797,7 @@ It applies to ID tokens and also to access tokens in a JWT format, if the `web-a [[jose4j-validator]] ==== Jose4j Validator -You can register a custom [Jose4j Validator] to customize the JWT claim verification process. See xref:security-oidc-bearer-token-authentication.adoc#jose4j-validator[Jose4j] section for more information. +You can register a custom Jose4j Validator to customize the JWT claim verification process. See the xref:security-oidc-bearer-token-authentication.adoc#jose4j-validator[Jose4j] section for more information. === Proof Key for Code Exchange (PKCE) @@ -1093,6 +1101,7 @@ For more information, refer to the xref:hibernate-orm.adoc[Hibernate ORM] guide. <2> You can choose a column length depending on the length of your tokens. endif::no-quarkus-oidc-db-token-state-manager[] +ifndef::no-quarkus-oidc-redis-token-state-manager[] [[redis-token-state-manager]] ==== Redis TokenStateManager @@ -1124,6 +1133,7 @@ quarkus.oidc.redis-token-state-manager.redis-client-name=my-redis-client <1> <1> The `my-redis-client` name must correspond to the Redis client config key specified with `quarkus.redis.my-redis-client.*` configuration properties. Please refer to the xref:redis-reference.adoc[Quarkus Redis Client reference] for information how to configure the Redis client. 
+endif::no-quarkus-oidc-redis-token-state-manager[] === Logout and expiration diff --git a/docs/src/main/asciidoc/security-openid-connect-client-reference.adoc b/docs/src/main/asciidoc/security-openid-connect-client-reference.adoc index 41387d39bc78e..5c89d02870989 100644 --- a/docs/src/main/asciidoc/security-openid-connect-client-reference.adoc +++ b/docs/src/main/asciidoc/security-openid-connect-client-reference.adoc @@ -1243,12 +1243,12 @@ public class TokenEndpointResponseFilter implements OidcResponseFilter { The `quarkus-rest-client-oidc-token-propagation` extension provides a REST Client filter, `io.quarkus.oidc.token.propagation.reactive.AccessTokenRequestReactiveFilter`, that simplifies the propagation of authentication information. This client propagates the xref:security-oidc-bearer-token-authentication.adoc[bearer token] present in the currently active request or the token acquired from the xref:security-oidc-code-flow-authentication.adoc[authorization code flow mechanism] as the HTTP `Authorization` header's `Bearer` scheme value. -You can selectively register `AccessTokenRequestReactiveFilter` by using either `io.quarkus.oidc.token.propagation.AccessToken` or `org.eclipse.microprofile.rest.client.annotation.RegisterProvider` annotation, for example: +You can selectively register `AccessTokenRequestReactiveFilter` by using either `io.quarkus.oidc.token.propagation.common.AccessToken` or `org.eclipse.microprofile.rest.client.annotation.RegisterProvider` annotation, for example: [source,java] ---- import org.eclipse.microprofile.rest.client.inject.RegisterRestClient; -import io.quarkus.oidc.token.propagation.AccessToken; +import io.quarkus.oidc.token.propagation.common.AccessToken; import jakarta.ws.rs.GET; import jakarta.ws.rs.Path; @@ -1297,7 +1297,7 @@ quarkus.oidc-client.grant-options.exchange.audience=quarkus-app-exchange quarkus.resteasy-client-oidc-token-propagation.exchange-token=true <1> ---- -<1> Please note that the `exchange-token` configuration property is ignored when the OidcClient name is set with the `io.quarkus.oidc.token.propagation.AccessToken#exchangeTokenClient` annotation attribute. +<1> Please note that the `exchange-token` configuration property is ignored when the OidcClient name is set with the `io.quarkus.oidc.token.propagation.common.AccessToken#exchangeTokenClient` annotation attribute. NOTE: `AccessTokenRequestReactiveFilter` will use `OidcClient` to exchange the current token, and you can use `quarkus.oidc-client.grant-options.exchange` to set the additional exchange properties expected by your OpenID Connect Provider. @@ -1316,7 +1316,7 @@ quarkus.oidc-client.scopes=https://graph.microsoft.com/user.read,offline_access quarkus.resteasy-client-oidc-token-propagation.exchange-token=true ---- -`AccessTokenRequestReactiveFilter` uses a default `OidcClient` by default. A named `OidcClient` can be selected with a `quarkus.rest-client-oidc-token-propagation.client-name` configuration property or with the `io.quarkus.oidc.token.propagation.AccessToken#exchangeTokenClient` annotation attribute. +`AccessTokenRequestReactiveFilter` uses a default `OidcClient` by default. A named `OidcClient` can be selected with a `quarkus.rest-client-oidc-token-propagation.client-name` configuration property or with the `io.quarkus.oidc.token.propagation.common.AccessToken#exchangeTokenClient` annotation attribute. 
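For illustration only, here is a minimal sketch of selecting a named `OidcClient` through the annotation attribute mentioned above; the `exchange-client` name and the `ExchangeService` interface are hypothetical placeholders rather than part of the guide:

[source,java]
----
import jakarta.ws.rs.GET;
import jakarta.ws.rs.Path;

import org.eclipse.microprofile.rest.client.inject.RegisterRestClient;

import io.quarkus.oidc.token.propagation.common.AccessToken;

@RegisterRestClient
@AccessToken(exchangeTokenClient = "exchange-client") // hypothetical named OidcClient
@Path("/exchange")
public interface ExchangeService {

    @GET
    String getName();
}
----

The same selection can also be made without code changes by setting the `quarkus.rest-client-oidc-token-propagation.client-name` configuration property to the `OidcClient` name.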
[[token-propagation-resteasy]] == Token Propagation for RESTEasy Classic @@ -1337,12 +1337,12 @@ The following sections show how `AccessTokenRequestFilter` and `JsonWebTokenRequ `AccessTokenRequestFilter` treats all tokens as Strings, and as such, it can work with both JWT and opaque tokens. -You can selectively register `AccessTokenRequestFilter` by using either `io.quarkus.oidc.token.propagation.AccessToken` or `org.eclipse.microprofile.rest.client.annotation.RegisterProvider`, for example: +You can selectively register `AccessTokenRequestFilter` by using either `io.quarkus.oidc.token.propagation.common.AccessToken` or `org.eclipse.microprofile.rest.client.annotation.RegisterProvider`, for example: [source,java] ---- import org.eclipse.microprofile.rest.client.inject.RegisterRestClient; -import io.quarkus.oidc.token.propagation.AccessToken; +import io.quarkus.oidc.token.propagation.common.AccessToken; import jakarta.ws.rs.GET; import jakarta.ws.rs.Path; diff --git a/docs/src/main/asciidoc/security-openid-connect-client.adoc b/docs/src/main/asciidoc/security-openid-connect-client.adoc index a38be8999326b..24e648e925559 100644 --- a/docs/src/main/asciidoc/security-openid-connect-client.adoc +++ b/docs/src/main/asciidoc/security-openid-connect-client.adoc @@ -238,7 +238,7 @@ import jakarta.ws.rs.Produces; import org.eclipse.microprofile.rest.client.inject.RegisterRestClient; -import io.quarkus.oidc.token.propagation.AccessToken; +import io.quarkus.oidc.token.propagation.common.AccessToken; import io.smallrye.mutiny.Uni; @@ -275,9 +275,9 @@ import java.util.Map; import org.eclipse.microprofile.config.inject.ConfigProperty; import io.quarkus.oidc.client.OidcClient; -import io.quarkus.oidc.client.OidcClientConfig; -import io.quarkus.oidc.client.OidcClientConfig.Grant.Type; import io.quarkus.oidc.client.OidcClients; +import io.quarkus.oidc.client.runtime.OidcClientConfig; +import io.quarkus.oidc.client.runtime.OidcClientConfig.Grant.Type; import io.quarkus.runtime.StartupEvent; import io.smallrye.mutiny.Uni; import jakarta.enterprise.context.ApplicationScoped; @@ -532,11 +532,11 @@ Open a xref:dev-ui.adoc[Dev UI] available at http://localhost:8080/q/dev-ui[/q/d When asked, log in to a `Single Page Application` provided by the OpenID Connect Dev UI: - * Log in as `alice`, with the password, `alice`. + * Log in as `admin`, with the password, `admin`. This user has both `admin` and `user` roles. ** Access `/frontend/user-name-with-propagated-token`, which returns `200`. ** Access `/frontend/admin-name-with-propagated-token`, which returns `200`. - * Log out and back in as `bob` with the password, `bob`. + * Log out and back in as `alice` with the password, `alice`. This user has a `user` role. ** Access `/frontend/user-name-with-propagated-token`, which returns `200`. ** Access `/frontend/admin-name-with-propagated-token`, which returns `403`. diff --git a/docs/src/main/asciidoc/security-openid-connect-dev-services.adoc b/docs/src/main/asciidoc/security-openid-connect-dev-services.adoc index 5bfb9256f947b..ae743b9907460 100644 --- a/docs/src/main/asciidoc/security-openid-connect-dev-services.adoc +++ b/docs/src/main/asciidoc/security-openid-connect-dev-services.adoc @@ -126,6 +126,16 @@ Sometimes, ID tokens are forwarded to application frontends as bearer tokens. This helps endpoints identify the user logged into SPA or perform out-of-band token verification. Choose the *With ID Token* option in such cases. 
+[NOTE] +==== +When you import custom Keycloak realms, you may find, after inspecting both the access and ID tokens in Dev UI, that only the access token contains the list of roles in its `groups` claim. +This information is important for accessing endpoints that are secured with the `@RolesAllowed` annotation. +To tell Keycloak to include this information in the ID token, add the `microprofile-jwt` scope to the list of client scopes in the Keycloak admin console. +Alternatively, add the `microprofile-jwt` scope to the list of required scopes using the `quarkus.oidc.authentication.scopes` property. + +For more information, see the https://www.keycloak.org/docs/latest/server_admin/#protocol[Keycloak server administration guide]. +==== + Manually entering the service paths is not ideal. For information about enabling Swagger or GraphQL UI for testing the service with the access token already acquired by the OIDC Dev UI, see the <> section. diff --git a/docs/src/main/asciidoc/security-openid-connect-providers.adoc b/docs/src/main/asciidoc/security-openid-connect-providers.adoc index d7ea4feb8d638..e277d5678f570 100644 --- a/docs/src/main/asciidoc/security-openid-connect-providers.adoc +++ b/docs/src/main/asciidoc/security-openid-connect-providers.adoc @@ -631,7 +631,7 @@ package org.acme.calendar; import org.eclipse.microprofile.rest.client.inject.RegisterRestClient; -import io.quarkus.oidc.token.propagation.AccessToken; +import io.quarkus.oidc.token.propagation.common.AccessToken; import io.smallrye.mutiny.Uni; import jakarta.ws.rs.Consumes; import jakarta.ws.rs.POST; diff --git a/docs/src/main/asciidoc/smallrye-fault-tolerance.adoc b/docs/src/main/asciidoc/smallrye-fault-tolerance.adoc index e7823b4ba164e..33d98302ab6b6 100644 --- a/docs/src/main/asciidoc/smallrye-fault-tolerance.adoc +++ b/docs/src/main/asciidoc/smallrye-fault-tolerance.adoc @@ -626,6 +626,9 @@ The `""` below is: * `"/"` for per method configuration * `""` for per class configuration * `global` for global configuration +* It may also be an `@Identifier` value for configuring a programmatically constructed `Guard` or `TypedGuard` that is used declaratively through `@ApplyGuard`. +Note that in this case, the `Guard`/`TypedGuard` should *NEVER* be used programmatically, because configuration is applied on creation, which happens lazily, on the first use. +This first use must be through `@ApplyGuard`, otherwise configuration would be ignored. include::{generated-dir}/config/quarkus-smallrye-fault-tolerance.adoc[opts=optional, leveloffset=+1] diff --git a/docs/src/main/asciidoc/smallrye-graphql-client.adoc b/docs/src/main/asciidoc/smallrye-graphql-client.adoc index fab9e282f1795..7a1e692d3e668 100644 --- a/docs/src/main/asciidoc/smallrye-graphql-client.adoc +++ b/docs/src/main/asciidoc/smallrye-graphql-client.adoc @@ -222,7 +222,7 @@ specify that within the `@GraphQLClientApi` annotation (by setting the `endpoint or move this over to the configuration file, `application.properties`: ---- -quarkus.smallrye-graphql-client.star-wars-typesafe.url=https://swapi-graphql.netlify.app/.netlify/functions/index +quarkus.smallrye-graphql-client.star-wars-typesafe.url=https://swapi-graphql.netlify.app/graphql ---- NOTE: During *tests only*, the URL is an optional property, and if it's not specified, Quarkus will assume @@ -284,7 +284,7 @@ representations of the GraphQL types and documents. 
The client API interface is
We still need to configure the URL for the client, so let's put this into `application.properties`:

----
-quarkus.smallrye-graphql-client.star-wars-dynamic.url=https://swapi-graphql.netlify.app/.netlify/functions/index
+quarkus.smallrye-graphql-client.star-wars-dynamic.url=https://swapi-graphql.netlify.app/graphql
----

We decided to name the client `star-wars-dynamic`. We will use this name when injecting a dynamic client
diff --git a/docs/src/main/asciidoc/smallrye-health.adoc b/docs/src/main/asciidoc/smallrye-health.adoc
index 6041797a540fc..efbdaf5c0b205 100644
--- a/docs/src/main/asciidoc/smallrye-health.adoc
+++ b/docs/src/main/asciidoc/smallrye-health.adoc
@@ -399,6 +399,37 @@ public class LivenessAsync implements AsyncHealthCheck {
 }
 ----
 
+== Health status change observers
+
+If you need to react to changes in the health status of your application, the SmallRye Health
+extension provides a CDI event that can notify you about individual health status changes.
+
+To observe health status changes, you can observe the `io.smallrye.health.api.event.HealthStatusChangeEvent`
+with the standard CDI observation mechanism. Since we cannot guarantee that the observer method
+always runs on a worker thread (meaning it can run on the event loop thread), it is
+recommended that you never block in the observer method.
+
+[source,java]
+----
+import io.smallrye.health.api.event.HealthStatusChangeEvent;
+
+import jakarta.enterprise.context.ApplicationScoped;
+import jakarta.enterprise.event.Observes;
+import jakarta.enterprise.inject.Default;
+
+import org.eclipse.microprofile.health.Liveness;
+import org.eclipse.microprofile.health.Readiness;
+
+@ApplicationScoped
+public class HealthObserver {
+
+    public void observeHealthChange(@Observes @Default HealthStatusChangeEvent event) {
+        ...
+    }
+
+    public void observeReadinessChange(@Observes @Readiness HealthStatusChangeEvent event) {
+        ...
+    }
+
+    public void observeLivenessChange(@Observes @Liveness HealthStatusChangeEvent event) {
+        ...
+    }
+}
+----
+
 == Extension health checks
 
 Some extension may provide default health checks, including the extension will automatically register its health checks.
diff --git a/docs/src/main/asciidoc/telemetry-micrometer-to-opentelemetry.adoc b/docs/src/main/asciidoc/telemetry-micrometer-to-opentelemetry.adoc
new file mode 100644
index 0000000000000..423ab1d8eb4eb
--- /dev/null
+++ b/docs/src/main/asciidoc/telemetry-micrometer-to-opentelemetry.adoc
@@ -0,0 +1,236 @@
+////
+This guide is maintained in the main Quarkus repository
+and pull requests should be submitted there:
+https://github.com/quarkusio/quarkus/tree/main/docs/src/main/asciidoc
+////
+[id=telemetry-micrometer-opentelemetry]
+= Micrometer and OpenTelemetry extension
+include::_attributes.adoc[]
+:extension-status: preview
+:diataxis-type: reference
+:categories: observability
+:summary: Guide to send Micrometer data to OpenTelemetry.
+:topics: observability,opentelemetry,metrics,micrometer,tracing,logs
+:extensions: io.quarkus:quarkus-micrometer-opentelemetry
+
+This extension provides support for both Micrometer and OpenTelemetry in Quarkus applications. It streamlines integration by incorporating both extensions along with a bridge that enables sending Micrometer metrics via OpenTelemetry.
+
+include::{includes}/extension-status.adoc[]
+
+[NOTE]
+====
+- The xref:telemetry-micrometer.adoc[Micrometer Guide] is available for detailed information about the Micrometer extension.
+- The xref:opentelemetry.adoc[OpenTelemetry Guide] provides information about the OpenTelemetry extension.
+====
+
+The extension allows the normal use of the Micrometer API but has the metrics handled by the OpenTelemetry extension.
+ +As an example, the `@Timed` annotation from Micrometer is used to measure the execution time of a method: +[source,java] +---- +import io.micrometer.core.annotation.Timed; +//... +@Timed(name = "timer_metric") +public String timer() { + return "OK"; +} +---- +The output telemetry data is handled by the OpenTelemetry SDK and sent by the `quarkus-opentelemetry` extension exporter using the OTLP protocol. + +This reduces the overhead of having an independent Micrometer registry plus the OpenTelemetry SDK in memory for the same application when both `quarkus-micrometer` and `quarkus-opentelemetry` extensions are used independently. + +*The OpenTelemetry SDK will handle all metrics.* Both Micrometer metrics (manual or automatic) and OpenTelemetry metrics can be used. All are available with this single extension. + +All the configurations from the OpenTelemetry and Micrometer extensions are available with `quarkus-micrometer-opentelemetry`. + +The bridge is more than the simple OTLP registry found in Quarkiverse. In this extension, the OpenTelemetry SDK provides a Micrometer registry implementation based on the https://github.com/open-telemetry/opentelemetry-java-instrumentation/tree/main/instrumentation/micrometer/micrometer-1.5/library[`micrometer/micrometer-1.5`] OpenTelemetry instrumentation library. + +== Usage + +If you already have your Quarkus project configured, you can add the `quarkus-micrometer-opentelemetry` extension to your project by running the following command in your project base directory: + +:add-extension-extensions: micrometer-opentelemetry +include::{includes}/devtools/extension-add.adoc[] + +This will add the following to your build file: + +[source,xml,role="primary asciidoc-tabs-target-sync-cli asciidoc-tabs-target-sync-maven"] +.pom.xml +---- +<dependency> + <groupId>io.quarkus</groupId> + <artifactId>quarkus-micrometer-opentelemetry</artifactId> +</dependency> +---- + +[source,gradle,role="secondary asciidoc-tabs-target-sync-gradle"] +.build.gradle +---- +implementation("io.quarkus:quarkus-micrometer-opentelemetry") +---- + +== Configuration + +When the extension is present, Micrometer is enabled by default as are OpenTelemetry tracing, metrics and logs. + +OpenTelemetry metrics auto-instrumentation for HTTP server and JVM metrics is disabled by default because those metrics can be collected by Micrometer. + +Specific automatic Micrometer metrics are all disabled by default and can be enabled by setting, for example, in the case of JVM metrics: +[source,properties] +---- +quarkus.micrometer.binder.jvm=true +---- +in the `application.properties` file. + +For this and other properties you can use with the extension, please refer to: + +* xref:telemetry-micrometer.adoc#configuration-reference[Micrometer metrics configuration properties] +* xref:opentelemetry.adoc#configuration-reference[OpenTelemetry configuration properties] + +== Metric differences between Micrometer and OpenTelemetry + +=== API differences +The metrics produced with each framework follow different APIs and the mapping is not 1:1. + +One fundamental API difference is that Micrometer uses a https://docs.micrometer.io/micrometer/reference/concepts/timers.html[Timer] and OpenTelemetry uses a https://opentelemetry.io/docs/specs/otel/metrics/data-model/#histogram[Histogram] to record latency (execution time) metrics and the frequency of the events.
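For illustration, here is a minimal sketch of recording a latency manually with the Micrometer `Timer` API; the bean name and the `checkout.duration` metric name are made up for the example. Based on the mapping described below, such a `Timer` is expected to be exported by the bridge as a Histogram plus a `.max` gauge.

[source,java]
----
import java.time.Duration;

import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.Timer;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.inject.Inject;

@ApplicationScoped
public class CheckoutMetrics { // hypothetical bean, for illustration only

    private final Timer checkoutTimer;

    @Inject
    CheckoutMetrics(MeterRegistry registry) {
        // "checkout.duration" is a made-up metric name
        this.checkoutTimer = registry.timer("checkout.duration");
    }

    public void recordCheckout(Duration elapsed) {
        // With the bridge, this Timer surfaces as a "checkout.duration" Histogram
        // plus a "checkout.duration.max" gauge (see the Timer row in the table below).
        checkoutTimer.record(elapsed);
    }
}
----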
+ +When using the `@Timed` annotation with Micrometer, 2 different metrics are https://github.com/open-telemetry/opentelemetry-java-instrumentation/blob/324fdbdd452ddffaf2da2c5bf004d8bb3fdfa1dd/instrumentation/micrometer/micrometer-1.5/library/src/main/java/io/opentelemetry/instrumentation/micrometer/v1_5/OpenTelemetryTimer.java#L31[created on the OpenTelemetry side], one `Gauge` for the `max` value and one `Histogram`. + +The `DistributionSummary` from Micrometer is transformed into a `Histogram` and a `DoubleGauge` for the `max` value. If service level objectives (slo) are set to `true` when creating a `DistributionSummary`, an additional histogram is created for them. + +This table shows the differences between the two frameworks: + +|=== +|Micrometer |OpenTelemetry + +|DistributionSummary +|`` (Histogram), `.max` (DoubleGauge) + +|DistributionSummary with SLOs +|`` (Histogram), `.max` (DoubleGauge), `.histogram` (DoubleGauge) + +|LongTaskTimer +|`.active` (ObservableLongUpDownCounter), `.duration` (ObservableDoubleUpDownCounter) + +|Timer +|`` (Histogram), `.max` (ObservableDoubleGauge) +|=== + + +=== Semantic convention differences + +The 2 frameworks follow different semantic conventions. The OpenTelemetry Metrics are based on the https://opentelemetry.io/docs/concepts/semantic-conventions/[OpenTelemetry Semantic Conventions] and are still under active development (early 2025). Micrometer metrics convention format is around for a long time and has not changed much. + +When these 2 configurations are set in the `application.properties` file: + +[source,properties] +---- +quarkus.micrometer.binder.jvm=true +quarkus.micrometer.binder.http-server.enabled=true +---- + +The JVM and HTTP server metrics are collected by Micrometer. + +Next, are examples of the metrics collected by Micrometer and a comparison of what would be the `quarkus-micrometer-registry-prometheus` output vs the one on this bridge. A link to the equivalent OpenTelemetry Semantic Convention is also provided for reference and is not currently used in the bridge. + +|=== +|Micrometer Meter |Quarkus Micrometer Prometheus output | This bridge OpenTelemetry output name | Related OpenTelemetry Semantic Convention (not applied) + +|Using the @Timed interceptor. +| +|method.timed (Histogram), method.timed.max (DoubleGauge) +|NA + +|Using the @Counted interceptor. 
+| +|method.counted (DoubleSum) +|NA + +|`http.server.active.requests` (Gauge) +|`http_server_active_requests` (Gauge) +|`http.server.active.requests` (DoubleGauge) +|https://opentelemetry.io/docs/specs/semconv/http/http-metrics/#metric-httpserveractive_requests[`http.server.active_requests`] (UpDownCounter) + +|`http.server.requests` (Timer) +|`http_server_requests_seconds_count`, `http_server_requests_seconds_sum`, `http_server_requests_seconds_max` (Gauge) +|`http.server.requests` (Histogram), `http.server.requests.max` (DoubleGauge) +|https://opentelemetry.io/docs/specs/semconv/http/http-metrics/#metric-httpserverrequestduration[`http.server.request.duration`] (Histogram) + +|`http.server.bytes.read` (DistributionSummary) +|`http_server_bytes_read_count`, `http_server_bytes_read_sum` , `http_server_bytes_read_max` (Gauge) +|`http.server.bytes.read` (Histogram), `http.server.bytes.read.max` (DoubleGauge) +|https://opentelemetry.io/docs/specs/semconv/http/http-metrics/#metric-httpserverrequestbodysize[`http.server.request.body.size`] (Histogram) + +|`http.server.bytes.write` (DistributionSummary) +|`http_server_bytes_write_count`, `http_server_bytes_write_sum` , `http_server_bytes_write_max` (Gauge) +|`http.server.bytes.write` (Histogram), `http.server.bytes.write.max` (DoubleGauge) +|https://opentelemetry.io/docs/specs/semconv/http/http-metrics/#metric-httpserverresponsebodysize[`http.server.response.body.size`] (Histogram) + +|`http.server.connections` (LongTaskTimer) +|`http_server_connections_seconds_active_count`, `http_server_connections_seconds_duration_sum` `http_server_connections_seconds_max` (Gauge) +|`http.server.connections.active` (LongSum), `http.server.connections.duration` (DoubleGauge) +| N/A + +|`jvm.threads.live` (Gauge) +|`jvm_threads_live_threads` (Gauge) +|`jvm.threads.live` (DoubleGauge) +|https://opentelemetry.io/docs/specs/semconv/runtime/jvm-metrics/#metric-jvmthreadcount[`jvm.threads.live`] (UpDownCounter) + +|`jvm.threads.started` (FunctionCounter) +|`jvm_threads_started_threads_total` (Counter) +|`jvm.threads.started` (DoubleSum) +|https://opentelemetry.io/docs/specs/semconv/runtime/jvm-metrics/#metric-jvmthreadcount[`jvm.threads.live`] (UpDownCounter) + +|`jvm.threads.daemon` (Gauge) +|`jvm_threads_daemon_threads` (Gauge) +|`jvm.threads.daemon` (DoubleGauge) +|https://opentelemetry.io/docs/specs/semconv/runtime/jvm-metrics/#metric-jvmthreadcount[`jvm.threads.live`] (UpDownCounter) + +|`jvm.threads.peak` (Gauge) +|`jvm_threads_peak_threads` (Gauge) +|`jvm.threads.peak` (DoubleGauge) +|N/A + +|`jvm.threads.states` (Gauge per state) +|`jvm_threads_states_threads` (Gauge) +|`jvm.threads.states` (DoubleGauge) +|https://opentelemetry.io/docs/specs/semconv/runtime/jvm-metrics/#metric-jvmthreadcount[`jvm.threads.live`] (UpDownCounter) +|=== + + +[NOTE] +==== +Some metrics might be missing from the output if they contain no data. +==== + +== See the output + +=== Grafana-OTel-LGTM Dev Service +You can use the xref:observability-devservices-lgtm.adoc[Grafana-OTel-LGTM] Dev Service. + +This Dev Service includes Grafana for visualizing data, Loki to store logs, Tempo to store traces and Prometheus to store metrics. 
+It also provides an OTel collector to receive the data. + +=== Logging exporter + +You can output all metrics to the console by setting the exporter to `logging` in the `application.properties` file: +[source, properties] +---- +quarkus.otel.metrics.exporter=logging <1> +quarkus.otel.metric.export.interval=10000ms <2> +---- + +<1> Set the exporter to `logging`. +Normally you don't need to set this. +The default is `cdi`. +<2> Set the interval to export the metrics. +The default is `1m`, which is too long for debugging. + +Also add this dependency to your project: +[source,xml] +---- +<dependency> + <groupId>io.opentelemetry</groupId> + <artifactId>opentelemetry-exporter-logging</artifactId> +</dependency> +---- diff --git a/docs/src/main/asciidoc/telemetry-micrometer.adoc b/docs/src/main/asciidoc/telemetry-micrometer.adoc index 2e4b60fb1095b..f9f9414ea0cb1 100644 --- a/docs/src/main/asciidoc/telemetry-micrometer.adoc +++ b/docs/src/main/asciidoc/telemetry-micrometer.adoc @@ -588,7 +588,9 @@ link:https://micrometer.io/docs/concepts[official documentation]. === Use `HttpServerMetricsTagsContributor` for server HTTP requests -By providing CDI beans that implement `io.quarkus.micrometer.runtime.HttpServerMetricsTagsContributor`, user code can contribute arbitrary tags based on the details of HTTP request +By providing CDI beans that implement `io.quarkus.micrometer.runtime.HttpServerMetricsTagsContributor`, user code can contribute arbitrary tags based on the details of HTTP requests and responses. + +CAUTION: When creating tags using this interface, it's important to limit the cardinality of the values; otherwise, there is a risk of severely degrading the metrics system's capacity. === Use `HttpClientMetricsTagsContributor` for client HTTP requests diff --git a/docs/src/main/asciidoc/tls-registry-reference.adoc b/docs/src/main/asciidoc/tls-registry-reference.adoc index 8ef99143c6cb2..18d15125ced69 100644 --- a/docs/src/main/asciidoc/tls-registry-reference.adoc +++ b/docs/src/main/asciidoc/tls-registry-reference.adoc @@ -50,17 +50,17 @@ By specifying the `+quarkus.tls..*+` properties, you can adapt the TLS set [IMPORTANT] ==== -The default TLS configuration is not a fallback/global configuration. This means that each named TLS configuration -(or "TLS bucket") needs to provide its own properties. For instance, `quarkus.tls.reload-period` will only be applied -to the default TLS configuration. +The default TLS configuration is not a fallback or global configuration. +Each named TLS configuration, or "TLS bucket," must provide its own properties. +For instance, `quarkus.tls.reload-period` will only be applied to the default TLS configuration. ==== === Configuring HTTPS for a HTTP server To ensure secure client-server communication, the client is often required to verify the server's authenticity. -* The server must use a keystore that contains its certificate and private key -* The client needs to be configured with a truststore to validate the server's certificate +* The server must use a keystore that contains its certificate and private key. +* The client needs to be configured with a truststore to validate the server's certificate. During the TLS handshake, the server presents its certificate, which the client then validates. This prevents man-in-the-middle attacks and secures data transmission.
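Beyond the declarative properties, application code can also inspect what the TLS registry holds. The following is a minimal sketch only: it assumes the `TlsConfigurationRegistry` and `TlsConfiguration` types discussed later in this reference, that the registry's `get(name)` and `getDefault()` accessors return `Optional` values, and the bucket name passed in is made up for the example.

[source,java]
----
import io.quarkus.tls.TlsConfiguration;
import io.quarkus.tls.TlsConfigurationRegistry;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.inject.Inject;

@ApplicationScoped
public class TlsBucketInspector { // hypothetical helper, for illustration only

    @Inject
    TlsConfigurationRegistry registry;

    public boolean hasKeyStore(String bucketName) {
        // Look up a named TLS configuration ("bucket"), falling back to the default one.
        // Optional-returning accessors are assumed here.
        TlsConfiguration configuration = registry.get(bucketName)
                .or(registry::getDefault)
                .orElseThrow(() -> new IllegalStateException("No TLS configuration available"));
        // A server-side bucket typically carries a keystore; clients usually only need a truststore.
        return configuration.getKeyStore() != null;
    }
}
----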
@@ -149,8 +149,7 @@ This configuration enables mTLS by ensuring that both the server and client vali [[referencing-a-tls-configuration]] == Referencing a TLS configuration -To reference an example _named_ configuration that you created by using the `quarkus.tls..*` properties as explained in <> -, use the `tls-configuration-name` property as shown in the following examples: +To reference an example _named_ configuration that you created by using the `quarkus.tls..*` properties as explained in <>, use the `tls-configuration-name` property as shown in the following examples: .Example configuration for the core HTTP server: [source,properties] @@ -173,39 +172,31 @@ quarkus.smallrye-graphql-client.my-client.tls-configuration-name=MY_TLS_CONFIGUR [NOTE] ==== -When using the Typesafe GraphQL client with a certificate -reloading mechanism (see <>), it is essential to -override the bean's scope to `RequestScoped` (or another similar scope -shorter than application). This is because by default, the Typesafe client is an -application-scoped bean, so shortening the scope guarantees that new instances of the bean -created after a certificate reload will be configured with the latest -certificate. Dynamic clients are `@Dependent` scoped, so you should -inject them into components with an appropriate scope. +When using the Typesafe GraphQL client with a certificate reloading mechanism, as described in the <> section, it is essential to override the bean's scope to `RequestScoped` or another similar scope shorter than the application. +This is because, by default, the Typesafe client is an application-scoped bean. +Shortening the scope guarantees that new instances of the bean created after a certificate reload will be configured with the latest certificate. +Dynamic clients are `@Dependent` scoped; inject them into components with an appropriate scope. ==== === Referencing the default truststore of SunJSSE JDK distributions typically contain a truststore in the `$JAVA_HOME/lib/security/cacerts` file. -It is used as a default truststore by SunJSSE, the default implementation of Java Secure Socket Extension (JSSE). -SSL/TLS capabilities provided by SunJSSE are leveraged by various Java Runtime components, -such as `javax.net.ssl.HttpsURLConnection` and others. +This truststore is used as a default truststore by SunJSSE, the default implementation of the Java Secure Socket Extension (JSSE). +SSL/TLS capabilities provided by SunJSSE are leveraged by various Java Runtime components, such as `javax.net.ssl.HttpsURLConnection` and others. -Although Quarkus extensions typically do not honor the default truststore of SunJSSE, -it might still be practical to use it in some situations - be it migration from legacy technologies -or when running on a Linux distribution where the SunJSSE truststore is synchronized with the operating system truststore. +Although Quarkus extensions typically do not honor the default truststore of SunJSSE, it is still practical to use it in some situations. +This applies when migrating from legacy technologies or running on a Linux distribution where the SunJSSE truststore is synchronized with the operating system (OS). -To make the use of SunJSSE truststore easier, Quarkus TLS Registry provides a TLS configuration -under the name `javax.net.ssl` that mimics the default behavior of SunJSSE: +To simplify the use of the SunJSSE truststore, Quarkus TLS Registry provides a TLS configuration under the name `javax.net.ssl` that mimics the default behavior of SunJSSE: -. 
If the `javax.net.ssl.trustStore` system property is defined, then its value is honored as a truststore -. Otherwise, the paths `$JAVA_HOME/lib/security/jssecacerts` and `$JAVA_HOME/lib/security/cacerts` are checked - and the first existing file is used as a truststore -. Otherwise an `IllegalStateException` is thrown. +* If the `javax.net.ssl.trustStore` system property is defined, its value is honored as a truststore. +* Otherwise, the paths `$JAVA_HOME/lib/security/jssecacerts` and `$JAVA_HOME/lib/security/cacerts` are checked, and the first existing file is used as a truststore. +* If neither condition is met, an `IllegalStateException` is thrown. The password for opening the truststore is taken from the `javax.net.ssl.trustStorePassword` system property. -If it is not set, the default password `changeit` is used. +If this property is not set, the default password `changeit` is used. -`javax.net.ssl` can be used as a value for various `*.tls-configuration-name` properties, for example: +The `javax.net.ssl` configuration can be used as a value for various `*.tls-configuration-name` properties, as shown below: .Example configuration for a gRPC client: [source,properties] @@ -227,15 +218,15 @@ The following sections outline the various properties available for configuring === Key stores -Key stores are used to store private keys and the certificates. +Key stores store private keys and certificates. They are mainly used on the server side but can also be used on the client side when mTLS is used. ==== PEM keystores Privacy Enhanced Mail (PEM) keystores are composed of a list of file pairs: -* *The certificate file* - a `.crt` or `.pem` file -* *The private key file* - often a `.key` file +* *The certificate file* - a `.crt` or `.pem` file. +* *The private key file* - often a `.key` file. To configure a PEM keystore: [source,properties] @@ -265,18 +256,20 @@ This setting is important when using SNI, because it uses the first specified pa When using PEM keystore, the following formats are supported: -- PKCS#8 private key (unencrypted) -- PKCS#1 RSA private key (unencrypted) -- Encrypted PKCS#8 private key (encrypted with AES-128-CBC) +* PKCS#8 private key (unencrypted) +* PKCS#1 RSA private key (unencrypted) +* Encrypted PKCS#8 private key (encrypted with AES-128-CBC) -In the later case, the `quarkus.tls.key-store.pem.password` (or `quarkus.tls.key-store.pem..password`) property must be set to the password used to decrypt the private key: +In the later case, the `quarkus.tls.key-store.pem.password` or `quarkus.tls.key-store.pem..password` property must be set to the password used to decrypt the private key. +.An encrypted PEM keystore configuration example: [source,properties] ---- quarkus.tls.http.key-store.pem.cert=certificate.crt quarkus.tls.http.key-store.pem.key=key.key quarkus.tls.http.key-store.pem.password=password ---- +include::_includes/snip-note-encrypted-pem-tech-prev.adoc[] ==== PKCS12 keystores @@ -345,6 +338,47 @@ quarkus.tls.key-store.jks.alias-password=my-alias-password * Alternatively, use SNI to select the appropriate certificate and private key. Note that all keys must use the same password. +==== Provided keystores +If you need more control over the keystore used in a TLS configuration, you can provide a CDI bean implementing the `io.quarkus.tls.runtime.KeyStoreProvider` interface. Quarkus calls `KeyStoreProvider::getKeyStore` when the TLS configuration is <> and any time the configuration is <>. 
The resulting keystore and options are then made available via `TlsConfiguration::getKeyStore` and `TlsConfiguration::getKeyStoreOptions`. + +.Example KeyStoreProvider +[source, java] +---- +import io.quarkus.tls.runtime.KeyStoreAndKeyCertOptions; +import io.quarkus.tls.runtime.KeyStoreProvider; +import io.smallrye.common.annotation.Identifier; +import io.vertx.core.Vertx; +import io.vertx.core.buffer.Buffer; +import io.vertx.core.net.PemKeyCertOptions; +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.inject.Inject; + +@ApplicationScoped <1> +@Identifier("") <2> +public class ExampleKeyStoreProvider implements KeyStoreProvider { + + @Inject <3> + SecurityDatabase securityDatabase; + + @Override + public KeyStoreAndKeyCertOptions getKeyStore(Vertx vertx) { + try { + var options = new PemKeyCertOptions() + .addCertValue(Buffer.buffer(securityDatabase.getEntityCertificate())) + .addKeyValue(Buffer.buffer(securityDatabase.getEntityPrivateKey())); + var keyStore = options + .loadKeyStore(vertx); + return new KeyStoreAndKeyCertOptions(keyStore, options); + } catch (Exception e) { + throw new RuntimeException(e); + } + } +} +---- +<1> The CDI bean implementing the `KeyStoreProvider` interface can be `@ApplicationScoped`, `@Singleton` or `@Dependent`. +<2> Use the `@Identifier` qualifier to indicate a named TLS configuration for which to provide keystore options. Omit the qualifier (or use `@Default` explicitly) to indicate the default TLS configuration. See <> for more details. +<3> Other CDI beans can be injected for runtime access to keystore material. + [[sni]] ==== SNI @@ -447,6 +481,46 @@ quarkus.tls.trust-store.jks.alias=my-alias `.jks` files are password-protected, so you need to provide the password to open the truststore. However, unlike keystores, the alias does not require a password because it contains a public certificate, not a private key. +==== Provided truststores +If you need more control over the truststore used in a TLS configuration, you can provide a CDI bean implementing the `io.quarkus.tls.runtime.TrustStoreProvider` interface. Quarkus calls `TrustStoreProvider::getTrustStore` when the TLS configuration is <> and any time the configuration is <>. The resulting truststore and options are then made available via `TlsConfiguration::getTrustStore` and `TlsConfiguration::getTrustStoreOptions`. + +.Example TrustStoreProvider +[source, java] +---- +import io.quarkus.tls.runtime.TrustStoreAndTrustOptions; +import io.quarkus.tls.runtime.TrustStoreProvider; +import io.smallrye.common.annotation.Identifier; +import io.vertx.core.Vertx; +import io.vertx.core.buffer.Buffer; +import io.vertx.core.net.PemTrustOptions; +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.inject.Inject; + +@ApplicationScoped <1> +@Identifier("") <2> +public class ExampleTrustStoreProvider implements TrustStoreProvider { + + @Inject <3> + SecurityDatabase securityDatabase; + + @Override + public TrustStoreAndTrustOptions getTrustStore(Vertx vertx) { + try { + var options = new PemTrustOptions() + .addCertValue(Buffer.buffer(securityDatabase.getTrustedRootCertificate())); + var trustStore = options + .loadKeyStore(vertx); + return new TrustStoreAndTrustOptions(trustStore, options); + } catch (Exception e) { + throw new RuntimeException(e); + } + } +} +---- +<1> The CDI bean implementing the `TrustStoreProvider` interface can be `@ApplicationScoped`, `@Singleton` or `@Dependent`. 
+<2> Use the `@Identifier` qualifier to indicate a named TLS configuration for which to provide truststore options. Omit the qualifier (or use `@Default` explicitly) to indicate the default TLS configuration. See <> for more details. +<3> Other CDI beans can be injected for runtime access to truststore material. + ==== Credential providers @@ -494,7 +568,7 @@ quarkus.tls.cipher-suites=TLS_AES_128_GCM_SHA256,TLS_AES_256_GCM_SHA384 The TLS protocol versions are the list of protocols that can be used during the TLS handshake. Enabled TLS protocol versions are specified as an ordered list separated by commas. -The relevant configuration property is `quarkus.tls.protocols` (or `quarkus.tls..protocols` for named TLS configurations). +The relevant configuration property is `quarkus.tls.protocols` or `quarkus.tls..protocols` for named TLS configurations. It defaults to `TLSv1.3, TLSv1.2` if not configured. The available options are `TLSv1`, `TLSv1.1`, `TLSv1.2`, and `TLSv1.3`. @@ -534,8 +608,11 @@ ALPN is enabled by default. quarkus.tls.alpn=false ---- + -WARNING: Disabling ALPN is not recommended for non-experts, as it can lead to performance degradation, protocol negotiation issues, and unexpected behavior, particularly with protocols like HTTP/2. +[WARNING] +==== +Disabling ALPN is not recommended for non-experts, as it can lead to performance degradation, protocol negotiation issues, and unexpected behavior, particularly with protocols like HTTP/2. However, disabling ALPN can be useful for diagnosing native inconsistencies or testing performance in specific edge cases where protocol negotiation causes conflicts. +==== ==== Certificate Revocation List (CRL) @@ -677,18 +754,17 @@ If any of these checks fail, the application will not start. == Reloading certificates The `TlsConfiguration` obtained from the `TLSConfigurationRegistry` includes a mechanism for reloading certificates. -The `reload` method refreshes the keystores, truststores and CRLs, typically by reloading them from the file system. +The `reload` method refreshes the keystores, truststores, and CRLs, typically by reloading them from the file system. NOTE: The reload operation is not automatic and must be triggered manually. -Additionally, the `TlsConfiguration` implementation must support reloading (which is the case for the configured certificate). +Additionally, the `TlsConfiguration` implementation must support reloading, as is the case for the configured certificate. The `reload` method returns a `boolean` indicating whether the reload was successful. A value of `true` means the reload operation was successful, not necessarily that there were updates to the certificates. After a `TlsConfiguration` has been reloaded, servers and clients using this configuration may need to perform specific actions to apply the new certificates. -The recommended approach for informing clients and servers about the certificate reload is to fire a CDI event of -type `io.quarkus.tls.CertificateUpdatedEvent`. +The recommended approach for informing clients and servers about the certificate reload is to fire a CDI event of type `io.quarkus.tls.CertificateUpdatedEvent`. To do so, inject a CDI event of this type and fire it when a reload occurs. . Manually triggering a reload and firing a `CertificateUpdatedEvent`: @@ -742,9 +818,11 @@ quarkus.tls.http.key-store.pem.0.cert=tls.crt quarkus.tls.http.key-store.pem.0.key=tls.key ---- -IMPORTANT: Impacted server and client may need to listen to the `CertificateUpdatedEvent` to apply the new certificates. 
-This is automatically done for the Quarkus HTTP server (i.e. Quarkus REST server, gRPC server, Web Socket server) and -the management interface if it is enabled. +[IMPORTANT] +==== +Impacted servers and clients may need to listen to the `CertificateUpdatedEvent` to apply the new certificates. +This is automatically done for the Quarkus HTTP server, such as the Quarkus REST server, gRPC server, and WebSocket server, as well as the management interface if it is enabled. +==== NOTE: In Quarkus dev mode, when files are touched, it will trigger the `CertificateUpdatedEvent` much more frequently. @@ -1134,18 +1212,17 @@ When developing with TLS, you can use two types of certificates: * **Self-signed certificate**: The certificate is signed by the same entity that uses it. It is not trusted by default. -This type of certificate is typically used when a Certificate Authority (CA) is unavailable or you want a simple setup. -It is not suitable for production and should only be used for development. +This type of certificate is typically used when a Certificate Authority (CA) is unavailable or when a simple setup is needed. +It is not suitable for production and is intended only for development. * **CA-signed certificate**: The certificate is signed by a Certificate CA, a trusted entity. This certificate is trusted by default and is the standard choice for production environments. While you can use a self-signed certificate for local development, it has limitations. -Browsers and tools like `curl`, `wget`, and `httpie` typically do not trust self-signed certificates, requiring manual import of the CA in your OS. +Browsers and tools like `curl`, `wget`, and `httpie` typically do not trust self-signed certificates, requiring manual import of the CA in your operating system. -To avoid this issue, you can use a development CA to sign certificates and install the CA in the system truststore. +To avoid this issue, use a development CA to sign certificates and install the CA in the system truststore. This ensures that the system trusts all certificates signed by the CA. - Quarkus simplifies the generation of a development CA and the certificates that are signed by this CA. [[generate-a-development-ca]] @@ -1157,7 +1234,7 @@ Note that the generated CA is only valid for development purposes and can only b To generate a development CA: [source,shell] ---- -quarkus tls generate-ca-certificate --install \ <1> +quarkus tls generate-quarkus-ca --install \ <1> --renew \ <2> --truststore <3> ---- @@ -1348,7 +1425,7 @@ quarkus.management.enabled=true [IMPORTANT] ==== .Port 80 -The Let's Encrypt ACME challenge requires that the application is reachable on port `80` (basically: `http://your-dns-name`). +The Let's Encrypt ACME challenge requires that the application is reachable on port `80`, essentially `http://your-dns-name`. Ensure the port `80` is accessible from the Internet. It might require an explicit security policy depending on your hosting provider. @@ -1364,7 +1441,7 @@ quarkus.http.insecure-requests=redirect ==== -The challenge is served from the primary HTTP interface (accessible from your DNS domain name). +The challenge is served from the primary HTTP interface, which is accessible from your DNS domain name. IMPORTANT: Do not start your application yet. 
@@ -1383,9 +1460,9 @@ quarkus tls lets-encrypt prepare --domain= + The `prepare` command does the following: -* Creates a `.letsencrypt` folder in your application's root directory -* Creates a self-signed domain certificate and private key for your application configured in the previous <> step to be able to start and accept HTTPS requests -* Creates a `.env` configuration file in your application's root directory and configures the application to use the self-signed domain certificate and private key (until we get the Let's Encrypt certificate) +* Creates a `.letsencrypt` folder in your application's root directory. +* Creates a self-signed domain certificate and private key for your application configured in the previous <> step to start and accept HTTPS requests. +* Creates a `.env` configuration file in the root directory of your application and configures it to use the self-signed domain certificate and private key until you obtain the Let's Encrypt certificate. + The following snippet shows an example of the generated `.env` file: + @@ -1434,8 +1511,11 @@ Use `https://localhost:8443/` if you choose not to enable a management router in * Issues a Let's Encrypt certificate request. * Interacts with the Quarkus application to resolve ACME challenges. + -NOTE: When the Let's Encrypt certificate chain and private key have been successfully acquired, they are converted to PEM format and copied to your application's `.letsencrypt` folder. +[NOTE] +==== +When the Let's Encrypt certificate chain and private key have been successfully acquired, they are converted to PEM format and copied to your application's `.letsencrypt` folder. The TLS registry is informed that a new certificate and private key are ready and reloads them automatically. +==== + . Access your application's endpoint using `https://your-domain-name:8443/` again. Confirm in the browser that the Let's Encrypt certificate authority is now signing your domain certificate. diff --git a/docs/src/main/asciidoc/vertx-reference.adoc b/docs/src/main/asciidoc/vertx-reference.adoc index c9faed3bd2294..1514b28efc4bb 100644 --- a/docs/src/main/asciidoc/vertx-reference.adoc +++ b/docs/src/main/asciidoc/vertx-reference.adoc @@ -1035,7 +1035,7 @@ quarkus.http.so-reuse-port=true == Use a Vert.x context-aware scheduler Some Mutiny operators need to schedule work on an executor thread pool. -A good example is `.onItem().delayIt().by(Duration.ofMillis(10)` as it needs such an executor to delay emissions. +A good example is `.onItem().delayIt().by(Duration.ofMillis(10))` as it needs such an executor to delay emissions. The default executor is returned by `io.smallrye.mutiny.infrastructure.Infrastructure` and it is already configured and managed by Quarkus. diff --git a/docs/src/main/asciidoc/virtual-threads.adoc b/docs/src/main/asciidoc/virtual-threads.adoc index 89000b4cb259b..0183f7c38c262 100644 --- a/docs/src/main/asciidoc/virtual-threads.adoc +++ b/docs/src/main/asciidoc/virtual-threads.adoc @@ -349,7 +349,7 @@ To containerize your Quarkus application that use `@RunOnVirtualThread`, add the quarkus.container-image.build=true quarkus.container-image.group= quarkus.container-image.name= -quarkus.jib.base-jvm-image=registry.access.redhat.com/ubi8/openjdk-21-runtime <1> +quarkus.jib.base-jvm-image=registry.access.redhat.com/ubi9/openjdk-21-runtime <1> quarkus.jib.platforms=linux/amd64,linux/arm64 <2> ---- <1> Make sure you use a base image supporting virtual threads. Here we use an image providing Java 21. 
Quarkus picks an image providing Java 21+ automatically if you do not set one. diff --git a/docs/src/main/asciidoc/websockets-next-reference.adoc b/docs/src/main/asciidoc/websockets-next-reference.adoc index aa30b17a83572..0dfff6c443730 100644 --- a/docs/src/main/asciidoc/websockets-next-reference.adoc +++ b/docs/src/main/asciidoc/websockets-next-reference.adoc @@ -185,8 +185,12 @@ public class MyWebSocket { ==== Request context -If an endpoint is annotated with `@RequestScoped`, or with a security annotation (such as `@RolesAllowed`), or depends directly or indirectly on a `@RequestScoped` bean, or on a bean annotated with a security annotation, then each WebSocket endpoint callback method execution is associated with a new _request context_. -The request context is active during endpoint callback invocation. +Each WebSocket endpoint callback method execution is associated with a new CDI _request context_ if the endpoint: + +* Is annotated with the `@RequestScoped` annotation. +* Has a method annotated with a security annotation such as `@RolesAllowed`. +* Depends directly or indirectly on a `@RequestScoped` bean. +* Depends directly or indirectly on a CDI bean secured with a standard security annotation. TIP: It is also possible to set the `quarkus.websockets-next.server.activate-request-context` config property to `always`. In this case, the request context is always activated when an endpoint callback is invoked. @@ -783,6 +787,68 @@ class MyBean { [[websocket-next-security]] === Security +Security capabilities are provided by the Quarkus Security extension. +Any xref:security-identity-providers.adoc[Identity provider] can be used to convert authentication credentials on the initial HTTP request into a `SecurityIdentity` instance. +The `SecurityIdentity` is then associated with the websocket connection. +Authorization options are demonstrated in the following sections. + +NOTE: When an OpenID Connect extension, `quarkus-oidc`, is used and the token expires, Quarkus automatically closes the connection. + +[[secure-http-upgrade]] +==== Secure HTTP upgrade + +An HTTP upgrade is secured when a standard security annotation is placed on an endpoint class or an HTTP Security policy is defined. +The advantage of securing the HTTP upgrade is less processing: the authorization is performed early and only once. +You should always prefer HTTP upgrade security unless you need to perform an action on error (see <>) or a security check based on the payload (see <>). + +.Use standard security annotation to secure an HTTP upgrade +[source, java] +---- +package io.quarkus.websockets.next.test.security; + +import io.quarkus.security.Authenticated; +import jakarta.inject.Inject; + +import io.quarkus.security.identity.SecurityIdentity; +import io.quarkus.websockets.next.OnOpen; +import io.quarkus.websockets.next.OnTextMessage; +import io.quarkus.websockets.next.WebSocket; + +@Authenticated <1> +@WebSocket(path = "/end") +public class Endpoint { + + @Inject + SecurityIdentity currentIdentity; + + @OnOpen + String open() { + return "ready"; + } + + @OnTextMessage + String echo(String message) { + return message; + } +} +---- +<1> Initial HTTP handshake ends with the 401 status for anonymous users. +You can also redirect the handshake request on authorization failure with the `quarkus.websockets-next.server.security.auth-failure-redirect-url` configuration property. + +IMPORTANT: HTTP upgrade is only secured when a security annotation is declared on an endpoint class next to the `@WebSocket` annotation.
+Placing a security annotation on an endpoint bean will not secure bean methods, only the HTTP upgrade. +You must always verify that your endpoint is secured as intended. + +.Use HTTP Security policy to secure an HTTP upgrade +[source,properties] +---- +quarkus.http.auth.permission.http-upgrade.paths=/end +quarkus.http.auth.permission.http-upgrade.policy=authenticated +---- + +[[secure-callback-methods]] +==== Secure WebSocket endpoint callback methods + WebSocket endpoint callback methods can be secured with security annotations such as `io.quarkus.security.Authenticated`, `jakarta.annotation.security.RolesAllowed` and other annotations listed in the xref:security-authorize-web-endpoints-reference.adoc#standard-security-annotations[Supported security annotations] documentation. @@ -828,60 +894,109 @@ public class Endpoint { <1> The echo callback method can only be invoked if the current security identity has an `admin` role. <2> The error handler is invoked in case of the authorization failure. -`SecurityIdentity` is initially created during a secure HTTP upgrade and associated with the websocket connection. +[[secure-endpoints-with-permission-checkers]] +==== Secure server endpoints with permission checkers -NOTE: When OpenID Connect extension is used and token expires, Quarkus automatically closes connection. +WebSocket endpoints can be secured with the xref:security-authorize-web-endpoints-reference.adoc#permission-checker[permission checkers]. +We recommend to <> rather than individual endpoint methods. For example: -=== Secure HTTP upgrade +.Example of a WebSocket endpoint with secured HTTP upgrade +[source, java] +---- +package io.quarkus.websockets.next.test.security; -An HTTP upgrade is secured when standard security annotation is placed on an endpoint class or an HTTP Security policy is defined. -The advantage of securing HTTP upgrade is less processing, the authorization is performed early and only once. -You should always prefer HTTP upgrade security unless, like in th example above, you need to perform action on error. +import io.quarkus.security.PermissionsAllowed; +import io.quarkus.websockets.next.OnTextMessage; +import io.quarkus.websockets.next.WebSocket; -.Use standard security annotation to secure an HTTP upgrade +@PermissionsAllowed("product:premium") +@WebSocket(path = "/product/premium") +public class PremiumProductEndpoint { + + @OnTextMessage + PremiumProduct getPremiumProduct(int productId) { + return new PremiumProduct(productId); + } + +} +---- + +.Example of a permission checker authorizing the HTTP upgrade [source, java] ---- package io.quarkus.websockets.next.test.security; -import io.quarkus.security.Authenticated; +import io.quarkus.security.identity.SecurityIdentity; +import io.quarkus.security.PermissionChecker; +import io.quarkus.vertx.http.runtime.security.HttpSecurityUtils; +import io.vertx.ext.web.RoutingContext; +import jakarta.enterprise.context.ApplicationScoped; + +@ApplicationScoped +public class PermissionChecker { + + @PermissionChecker("product:premium") + public boolean canGetPremiumProduct(SecurityIdentity securityIdentity) { <1> + String username = currentIdentity.getPrincipal().getName(); + + RoutingContext routingContext = HttpSecurityUtils.getRoutingContextAttribute(securityIdentity); + String initialHttpUpgradePath = routingContext == null ? 
null : routingContext.normalizedPath(); + if (!isUserAllowedToAccessPath(initialHttpUpgradePath, username)) { + return false; + } + + return isPremiumCustomer(username); + } + +} +---- +<1> A permission checker authorizing an HTTP upgrade must declare exactly one method parameter, the `SecurityIdentity`. + +It is also possible to run security checks on every message. For example, a message payload can be accessed like this: + +[source, java] +---- +package io.quarkus.websockets.next.test.security; + +import io.quarkus.security.PermissionChecker; +import io.quarkus.security.PermissionsAllowed; import jakarta.inject.Inject; +import io.quarkus.security.ForbiddenException; import io.quarkus.security.identity.SecurityIdentity; +import io.quarkus.websockets.next.OnError; import io.quarkus.websockets.next.OnOpen; import io.quarkus.websockets.next.OnTextMessage; import io.quarkus.websockets.next.WebSocket; -@Authenticated <1> -@WebSocket(path = "/end") -public class Endpoint { +@WebSocket(path = "/product") +public class ProductEndpoint { + + private record Product(int id, String name) {} @Inject SecurityIdentity currentIdentity; - @OnOpen - String open() { - return "ready"; - } - + @PermissionsAllowed("product:get") @OnTextMessage - String echo(String message) { - return message; + Product getProduct(int productId) { <1> + return new Product(productId, "Product " + productId); } -} ----- -<1> Initial HTTP handshake ends with the 401 status for anonymous users. -You can also redirect the handshake request on authorization failure with the `quarkus.websockets-next.server.security.auth-failure-redirect-url` configuration property. -IMPORTANT: HTTP upgrade is only secured when a security annotation is declared on an endpoint class next to the `@WebSocket` annotation. -Placing a security annotation on an endpoint bean will not secure bean methods, only the HTTP upgrade. -You must always verify that your endpoint is secured as intended. + @OnError + String error(ForbiddenException t) { <2> + return "forbidden:" + currentIdentity.getPrincipal().getName(); + } -.Use HTTP Security policy to secure an HTTP upgrade -[source,properties] ----- -quarkus.http.auth.permission.http-upgrade.paths=/end -quarkus.http.auth.permission.http-upgrade.policy=authenticated + @PermissionChecker("product:get") + boolean canGetProduct(int productId) { + String username = currentIdentity.getPrincipal().getName(); + return currentIdentity.hasRole("admin") || canUserGetProduct(productId, username); + } +} ---- +<1> The `getProduct` callback method can only be invoked if the current security identity has an `admin` role or the user is allowed to get the product detail. +<2> The error handler is invoked in case of the authorization failure. 
=== Inspect and/or reject HTTP upgrade diff --git a/extensions/agroal/deployment/src/main/java/io/quarkus/agroal/deployment/AgroalProcessor.java b/extensions/agroal/deployment/src/main/java/io/quarkus/agroal/deployment/AgroalProcessor.java index 7007893adadf4..0203d954780e9 100644 --- a/extensions/agroal/deployment/src/main/java/io/quarkus/agroal/deployment/AgroalProcessor.java +++ b/extensions/agroal/deployment/src/main/java/io/quarkus/agroal/deployment/AgroalProcessor.java @@ -1,6 +1,7 @@ package io.quarkus.agroal.deployment; import static io.quarkus.agroal.deployment.AgroalDataSourceBuildUtil.qualifiers; +import static io.quarkus.arc.deployment.OpenTelemetrySdkBuildItem.isOtelSdkEnabled; import static io.quarkus.deployment.Capability.OPENTELEMETRY_TRACER; import java.sql.Driver; @@ -26,6 +27,7 @@ import io.agroal.api.AgroalPoolInterceptor; import io.quarkus.agroal.DataSource; import io.quarkus.agroal.runtime.AgroalDataSourceSupport; +import io.quarkus.agroal.runtime.AgroalOpenTelemetryWrapper; import io.quarkus.agroal.runtime.AgroalRecorder; import io.quarkus.agroal.runtime.DataSourceJdbcBuildTimeConfig; import io.quarkus.agroal.runtime.DataSources; @@ -34,9 +36,9 @@ import io.quarkus.agroal.runtime.TransactionIntegration; import io.quarkus.agroal.spi.JdbcDataSourceBuildItem; import io.quarkus.agroal.spi.JdbcDriverBuildItem; -import io.quarkus.agroal.spi.OpenTelemetryInitBuildItem; import io.quarkus.arc.BeanDestroyer; import io.quarkus.arc.deployment.AdditionalBeanBuildItem; +import io.quarkus.arc.deployment.OpenTelemetrySdkBuildItem; import io.quarkus.arc.deployment.SyntheticBeanBuildItem; import io.quarkus.arc.deployment.UnremovableBeanBuildItem; import io.quarkus.arc.processor.DotNames; @@ -127,7 +129,7 @@ void build( // at least one datasource is using OpenTelemetry JDBC instrumentation, // therefore we register the OpenTelemetry data source wrapper bean additionalBeans.produce(new AdditionalBeanBuildItem.Builder() - .addBeanClass("io.quarkus.agroal.runtime.AgroalOpenTelemetryWrapper") + .addBeanClass(AgroalOpenTelemetryWrapper.class) .setDefaultScope(DotNames.SINGLETON).build()); } @@ -244,13 +246,13 @@ void generateDataSourceSupportBean(AgroalRecorder recorder, @Record(ExecutionTime.RUNTIME_INIT) @BuildStep - @Consume(OpenTelemetryInitBuildItem.class) @Consume(NarayanaInitBuildItem.class) void generateDataSourceBeans(AgroalRecorder recorder, DataSourcesRuntimeConfig dataSourcesRuntimeConfig, List aggregatedBuildTimeConfigBuildItems, SslNativeConfigBuildItem sslNativeConfig, Capabilities capabilities, + Optional openTelemetrySdkBuildItem, BuildProducer syntheticBeanBuildItemBuildProducer, BuildProducer jdbcDataSource) { if (aggregatedBuildTimeConfigBuildItems.isEmpty()) { @@ -275,7 +277,8 @@ void generateDataSourceBeans(AgroalRecorder recorder, .checkActive(recorder.agroalDataSourceCheckActiveSupplier(dataSourceName)) // pass the runtime config into the recorder to ensure that the DataSource related beans // are created after runtime configuration has been set up - .createWith(recorder.agroalDataSourceSupplier(dataSourceName, dataSourcesRuntimeConfig)) + .createWith(recorder.agroalDataSourceSupplier( + dataSourceName, dataSourcesRuntimeConfig, isOtelSdkEnabled(openTelemetrySdkBuildItem))) .destroyer(BeanDestroyer.AutoCloseableDestroyer.class); if (!DataSourceUtil.isDefault(dataSourceName)) { diff --git a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/AgroalRecorder.java b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/AgroalRecorder.java index 
4a1b5b300b0f0..eeea464179304 100644 --- a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/AgroalRecorder.java +++ b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/AgroalRecorder.java @@ -55,13 +55,15 @@ public ActiveResult get() { public Function, AgroalDataSource> agroalDataSourceSupplier( String dataSourceName, - @SuppressWarnings("unused") DataSourcesRuntimeConfig dataSourcesRuntimeConfig) { + @SuppressWarnings("unused") DataSourcesRuntimeConfig dataSourcesRuntimeConfig, + Optional> otelEnabled) { return new Function<>() { @SuppressWarnings("deprecation") @Override public AgroalDataSource apply(SyntheticCreationalContext context) { DataSources dataSources = context.getInjectedReference(DataSources.class); - return dataSources.createDataSource(dataSourceName); + return dataSources.createDataSource(dataSourceName, + otelEnabled.isPresent() ? otelEnabled.get().getValue() : false); } }; } diff --git a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSources.java b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSources.java index b8dd019dbe556..5b60fadd9f597 100644 --- a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSources.java +++ b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSources.java @@ -138,7 +138,7 @@ public AgroalDataSource getDataSource(String dataSourceName) { } @SuppressWarnings("resource") - public AgroalDataSource createDataSource(String dataSourceName) { + public AgroalDataSource createDataSource(String dataSourceName, boolean otelEnabled) { if (!agroalDataSourceSupport.entries.containsKey(dataSourceName)) { throw new IllegalArgumentException("No datasource named '" + dataSourceName + "' exists"); } @@ -221,7 +221,9 @@ public AgroalDataSource createDataSource(String dataSourceName) { dataSource.setPoolInterceptors(interceptorList); } - if (dataSourceJdbcBuildTimeConfig.telemetry() && dataSourceJdbcRuntimeConfig.telemetry().orElse(true)) { + if (dataSourceJdbcBuildTimeConfig.telemetry() && + dataSourceJdbcRuntimeConfig.telemetry().orElse(true) && + otelEnabled) { // activate OpenTelemetry JDBC instrumentation by wrapping AgroalDatasource // use an optional CDI bean as we can't reference optional OpenTelemetry classes here dataSource = agroalOpenTelemetryWrapper.get().apply(dataSource); @@ -250,11 +252,11 @@ private void applyNewConfiguration(String dataSourceName, AgroalDataSourceConfig TransactionIntegration txIntegration = new NarayanaTransactionIntegration(transactionManager, transactionSynchronizationRegistry, null, false, dataSourceJdbcBuildTimeConfig.transactions() == io.quarkus.agroal.runtime.TransactionIntegration.XA - && transactionRuntimeConfig.enableRecovery + && transactionRuntimeConfig.enableRecovery() ? xaResourceRecoveryRegistry : null); if (dataSourceJdbcBuildTimeConfig.transactions() == io.quarkus.agroal.runtime.TransactionIntegration.XA - && !transactionRuntimeConfig.enableRecovery) { + && !transactionRuntimeConfig.enableRecovery()) { log.warnv( "Datasource {0} enables XA but transaction recovery is not enabled. 
Please enable transaction recovery by setting quarkus.transaction-manager.enable-recovery=true, otherwise data may be lost if the application is terminated abruptly", dataSourceName); diff --git a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/health/DataSourceHealthCheck.java b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/health/DataSourceHealthCheck.java index 55f2cd22b79ec..295f66f8d5665 100644 --- a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/health/DataSourceHealthCheck.java +++ b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/health/DataSourceHealthCheck.java @@ -1,6 +1,7 @@ package io.quarkus.agroal.runtime.health; import java.sql.SQLException; +import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.Optional; @@ -80,4 +81,8 @@ public HealthCheckResponse call() { } return builder.build(); } + + protected Map getCheckedDataSources() { + return Collections.unmodifiableMap(checkedDataSources); + } } diff --git a/extensions/agroal/spi/src/main/java/io/quarkus/agroal/spi/OpenTelemetryInitBuildItem.java b/extensions/agroal/spi/src/main/java/io/quarkus/agroal/spi/OpenTelemetryInitBuildItem.java deleted file mode 100644 index c9f818427ab24..0000000000000 --- a/extensions/agroal/spi/src/main/java/io/quarkus/agroal/spi/OpenTelemetryInitBuildItem.java +++ /dev/null @@ -1,9 +0,0 @@ -package io.quarkus.agroal.spi; - -import io.quarkus.builder.item.EmptyBuildItem; - -/** - * Marker build item that indicates that the OpenTelemetry extension has been initialized. - */ -public final class OpenTelemetryInitBuildItem extends EmptyBuildItem { -} diff --git a/extensions/arc/deployment/pom.xml b/extensions/arc/deployment/pom.xml index 779cb9e6f9da3..ca2f99f3bf6b8 100644 --- a/extensions/arc/deployment/pom.xml +++ b/extensions/arc/deployment/pom.xml @@ -73,9 +73,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcConfig.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcConfig.java index 5b8c1893fd1f7..f4d2610156689 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcConfig.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcConfig.java @@ -8,13 +8,16 @@ import java.util.Set; import io.quarkus.deployment.index.IndexDependencyConfig; +import io.quarkus.runtime.annotations.ConfigDocIgnore; import io.quarkus.runtime.annotations.ConfigDocMapKey; import io.quarkus.runtime.annotations.ConfigDocSection; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefault; @ConfigRoot(phase = BUILD_TIME) -public class ArcConfig { +@ConfigMapping(prefix = "quarkus.arc") +public interface ArcConfig { public static final Set ALLOWED_REMOVE_UNUSED_BEANS_VALUES = Set.of("all", "true", "none", "false", "fwk", "framework"); @@ -43,15 +46,15 @@ public class ArcConfig { * * @see UnremovableBeanBuildItem */ - @ConfigItem(defaultValue = "all") - public String removeUnusedBeans; + @WithDefault("all") + String removeUnusedBeans(); /** * If set to true {@code @Inject} is automatically added to all non-static non-final fields that are annotated with * one of the annotations defined by {@link AutoInjectAnnotationBuildItem}. 
*/ - @ConfigItem(defaultValue = "true") - public boolean autoInjectFields; + @WithDefault("true") + boolean autoInjectFields(); /** * If set to true, the bytecode of unproxyable beans will be transformed. This ensures that a proxy/subclass @@ -65,16 +68,16 @@ public class ArcConfig { *

  • Makes private no-args constructors package-private if necessary. * > selectedAlternatives; + Optional> selectedAlternatives(); /** * If set to true then {@code jakarta.enterprise.inject.Produces} is automatically added to all non-void methods that are * annotated with a scope annotation, a stereotype or a qualifier, and are not annotated with {@code Inject} or * {@code Produces}, and no parameter is annotated with {@code Disposes}, {@code Observes} or {@code ObservesAsync}. */ - @ConfigItem(defaultValue = "true") - public boolean autoProducerMethods; + @WithDefault("true") + boolean autoProducerMethods(); /** * The list of types that should be excluded from discovery. @@ -123,8 +125,7 @@ public class ArcConfig { * If any element value matches a discovered type then the type is excluded from discovery, i.e. no beans and observer * methods are created from this type. */ - @ConfigItem - public Optional> excludeTypes; + Optional> excludeTypes(); /** * List of types that should be considered unremovable regardless of whether they are directly used or not. @@ -143,8 +144,7 @@ public class ArcConfig { * @see {@link #removeUnusedBeans} * @see {@link io.quarkus.arc.Unremovable} */ - @ConfigItem - public Optional> unremovableTypes; + Optional> unremovableTypes(); /** * Artifacts that should be excluded from discovery. @@ -152,10 +152,9 @@ public class ArcConfig { * These artifacts would be otherwise scanned for beans, i.e. they * contain a Jandex index or a beans.xml descriptor. */ - @ConfigItem @ConfigDocSection @ConfigDocMapKey("dependency-name") - Map excludeDependency; + Map excludeDependency(); /** * If set to true then the container attempts to detect "unused removed beans" false positives during programmatic lookup at @@ -163,8 +162,8 @@ public class ArcConfig { * * @see ArcConfig#removeUnusedBeans */ - @ConfigItem(defaultValue = "true") - public boolean detectUnusedFalsePositives; + @WithDefault("true") + boolean detectUnusedFalsePositives(); /** * If set to true then the container attempts to detect wrong usages of annotations and eventually fails the build to @@ -174,8 +173,8 @@ public class ArcConfig { * result a component annotated with {@code @jakarta.ejb.Singleton} would be completely ignored. Another example is an inner * class annotated with a scope annotation - this component would be again completely ignored. */ - @ConfigItem(defaultValue = "true") - public boolean detectWrongAnnotations; + @WithDefault("true") + boolean detectWrongAnnotations(); /** * If set to {@code true}, the container will perform additional validations mandated by the CDI specification. @@ -190,20 +189,18 @@ public class ArcConfig { * Note that {@link #transformUnproxyableClasses} and {@link #removeUnusedBeans} also has effect on specification * compatibility. You may want to disable these features to get behavior closer to the specification. */ - @ConfigItem(defaultValue = "false") - public boolean strictCompatibility; + @WithDefault("false") + boolean strictCompatibility(); /** * Dev mode configuration. */ - @ConfigItem - public ArcDevModeConfig devMode; + ArcDevModeConfig devMode(); /** * Test mode configuration. */ - @ConfigItem - public ArcTestConfig test; + ArcTestConfig test(); /** * The list of packages that will not be checked for split package issues. @@ -214,14 +211,12 @@ public class ArcConfig { *
  • a package name with suffix {@code .*}, i.e. {@code org.acme.*}, which matches a package that starts with provided * value
  • */ - @ConfigItem - public Optional> ignoredSplitPackages; + Optional> ignoredSplitPackages(); /** * Context propagation configuration. */ - @ConfigItem - public ArcContextPropagationConfig contextPropagation; + ArcContextPropagationConfig contextPropagation(); /** * If set to {@code true}, the container should try to optimize the contexts for some of the scopes. If set to {@code auto} @@ -231,8 +226,9 @@ public class ArcConfig { * Typically, some implementation parts of the context for {@link jakarta.enterprise.context.ApplicationScoped} could be * pregenerated during build. */ - @ConfigItem(defaultValue = "auto", generateDocumentation = false) - public OptimizeContexts optimizeContexts; + @WithDefault("auto") + @ConfigDocIgnore + OptimizeContexts optimizeContexts(); public enum OptimizeContexts { TRUE, @@ -240,17 +236,17 @@ public enum OptimizeContexts { AUTO } - public final boolean isRemoveUnusedBeansFieldValid() { - return ALLOWED_REMOVE_UNUSED_BEANS_VALUES.contains(removeUnusedBeans.toLowerCase()); + default boolean isRemoveUnusedBeansFieldValid() { + return ALLOWED_REMOVE_UNUSED_BEANS_VALUES.contains(removeUnusedBeans().toLowerCase()); } - public final boolean shouldEnableBeanRemoval() { - final String lowerCase = removeUnusedBeans.toLowerCase(); + default boolean shouldEnableBeanRemoval() { + final String lowerCase = removeUnusedBeans().toLowerCase(); return "all".equals(lowerCase) || "true".equals(lowerCase) || "fwk".equals(lowerCase) || "framework".equals(lowerCase); } - public final boolean shouldOnlyKeepAppBeans() { - final String lowerCase = removeUnusedBeans.toLowerCase(); + default boolean shouldOnlyKeepAppBeans() { + final String lowerCase = removeUnusedBeans().toLowerCase(); return "fwk".equals(lowerCase) || "framework".equals(lowerCase); } diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcContextPropagationConfig.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcContextPropagationConfig.java index 6334f78c14d0d..0ef53f2d6d901 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcContextPropagationConfig.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcContextPropagationConfig.java @@ -1,10 +1,10 @@ package io.quarkus.arc.deployment; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; +import io.smallrye.config.WithDefault; @ConfigGroup -public class ArcContextPropagationConfig { +public interface ArcContextPropagationConfig { /** * If set to true and the SmallRye Context Propagation extension is present then the CDI contexts will be propagated by @@ -16,7 +16,7 @@ public class ArcContextPropagationConfig { * * Note that the CDI contexts may be propagated in a different way though. For example with the Vertx duplicated context. 
*/ - @ConfigItem(defaultValue = "true") - public boolean enabled; + @WithDefault("true") + boolean enabled(); } diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcDevModeConfig.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcDevModeConfig.java index 0e175d57b02ba..03396f072833f 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcDevModeConfig.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcDevModeConfig.java @@ -1,23 +1,23 @@ package io.quarkus.arc.deployment; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; +import io.smallrye.config.WithDefault; @ConfigGroup -public class ArcDevModeConfig { +public interface ArcDevModeConfig { /** * If set to true then the container monitors business method invocations and fired events during the development mode. *

    * NOTE: This config property should not be changed in the development mode as it requires a full rebuild of the application */ - @ConfigItem(defaultValue = "false") - public boolean monitoringEnabled; + @WithDefault("false") + boolean monitoringEnabled(); /** * If set to true then the dependency graphs are generated and available in the Dev UI. */ - @ConfigItem(defaultValue = "true") - public boolean generateDependencyGraphs; + @WithDefault("true") + boolean generateDependencyGraphs(); } diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcProcessor.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcProcessor.java index a89b988cb9184..8fcf9403769c1 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcProcessor.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcProcessor.java @@ -302,8 +302,8 @@ protected DotName getDotName(BeanInfo bean) { } } // unremovable beans specified in application.properties - if (arcConfig.unremovableTypes.isPresent()) { - List> classPredicates = initClassPredicates(arcConfig.unremovableTypes.get()); + if (arcConfig.unremovableTypes().isPresent()) { + List> classPredicates = initClassPredicates(arcConfig.unremovableTypes().get()); builder.addRemovalExclusion(new Predicate() { @Override public boolean test(BeanInfo beanInfo) { @@ -328,17 +328,17 @@ public boolean test(BeanInfo bean) { } }); } - builder.setTransformUnproxyableClasses(arcConfig.transformUnproxyableClasses); - builder.setTransformPrivateInjectedFields(arcConfig.transformPrivateInjectedFields); - builder.setFailOnInterceptedPrivateMethod(arcConfig.failOnInterceptedPrivateMethod); + builder.setTransformUnproxyableClasses(arcConfig.transformUnproxyableClasses()); + builder.setTransformPrivateInjectedFields(arcConfig.transformPrivateInjectedFields()); + builder.setFailOnInterceptedPrivateMethod(arcConfig.failOnInterceptedPrivateMethod()); builder.setJtaCapabilities(capabilities.isPresent(Capability.TRANSACTIONS)); builder.setGenerateSources(BootstrapDebug.debugSourcesDir() != null); builder.setAllowMocking(launchModeBuildItem.getLaunchMode() == LaunchMode.TEST); - builder.setStrictCompatibility(arcConfig.strictCompatibility); + builder.setStrictCompatibility(arcConfig.strictCompatibility()); - if (arcConfig.selectedAlternatives.isPresent()) { + if (arcConfig.selectedAlternatives().isPresent()) { final List> selectedAlternatives = initClassPredicates( - arcConfig.selectedAlternatives.get()); + arcConfig.selectedAlternatives().get()); builder.setAlternativePriorities(new AlternativePriorities() { @Override @@ -372,9 +372,9 @@ public Integer compute(AnnotationTarget target, Collection stere }); } - if (arcConfig.excludeTypes.isPresent()) { + if (arcConfig.excludeTypes().isPresent()) { for (Predicate predicate : initClassPredicates( - arcConfig.excludeTypes.get())) { + arcConfig.excludeTypes().get())) { builder.addExcludeType(predicate); } } @@ -396,7 +396,7 @@ public Integer compute(AnnotationTarget target, Collection stere builder.setOptimizeContexts(new Predicate() { @Override public boolean test(BeanDeployment deployment) { - switch (arcConfig.optimizeContexts) { + switch (arcConfig.optimizeContexts()) { case TRUE: return true; case FALSE: @@ -406,7 +406,7 @@ public boolean test(BeanDeployment deployment) { // Note that removed beans are excluded return deployment.getBeans().size() < 1000; default: - throw new IllegalArgumentException("Unexpected value: " + arcConfig.optimizeContexts); + 
throw new IllegalArgumentException("Unexpected value: " + arcConfig.optimizeContexts()); } } }); @@ -573,7 +573,7 @@ public void registerSubclass(DotName beanClassName, String subclassName) { } }, existingClasses.existingClasses, bytecodeTransformerConsumer, - config.shouldEnableBeanRemoval() && config.detectUnusedFalsePositives, executor); + config.shouldEnableBeanRemoval() && config.detectUnusedFalsePositives(), executor); for (ResourceOutput.Resource resource : resources) { switch (resource.getType()) { @@ -623,7 +623,7 @@ public ArcContainerBuildItem initializeContainer(ArcConfig config, ArcRecorder r throws Exception { ArcContainer container = recorder.initContainer(shutdown, currentContextFactory.isPresent() ? currentContextFactory.get().getFactory() : null, - config.strictCompatibility); + config.strictCompatibility()); return new ArcContainerBuildItem(container); } @@ -757,7 +757,7 @@ void validateAsyncObserverExceptionHandlers(ValidationPhaseBuildItem validationP @BuildStep void registerContextPropagation(ArcConfig config, BuildProducer threadContextProvider) { - if (config.contextPropagation.enabled) { + if (config.contextPropagation().enabled()) { threadContextProvider.produce(new ThreadContextProviderBuildItem(ArcContextProvider.class)); } } diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcTestConfig.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcTestConfig.java index 3973f0c061e57..b0053c682ef1e 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcTestConfig.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/ArcTestConfig.java @@ -1,16 +1,16 @@ package io.quarkus.arc.deployment; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; +import io.smallrye.config.WithDefault; @ConfigGroup -public class ArcTestConfig { +public interface ArcTestConfig { /** * If set to true then disable {@code StartupEvent} and {@code ShutdownEvent} observers declared on application bean classes * during the tests. 
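As context for the ArcTestConfig hunk just above: the flag it declares (exposed as quarkus.arc.test.disable-application-lifecycle-observers, assuming the usual quarkus.arc.test prefix) suppresses StartupEvent and ShutdownEvent observers declared on application beans while tests run. A hypothetical bean of the kind affected, shown only for illustration and not taken from this PR:

package org.acme;

import io.quarkus.runtime.ShutdownEvent;
import io.quarkus.runtime.StartupEvent;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.enterprise.event.Observes;

@ApplicationScoped
public class AppLifecycle {

    void onStart(@Observes StartupEvent ev) {
        // normally runs at application start; skipped in tests when the flag is true
    }

    void onStop(@Observes ShutdownEvent ev) {
        // normally runs at application shutdown; likewise skipped
    }
}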
*/ - @ConfigItem(defaultValue = "false") - public boolean disableApplicationLifecycleObservers; + @WithDefault("false") + boolean disableApplicationLifecycleObservers(); } diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/AutoInjectFieldProcessor.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/AutoInjectFieldProcessor.java index 2da0e5b6d9c28..cafa155c711fd 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/AutoInjectFieldProcessor.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/AutoInjectFieldProcessor.java @@ -44,7 +44,7 @@ void autoInjectQualifiers(BeanArchiveIndexBuildItem beanArchiveIndex, @BuildStep void annotationTransformer(ArcConfig config, List autoInjectAnnotations, BuildProducer annotationsTransformer) throws Exception { - if (!config.autoInjectFields) { + if (!config.autoInjectFields()) { return; } List annotationNames = new ArrayList<>(); diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/AutoProducerMethodsProcessor.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/AutoProducerMethodsProcessor.java index 6baf5c6ab4d2d..569cdfb0af784 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/AutoProducerMethodsProcessor.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/AutoProducerMethodsProcessor.java @@ -34,7 +34,7 @@ void annotationTransformer(ArcConfig config, BeanArchiveIndexBuildItem beanArchi CustomScopeAnnotationsBuildItem scopes, List stereotypeRegistrars, BuildProducer annotationsTransformer) throws Exception { - if (!config.autoProducerMethods) { + if (!config.autoProducerMethods()) { return; } Set qualifiersAndStereotypes = new HashSet<>(); diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/BeanArchiveProcessor.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/BeanArchiveProcessor.java index 7f0b09a4b42ad..c1eb8187624dc 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/BeanArchiveProcessor.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/BeanArchiveProcessor.java @@ -95,12 +95,13 @@ public void transform(TransformationContext ctx) { knownMissingClasses, Thread.currentThread().getContextClassLoader()); } Set generatedClassNames = new HashSet<>(); - for (GeneratedBeanBuildItem generatedBeanClass : generatedBeans) { - IndexingUtil.indexClass(generatedBeanClass.getName(), additionalBeanIndexer, applicationIndex, additionalIndex, - knownMissingClasses, Thread.currentThread().getContextClassLoader(), generatedBeanClass.getData()); - generatedClassNames.add(DotName.createSimple(generatedBeanClass.getName().replace('/', '.'))); - generatedClass.produce(new GeneratedClassBuildItem(true, generatedBeanClass.getName(), generatedBeanClass.getData(), - generatedBeanClass.getSource())); + for (GeneratedBeanBuildItem generatedBean : generatedBeans) { + IndexingUtil.indexClass(generatedBean.getName(), additionalBeanIndexer, applicationIndex, additionalIndex, + knownMissingClasses, Thread.currentThread().getContextClassLoader(), generatedBean.getData()); + generatedClassNames.add(DotName.createSimple(generatedBean.getName().replace('/', '.'))); + generatedClass.produce(new GeneratedClassBuildItem(generatedBean.isApplicationClass(), generatedBean.getName(), + generatedBean.getData(), + generatedBean.getSource())); } PersistentClassIndex index = 
liveReloadBuildItem.getContextObject(PersistentClassIndex.class); @@ -154,7 +155,7 @@ private IndexView buildApplicationIndex(ArcConfig config, ApplicationArchivesBui beanDefiningAnnotations.add(DotNames.QUALIFIER); beanDefiningAnnotations.add(DotNames.INTERCEPTOR_BINDING); - boolean rootIsAlwaysBeanArchive = !config.strictCompatibility; + boolean rootIsAlwaysBeanArchive = !config.strictCompatibility(); Collection candidateArchives = applicationArchivesBuildItem.getApplicationArchives(); if (!rootIsAlwaysBeanArchive) { candidateArchives = new ArrayList<>(candidateArchives); @@ -249,9 +250,9 @@ private boolean isApplicationArchiveExcluded(ArcConfig config, List parameterAnnotations = getParameterAnnotations( - validationPhase.getBeanProcessor().getBeanDeployment(), - target.asMethodParameter().method(), methodParameterInfo.position()); - mapping = Annotations.find(parameterAnnotations, CONFIG_MAPPING_NAME); + + // target can be null for synthetic injection point + if (target != null) { + if (target.kind().equals(FIELD)) { + mapping = target.asField().annotation(CONFIG_MAPPING_NAME); + } else if (target.kind().equals(METHOD_PARAMETER)) { + MethodParameterInfo methodParameterInfo = target.asMethodParameter(); + if (methodParameterInfo.type().name().equals(type.name())) { + Set parameterAnnotations = getParameterAnnotations( + validationPhase.getBeanProcessor().getBeanDeployment(), + target.asMethodParameter().method(), methodParameterInfo.position()); + mapping = Annotations.find(parameterAnnotations, CONFIG_MAPPING_NAME); + } } } diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/GeneratedBeanBuildItem.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/GeneratedBeanBuildItem.java index 03bc7572c4edf..4908805c2d32d 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/GeneratedBeanBuildItem.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/GeneratedBeanBuildItem.java @@ -8,6 +8,7 @@ */ public final class GeneratedBeanBuildItem extends MultiBuildItem { + private final boolean applicationClass; private final String name; private final byte[] data; private final String source; @@ -17,9 +18,14 @@ public GeneratedBeanBuildItem(String name, byte[] data) { } public GeneratedBeanBuildItem(String name, byte[] data, String source) { + this(name, data, source, true); + } + + public GeneratedBeanBuildItem(String name, byte[] data, String source, boolean applicationClass) { this.name = name; this.data = data; this.source = source; + this.applicationClass = applicationClass; } public String getName() { @@ -38,4 +44,8 @@ public String getSource() { return source; } + public boolean isApplicationClass() { + return applicationClass; + } + } diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/GeneratedBeanGizmoAdaptor.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/GeneratedBeanGizmoAdaptor.java index 6adbd0d320685..211cfd188267e 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/GeneratedBeanGizmoAdaptor.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/GeneratedBeanGizmoAdaptor.java @@ -4,6 +4,7 @@ import java.io.Writer; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; +import java.util.function.Predicate; import io.quarkus.bootstrap.BootstrapDebug; import io.quarkus.deployment.annotations.BuildProducer; @@ -13,10 +14,23 @@ public class GeneratedBeanGizmoAdaptor implements 
ClassOutput { private final BuildProducer classOutput; private final Map sources; + private final Predicate applicationClassPredicate; public GeneratedBeanGizmoAdaptor(BuildProducer classOutput) { + this(classOutput, new Predicate() { + + @Override + public boolean test(String t) { + return true; + } + }); + } + + public GeneratedBeanGizmoAdaptor(BuildProducer classOutput, + Predicate applicationClassPredicate) { this.classOutput = classOutput; this.sources = BootstrapDebug.debugSourcesDir() != null ? new ConcurrentHashMap<>() : null; + this.applicationClassPredicate = applicationClassPredicate; } @Override @@ -28,7 +42,7 @@ public void write(String className, byte[] bytes) { source = sw.toString(); } } - classOutput.produce(new GeneratedBeanBuildItem(className, bytes, source)); + classOutput.produce(new GeneratedBeanBuildItem(className, bytes, source, applicationClassPredicate.test(className))); } @Override diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/LifecycleEventsBuildStep.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/LifecycleEventsBuildStep.java index 7ff5ed89ca6f5..19405dea4c4dd 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/LifecycleEventsBuildStep.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/LifecycleEventsBuildStep.java @@ -24,7 +24,8 @@ ApplicationStartBuildItem startupEvent(ArcRecorder recorder, BeanContainerBuildItem beanContainer, ShutdownContextBuildItem shutdown, LaunchModeBuildItem launchMode, ArcConfig config) { - recorder.handleLifecycleEvents(shutdown, launchMode.getLaunchMode(), config.test.disableApplicationLifecycleObservers); + recorder.handleLifecycleEvents(shutdown, launchMode.getLaunchMode(), + config.test().disableApplicationLifecycleObservers()); return new ApplicationStartBuildItem(); } diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/OpenTelemetrySdkBuildItem.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/OpenTelemetrySdkBuildItem.java new file mode 100644 index 0000000000000..86c0ccdd3aa63 --- /dev/null +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/OpenTelemetrySdkBuildItem.java @@ -0,0 +1,27 @@ +package io.quarkus.arc.deployment; + +import java.util.Optional; + +import io.quarkus.builder.item.SimpleBuildItem; +import io.quarkus.runtime.RuntimeValue; + +public final class OpenTelemetrySdkBuildItem extends SimpleBuildItem { + + private final RuntimeValue runtimeEnabled; + + public OpenTelemetrySdkBuildItem(RuntimeValue sdkEnabled) { + this.runtimeEnabled = sdkEnabled; + } + + /** + * True if the OpenTelemetry SDK is enabled at build and runtime. + */ + public RuntimeValue isRuntimeEnabled() { + return runtimeEnabled; + } + + public static Optional> isOtelSdkEnabled(Optional buildItem) { + // optional is empty if the extension is disabled at build time + return buildItem.isPresent() ? 
Optional.of(buildItem.get().isRuntimeEnabled()) : Optional.empty(); + } +} diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/SplitPackageProcessor.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/SplitPackageProcessor.java index f68b4256900b6..8cca3db1d208b 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/SplitPackageProcessor.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/SplitPackageProcessor.java @@ -59,8 +59,8 @@ void splitPackageDetection(ApplicationArchivesBuildItem archivesBuildItem, // build up exclusion predicates from user defined config and extensions List> packageSkipPredicates = new ArrayList<>(); - if (config.ignoredSplitPackages.isPresent()) { - packageSkipPredicates.addAll(initPredicates(config.ignoredSplitPackages.get())); + if (config.ignoredSplitPackages().isPresent()) { + packageSkipPredicates.addAll(initPredicates(config.ignoredSplitPackages().get())); } for (IgnoreSplitPackageBuildItem exclusionBuildItem : excludedPackages) { packageSkipPredicates.addAll(initPredicates(exclusionBuildItem.getExcludedPackages())); diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/WrongAnnotationUsageProcessor.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/WrongAnnotationUsageProcessor.java index 9819e72f124d7..9e3e34f565fba 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/WrongAnnotationUsageProcessor.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/WrongAnnotationUsageProcessor.java @@ -31,7 +31,7 @@ void detect(ArcConfig config, ApplicationIndexBuildItem applicationIndex, Custom TransformedAnnotationsBuildItem transformedAnnotations, BuildProducer validationErrors, InterceptorResolverBuildItem interceptorResolverBuildItem) { - if (!config.detectWrongAnnotations) { + if (!config.detectWrongAnnotations()) { return; } diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/devui/ArcDevModeApiProcessor.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/devui/ArcDevModeApiProcessor.java index 894aa9827118a..2eccacdf1cdcd 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/devui/ArcDevModeApiProcessor.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/devui/ArcDevModeApiProcessor.java @@ -65,7 +65,7 @@ public void collectBeanInfo(ArcConfig config, ValidationPhaseBuildItem validatio // Build dependency graphs Map> beanDependenciesMap = new HashMap<>(); - if (config.devMode.generateDependencyGraphs) { + if (config.devMode().generateDependencyGraphs()) { BeanResolver resolver = validationPhaseBuildItem.getBeanResolver(); Collection beans = validationContext.get(BuildExtension.Key.BEANS); Map> directDependents = new HashMap<>(); diff --git a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/devui/ArcDevUIProcessor.java b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/devui/ArcDevUIProcessor.java index fe0b50353eb3e..791b946ab5abf 100644 --- a/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/devui/ArcDevUIProcessor.java +++ b/extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/devui/ArcDevUIProcessor.java @@ -77,7 +77,7 @@ public CardPageBuildItem pages(ArcBeanInfoBuildItem arcBeanInfoBuildItem, ArcCon pageBuildItem.addBuildTimeData(DECORATORS, decorators); } - if (config.devMode.monitoringEnabled) { + if 
(config.devMode().monitoringEnabled()) { pageBuildItem.addPage(Page.webComponentPageBuilder() .icon("font-awesome-solid:fire") .componentLink("qwc-arc-fired-events.js")); @@ -111,10 +111,10 @@ JsonRPCProvidersBuildItem createJsonRPCService() { void registerMonitoringComponents(ArcConfig config, BuildProducer beans, BuildProducer annotationTransformers, CustomScopeAnnotationsBuildItem customScopes, List beanDefiningAnnotations) { - if (!config.devMode.monitoringEnabled) { + if (!config.devMode().monitoringEnabled()) { return; } - if (!config.transformUnproxyableClasses) { + if (!config.transformUnproxyableClasses()) { throw new IllegalStateException( "Dev UI problem: monitoring of CDI business method invocations not possible\n\t- quarkus.arc.transform-unproxyable-classes was set to false and therefore it would not be possible to apply interceptors to unproxyable bean classes\n\t- please disable the monitoring feature via quarkus.arc.dev-mode.monitoring-enabled=false or enable unproxyable classes transformation"); } diff --git a/extensions/arc/deployment/src/main/resources/dev-ui/qwc-arc-beans.js b/extensions/arc/deployment/src/main/resources/dev-ui/qwc-arc-beans.js index 5332603b82a6c..c713557e9488d 100644 --- a/extensions/arc/deployment/src/main/resources/dev-ui/qwc-arc-beans.js +++ b/extensions/arc/deployment/src/main/resources/dev-ui/qwc-arc-beans.js @@ -18,6 +18,12 @@ export class QwcArcBeans extends LitElement { jsonRpc = new JsonRpc(this); static styles = css` + :host { + display: flex; + flex-direction: column; + height: 100%; + } + .arctable { height: 100%; padding-bottom: 10px; @@ -46,10 +52,15 @@ export class QwcArcBeans extends LitElement { overflow: hidden; height: 100%; } + .filterBar { + width: 99%; + margin-left: 5px; + } `; static properties = { _beans: {state: true}, + _filteredBeans: {state: true}, _beanIdsWithDependencyGraphs: {state: true}, _selectedBean: {state: true} }; @@ -57,6 +68,7 @@ export class QwcArcBeans extends LitElement { constructor() { super(); this._beans = beans; + this._filteredBeans = this._beans; this._beanIdsWithDependencyGraphs = beanIdsWithDependencyGraphs; this._selectedBean = null; } @@ -74,11 +86,12 @@ export class QwcArcBeans extends LitElement { newBeans.push(bean); } this._beans = newBeans; + this._filteredBeans = this._beans; }); } render() { - if (this._beans) { + if (this._filteredBeans) { if (this._selectedBean) { return this._renderBeanGraph(); } else { @@ -89,8 +102,35 @@ export class QwcArcBeans extends LitElement { } } + _renderFilterBar(){ + return html` + + `; + } + _renderBeanList(){ - return html` + return html`${this._renderFilterBar()} + + if(this._filteredInterceptors){ + return html`${this._renderFilterBar()} + + + `; + } + _classRenderer(bean){ return html` ${bean.interceptorClass.name} diff --git a/extensions/arc/deployment/src/main/resources/dev-ui/qwc-arc-observers.js b/extensions/arc/deployment/src/main/resources/dev-ui/qwc-arc-observers.js index f66a143e44379..600fe84f15db8 100644 --- a/extensions/arc/deployment/src/main/resources/dev-ui/qwc-arc-observers.js +++ b/extensions/arc/deployment/src/main/resources/dev-ui/qwc-arc-observers.js @@ -13,6 +13,11 @@ import 'qui-ide-link'; export class QwcArcObservers extends LitElement { static styles = css` + :host { + display: flex; + flex-direction: column; + height: 100%; + } .arctable { height: 100%; padding-bottom: 10px; @@ -33,22 +38,28 @@ export class QwcArcObservers extends LitElement { .annotation { color: var(--lumo-contrast-50pct); } + .filterBar { + width: 99%; + 
margin-left: 5px; + } `; static properties = { - _observers: {attribute: false} + _observers: {attribute: false}, + _filteredObservers: {state: true} }; constructor() { super(); this._observers = observers; + this._filteredObservers = this._observers; } render() { - if(this._observers){ + if(this._filteredObservers){ - return html` - + return html`${this._renderFilterBar()} + + + `; + } + _sourceRenderer(bean){ return html`${bean.declaringClass.name}#${bean.methodName}()`; } diff --git a/extensions/arc/deployment/src/main/resources/dev-ui/qwc-arc-removed-components.js b/extensions/arc/deployment/src/main/resources/dev-ui/qwc-arc-removed-components.js index f08363fa1a034..548bf0c7a6df5 100644 --- a/extensions/arc/deployment/src/main/resources/dev-ui/qwc-arc-removed-components.js +++ b/extensions/arc/deployment/src/main/resources/dev-ui/qwc-arc-removed-components.js @@ -18,32 +18,48 @@ import 'qui-ide-link'; export class QwcArcRemovedComponents extends LitElement { static styles = css` .fullHeight { - height: 100%; + height: 100%; } + + .searchableGrid { + display: flex; + flex-direction: column; + height: 100%; + } + code { - font-size: 85%; + font-size: 85%; } .annotation { - color: var(--lumo-contrast-50pct); + color: var(--lumo-contrast-50pct); } .producer { - color: var(--lumo-primary-text-color); + color: var(--lumo-primary-text-color); + } + + .filterBar { + width: 99%; + margin-left: 5px; } `; static properties = { _removedBeans: {state: true}, + _filteredRemovedBeans: {state: true}, _removedDecorators: {state: true}, _removedInterceptors: {state: true}, + _filteredRemovedInterceptors: {state: true}, }; constructor() { super(); this._removedBeans = removedBeans; + this._filteredRemovedBeans = this._removedBeans; this._removedDecorators = removedDecorators; this._removedInterceptors = removedInterceptors; + this._filteredRemovedInterceptors = this._removedInterceptors; } render() { @@ -75,8 +91,8 @@ export class QwcArcRemovedComponents extends LitElement { if (this._removedBeans.length > 0) { - return html` - + return html`${this._renderFilterBar(0)} + + + `; + } + _renderRemovedDecorators(){ if (this._removedDecorators.length > 0) { return html`TODO: Not yet implemented`; @@ -106,8 +159,8 @@ export class QwcArcRemovedComponents extends LitElement { _renderRemovedInterceptors(){ if (this._removedInterceptors.length > 0) { - return html` - + return html`${this._renderFilterBar(2)} + ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/cache/runtime-spi/pom.xml b/extensions/cache/runtime-spi/pom.xml index a0c57831b3890..e71a7105c1875 100644 --- a/extensions/cache/runtime-spi/pom.xml +++ b/extensions/cache/runtime-spi/pom.xml @@ -19,27 +19,4 @@ - - - - maven-compiler-plugin - - - default-compile - - - - io.quarkus - quarkus-extension-processor - ${project.version} - - - - - - - - - - diff --git a/extensions/caffeine/runtime/src/main/java/io/quarkus/caffeine/runtime/graal/CacheConstructorsFeature.java b/extensions/caffeine/runtime/src/main/java/io/quarkus/caffeine/runtime/graal/CacheConstructorsFeature.java index e5349c7bf163a..3547ffdf00e97 100644 --- a/extensions/caffeine/runtime/src/main/java/io/quarkus/caffeine/runtime/graal/CacheConstructorsFeature.java +++ b/extensions/caffeine/runtime/src/main/java/io/quarkus/caffeine/runtime/graal/CacheConstructorsFeature.java @@ -9,10 +9,10 @@ /** * This Automatic Feature for GraalVM will register for reflection * the most commonly used cache implementations from Caffeine. 
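The Javadoc being touched here describes a GraalVM Feature that registers Caffeine cache classes for reflection only when they become reachable. A rough sketch of that idea, not the actual CacheConstructorsFeature (the class names and the single registration are illustrative; the real feature keeps a static list of cache variants):

package org.acme.graal;

import org.graalvm.nativeimage.hosted.Feature;
import org.graalvm.nativeimage.hosted.RuntimeReflection;

public class CaffeineReflectionSketch implements Feature {

    @Override
    public void beforeAnalysis(BeforeAnalysisAccess access) {
        // Do nothing unless Caffeine is on the classpath at all.
        Class<?> cacheImpl = access.findClassByName("com.github.benmanes.caffeine.cache.BoundedLocalCache");
        if (cacheImpl != null) {
            // Defer the reflection registration until analysis proves the type reachable.
            access.registerReachabilityHandler(
                    duringAnalysis -> RuntimeReflection.register(cacheImpl.getDeclaredConstructors()),
                    cacheImpl);
        }
    }
}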
- * It's implemented as an explicit @{@link Feature} rather than + * It's implemented as an explicit {@link Feature} rather than * using the Quarkus builditems because it doesn't need to be * dynamically tuned (the list is static), and to take advantage - * of the reachability information we can infer from @{@link org.graalvm.nativeimage.hosted.Feature.DuringAnalysisAccess}. + * of the reachability information we can infer from {@link org.graalvm.nativeimage.hosted.Feature.DuringAnalysisAccess}. * * This allows us to register for reflection these resources only if * Caffeine is indeed being used: only if the cache builder is reachable diff --git a/extensions/container-image/container-image-buildpack/deployment/src/main/java/io/quarkus/container/image/buildpack/deployment/BuildpackBuild.java b/extensions/container-image/container-image-buildpack/deployment/src/main/java/io/quarkus/container/image/buildpack/deployment/BuildpackBuild.java index 141303d68b293..5dc46308cb145 100644 --- a/extensions/container-image/container-image-buildpack/deployment/src/main/java/io/quarkus/container/image/buildpack/deployment/BuildpackBuild.java +++ b/extensions/container-image/container-image-buildpack/deployment/src/main/java/io/quarkus/container/image/buildpack/deployment/BuildpackBuild.java @@ -14,6 +14,6 @@ public BuildpackBuild(ContainerImageConfig containerImageConfig) { @Override public boolean getAsBoolean() { - return containerImageConfig.builder.map(b -> b.equals(BuildpackProcessor.BUILDPACK)).orElse(true); + return containerImageConfig.builder().map(b -> b.equals(BuildpackProcessor.BUILDPACK)).orElse(true); } } diff --git a/extensions/container-image/container-image-buildpack/deployment/src/main/java/io/quarkus/container/image/buildpack/deployment/BuildpackBuildEnabled.java b/extensions/container-image/container-image-buildpack/deployment/src/main/java/io/quarkus/container/image/buildpack/deployment/BuildpackBuildEnabled.java index 022181c41fb17..53162d1f5903d 100644 --- a/extensions/container-image/container-image-buildpack/deployment/src/main/java/io/quarkus/container/image/buildpack/deployment/BuildpackBuildEnabled.java +++ b/extensions/container-image/container-image-buildpack/deployment/src/main/java/io/quarkus/container/image/buildpack/deployment/BuildpackBuildEnabled.java @@ -15,6 +15,6 @@ public class BuildpackBuildEnabled implements BooleanSupplier { @Override public boolean getAsBoolean() { - return containerImageConfig.builder.map(b -> b.equals(BuildpackProcessor.BUILDPACK)).orElse(true); + return containerImageConfig.builder().map(b -> b.equals(BuildpackProcessor.BUILDPACK)).orElse(true); } } diff --git a/extensions/container-image/container-image-buildpack/deployment/src/main/java/io/quarkus/container/image/buildpack/deployment/BuildpackProcessor.java b/extensions/container-image/container-image-buildpack/deployment/src/main/java/io/quarkus/container/image/buildpack/deployment/BuildpackProcessor.java index 3a39af7458462..fbca75be5cb32 100644 --- a/extensions/container-image/container-image-buildpack/deployment/src/main/java/io/quarkus/container/image/buildpack/deployment/BuildpackProcessor.java +++ b/extensions/container-image/container-image-buildpack/deployment/src/main/java/io/quarkus/container/image/buildpack/deployment/BuildpackProcessor.java @@ -254,8 +254,8 @@ private String runBuildpackBuild(BuildpackConfig buildpackConfig, }); AuthConfig authConfig = new AuthConfig(); authConfig.withRegistryAddress(registry); - containerImageConfig.username.ifPresent(u -> 
authConfig.withUsername(u)); - containerImageConfig.password.ifPresent(p -> authConfig.withPassword(p)); + containerImageConfig.username().ifPresent(u -> authConfig.withUsername(u)); + containerImageConfig.password().ifPresent(p -> authConfig.withPassword(p)); log.info("Pushing image to " + authConfig.getRegistryAddress()); Stream.concat(Stream.of(containerImage.getImage()), containerImage.getAdditionalImageTags().stream()).forEach(i -> { diff --git a/extensions/container-image/container-image-docker-common/deployment/src/main/java/io/quarkus/container/image/docker/common/deployment/CommonProcessor.java b/extensions/container-image/container-image-docker-common/deployment/src/main/java/io/quarkus/container/image/docker/common/deployment/CommonProcessor.java index e3130f24362aa..d8c252b00bef9 100644 --- a/extensions/container-image/container-image-docker-common/deployment/src/main/java/io/quarkus/container/image/docker/common/deployment/CommonProcessor.java +++ b/extensions/container-image/container-image-docker-common/deployment/src/main/java/io/quarkus/container/image/docker/common/deployment/CommonProcessor.java @@ -163,13 +163,13 @@ protected void loginToRegistryIfNeeded(ContainerImageConfig containerImageConfig }); // Check if we need to login first - if (containerImageConfig.username.isPresent() && containerImageConfig.password.isPresent()) { - var loginSuccessful = ExecUtil.exec(executableName, "login", registry, "-u", containerImageConfig.username.get(), - "-p", containerImageConfig.password.get()); + if (containerImageConfig.username().isPresent() && containerImageConfig.password().isPresent()) { + var loginSuccessful = ExecUtil.exec(executableName, "login", registry, "-u", containerImageConfig.username().get(), + "-p", containerImageConfig.password().get()); if (!loginSuccessful) { throw containerRuntimeException(executableName, - new String[] { "-u", containerImageConfig.username.get(), "-p", "********" }); + new String[] { "-u", containerImageConfig.username().get(), "-p", "********" }); } } } @@ -184,7 +184,7 @@ protected List getContainerCommonBuildArgs(String image, args.addAll(List.of("build", "-f", dockerfilePaths.dockerfilePath().toAbsolutePath().toString())); config.buildArgs().forEach((k, v) -> args.addAll(List.of("--build-arg", "%s=%s".formatted(k, v)))); - containerImageConfig.labels.forEach((k, v) -> args.addAll(List.of("--label", "%s=%s".formatted(k, v)))); + containerImageConfig.labels().forEach((k, v) -> args.addAll(List.of("--label", "%s=%s".formatted(k, v)))); config.cacheFrom() .filter(cacheFrom -> !cacheFrom.isEmpty()) .ifPresent(cacheFrom -> args.addAll(List.of("--cache-from", String.join(",", cacheFrom)))); diff --git a/extensions/container-image/container-image-docker-common/deployment/src/test/resources/ubi8-openjdk-17-runtime b/extensions/container-image/container-image-docker-common/deployment/src/test/resources/ubi8-openjdk-17-runtime index e6463565e1482..7c3e59de33dd6 100644 --- a/extensions/container-image/container-image-docker-common/deployment/src/test/resources/ubi8-openjdk-17-runtime +++ b/extensions/container-image/container-image-docker-common/deployment/src/test/resources/ubi8-openjdk-17-runtime @@ -1,4 +1,4 @@ -FROM registry.access.redhat.com/ubi8/openjdk-17-runtime:1.20 +FROM registry.access.redhat.com/ubi8/openjdk-17-runtime:1.21 ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en' diff --git a/extensions/container-image/container-image-docker-common/deployment/src/test/resources/ubi8-openjdk-21-runtime 
b/extensions/container-image/container-image-docker-common/deployment/src/test/resources/ubi8-openjdk-21-runtime index c2dafccd00990..aedcafe942f67 100644 --- a/extensions/container-image/container-image-docker-common/deployment/src/test/resources/ubi8-openjdk-21-runtime +++ b/extensions/container-image/container-image-docker-common/deployment/src/test/resources/ubi8-openjdk-21-runtime @@ -1,5 +1,5 @@ # Use Java 21 base image -FROM registry.access.redhat.com/ubi8/openjdk-21-runtime:1.20 +FROM registry.access.redhat.com/ubi8/openjdk-21-runtime:1.21 ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en' diff --git a/extensions/container-image/container-image-docker-common/deployment/src/test/resources/ubi9-java17 b/extensions/container-image/container-image-docker-common/deployment/src/test/resources/ubi9-java17 index 7ddf7b3ee58fd..8e9ca8f2eb9d4 100644 --- a/extensions/container-image/container-image-docker-common/deployment/src/test/resources/ubi9-java17 +++ b/extensions/container-image/container-image-docker-common/deployment/src/test/resources/ubi9-java17 @@ -1,13 +1,13 @@ -FROM registry.access.redhat.com/ubi9/ubi-minimal:9.4 +FROM registry.access.redhat.com/ubi9/ubi-minimal:9.5 ARG JAVA_PACKAGE=java-17-openjdk-headless ARG RUN_JAVA_VERSION=1.3.8 ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en' # Install java and the run-java script # Also set up permissions for user `1001` -RUN microdnf install curl ca-certificates ${JAVA_PACKAGE} \ - && microdnf update \ - && microdnf clean all \ +RUN microdnf -y install ca-certificates ${JAVA_PACKAGE} \ + && microdnf -y update \ + && microdnf -y clean all \ && mkdir /deployments \ && chown 1001 /deployments \ && chmod "g+rwX" /deployments \ diff --git a/extensions/container-image/container-image-docker-common/deployment/src/test/resources/ubi9-java21 b/extensions/container-image/container-image-docker-common/deployment/src/test/resources/ubi9-java21 index b01efabe94b57..f2293c6f908e6 100644 --- a/extensions/container-image/container-image-docker-common/deployment/src/test/resources/ubi9-java21 +++ b/extensions/container-image/container-image-docker-common/deployment/src/test/resources/ubi9-java21 @@ -1,13 +1,13 @@ -FROM registry.access.redhat.com/ubi9/ubi-minimal:9.4 +FROM registry.access.redhat.com/ubi9/ubi-minimal:9.5 ARG JAVA_PACKAGE=java-21-openjdk-headless ARG RUN_JAVA_VERSION=1.3.8 ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en' # Install java and the run-java script # Also set up permissions for user `1001` -RUN microdnf install curl ca-certificates ${JAVA_PACKAGE} \ - && microdnf update \ - && microdnf clean all \ +RUN microdnf -y install ca-certificates ${JAVA_PACKAGE} \ + && microdnf -y update \ + && microdnf -y clean all \ && mkdir /deployments \ && chown 1001 /deployments \ && chmod "g+rwX" /deployments \ diff --git a/extensions/container-image/container-image-docker-common/deployment/src/test/resources/ubi9-openjdk-17-runtime b/extensions/container-image/container-image-docker-common/deployment/src/test/resources/ubi9-openjdk-17-runtime index f83ad03e05062..86305cdb37b42 100644 --- a/extensions/container-image/container-image-docker-common/deployment/src/test/resources/ubi9-openjdk-17-runtime +++ b/extensions/container-image/container-image-docker-common/deployment/src/test/resources/ubi9-openjdk-17-runtime @@ -1,4 +1,4 @@ -FROM registry.access.redhat.com/ubi9/openjdk-17-runtime:1.20 +FROM registry.access.redhat.com/ubi9/openjdk-17-runtime:1.21 ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en' diff --git 
a/extensions/container-image/container-image-docker-common/deployment/src/test/resources/ubi9-openjdk-21-runtime b/extensions/container-image/container-image-docker-common/deployment/src/test/resources/ubi9-openjdk-21-runtime index ffa2b1f33d734..6021313058b97 100644 --- a/extensions/container-image/container-image-docker-common/deployment/src/test/resources/ubi9-openjdk-21-runtime +++ b/extensions/container-image/container-image-docker-common/deployment/src/test/resources/ubi9-openjdk-21-runtime @@ -1,5 +1,5 @@ # Use Java 21 base image -FROM registry.access.redhat.com/ubi9/openjdk-21-runtime:1.20 +FROM registry.access.redhat.com/ubi9/openjdk-21-runtime:1.21 ENV LANG='en_US.UTF-8' LANGUAGE='en_US:en' diff --git a/extensions/container-image/container-image-docker/deployment/src/main/java/io/quarkus/container/image/docker/deployment/DockerBuild.java b/extensions/container-image/container-image-docker/deployment/src/main/java/io/quarkus/container/image/docker/deployment/DockerBuild.java index 7b7032c7f21ad..f1ee2de6a7d9e 100644 --- a/extensions/container-image/container-image-docker/deployment/src/main/java/io/quarkus/container/image/docker/deployment/DockerBuild.java +++ b/extensions/container-image/container-image-docker/deployment/src/main/java/io/quarkus/container/image/docker/deployment/DockerBuild.java @@ -15,6 +15,6 @@ public class DockerBuild implements BooleanSupplier { @Override public boolean getAsBoolean() { - return containerImageConfig.builder.map(b -> b.equals(DockerProcessor.DOCKER_CONTAINER_IMAGE_NAME)).orElse(true); + return containerImageConfig.builder().map(b -> b.equals(DockerProcessor.DOCKER_CONTAINER_IMAGE_NAME)).orElse(true); } } diff --git a/extensions/container-image/container-image-docker/runtime/src/main/resources/META-INF/quarkus-extension.yaml b/extensions/container-image/container-image-docker/runtime/src/main/resources/META-INF/quarkus-extension.yaml index c7737a60750eb..ab90c8b2f8006 100644 --- a/extensions/container-image/container-image-docker/runtime/src/main/resources/META-INF/quarkus-extension.yaml +++ b/extensions/container-image/container-image-docker/runtime/src/main/resources/META-INF/quarkus-extension.yaml @@ -8,6 +8,6 @@ metadata: - "image" categories: - "cloud" - status: "preview" + status: "stable" config: - - "quarkus.docker." \ No newline at end of file + - "quarkus.docker." diff --git a/extensions/container-image/container-image-jib/deployment/src/main/java/io/quarkus/container/image/jib/deployment/ContainerImageJibConfig.java b/extensions/container-image/container-image-jib/deployment/src/main/java/io/quarkus/container/image/jib/deployment/ContainerImageJibConfig.java index 8d50494d788b1..a90fda173ca89 100644 --- a/extensions/container-image/container-image-jib/deployment/src/main/java/io/quarkus/container/image/jib/deployment/ContainerImageJibConfig.java +++ b/extensions/container-image/container-image-jib/deployment/src/main/java/io/quarkus/container/image/jib/deployment/ContainerImageJibConfig.java @@ -21,16 +21,16 @@ public interface ContainerImageJibConfig { /** * The base image to be used when a container image is being produced for the jar build. * - * When the application is built against Java 21 or higher, {@code registry.access.redhat.com/ubi8/openjdk-21-runtime:1.20} + * When the application is built against Java 21 or higher, {@code registry.access.redhat.com/ubi9/openjdk-21-runtime:1.21} * is used as the default. - * Otherwise {@code registry.access.redhat.com/ubi8/openjdk-17-runtime:1.20} is used as the default. 
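Since the ContainerImageJibConfig Javadoc above now points at the ubi9 1.21 images, it is worth noting that both base images remain overridable through configuration. A minimal application.properties sketch (property names follow the quarkus.jib mapping shown here; the values simply repeat the documented defaults):

quarkus.jib.base-jvm-image=registry.access.redhat.com/ubi9/openjdk-21-runtime:1.21
quarkus.jib.base-native-image=quay.io/quarkus/ubi9-quarkus-micro-image:2.0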
+ * Otherwise {@code registry.access.redhat.com/ubi9/openjdk-17-runtime:1.21} is used as the default. */ Optional baseJvmImage(); /** * The base image to be used when a container image is being produced for the native binary build. - * The default is "quay.io/quarkus/quarkus-micro-image". You can also use - * "registry.access.redhat.com/ubi8/ubi-minimal" which is a bigger base image, but provide more built-in utilities + * The default is "quay.io/quarkus/ubi9-quarkus-micro-image:2.0". You can also use + * "registry.access.redhat.com/ubi9/ubi-minimal" which is a bigger base image, but provide more built-in utilities * such as the microdnf package manager. */ @WithDefault(ContainerImages.QUARKUS_MICRO_IMAGE) diff --git a/extensions/container-image/container-image-jib/deployment/src/main/java/io/quarkus/container/image/jib/deployment/JibBuild.java b/extensions/container-image/container-image-jib/deployment/src/main/java/io/quarkus/container/image/jib/deployment/JibBuild.java index 6e25a5c0ae0a3..c987f6c193a4f 100644 --- a/extensions/container-image/container-image-jib/deployment/src/main/java/io/quarkus/container/image/jib/deployment/JibBuild.java +++ b/extensions/container-image/container-image-jib/deployment/src/main/java/io/quarkus/container/image/jib/deployment/JibBuild.java @@ -14,6 +14,6 @@ public JibBuild(ContainerImageConfig containerImageConfig) { @Override public boolean getAsBoolean() { - return containerImageConfig.builder.map(b -> b.equals(JibProcessor.JIB)).orElse(true); + return containerImageConfig.builder().map(b -> b.equals(JibProcessor.JIB)).orElse(true); } } diff --git a/extensions/container-image/container-image-jib/deployment/src/main/java/io/quarkus/container/image/jib/deployment/JibBuildEnabled.java b/extensions/container-image/container-image-jib/deployment/src/main/java/io/quarkus/container/image/jib/deployment/JibBuildEnabled.java index 4412c5d4d4d0f..28fd4c33279f8 100644 --- a/extensions/container-image/container-image-jib/deployment/src/main/java/io/quarkus/container/image/jib/deployment/JibBuildEnabled.java +++ b/extensions/container-image/container-image-jib/deployment/src/main/java/io/quarkus/container/image/jib/deployment/JibBuildEnabled.java @@ -15,6 +15,6 @@ public class JibBuildEnabled implements BooleanSupplier { @Override public boolean getAsBoolean() { - return containerImageConfig.builder.map(b -> b.equals(JibProcessor.JIB)).orElse(true); + return containerImageConfig.builder().map(b -> b.equals(JibProcessor.JIB)).orElse(true); } } diff --git a/extensions/container-image/container-image-jib/deployment/src/main/java/io/quarkus/container/image/jib/deployment/JibProcessor.java b/extensions/container-image/container-image-jib/deployment/src/main/java/io/quarkus/container/image/jib/deployment/JibProcessor.java index 4623955e39e3d..4bb2a7505c590 100644 --- a/extensions/container-image/container-image-jib/deployment/src/main/java/io/quarkus/container/image/jib/deployment/JibProcessor.java +++ b/extensions/container-image/container-image-jib/deployment/src/main/java/io/quarkus/container/image/jib/deployment/JibProcessor.java @@ -94,8 +94,8 @@ public class JibProcessor { private static final IsClassPredicate IS_CLASS_PREDICATE = new IsClassPredicate(); private static final String BINARY_NAME_IN_CONTAINER = "application"; - // The source for this can be found at https://github.com/jboss-container-images/openjdk/blob/ubi8/modules/run/artifacts/opt/jboss/container/java/run/run-java.sh - // A list of env vars that affect this script can be found at 
https://jboss-container-images.github.io/openjdk/ubi8/ubi8-openjdk-17.html + // The source for this can be found at https://github.com/jboss-container-images/openjdk/blob/ubi9/modules/run/artifacts/opt/jboss/container/java/run/run-java.sh + // A list of env vars that affect this script can be found at https://rh-openjdk.github.io/redhat-openjdk-containers/ubi9/ubi9-openjdk-17.html private static final String RUN_JAVA_PATH = "/opt/jboss/container/java/run/run-java.sh"; private static final String DEFAULT_BASE_IMAGE_USER = "185"; @@ -268,8 +268,8 @@ private Containerizer createContainerizer(ContainerImageConfig containerImageCon if (imageReference.getRegistry() == null) { log.info("No container image registry was set, so 'docker.io' will be used"); } - RegistryImage registryImage = toRegistryImage(imageReference, containerImageConfig.username, - containerImageConfig.password); + RegistryImage registryImage = toRegistryImage(imageReference, containerImageConfig.username(), + containerImageConfig.password()); containerizer = Containerizer.to(registryImage); } else { DockerDaemonImage dockerDaemonImage = DockerDaemonImage.named(imageReference); @@ -296,7 +296,7 @@ private Containerizer createContainerizer(ContainerImageConfig containerImageCon log.log(toJBossLoggingLevel(e.getLevel()), e.getMessage()); } }); - containerizer.setAllowInsecureRegistries(containerImageConfig.insecure); + containerizer.setAllowInsecureRegistries(containerImageConfig.insecure()); containerizer.setAlwaysCacheBaseImage(jibConfig.alwaysCacheBaseImage()); containerizer.setOfflineMode(jibConfig.offlineMode()); jibConfig.baseImageLayersCache().ifPresent(cacheDir -> containerizer.setBaseImageLayersCache(Paths.get(cacheDir))); @@ -867,11 +867,11 @@ private void handleExtraFiles(OutputTargetBuildItem outputTarget, JibContainerBu private Map allLabels(ContainerImageJibConfig jibConfig, ContainerImageConfig containerImageConfig, List containerImageLabels) { - if (containerImageLabels.isEmpty() && containerImageConfig.labels.isEmpty()) { + if (containerImageLabels.isEmpty() && containerImageConfig.labels().isEmpty()) { return Collections.emptyMap(); } - final Map allLabels = new HashMap<>(containerImageConfig.labels); + final Map allLabels = new HashMap<>(containerImageConfig.labels()); for (ContainerImageLabelBuildItem containerImageLabel : containerImageLabels) { // we want the user supplied labels to take precedence so the user can override labels generated from other extensions if desired allLabels.putIfAbsent(containerImageLabel.getName(), containerImageLabel.getValue()); diff --git a/extensions/container-image/container-image-openshift/deployment/pom.xml b/extensions/container-image/container-image-openshift/deployment/pom.xml index 70ac92990e046..c0458deba5dfc 100644 --- a/extensions/container-image/container-image-openshift/deployment/pom.xml +++ b/extensions/container-image/container-image-openshift/deployment/pom.xml @@ -80,9 +80,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/container-image/container-image-openshift/deployment/src/main/java/io/quarkus/container/image/openshift/deployment/ContainerImageOpenshiftConfig.java b/extensions/container-image/container-image-openshift/deployment/src/main/java/io/quarkus/container/image/openshift/deployment/ContainerImageOpenshiftConfig.java index 46042c656cec4..a3ca8b9b08556 100644 --- a/extensions/container-image/container-image-openshift/deployment/src/main/java/io/quarkus/container/image/openshift/deployment/ContainerImageOpenshiftConfig.java 
+++ b/extensions/container-image/container-image-openshift/deployment/src/main/java/io/quarkus/container/image/openshift/deployment/ContainerImageOpenshiftConfig.java @@ -9,12 +9,14 @@ import io.quarkus.deployment.images.ContainerImages; import io.quarkus.deployment.pkg.builditem.CompiledJavaVersionBuildItem; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefault; -@ConfigRoot(name = "openshift", phase = ConfigPhase.BUILD_TIME) -public class ContainerImageOpenshiftConfig { +@ConfigRoot(phase = ConfigPhase.BUILD_TIME) +@ConfigMapping(prefix = "quarkus.openshift") +public interface ContainerImageOpenshiftConfig { public static final String DEFAULT_NATIVE_TARGET_FILENAME = "application"; @@ -36,20 +38,19 @@ public static String getDefaultJvmImage(CompiledJavaVersionBuildItem.JavaVersion /** * The build config strategy to use. */ - @ConfigItem(defaultValue = "binary") - public BuildStrategy buildStrategy; + @WithDefault("binary") + BuildStrategy buildStrategy(); /** * The base image to be used when a container image is being produced for the jar build. * The value of this property is used to create an ImageStream for the builder image used in the Openshift build. * When it references images already available in the internal Openshift registry, the corresponding streams are used * instead. - * When the application is built against Java 21 or higher, {@code registry.access.redhat.com/ubi8/openjdk-21:1.20} + * When the application is built against Java 21 or higher, {@code registry.access.redhat.com/ubi9/openjdk-21:1.21} * is used as the default. - * Otherwise {@code registry.access.redhat.com/ubi8/openjdk-17:1.20} is used as the default. + * Otherwise {@code registry.access.redhat.com/ubi9/openjdk-17:1.21} is used as the default. */ - @ConfigItem - public Optional baseJvmImage; + Optional baseJvmImage(); /** * The base image to be used when a container image is being produced for the native binary build. @@ -57,87 +58,80 @@ public static String getDefaultJvmImage(CompiledJavaVersionBuildItem.JavaVersion * When it references images already available in the internal Openshift registry, the corresponding streams are used * instead. */ - @ConfigItem(defaultValue = ContainerImages.QUARKUS_BINARY_S2I) - public String baseNativeImage; + @WithDefault(ContainerImages.QUARKUS_BINARY_S2I) + String baseNativeImage(); /** * The default Dockerfile to use for jvm builds */ - @ConfigItem(defaultValue = DEFAULT_JVM_DOCKERFILE) - public String jvmDockerfile; + @WithDefault(DEFAULT_JVM_DOCKERFILE) + String jvmDockerfile(); /** * The default Dockerfile to use for native builds */ - @ConfigItem(defaultValue = DEFAULT_NATIVE_DOCKERFILE) - public String nativeDockerfile; + @WithDefault(DEFAULT_NATIVE_DOCKERFILE) + String nativeDockerfile(); /** * The JVM arguments to pass to the JVM when starting the application */ - @ConfigItem - public Optional> jvmArguments; + Optional> jvmArguments(); /** * Additional arguments to pass when starting the native application */ - @ConfigItem - public Optional> nativeArguments; + Optional> nativeArguments(); /** * The directory where the jar is added during the assemble phase. * This is dependent on the S2I image and should be supplied if a non default image is used. */ - @ConfigItem - public Optional jarDirectory; + Optional jarDirectory(); /** * The resulting filename of the jar in the S2I image. 
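Beyond the plain field-to-method conversion, this ContainerImageOpenshiftConfig hunk also shows how the interface style handles a config root prefix, enum and Duration defaults, and the replacement of final helper methods with default methods. A compact sketch under assumed names (quarkus.example, ExampleBuildConfig, and Mode are illustrative, not part of the PR):

package org.acme.deployment;

import java.time.Duration;

import io.quarkus.runtime.annotations.ConfigPhase;
import io.quarkus.runtime.annotations.ConfigRoot;
import io.smallrye.config.ConfigMapping;
import io.smallrye.config.WithDefault;

@ConfigRoot(phase = ConfigPhase.BUILD_TIME)
@ConfigMapping(prefix = "quarkus.example")
public interface ExampleBuildConfig {

    enum Mode {
        BINARY,
        DOCKER
    }

    // Enum defaults are written as the string form of the constant.
    @WithDefault("binary")
    Mode mode();

    // Duration defaults use ISO-8601 strings, mirroring buildTimeout's PT5M above.
    @WithDefault("PT5M")
    Duration timeout();

    // "public final" convenience methods on the old class become default methods here.
    default boolean isBinaryBuild() {
        return mode() == Mode.BINARY;
    }
}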
* This option may be used if the selected S2I image uses a fixed name for the jar. */ - @ConfigItem - public Optional jarFileName; + Optional jarFileName(); /** * The directory where the native binary is added during the assemble phase. * This is dependent on the S2I image and should be supplied if a non-default image is used. */ - @ConfigItem - public Optional nativeBinaryDirectory; + Optional nativeBinaryDirectory(); /** * The resulting filename of the native binary in the S2I image. * This option may be used if the selected S2I image uses a fixed name for the native binary. */ - @ConfigItem - public Optional nativeBinaryFileName; + Optional nativeBinaryFileName(); /** * The build timeout. */ - @ConfigItem(defaultValue = "PT5M") - Duration buildTimeout; + @WithDefault("PT5M") + Duration buildTimeout(); /** * The log level of OpenShift build log. */ - @ConfigItem(defaultValue = DEFAULT_BUILD_LOG_LEVEL) - public Logger.Level buildLogLevel; + @WithDefault(DEFAULT_BUILD_LOG_LEVEL) + Logger.Level buildLogLevel(); /** * The image push secret to use for pushing to external registries. * (see: https://cloud.redhat.com/blog/pushing-application-images-to-an-external-registry) **/ - @ConfigItem - public Optional imagePushSecret; + Optional imagePushSecret(); /** * Check if baseJvmImage is the default * * @returns true if baseJvmImage is the default */ - public boolean hasDefaultBaseJvmImage() { - return baseJvmImage.isPresent(); + default boolean hasDefaultBaseJvmImage() { + return baseJvmImage().isPresent(); } /** @@ -145,8 +139,8 @@ public boolean hasDefaultBaseJvmImage() { * * @returns true if baseNativeImage is the default */ - public boolean hasDefaultBaseNativeImage() { - return baseNativeImage.equals(ContainerImages.QUARKUS_BINARY_S2I); + default boolean hasDefaultBaseNativeImage() { + return baseNativeImage().equals(ContainerImages.QUARKUS_BINARY_S2I); } /** @@ -154,8 +148,8 @@ public boolean hasDefaultBaseNativeImage() { * * @returns true if jvmDockerfile is the default */ - public boolean hasDefaultJvmDockerfile() { - return jvmDockerfile.equals(DEFAULT_JVM_DOCKERFILE); + default boolean hasDefaultJvmDockerfile() { + return jvmDockerfile().equals(DEFAULT_JVM_DOCKERFILE); } /** @@ -163,16 +157,16 @@ public boolean hasDefaultJvmDockerfile() { * * @returns true if nativeDockerfile is the default */ - public boolean hasDefaultNativeDockerfile() { - return nativeDockerfile.equals(DEFAULT_NATIVE_DOCKERFILE); + default boolean hasDefaultNativeDockerfile() { + return nativeDockerfile().equals(DEFAULT_NATIVE_DOCKERFILE); } /** * @return the effective JVM arguments to use by getting the jvmArguments and the jvmAdditionalArguments properties. 
*/ - public List getEffectiveJvmArguments() { + default List getEffectiveJvmArguments() { List effectiveJvmArguments = new ArrayList<>(); - jvmArguments.ifPresent(effectiveJvmArguments::addAll); + jvmArguments().ifPresent(effectiveJvmArguments::addAll); return effectiveJvmArguments; } diff --git a/extensions/container-image/container-image-openshift/deployment/src/main/java/io/quarkus/container/image/openshift/deployment/OpenshiftBuild.java b/extensions/container-image/container-image-openshift/deployment/src/main/java/io/quarkus/container/image/openshift/deployment/OpenshiftBuild.java index 32ce09e05fcff..715790a1e7506 100644 --- a/extensions/container-image/container-image-openshift/deployment/src/main/java/io/quarkus/container/image/openshift/deployment/OpenshiftBuild.java +++ b/extensions/container-image/container-image-openshift/deployment/src/main/java/io/quarkus/container/image/openshift/deployment/OpenshiftBuild.java @@ -14,6 +14,6 @@ public class OpenshiftBuild implements BooleanSupplier { @Override public boolean getAsBoolean() { - return containerImageConfig.builder.map(b -> b.equals(OpenshiftProcessor.OPENSHIFT)).orElse(true); + return containerImageConfig.builder().map(b -> b.equals(OpenshiftProcessor.OPENSHIFT)).orElse(true); } } diff --git a/extensions/container-image/container-image-openshift/deployment/src/main/java/io/quarkus/container/image/openshift/deployment/OpenshiftProcessor.java b/extensions/container-image/container-image-openshift/deployment/src/main/java/io/quarkus/container/image/openshift/deployment/OpenshiftProcessor.java index 37789fe8a9c20..533a8c09b8cab 100644 --- a/extensions/container-image/container-image-openshift/deployment/src/main/java/io/quarkus/container/image/openshift/deployment/OpenshiftProcessor.java +++ b/extensions/container-image/container-image-openshift/deployment/src/main/java/io/quarkus/container/image/openshift/deployment/OpenshiftProcessor.java @@ -2,9 +2,9 @@ import static io.quarkus.container.image.openshift.deployment.OpenshiftUtils.getDeployStrategy; import static io.quarkus.container.image.openshift.deployment.OpenshiftUtils.getNamespace; -import static io.quarkus.container.image.openshift.deployment.OpenshiftUtils.mergeConfig; import static io.quarkus.container.util.PathsUtil.findMainSourcesRoot; -import static io.quarkus.deployment.pkg.PackageConfig.JarConfig.JarType.*; +import static io.quarkus.deployment.pkg.PackageConfig.JarConfig.JarType.FAST_JAR; +import static io.quarkus.deployment.pkg.PackageConfig.JarConfig.JarType.MUTABLE_JAR; import static io.quarkus.deployment.pkg.steps.JarResultBuildStep.DEFAULT_FAST_JAR_DIRECTORY_NAME; import java.io.BufferedReader; @@ -96,14 +96,12 @@ public AvailableContainerImageExtensionBuildItem availability() { } @BuildStep(onlyIf = { OpenshiftBuild.class }, onlyIfNot = NativeBuild.class) - public void openshiftPrepareJvmDockerBuild(ContainerImageOpenshiftConfig openshiftConfig, - S2iConfig s2iConfig, + public void openshiftPrepareJvmDockerBuild(ContainerImageOpenshiftConfig config, OutputTargetBuildItem out, BuildProducer decorator) { - ContainerImageOpenshiftConfig config = mergeConfig(openshiftConfig, s2iConfig); - if (config.buildStrategy == BuildStrategy.DOCKER) { + if (config.buildStrategy() == BuildStrategy.DOCKER) { decorator.produce(new DecoratorBuildItem(new ApplyDockerfileToBuildConfigDecorator(null, - findMainSourcesRoot(out.getOutputDirectory()).getValue().resolve(openshiftConfig.jvmDockerfile)))); + 
findMainSourcesRoot(out.getOutputDirectory()).getValue().resolve(config.jvmDockerfile())))); //When using the docker build strategy, we can't possibly know these values, so it's the image responsibility to work without them. decorator.produce(new DecoratorBuildItem(new RemoveEnvVarDecorator(null, "JAVA_APP_JAR"))); decorator.produce(new DecoratorBuildItem(new RemoveEnvVarDecorator(null, "JAVA_APP_LIB"))); @@ -111,14 +109,12 @@ public void openshiftPrepareJvmDockerBuild(ContainerImageOpenshiftConfig openshi } @BuildStep(onlyIf = { OpenshiftBuild.class, NativeBuild.class }) - public void openshiftPrepareNativeDockerBuild(ContainerImageOpenshiftConfig openshiftConfig, - S2iConfig s2iConfig, + public void openshiftPrepareNativeDockerBuild(ContainerImageOpenshiftConfig config, OutputTargetBuildItem out, BuildProducer decorator) { - ContainerImageOpenshiftConfig config = mergeConfig(openshiftConfig, s2iConfig); - if (config.buildStrategy == BuildStrategy.DOCKER) { + if (config.buildStrategy() == BuildStrategy.DOCKER) { decorator.produce(new DecoratorBuildItem(new ApplyDockerfileToBuildConfigDecorator(null, - findMainSourcesRoot(out.getOutputDirectory()).getValue().resolve(openshiftConfig.nativeDockerfile)))); + findMainSourcesRoot(out.getOutputDirectory()).getValue().resolve(config.nativeDockerfile())))); } //Let's remove this for all kinds of native build decorator.produce(new DecoratorBuildItem(new RemoveEnvVarDecorator(null, "JAVA_APP_JAR"))); @@ -126,8 +122,7 @@ public void openshiftPrepareNativeDockerBuild(ContainerImageOpenshiftConfig open } @BuildStep(onlyIf = { IsNormalNotRemoteDev.class, OpenshiftBuild.class }, onlyIfNot = NativeBuild.class) - public void openshiftRequirementsJvm(ContainerImageOpenshiftConfig openshiftConfig, - S2iConfig s2iConfig, + public void openshiftRequirementsJvm(ContainerImageOpenshiftConfig config, CurateOutcomeBuildItem curateOutcomeBuildItem, OutputTargetBuildItem out, PackageConfig packageConfig, @@ -138,22 +133,21 @@ public void openshiftRequirementsJvm(ContainerImageOpenshiftConfig openshiftConf BuildProducer builderImageProducer, BuildProducer commandProducer) { - ContainerImageOpenshiftConfig config = mergeConfig(openshiftConfig, s2iConfig); String outputJarFileName = jarBuildItem.getPath().getFileName().toString(); - String jarFileName = config.jarFileName.orElse(outputJarFileName); - String baseJvmImage = config.baseJvmImage + String jarFileName = config.jarFileName().orElse(outputJarFileName); + String baseJvmImage = config.baseJvmImage() .orElse(ContainerImageOpenshiftConfig.getDefaultJvmImage(compiledJavaVersion.getJavaVersion())); - boolean hasCustomJarPath = config.jarFileName.isPresent() || config.jarDirectory.isPresent(); - boolean hasCustomJvmArguments = config.jvmArguments.isPresent(); + boolean hasCustomJarPath = config.jarFileName().isPresent() || config.jarDirectory().isPresent(); + boolean hasCustomJvmArguments = config.jvmArguments().isPresent(); builderImageProducer.produce(new BaseImageInfoBuildItem(baseJvmImage)); - if (config.buildStrategy == BuildStrategy.BINARY) { + if (config.buildStrategy() == BuildStrategy.BINARY) { // Jar directory priorities: // 1. explicitly specified by the user. // 3. 
fallback value - String jarDirectory = config.jarDirectory.orElse(config.FALLBACK_JAR_DIRECTORY); + String jarDirectory = config.jarDirectory().orElse(ContainerImageOpenshiftConfig.FALLBACK_JAR_DIRECTORY); String pathToJar = concatUnixPaths(jarDirectory, jarFileName); //In all other cases its the responsibility of the image to set those up correctly. @@ -169,8 +163,7 @@ public void openshiftRequirementsJvm(ContainerImageOpenshiftConfig openshiftConf } @BuildStep(onlyIf = { IsNormalNotRemoteDev.class, OpenshiftBuild.class, NativeBuild.class }) - public void openshiftRequirementsNative(ContainerImageOpenshiftConfig openshiftConfig, - S2iConfig s2iConfig, + public void openshiftRequirementsNative(ContainerImageOpenshiftConfig config, CurateOutcomeBuildItem curateOutcomeBuildItem, OutputTargetBuildItem out, PackageConfig packageConfig, @@ -179,35 +172,35 @@ public void openshiftRequirementsNative(ContainerImageOpenshiftConfig openshiftC BuildProducer builderImageProducer, BuildProducer commandProducer) { - ContainerImageOpenshiftConfig config = mergeConfig(openshiftConfig, s2iConfig); boolean usingDefaultBuilder = ImageUtil.getRepository(ContainerImages.QUARKUS_BINARY_S2I) - .equals(ImageUtil.getRepository(config.baseNativeImage)); + .equals(ImageUtil.getRepository(config.baseNativeImage())); String outputNativeBinaryFileName = nativeImage.getPath().getFileName().toString(); String nativeBinaryFileName = null; - boolean hasCustomNativePath = config.nativeBinaryFileName.isPresent() || config.nativeBinaryDirectory.isPresent(); - boolean hasCustomNativeArguments = config.nativeArguments.isPresent(); + boolean hasCustomNativePath = config.nativeBinaryFileName().isPresent() || config.nativeBinaryDirectory().isPresent(); + boolean hasCustomNativeArguments = config.nativeArguments().isPresent(); //The default openshift builder for native builds, renames the native binary. //To make things easier for the user, we need to handle it. - if (usingDefaultBuilder && !config.nativeBinaryFileName.isPresent()) { + if (usingDefaultBuilder && !config.nativeBinaryFileName().isPresent()) { nativeBinaryFileName = ContainerImageOpenshiftConfig.DEFAULT_NATIVE_TARGET_FILENAME; } else { - nativeBinaryFileName = config.nativeBinaryFileName.orElse(outputNativeBinaryFileName); + nativeBinaryFileName = config.nativeBinaryFileName().orElse(outputNativeBinaryFileName); } - if (config.buildStrategy == BuildStrategy.BINARY) { - builderImageProducer.produce(new BaseImageInfoBuildItem(config.baseNativeImage)); + if (config.buildStrategy() == BuildStrategy.BINARY) { + builderImageProducer.produce(new BaseImageInfoBuildItem(config.baseNativeImage())); // Native binary directory priorities: // 1. explicitly specified by the user. // 2. 
fallback vale - String nativeBinaryDirectory = config.nativeBinaryDirectory.orElse(config.FALLBACK_NATIVE_BINARY_DIRECTORY); + String nativeBinaryDirectory = config.nativeBinaryDirectory().orElse(config.FALLBACK_NATIVE_BINARY_DIRECTORY); String pathToNativeBinary = concatUnixPaths(nativeBinaryDirectory, nativeBinaryFileName); if (hasCustomNativePath || hasCustomNativeArguments) { commandProducer - .produce(KubernetesCommandBuildItem.commandWithArgs(pathToNativeBinary, config.nativeArguments.get())); + .produce( + KubernetesCommandBuildItem.commandWithArgs(pathToNativeBinary, config.nativeArguments().get())); } } } @@ -222,9 +215,9 @@ public void configureExternalRegistry(ApplicationInfoBuildItem applicationInfo, final String serviceAccountName = applicationInfo.getName(); String repositoryWithRegistry = registry + "/" + containerImageInfo.getRepository(); - if (openshiftConfig.imagePushSecret.isPresent()) { + if (openshiftConfig.imagePushSecret().isPresent()) { //if a push secret has been specified, we need to apply it. - String imagePushSecret = openshiftConfig.imagePushSecret.get(); + String imagePushSecret = openshiftConfig.imagePushSecret().get(); decorator.produce(new DecoratorBuildItem(OPENSHIFT, new ApplyDockerImageOutputToBuildConfigDecorator( applicationInfo.getName(), containerImageInfo.getImage(), imagePushSecret))); } else if (registry.contains(OPENSHIFT_INTERNAL_REGISTRY)) { @@ -246,8 +239,7 @@ public void configureExternalRegistry(ApplicationInfoBuildItem applicationInfo, } @BuildStep(onlyIf = { IsNormalNotRemoteDev.class, OpenshiftBuild.class }, onlyIfNot = NativeBuild.class) - public void openshiftBuildFromJar(ContainerImageOpenshiftConfig openshiftConfig, - S2iConfig s2iConfig, + public void openshiftBuildFromJar(ContainerImageOpenshiftConfig config, ContainerImageConfig containerImageConfig, KubernetesClientBuildItem kubernetesClientBuilder, ContainerImageInfoBuildItem containerImage, @@ -260,7 +252,6 @@ public void openshiftBuildFromJar(ContainerImageOpenshiftConfig openshiftConfig, // used to ensure that the jar has been built JarBuildItem jar) { - ContainerImageOpenshiftConfig config = mergeConfig(openshiftConfig, s2iConfig); if (containerImageConfig.isBuildExplicitlyDisabled()) { return; } @@ -283,7 +274,7 @@ public void openshiftBuildFromJar(ContainerImageOpenshiftConfig openshiftConfig, try (KubernetesClient kubernetesClient = buildClient(kubernetesClientBuilder)) { String namespace = Optional.ofNullable(kubernetesClient.getNamespace()).orElse("default"); - LOG.info("Starting (in-cluster) container image build for jar using: " + config.buildStrategy + " on server: " + LOG.info("Starting (in-cluster) container image build for jar using: " + config.buildStrategy() + " on server: " + kubernetesClient.getMasterUrl() + " in namespace:" + namespace + "."); //The contextRoot is where inside the tarball we will add the jars. A null value means everything will be added under '/' while "target" means everything will be added under '/target'. //For docker kind of builds where we use instructions like: `COPY target/*.jar /deployments` it using '/target' is a requirement. 
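The hunks above convert the OpenShift container-image options from @ConfigItem fields to @ConfigMapping accessor methods such as buildTimeout() and jarFileName(). For orientation, here is a minimal standalone sketch, not part of this change set and with purely illustrative names, of how such an interface resolves defaults and optional values through the same SmallRye Config APIs this pull request relies on:

import java.time.Duration;
import java.util.Optional;

import io.smallrye.config.ConfigMapping;
import io.smallrye.config.SmallRyeConfig;
import io.smallrye.config.SmallRyeConfigBuilder;
import io.smallrye.config.WithDefault;

public class ConfigMappingSketch {

    // Stand-in for a build-time config interface in the style introduced above.
    @ConfigMapping(prefix = "example.openshift")
    public interface ExampleBuildConfig {

        // Timeout applied to the in-cluster build; falls back to five minutes when unset.
        @WithDefault("PT5M")
        Duration buildTimeout();

        // Optional jar file name override; empty unless the user sets it.
        Optional<String> jarFileName();
    }

    public static void main(String[] args) {
        SmallRyeConfig config = new SmallRyeConfigBuilder()
                .withMapping(ExampleBuildConfig.class)
                // Method names map to kebab-case property names: jarFileName() -> jar-file-name
                .withDefaultValue("example.openshift.jar-file-name", "app.jar")
                .build();

        ExampleBuildConfig mapping = config.getConfigMapping(ExampleBuildConfig.class);
        System.out.println(mapping.buildTimeout());                 // PT5M
        System.out.println(mapping.jarFileName().orElse("unset"));  // app.jar
    }
}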
@@ -291,7 +282,7 @@ public void openshiftBuildFromJar(ContainerImageOpenshiftConfig openshiftConfig, String outputDirName = out.getOutputDirectory().getFileName().toString(); PackageConfig.JarConfig.JarType jarType = packageConfig.jar().type(); String contextRoot = getContextRoot(outputDirName, jarType == FAST_JAR || jarType == MUTABLE_JAR, - config.buildStrategy); + config.buildStrategy()); KubernetesClientBuilder clientBuilder = newClientBuilderWithoutHttp2(kubernetesClient.getConfiguration(), kubernetesClientBuilder.getHttpClientFactory()); if (jarType == FAST_JAR || jarType == MUTABLE_JAR) { @@ -320,7 +311,7 @@ private String getContextRoot(String outputDirName, boolean isFastJar, BuildStra } @BuildStep(onlyIf = { IsNormalNotRemoteDev.class, OpenshiftBuild.class, NativeBuild.class }) - public void openshiftBuildFromNative(ContainerImageOpenshiftConfig openshiftConfig, S2iConfig s2iConfig, + public void openshiftBuildFromNative(ContainerImageOpenshiftConfig config, ContainerImageConfig containerImageConfig, KubernetesClientBuildItem kubernetesClientBuilder, ContainerImageInfoBuildItem containerImage, @@ -332,8 +323,6 @@ public void openshiftBuildFromNative(ContainerImageOpenshiftConfig openshiftConf BuildProducer containerImageBuilder, NativeImageBuildItem nativeImage) { - ContainerImageOpenshiftConfig config = mergeConfig(openshiftConfig, s2iConfig); - if (containerImageConfig.isBuildExplicitlyDisabled()) { return; } @@ -346,7 +335,7 @@ public void openshiftBuildFromNative(ContainerImageOpenshiftConfig openshiftConf try (KubernetesClient kubernetesClient = buildClient(kubernetesClientBuilder)) { String namespace = Optional.ofNullable(kubernetesClient.getNamespace()).orElse("default"); - LOG.info("Starting (in-cluster) container image build for jar using: " + config.buildStrategy + " on server: " + LOG.info("Starting (in-cluster) container image build for jar using: " + config.buildStrategy() + " on server: " + kubernetesClient.getMasterUrl() + " in namespace:" + namespace + "."); Optional openshiftYml = generatedResources .stream() @@ -361,7 +350,7 @@ public void openshiftBuildFromNative(ContainerImageOpenshiftConfig openshiftConf //The contextRoot is where inside the tarball we will add the jars. A null value means everything will be added under '/' while "target" means everything will be added under '/target'. //For docker kind of builds where we use instructions like: `COPY target/*.jar /deployments` it using '/target' is a requirement. //For s2i kind of builds where jars are expected directly in the '/' we have to use null. - String contextRoot = config.buildStrategy == BuildStrategy.DOCKER ? "target" : null; + String contextRoot = config.buildStrategy() == BuildStrategy.DOCKER ? 
"target" : null; createContainerImage( newClientBuilderWithoutHttp2(kubernetesClient.getConfiguration(), kubernetesClientBuilder.getHttpClientFactory()), @@ -467,7 +456,7 @@ private static Build startOpenshiftBuild(BuildConfig buildConfig, File binaryFil try { return client.buildConfigs().withName(buildConfig.getMetadata().getName()) .instantiateBinary() - .withTimeoutInMillis(openshiftConfig.buildTimeout.toMillis()) + .withTimeoutInMillis(openshiftConfig.buildTimeout().toMillis()) .fromFile(binaryFile); } catch (Exception e) { Optional running = buildsOf(client, buildConfig).stream().findFirst(); @@ -498,7 +487,7 @@ private static void waitForOpenshiftBuild(Build build, ContainerImageOpenshiftCo build = updated; try (LogWatch w = client.builds().withName(buildName).withPrettyOutput().watchLog(); Reader reader = new InputStreamReader(w.getOutput())) { - display(reader, openshiftConfig.buildLogLevel); + display(reader, openshiftConfig.buildLogLevel()); } catch (IOException | KubernetesClientException ex) { // This may happen if the LogWatch is closed while we are still reading. // We shouldn't let the build fail, so let's log a warning and display last few lines of the log diff --git a/extensions/container-image/container-image-openshift/deployment/src/main/java/io/quarkus/container/image/openshift/deployment/OpenshiftUtils.java b/extensions/container-image/container-image-openshift/deployment/src/main/java/io/quarkus/container/image/openshift/deployment/OpenshiftUtils.java index 43341ba0cedaa..cf58ca1c7d6c2 100644 --- a/extensions/container-image/container-image-openshift/deployment/src/main/java/io/quarkus/container/image/openshift/deployment/OpenshiftUtils.java +++ b/extensions/container-image/container-image-openshift/deployment/src/main/java/io/quarkus/container/image/openshift/deployment/OpenshiftUtils.java @@ -4,12 +4,8 @@ import java.util.Collection; import java.util.List; import java.util.Optional; -import java.util.Set; import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; -import java.util.stream.StreamSupport; -import org.eclipse.microprofile.config.Config; import org.eclipse.microprofile.config.ConfigProvider; import io.dekorate.kubernetes.decorator.Decorator; @@ -86,70 +82,6 @@ public void visit(SourceBuildStrategyFluent strategy) { return !tagsMissing; } - /** - * Merges {@link ContainerImageOpenshiftConfig} with {@link S2iConfig} prioritizing in the former. - * - * @param openshiftConfig the Openshift config - * @param s2iConfig the s2i config - * @return an instance of {@link ContainerImageOpenshiftConfig} with the merged configuration. - */ - public static ContainerImageOpenshiftConfig mergeConfig(ContainerImageOpenshiftConfig openshiftConfig, - S2iConfig s2iConfig) { - ContainerImageOpenshiftConfig result = openshiftConfig != null ? 
openshiftConfig : new ContainerImageOpenshiftConfig(); - if (s2iConfig == null) { - return result; - } - - Config config = ConfigProvider.getConfig(); - Set properties = StreamSupport.stream(config.getPropertyNames().spliterator(), false) - .filter(s -> s.startsWith("quarkus.s2i.") || s.startsWith("quarkus.openshift.")) - .collect(Collectors.toSet()); - - boolean hasS2iBaseJvmImage = properties.contains("quarkus.s2i.base-jvm-image"); - boolean hasS2iBaseNativeImage = properties.contains("quarkus.s2i.base-native-image"); - boolean hasS2iJvmArguments = properties.contains("quarkus.s2i.jvm-arguments"); - boolean hasS2iNativeArguments = properties.contains("quarkus.s2i.native-arguments"); - boolean hasS2iJarDirectory = properties.contains("quarkus.s2i.jar-directory"); - boolean hasS2iJarFileName = properties.contains("quarkus.s2i.jar-file-name"); - boolean hasS2iNativeBinaryDirectory = properties.contains("quarkus.s2i.native-binary-directory"); - boolean hasS2iNativeBinaryFileName = properties.contains("quarkus.s2i.native-binary-file-name"); - boolean hasS2iBuildTimeout = properties.contains("quarkus.s2i.native-binary-file-name"); - - boolean hasOpenshiftBaseJvmImage = properties.contains("quarkus.openshift.base-jvm-image"); - boolean hasOpenshiftBaseNativeImage = properties.contains("quarkus.openshift.base-native-image"); - boolean hasOpenshiftJvmArguments = properties.contains("quarkus.openshift.jvm-arguments"); - boolean hasOpenshiftNativeArguments = properties.contains("quarkus.openshift.native-arguments"); - boolean hasOpenshiftJarDirectory = properties.contains("quarkus.openshift.jar-directory"); - boolean hasOpenshiftJarFileName = properties.contains("quarkus.openshift.jar-file-name"); - boolean hasOpenshiftNativeBinaryDirectory = properties.contains("quarkus.openshift.native-binary-directory"); - boolean hasOpenshiftNativeBinaryFileName = properties.contains("quarkus.openshift.native-binary-file-name"); - boolean hasOpenshiftBuildTimeout = properties.contains("quarkus.openshift.native-binary-file-name"); - - result.baseJvmImage = hasS2iBaseJvmImage && !hasOpenshiftBaseJvmImage ? s2iConfig.baseJvmImage - : openshiftConfig.baseJvmImage; - result.baseNativeImage = hasS2iBaseNativeImage && !hasOpenshiftBaseNativeImage ? s2iConfig.baseNativeImage - : openshiftConfig.baseNativeImage; - result.jvmArguments = hasS2iJvmArguments && !hasOpenshiftJvmArguments ? s2iConfig.jvmArguments - : openshiftConfig.jvmArguments; - result.nativeArguments = hasS2iNativeArguments && !hasOpenshiftNativeArguments ? s2iConfig.nativeArguments - : openshiftConfig.nativeArguments; - result.jarDirectory = hasS2iJarDirectory && !hasOpenshiftJarDirectory ? Optional.of(s2iConfig.jarDirectory) - : openshiftConfig.jarDirectory; - result.jarFileName = hasS2iJarFileName && !hasOpenshiftJarFileName ? s2iConfig.jarFileName - : openshiftConfig.jarFileName; - result.nativeBinaryDirectory = hasS2iNativeBinaryDirectory && !hasOpenshiftNativeBinaryDirectory - ? Optional.of(s2iConfig.nativeBinaryDirectory) - : openshiftConfig.nativeBinaryDirectory; - result.nativeBinaryFileName = hasS2iNativeBinaryFileName && !hasOpenshiftNativeBinaryFileName - ? s2iConfig.nativeBinaryFileName - : openshiftConfig.nativeBinaryFileName; - result.buildTimeout = hasS2iBuildTimeout && !hasOpenshiftBuildTimeout ? s2iConfig.buildTimeout - : openshiftConfig.buildTimeout; - result.buildStrategy = openshiftConfig.buildStrategy; - - return result; - } - /** * @return the openshift namespace set in the OpenShift extension. 
*/ diff --git a/extensions/container-image/container-image-openshift/deployment/src/main/java/io/quarkus/container/image/openshift/deployment/S2iBaseJavaImage.java b/extensions/container-image/container-image-openshift/deployment/src/main/java/io/quarkus/container/image/openshift/deployment/S2iBaseJavaImage.java deleted file mode 100644 index 5512d7725407c..0000000000000 --- a/extensions/container-image/container-image-openshift/deployment/src/main/java/io/quarkus/container/image/openshift/deployment/S2iBaseJavaImage.java +++ /dev/null @@ -1,63 +0,0 @@ - -package io.quarkus.container.image.openshift.deployment; - -import java.util.Optional; - -import io.quarkus.container.image.deployment.util.ImageUtil; - -public enum S2iBaseJavaImage { - - //We only compare `repositories` so registries and tags are stripped - FABRIC8("fabric8/s2i-java:latest", "JAVA_MAIN_CLASS", "JAVA_APP_JAR", "JAVA_LIB_DIR", "JAVA_CLASSPATH", "JAVA_OPTIONS"); - - private final String image; - private final String javaMainClassEnvVar; - private final String jarEnvVar; - private final String jarLibEnvVar; - private final String classpathEnvVar; - private final String jvmOptionsEnvVar; - - public static Optional findMatching(String image) { - for (S2iBaseJavaImage candidate : S2iBaseJavaImage.values()) { - if (ImageUtil.getRepository(candidate.getImage()).equals(ImageUtil.getRepository(image))) { - return Optional.of(candidate); - } - } - return Optional.empty(); - } - - private S2iBaseJavaImage(String image, String javaMainClassEnvVar, String jarEnvVar, String jarLibEnvVar, - String classpathEnvVar, String jvmOptionsEnvVar) { - this.image = image; - this.javaMainClassEnvVar = javaMainClassEnvVar; - this.jarEnvVar = jarEnvVar; - this.jarLibEnvVar = jarLibEnvVar; - this.classpathEnvVar = classpathEnvVar; - this.jvmOptionsEnvVar = jvmOptionsEnvVar; - } - - public String getImage() { - return image; - } - - public String getJavaMainClassEnvVar() { - return javaMainClassEnvVar; - } - - public String getJvmOptionsEnvVar() { - return jvmOptionsEnvVar; - } - - public String getClasspathEnvVar() { - return classpathEnvVar; - } - - public String getJarLibEnvVar() { - return jarLibEnvVar; - } - - public String getJarEnvVar() { - return jarEnvVar; - } - -} diff --git a/extensions/container-image/container-image-openshift/deployment/src/main/java/io/quarkus/container/image/openshift/deployment/S2iBaseNativeImage.java b/extensions/container-image/container-image-openshift/deployment/src/main/java/io/quarkus/container/image/openshift/deployment/S2iBaseNativeImage.java deleted file mode 100644 index 167e2a8394f6a..0000000000000 --- a/extensions/container-image/container-image-openshift/deployment/src/main/java/io/quarkus/container/image/openshift/deployment/S2iBaseNativeImage.java +++ /dev/null @@ -1,49 +0,0 @@ - -package io.quarkus.container.image.openshift.deployment; - -import java.util.Optional; - -import io.quarkus.container.image.deployment.util.ImageUtil; - -public enum S2iBaseNativeImage { - - //We only compare `repositories` so registries and tags are stripped - QUARKUS("quarkus/ubi-quarkus-native-binary-s2i:2.0", "application", "QUARKUS_HOME", "QUARKUS_OPTS"); - - private final String image; - private final String fixedNativeBinaryName; - private final String homeDirEnvVar; - private final String optsEnvVar; - - public static Optional findMatching(String image) { - for (S2iBaseNativeImage candidate : S2iBaseNativeImage.values()) { - if (ImageUtil.getRepository(candidate.getImage()).equals(ImageUtil.getRepository(image))) { - 
return Optional.of(candidate); - } - } - return Optional.empty(); - } - - private S2iBaseNativeImage(String image, String fixedNativeBinaryName, String homeDirEnvVar, String optsEnvVar) { - this.image = image; - this.fixedNativeBinaryName = fixedNativeBinaryName; - this.homeDirEnvVar = homeDirEnvVar; - this.optsEnvVar = optsEnvVar; - } - - public String getImage() { - return image; - } - - public String getFixedNativeBinaryName() { - return this.fixedNativeBinaryName; - } - - public String getHomeDirEnvVar() { - return homeDirEnvVar; - } - - public String getOptsEnvVar() { - return optsEnvVar; - } -} diff --git a/extensions/container-image/container-image-openshift/deployment/src/main/java/io/quarkus/container/image/openshift/deployment/S2iConfig.java b/extensions/container-image/container-image-openshift/deployment/src/main/java/io/quarkus/container/image/openshift/deployment/S2iConfig.java deleted file mode 100644 index 5e8e9d7e1a80b..0000000000000 --- a/extensions/container-image/container-image-openshift/deployment/src/main/java/io/quarkus/container/image/openshift/deployment/S2iConfig.java +++ /dev/null @@ -1,108 +0,0 @@ -package io.quarkus.container.image.openshift.deployment; - -import java.time.Duration; -import java.util.List; -import java.util.Optional; - -import io.quarkus.deployment.images.ContainerImages; -import io.quarkus.runtime.annotations.ConfigItem; -import io.quarkus.runtime.annotations.ConfigPhase; -import io.quarkus.runtime.annotations.ConfigRoot; - -@ConfigRoot(phase = ConfigPhase.BUILD_TIME) -public class S2iConfig { - - public static final String DEFAULT_NATIVE_TARGET_FILENAME = "application"; - - public static final String DEFAULT_JVM_DOCKERFILE = "src/main/docker/Dockerfile.jvm"; - public static final String DEFAULT_NATIVE_DOCKERFILE = "src/main/docker/Dockerfile.native"; - - public static final String FALLBACK_JAR_DIRECTORY = "/deployments/"; - public static final String FALLBACK_NATIVE_BINARY_DIRECTORY = "/home/quarkus/"; - - /** - * The build config strategy to use. - */ - @ConfigItem(defaultValue = "binary") - public BuildStrategy buildStrategy; - - /** - * The base image to be used when a container image is being produced for the jar build. - * - * When the application is built against Java 21 or higher, {@code registry.access.redhat.com/ubi8/openjdk-21:1.20} - * is used as the default. - * Otherwise {@code registry.access.redhat.com/ubi8/openjdk-17:1.20} is used as the default. - */ - @ConfigItem - public Optional baseJvmImage; - - /** - * The base image to be used when a container image is being produced for the native binary build - */ - @ConfigItem(defaultValue = ContainerImages.QUARKUS_BINARY_S2I) - public String baseNativeImage; - - /** - * The JVM arguments to pass to the JVM when starting the application - */ - @ConfigItem - public Optional> jvmArguments; - - /** - * Additional arguments to pass when starting the native application - */ - @ConfigItem - public Optional> nativeArguments; - - /** - * The directory where the jar is added during the assemble phase. - * This is dependent on the S2I image and should be supplied if a non default image is used. - */ - @ConfigItem(defaultValue = "/deployments/target/") - public String jarDirectory; - - /** - * The resulting filename of the jar in the S2I image. - * This option may be used if the selected S2I image uses a fixed name for the jar. - */ - @ConfigItem - public Optional jarFileName; - - /** - * The directory where the native binary is added during the assemble phase. 
- * This is dependent on the S2I image and should be supplied if a non-default image is used. - */ - @ConfigItem(defaultValue = "/home/quarkus/") - public String nativeBinaryDirectory; - - /** - * The resulting filename of the native binary in the S2I image. - * This option may be used if the selected S2I image uses a fixed name for the native binary. - */ - @ConfigItem - public Optional nativeBinaryFileName; - - /** - * The build timeout. - */ - @ConfigItem(defaultValue = "PT5M") - Duration buildTimeout; - - /** - * Check if baseJvmImage is the default - * - * @returns true if baseJvmImage is the default - */ - public boolean hasDefaultBaseJvmImage() { - return baseJvmImage.isPresent(); - } - - /** - * Check if baseNativeImage is the default - * - * @returns true if baseNativeImage is the default - */ - public boolean hasDefaultBaseNativeImage() { - return baseNativeImage.equals(ContainerImages.QUARKUS_BINARY_S2I); - } -} diff --git a/extensions/container-image/container-image-openshift/runtime/pom.xml b/extensions/container-image/container-image-openshift/runtime/pom.xml index 944f403fc9881..820ba97eb6510 100644 --- a/extensions/container-image/container-image-openshift/runtime/pom.xml +++ b/extensions/container-image/container-image-openshift/runtime/pom.xml @@ -53,9 +53,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/container-image/container-image-podman/deployment/src/main/java/io/quarkus/container/image/podman/deployment/PodmanBuild.java b/extensions/container-image/container-image-podman/deployment/src/main/java/io/quarkus/container/image/podman/deployment/PodmanBuild.java index 7e48b7fbbc493..431a2fb8c3ff7 100644 --- a/extensions/container-image/container-image-podman/deployment/src/main/java/io/quarkus/container/image/podman/deployment/PodmanBuild.java +++ b/extensions/container-image/container-image-podman/deployment/src/main/java/io/quarkus/container/image/podman/deployment/PodmanBuild.java @@ -13,7 +13,7 @@ public PodmanBuild(ContainerImageConfig containerImageConfig) { @Override public boolean getAsBoolean() { - return containerImageConfig.builder + return containerImageConfig.builder() .map(b -> b.equals(PodmanProcessor.PODMAN_CONTAINER_IMAGE_NAME)) .orElse(true); } diff --git a/extensions/container-image/container-image-podman/runtime/src/main/resources/META-INF/quarkus-extension.yaml b/extensions/container-image/container-image-podman/runtime/src/main/resources/META-INF/quarkus-extension.yaml index 563a067358c16..2c12e26682476 100644 --- a/extensions/container-image/container-image-podman/runtime/src/main/resources/META-INF/quarkus-extension.yaml +++ b/extensions/container-image/container-image-podman/runtime/src/main/resources/META-INF/quarkus-extension.yaml @@ -9,6 +9,6 @@ metadata: guide: "https://quarkus.io/guides/container-image" categories: - "cloud" - status: "preview" + status: "stable" config: - - "quarkus.podman." \ No newline at end of file + - "quarkus.podman." 
diff --git a/extensions/container-image/deployment/pom.xml b/extensions/container-image/deployment/pom.xml index 50de8b3187ecf..f39d1232f2afa 100644 --- a/extensions/container-image/deployment/pom.xml +++ b/extensions/container-image/deployment/pom.xml @@ -59,9 +59,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/container-image/deployment/src/main/java/io/quarkus/container/image/deployment/ContainerImageConfig.java b/extensions/container-image/deployment/src/main/java/io/quarkus/container/image/deployment/ContainerImageConfig.java index b5ad69727968c..f97b43b728aff 100644 --- a/extensions/container-image/deployment/src/main/java/io/quarkus/container/image/deployment/ContainerImageConfig.java +++ b/extensions/container-image/deployment/src/main/java/io/quarkus/container/image/deployment/ContainerImageConfig.java @@ -5,111 +5,102 @@ import java.util.Optional; import io.quarkus.runtime.annotations.ConfigDocMapKey; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigRoot; -import io.quarkus.runtime.annotations.ConvertWith; import io.quarkus.runtime.configuration.TrimmedStringConverter; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithConverter; +import io.smallrye.config.WithDefault; @ConfigRoot -public class ContainerImageConfig { +@ConfigMapping(prefix = "quarkus.container-image") +public interface ContainerImageConfig { /** * The group the container image will be part of */ - @ConfigItem - @ConvertWith(TrimmedStringConverter.class) - Optional group; //used only by ContainerImageProcessor, use ContainerImageInfoBuildItem instead + Optional<@WithConverter(TrimmedStringConverter.class) String> group(); //used only by ContainerImageProcessor, use ContainerImageInfoBuildItem instead /** * The name of the container image. If not set defaults to the application name */ - @ConfigItem(defaultValue = "${quarkus.application.name:unset}") - @ConvertWith(TrimmedStringConverter.class) - String name; //used only by ContainerImageProcessor, use ContainerImageInfoBuildItem instead + @WithDefault("${quarkus.application.name:unset}") + @WithConverter(TrimmedStringConverter.class) + String name(); //used only by ContainerImageProcessor, use ContainerImageInfoBuildItem instead /** * The tag of the container image. If not set defaults to the application version */ - @ConfigItem(defaultValue = "${quarkus.application.version:latest}") - Optional tag; //used only by ContainerImageProcessor, use ContainerImageInfoBuildItem instead + @WithDefault("${quarkus.application.version:latest}") + String tag(); //used only by ContainerImageProcessor, use ContainerImageInfoBuildItem instead /** * Additional tags of the container image. */ - @ConfigItem - public Optional> additionalTags; + Optional> additionalTags(); /** * Custom labels to add to the generated image. */ - @ConfigItem @ConfigDocMapKey("label-name") - public Map labels; + Map labels(); /** * The container registry to use */ - @ConfigItem - public Optional registry; + Optional registry(); /** * Represents the entire image string. 
* If set, then {@code group}, {@code name}, {@code registry}, {@code tags}, {@code additionalTags} * are ignored */ - @ConfigItem - public Optional image; + Optional image(); /** * The username to use to authenticate with the registry where the built image will be pushed */ - @ConfigItem - public Optional username; + Optional username(); /** * The password to use to authenticate with the registry where the built image will be pushed */ - @ConfigItem - public Optional password; + Optional password(); /** * Whether or not insecure registries are allowed */ - @ConfigItem - public boolean insecure; + @WithDefault("false") + boolean insecure(); /** * Whether or not a image build will be performed. */ - @ConfigItem - public Optional build; + Optional build(); /** * Whether or not an image push will be performed. */ - @ConfigItem - public Optional push; + Optional push(); /** * The name of the container image extension to use (e.g. docker, podman, jib, s2i). * The option will be used in case multiple extensions are present. */ - @ConfigItem - public Optional builder; + Optional builder(); - public boolean isBuildExplicitlyEnabled() { - return build.isPresent() && build.get(); + default boolean isBuildExplicitlyEnabled() { + return build().isPresent() && build().get(); } - public boolean isBuildExplicitlyDisabled() { - return build.isPresent() && !build.get(); + default boolean isBuildExplicitlyDisabled() { + return build().isPresent() && !build().get(); } - public boolean isPushExplicitlyEnabled() { - return push.isPresent() && push.get(); + default boolean isPushExplicitlyEnabled() { + return push().isPresent() && push().get(); } - public boolean isPushExplicitlyDisabled() { - return push.isPresent() && !push.get(); + default boolean isPushExplicitlyDisabled() { + return push().isPresent() && !push().get(); } } diff --git a/extensions/container-image/deployment/src/main/java/io/quarkus/container/image/deployment/ContainerImageProcessor.java b/extensions/container-image/deployment/src/main/java/io/quarkus/container/image/deployment/ContainerImageProcessor.java index ab1115042161d..e842306621eda 100644 --- a/extensions/container-image/deployment/src/main/java/io/quarkus/container/image/deployment/ContainerImageProcessor.java +++ b/extensions/container-image/deployment/src/main/java/io/quarkus/container/image/deployment/ContainerImageProcessor.java @@ -62,8 +62,8 @@ public void publishImageInfo(ApplicationInfoBuildItem app, // additionalTags are used even containerImageConfig.image is set because that // string cannot contain multiple tags - if (containerImageConfig.additionalTags.isPresent()) { - for (String additionalTag : containerImageConfig.additionalTags.get()) { + if (containerImageConfig.additionalTags().isPresent()) { + for (String additionalTag : containerImageConfig.additionalTags().get()) { if (!ImageReference.isValidTag(additionalTag)) { throw new IllegalArgumentException( "The supplied additional container-image tag '" + additionalTag + "' is invalid"); @@ -71,24 +71,25 @@ public void publishImageInfo(ApplicationInfoBuildItem app, } } - Optional effectiveGroup = getEffectiveGroup(containerImageConfig.group, singleSegmentImageRequest.isPresent()); + Optional effectiveGroup = getEffectiveGroup(containerImageConfig.group(), + singleSegmentImageRequest.isPresent()); // if the user supplied the entire image string, use it - if (containerImageConfig.image.isPresent()) { - ImageReference imageReference = ImageReference.parse(containerImageConfig.image.get()); + if 
(containerImageConfig.image().isPresent()) { + ImageReference imageReference = ImageReference.parse(containerImageConfig.image().get()); String repository = imageReference.getRepository(); if (singleSegmentImageRequest.isPresent() && imageReference.getRepository().contains("/") && imageReference.getRegistry().filter(StringUtil::isNullOrEmpty).isPresent()) { log.warn("A single segment image is preferred, but a local multi segment has been provided: " - + containerImageConfig.image.get()); + + containerImageConfig.image().get()); } containerImage.produce(new ContainerImageInfoBuildItem(imageReference.getRegistry(), - containerImageConfig.username, containerImageConfig.password, repository, - imageReference.getTag(), containerImageConfig.additionalTags.orElse(Collections.emptyList()))); + containerImageConfig.username(), containerImageConfig.password(), repository, + imageReference.getTag(), containerImageConfig.additionalTags().orElse(Collections.emptyList()))); return; } - String registry = containerImageConfig.registry + String registry = containerImageConfig.registry() .orElseGet(() -> containerImageRegistry.map(FallbackContainerImageRegistryBuildItem::getRegistry) .orElse(null)); if ((registry != null) && !ImageReference.isValidRegistry(registry)) { @@ -96,7 +97,7 @@ public void publishImageInfo(ApplicationInfoBuildItem app, } String effectiveName = containerImageCustomName.map(ContainerImageCustomNameBuildItem::getName) - .orElse(containerImageConfig.name); + .orElse(containerImageConfig.name()); String group = effectiveGroup.orElse(""); String repository = group.isBlank() ? effectiveName : group + "/" + effectiveName; if (!ImageReference.isValidRepository(repository)) { @@ -104,7 +105,7 @@ public void publishImageInfo(ApplicationInfoBuildItem app, + group + "' and name '" + effectiveName + "' is invalid"); } - String effectiveTag = containerImageConfig.tag.orElse(app.getVersion()); + String effectiveTag = containerImageConfig.tag(); if (effectiveTag.equals(UNSET_VALUE)) { effectiveTag = DEFAULT_TAG; } @@ -114,10 +115,10 @@ public void publishImageInfo(ApplicationInfoBuildItem app, } containerImage.produce(new ContainerImageInfoBuildItem(Optional.ofNullable(registry), - containerImageConfig.username, containerImageConfig.password, + containerImageConfig.username(), containerImageConfig.password(), effectiveGroup, effectiveName, effectiveTag, - containerImageConfig.additionalTags.orElse(Collections.emptyList()))); + containerImageConfig.additionalTags().orElse(Collections.emptyList()))); } private void ensureSingleContainerImageExtension(Capabilities capabilities) { diff --git a/extensions/container-image/deployment/src/test/java/io/quarkus/container/image/deployment/ContainerImageInfoTest.java b/extensions/container-image/deployment/src/test/java/io/quarkus/container/image/deployment/ContainerImageInfoTest.java index a864a948fb3fc..fa76fe6ef8504 100644 --- a/extensions/container-image/deployment/src/test/java/io/quarkus/container/image/deployment/ContainerImageInfoTest.java +++ b/extensions/container-image/deployment/src/test/java/io/quarkus/container/image/deployment/ContainerImageInfoTest.java @@ -17,11 +17,6 @@ import io.quarkus.deployment.Capabilities; import io.quarkus.deployment.annotations.BuildProducer; import io.quarkus.deployment.builditem.ApplicationInfoBuildItem; -import io.quarkus.deployment.configuration.BuildTimeConfigurationReader; -import io.quarkus.deployment.configuration.DefaultValuesConfigurationSource; -import io.quarkus.runtime.LaunchMode; -import 
io.quarkus.runtime.configuration.ConfigUtils; -import io.smallrye.config.PropertiesConfigSource; import io.smallrye.config.SmallRyeConfig; import io.smallrye.config.SmallRyeConfigBuilder; @@ -104,19 +99,15 @@ private void clearProperty(String key) { } private void whenPublishImageInfo() { - BuildTimeConfigurationReader reader = new BuildTimeConfigurationReader( - Collections.singletonList(ContainerImageConfig.class)); - SmallRyeConfigBuilder builder = ConfigUtils.configBuilder(false, LaunchMode.NORMAL); - - DefaultValuesConfigurationSource ds = new DefaultValuesConfigurationSource( - reader.getBuildTimePatternMap()); - PropertiesConfigSource pcs = new PropertiesConfigSource(new Properties(), "Test Properties"); - builder.withSources(ds, pcs); - - SmallRyeConfig src = builder.build(); - BuildTimeConfigurationReader.ReadResult readResult = reader.readConfiguration(src); - ContainerImageConfig containerImageConfig = (ContainerImageConfig) readResult - .requireObjectForClass(ContainerImageConfig.class); + SmallRyeConfig config = new SmallRyeConfigBuilder() + .addDefaultInterceptors() + .addDefaultSources() + .addDiscoveredConverters() + .addDiscoveredCustomizers() + .withMapping(ContainerImageConfig.class) + .build(); + + ContainerImageConfig containerImageConfig = config.getConfigMapping(ContainerImageConfig.class); ApplicationInfoBuildItem app = new ApplicationInfoBuildItem(Optional.of(APP_NAME), Optional.of(APP_VERSION)); Capabilities capabilities = new Capabilities(Collections.emptySet()); diff --git a/extensions/container-image/deployment/src/test/java/io/quarkus/container/image/deployment/InvalidConfigInNameTest.java b/extensions/container-image/deployment/src/test/java/io/quarkus/container/image/deployment/InvalidConfigInNameTest.java index f4c2fc4a95c52..4b15b34471ea9 100644 --- a/extensions/container-image/deployment/src/test/java/io/quarkus/container/image/deployment/InvalidConfigInNameTest.java +++ b/extensions/container-image/deployment/src/test/java/io/quarkus/container/image/deployment/InvalidConfigInNameTest.java @@ -1,19 +1,22 @@ package io.quarkus.container.image.deployment; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; -import java.util.NoSuchElementException; - import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; import io.quarkus.test.QuarkusUnitTest; +import io.smallrye.config.ConfigValidationException; public class InvalidConfigInNameTest { @RegisterExtension static QuarkusUnitTest runner = new QuarkusUnitTest() - .setExpectedException(NoSuchElementException.class) + .assertException(t -> { + assertTrue(t instanceof ConfigValidationException); + assertTrue(t.getMessage().contains("NoSuchElementException")); + }) .withEmptyApplication() .overrideConfigKey("quarkus.container-image.build", "true") .overrideConfigKey("quarkus.container-image.name", "test-${foo.bar}"); diff --git a/extensions/container-image/runtime/pom.xml b/extensions/container-image/runtime/pom.xml index 02eaebdbc1433..b2b22a240420a 100644 --- a/extensions/container-image/runtime/pom.xml +++ b/extensions/container-image/runtime/pom.xml @@ -49,9 +49,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/credentials/deployment/src/main/java/io/quarkus/credentials/CredentialsProcessor.java b/extensions/credentials/deployment/src/main/java/io/quarkus/credentials/deployment/CredentialsProcessor.java similarity index 76% rename from 
extensions/credentials/deployment/src/main/java/io/quarkus/credentials/CredentialsProcessor.java rename to extensions/credentials/deployment/src/main/java/io/quarkus/credentials/deployment/CredentialsProcessor.java index fb0bbef212ca0..0ad22aa3181ad 100644 --- a/extensions/credentials/deployment/src/main/java/io/quarkus/credentials/CredentialsProcessor.java +++ b/extensions/credentials/deployment/src/main/java/io/quarkus/credentials/deployment/CredentialsProcessor.java @@ -1,6 +1,7 @@ -package io.quarkus.credentials; +package io.quarkus.credentials.deployment; import io.quarkus.arc.deployment.UnremovableBeanBuildItem; +import io.quarkus.credentials.CredentialsProvider; import io.quarkus.deployment.annotations.BuildStep; public class CredentialsProcessor { diff --git a/extensions/devservices/h2/src/main/java/io/quarkus/devservices/h2/deployment/H2DevServicesProcessor.java b/extensions/devservices/h2/src/main/java/io/quarkus/devservices/h2/deployment/H2DevServicesProcessor.java index 816befca62d33..1e461e876534c 100644 --- a/extensions/devservices/h2/src/main/java/io/quarkus/devservices/h2/deployment/H2DevServicesProcessor.java +++ b/extensions/devservices/h2/src/main/java/io/quarkus/devservices/h2/deployment/H2DevServicesProcessor.java @@ -84,12 +84,19 @@ public void close() throws IOException { } catch (SQLException t) { t.printStackTrace(); } - tcpServer.stop(); - LOG.info("Dev Services for H2 shut down; server status: " + tcpServer.getStatus()); - } else { - LOG.info( - "Dev Services for H2 was NOT shut down as it appears it was down already; server status: " - + tcpServer.getStatus()); + // TODO Yes, this is a port leak + // The good news is that because it's an in-memory database, it will get shut down + // when the JVM stops. Nonetheless, this clearly is not ok, and needs + // a fix so that we do not start databases in the augmentation phase + // TODO remove this when #45786 and #45785 are done + final boolean hackPendingDeferredDevServiceStart = true; + if (!hackPendingDeferredDevServiceStart) { + tcpServer.stop(); + LOG.info("Dev Services for H2 shut down; server status: " + tcpServer.getStatus()); + + } + // End of #45786 and #45785 workaround + } } }); diff --git a/extensions/devservices/oracle/pom.xml b/extensions/devservices/oracle/pom.xml index ecb868a75a09e..6fb8e48b17aa4 100644 --- a/extensions/devservices/oracle/pom.xml +++ b/extensions/devservices/oracle/pom.xml @@ -32,7 +32,7 @@ com.oracle.database.jdbc - ojdbc11 + ojdbc17 org.junit.jupiter diff --git a/extensions/elytron-security-common/runtime/src/main/java/io/quarkus/elytron/security/common/runtime/graal/Target_org_wildfly_security_password_interfaces_BCryptPassword.java b/extensions/elytron-security-common/runtime/src/main/java/io/quarkus/elytron/security/common/runtime/graal/Target_org_wildfly_security_password_interfaces_BCryptPassword.java index 09e8f55eb91fd..a999f721ae462 100644 --- a/extensions/elytron-security-common/runtime/src/main/java/io/quarkus/elytron/security/common/runtime/graal/Target_org_wildfly_security_password_interfaces_BCryptPassword.java +++ b/extensions/elytron-security-common/runtime/src/main/java/io/quarkus/elytron/security/common/runtime/graal/Target_org_wildfly_security_password_interfaces_BCryptPassword.java @@ -2,12 +2,12 @@ import java.util.Arrays; -import org.wildfly.common.Assert; - import com.oracle.svm.core.annotate.Alias; import com.oracle.svm.core.annotate.Substitute; import com.oracle.svm.core.annotate.TargetClass; +import io.smallrye.common.constraint.Assert; + 
@TargetClass(className = "org.wildfly.security.password.interfaces.BCryptPassword") public final class Target_org_wildfly_security_password_interfaces_BCryptPassword { diff --git a/extensions/elytron-security-common/runtime/src/main/java/io/quarkus/elytron/security/common/runtime/graal/Target_org_wildfly_security_password_interfaces_BSDUnixDESCryptPassword.java b/extensions/elytron-security-common/runtime/src/main/java/io/quarkus/elytron/security/common/runtime/graal/Target_org_wildfly_security_password_interfaces_BSDUnixDESCryptPassword.java index 8f88e0be504d1..c6cfbec0d7ea2 100644 --- a/extensions/elytron-security-common/runtime/src/main/java/io/quarkus/elytron/security/common/runtime/graal/Target_org_wildfly_security_password_interfaces_BSDUnixDESCryptPassword.java +++ b/extensions/elytron-security-common/runtime/src/main/java/io/quarkus/elytron/security/common/runtime/graal/Target_org_wildfly_security_password_interfaces_BSDUnixDESCryptPassword.java @@ -2,12 +2,12 @@ import java.util.Arrays; -import org.wildfly.common.Assert; - import com.oracle.svm.core.annotate.Alias; import com.oracle.svm.core.annotate.Substitute; import com.oracle.svm.core.annotate.TargetClass; +import io.smallrye.common.constraint.Assert; + @TargetClass(className = "org.wildfly.security.password.interfaces.BSDUnixDESCryptPassword") public final class Target_org_wildfly_security_password_interfaces_BSDUnixDESCryptPassword { diff --git a/extensions/elytron-security-common/runtime/src/main/java/io/quarkus/elytron/security/common/runtime/graal/Target_org_wildfly_security_password_interfaces_ClearPassword.java b/extensions/elytron-security-common/runtime/src/main/java/io/quarkus/elytron/security/common/runtime/graal/Target_org_wildfly_security_password_interfaces_ClearPassword.java index 5d3c972695625..301377bf6e9d7 100644 --- a/extensions/elytron-security-common/runtime/src/main/java/io/quarkus/elytron/security/common/runtime/graal/Target_org_wildfly_security_password_interfaces_ClearPassword.java +++ b/extensions/elytron-security-common/runtime/src/main/java/io/quarkus/elytron/security/common/runtime/graal/Target_org_wildfly_security_password_interfaces_ClearPassword.java @@ -1,6 +1,6 @@ package io.quarkus.elytron.security.common.runtime.graal; -import static org.wildfly.common.Assert.checkNotNullParam; +import static io.smallrye.common.constraint.Assert.checkNotNullParam; import org.wildfly.security.password.interfaces.ClearPassword; diff --git a/extensions/elytron-security-common/runtime/src/main/java/io/quarkus/elytron/security/common/runtime/graal/Target_org_wildfly_security_password_interfaces_MaskedPassword.java b/extensions/elytron-security-common/runtime/src/main/java/io/quarkus/elytron/security/common/runtime/graal/Target_org_wildfly_security_password_interfaces_MaskedPassword.java index f8f7f9913c707..e4bbe63c5d802 100644 --- a/extensions/elytron-security-common/runtime/src/main/java/io/quarkus/elytron/security/common/runtime/graal/Target_org_wildfly_security_password_interfaces_MaskedPassword.java +++ b/extensions/elytron-security-common/runtime/src/main/java/io/quarkus/elytron/security/common/runtime/graal/Target_org_wildfly_security_password_interfaces_MaskedPassword.java @@ -1,11 +1,12 @@ package io.quarkus.elytron.security.common.runtime.graal; -import org.wildfly.common.Assert; import org.wildfly.security.password.interfaces.MaskedPassword; import com.oracle.svm.core.annotate.Substitute; import com.oracle.svm.core.annotate.TargetClass; +import io.smallrye.common.constraint.Assert; + /** * Replace the 
{@linkplain MaskedPassword} interface due to an issue with char[].clone() failures during native image * generation. diff --git a/extensions/elytron-security-ldap/runtime/src/main/java/io/quarkus/elytron/security/ldap/DelegatingLdapContext.java b/extensions/elytron-security-ldap/runtime/src/main/java/io/quarkus/elytron/security/ldap/DelegatingLdapContext.java index 6f43ceb731a5a..2b71d2bd99914 100644 --- a/extensions/elytron-security-ldap/runtime/src/main/java/io/quarkus/elytron/security/ldap/DelegatingLdapContext.java +++ b/extensions/elytron-security-ldap/runtime/src/main/java/io/quarkus/elytron/security/ldap/DelegatingLdapContext.java @@ -22,9 +22,10 @@ import javax.naming.ldap.LdapContext; import javax.net.SocketFactory; -import org.wildfly.common.Assert; import org.wildfly.security.auth.realm.ldap.ThreadLocalSSLSocketFactory; +import io.smallrye.common.constraint.Assert; + class DelegatingLdapContext implements LdapContext { private final DirContext delegating; diff --git a/extensions/elytron-security-properties-file/deployment/src/main/java/io/quarkus/elytron/security/properties/deployment/ElytronPropertiesProcessor.java b/extensions/elytron-security-properties-file/deployment/src/main/java/io/quarkus/elytron/security/properties/deployment/ElytronPropertiesProcessor.java index db623f9ba66be..7da4d3788061f 100644 --- a/extensions/elytron-security-properties-file/deployment/src/main/java/io/quarkus/elytron/security/properties/deployment/ElytronPropertiesProcessor.java +++ b/extensions/elytron-security-properties-file/deployment/src/main/java/io/quarkus/elytron/security/properties/deployment/ElytronPropertiesProcessor.java @@ -12,11 +12,10 @@ import io.quarkus.deployment.builditem.nativeimage.NativeImageResourceBuildItem; import io.quarkus.elytron.security.deployment.ElytronPasswordMarkerBuildItem; import io.quarkus.elytron.security.deployment.SecurityRealmBuildItem; -import io.quarkus.elytron.security.runtime.ElytronPropertiesFileRecorder; -import io.quarkus.elytron.security.runtime.MPRealmConfig; -import io.quarkus.elytron.security.runtime.MPRealmRuntimeConfig; -import io.quarkus.elytron.security.runtime.PropertiesRealmConfig; -import io.quarkus.elytron.security.runtime.SecurityUsersConfig; +import io.quarkus.elytron.security.properties.runtime.ElytronPropertiesFileRecorder; +import io.quarkus.elytron.security.properties.runtime.MPRealmRuntimeConfig; +import io.quarkus.elytron.security.properties.runtime.PropertiesRealmConfig; +import io.quarkus.elytron.security.properties.runtime.SecurityUsersConfig; import io.quarkus.runtime.RuntimeValue; /** diff --git a/extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/runtime/DigestAlgorithm.java b/extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/properties/runtime/DigestAlgorithm.java similarity index 91% rename from extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/runtime/DigestAlgorithm.java rename to extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/properties/runtime/DigestAlgorithm.java index 5e74458a6ce63..1839dd946224d 100644 --- a/extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/runtime/DigestAlgorithm.java +++ b/extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/properties/runtime/DigestAlgorithm.java @@ -1,4 +1,4 @@ -package io.quarkus.elytron.security.runtime; +package 
io.quarkus.elytron.security.properties.runtime; import org.wildfly.security.password.interfaces.DigestPassword; diff --git a/extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/runtime/ElytronPropertiesFileRecorder.java b/extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/properties/runtime/ElytronPropertiesFileRecorder.java similarity index 99% rename from extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/runtime/ElytronPropertiesFileRecorder.java rename to extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/properties/runtime/ElytronPropertiesFileRecorder.java index 907d723610e65..202b6b583ea06 100644 --- a/extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/runtime/ElytronPropertiesFileRecorder.java +++ b/extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/properties/runtime/ElytronPropertiesFileRecorder.java @@ -1,4 +1,4 @@ -package io.quarkus.elytron.security.runtime; +package io.quarkus.elytron.security.properties.runtime; import java.io.IOException; import java.io.UncheckedIOException; diff --git a/extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/runtime/MPRealmConfig.java b/extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/properties/runtime/MPRealmConfig.java similarity index 91% rename from extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/runtime/MPRealmConfig.java rename to extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/properties/runtime/MPRealmConfig.java index a249554c152f3..8cffc4a3fc946 100644 --- a/extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/runtime/MPRealmConfig.java +++ b/extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/properties/runtime/MPRealmConfig.java @@ -1,4 +1,4 @@ -package io.quarkus.elytron.security.runtime; +package io.quarkus.elytron.security.properties.runtime; import io.quarkus.runtime.annotations.ConfigGroup; import io.smallrye.config.WithDefault; diff --git a/extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/runtime/MPRealmRuntimeConfig.java b/extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/properties/runtime/MPRealmRuntimeConfig.java similarity index 96% rename from extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/runtime/MPRealmRuntimeConfig.java rename to extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/properties/runtime/MPRealmRuntimeConfig.java index 9143b60477246..a73f49772c162 100644 --- a/extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/runtime/MPRealmRuntimeConfig.java +++ b/extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/properties/runtime/MPRealmRuntimeConfig.java @@ -1,4 +1,4 @@ -package io.quarkus.elytron.security.runtime; +package io.quarkus.elytron.security.properties.runtime; import java.util.Map; diff --git a/extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/runtime/PropertiesRealmConfig.java 
b/extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/properties/runtime/PropertiesRealmConfig.java similarity index 96% rename from extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/runtime/PropertiesRealmConfig.java rename to extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/properties/runtime/PropertiesRealmConfig.java index 9e4aa8b5e95b8..5172cfc8c6af1 100644 --- a/extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/runtime/PropertiesRealmConfig.java +++ b/extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/properties/runtime/PropertiesRealmConfig.java @@ -1,4 +1,4 @@ -package io.quarkus.elytron.security.runtime; +package io.quarkus.elytron.security.properties.runtime; import io.quarkus.runtime.annotations.ConfigGroup; import io.smallrye.config.WithDefault; diff --git a/extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/runtime/SecurityUsersConfig.java b/extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/properties/runtime/SecurityUsersConfig.java similarity index 91% rename from extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/runtime/SecurityUsersConfig.java rename to extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/properties/runtime/SecurityUsersConfig.java index d9bc9f3918c70..bcc5786378627 100644 --- a/extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/runtime/SecurityUsersConfig.java +++ b/extensions/elytron-security-properties-file/runtime/src/main/java/io/quarkus/elytron/security/properties/runtime/SecurityUsersConfig.java @@ -1,4 +1,4 @@ -package io.quarkus.elytron.security.runtime; +package io.quarkus.elytron.security.properties.runtime; import io.quarkus.runtime.annotations.ConfigDocSection; import io.quarkus.runtime.annotations.ConfigPhase; diff --git a/extensions/flyway/deployment/pom.xml b/extensions/flyway/deployment/pom.xml index da0f30f213e9a..dda6855c95541 100644 --- a/extensions/flyway/deployment/pom.xml +++ b/extensions/flyway/deployment/pom.xml @@ -86,9 +86,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/flyway/deployment/src/main/java/io/quarkus/flyway/deployment/FlywayCallbacksLocator.java b/extensions/flyway/deployment/src/main/java/io/quarkus/flyway/deployment/FlywayCallbacksLocator.java index 92e5a125affc5..978bed167378a 100644 --- a/extensions/flyway/deployment/src/main/java/io/quarkus/flyway/deployment/FlywayCallbacksLocator.java +++ b/extensions/flyway/deployment/src/main/java/io/quarkus/flyway/deployment/FlywayCallbacksLocator.java @@ -76,7 +76,7 @@ public Map> getCallbacks() */ private Collection callbacksForDataSource(String dataSourceName) throws ClassNotFoundException, IllegalAccessException, InvocationTargetException, InstantiationException { - final Optional> callbackConfig = flywayBuildConfig.getConfigForDataSourceName(dataSourceName).callbacks; + final Optional> callbackConfig = flywayBuildConfig.datasources().get(dataSourceName).callbacks(); if (!callbackConfig.isPresent()) { return Collections.emptyList(); } diff --git a/extensions/flyway/deployment/src/main/java/io/quarkus/flyway/deployment/FlywayEnabled.java 
b/extensions/flyway/deployment/src/main/java/io/quarkus/flyway/deployment/FlywayEnabled.java index 928e009e8ef16..d4c52d665fb5c 100644 --- a/extensions/flyway/deployment/src/main/java/io/quarkus/flyway/deployment/FlywayEnabled.java +++ b/extensions/flyway/deployment/src/main/java/io/quarkus/flyway/deployment/FlywayEnabled.java @@ -18,7 +18,7 @@ public class FlywayEnabled implements BooleanSupplier { @Override public boolean getAsBoolean() { - return config.enabled; + return config.enabled(); } } diff --git a/extensions/flyway/deployment/src/main/java/io/quarkus/flyway/deployment/FlywayProcessor.java b/extensions/flyway/deployment/src/main/java/io/quarkus/flyway/deployment/FlywayProcessor.java index 76dc5882235b2..9765c8d0a9e5f 100644 --- a/extensions/flyway/deployment/src/main/java/io/quarkus/flyway/deployment/FlywayProcessor.java +++ b/extensions/flyway/deployment/src/main/java/io/quarkus/flyway/deployment/FlywayProcessor.java @@ -121,10 +121,10 @@ MigrationStateBuildItem build(BuildProducer resour Map> applicationMigrationsToDs = new HashMap<>(); for (var dataSourceName : dataSourceNames) { FlywayDataSourceBuildTimeConfig flywayDataSourceBuildTimeConfig = flywayBuildTimeConfig - .getConfigForDataSourceName(dataSourceName); + .datasources().get(dataSourceName); Collection migrationLocations = discoverApplicationMigrations( - flywayDataSourceBuildTimeConfig.locations); + flywayDataSourceBuildTimeConfig.locations()); applicationMigrationsToDs.put(dataSourceName, migrationLocations); } Set datasourcesWithMigrations = new HashSet<>(); diff --git a/extensions/flyway/runtime/pom.xml b/extensions/flyway/runtime/pom.xml index 1c72cb73e60f8..47a34db56c9a7 100644 --- a/extensions/flyway/runtime/pom.xml +++ b/extensions/flyway/runtime/pom.xml @@ -81,9 +81,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayBuildTimeConfig.java b/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayBuildTimeConfig.java index 76e427de6a5e4..467918c2ab341 100644 --- a/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayBuildTimeConfig.java +++ b/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayBuildTimeConfig.java @@ -1,27 +1,21 @@ package io.quarkus.flyway.runtime; -import java.util.Collections; import java.util.Map; import io.quarkus.datasource.common.runtime.DataSourceUtil; import io.quarkus.runtime.annotations.ConfigDocMapKey; import io.quarkus.runtime.annotations.ConfigDocSection; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefault; +import io.smallrye.config.WithDefaults; +import io.smallrye.config.WithParentName; +import io.smallrye.config.WithUnnamedKey; -@ConfigRoot(name = "flyway", phase = ConfigPhase.BUILD_AND_RUN_TIME_FIXED) -public final class FlywayBuildTimeConfig { - - /** - * Gets the {@link FlywayDataSourceBuildTimeConfig} for the given datasource name. 
- */ - public FlywayDataSourceBuildTimeConfig getConfigForDataSourceName(String dataSourceName) { - if (DataSourceUtil.isDefault(dataSourceName)) { - return defaultDataSource; - } - return namedDataSources.getOrDefault(dataSourceName, FlywayDataSourceBuildTimeConfig.defaultConfig()); - } +@ConfigRoot(phase = ConfigPhase.BUILD_AND_RUN_TIME_FIXED) +@ConfigMapping(prefix = "quarkus.flyway") +public interface FlywayBuildTimeConfig { /** * Whether Flyway is enabled *during the build*. @@ -30,20 +24,16 @@ public FlywayDataSourceBuildTimeConfig getConfigForDataSourceName(String dataSou * * @asciidoclet */ - @ConfigItem(defaultValue = "true") - public boolean enabled; - - /** - * Flyway configuration for the default datasource. - */ - @ConfigItem(name = ConfigItem.PARENT) - public FlywayDataSourceBuildTimeConfig defaultDataSource; + @WithDefault("true") + boolean enabled(); /** - * Named datasources. + * Datasources. */ - @ConfigItem(name = ConfigItem.PARENT) @ConfigDocMapKey("datasource-name") @ConfigDocSection - public Map namedDataSources = Collections.emptyMap(); + @WithParentName + @WithUnnamedKey(DataSourceUtil.DEFAULT_DATASOURCE_NAME) + @WithDefaults + Map datasources(); } \ No newline at end of file diff --git a/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayContainer.java b/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayContainer.java index cc952cb0f15aa..a82f00b3089cb 100644 --- a/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayContainer.java +++ b/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayContainer.java @@ -8,6 +8,7 @@ public class FlywayContainer { private final boolean baselineAtStart; private final boolean cleanAtStart; + private final boolean cleanOnValidationError; private final boolean migrateAtStart; private final boolean repairAtStart; @@ -17,12 +18,13 @@ public class FlywayContainer { private final boolean createPossible; private final String id; - public FlywayContainer(Flyway flyway, boolean baselineAtStart, boolean cleanAtStart, boolean migrateAtStart, - boolean repairAtStart, boolean validateAtStart, + public FlywayContainer(Flyway flyway, boolean baselineAtStart, boolean cleanAtStart, boolean cleanOnValidationError, + boolean migrateAtStart, boolean repairAtStart, boolean validateAtStart, String dataSourceName, boolean hasMigrations, boolean createPossible) { this.flyway = flyway; this.baselineAtStart = baselineAtStart; this.cleanAtStart = cleanAtStart; + this.cleanOnValidationError = cleanOnValidationError; this.migrateAtStart = migrateAtStart; this.repairAtStart = repairAtStart; this.validateAtStart = validateAtStart; @@ -44,6 +46,10 @@ public boolean isCleanAtStart() { return cleanAtStart; } + public boolean isCleanOnValidationError() { + return cleanOnValidationError; + } + public boolean isMigrateAtStart() { return migrateAtStart; } diff --git a/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayContainerProducer.java b/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayContainerProducer.java index fe921bdf6a9a3..dab6ca392faab 100644 --- a/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayContainerProducer.java +++ b/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayContainerProducer.java @@ -45,15 +45,15 @@ public FlywayContainerProducer(FlywayRuntimeConfig flywayRuntimeConfig, FlywayBu public FlywayContainer createFlyway(DataSource dataSource, String dataSourceName, boolean 
hasMigrations, boolean createPossible) { - FlywayDataSourceRuntimeConfig matchingRuntimeConfig = flywayRuntimeConfig.getConfigForDataSourceName(dataSourceName); - FlywayDataSourceBuildTimeConfig matchingBuildTimeConfig = flywayBuildConfig.getConfigForDataSourceName(dataSourceName); + FlywayDataSourceRuntimeConfig matchingRuntimeConfig = flywayRuntimeConfig.datasources().get(dataSourceName); + FlywayDataSourceBuildTimeConfig matchingBuildTimeConfig = flywayBuildConfig.datasources().get(dataSourceName); final Collection callbacks = QuarkusPathLocationScanner.callbacksForDataSource(dataSourceName); final Flyway flyway = new FlywayCreator(matchingRuntimeConfig, matchingBuildTimeConfig, matchingConfigCustomizers( configCustomizerInstances, dataSourceName)).withCallbacks(callbacks) .createFlyway(dataSource); - return new FlywayContainer(flyway, matchingRuntimeConfig.baselineAtStart, matchingRuntimeConfig.cleanAtStart, - matchingRuntimeConfig.migrateAtStart, - matchingRuntimeConfig.repairAtStart, matchingRuntimeConfig.validateAtStart, + return new FlywayContainer(flyway, matchingRuntimeConfig.baselineAtStart(), matchingRuntimeConfig.cleanAtStart(), + matchingRuntimeConfig.cleanOnValidationError(), matchingRuntimeConfig.migrateAtStart(), + matchingRuntimeConfig.repairAtStart(), matchingRuntimeConfig.validateAtStart(), dataSourceName, hasMigrations, createPossible); } diff --git a/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayCreator.java b/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayCreator.java index 9401b42d184fe..7775d53d883af 100644 --- a/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayCreator.java +++ b/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayCreator.java @@ -52,64 +52,64 @@ public FlywayCreator withCallbacks(Collection callbacks) { public Flyway createFlyway(DataSource dataSource) { FluentConfiguration configure = Flyway.configure(); - if (flywayRuntimeConfig.jdbcUrl.isPresent()) { - if (flywayRuntimeConfig.username.isPresent() && flywayRuntimeConfig.password.isPresent()) { - configure.dataSource(flywayRuntimeConfig.jdbcUrl.get(), flywayRuntimeConfig.username.get(), - flywayRuntimeConfig.password.get()); + if (flywayRuntimeConfig.jdbcUrl().isPresent()) { + if (flywayRuntimeConfig.username().isPresent() && flywayRuntimeConfig.password().isPresent()) { + configure.dataSource(flywayRuntimeConfig.jdbcUrl().get(), flywayRuntimeConfig.username().get(), + flywayRuntimeConfig.password().get()); } else { throw new ConfigurationException( "Username and password must be defined when a JDBC URL is provided in the Flyway configuration"); } } else { - if (flywayRuntimeConfig.username.isPresent() && flywayRuntimeConfig.password.isPresent()) { + if (flywayRuntimeConfig.username().isPresent() && flywayRuntimeConfig.password().isPresent()) { AgroalDataSource agroalDataSource = (AgroalDataSource) dataSource; String jdbcUrl = agroalDataSource.getConfiguration().connectionPoolConfiguration() .connectionFactoryConfiguration().jdbcUrl(); - configure.dataSource(jdbcUrl, flywayRuntimeConfig.username.get(), - flywayRuntimeConfig.password.get()); + configure.dataSource(jdbcUrl, flywayRuntimeConfig.username().get(), + flywayRuntimeConfig.password().get()); } else if (dataSource != null) { configure.dataSource(dataSource); } } - if (flywayRuntimeConfig.initSql.isPresent()) { - configure.initSql(flywayRuntimeConfig.initSql.get()); + if (flywayRuntimeConfig.initSql().isPresent()) { + 
configure.initSql(flywayRuntimeConfig.initSql().get()); } - if (flywayRuntimeConfig.connectRetries.isPresent()) { - configure.connectRetries(flywayRuntimeConfig.connectRetries.getAsInt()); + if (flywayRuntimeConfig.connectRetries().isPresent()) { + configure.connectRetries(flywayRuntimeConfig.connectRetries().getAsInt()); } configure.connectRetriesInterval( - (int) flywayRuntimeConfig.connectRetriesInterval.orElse(DEFAULT_CONNECT_RETRIES_INTERVAL).toSeconds()); - if (flywayRuntimeConfig.defaultSchema.isPresent()) { - configure.defaultSchema(flywayRuntimeConfig.defaultSchema.get()); + (int) flywayRuntimeConfig.connectRetriesInterval().orElse(DEFAULT_CONNECT_RETRIES_INTERVAL).toSeconds()); + if (flywayRuntimeConfig.defaultSchema().isPresent()) { + configure.defaultSchema(flywayRuntimeConfig.defaultSchema().get()); } - if (flywayRuntimeConfig.schemas.isPresent()) { - configure.schemas(flywayRuntimeConfig.schemas.get().toArray(EMPTY_ARRAY)); + if (flywayRuntimeConfig.schemas().isPresent()) { + configure.schemas(flywayRuntimeConfig.schemas().get().toArray(EMPTY_ARRAY)); } - if (flywayRuntimeConfig.table.isPresent()) { - configure.table(flywayRuntimeConfig.table.get()); + if (flywayRuntimeConfig.table().isPresent()) { + configure.table(flywayRuntimeConfig.table().get()); } - configure.locations(flywayBuildTimeConfig.locations.toArray(EMPTY_ARRAY)); - if (flywayRuntimeConfig.sqlMigrationPrefix.isPresent()) { - configure.sqlMigrationPrefix(flywayRuntimeConfig.sqlMigrationPrefix.get()); + configure.locations(flywayBuildTimeConfig.locations().toArray(EMPTY_ARRAY)); + if (flywayRuntimeConfig.sqlMigrationPrefix().isPresent()) { + configure.sqlMigrationPrefix(flywayRuntimeConfig.sqlMigrationPrefix().get()); } - if (flywayRuntimeConfig.repeatableSqlMigrationPrefix.isPresent()) { - configure.repeatableSqlMigrationPrefix(flywayRuntimeConfig.repeatableSqlMigrationPrefix.get()); + if (flywayRuntimeConfig.repeatableSqlMigrationPrefix().isPresent()) { + configure.repeatableSqlMigrationPrefix(flywayRuntimeConfig.repeatableSqlMigrationPrefix().get()); } - configure.cleanDisabled(flywayRuntimeConfig.cleanDisabled); - configure.baselineOnMigrate(flywayRuntimeConfig.baselineOnMigrate); - configure.validateOnMigrate(flywayRuntimeConfig.validateOnMigrate); - configure.validateMigrationNaming(flywayRuntimeConfig.validateMigrationNaming); + configure.cleanDisabled(flywayRuntimeConfig.cleanDisabled()); + configure.baselineOnMigrate(flywayRuntimeConfig.baselineOnMigrate()); + configure.validateOnMigrate(flywayRuntimeConfig.validateOnMigrate()); + configure.validateMigrationNaming(flywayRuntimeConfig.validateMigrationNaming()); final String[] ignoreMigrationPatterns; - if (flywayRuntimeConfig.ignoreMigrationPatterns.isPresent()) { - ignoreMigrationPatterns = flywayRuntimeConfig.ignoreMigrationPatterns.get(); + if (flywayRuntimeConfig.ignoreMigrationPatterns().isPresent()) { + ignoreMigrationPatterns = flywayRuntimeConfig.ignoreMigrationPatterns().get(); } else { List patterns = new ArrayList<>(2); - if (flywayRuntimeConfig.ignoreMissingMigrations) { + if (flywayRuntimeConfig.ignoreMissingMigrations()) { patterns.add("*:Missing"); } - if (flywayRuntimeConfig.ignoreFutureMigrations) { + if (flywayRuntimeConfig.ignoreFutureMigrations()) { patterns.add("*:Future"); } // Default is *:Future @@ -117,20 +117,20 @@ public Flyway createFlyway(DataSource dataSource) { } configure.ignoreMigrationPatterns(ignoreMigrationPatterns); - configure.outOfOrder(flywayRuntimeConfig.outOfOrder); - if 
(flywayRuntimeConfig.baselineVersion.isPresent()) { - configure.baselineVersion(flywayRuntimeConfig.baselineVersion.get()); + configure.outOfOrder(flywayRuntimeConfig.outOfOrder()); + if (flywayRuntimeConfig.baselineVersion().isPresent()) { + configure.baselineVersion(flywayRuntimeConfig.baselineVersion().get()); } - if (flywayRuntimeConfig.baselineDescription.isPresent()) { - configure.baselineDescription(flywayRuntimeConfig.baselineDescription.get()); + if (flywayRuntimeConfig.baselineDescription().isPresent()) { + configure.baselineDescription(flywayRuntimeConfig.baselineDescription().get()); } - configure.placeholders(flywayRuntimeConfig.placeholders); - configure.createSchemas(flywayRuntimeConfig.createSchemas); - if (flywayRuntimeConfig.placeholderPrefix.isPresent()) { - configure.placeholderPrefix(flywayRuntimeConfig.placeholderPrefix.get()); + configure.placeholders(flywayRuntimeConfig.placeholders()); + configure.createSchemas(flywayRuntimeConfig.createSchemas()); + if (flywayRuntimeConfig.placeholderPrefix().isPresent()) { + configure.placeholderPrefix(flywayRuntimeConfig.placeholderPrefix().get()); } - if (flywayRuntimeConfig.placeholderSuffix.isPresent()) { - configure.placeholderSuffix(flywayRuntimeConfig.placeholderSuffix.get()); + if (flywayRuntimeConfig.placeholderSuffix().isPresent()) { + configure.placeholderSuffix(flywayRuntimeConfig.placeholderSuffix().get()); } if (!callbacks.isEmpty()) { configure.callbacks(callbacks.toArray(new Callback[0])); diff --git a/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayDataSourceBuildTimeConfig.java b/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayDataSourceBuildTimeConfig.java index 7668f4ca191ea..51a621defb296 100644 --- a/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayDataSourceBuildTimeConfig.java +++ b/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayDataSourceBuildTimeConfig.java @@ -1,18 +1,17 @@ package io.quarkus.flyway.runtime; -import java.util.Collections; import java.util.List; import java.util.Optional; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; -import io.quarkus.runtime.annotations.ConvertWith; import io.quarkus.runtime.configuration.TrimmedStringConverter; +import io.smallrye.config.WithConverter; +import io.smallrye.config.WithDefault; @ConfigGroup -public final class FlywayDataSourceBuildTimeConfig { +public interface FlywayDataSourceBuildTimeConfig { - private static final String DEFAULT_LOCATION = "db/migration"; + String DEFAULT_LOCATION = "db/migration"; /** * Comma-separated list of locations to scan recursively for migrations. The location type is determined by its prefix. @@ -23,9 +22,8 @@ public final class FlywayDataSourceBuildTimeConfig { * Locations starting with filesystem: point to a directory on the filesystem, may only contain SQL migrations and are only * scanned recursively down non-hidden directories. */ - @ConfigItem(defaultValue = DEFAULT_LOCATION) - @ConvertWith(TrimmedStringConverter.class) - public List locations; + @WithDefault(DEFAULT_LOCATION) + List<@WithConverter(TrimmedStringConverter.class) String> locations(); /** * Comma-separated list of fully qualified class names of Callback implementations @@ -33,17 +31,5 @@ public final class FlywayDataSourceBuildTimeConfig { * The {@link org.flywaydb.core.api.callback.Callback} subclass must have a no-args constructor and must not be abstract. 
* These classes must also not have any fields that hold state (unless that state is initialized in the constructor). */ - @ConfigItem - public Optional> callbacks = Optional.empty(); - - /** - * Creates a {@link FlywayDataSourceBuildTimeConfig} with default settings. - * - * @return {@link FlywayDataSourceBuildTimeConfig} - */ - public static FlywayDataSourceBuildTimeConfig defaultConfig() { - FlywayDataSourceBuildTimeConfig defaultConfig = new FlywayDataSourceBuildTimeConfig(); - defaultConfig.locations = Collections.singletonList(DEFAULT_LOCATION); - return defaultConfig; - } + Optional> callbacks(); } diff --git a/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayDataSourceRuntimeConfig.java b/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayDataSourceRuntimeConfig.java index ed0c82aaa2a76..9a95a1305e3a6 100644 --- a/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayDataSourceRuntimeConfig.java +++ b/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayDataSourceRuntimeConfig.java @@ -1,33 +1,25 @@ package io.quarkus.flyway.runtime; import java.time.Duration; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.OptionalInt; +import io.quarkus.runtime.annotations.ConfigDocDefault; import io.quarkus.runtime.annotations.ConfigDocMapKey; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; +import io.smallrye.config.WithDefault; +import io.smallrye.config.WithName; @ConfigGroup -public final class FlywayDataSourceRuntimeConfig { - - /** - * Creates a {@link FlywayDataSourceRuntimeConfig} with default settings. - * - * @return {@link FlywayDataSourceRuntimeConfig} - */ - public static FlywayDataSourceRuntimeConfig defaultConfig() { - return new FlywayDataSourceRuntimeConfig(); - } +public interface FlywayDataSourceRuntimeConfig { /** * Flag to activate/deactivate Flyway for a specific datasource at runtime. */ - @ConfigItem(defaultValueDocumentation = "'true' if the datasource is active; 'false' otherwise") - public Optional active = Optional.empty(); + @ConfigDocDefault("'true' if the datasource is active; 'false' otherwise") + Optional active(); /** * The maximum number of retries when attempting to connect to the database. @@ -35,16 +27,15 @@ public static FlywayDataSourceRuntimeConfig defaultConfig() { * After each failed attempt, Flyway will wait up to the configured `connect-retries-interval` duration before * attempting to connect again, up to the maximum number of times specified by connectRetries. */ - @ConfigItem - public OptionalInt connectRetries = OptionalInt.empty(); + OptionalInt connectRetries(); /** * The maximum time between retries when attempting to connect to the database. *
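For illustration only, a minimal callback sketch that satisfies the constraints just described: a public no-args constructor and no mutable state. The package, class name and log message are hypothetical, the standard Flyway Callback API is assumed, and it would be referenced through a property such as quarkus.flyway.callbacks=org.acme.AuditCallback.

package org.acme;

import org.flywaydb.core.api.callback.Callback;
import org.flywaydb.core.api.callback.Context;
import org.flywaydb.core.api.callback.Event;

// Hypothetical example: stateless and with a default no-args constructor,
// so the extension can instantiate it from the configured class name.
public class AuditCallback implements Callback {

    @Override
    public boolean supports(Event event, Context context) {
        // Only react once all migrations have been applied.
        return event == Event.AFTER_MIGRATE;
    }

    @Override
    public boolean canHandleInTransaction(Event event, Context context) {
        return true;
    }

    @Override
    public void handle(Event event, Context context) {
        System.out.println("Flyway migrations completed against " + context.getConfiguration().getUrl());
    }

    @Override
    public String getCallbackName() {
        return "AuditCallback";
    }
}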

    * This will cap the interval between connect retries to the value provided. */ - @ConfigItem(defaultValueDocumentation = "120 seconds") - public Optional connectRetriesInterval = Optional.empty(); + @ConfigDocDefault("120 seconds") + Optional connectRetriesInterval(); /** * Sets the default schema managed by Flyway. This schema name is case-sensitive. If not specified, but schemas @@ -58,37 +49,32 @@ public static FlywayDataSourceRuntimeConfig defaultConfig() { *

  * This schema will be the default for the database connection (provided the database supports this concept).
  • * */ - @ConfigItem - public Optional defaultSchema = Optional.empty(); + Optional defaultSchema(); /** * The JDBC URL that Flyway uses to connect to the database. * Falls back to the datasource URL if not specified. */ - @ConfigItem - public Optional jdbcUrl = Optional.empty(); + Optional jdbcUrl(); /** * The username that Flyway uses to connect to the database. * If no specific JDBC URL is configured, falls back to the datasource username if not specified. */ - @ConfigItem - public Optional username = Optional.empty(); + Optional username(); /** * The password that Flyway uses to connect to the database. * If no specific JDBC URL is configured, falls back to the datasource password if not specified. */ - @ConfigItem - public Optional password = Optional.empty(); + Optional password(); /** * Comma-separated case-sensitive list of schemas managed by Flyway. * The first schema in the list will be automatically set as the default one during the migration. * It will also be the one containing the schema history table. */ - @ConfigItem - public Optional> schemas = Optional.empty(); + Optional> schemas(); /** * The name of Flyway's schema history table. @@ -97,8 +83,7 @@ public static FlywayDataSourceRuntimeConfig defaultConfig() { * When the flyway.schemas property is set (multi-schema mode), the schema history table is placed in the first schema of * the list. */ - @ConfigItem - public Optional table = Optional.empty(); + Optional table(); /** * The file name prefix for versioned SQL migrations. @@ -106,8 +91,7 @@ public static FlywayDataSourceRuntimeConfig defaultConfig() { * Versioned SQL migrations have the following file name structure: prefixVERSIONseparatorDESCRIPTIONsuffix , which using * the defaults translates to V1.1__My_description.sql */ - @ConfigItem - public Optional sqlMigrationPrefix = Optional.empty(); + Optional sqlMigrationPrefix(); /** * The file name prefix for repeatable SQL migrations. @@ -115,42 +99,48 @@ public static FlywayDataSourceRuntimeConfig defaultConfig() { * Repeatable SQL migrations have the following file name structure: prefixSeparatorDESCRIPTIONsuffix , which using the * defaults translates to R__My_description.sql */ - @ConfigItem - public Optional repeatableSqlMigrationPrefix = Optional.empty(); + Optional repeatableSqlMigrationPrefix(); /** * true to execute Flyway clean command automatically when the application starts, false otherwise. * */ - @ConfigItem - public boolean cleanAtStart; + @WithDefault("false") + boolean cleanAtStart(); /** * true to prevent Flyway clean operations, false otherwise. */ - @ConfigItem - public boolean cleanDisabled; + @WithDefault("false") + boolean cleanDisabled(); /** * true to execute Flyway automatically when the application starts, false otherwise. * */ - @ConfigItem - public boolean migrateAtStart; + @WithDefault("false") + boolean migrateAtStart(); /** * true to execute a Flyway repair command when the application starts, false otherwise. * */ - @ConfigItem - public boolean repairAtStart; + @WithDefault("false") + boolean repairAtStart(); /** * true to execute a Flyway validate command when the application starts, false otherwise. * */ - @ConfigItem - public boolean validateAtStart; + @WithDefault("false") + boolean validateAtStart(); + + /** + * true to automatically execute a Flyway clean command when a validation error occurs at start, false otherwise. 
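For illustration only, a hypothetical application.properties fragment that exercises the new option; the key names follow from the quarkus.flyway prefix and the @WithName value declared just below, and the behaviour relies on the FlywayRecorder change further down, which switches to validateWithResult() and cleans instead of failing start-up.

# assumed keys for the default datasource
quarkus.flyway.migrate-at-start=true
quarkus.flyway.validate-at-start=true
quarkus.flyway.validate-at-start.clean-on-validation-error=true
# the fallback clean can only run while clean is not disabled (false is already the default)
quarkus.flyway.clean-disabled=false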
+ */ + @WithName("validate-at-start.clean-on-validation-error") + @WithDefault("false") + boolean cleanOnValidationError(); /** * true to execute Flyway baseline before migrations This flag is ignored if the flyway_schema_history table exists in the @@ -158,94 +148,88 @@ public static FlywayDataSourceRuntimeConfig defaultConfig() { * Note that this will not automatically call migrate, you must either enable baselineAtStart or programmatically call * flyway.migrate(). */ - @ConfigItem - public boolean baselineOnMigrate; + @WithDefault("false") + boolean baselineOnMigrate(); /** * true to execute Flyway baseline automatically when the application starts. * This flag is ignored if the flyway_schema_history table exists in the current schema. * This will work even if the current schema is empty. */ - @ConfigItem - public boolean baselineAtStart; + @WithDefault("false") + boolean baselineAtStart(); /** * The initial baseline version. */ - @ConfigItem - public Optional baselineVersion = Optional.empty(); + Optional baselineVersion(); /** * The description to tag an existing schema with when executing baseline. */ - @ConfigItem - public Optional baselineDescription = Optional.empty(); + Optional baselineDescription(); /** * Whether to automatically call validate when performing a migration. */ - @ConfigItem(defaultValue = "true") - public boolean validateOnMigrate = true; + @WithDefault("true") + boolean validateOnMigrate(); /** * Allows migrations to be run "out of order". */ - @ConfigItem - public boolean outOfOrder; + @WithDefault("false") + boolean outOfOrder(); /** * Ignore missing migrations when reading the history table. When set to true migrations from older versions present in the * history table but absent in the configured locations will be ignored (and logged as a warning), when false (the default) * the validation step will fail. */ - @ConfigItem - public boolean ignoreMissingMigrations; + @WithDefault("false") + boolean ignoreMissingMigrations(); /** * Ignore future migrations when reading the history table. When set to true migrations from newer versions present in the * history table but absent in the configured locations will be ignored (and logged as a warning), when false (the default) * the validation step will fail. */ - @ConfigItem - public boolean ignoreFutureMigrations; + @WithDefault("false") + boolean ignoreFutureMigrations(); /** * Sets the placeholders to replace in SQL migration scripts. */ - @ConfigItem @ConfigDocMapKey("placeholder-key") - public Map placeholders = Collections.emptyMap(); + Map placeholders(); /** * Whether Flyway should attempt to create the schemas specified in the schemas property */ - @ConfigItem(defaultValue = "true") - public boolean createSchemas; + @WithDefault("true") + boolean createSchemas(); /** * Prefix of every placeholder (default: ${ ) */ - @ConfigItem - public Optional placeholderPrefix = Optional.empty(); + Optional placeholderPrefix(); /** * Suffix of every placeholder (default: } ) */ - @ConfigItem - public Optional placeholderSuffix = Optional.empty(); + Optional placeholderSuffix(); /** * The SQL statements to run to initialize a new database connection immediately after opening it. */ - @ConfigItem - public Optional initSql = Optional.empty(); + Optional initSql(); /** * Whether to validate migrations and callbacks whose scripts do not obey the correct naming convention. A failure can be * useful to check that errors such as case sensitivity in migration prefixes have been corrected. 
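For illustration only: the ignore-missing-migrations and ignore-future-migrations flags above are translated by FlywayCreator (earlier in this diff) into the '*:Missing' and '*:Future' validate patterns, so an equivalent, more explicit configuration, with the key name assumed from the mapping, would be:

quarkus.flyway.ignore-migration-patterns=*:missing,*:future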
*/ - @ConfigItem - public boolean validateMigrationNaming; + @WithDefault("false") + boolean validateMigrationNaming(); /** * Ignore migrations during validate and repair according to a given list of patterns (see @@ -253,6 +237,5 @@ public static FlywayDataSourceRuntimeConfig defaultConfig() { * When this configuration is set, the ignoreFutureMigrations and ignoreMissingMigrations settings are ignored. Patterns are * comma separated. */ - @ConfigItem - public Optional ignoreMigrationPatterns = Optional.empty(); + Optional ignoreMigrationPatterns(); } diff --git a/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayRecorder.java b/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayRecorder.java index 2c2e10a4e4199..45322ca82cdfa 100644 --- a/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayRecorder.java +++ b/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayRecorder.java @@ -109,10 +109,10 @@ public Flyway apply(SyntheticCreationalContext context) { public void doStartActions(String dataSourceName) { FlywayDataSourceRuntimeConfig flywayDataSourceRuntimeConfig = config.getValue() - .getConfigForDataSourceName(dataSourceName); + .datasources().get(dataSourceName); - if (flywayDataSourceRuntimeConfig.active.isPresent() - && !flywayDataSourceRuntimeConfig.active.get()) { + if (flywayDataSourceRuntimeConfig.active().isPresent() + && !flywayDataSourceRuntimeConfig.active().get()) { return; } @@ -129,7 +129,15 @@ public void doStartActions(String dataSourceName) { flywayContainer.getFlyway().clean(); } if (flywayContainer.isValidateAtStart()) { - flywayContainer.getFlyway().validate(); + if (flywayContainer.isCleanOnValidationError()) { + var result = flywayContainer.getFlyway().validateWithResult(); + + if (!result.validationSuccessful) { + flywayContainer.getFlyway().clean(); + } + } else { + flywayContainer.getFlyway().validate(); + } } if (flywayContainer.isBaselineAtStart()) { new FlywayExecutor(flywayContainer.getFlyway().getConfiguration()) diff --git a/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayRuntimeConfig.java b/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayRuntimeConfig.java index c778e2ac636d8..62a2fe9652b48 100644 --- a/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayRuntimeConfig.java +++ b/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/FlywayRuntimeConfig.java @@ -1,39 +1,28 @@ package io.quarkus.flyway.runtime; -import java.util.Collections; import java.util.Map; import io.quarkus.datasource.common.runtime.DataSourceUtil; import io.quarkus.runtime.annotations.ConfigDocMapKey; import io.quarkus.runtime.annotations.ConfigDocSection; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefaults; +import io.smallrye.config.WithParentName; +import io.smallrye.config.WithUnnamedKey; -@ConfigRoot(name = "flyway", phase = ConfigPhase.RUN_TIME) -public final class FlywayRuntimeConfig { +@ConfigRoot(phase = ConfigPhase.RUN_TIME) +@ConfigMapping(prefix = "quarkus.flyway") +public interface FlywayRuntimeConfig { /** - * Gets the {@link FlywayDataSourceRuntimeConfig} for the given datasource name. + * Datasources. 
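For illustration only, a rough sketch of how the @WithParentName, @WithUnnamedKey and @WithDefaults combination used for both config roots is expected to resolve, under the assumed SmallRye Config semantics; the lookup helper below is hypothetical and not part of this change.

import io.quarkus.datasource.common.runtime.DataSourceUtil;
import io.quarkus.flyway.runtime.FlywayDataSourceRuntimeConfig;
import io.quarkus.flyway.runtime.FlywayRuntimeConfig;

// Sketch: with @WithParentName the datasource name follows the "quarkus.flyway." prefix directly,
//   quarkus.flyway.migrate-at-start=true             -> stored under the unnamed key,
//                                                       i.e. DataSourceUtil.DEFAULT_DATASOURCE_NAME
//   quarkus.flyway."inventory".migrate-at-start=true -> stored under "inventory"
// and @WithDefaults means get() never returns null: unconfigured names resolve to an instance
// carrying the @WithDefault values, which replaces the old defaultConfig() fallback.
public class FlywayConfigLookup {

    public static boolean migrateAtStart(FlywayRuntimeConfig config, String dataSourceName) {
        FlywayDataSourceRuntimeConfig dsConfig = config.datasources().get(dataSourceName);
        return dsConfig.migrateAtStart();
    }

    public static boolean defaultMigrateAtStart(FlywayRuntimeConfig config) {
        return migrateAtStart(config, DataSourceUtil.DEFAULT_DATASOURCE_NAME);
    }
}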
*/ - public FlywayDataSourceRuntimeConfig getConfigForDataSourceName(String dataSourceName) { - if (DataSourceUtil.isDefault(dataSourceName)) { - return defaultDataSource; - } - return namedDataSources.getOrDefault(dataSourceName, FlywayDataSourceRuntimeConfig.defaultConfig()); - } - - /** - * Flyway configuration for the default datasource. - */ - @ConfigItem(name = ConfigItem.PARENT) - public FlywayDataSourceRuntimeConfig defaultDataSource = FlywayDataSourceRuntimeConfig.defaultConfig(); - - /** - * Named datasources. - */ - @ConfigItem(name = ConfigItem.PARENT) @ConfigDocMapKey("datasource-name") @ConfigDocSection - public Map namedDataSources = Collections.emptyMap(); + @WithParentName + @WithUnnamedKey(DataSourceUtil.DEFAULT_DATASOURCE_NAME) + @WithDefaults + Map datasources(); } diff --git a/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/UnconfiguredDataSourceFlywayContainer.java b/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/UnconfiguredDataSourceFlywayContainer.java index b9d5e4be9cbfc..cbd171125b5f5 100644 --- a/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/UnconfiguredDataSourceFlywayContainer.java +++ b/extensions/flyway/runtime/src/main/java/io/quarkus/flyway/runtime/UnconfiguredDataSourceFlywayContainer.java @@ -12,7 +12,7 @@ public class UnconfiguredDataSourceFlywayContainer extends FlywayContainer { private final Throwable cause; public UnconfiguredDataSourceFlywayContainer(String dataSourceName, String message, Throwable cause) { - super(null, false, false, false, false, false, dataSourceName, false, false); + super(null, false, false, false, false, false, false, dataSourceName, false, false); this.message = message; this.cause = cause; } diff --git a/extensions/flyway/runtime/src/test/java/io/quarkus/flyway/runtime/FlywayCreatorTest.java b/extensions/flyway/runtime/src/test/java/io/quarkus/flyway/runtime/FlywayCreatorTest.java deleted file mode 100644 index 4995671779b3f..0000000000000 --- a/extensions/flyway/runtime/src/test/java/io/quarkus/flyway/runtime/FlywayCreatorTest.java +++ /dev/null @@ -1,278 +0,0 @@ -package io.quarkus.flyway.runtime; - -import static java.util.Arrays.asList; -import static org.junit.jupiter.api.Assertions.assertArrayEquals; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import java.util.Arrays; -import java.util.List; -import java.util.Optional; -import java.util.OptionalInt; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -import org.flywaydb.core.Flyway; -import org.flywaydb.core.api.Location; -import org.flywaydb.core.api.configuration.Configuration; -import org.flywaydb.core.api.pattern.ValidatePattern; -import org.flywaydb.core.internal.util.ValidatePatternUtils; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.MethodSource; - -class FlywayCreatorTest { - - private FlywayDataSourceRuntimeConfig runtimeConfig = FlywayDataSourceRuntimeConfig.defaultConfig(); - private FlywayDataSourceBuildTimeConfig buildConfig = FlywayDataSourceBuildTimeConfig.defaultConfig(); - private Configuration defaultConfig = Flyway.configure().load().getConfiguration(); - - /** - * class under test. 
- */ - private FlywayCreator creator; - - @Test - @DisplayName("locations default matches flyway default") - void testLocationsDefault() { - creator = new FlywayCreator(runtimeConfig, buildConfig); - assertEquals(pathList(defaultConfig.getLocations()), pathList(createdFlywayConfig().getLocations())); - } - - @Test - @DisplayName("locations carried over from configuration") - void testLocationsOverridden() { - buildConfig.locations = Arrays.asList("db/migrations", "db/something"); - creator = new FlywayCreator(runtimeConfig, buildConfig); - assertEquals(buildConfig.locations, pathList(createdFlywayConfig().getLocations())); - } - - @Test - @DisplayName("not configured locations replaced by default") - void testNotPresentLocationsOverridden() { - creator = new FlywayCreator(runtimeConfig, buildConfig); - assertEquals(pathList(defaultConfig.getLocations()), pathList(createdFlywayConfig().getLocations())); - } - - @Test - @DisplayName("baseline description default matches flyway default") - void testBaselineDescriptionDefault() { - creator = new FlywayCreator(runtimeConfig, buildConfig); - assertEquals(defaultConfig.getBaselineDescription(), createdFlywayConfig().getBaselineDescription()); - } - - @Test - @DisplayName("baseline description carried over from configuration") - void testBaselineDescriptionOverridden() { - runtimeConfig.baselineDescription = Optional.of("baselineDescription"); - creator = new FlywayCreator(runtimeConfig, buildConfig); - assertEquals(runtimeConfig.baselineDescription.get(), createdFlywayConfig().getBaselineDescription()); - } - - @Test - @DisplayName("baseline version default matches flyway default") - void testBaselineVersionDefault() { - creator = new FlywayCreator(runtimeConfig, buildConfig); - assertEquals(defaultConfig.getBaselineVersion(), createdFlywayConfig().getBaselineVersion()); - } - - @Test - @DisplayName("baseline version carried over from configuration") - void testBaselineVersionOverridden() { - runtimeConfig.baselineVersion = Optional.of("0.1.2"); - creator = new FlywayCreator(runtimeConfig, buildConfig); - assertEquals(runtimeConfig.baselineVersion.get(), createdFlywayConfig().getBaselineVersion().getVersion()); - } - - @Test - @DisplayName("connection retries default matches flyway default") - void testConnectionRetriesDefault() { - creator = new FlywayCreator(runtimeConfig, buildConfig); - assertEquals(defaultConfig.getConnectRetries(), createdFlywayConfig().getConnectRetries()); - } - - @Test - @DisplayName("connection retries carried over from configuration") - void testConnectionRetriesOverridden() { - runtimeConfig.connectRetries = OptionalInt.of(12); - creator = new FlywayCreator(runtimeConfig, buildConfig); - assertEquals(runtimeConfig.connectRetries.getAsInt(), createdFlywayConfig().getConnectRetries()); - } - - @Test - @DisplayName("repeatable SQL migration prefix default matches flyway default") - void testRepeatableSqlMigrationPrefixDefault() { - creator = new FlywayCreator(runtimeConfig, buildConfig); - assertEquals(defaultConfig.getRepeatableSqlMigrationPrefix(), createdFlywayConfig().getRepeatableSqlMigrationPrefix()); - } - - @Test - @DisplayName("repeatable SQL migration prefix carried over from configuration") - void testRepeatableSqlMigrationPrefixOverridden() { - runtimeConfig.repeatableSqlMigrationPrefix = Optional.of("A"); - creator = new FlywayCreator(runtimeConfig, buildConfig); - assertEquals(runtimeConfig.repeatableSqlMigrationPrefix.get(), createdFlywayConfig().getRepeatableSqlMigrationPrefix()); - } - - @Test - 
@DisplayName("schemas default matches flyway default") - void testSchemasDefault() { - creator = new FlywayCreator(runtimeConfig, buildConfig); - assertEquals(asList(defaultConfig.getSchemas()), asList(createdFlywayConfig().getSchemas())); - } - - @Test - @DisplayName("schemas carried over from configuration") - void testSchemasOverridden() { - runtimeConfig.schemas = Optional.of(Arrays.asList("TEST_SCHEMA_1", "TEST_SCHEMA_2")); - creator = new FlywayCreator(runtimeConfig, buildConfig); - assertEquals(runtimeConfig.schemas.get(), asList(createdFlywayConfig().getSchemas())); - } - - @Test - @DisplayName("SQL migration prefix default matches flyway default") - void testSqlMigrationPrefixDefault() { - creator = new FlywayCreator(runtimeConfig, buildConfig); - assertEquals(defaultConfig.getSqlMigrationPrefix(), createdFlywayConfig().getSqlMigrationPrefix()); - } - - @Test - @DisplayName("SQL migration prefix carried over from configuration") - void testSqlMigrationPrefixOverridden() { - runtimeConfig.sqlMigrationPrefix = Optional.of("M"); - creator = new FlywayCreator(runtimeConfig, buildConfig); - assertEquals(runtimeConfig.sqlMigrationPrefix.get(), createdFlywayConfig().getSqlMigrationPrefix()); - } - - @Test - @DisplayName("table default matches flyway default") - void testTableDefault() { - creator = new FlywayCreator(runtimeConfig, buildConfig); - assertEquals(defaultConfig.getTable(), createdFlywayConfig().getTable()); - } - - @Test - @DisplayName("table carried over from configuration") - void testTableOverridden() { - runtimeConfig.table = Optional.of("flyway_history_test_table"); - creator = new FlywayCreator(runtimeConfig, buildConfig); - assertEquals(runtimeConfig.table.get(), createdFlywayConfig().getTable()); - } - - @Test - @DisplayName("validate on migrate default matches to true") - void testValidateOnMigrate() { - creator = new FlywayCreator(runtimeConfig, buildConfig); - assertEquals(runtimeConfig.validateOnMigrate, createdFlywayConfig().isValidateOnMigrate()); - assertTrue(runtimeConfig.validateOnMigrate); - } - - @Test - @DisplayName("clean disabled default matches to false") - void testCleanDisabled() { - creator = new FlywayCreator(runtimeConfig, buildConfig); - assertEquals(runtimeConfig.cleanDisabled, createdFlywayConfig().isCleanDisabled()); - assertFalse(runtimeConfig.cleanDisabled); - - runtimeConfig.cleanDisabled = false; - creator = new FlywayCreator(runtimeConfig, buildConfig); - assertFalse(createdFlywayConfig().isCleanDisabled()); - - runtimeConfig.cleanDisabled = true; - creator = new FlywayCreator(runtimeConfig, buildConfig); - assertTrue(createdFlywayConfig().isCleanDisabled()); - } - - @Test - @DisplayName("outOfOrder is correctly set") - void testOutOfOrder() { - runtimeConfig.outOfOrder = false; - creator = new FlywayCreator(runtimeConfig, buildConfig); - assertFalse(createdFlywayConfig().isOutOfOrder()); - - runtimeConfig.outOfOrder = true; - creator = new FlywayCreator(runtimeConfig, buildConfig); - assertTrue(createdFlywayConfig().isOutOfOrder()); - } - - @Test - @DisplayName("ignoreMissingMigrations is correctly set") - void testIgnoreMissingMigrations() { - runtimeConfig.ignoreMissingMigrations = false; - creator = new FlywayCreator(runtimeConfig, buildConfig); - assertFalse(ValidatePatternUtils.isMissingIgnored(createdFlywayConfig().getIgnoreMigrationPatterns())); - - runtimeConfig.ignoreMissingMigrations = true; - creator = new FlywayCreator(runtimeConfig, buildConfig); - 
assertTrue(ValidatePatternUtils.isMissingIgnored(createdFlywayConfig().getIgnoreMigrationPatterns())); - } - - @Test - @DisplayName("ignoreFutureMigrations is correctly set") - void testIgnoreFutureMigrations() { - runtimeConfig.ignoreFutureMigrations = false; - creator = new FlywayCreator(runtimeConfig, buildConfig); - assertFalse(ValidatePatternUtils.isFutureIgnored(createdFlywayConfig().getIgnoreMigrationPatterns())); - - runtimeConfig.ignoreFutureMigrations = true; - creator = new FlywayCreator(runtimeConfig, buildConfig); - assertTrue(ValidatePatternUtils.isFutureIgnored(createdFlywayConfig().getIgnoreMigrationPatterns())); - } - - @ParameterizedTest - @MethodSource("validateOnMigrateOverwritten") - @DisplayName("validate on migrate overwritten in configuration") - void testValidateOnMigrateOverwritten(final boolean input, final boolean expected) { - runtimeConfig.validateOnMigrate = input; - creator = new FlywayCreator(runtimeConfig, buildConfig); - assertEquals(createdFlywayConfig().isValidateOnMigrate(), expected); - assertEquals(runtimeConfig.validateOnMigrate, expected); - } - - @Test - @DisplayName("validateMigrationNaming defaults to false and it is correctly set") - void testValidateMigrationNaming() { - creator = new FlywayCreator(runtimeConfig, buildConfig); - assertEquals(runtimeConfig.validateMigrationNaming, createdFlywayConfig().isValidateMigrationNaming()); - assertFalse(runtimeConfig.validateMigrationNaming); - - runtimeConfig.validateMigrationNaming = true; - creator = new FlywayCreator(runtimeConfig, buildConfig); - assertTrue(createdFlywayConfig().isValidateMigrationNaming()); - } - - @Test - @DisplayName("validateIgnoreMigrationPatterns defaults to false and it is correctly set") - void testIgnoreMigrationPatterns() { - creator = new FlywayCreator(runtimeConfig, buildConfig); - assertEquals(0, createdFlywayConfig().getIgnoreMigrationPatterns().length); - assertFalse(runtimeConfig.ignoreMigrationPatterns.isPresent()); - - runtimeConfig.ignoreMigrationPatterns = Optional.of(new String[] { "*:missing" }); - creator = new FlywayCreator(runtimeConfig, buildConfig); - final ValidatePattern[] existingIgnoreMigrationPatterns = createdFlywayConfig().getIgnoreMigrationPatterns(); - assertEquals(1, existingIgnoreMigrationPatterns.length); - final String[] ignoreMigrationPatterns = runtimeConfig.ignoreMigrationPatterns.get(); - final ValidatePattern[] validatePatterns = Arrays.stream(ignoreMigrationPatterns) - .map(ValidatePattern::fromPattern).toArray(ValidatePattern[]::new); - assertArrayEquals(validatePatterns, existingIgnoreMigrationPatterns); - } - - private static List pathList(Location[] locations) { - return Stream.of(locations).map(Location::getPath).collect(Collectors.toList()); - } - - private Configuration createdFlywayConfig() { - return creator.createFlyway(null).getConfiguration(); - } - - private static Stream validateOnMigrateOverwritten() { - return Stream. 
builder() - .add(Arguments.arguments(false, false)) - .add(Arguments.arguments(true, true)) - .build(); - } -} diff --git a/extensions/funqy/funqy-http/deployment/src/main/java/io/quarkus/funqy/deployment/bindings/http/FunqyHttpBuildStep.java b/extensions/funqy/funqy-http/deployment/src/main/java/io/quarkus/funqy/deployment/bindings/http/FunqyHttpBuildStep.java index a2fa752ecfcf1..9c65d3848cdfd 100644 --- a/extensions/funqy/funqy-http/deployment/src/main/java/io/quarkus/funqy/deployment/bindings/http/FunqyHttpBuildStep.java +++ b/extensions/funqy/funqy-http/deployment/src/main/java/io/quarkus/funqy/deployment/bindings/http/FunqyHttpBuildStep.java @@ -6,8 +6,6 @@ import java.util.List; import java.util.Optional; -import org.jboss.logging.Logger; - import com.fasterxml.jackson.databind.ObjectMapper; import io.quarkus.arc.deployment.BeanContainerBuildItem; @@ -25,12 +23,11 @@ import io.quarkus.vertx.core.deployment.CoreVertxBuildItem; import io.quarkus.vertx.http.deployment.RequireBodyHandlerBuildItem; import io.quarkus.vertx.http.deployment.RouteBuildItem; -import io.quarkus.vertx.http.runtime.HttpBuildTimeConfig; +import io.quarkus.vertx.http.runtime.VertxHttpBuildTimeConfig; import io.vertx.core.Handler; import io.vertx.ext.web.RoutingContext; public class FunqyHttpBuildStep { - private static final Logger log = Logger.getLogger(FunqyHttpBuildStep.class); public static final String FUNQY_HTTP_FEATURE = "funqy-http"; @BuildStep @@ -55,12 +52,12 @@ public RequireBodyHandlerBuildItem requestBodyHandler(List fu public void staticInit(FunqyHttpBindingRecorder binding, BeanContainerBuildItem beanContainer, // dependency Optional hasFunctions, - HttpBuildTimeConfig httpConfig) throws Exception { + VertxHttpBuildTimeConfig httpBuildTimeConfig) throws Exception { if (!hasFunctions.isPresent() || hasFunctions.get() == null) return; // The context path + the resources path - String rootPath = httpConfig.rootPath; + String rootPath = httpBuildTimeConfig.rootPath(); binding.init(); } @@ -74,14 +71,14 @@ public void boot(ShutdownContextBuildItem shutdown, Optional hasFunctions, List functions, BeanContainerBuildItem beanContainer, - HttpBuildTimeConfig httpConfig, + VertxHttpBuildTimeConfig httpConfig, ExecutorBuildItem executorBuildItem) throws Exception { if (!hasFunctions.isPresent() || hasFunctions.get() == null) return; feature.produce(new FeatureBuildItem(FUNQY_HTTP_FEATURE)); - String rootPath = httpConfig.rootPath; + String rootPath = httpConfig.rootPath(); Handler handler = binding.start(rootPath, vertx.getVertx(), shutdown, diff --git a/extensions/funqy/funqy-knative-events/deployment/src/main/java/io/quarkus/funqy/deployment/bindings/knative/events/FunqyKnativeEventsBuildStep.java b/extensions/funqy/funqy-knative-events/deployment/src/main/java/io/quarkus/funqy/deployment/bindings/knative/events/FunqyKnativeEventsBuildStep.java index b8ba7d678b49d..bfe5e049d8831 100644 --- a/extensions/funqy/funqy-knative-events/deployment/src/main/java/io/quarkus/funqy/deployment/bindings/knative/events/FunqyKnativeEventsBuildStep.java +++ b/extensions/funqy/funqy-knative-events/deployment/src/main/java/io/quarkus/funqy/deployment/bindings/knative/events/FunqyKnativeEventsBuildStep.java @@ -27,7 +27,7 @@ import io.quarkus.vertx.core.deployment.CoreVertxBuildItem; import io.quarkus.vertx.http.deployment.RequireBodyHandlerBuildItem; import io.quarkus.vertx.http.deployment.RouteBuildItem; -import io.quarkus.vertx.http.runtime.HttpBuildTimeConfig; +import io.quarkus.vertx.http.runtime.VertxHttpBuildTimeConfig; 
import io.vertx.core.Handler; import io.vertx.ext.web.RoutingContext; @@ -75,14 +75,14 @@ public void boot(ShutdownContextBuildItem shutdown, BuildProducer routes, CoreVertxBuildItem vertx, BeanContainerBuildItem beanContainer, - HttpBuildTimeConfig httpConfig, + VertxHttpBuildTimeConfig httpBuildTimeConfig, ExecutorBuildItem executorBuildItem) throws Exception { if (!hasFunctions.isPresent() || hasFunctions.get() == null) return; feature.produce(new FeatureBuildItem(FUNQY_KNATIVE_FEATURE)); - String rootPath = httpConfig.rootPath; + String rootPath = httpBuildTimeConfig.rootPath(); if (rootPath == null) { rootPath = "/"; } else if (!rootPath.endsWith("/")) { diff --git a/extensions/grpc/deployment/pom.xml b/extensions/grpc/deployment/pom.xml index c19904ab9df5d..a6361bd37397a 100644 --- a/extensions/grpc/deployment/pom.xml +++ b/extensions/grpc/deployment/pom.xml @@ -159,9 +159,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/grpc/deployment/src/main/java/io/quarkus/grpc/deployment/GrpcBuildTimeConfig.java b/extensions/grpc/deployment/src/main/java/io/quarkus/grpc/deployment/GrpcBuildTimeConfig.java index 33874f303a2e9..1da9c538a9f42 100644 --- a/extensions/grpc/deployment/src/main/java/io/quarkus/grpc/deployment/GrpcBuildTimeConfig.java +++ b/extensions/grpc/deployment/src/main/java/io/quarkus/grpc/deployment/GrpcBuildTimeConfig.java @@ -1,18 +1,30 @@ package io.quarkus.grpc.deployment; import io.quarkus.runtime.annotations.ConfigDocSection; -import io.quarkus.runtime.annotations.ConfigItem; +import io.quarkus.runtime.annotations.ConfigGroup; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefault; +@ConfigMapping(prefix = "quarkus.grpc") @ConfigRoot(phase = ConfigPhase.BUILD_TIME) -public class GrpcBuildTimeConfig { +public interface GrpcBuildTimeConfig { /** * Configuration gRPC dev mode. */ - @ConfigItem @ConfigDocSection(generated = true) - public GrpcDevModeConfig devMode; + GrpcDevModeConfig devMode(); + @ConfigGroup + interface GrpcDevModeConfig { + + /** + * Start gRPC server in dev mode even if no gRPC services are implemented. + * By default set to `true` to ease incremental development of new services using dev mode. 
+ */ + @WithDefault("true") + boolean forceServerStart(); + } } diff --git a/extensions/grpc/deployment/src/main/java/io/quarkus/grpc/deployment/GrpcClientProcessor.java b/extensions/grpc/deployment/src/main/java/io/quarkus/grpc/deployment/GrpcClientProcessor.java index c78dc05fbf084..056cb1ac5134a 100644 --- a/extensions/grpc/deployment/src/main/java/io/quarkus/grpc/deployment/GrpcClientProcessor.java +++ b/extensions/grpc/deployment/src/main/java/io/quarkus/grpc/deployment/GrpcClientProcessor.java @@ -104,7 +104,7 @@ void registerStorkInterceptor(BuildProducer beans) { @BuildStep @Record(ExecutionTime.STATIC_INIT) void setUpStork(GrpcStorkRecorder storkRecorder, GrpcClientBuildTimeConfig config) { - storkRecorder.init(config.storkProactiveConnections); + storkRecorder.init(config.storkProactiveConnections()); } @BuildStep @@ -437,6 +437,11 @@ UnremovableBeanBuildItem unremovableClientInterceptors() { return UnremovableBeanBuildItem.beanTypes(GrpcDotNames.CLIENT_INTERCEPTOR); } + @BuildStep + UnremovableBeanBuildItem unremovableChannelBuilderCustomizers() { + return UnremovableBeanBuildItem.beanTypes(GrpcDotNames.CHANNEL_BUILDER_CUSTOMIZER); + } + Set getRegisteredInterceptors(InjectionPointInfo injectionPoint) { Set qualifiers = injectionPoint.getRequiredQualifiers(); if (qualifiers.size() <= 1) { diff --git a/extensions/grpc/deployment/src/main/java/io/quarkus/grpc/deployment/GrpcDevModeConfig.java b/extensions/grpc/deployment/src/main/java/io/quarkus/grpc/deployment/GrpcDevModeConfig.java deleted file mode 100644 index a2b0931824011..0000000000000 --- a/extensions/grpc/deployment/src/main/java/io/quarkus/grpc/deployment/GrpcDevModeConfig.java +++ /dev/null @@ -1,15 +0,0 @@ -package io.quarkus.grpc.deployment; - -import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; - -@ConfigGroup -public class GrpcDevModeConfig { - - /** - * Start gRPC server in dev mode even if no gRPC services are implemented. - * By default set to `true` to ease incremental development of new services using dev mode. 
- */ - @ConfigItem(defaultValue = "true") - public boolean forceServerStart; -} diff --git a/extensions/grpc/deployment/src/main/java/io/quarkus/grpc/deployment/GrpcDotNames.java b/extensions/grpc/deployment/src/main/java/io/quarkus/grpc/deployment/GrpcDotNames.java index 08cf2daab02b9..8f97a287eb594 100644 --- a/extensions/grpc/deployment/src/main/java/io/quarkus/grpc/deployment/GrpcDotNames.java +++ b/extensions/grpc/deployment/src/main/java/io/quarkus/grpc/deployment/GrpcDotNames.java @@ -24,6 +24,7 @@ import io.quarkus.grpc.RegisterClientInterceptor; import io.quarkus.grpc.RegisterInterceptor; import io.quarkus.grpc.RegisterInterceptors; +import io.quarkus.grpc.api.ChannelBuilderCustomizer; import io.quarkus.grpc.runtime.supports.Channels; import io.quarkus.grpc.runtime.supports.GrpcClientConfigProvider; import io.smallrye.common.annotation.Blocking; @@ -58,6 +59,8 @@ public class GrpcDotNames { .createSimple(RegisterClientInterceptor.List.class.getName()); public static final DotName CLIENT_INTERCEPTOR = DotName.createSimple(ClientInterceptor.class.getName()); + public static final DotName CHANNEL_BUILDER_CUSTOMIZER = DotName.createSimple(ChannelBuilderCustomizer.class.getName()); + static final MethodDescriptor CREATE_CHANNEL_METHOD = MethodDescriptor.ofMethod(Channels.class, "createChannel", Channel.class, String.class, Set.class); static final MethodDescriptor RETRIEVE_CHANNEL_METHOD = MethodDescriptor.ofMethod(Channels.class, "retrieveChannel", diff --git a/extensions/grpc/deployment/src/main/java/io/quarkus/grpc/deployment/GrpcServerProcessor.java b/extensions/grpc/deployment/src/main/java/io/quarkus/grpc/deployment/GrpcServerProcessor.java index 677c243adb9e0..49f15d00d9053 100644 --- a/extensions/grpc/deployment/src/main/java/io/quarkus/grpc/deployment/GrpcServerProcessor.java +++ b/extensions/grpc/deployment/src/main/java/io/quarkus/grpc/deployment/GrpcServerProcessor.java @@ -21,6 +21,7 @@ import java.util.Set; import java.util.function.Predicate; import java.util.function.Supplier; +import java.util.stream.Collectors; import jakarta.enterprise.inject.spi.DeploymentException; import jakarta.transaction.Transaction; @@ -90,9 +91,14 @@ import io.quarkus.kubernetes.spi.KubernetesPortBuildItem; import io.quarkus.netty.deployment.MinNettyAllocatorMaxOrderBuildItem; import io.quarkus.runtime.LaunchMode; +import io.quarkus.runtime.RuntimeValue; import io.quarkus.smallrye.health.deployment.spi.HealthBuildItem; import io.quarkus.vertx.deployment.VertxBuildItem; +import io.quarkus.vertx.http.deployment.FilterBuildItem; import io.quarkus.vertx.http.deployment.VertxWebRouterBuildItem; +import io.vertx.core.Handler; +import io.vertx.ext.web.Router; +import io.vertx.ext.web.RoutingContext; public class GrpcServerProcessor { @@ -683,7 +689,8 @@ ServiceStartBuildItem initializeServer(GrpcServerRecorder recorder, List orderEnforcer, LaunchModeBuildItem launchModeBuildItem, VertxWebRouterBuildItem routerBuildItem, - VertxBuildItem vertx, Capabilities capabilities) { + VertxBuildItem vertx, Capabilities capabilities, + List filterBuildItems) { // Build the list of blocking methods per service implementation Map> blocking = new HashMap<>(); @@ -700,12 +707,25 @@ ServiceStartBuildItem initializeServer(GrpcServerRecorder recorder, } if (!bindables.isEmpty() - || (LaunchMode.current() == LaunchMode.DEVELOPMENT && buildTimeConfig.devMode.forceServerStart)) { + || (LaunchMode.current() == LaunchMode.DEVELOPMENT && buildTimeConfig.devMode().forceServerStart())) { //Uses mainrouter when the 
'quarkus.http.root-path' is not '/' - recorder.initializeGrpcServer(vertx.getVertx(), - routerBuildItem.getMainRouter() != null ? routerBuildItem.getMainRouter() : routerBuildItem.getHttpRouter(), + Map> securityHandlers = null; + final RuntimeValue routerRuntimeValue; + if (routerBuildItem.getMainRouter() != null) { + routerRuntimeValue = routerBuildItem.getMainRouter(); + if (capabilities.isPresent(Capability.SECURITY)) { + securityHandlers = filterBuildItems + .stream() + .filter(filter -> filter.getPriority() == FilterBuildItem.AUTHENTICATION + || filter.getPriority() == FilterBuildItem.AUTHORIZATION) + .collect(Collectors.toMap(f -> f.getPriority() * -1, FilterBuildItem::getHandler)); + } + } else { + routerRuntimeValue = routerBuildItem.getHttpRouter(); + } + recorder.initializeGrpcServer(vertx.getVertx(), routerRuntimeValue, config, shutdown, blocking, virtuals, launchModeBuildItem.getLaunchMode(), - capabilities.isPresent(Capability.SECURITY)); + capabilities.isPresent(Capability.SECURITY), securityHandlers); return new ServiceStartBuildItem(GRPC_SERVER); } return null; @@ -726,14 +746,14 @@ void addHealthChecks(GrpcServerBuildTimeConfig config, BuildProducer beans) { boolean healthEnabled = false; if (!bindables.isEmpty()) { - healthEnabled = config.mpHealthEnabled; + healthEnabled = config.mpHealthEnabled(); - if (config.grpcHealthEnabled) { + if (config.grpcHealthEnabled()) { beans.produce(AdditionalBeanBuildItem.unremovableOf(GrpcHealthEndpoint.class)); healthEnabled = true; } healthBuildItems.produce(new HealthBuildItem("io.quarkus.grpc.runtime.health.GrpcHealthCheck", - config.mpHealthEnabled)); + config.mpHealthEnabled())); } if (healthEnabled || LaunchMode.current() == LaunchMode.DEVELOPMENT) { beans.produce(AdditionalBeanBuildItem.unremovableOf(GrpcHealthStorage.class)); diff --git a/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/auth/GrpcAuthCustomRootPathTest.java b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/auth/GrpcAuthCustomRootPathTest.java new file mode 100644 index 0000000000000..aa525408c274c --- /dev/null +++ b/extensions/grpc/deployment/src/test/java/io/quarkus/grpc/auth/GrpcAuthCustomRootPathTest.java @@ -0,0 +1,17 @@ +package io.quarkus.grpc.auth; + +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.test.QuarkusUnitTest; + +public class GrpcAuthCustomRootPathTest extends GrpcAuthTestBase { + + @RegisterExtension + static final QuarkusUnitTest config = createQuarkusUnitTest(""" + quarkus.grpc.server.use-separate-server=false + quarkus.grpc.clients.securityClient.host=localhost + quarkus.grpc.clients.securityClient.port=8081 + quarkus.http.root-path=/api + """, true); + +} diff --git a/extensions/grpc/inprocess/src/main/java/io/quarkus/grpc/inprocess/InProcessGrpcServerBuilderProvider.java b/extensions/grpc/inprocess/src/main/java/io/quarkus/grpc/inprocess/InProcessGrpcServerBuilderProvider.java index 1a7d5af2d3ddf..a40462d9a6621 100644 --- a/extensions/grpc/inprocess/src/main/java/io/quarkus/grpc/inprocess/InProcessGrpcServerBuilderProvider.java +++ b/extensions/grpc/inprocess/src/main/java/io/quarkus/grpc/inprocess/InProcessGrpcServerBuilderProvider.java @@ -25,13 +25,13 @@ public class InProcessGrpcServerBuilderProvider implements GrpcBuilderProvider { @Override public boolean providesServer(GrpcServerConfiguration configuration) { - return Enabled.isEnabled(configuration.inProcess); + return Enabled.isEnabled(configuration.inProcess()); } @Override public ServerBuilder createServerBuilder(Vertx vertx, 
GrpcServerConfiguration configuration, LaunchMode launchMode) { - ServerBuilder builder = InProcessServerBuilder.forName(configuration.inProcess.name); + ServerBuilder builder = InProcessServerBuilder.forName(configuration.inProcess().name()); // wrap with Vert.x context, so that the context interceptors work VertxInternal vxi = (VertxInternal) vertx; Executor delegate = vertx.nettyEventLoopGroup(); @@ -65,12 +65,12 @@ public boolean serverAlreadyExists() { @Override public String serverInfo(String host, int port, GrpcServerConfiguration configuration) { - return "InProcess gRPC server [" + configuration.inProcess.name + "]"; + return "InProcess gRPC server [" + configuration.inProcess().name() + "]"; } @Override public boolean providesChannel(GrpcClientConfiguration configuration) { - return Enabled.isEnabled(configuration.inProcess); + return Enabled.isEnabled(configuration.inProcess()); } @Override @@ -85,11 +85,11 @@ public String adjustHost(String host) { @Override public ManagedChannelBuilder createChannelBuilder(GrpcClientConfiguration configuration, String target) { - return InProcessChannelBuilder.forName(configuration.inProcess.name).directExecutor(); + return InProcessChannelBuilder.forName(configuration.inProcess().name()).directExecutor(); } @Override public String channelInfo(GrpcClientConfiguration configuration) { - return "InProcess [" + configuration.inProcess.name + "]"; + return "InProcess [" + configuration.inProcess().name() + "]"; } } diff --git a/extensions/grpc/runtime/pom.xml b/extensions/grpc/runtime/pom.xml index 2efcf0b64dc79..927401b02b8f4 100644 --- a/extensions/grpc/runtime/pom.xml +++ b/extensions/grpc/runtime/pom.xml @@ -104,7 +104,7 @@ io.smallrye.common smallrye-common-vertx-context
    - + io.quarkus @@ -170,9 +170,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/api/ChannelBuilderCustomizer.java b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/api/ChannelBuilderCustomizer.java new file mode 100644 index 0000000000000..c4544df3e52f8 --- /dev/null +++ b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/api/ChannelBuilderCustomizer.java @@ -0,0 +1,48 @@ +package io.quarkus.grpc.api; + +import java.util.Map; + +import io.grpc.ManagedChannelBuilder; +import io.quarkus.grpc.runtime.config.GrpcClientConfiguration; +import io.vertx.grpc.client.GrpcClientOptions; + +/** + * Allow for customization of Channel building. + * Implement the customize method, depending on which Channel implementation you're going to use, + * e.g. Vert.x or Netty. + * This is an experimental API, subject to change. + */ +public interface ChannelBuilderCustomizer> { + + /** + * Customize a ManagedChannelBuilder instance. + * + * @param name gRPC client name + * @param config client's configuration + * @param builder Channel builder instance + * @return map of config properties to be used as default service config against the builder + */ + default Map customize(String name, GrpcClientConfiguration config, T builder) { + return Map.of(); + } + + /** + * Customize a GrpcClientOptions instance. + * + * @param name gRPC client name + * @param config client's configuration + * @param options GrpcClientOptions instance + */ + default void customize(String name, GrpcClientConfiguration config, GrpcClientOptions options) { + } + + /** + * Priority by which the customizers are applied. + * Higher priority is applied later. + * + * @return the priority + */ + default int priority() { + return 0; + } +} diff --git a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/GrpcServerRecorder.java b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/GrpcServerRecorder.java index 848a2671c0702..fc717018282b8 100644 --- a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/GrpcServerRecorder.java +++ b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/GrpcServerRecorder.java @@ -46,7 +46,6 @@ import io.quarkus.grpc.reflection.service.ReflectionServiceV1alpha; import io.quarkus.grpc.runtime.config.GrpcConfiguration; import io.quarkus.grpc.runtime.config.GrpcServerConfiguration; -import io.quarkus.grpc.runtime.config.GrpcServerNettyConfig; import io.quarkus.grpc.runtime.devmode.DevModeInterceptor; import io.quarkus.grpc.runtime.devmode.GrpcHotReplacementInterceptor; import io.quarkus.grpc.runtime.devmode.GrpcServerReloader; @@ -60,6 +59,7 @@ import io.quarkus.runtime.ShutdownContext; import io.quarkus.runtime.annotations.Recorder; import io.quarkus.vertx.http.runtime.PortSystemProperties; +import io.quarkus.vertx.http.runtime.security.HttpAuthenticator; import io.quarkus.virtual.threads.VirtualThreadsRecorder; import io.vertx.core.AbstractVerticle; import io.vertx.core.AsyncResult; @@ -99,7 +99,7 @@ public void initializeGrpcServer(RuntimeValue vertxSupplier, ShutdownContext shutdown, Map> blockingMethodsPerService, Map> virtualMethodsPerService, - LaunchMode launchMode, boolean securityPresent) { + LaunchMode launchMode, boolean securityPresent, Map> securityHandlers) { GrpcContainer grpcContainer = Arc.container().instance(GrpcContainer.class).get(); if (grpcContainer == null) { throw new IllegalStateException("gRPC not initialized, GrpcContainer not found"); @@ -110,10 +110,10 @@ public void 
initializeGrpcServer(RuntimeValue vertxSupplier, } Vertx vertx = vertxSupplier.getValue(); - GrpcServerConfiguration configuration = cfg.server; + GrpcServerConfiguration configuration = cfg.server(); GrpcBuilderProvider provider = GrpcBuilderProvider.findServerBuilderProvider(configuration); - if (configuration.useSeparateServer) { + if (configuration.useSeparateServer()) { if (provider == null) { LOGGER.warn( "Using legacy gRPC support, with separate new HTTP server instance. " + @@ -137,7 +137,7 @@ public void initializeGrpcServer(RuntimeValue vertxSupplier, } } else { buildGrpcServer(vertx, configuration, routerSupplier, shutdown, blockingMethodsPerService, virtualMethodsPerService, - grpcContainer, launchMode, securityPresent); + grpcContainer, launchMode, securityPresent, securityHandlers); } } @@ -145,11 +145,12 @@ public void initializeGrpcServer(RuntimeValue vertxSupplier, private void buildGrpcServer(Vertx vertx, GrpcServerConfiguration configuration, RuntimeValue routerSupplier, ShutdownContext shutdown, Map> blockingMethodsPerService, Map> virtualMethodsPerService, - GrpcContainer grpcContainer, LaunchMode launchMode, boolean securityPresent) { + GrpcContainer grpcContainer, LaunchMode launchMode, boolean securityPresent, + Map> securityHandlers) { GrpcServerOptions options = new GrpcServerOptions(); - if (!configuration.maxInboundMessageSize.isEmpty()) { - options.setMaxMessageSize(configuration.maxInboundMessageSize.getAsInt()); + if (!configuration.maxInboundMessageSize().isEmpty()) { + options.setMaxMessageSize(configuration.maxInboundMessageSize().getAsInt()); } GrpcServer server = GrpcServer.server(vertx, options); List globalInterceptors = grpcContainer.getSortedGlobalInterceptors(); @@ -176,7 +177,7 @@ private void buildGrpcServer(Vertx vertx, GrpcServerConfiguration configuration, definitions.add(service.definition); } - boolean reflectionServiceEnabled = configuration.enableReflectionService || launchMode == LaunchMode.DEVELOPMENT; + boolean reflectionServiceEnabled = configuration.enableReflectionService() || launchMode == LaunchMode.DEVELOPMENT; if (reflectionServiceEnabled) { LOGGER.info("Registering gRPC reflection service"); @@ -193,8 +194,45 @@ private void buildGrpcServer(Vertx vertx, GrpcServerConfiguration configuration, initHealthStorage(); + Router router = routerSupplier.getValue(); + if (securityHandlers != null) { + for (Map.Entry> e : securityHandlers.entrySet()) { + Handler handler = e.getValue(); + Route route = router.route().order(e.getKey()).handler(new Handler() { + @Override + public void handle(RoutingContext ctx) { + if (!isGrpc(ctx)) { + ctx.next(); + } else if (ctx.get(HttpAuthenticator.class.getName()) != null) { + // this IF branch shouldn't be invoked with current implementation + // when gRPC is attached to the main router when the root path is not '/' + // because HTTP authenticator and authorizer handlers are not added by default on the main + // router; adding it in case someone made changes without consider this use case + // so that we prevent repeated authentication + ctx.next(); + } else { + if (!Context.isOnEventLoopThread()) { + Context capturedVertxContext = Vertx.currentContext(); + if (capturedVertxContext != null) { + capturedVertxContext.runOnContext(new Handler() { + @Override + public void handle(Void unused) { + handler.handle(ctx); + } + }); + return; + } + } + handler.handle(ctx); + } + } + }); + shutdown.addShutdownTask(route::remove); // remove this route at shutdown, this should reset it + } + } + 
LOGGER.info("Starting new Quarkus gRPC server (using Vert.x transport)..."); - Route route = routerSupplier.getValue().route().handler(ctx -> { + Route route = router.route().handler(ctx -> { if (!isGrpc(ctx)) { ctx.next(); } else { @@ -242,7 +280,7 @@ private void prodStart(GrpcContainer grpcContainer, Vertx vertx, GrpcServerConfi vertx.deployVerticle( () -> new GrpcServerVerticle(configuration, grpcContainer, provider, launchMode, blockingMethodsPerService, virtualMethodsPerService), - new DeploymentOptions().setInstances(configuration.instances), + new DeploymentOptions().setInstances(configuration.instances()), result -> { if (result.failed()) { startResult.completeExceptionally(result.cause()); @@ -267,13 +305,13 @@ private void prodStart(GrpcContainer grpcContainer, Vertx vertx, GrpcServerConfi private void postStartup(GrpcServerConfiguration configuration, GrpcBuilderProvider provider, boolean test) { initHealthStorage(); - int port = test ? testPort(configuration) : configuration.port; + int port = test ? testPort(configuration) : configuration.port(); String msg = "Started "; if (provider != null) - msg += provider.serverInfo(configuration.host, port, configuration); + msg += provider.serverInfo(configuration.host(), port, configuration); else msg += String.format("gRPC server on %s:%d [%s]", - configuration.host, port, "TLS enabled: " + !configuration.plainText); + configuration.host(), port, "TLS enabled: " + !configuration.plainText()); LOGGER.info(msg); } @@ -348,26 +386,26 @@ private void devModeStart(GrpcContainer grpcContainer, Vertx vertx, GrpcServerCo } private void applyNettySettings(GrpcServerConfiguration configuration, VertxServerBuilder builder) { - if (configuration.netty != null) { - GrpcServerNettyConfig config = configuration.netty; + if (configuration.netty() != null) { + GrpcServerConfiguration.GrpcServerNettyConfig config = configuration.netty(); NettyServerBuilder nettyServerBuilder = builder.nettyBuilder(); - config.keepAliveTime.ifPresent( + config.keepAliveTime().ifPresent( duration -> nettyServerBuilder.keepAliveTime(duration.toNanos(), TimeUnit.NANOSECONDS)); - config.permitKeepAliveTime.ifPresent( + config.permitKeepAliveTime().ifPresent( duration -> nettyServerBuilder.permitKeepAliveTime(duration.toNanos(), TimeUnit.NANOSECONDS)); - config.permitKeepAliveWithoutCalls.ifPresent(nettyServerBuilder::permitKeepAliveWithoutCalls); + config.permitKeepAliveWithoutCalls().ifPresent(nettyServerBuilder::permitKeepAliveWithoutCalls); } } @SuppressWarnings("rawtypes") private void applyTransportSecurityConfig(GrpcServerConfiguration configuration, ServerBuilder builder) { - if (configuration.transportSecurity != null) { - File cert = configuration.transportSecurity.certificate + if (configuration.transportSecurity() != null) { + File cert = configuration.transportSecurity().certificate() .map(File::new) .orElse(null); - File key = configuration.transportSecurity.key + File key = configuration.transportSecurity().key() .map(File::new) .orElse(null); if (cert != null || key != null) { @@ -490,7 +528,7 @@ private Map.Entry buildServer(Vertx vertx, GrpcServerConfigurat Map> virtualMethodsPerService, GrpcContainer grpcContainer, LaunchMode launchMode) { - int port = launchMode == LaunchMode.TEST ? configuration.testPort : configuration.port; + int port = launchMode == LaunchMode.TEST ? 
configuration.testPort() : configuration.port(); AtomicBoolean usePlainText = new AtomicBoolean(); @@ -498,7 +536,7 @@ private Map.Entry buildServer(Vertx vertx, GrpcServerConfigurat if (provider != null) { builder = provider.createServerBuilder(vertx, configuration, launchMode); } else { - VertxServerBuilder vsBuilder = VertxServerBuilder.forAddress(vertx, configuration.host, port); + VertxServerBuilder vsBuilder = VertxServerBuilder.forAddress(vertx, configuration.host(), port); // add Vert.x specific stuff here vsBuilder.useSsl(options -> { try { @@ -517,20 +555,20 @@ private Map.Entry buildServer(Vertx vertx, GrpcServerConfigurat builder = vsBuilder; } - if (configuration.maxInboundMessageSize.isPresent()) { - builder.maxInboundMessageSize(configuration.maxInboundMessageSize.getAsInt()); + if (configuration.maxInboundMessageSize().isPresent()) { + builder.maxInboundMessageSize(configuration.maxInboundMessageSize().getAsInt()); } - if (configuration.maxInboundMetadataSize.isPresent()) { - builder.maxInboundMetadataSize(configuration.maxInboundMetadataSize.getAsInt()); + if (configuration.maxInboundMetadataSize().isPresent()) { + builder.maxInboundMetadataSize(configuration.maxInboundMetadataSize().getAsInt()); } - Optional handshakeTimeout = configuration.handshakeTimeout; + Optional handshakeTimeout = configuration.handshakeTimeout(); handshakeTimeout.ifPresent(duration -> builder.handshakeTimeout(duration.toMillis(), TimeUnit.MILLISECONDS)); applyTransportSecurityConfig(configuration, builder); - boolean reflectionServiceEnabled = configuration.enableReflectionService || launchMode == LaunchMode.DEVELOPMENT; + boolean reflectionServiceEnabled = configuration.enableReflectionService() || launchMode == LaunchMode.DEVELOPMENT; List toBeRegistered = collectServiceDefinitions(grpcContainer.getServices()); List definitions = new ArrayList<>(); @@ -553,12 +591,14 @@ private Map.Entry buildServer(Vertx vertx, GrpcServerConfigurat builder.addService(ServerInterceptors.intercept(new ReflectionServiceV1alpha(definitions), globalInterceptors)); } - String msg = "Starting "; - if (provider != null) - msg += provider.serverInfo(configuration.host, port, configuration); - else - msg += String.format("gRPC server on %s:%d [TLS enabled: %s]", configuration.host, port, !usePlainText.get()); - LOGGER.debug(msg); + if (LOGGER.isDebugEnabled()) { + String msg = "Starting "; + if (provider != null) + msg += provider.serverInfo(configuration.host(), port, configuration); + else + msg += String.format("gRPC server on %s:%d [TLS enabled: %s]", configuration.host(), port, !usePlainText.get()); + LOGGER.debug(msg); + } return new AbstractMap.SimpleEntry<>(port, builder.build()); } @@ -571,8 +611,8 @@ private Map.Entry buildServer(Vertx vertx, GrpcServerConfigurat */ private CompressionInterceptor prepareCompressionInterceptor(GrpcServerConfiguration configuration) { CompressionInterceptor compressionInterceptor = null; - if (configuration.compression.isPresent()) { - compressionInterceptor = new CompressionInterceptor(configuration.compression.get()); + if (configuration.compression().isPresent()) { + compressionInterceptor = new CompressionInterceptor(configuration.compression().get()); } return compressionInterceptor; } diff --git a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/GrpcSslUtils.java b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/GrpcSslUtils.java index 6b01398ec34ec..6c9b69887877c 100644 --- 
a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/GrpcSslUtils.java +++ b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/GrpcSslUtils.java @@ -12,7 +12,6 @@ import org.jboss.logging.Logger; import io.quarkus.grpc.runtime.config.GrpcServerConfiguration; -import io.quarkus.grpc.runtime.config.SslServerConfig; import io.quarkus.runtime.util.ClassPathUtils; import io.vertx.core.buffer.Buffer; import io.vertx.core.http.HttpServerOptions; @@ -33,30 +32,26 @@ public class GrpcSslUtils { */ static boolean applySslOptions(GrpcServerConfiguration config, HttpServerOptions options) throws IOException { - - // Disable plain-text is the ssl configuration is set. - if ((config.ssl.certificate.isPresent() || config.ssl.keyStore.isPresent()) - && config.plainText) { + // Disable plain-text if the ssl configuration is set. + if (config.plainText() && (config.ssl().certificate().isPresent() || config.ssl().keyStore().isPresent())) { LOGGER.info("Disabling gRPC plain-text as the SSL certificate is configured"); - config.plainText = false; } - - if (config.plainText) { + if (config.isPlainTextEnabled()) { options.setSsl(false); return true; } else { options.setSsl(true); } - SslServerConfig sslConfig = config.ssl; - final Optional certFile = sslConfig.certificate; - final Optional keyFile = sslConfig.key; - final Optional keyStoreFile = sslConfig.keyStore; - final Optional trustStoreFile = sslConfig.trustStore; - final Optional trustStorePassword = sslConfig.trustStorePassword; + GrpcServerConfiguration.SslServerConfig sslConfig = config.ssl(); + final Optional certFile = sslConfig.certificate(); + final Optional keyFile = sslConfig.key(); + final Optional keyStoreFile = sslConfig.keyStore(); + final Optional trustStoreFile = sslConfig.trustStore(); + final Optional trustStorePassword = sslConfig.trustStorePassword(); - options.setUseAlpn(config.alpn); - if (config.alpn) { + options.setUseAlpn(config.alpn()); + if (config.alpn()) { options.setAlpnVersions(Arrays.asList(HttpVersion.HTTP_2, HttpVersion.HTTP_1_1)); } @@ -64,7 +59,7 @@ static boolean applySslOptions(GrpcServerConfiguration config, HttpServerOptions createPemKeyCertOptions(certFile.get(), keyFile.get(), options); } else if (keyStoreFile.isPresent()) { final Path keyStorePath = keyStoreFile.get(); - final Optional keyStoreFileType = sslConfig.keyStoreType; + final Optional keyStoreFileType = sslConfig.keyStoreType(); String type; if (keyStoreFileType.isPresent()) { type = keyStoreFileType.get().toLowerCase(); @@ -77,13 +72,13 @@ static boolean applySslOptions(GrpcServerConfiguration config, HttpServerOptions case "pkcs12": { PfxOptions o = new PfxOptions() .setValue(Buffer.buffer(data)); - if (sslConfig.keyStorePassword.isPresent()) { - o.setPassword(sslConfig.keyStorePassword.get()); + if (sslConfig.keyStorePassword().isPresent()) { + o.setPassword(sslConfig.keyStorePassword().get()); } - if (sslConfig.keyStoreAlias.isPresent()) { - o.setAlias(sslConfig.keyStoreAlias.get()); - if (sslConfig.keyStoreAliasPassword.isPresent()) { - o.setAliasPassword(sslConfig.keyStoreAliasPassword.get()); + if (sslConfig.keyStoreAlias().isPresent()) { + o.setAlias(sslConfig.keyStoreAlias().get()); + if (sslConfig.keyStoreAliasPassword().isPresent()) { + o.setAliasPassword(sslConfig.keyStoreAliasPassword().get()); } } options.setPfxKeyCertOptions(o); @@ -92,13 +87,13 @@ static boolean applySslOptions(GrpcServerConfiguration config, HttpServerOptions case "jks": { JksOptions o = new JksOptions() .setValue(Buffer.buffer(data)); - if 
(sslConfig.keyStorePassword.isPresent()) { - o.setPassword(sslConfig.keyStorePassword.get()); + if (sslConfig.keyStorePassword().isPresent()) { + o.setPassword(sslConfig.keyStorePassword().get()); } - if (sslConfig.keyStoreAlias.isPresent()) { - o.setAlias(sslConfig.keyStoreAlias.get()); - if (sslConfig.keyStoreAliasPassword.isPresent()) { - o.setAliasPassword(sslConfig.keyStoreAliasPassword.get()); + if (sslConfig.keyStoreAlias().isPresent()) { + o.setAlias(sslConfig.keyStoreAlias().get()); + if (sslConfig.keyStoreAliasPassword().isPresent()) { + o.setAliasPassword(sslConfig.keyStoreAliasPassword().get()); } } options.setKeyStoreOptions(o); @@ -116,7 +111,7 @@ static boolean applySslOptions(GrpcServerConfiguration config, HttpServerOptions throw new IllegalArgumentException("No trust store password provided"); } String type; - final Optional trustStoreFileType = sslConfig.trustStoreType; + final Optional trustStoreFileType = sslConfig.trustStoreType(); final Path trustStoreFilePath = trustStoreFile.get(); if (trustStoreFileType.isPresent()) { type = trustStoreFileType.get(); @@ -126,11 +121,11 @@ static boolean applySslOptions(GrpcServerConfiguration config, HttpServerOptions createTrustStoreOptions(trustStoreFilePath, trustStorePassword.get(), type, options); } - for (String cipher : sslConfig.cipherSuites.orElse(Collections.emptyList())) { + for (String cipher : sslConfig.cipherSuites().orElse(Collections.emptyList())) { options.addEnabledCipherSuite(cipher); } - options.setEnabledSecureTransportProtocols(sslConfig.protocols); - options.setClientAuth(sslConfig.clientAuth); + options.setEnabledSecureTransportProtocols(sslConfig.protocols()); + options.setClientAuth(sslConfig.clientAuth()); return false; } diff --git a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/GrpcTestPortUtils.java b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/GrpcTestPortUtils.java index bb3358616df7c..209d6cdf3fddb 100644 --- a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/GrpcTestPortUtils.java +++ b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/GrpcTestPortUtils.java @@ -5,7 +5,6 @@ import org.eclipse.microprofile.config.ConfigProvider; import io.quarkus.grpc.runtime.config.GrpcServerConfiguration; -import io.quarkus.grpc.runtime.config.SslServerConfig; import io.vertx.core.http.ClientAuth; public final class GrpcTestPortUtils { @@ -13,13 +12,13 @@ private GrpcTestPortUtils() { } public static int testPort(GrpcServerConfiguration serverConfiguration) { - if (serverConfiguration.useSeparateServer) { - if (serverConfiguration.testPort == 0) { + if (serverConfiguration.useSeparateServer()) { + if (serverConfiguration.testPort() == 0) { return testPort("grpc.server"); } - return serverConfiguration.testPort; + return serverConfiguration.testPort(); } - if (isHttpsConfigured(serverConfiguration.ssl) || !serverConfiguration.plainText) { + if (isHttpsConfigured(serverConfiguration.ssl()) || !serverConfiguration.isPlainTextEnabled()) { int httpsTestPort = port("quarkus.http.test-ssl-port"); if (httpsTestPort == 0) { return testPort("https"); @@ -29,11 +28,11 @@ public static int testPort(GrpcServerConfiguration serverConfiguration) { return testPort("http"); } - private static boolean isHttpsConfigured(SslServerConfig ssl) { - return ssl.certificate.isPresent() || ssl.key.isPresent() || ssl.keyStore.isPresent() - || ssl.keyStoreType.isPresent() || ssl.keyStorePassword.isPresent() || ssl.trustStore.isPresent() - || ssl.trustStoreType.isPresent() || 
ssl.cipherSuites.isPresent() || ssl.clientAuth != ClientAuth.NONE - || !isDefaultProtocols(ssl.protocols); + private static boolean isHttpsConfigured(GrpcServerConfiguration.SslServerConfig ssl) { + return ssl.certificate().isPresent() || ssl.key().isPresent() || ssl.keyStore().isPresent() + || ssl.keyStoreType().isPresent() || ssl.keyStorePassword().isPresent() || ssl.trustStore().isPresent() + || ssl.trustStoreType().isPresent() || ssl.cipherSuites().isPresent() || ssl.clientAuth() != ClientAuth.NONE + || !isDefaultProtocols(ssl.protocols()); } private static boolean isDefaultProtocols(Set protocols) { diff --git a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/ClientXds.java b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/ClientXds.java deleted file mode 100644 index d6feecdd2b06e..0000000000000 --- a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/ClientXds.java +++ /dev/null @@ -1,19 +0,0 @@ -package io.quarkus.grpc.runtime.config; - -import java.util.Optional; - -import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; - -/** - * Client XDS config - * * XDS usage - */ -@ConfigGroup -public class ClientXds extends Xds { - /** - * Optional explicit target. - */ - @ConfigItem - public Optional target; -} diff --git a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/Enabled.java b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/Enabled.java index 76871cb252c08..cd5e8fbbb8974 100644 --- a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/Enabled.java +++ b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/Enabled.java @@ -1,9 +1,13 @@ package io.quarkus.grpc.runtime.config; +import io.quarkus.runtime.annotations.ConfigDocIgnore; + public interface Enabled { - boolean isEnabled(); + + @ConfigDocIgnore + boolean enabled(); static boolean isEnabled(Enabled enabled) { - return enabled != null && enabled.isEnabled(); + return enabled != null && enabled.enabled(); } } diff --git a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcClientBuildTimeConfig.java b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcClientBuildTimeConfig.java index c765531c2e797..b59051f77a1d9 100644 --- a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcClientBuildTimeConfig.java +++ b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcClientBuildTimeConfig.java @@ -1,16 +1,18 @@ package io.quarkus.grpc.runtime.config; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefault; +@ConfigMapping(prefix = "quarkus.grpc-client") @ConfigRoot(phase = ConfigPhase.BUILD_TIME) -public class GrpcClientBuildTimeConfig { +public interface GrpcClientBuildTimeConfig { /** * If set to true, and a Stork load balancer is used, connections with all available service instances will be * requested proactively. This means better load balancing at the cost of having multiple active connections. 
*/ - @ConfigItem(defaultValue = "true") - public boolean storkProactiveConnections; + @WithDefault("true") + boolean storkProactiveConnections(); } diff --git a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcClientConfiguration.java b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcClientConfiguration.java index c083659d93f5b..e6a4cb6631b15 100644 --- a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcClientConfiguration.java +++ b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcClientConfiguration.java @@ -1,70 +1,67 @@ package io.quarkus.grpc.runtime.config; +import java.nio.file.Path; import java.time.Duration; +import java.util.List; import java.util.Optional; import java.util.OptionalInt; import java.util.OptionalLong; import io.quarkus.runtime.annotations.ConfigDocSection; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; +import io.smallrye.config.WithDefault; -@SuppressWarnings("OptionalUsedAsFieldOrParameterType") @ConfigGroup -public class GrpcClientConfiguration { +public interface GrpcClientConfiguration { - public static final String DNS = "dns"; - public static final String XDS = "xds"; + String DNS = "dns"; + String XDS = "xds"; /** * Use new Vert.x gRPC client support. * By default, we still use previous Java gRPC support. */ - @ConfigItem(defaultValue = "false") - public boolean useQuarkusGrpcClient; + @WithDefault("false") + boolean useQuarkusGrpcClient(); /** * Configure XDS usage, if enabled. */ - @ConfigItem @ConfigDocSection(generated = true) - public ClientXds xds; + ClientXds xds(); /** * Configure InProcess usage, if enabled. */ - @ConfigItem - public InProcess inProcess; + InProcess inProcess(); /** * Configure Stork usage with new Vert.x gRPC, if enabled. */ - @ConfigItem - public StorkConfig stork; + StorkConfig stork(); /** * The gRPC service port. */ - @ConfigItem(defaultValue = "9000") - public int port; + @WithDefault("9000") + int port(); /** * The gRPC service test port. */ - @ConfigItem - public OptionalInt testPort; + OptionalInt testPort(); /** * The host name / IP on which the service is exposed. */ - @ConfigItem(defaultValue = "localhost") - public String host; + @WithDefault("localhost") + String host(); /** * The SSL/TLS config. * Only use this if you want to use the old Java gRPC client. */ - public SslClientConfig ssl; + SslClientConfig ssl(); /** * The name of the TLS configuration to use. @@ -78,147 +75,323 @@ public class GrpcClientConfiguration { *
    * Important: This is only supported when using the Quarkus (Vert.x-based) gRPC client. */ - @ConfigItem - public Optional tlsConfigurationName; + Optional tlsConfigurationName(); /** * The TLS config. * Only use this if you want to use the Quarkus gRPC client. */ - public TlsClientConfig tls; + TlsClientConfig tls(); /** * Use a name resolver. Defaults to dns. * If set to "stork", host will be treated as SmallRye Stork service name */ - @ConfigItem(defaultValue = DNS) - public String nameResolver; + @WithDefault(DNS) + String nameResolver(); /** * Whether {@code plain-text} should be used instead of {@code TLS}. * Enabled by default, except if TLS/SSL is configured. In this case, {@code plain-text} is disabled. */ - @ConfigItem - public Optional plainText; + Optional plainText(); /** * The duration after which a keep alive ping is sent. */ - @ConfigItem - public Optional keepAliveTime; + Optional keepAliveTime(); /** * The flow control window in bytes. Default is 1MiB. */ - @ConfigItem - public OptionalInt flowControlWindow; + OptionalInt flowControlWindow(); /** * The duration without ongoing RPCs before going to idle mode. */ - @ConfigItem - public Optional idleTimeout; + Optional idleTimeout(); /** * The amount of time the sender of a keep alive ping waits for an acknowledgement. */ - @ConfigItem - public Optional keepAliveTimeout; + Optional keepAliveTimeout(); /** * Whether keep-alive will be performed when there are no outstanding RPC on a connection. */ - @ConfigItem(defaultValue = "false") - public boolean keepAliveWithoutCalls; + @WithDefault("false") + boolean keepAliveWithoutCalls(); /** * The max number of hedged attempts. */ - @ConfigItem(defaultValue = "5") - public int maxHedgedAttempts; + @WithDefault("5") + int maxHedgedAttempts(); /** * The max number of retry attempts. * Retry must be explicitly enabled. */ - @ConfigItem(defaultValue = "5") - public int maxRetryAttempts; + @WithDefault("5") + int maxRetryAttempts(); /** * The maximum number of channel trace events to keep in the tracer for each channel or sub-channel. */ - @ConfigItem - public OptionalInt maxTraceEvents; + OptionalInt maxTraceEvents(); /** * The maximum message size allowed for a single gRPC frame (in bytes). * Default is 4 MiB. */ - @ConfigItem - public OptionalInt maxInboundMessageSize; + OptionalInt maxInboundMessageSize(); /** * The maximum size of metadata allowed to be received (in bytes). * Default is 8192B. */ - @ConfigItem - public OptionalInt maxInboundMetadataSize; + OptionalInt maxInboundMetadataSize(); /** * The negotiation type for the HTTP/2 connection. * Accepted values are: {@code TLS}, {@code PLAINTEXT_UPGRADE}, {@code PLAINTEXT} */ - @ConfigItem(defaultValue = "TLS") - public String negotiationType; + @WithDefault("TLS") + String negotiationType(); /** * Overrides the authority used with TLS and HTTP virtual hosting. */ - @ConfigItem - public Optional overrideAuthority; + Optional overrideAuthority(); /** * The per RPC buffer limit in bytes used for retry. */ - @ConfigItem - public OptionalLong perRpcBufferLimit; + OptionalLong perRpcBufferLimit(); /** * Whether retry is enabled. * Note that retry is disabled by default. */ - @ConfigItem(defaultValue = "false") - public boolean retry; + @WithDefault("false") + boolean retry(); /** * The retry buffer size in bytes. */ - @ConfigItem - public OptionalLong retryBufferSize; + OptionalLong retryBufferSize(); /** * Use a custom user-agent. 
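To ground the client options being migrated above, a hedged application.properties sketch; the client name hello and all values are hypothetical, and the kebab-case keys are assumed to mirror the accessor names in this diff under the quarkus.grpc.clients."client-name" prefix:

quarkus.grpc.clients.hello.host=localhost
quarkus.grpc.clients.hello.port=9000
quarkus.grpc.clients.hello.name-resolver=dns
quarkus.grpc.clients.hello.keep-alive-time=30s
quarkus.grpc.clients.hello.retry=true
quarkus.grpc.clients.hello.max-retry-attempts=5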
*/ - @ConfigItem - public Optional userAgent; + Optional userAgent(); /** * Use a custom load balancing policy. * Accepted values are: {@code pick_first}, {@code round_robin}, {@code grpclb}. * This value is ignored if name-resolver is set to 'stork'. */ - @ConfigItem(defaultValue = "pick_first") - public String loadBalancingPolicy; + @WithDefault("pick_first") + String loadBalancingPolicy(); /** * The compression to use for each call. The accepted values are {@code gzip} and {@code identity}. */ - @ConfigItem - public Optional compression; + Optional compression(); /** * The deadline used for each call. */ - @ConfigItem - public Optional deadline; + Optional deadline(); + + /** + * Shared configuration for setting up client-side SSL. + */ + @SuppressWarnings("OptionalUsedAsFieldOrParameterType") + @ConfigGroup + interface SslClientConfig { + /** + * The classpath path or file path to a server certificate or certificate chain in PEM format. + */ + Optional certificate(); + + /** + * The classpath path or file path to the corresponding certificate private key file in PEM format. + */ + Optional key(); + + /** + * An optional trust store which holds the certificate information of the certificates to trust + * + * The trust store can be either on classpath or in an external file. + */ + Optional trustStore(); + + } + + @ConfigGroup + interface TlsClientConfig { + + /** + * Whether SSL/TLS is enabled. + */ + @WithDefault("false") + boolean enabled(); + + /** + * Enable trusting all certificates. Disabled by default. + */ + @WithDefault("false") + boolean trustAll(); + + /** + * Trust configuration in the PEM format. + *
    + * When used, {@code trust-certificate-jks} and {@code trust-certificate-p12} must not be used. + */ + PemTrustCertConfiguration trustCertificatePem(); + + /** + * Trust configuration in the JKS format. + *
    + * When configured, {@code trust-certificate-pem} and {@code trust-certificate-p12} must not be used. + */ + JksConfiguration trustCertificateJks(); + + /** + * Trust configuration in the P12 format. + *
    + * When configured, {@code trust-certificate-jks} and {@code trust-certificate-pem} must not be used. + */ + PfxConfiguration trustCertificateP12(); + + /** + * Key/cert configuration in the PEM format. + *
    + * When configured, {@code key-certificate-jks} and {@code key-certificate-p12} must not be used. + */ + PemKeyCertConfiguration keyCertificatePem(); + + /** + * Key/cert configuration in the JKS format. + *
    + * When configured, {@code #key-certificate-pem} and {@code #key-certificate-p12} must not be used. + */ + JksConfiguration keyCertificateJks(); + + /** + * Key/cert configuration in the P12 format. + *
    + * When configured, {@code key-certificate-jks} and {@code #key-certificate-pem} must not be used. + */ + PfxConfiguration keyCertificateP12(); + + /** + * Whether hostname should be verified in the SSL/TLS handshake. + */ + @WithDefault("true") + boolean verifyHostname(); + + @ConfigGroup + interface PemTrustCertConfiguration { + + /** + * Comma-separated list of the trust certificate files (Pem format). + */ + Optional> certs(); + + } + + @ConfigGroup + interface JksConfiguration { + + /** + * Path of the key file (JKS format). + */ + Optional path(); + + /** + * Password of the key file. + */ + Optional password(); + } + + @ConfigGroup + interface PfxConfiguration { + + /** + * Path to the key file (PFX format). + */ + Optional path(); + + /** + * Password of the key. + */ + Optional password(); + } + + @ConfigGroup + interface PemKeyCertConfiguration { + + /** + * Comma-separated list of the path to the key files (Pem format). + */ + Optional> keys(); + + /** + * Comma-separated list of the path to the certificate files (Pem format). + */ + Optional> certs(); + + } + + } + + /** + * Client XDS config + * * XDS usage + */ + @ConfigGroup + interface ClientXds extends GrpcServerConfiguration.Xds { + /** + * Optional explicit target. + */ + Optional target(); + } + + /** + * Stork config for new Vert.x gRPC + */ + @ConfigGroup + interface StorkConfig { + /** + * Number of threads on a delayed gRPC ClientCall + */ + @WithDefault("10") + int threads(); + + /** + * Deadline in milliseconds of delayed gRPC call + */ + @WithDefault("5000") + long deadline(); + + /** + * Number of retries on a gRPC ClientCall + */ + @WithDefault("3") + int retries(); + + /** + * Initial delay in seconds on refresh check + */ + @WithDefault("60") + long delay(); + + /** + * Refresh period in seconds + */ + @WithDefault("120") + long period(); + } } diff --git a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcCodeGenConfig.java b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcCodeGenConfig.java index 6b9060a572db5..8c5fc3749e25e 100644 --- a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcCodeGenConfig.java +++ b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcCodeGenConfig.java @@ -1,14 +1,16 @@ package io.quarkus.grpc.runtime.config; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefault; /** * not really used, here only to describe config options for code generation */ -@ConfigRoot(name = "generate-code.grpc", phase = ConfigPhase.BUILD_TIME) -public class GrpcCodeGenConfig { +@ConfigMapping(prefix = "quarkus.generate-code.grpc") +@ConfigRoot(phase = ConfigPhase.BUILD_TIME) +public interface GrpcCodeGenConfig { /** * gRPC code generation can scan dependencies of the application for proto files to generate Java stubs from. @@ -20,21 +22,21 @@ public class GrpcCodeGenConfig { *
     *   - all - scan all dependencies
  • * */ - @ConfigItem(defaultValue = "none") - public String scanForProto; + @WithDefault("none") + String scanForProto(); /** * Specify the dependencies that are allowed to have proto files that can be imported by this application's protos - * + *
     * Applicable values:
     *   - none - default - don't scan dependencies
     *   - a comma separated list of groupId:artifactId coordinates to scan
     *   - all - scan all dependencies
     *
    * By default, com.google.protobuf:protobuf-java. */ - @ConfigItem(defaultValue = "com.google.protobuf:protobuf-java") - public String scanForImports; + @WithDefault("com.google.protobuf:protobuf-java") + String scanForImports(); } diff --git a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcConfiguration.java b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcConfiguration.java index 862bd70ba1d7f..1415c147b226a 100644 --- a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcConfiguration.java +++ b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcConfiguration.java @@ -4,28 +4,28 @@ import io.quarkus.runtime.annotations.ConfigDocMapKey; import io.quarkus.runtime.annotations.ConfigDocSection; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; /** * gRPC configuration root. */ +@ConfigMapping(prefix = "quarkus.grpc") @ConfigRoot(phase = ConfigPhase.RUN_TIME) -public class GrpcConfiguration { +public interface GrpcConfiguration { /** * Configures the gRPC clients. */ - @ConfigItem @ConfigDocSection(generated = true) @ConfigDocMapKey("client-name") - public Map clients; + Map clients(); /** * Configure the gRPC server. */ @ConfigDocSection(generated = true) - public GrpcServerConfiguration server; + GrpcServerConfiguration server(); } diff --git a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcServerBuildTimeConfig.java b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcServerBuildTimeConfig.java index a03f3be8a152b..64a6c0b794aa7 100644 --- a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcServerBuildTimeConfig.java +++ b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcServerBuildTimeConfig.java @@ -1,20 +1,25 @@ package io.quarkus.grpc.runtime.config; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefault; +import io.smallrye.config.WithName; -@ConfigRoot(phase = ConfigPhase.BUILD_TIME, name = "grpc.server") -public class GrpcServerBuildTimeConfig { +@ConfigMapping(prefix = "quarkus.grpc.server") +@ConfigRoot(phase = ConfigPhase.BUILD_TIME) +public interface GrpcServerBuildTimeConfig { /** * Whether a health check on gRPC status is published in case the smallrye-health extension is present. */ - @ConfigItem(name = "health.enabled", defaultValue = "true") - public boolean mpHealthEnabled; + @WithName("health.enabled") + @WithDefault("true") + boolean mpHealthEnabled(); /** * Whether the gRPC health check is exposed. 
*/ - @ConfigItem(name = "grpc-health.enabled", defaultValue = "true") - public boolean grpcHealthEnabled; + @WithName("grpc-health.enabled") + @WithDefault("true") + boolean grpcHealthEnabled(); } diff --git a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcServerConfiguration.java b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcServerConfiguration.java index b262339b9172b..0c6baa36aea84 100644 --- a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcServerConfiguration.java +++ b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcServerConfiguration.java @@ -1,61 +1,61 @@ package io.quarkus.grpc.runtime.config; +import java.nio.file.Path; import java.time.Duration; +import java.util.List; import java.util.Optional; import java.util.OptionalInt; +import java.util.Set; import io.quarkus.runtime.annotations.ConfigDocSection; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; +import io.smallrye.config.WithDefault; +import io.vertx.core.http.ClientAuth; -@SuppressWarnings("OptionalUsedAsFieldOrParameterType") @ConfigGroup -public class GrpcServerConfiguration { +public interface GrpcServerConfiguration { /** * Do we use separate HTTP server to serve gRPC requests. * Set this to false if you want to use new Vert.x gRPC support, * which uses existing Vert.x HTTP server. */ - @ConfigItem(defaultValue = "true") - public boolean useSeparateServer; + @WithDefault("true") + boolean useSeparateServer(); /** * Configure XDS usage, if enabled. */ - @ConfigItem @ConfigDocSection(generated = true) - public Xds xds; + Xds xds(); /** * Configure InProcess usage, if enabled. */ - @ConfigItem - public InProcess inProcess; + InProcess inProcess(); /** * The gRPC Server port. */ - @ConfigItem(defaultValue = "9000") - public int port; + @WithDefault("9000") + int port(); /** * The gRPC Server port used for tests. */ - @ConfigItem(defaultValue = "9001") - public int testPort; + @WithDefault("9001") + int testPort(); /** * The gRPC server host. */ - @ConfigItem(defaultValue = "0.0.0.0") - public String host; + @WithDefault("0.0.0.0") + String host(); /** * The gRPC handshake timeout. */ - @ConfigItem - public Optional handshakeTimeout; + Optional handshakeTimeout(); /** * The max inbound message size in bytes. @@ -63,64 +63,213 @@ public class GrpcServerConfiguration { * When using a single server (using {@code quarkus.grpc.server.use-separate-server=false}), the default value is 256KB. * When using a separate server (using {@code quarkus.grpc.server.use-separate-server=true}), the default value is 4MB. */ - @ConfigItem - public OptionalInt maxInboundMessageSize; + OptionalInt maxInboundMessageSize(); /** * The max inbound metadata size in bytes */ - @ConfigItem - public OptionalInt maxInboundMetadataSize; + OptionalInt maxInboundMetadataSize(); /** * The SSL/TLS config. */ - public SslServerConfig ssl; + SslServerConfig ssl(); /** * Disables SSL, and uses plain text instead. * If disabled, configure the ssl configuration. */ - @ConfigItem(defaultValue = "true") - public boolean plainText; + @WithDefault("true") + boolean plainText(); + + default boolean isPlainTextEnabled() { + boolean plainText = plainText(); + if (plainText && (ssl().certificate().isPresent() || ssl().keyStore().isPresent())) { + plainText = false; + } + return plainText; + } /** * Whether ALPN should be used. 
*/ - @ConfigItem(defaultValue = "true") - public boolean alpn; + @WithDefault("true") + boolean alpn(); /** * Configures the transport security. */ - @ConfigItem - public GrpcTransportSecurity transportSecurity; + GrpcTransportSecurity transportSecurity(); /** * Enables the gRPC Reflection Service. * By default, the reflection service is only exposed in `dev` mode. * This setting allows overriding this choice and enable the reflection service every time. */ - @ConfigItem(defaultValue = "false") - public boolean enableReflectionService; + @WithDefault("false") + boolean enableReflectionService(); /** * Number of gRPC server verticle instances. * This is useful for scaling easily across multiple cores. * The number should not exceed the amount of event loops. */ - @ConfigItem(defaultValue = "1") - public int instances; + @WithDefault("1") + int instances(); /** * Configures the netty server settings. */ - @ConfigItem - public GrpcServerNettyConfig netty; + GrpcServerNettyConfig netty(); /** * gRPC compression, e.g. "gzip" */ - @ConfigItem - public Optional compression; + Optional compression(); + + /** + * Shared configuration for setting up server-side SSL. + */ + @ConfigGroup + public interface SslServerConfig { + /** + * The classpath path or file path to a server certificate or certificate chain in PEM format. + */ + Optional certificate(); + + /** + * The classpath path or file path to the corresponding certificate private key file in PEM format. + */ + Optional key(); + + /** + * An optional keystore that holds the certificate information instead of specifying separate files. + * The keystore can be either on classpath or an external file. + */ + Optional keyStore(); + + /** + * An optional parameter to specify the type of the keystore file. If not given, the type is automatically detected + * based on the file name. + */ + Optional keyStoreType(); + + /** + * A parameter to specify the password of the keystore file. + */ + Optional keyStorePassword(); + + /** + * A parameter to specify the alias of the keystore file. + */ + Optional keyStoreAlias(); + + /** + * A parameter to specify the alias password of the keystore file. + */ + Optional keyStoreAliasPassword(); + + /** + * An optional trust store which holds the certificate information of the certificates to trust + *
    + * The trust store can be either on classpath or an external file. + */ + Optional trustStore(); + + /** + * An optional parameter to specify type of the trust store file. If not given, the type is automatically detected + * based on the file name. + */ + Optional trustStoreType(); + + /** + * A parameter to specify the password of the trust store file. + */ + Optional trustStorePassword(); + + /** + * The cipher suites to use. If none is given, a reasonable default is selected. + */ + Optional> cipherSuites(); + + /** + * Sets the ordered list of enabled SSL/TLS protocols. + *
    + * If not set, it defaults to {@code "TLSv1.3, TLSv1.2"}. + * The following list of protocols are supported: {@code TLSv1, TLSv1.1, TLSv1.2, TLSv1.3}. + * To only enable {@code TLSv1.3}, set the value to {@code to "TLSv1.3"}. + *
    + * Note that setting an empty list, and enabling SSL/TLS is invalid. + * You must at least have one protocol. + */ + @WithDefault("TLSv1.3,TLSv1.2") + Set protocols(); + + /** + * Configures the engine to require/request client authentication. + * NONE, REQUEST, REQUIRED + */ + @WithDefault("NONE") + ClientAuth clientAuth(); + } + + @ConfigGroup + public interface GrpcServerNettyConfig { + + /** + * Sets a custom keep-alive duration. This configures the time before sending a `keepalive` ping + * when there is no read activity. + */ + Optional keepAliveTime(); + + /** + * Sets a custom permit-keep-alive duration. This configures the most aggressive keep-alive time clients + * are permitted to configure. + * The server will try to detect clients exceeding this rate and when detected will forcefully close the connection. + * + * @see #permitKeepAliveWithoutCalls + */ + Optional permitKeepAliveTime(); + + /** + * Sets whether to allow clients to send keep-alive HTTP/2 PINGs even if + * there are no outstanding RPCs on the connection. + */ + Optional permitKeepAliveWithoutCalls(); + + } + + @ConfigGroup + public interface GrpcTransportSecurity { + + /** + * The path to the certificate file. + */ + Optional certificate(); + + /** + * The path to the private key file. + */ + Optional key(); + } + + /** + * XDS config + * * XDS usage + */ + @ConfigGroup + interface Xds extends Enabled { + /** + * Explicitly enable use of XDS. + */ + @WithDefault("false") + @Override + boolean enabled(); + + /** + * Use secure credentials. + */ + @WithDefault("false") + boolean secure(); + } } diff --git a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcServerNettyConfig.java b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcServerNettyConfig.java deleted file mode 100644 index 12f491389fabe..0000000000000 --- a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcServerNettyConfig.java +++ /dev/null @@ -1,37 +0,0 @@ -package io.quarkus.grpc.runtime.config; - -import java.time.Duration; -import java.util.Optional; - -import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; - -@SuppressWarnings("OptionalUsedAsFieldOrParameterType") -@ConfigGroup -public class GrpcServerNettyConfig { - - /** - * Sets a custom keep-alive duration. This configures the time before sending a `keepalive` ping - * when there is no read activity. - */ - @ConfigItem - public Optional keepAliveTime; - - /** - * Sets a custom permit-keep-alive duration. This configures the most aggressive keep-alive time clients - * are permitted to configure. - * The server will try to detect clients exceeding this rate and when detected will forcefully close the connection. - * - * @see #permitKeepAliveWithoutCalls - */ - @ConfigItem - public Optional permitKeepAliveTime; - - /** - * Sets whether to allow clients to send keep-alive HTTP/2 PINGs even if - * there are no outstanding RPCs on the connection. 
- */ - @ConfigItem - public Optional permitKeepAliveWithoutCalls; - -} diff --git a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcTransportSecurity.java b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcTransportSecurity.java deleted file mode 100644 index 9108c81c8525b..0000000000000 --- a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcTransportSecurity.java +++ /dev/null @@ -1,23 +0,0 @@ -package io.quarkus.grpc.runtime.config; - -import java.util.Optional; - -import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; - -@SuppressWarnings("OptionalUsedAsFieldOrParameterType") -@ConfigGroup -public class GrpcTransportSecurity { - - /** - * The path to the certificate file. - */ - @ConfigItem - public Optional certificate; - - /** - * The path to the private key file. - */ - @ConfigItem - public Optional key; -} diff --git a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/InProcess.java b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/InProcess.java index 370bb3917e853..65e574beddecf 100644 --- a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/InProcess.java +++ b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/InProcess.java @@ -1,31 +1,24 @@ package io.quarkus.grpc.runtime.config; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; -import io.quarkus.runtime.annotations.IgnoreProperty; +import io.smallrye.config.WithDefault; /** * In-process config * * in-process usage */ @ConfigGroup -public class InProcess implements Enabled { - - @Override - @IgnoreProperty - public boolean isEnabled() { - return enabled; - } +public interface InProcess extends Enabled { /** * Explicitly enable use of in-process. */ - @ConfigItem(defaultValue = "false") - public boolean enabled; + @WithDefault("false") + boolean enabled(); /** * Set in-process name. */ - @ConfigItem(defaultValue = "quarkus-grpc") - public String name; + @WithDefault("quarkus-grpc") + String name(); } diff --git a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/SslClientConfig.java b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/SslClientConfig.java deleted file mode 100644 index 46fedf5a71404..0000000000000 --- a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/SslClientConfig.java +++ /dev/null @@ -1,35 +0,0 @@ -package io.quarkus.grpc.runtime.config; - -import java.nio.file.Path; -import java.util.Optional; - -import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; - -/** - * Shared configuration for setting up client-side SSL. - */ -@SuppressWarnings("OptionalUsedAsFieldOrParameterType") -@ConfigGroup -public class SslClientConfig { - /** - * The classpath path or file path to a server certificate or certificate chain in PEM format. - */ - @ConfigItem - public Optional certificate; - - /** - * The classpath path or file path to the corresponding certificate private key file in PEM format. - */ - @ConfigItem - public Optional key; - - /** - * An optional trust store which holds the certificate information of the certificates to trust - * - * The trust store can be either on classpath or in an external file. 
- */ - @ConfigItem - public Optional trustStore; - -} diff --git a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/SslServerConfig.java b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/SslServerConfig.java deleted file mode 100644 index 6b81e9ba06090..0000000000000 --- a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/SslServerConfig.java +++ /dev/null @@ -1,111 +0,0 @@ -package io.quarkus.grpc.runtime.config; - -import java.nio.file.Path; -import java.util.List; -import java.util.Optional; -import java.util.Set; - -import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; -import io.quarkus.runtime.annotations.DefaultConverter; -import io.vertx.core.http.ClientAuth; - -/** - * Shared configuration for setting up server-side SSL. - */ -@SuppressWarnings("OptionalUsedAsFieldOrParameterType") -@ConfigGroup -public class SslServerConfig { - /** - * The classpath path or file path to a server certificate or certificate chain in PEM format. - */ - @ConfigItem - public Optional certificate; - - /** - * The classpath path or file path to the corresponding certificate private key file in PEM format. - */ - @ConfigItem - public Optional key; - - /** - * An optional keystore that holds the certificate information instead of specifying separate files. - * The keystore can be either on classpath or an external file. - */ - @ConfigItem - public Optional keyStore; - - /** - * An optional parameter to specify the type of the keystore file. If not given, the type is automatically detected - * based on the file name. - */ - @ConfigItem - public Optional keyStoreType; - - /** - * A parameter to specify the password of the keystore file. - */ - @ConfigItem - public Optional keyStorePassword; - - /** - * A parameter to specify the alias of the keystore file. - */ - @ConfigItem - public Optional keyStoreAlias; - - /** - * A parameter to specify the alias password of the keystore file. - */ - @ConfigItem - public Optional keyStoreAliasPassword; - - /** - * An optional trust store which holds the certificate information of the certificates to trust - *
    - * The trust store can be either on classpath or an external file. - */ - @ConfigItem - public Optional trustStore; - - /** - * An optional parameter to specify type of the trust store file. If not given, the type is automatically detected - * based on the file name. - */ - @ConfigItem - public Optional trustStoreType; - - /** - * A parameter to specify the password of the trust store file. - */ - @ConfigItem - public Optional trustStorePassword; - - /** - * The cipher suites to use. If none is given, a reasonable default is selected. - */ - @ConfigItem - public Optional> cipherSuites; - - /** - * Sets the ordered list of enabled SSL/TLS protocols. - *

    - * If not set, it defaults to {@code "TLSv1.3, TLSv1.2"}. - * The following list of protocols are supported: {@code TLSv1, TLSv1.1, TLSv1.2, TLSv1.3}. - * To only enable {@code TLSv1.3}, set the value to {@code to "TLSv1.3"}. - *

    - * Note that setting an empty list, and enabling SSL/TLS is invalid. - * You must at least have one protocol. - */ - @DefaultConverter - @ConfigItem(defaultValue = "TLSv1.3,TLSv1.2") - public Set protocols; - - /** - * Configures the engine to require/request client authentication. - * NONE, REQUEST, REQUIRED - */ - @ConfigItem(defaultValue = "NONE") - public ClientAuth clientAuth; - -} diff --git a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/StorkConfig.java b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/StorkConfig.java deleted file mode 100644 index 0faaad79e3ec7..0000000000000 --- a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/StorkConfig.java +++ /dev/null @@ -1,40 +0,0 @@ -package io.quarkus.grpc.runtime.config; - -import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; - -/** - * Stork config for new Vert.x gRPC - */ -@ConfigGroup -public class StorkConfig { - /** - * Number of threads on a delayed gRPC ClientCall - */ - @ConfigItem(defaultValue = "10") - public int threads; - - /** - * Deadline in milliseconds of delayed gRPC call - */ - @ConfigItem(defaultValue = "5000") - public long deadline; - - /** - * Number of retries on a gRPC ClientCall - */ - @ConfigItem(defaultValue = "3") - public int retries; - - /** - * Initial delay in seconds on refresh check - */ - @ConfigItem(defaultValue = "60") - public long delay; - - /** - * Refresh period in seconds - */ - @ConfigItem(defaultValue = "120") - public long period; -} diff --git a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/TlsClientConfig.java b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/TlsClientConfig.java deleted file mode 100644 index a48f9786b6c21..0000000000000 --- a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/TlsClientConfig.java +++ /dev/null @@ -1,132 +0,0 @@ -package io.quarkus.grpc.runtime.config; - -import java.util.List; -import java.util.Optional; - -import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; - -@ConfigGroup -public class TlsClientConfig { - - /** - * Whether SSL/TLS is enabled. - */ - @ConfigItem(defaultValue = "false") - public boolean enabled; - - /** - * Enable trusting all certificates. Disabled by default. - */ - @ConfigItem(defaultValue = "false") - public boolean trustAll; - - /** - * Trust configuration in the PEM format. - *

    - * When used, {@code trust-certificate-jks} and {@code trust-certificate-p12} must not be used. - */ - public PemTrustCertConfiguration trustCertificatePem; - - /** - * Trust configuration in the JKS format. - *

    - * When configured, {@code trust-certificate-pem} and {@code trust-certificate-p12} must not be used. - */ - public JksConfiguration trustCertificateJks; - - /** - * Trust configuration in the P12 format. - *

    - * When configured, {@code trust-certificate-jks} and {@code trust-certificate-pem} must not be used. - */ - public PfxConfiguration trustCertificateP12; - - /** - * Key/cert configuration in the PEM format. - *

    - * When configured, {@code key-certificate-jks} and {@code key-certificate-p12} must not be used. - */ - public PemKeyCertConfiguration keyCertificatePem; - - /** - * Key/cert configuration in the JKS format. - *

    - * When configured, {@code #key-certificate-pem} and {@code #key-certificate-p12} must not be used. - */ - public JksConfiguration keyCertificateJks; - - /** - * Key/cert configuration in the P12 format. - *

    - * When configured, {@code key-certificate-jks} and {@code #key-certificate-pem} must not be used. - */ - public PfxConfiguration keyCertificateP12; - - /** - * Whether hostname should be verified in the SSL/TLS handshake. - */ - @ConfigItem(defaultValue = "true") - public boolean verifyHostname; - - @ConfigGroup - public static class PemTrustCertConfiguration { - - /** - * Comma-separated list of the trust certificate files (Pem format). - */ - @ConfigItem - public Optional> certs; - - } - - @ConfigGroup - public static class JksConfiguration { - - /** - * Path of the key file (JKS format). - */ - @ConfigItem - public Optional path; - - /** - * Password of the key file. - */ - @ConfigItem - public Optional password; - } - - @ConfigGroup - public static class PfxConfiguration { - - /** - * Path to the key file (PFX format). - */ - @ConfigItem - public Optional path; - - /** - * Password of the key. - */ - @ConfigItem - public Optional password; - } - - @ConfigGroup - public static class PemKeyCertConfiguration { - - /** - * Comma-separated list of the path to the key files (Pem format). - */ - @ConfigItem - public Optional> keys; - - /** - * Comma-separated list of the path to the certificate files (Pem format). - */ - @ConfigItem - public Optional> certs; - - } - -} diff --git a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/Xds.java b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/Xds.java deleted file mode 100644 index 5c5bdd6754e03..0000000000000 --- a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/Xds.java +++ /dev/null @@ -1,31 +0,0 @@ -package io.quarkus.grpc.runtime.config; - -import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; -import io.quarkus.runtime.annotations.IgnoreProperty; - -/** - * XDS config - * * XDS usage - */ -@ConfigGroup -public class Xds implements Enabled { - - @Override - @IgnoreProperty - public boolean isEnabled() { - return enabled; - } - - /** - * Explicitly enable use of XDS. - */ - @ConfigItem(defaultValue = "false") - public boolean enabled; - - /** - * Use secure credentials. 
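On the consumer side the change is mechanical: public-field chains become accessor-method chains, as the GrpcServices and GrpcJsonRPCService hunks below show. A hedged sketch of the resulting call style, using a hypothetical helper class rather than code from this change (only the server(), ssl(), certificate() and keyStore() accessors are taken from the diff):

    // Hypothetical helper, shown for illustration; not part of this change.
    package org.acme.example;

    import io.quarkus.grpc.runtime.config.GrpcConfiguration;

    public final class GrpcSslCheck {

        private GrpcSslCheck() {
        }

        // Before the migration this read:
        //   configuration.server.ssl.certificate.isPresent() || configuration.server.ssl.keyStore.isPresent()
        public static boolean serverUsesSsl(GrpcConfiguration configuration) {
            return configuration.server().ssl().certificate().isPresent()
                    || configuration.server().ssl().keyStore().isPresent();
        }
    }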
- */ - @ConfigItem(defaultValue = "false") - public boolean secure; -} diff --git a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/devmode/GrpcServices.java b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/devmode/GrpcServices.java index ce3e554f1397b..3a64d17585aa4 100644 --- a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/devmode/GrpcServices.java +++ b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/devmode/GrpcServices.java @@ -114,7 +114,7 @@ public ServiceDefinitionAndStatus setValue(ServiceDefinitionAndStatus value) { } public boolean hasTestableMethod() { - if (configuration.server.ssl.certificate.isPresent() || configuration.server.ssl.keyStore.isPresent()) { + if (configuration.server().ssl().certificate().isPresent() || configuration.server().ssl().keyStore().isPresent()) { return false; } Map prototypes = DevConsoleManager.getGlobal("io.quarkus.grpc.messagePrototypes"); @@ -156,8 +156,8 @@ public boolean hasPrototype() { } public boolean isTestable() { - return configuration.server.ssl.certificate.isEmpty() - && configuration.server.ssl.keyStore.isEmpty(); + return configuration.server().ssl().certificate().isEmpty() + && configuration.server().ssl().keyStore().isEmpty(); } public String getPrototype() { diff --git a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/devui/GrpcJsonRPCService.java b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/devui/GrpcJsonRPCService.java index 7d367e3d43710..f55adb7b9159b 100644 --- a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/devui/GrpcJsonRPCService.java +++ b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/devui/GrpcJsonRPCService.java @@ -28,7 +28,7 @@ import io.quarkus.grpc.runtime.config.GrpcServerConfiguration; import io.quarkus.grpc.runtime.devmode.GrpcServices; import io.quarkus.vertx.http.runtime.CertificateConfig; -import io.quarkus.vertx.http.runtime.HttpConfiguration; +import io.quarkus.vertx.http.runtime.VertxHttpConfig; import io.smallrye.mutiny.Multi; import io.smallrye.mutiny.Uni; import io.smallrye.mutiny.operators.multi.processors.BroadcastProcessor; @@ -49,7 +49,7 @@ public class GrpcJsonRPCService { private Map> callsInProgress; @Inject - HttpConfiguration httpConfiguration; + VertxHttpConfig httpConfig; @Inject GrpcConfiguration grpcConfiguration; @@ -63,24 +63,24 @@ public class GrpcJsonRPCService { @PostConstruct public void init() { - GrpcServerConfiguration serverConfig = grpcConfiguration.server; - if (serverConfig.useSeparateServer) { - this.host = serverConfig.host; - this.port = serverConfig.port; - this.ssl = serverConfig.ssl.certificate.isPresent() || serverConfig.ssl.keyStore.isPresent(); + GrpcServerConfiguration serverConfig = grpcConfiguration.server(); + if (serverConfig.useSeparateServer()) { + this.host = serverConfig.host(); + this.port = serverConfig.port(); + this.ssl = serverConfig.ssl().certificate().isPresent() || serverConfig.ssl().keyStore().isPresent(); } else { - this.host = httpConfiguration.host; - this.port = httpConfiguration.port; - this.ssl = isTLSConfigured(httpConfiguration.ssl.certificate); + this.host = httpConfig.host(); + this.port = httpConfig.port(); + this.ssl = isTLSConfigured(httpConfig.ssl().certificate()); } this.grpcServiceClassInfos = getGrpcServiceClassInfos(); this.callsInProgress = new HashMap<>(); } private boolean isTLSConfigured(CertificateConfig certificate) { - return certificate.files.isPresent() - || certificate.keyFiles.isPresent() - || 
certificate.keyStoreFile.isPresent(); + return certificate.files().isPresent() + || certificate.keyFiles().isPresent() + || certificate.keyStoreFile().isPresent(); } public JsonArray getServices() { diff --git a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/stork/StorkGrpcChannel.java b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/stork/StorkGrpcChannel.java index 51e7995434647..b5314a7986776 100644 --- a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/stork/StorkGrpcChannel.java +++ b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/stork/StorkGrpcChannel.java @@ -30,7 +30,7 @@ import io.grpc.Deadline; import io.grpc.MethodDescriptor; import io.grpc.internal.DelayedClientCall; -import io.quarkus.grpc.runtime.config.StorkConfig; +import io.quarkus.grpc.runtime.config.GrpcClientConfiguration; import io.smallrye.mutiny.Uni; import io.smallrye.stork.Stork; import io.smallrye.stork.api.Service; @@ -48,7 +48,7 @@ public class StorkGrpcChannel extends Channel implements AutoCloseable { private final GrpcClient client; private final String serviceName; - private final StorkConfig stork; + private final GrpcClientConfiguration.StorkConfig stork; private final Executor executor; private static class Context { @@ -60,13 +60,14 @@ private static class Context { AtomicReference ref; } - public StorkGrpcChannel(GrpcClient client, String serviceName, StorkConfig stork, Executor executor) { + public StorkGrpcChannel(GrpcClient client, String serviceName, GrpcClientConfiguration.StorkConfig stork, + Executor executor) { this.client = client; this.serviceName = serviceName; this.stork = stork; this.executor = executor; - this.scheduler = new ScheduledThreadPoolExecutor(stork.threads); - this.scheduler.scheduleAtFixedRate(this::refresh, stork.delay, stork.period, TimeUnit.SECONDS); + this.scheduler = new ScheduledThreadPoolExecutor(stork.threads()); + this.scheduler.scheduleAtFixedRate(this::refresh, stork.delay(), stork.period(), TimeUnit.SECONDS); } @Override @@ -85,12 +86,12 @@ public ClientCall newCall(MethodDescr context.ref = STORK_SERVICE_INSTANCE.get(); DelayedClientCall delayed = new StorkDelayedClientCall<>(executor, scheduler, - Deadline.after(stork.deadline, TimeUnit.MILLISECONDS)); + Deadline.after(stork.deadline(), TimeUnit.MILLISECONDS)); asyncCall(methodDescriptor, callOptions, context) .onFailure() .retry() - .atMost(stork.retries) + .atMost(stork.retries()) .subscribe() .asCompletionStage() .thenApply(delayed::setCall) diff --git a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/supports/Channels.java b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/supports/Channels.java index e7cc902b6384a..eb79f38ccaddc 100644 --- a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/supports/Channels.java +++ b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/supports/Channels.java @@ -5,12 +5,7 @@ import static io.grpc.netty.NettyChannelBuilder.DEFAULT_FLOW_CONTROL_WINDOW; import static io.quarkus.grpc.runtime.GrpcTestPortUtils.testPort; import static io.quarkus.grpc.runtime.config.GrpcClientConfiguration.DNS; -import static io.quarkus.grpc.runtime.supports.SSLConfigHelper.configureJksKeyCertOptions; -import static io.quarkus.grpc.runtime.supports.SSLConfigHelper.configureJksTrustOptions; -import static io.quarkus.grpc.runtime.supports.SSLConfigHelper.configurePemKeyCertOptions; -import static io.quarkus.grpc.runtime.supports.SSLConfigHelper.configurePemTrustOptions; -import static 
io.quarkus.grpc.runtime.supports.SSLConfigHelper.configurePfxKeyCertOptions; -import static io.quarkus.grpc.runtime.supports.SSLConfigHelper.configurePfxTrustOptions; +import static io.quarkus.grpc.runtime.supports.SSLConfigHelper.*; import java.io.IOException; import java.io.InputStream; @@ -20,6 +15,7 @@ import java.nio.file.Path; import java.time.Duration; import java.util.ArrayList; +import java.util.Comparator; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -32,6 +28,8 @@ import java.util.concurrent.TimeoutException; import jakarta.enterprise.context.spi.CreationalContext; +import jakarta.enterprise.inject.Any; +import jakarta.enterprise.util.TypeLiteral; import org.eclipse.microprofile.context.ManagedExecutor; import org.jboss.logging.Logger; @@ -55,12 +53,12 @@ import io.quarkus.arc.InstanceHandle; import io.quarkus.grpc.GrpcClient; import io.quarkus.grpc.RegisterClientInterceptor; +import io.quarkus.grpc.api.ChannelBuilderCustomizer; import io.quarkus.grpc.runtime.ClientInterceptorStorage; import io.quarkus.grpc.runtime.GrpcClientInterceptorContainer; import io.quarkus.grpc.runtime.config.GrpcClientConfiguration; import io.quarkus.grpc.runtime.config.GrpcServerConfiguration; -import io.quarkus.grpc.runtime.config.SslClientConfig; -import io.quarkus.grpc.runtime.config.TlsClientConfig; +import io.quarkus.grpc.runtime.config.InProcess; import io.quarkus.grpc.runtime.stork.StorkGrpcChannel; import io.quarkus.grpc.runtime.stork.StorkMeasuringGrpcInterceptor; import io.quarkus.grpc.runtime.stork.VertxStorkMeasuringGrpcInterceptor; @@ -115,17 +113,17 @@ public static Channel createChannel(String name, Set perClientIntercepto GrpcBuilderProvider provider = GrpcBuilderProvider.findChannelBuilderProvider(config); - boolean vertxGrpc = config.useQuarkusGrpcClient; + boolean vertxGrpc = config.useQuarkusGrpcClient(); - String host = config.host; + String host = config.host(); // handle client port - int port = config.port; + int port = config.port(); if (LaunchMode.current() == LaunchMode.TEST) { - port = config.testPort.orElse(testPort(configProvider.getServerConfiguration())); + port = config.testPort().orElse(testPort(configProvider.getServerConfiguration())); } - String nameResolver = config.nameResolver; + String nameResolver = config.nameResolver(); boolean stork = Stork.STORK.equalsIgnoreCase(nameResolver); @@ -151,8 +149,15 @@ public static Channel createChannel(String name, Set perClientIntercepto } } - boolean plainText = config.ssl.trustStore.isEmpty(); - Optional usePlainText = config.plainText; + List> channelBuilderCustomizers = container + .select(new TypeLiteral>() { + }, Any.Literal.INSTANCE) + .stream() + .sorted(Comparator., Integer> comparing(ChannelBuilderCustomizer::priority)) + .toList(); + + boolean plainText = config.ssl().trustStore().isEmpty(); + Optional usePlainText = config.plainText(); if (usePlainText.isPresent()) { plainText = usePlainText.get(); } @@ -163,9 +168,9 @@ public static Channel createChannel(String name, Set perClientIntercepto SslContext context = null; if (!plainText && provider == null) { - Path trustStorePath = config.ssl.trustStore.orElse(null); - Path certificatePath = config.ssl.certificate.orElse(null); - Path keyPath = config.ssl.key.orElse(null); + Path trustStorePath = config.ssl().trustStore().orElse(null); + Path certificatePath = config.ssl().certificate().orElse(null); + Path keyPath = config.ssl().key().orElse(null); SslContextBuilder sslContextBuilder = GrpcSslContexts.forClient(); if (trustStorePath != 
null) { try (InputStream stream = streamFor(trustStorePath, "trust store")) { @@ -187,60 +192,73 @@ public static Channel createChannel(String name, Set perClientIntercepto context = sslContextBuilder.build(); } - String loadBalancingPolicy = stork ? Stork.STORK : config.loadBalancingPolicy; + String loadBalancingPolicy = stork ? Stork.STORK : config.loadBalancingPolicy(); ManagedChannelBuilder builder; if (provider != null) { builder = provider.createChannelBuilder(config, target); } else { - builder = NettyChannelBuilder - .forTarget(target) + builder = NettyChannelBuilder.forTarget(target); + } + + for (ChannelBuilderCustomizer customizer : channelBuilderCustomizers) { + Map map = customizer.customize(name, config, builder); + builder.defaultServiceConfig(map); + } + + if (builder instanceof NettyChannelBuilder) { + NettyChannelBuilder ncBuilder = (NettyChannelBuilder) builder; + builder = ncBuilder // clients are intercepted using the IOThreadClientInterceptor interceptor which will decide on which // thread the messages should be processed. .directExecutor() // will use I/O thread - must not be blocked. .offloadExecutor(Infrastructure.getDefaultExecutor()) .defaultLoadBalancingPolicy(loadBalancingPolicy) - .flowControlWindow(config.flowControlWindow.orElse(DEFAULT_FLOW_CONTROL_WINDOW)) - .keepAliveWithoutCalls(config.keepAliveWithoutCalls) - .maxHedgedAttempts(config.maxHedgedAttempts) - .maxRetryAttempts(config.maxRetryAttempts) - .maxInboundMetadataSize(config.maxInboundMetadataSize.orElse(DEFAULT_MAX_HEADER_LIST_SIZE)) - .maxInboundMessageSize(config.maxInboundMessageSize.orElse(DEFAULT_MAX_MESSAGE_SIZE)) - .negotiationType(NegotiationType.valueOf(config.negotiationType.toUpperCase())); + .flowControlWindow(config.flowControlWindow().orElse(DEFAULT_FLOW_CONTROL_WINDOW)) + .keepAliveWithoutCalls(config.keepAliveWithoutCalls()) + .maxHedgedAttempts(config.maxHedgedAttempts()) + .maxRetryAttempts(config.maxRetryAttempts()) + .maxInboundMetadataSize(config.maxInboundMetadataSize().orElse(DEFAULT_MAX_HEADER_LIST_SIZE)) + .maxInboundMessageSize(config.maxInboundMessageSize().orElse(DEFAULT_MAX_MESSAGE_SIZE)) + .negotiationType(NegotiationType.valueOf(config.negotiationType().toUpperCase())); + + if (context != null) { + ncBuilder.sslContext(context); + } } - if (config.retry) { + if (config.retry()) { builder.enableRetry(); } else { builder.disableRetry(); } - if (config.maxTraceEvents.isPresent()) { - builder.maxTraceEvents(config.maxTraceEvents.getAsInt()); + if (config.maxTraceEvents().isPresent()) { + builder.maxTraceEvents(config.maxTraceEvents().getAsInt()); } - Optional userAgent = config.userAgent; + Optional userAgent = config.userAgent(); if (userAgent.isPresent()) { builder.userAgent(userAgent.get()); } - if (config.retryBufferSize.isPresent()) { - builder.retryBufferSize(config.retryBufferSize.getAsLong()); + if (config.retryBufferSize().isPresent()) { + builder.retryBufferSize(config.retryBufferSize().getAsLong()); } - if (config.perRpcBufferLimit.isPresent()) { - builder.perRpcBufferLimit(config.perRpcBufferLimit.getAsLong()); + if (config.perRpcBufferLimit().isPresent()) { + builder.perRpcBufferLimit(config.perRpcBufferLimit().getAsLong()); } - Optional overrideAuthority = config.overrideAuthority; + Optional overrideAuthority = config.overrideAuthority(); if (overrideAuthority.isPresent()) { builder.overrideAuthority(overrideAuthority.get()); } - Optional keepAliveTime = config.keepAliveTime; + Optional keepAliveTime = config.keepAliveTime(); if 
(keepAliveTime.isPresent()) { builder.keepAliveTime(keepAliveTime.get().toMillis(), TimeUnit.MILLISECONDS); } - Optional keepAliveTimeout = config.keepAliveTimeout; + Optional keepAliveTimeout = config.keepAliveTimeout(); if (keepAliveTimeout.isPresent()) { builder.keepAliveTimeout(keepAliveTimeout.get().toMillis(), TimeUnit.MILLISECONDS); } - Optional idleTimeout = config.idleTimeout; + Optional idleTimeout = config.idleTimeout(); if (idleTimeout.isPresent()) { builder.idleTimeout(idleTimeout.get().toMillis(), TimeUnit.MILLISECONDS); } @@ -248,10 +266,6 @@ public static Channel createChannel(String name, Set perClientIntercepto if (plainText && provider == null) { builder.usePlaintext(); } - if (context != null && (builder instanceof NettyChannelBuilder)) { - NettyChannelBuilder ncBuilder = (NettyChannelBuilder) builder; - ncBuilder.sslContext(context); - } interceptorContainer.getSortedPerServiceInterceptors(perClientInterceptors).forEach(builder::intercept); interceptorContainer.getSortedGlobalInterceptors().forEach(builder::intercept); @@ -265,6 +279,15 @@ public static Channel createChannel(String name, Set perClientIntercepto HttpClientOptions options = new HttpClientOptions(); options.setHttp2ClearTextUpgrade(false); // this fixes i30379 + // Start with almost empty options and default max msg size ... + GrpcClientOptions clientOptions = new GrpcClientOptions() + .setTransportOptions(options) + .setMaxMessageSize(config.maxInboundMessageSize().orElse(DEFAULT_MAX_MESSAGE_SIZE)); + + for (ChannelBuilderCustomizer customizer : channelBuilderCustomizers) { + customizer.customize(name, config, clientOptions); + } + if (!plainText) { TlsConfigurationRegistry registry = Arc.container().select(TlsConfigurationRegistry.class).get(); @@ -273,11 +296,11 @@ public static Channel createChannel(String name, Set perClientIntercepto options.setUseAlpn(true); TlsConfiguration configuration = null; - if (config.tlsConfigurationName.isPresent()) { - Optional maybeConfiguration = registry.get(config.tlsConfigurationName.get()); + if (config.tlsConfigurationName().isPresent()) { + Optional maybeConfiguration = registry.get(config.tlsConfigurationName().get()); if (!maybeConfiguration.isPresent()) { throw new IllegalStateException("Unable to find the TLS configuration " - + config.tlsConfigurationName.get() + " for the gRPC client " + name + "."); + + config.tlsConfigurationName().get() + " for the gRPC client " + name + "."); } configuration = maybeConfiguration.get(); } else if (registry.getDefault().isPresent() && (registry.getDefault().get().getTrustStoreOptions() != null @@ -287,26 +310,26 @@ public static Channel createChannel(String name, Set perClientIntercepto if (configuration != null) { TlsConfigUtils.configure(options, configuration); - } else if (config.tls.enabled) { - TlsClientConfig tls = config.tls; - options.setSsl(true).setTrustAll(tls.trustAll); - - configurePemTrustOptions(options, tls.trustCertificatePem); - configureJksTrustOptions(options, tls.trustCertificateJks); - configurePfxTrustOptions(options, tls.trustCertificateP12); - - configurePemKeyCertOptions(options, tls.keyCertificatePem); - configureJksKeyCertOptions(options, tls.keyCertificateJks); - configurePfxKeyCertOptions(options, tls.keyCertificateP12); - options.setVerifyHost(tls.verifyHostname); + } else if (config.tls().enabled()) { + GrpcClientConfiguration.TlsClientConfig tls = config.tls(); + options.setSsl(true).setTrustAll(tls.trustAll()); + + configurePemTrustOptions(options, tls.trustCertificatePem()); + 
configureJksTrustOptions(options, tls.trustCertificateJks()); + configurePfxTrustOptions(options, tls.trustCertificateP12()); + + configurePemKeyCertOptions(options, tls.keyCertificatePem()); + configureJksKeyCertOptions(options, tls.keyCertificateJks()); + configurePfxKeyCertOptions(options, tls.keyCertificateP12()); + options.setVerifyHost(tls.verifyHostname()); } else { - if (config.ssl.trustStore.isPresent()) { - Optional trustStorePath = config.ssl.trustStore; + if (config.ssl().trustStore().isPresent()) { + Optional trustStorePath = config.ssl().trustStore(); PemTrustOptions to = new PemTrustOptions(); to.addCertValue(bufferFor(trustStorePath.get(), "trust store")); options.setTrustOptions(to); - Optional certificatePath = config.ssl.certificate; - Optional keyPath = config.ssl.key; + Optional certificatePath = config.ssl().certificate(); + Optional keyPath = config.ssl().key(); if (certificatePath.isPresent() && keyPath.isPresent()) { PemKeyCertOptions cko = new PemKeyCertOptions(); cko.setCertValue(bufferFor(certificatePath.get(), "certificate")); @@ -317,14 +340,14 @@ public static Channel createChannel(String name, Set perClientIntercepto } } - options.setKeepAlive(config.keepAliveWithoutCalls); - Optional keepAliveTimeout = config.keepAliveTimeout; + options.setKeepAlive(config.keepAliveWithoutCalls()); + Optional keepAliveTimeout = config.keepAliveTimeout(); if (keepAliveTimeout.isPresent()) { int keepAliveTimeoutN = (int) keepAliveTimeout.get().toSeconds(); options.setKeepAliveTimeout(keepAliveTimeoutN); options.setHttp2KeepAliveTimeout(keepAliveTimeoutN); } - Optional idleTimeout = config.idleTimeout; + Optional idleTimeout = config.idleTimeout(); if (idleTimeout.isPresent()) { options.setIdleTimeout((int) idleTimeout.get().toMillis()); options.setIdleTimeoutUnit(TimeUnit.MILLISECONDS); @@ -336,13 +359,14 @@ public static Channel createChannel(String name, Set perClientIntercepto options.setMetricsName("grpc|" + name); Vertx vertx = container.instance(Vertx.class).get(); - io.vertx.grpc.client.GrpcClient client = io.vertx.grpc.client.GrpcClient.client(vertx, - new GrpcClientOptions().setTransportOptions(options) - .setMaxMessageSize(config.maxInboundMessageSize.orElse(DEFAULT_MAX_MESSAGE_SIZE))); + io.vertx.grpc.client.GrpcClient client = io.vertx.grpc.client.GrpcClient.client( + vertx, + clientOptions); + Channel channel; if (stork) { ManagedExecutor executor = container.instance(ManagedExecutor.class).get(); - channel = new StorkGrpcChannel(client, config.host, config.stork, executor); // host = service-name + channel = new StorkGrpcChannel(client, config.host(), config.stork(), executor); // host = service-name } else { channel = new GrpcClientChannel(client, SocketAddress.inetSocketAddress(port, host)); } @@ -359,38 +383,283 @@ public static Channel createChannel(String name, Set perClientIntercepto } private static GrpcClientConfiguration testConfig(GrpcServerConfiguration serverConfiguration) { - GrpcClientConfiguration config = new GrpcClientConfiguration(); - config.port = serverConfiguration.testPort; - config.testPort = OptionalInt.empty(); - config.host = serverConfiguration.host; - config.plainText = Optional.of(serverConfiguration.plainText); - config.compression = Optional.empty(); - config.flowControlWindow = OptionalInt.empty(); - config.idleTimeout = Optional.empty(); - config.keepAliveTime = Optional.empty(); - config.keepAliveTimeout = Optional.empty(); - config.loadBalancingPolicy = "pick_first"; - config.maxHedgedAttempts = 5; - 
config.maxInboundMessageSize = OptionalInt.empty(); - config.maxInboundMetadataSize = OptionalInt.empty(); - config.maxRetryAttempts = 0; - config.maxTraceEvents = OptionalInt.empty(); - config.nameResolver = DNS; - config.negotiationType = "PLAINTEXT"; - config.overrideAuthority = Optional.empty(); - config.perRpcBufferLimit = OptionalLong.empty(); - config.retry = false; - config.retryBufferSize = OptionalLong.empty(); - config.ssl = new SslClientConfig(); - config.ssl.key = Optional.empty(); - config.ssl.certificate = Optional.empty(); - config.ssl.trustStore = Optional.empty(); - config.userAgent = Optional.empty(); - if (serverConfiguration.ssl.certificate.isPresent() || serverConfiguration.ssl.keyStore.isPresent()) { + if (serverConfiguration.ssl().certificate().isPresent() || serverConfiguration.ssl().keyStore().isPresent()) { LOGGER.warn("gRPC client created without configuration and the gRPC server is configured for SSL. " + "Configuring SSL for such clients is not supported."); } - return config; + + return new GrpcClientConfiguration() { + + @Override + public boolean useQuarkusGrpcClient() { + return false; + } + + @Override + public ClientXds xds() { + return null; + } + + @Override + public InProcess inProcess() { + return null; + } + + @Override + public StorkConfig stork() { + return null; + } + + @Override + public int port() { + return serverConfiguration.testPort(); + } + + @Override + public OptionalInt testPort() { + return OptionalInt.empty(); + } + + @Override + public String host() { + return serverConfiguration.host(); + } + + @Override + public SslClientConfig ssl() { + return new SslClientConfig() { + @Override + public Optional certificate() { + return Optional.empty(); + } + + @Override + public Optional key() { + return Optional.empty(); + } + + @Override + public Optional trustStore() { + return Optional.empty(); + } + }; + } + + @Override + public Optional tlsConfigurationName() { + return Optional.empty(); + } + + @Override + public TlsClientConfig tls() { + return new TlsClientConfig() { + @Override + public boolean enabled() { + return false; + } + + @Override + public boolean trustAll() { + return false; + } + + @Override + public PemTrustCertConfiguration trustCertificatePem() { + return new PemTrustCertConfiguration() { + @Override + public Optional> certs() { + return Optional.empty(); + } + }; + } + + @Override + public JksConfiguration trustCertificateJks() { + return new JksConfiguration() { + @Override + public Optional path() { + return Optional.empty(); + } + + @Override + public Optional password() { + return Optional.empty(); + } + }; + } + + @Override + public PfxConfiguration trustCertificateP12() { + return new PfxConfiguration() { + @Override + public Optional path() { + return Optional.empty(); + } + + @Override + public Optional password() { + return Optional.empty(); + } + }; + } + + @Override + public PemKeyCertConfiguration keyCertificatePem() { + return new PemKeyCertConfiguration() { + @Override + public Optional> keys() { + return Optional.empty(); + } + + @Override + public Optional> certs() { + return Optional.empty(); + } + }; + } + + @Override + public JksConfiguration keyCertificateJks() { + return new JksConfiguration() { + @Override + public Optional path() { + return Optional.empty(); + } + + @Override + public Optional password() { + return Optional.empty(); + } + }; + } + + @Override + public PfxConfiguration keyCertificateP12() { + return new PfxConfiguration() { + @Override + public Optional path() { + return 
Optional.empty(); + } + + @Override + public Optional password() { + return Optional.empty(); + } + }; + } + + @Override + public boolean verifyHostname() { + return false; + } + }; + } + + @Override + public String nameResolver() { + return DNS; + } + + @Override + public Optional plainText() { + return Optional.of(serverConfiguration.plainText()); + } + + @Override + public Optional keepAliveTime() { + return Optional.empty(); + } + + @Override + public OptionalInt flowControlWindow() { + return OptionalInt.empty(); + } + + @Override + public Optional idleTimeout() { + return Optional.empty(); + } + + @Override + public Optional keepAliveTimeout() { + return Optional.empty(); + } + + @Override + public boolean keepAliveWithoutCalls() { + return false; + } + + @Override + public int maxHedgedAttempts() { + return 5; + } + + @Override + public int maxRetryAttempts() { + return 0; + } + + @Override + public OptionalInt maxTraceEvents() { + return OptionalInt.empty(); + } + + @Override + public OptionalInt maxInboundMessageSize() { + return OptionalInt.empty(); + } + + @Override + public OptionalInt maxInboundMetadataSize() { + return OptionalInt.empty(); + } + + @Override + public String negotiationType() { + return "PLAINTEXT"; + } + + @Override + public Optional overrideAuthority() { + return Optional.empty(); + } + + @Override + public OptionalLong perRpcBufferLimit() { + return OptionalLong.empty(); + } + + @Override + public boolean retry() { + return false; + } + + @Override + public OptionalLong retryBufferSize() { + return OptionalLong.empty(); + } + + @Override + public Optional userAgent() { + return Optional.empty(); + } + + @Override + public String loadBalancingPolicy() { + return "pick_first"; + } + + @Override + public Optional compression() { + return Optional.empty(); + } + + @Override + public Optional deadline() { + return Optional.empty(); + } + }; } private static Buffer bufferFor(Path path, String resourceName) throws IOException { diff --git a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/supports/GrpcClientConfigProvider.java b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/supports/GrpcClientConfigProvider.java index c0438f63e0510..9a0e38e7f7895 100644 --- a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/supports/GrpcClientConfigProvider.java +++ b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/supports/GrpcClientConfigProvider.java @@ -24,7 +24,7 @@ public class GrpcClientConfigProvider { GrpcConfiguration config; public GrpcClientConfiguration getConfiguration(String name) { - Map clients = config.clients; + Map clients = config.clients(); if (clients == null) { return null; } else { @@ -33,17 +33,17 @@ public GrpcClientConfiguration getConfiguration(String name) { } public GrpcServerConfiguration getServerConfiguration() { - return config.server; + return config.server(); } AbstractStub adjustCallOptions(String serviceName, AbstractStub stub) { - GrpcClientConfiguration clientConfig = config.clients != null ? config.clients.get(serviceName) : null; + GrpcClientConfiguration clientConfig = config.clients() != null ? 
config.clients().get(serviceName) : null; if (clientConfig != null) { - if (clientConfig.compression.isPresent()) { - stub = stub.withCompression(clientConfig.compression.get()); + if (clientConfig.compression().isPresent()) { + stub = stub.withCompression(clientConfig.compression().get()); } - if (clientConfig.deadline.isPresent()) { - Duration deadline = clientConfig.deadline.get(); + if (clientConfig.deadline().isPresent()) { + Duration deadline = clientConfig.deadline().get(); stub = stub.withDeadlineAfter(deadline.toMillis(), TimeUnit.MILLISECONDS); } } diff --git a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/supports/SSLConfigHelper.java b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/supports/SSLConfigHelper.java index c8e1f7e6cdbd7..09777a05a4ac1 100644 --- a/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/supports/SSLConfigHelper.java +++ b/extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/supports/SSLConfigHelper.java @@ -1,6 +1,6 @@ package io.quarkus.grpc.runtime.supports; -import io.quarkus.grpc.runtime.config.TlsClientConfig; +import io.quarkus.grpc.runtime.config.GrpcClientConfiguration; import io.vertx.core.net.JksOptions; import io.vertx.core.net.KeyCertOptions; import io.vertx.core.net.PemKeyCertOptions; @@ -11,55 +11,58 @@ public class SSLConfigHelper { public static void configurePemTrustOptions(TCPSSLOptions options, - TlsClientConfig.PemTrustCertConfiguration configuration) { - if ((configuration.certs.isPresent() && !configuration.certs.get().isEmpty())) { + GrpcClientConfiguration.TlsClientConfig.PemTrustCertConfiguration configuration) { + if ((configuration.certs().isPresent() && !configuration.certs().get().isEmpty())) { ensureTrustOptionsNotSet(options); options.setTrustOptions(toPemTrustOptions(configuration)); } } - private static PemTrustOptions toPemTrustOptions(TlsClientConfig.PemTrustCertConfiguration configuration) { + private static PemTrustOptions toPemTrustOptions( + GrpcClientConfiguration.TlsClientConfig.PemTrustCertConfiguration configuration) { PemTrustOptions pemTrustOptions = new PemTrustOptions(); - if (configuration.certs.isPresent()) { - for (String cert : configuration.certs.get()) { + if (configuration.certs().isPresent()) { + for (String cert : configuration.certs().get()) { pemTrustOptions.addCertPath(cert); } } return pemTrustOptions; } - public static void configureJksTrustOptions(TCPSSLOptions options, TlsClientConfig.JksConfiguration configuration) { - if (configuration.path.isPresent()) { + public static void configureJksTrustOptions(TCPSSLOptions options, + GrpcClientConfiguration.TlsClientConfig.JksConfiguration configuration) { + if (configuration.path().isPresent()) { ensureTrustOptionsNotSet(options); options.setTrustOptions(toJksOptions(configuration)); } } - private static JksOptions toJksOptions(TlsClientConfig.JksConfiguration configuration) { + private static JksOptions toJksOptions(GrpcClientConfiguration.TlsClientConfig.JksConfiguration configuration) { JksOptions jksOptions = new JksOptions(); - if (configuration.path.isPresent()) { - jksOptions.setPath(configuration.path.get()); + if (configuration.path().isPresent()) { + jksOptions.setPath(configuration.path().get()); } - if (configuration.password.isPresent()) { - jksOptions.setPassword(configuration.password.get()); + if (configuration.password().isPresent()) { + jksOptions.setPassword(configuration.password().get()); } return jksOptions; } - public static void configurePfxTrustOptions(TCPSSLOptions options, 
TlsClientConfig.PfxConfiguration configuration) { - if (configuration.path.isPresent()) { + public static void configurePfxTrustOptions(TCPSSLOptions options, + GrpcClientConfiguration.TlsClientConfig.PfxConfiguration configuration) { + if (configuration.path().isPresent()) { ensureTrustOptionsNotSet(options); options.setTrustOptions(toPfxOptions(configuration)); } } - private static PfxOptions toPfxOptions(TlsClientConfig.PfxConfiguration configuration) { + private static PfxOptions toPfxOptions(GrpcClientConfiguration.TlsClientConfig.PfxConfiguration configuration) { PfxOptions pfxOptions = new PfxOptions(); - if (configuration.path.isPresent()) { - pfxOptions.setPath(configuration.path.get()); + if (configuration.path().isPresent()) { + pfxOptions.setPath(configuration.path().get()); } - if (configuration.password.isPresent()) { - pfxOptions.setPassword(configuration.password.get()); + if (configuration.password().isPresent()) { + pfxOptions.setPassword(configuration.password().get()); } return pfxOptions; } @@ -71,38 +74,41 @@ private static void ensureTrustOptionsNotSet(TCPSSLOptions options) { } public static void configurePemKeyCertOptions(TCPSSLOptions options, - TlsClientConfig.PemKeyCertConfiguration configuration) { - if (configuration.certs.isPresent() && !configuration.certs.get().isEmpty() && configuration.keys.isPresent() - && !configuration.keys.get().isEmpty()) { + GrpcClientConfiguration.TlsClientConfig.PemKeyCertConfiguration configuration) { + if (configuration.certs().isPresent() && !configuration.certs().get().isEmpty() && configuration.keys().isPresent() + && !configuration.keys().get().isEmpty()) { ensureKeyCertOptionsNotSet(options); options.setKeyCertOptions(toPemKeyCertOptions(configuration)); } } - private static KeyCertOptions toPemKeyCertOptions(TlsClientConfig.PemKeyCertConfiguration configuration) { + private static KeyCertOptions toPemKeyCertOptions( + GrpcClientConfiguration.TlsClientConfig.PemKeyCertConfiguration configuration) { PemKeyCertOptions pemKeyCertOptions = new PemKeyCertOptions(); - if (configuration.certs.isPresent()) { - for (String cert : configuration.certs.get()) { + if (configuration.certs().isPresent()) { + for (String cert : configuration.certs().get()) { pemKeyCertOptions.addCertPath(cert); } } - if (configuration.keys.isPresent()) { - for (String cert : configuration.keys.get()) { + if (configuration.keys().isPresent()) { + for (String cert : configuration.keys().get()) { pemKeyCertOptions.addKeyPath(cert); } } return pemKeyCertOptions; } - public static void configureJksKeyCertOptions(TCPSSLOptions options, TlsClientConfig.JksConfiguration configuration) { - if (configuration.path.isPresent()) { + public static void configureJksKeyCertOptions(TCPSSLOptions options, + GrpcClientConfiguration.TlsClientConfig.JksConfiguration configuration) { + if (configuration.path().isPresent()) { ensureKeyCertOptionsNotSet(options); options.setKeyCertOptions(toJksOptions(configuration)); } } - public static void configurePfxKeyCertOptions(TCPSSLOptions options, TlsClientConfig.PfxConfiguration configuration) { - if (configuration.path.isPresent()) { + public static void configurePfxKeyCertOptions(TCPSSLOptions options, + GrpcClientConfiguration.TlsClientConfig.PfxConfiguration configuration) { + if (configuration.path().isPresent()) { ensureKeyCertOptionsNotSet(options); options.setKeyCertOptions(toPfxOptions(configuration)); } @@ -117,4 +123,4 @@ private static void ensureKeyCertOptionsNotSet(TCPSSLOptions options) { private SSLConfigHelper() { // 
Utility } -} \ No newline at end of file +} diff --git a/extensions/grpc/xds/src/main/java/io/quarkus/grpc/xds/XdsGrpcServerBuilderProvider.java b/extensions/grpc/xds/src/main/java/io/quarkus/grpc/xds/XdsGrpcServerBuilderProvider.java index 267d6e29604dd..ffc08816876ea 100644 --- a/extensions/grpc/xds/src/main/java/io/quarkus/grpc/xds/XdsGrpcServerBuilderProvider.java +++ b/extensions/grpc/xds/src/main/java/io/quarkus/grpc/xds/XdsGrpcServerBuilderProvider.java @@ -20,11 +20,9 @@ import io.grpc.xds.XdsChannelCredentials; import io.grpc.xds.XdsServerBuilder; import io.grpc.xds.XdsServerCredentials; -import io.quarkus.grpc.runtime.config.ClientXds; import io.quarkus.grpc.runtime.config.Enabled; import io.quarkus.grpc.runtime.config.GrpcClientConfiguration; import io.quarkus.grpc.runtime.config.GrpcServerConfiguration; -import io.quarkus.grpc.runtime.config.Xds; import io.quarkus.grpc.runtime.devmode.DevModeInterceptor; import io.quarkus.grpc.runtime.devmode.GrpcHotReplacementInterceptor; import io.quarkus.grpc.spi.GrpcBuilderProvider; @@ -38,16 +36,16 @@ public class XdsGrpcServerBuilderProvider implements GrpcBuilderProvider { @Override public boolean providesServer(GrpcServerConfiguration configuration) { - return Enabled.isEnabled(configuration.xds); + return Enabled.isEnabled(configuration.xds()); } @Override public ServerBuilder createServerBuilder(Vertx vertx, GrpcServerConfiguration configuration, LaunchMode launchMode) { - Xds xds = configuration.xds; - int port = launchMode == LaunchMode.TEST ? configuration.testPort : configuration.port; + GrpcServerConfiguration.Xds xds = configuration.xds(); + int port = launchMode == LaunchMode.TEST ? configuration.testPort() : configuration.port(); ServerCredentials credentials = InsecureServerCredentials.create(); - if (xds.secure) { + if (xds.secure()) { credentials = XdsServerCredentials.create(credentials); } ServerBuilder builder = XdsServerBuilder.forPort(port, credentials); @@ -96,7 +94,7 @@ public String serverInfo(String host, int port, GrpcServerConfiguration configur @Override public boolean providesChannel(GrpcClientConfiguration configuration) { - return Enabled.isEnabled(configuration.xds) || XDS.equalsIgnoreCase(configuration.nameResolver); + return Enabled.isEnabled(configuration.xds()) || XDS.equalsIgnoreCase(configuration.nameResolver()); } @Override @@ -111,12 +109,12 @@ public String adjustHost(String host) { @Override public ManagedChannelBuilder createChannelBuilder(GrpcClientConfiguration configuration, String target) { - ClientXds xds = configuration.xds; + GrpcClientConfiguration.ClientXds xds = configuration.xds(); ChannelCredentials credentials = InsecureChannelCredentials.create(); - if (xds.secure) { + if (xds.secure()) { credentials = XdsChannelCredentials.create(credentials); } - target = xds.target.orElse(target); // use xds's target, if explicitly set + target = xds.target().orElse(target); // use xds's target, if explicitly set return Grpc.newChannelBuilder(target, credentials); } diff --git a/extensions/hibernate-envers/deployment/src/test/java/io/quarkus/hibernate/orm/envers/EnversFastBootingTest.java b/extensions/hibernate-envers/deployment/src/test/java/io/quarkus/hibernate/orm/envers/EnversFastBootingTest.java index d6148c030447e..acf8d694c7298 100644 --- a/extensions/hibernate-envers/deployment/src/test/java/io/quarkus/hibernate/orm/envers/EnversFastBootingTest.java +++ b/extensions/hibernate-envers/deployment/src/test/java/io/quarkus/hibernate/orm/envers/EnversFastBootingTest.java @@ -5,10 +5,10 @@ import 
org.hibernate.Session; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; -import org.wildfly.common.Assert; import io.quarkus.bootstrap.classloading.ClassLoaderLimiter; import io.quarkus.test.QuarkusUnitTest; +import io.smallrye.common.constraint.Assert; /** * Let's run some checks to verify that the optimisations we have diff --git a/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateOrmProcessor.java b/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateOrmProcessor.java index 680449827e0e4..e13ac3ef3d543 100644 --- a/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateOrmProcessor.java +++ b/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateOrmProcessor.java @@ -104,6 +104,7 @@ import io.quarkus.deployment.builditem.nativeimage.NativeImageProxyDefinitionBuildItem; import io.quarkus.deployment.builditem.nativeimage.NativeImageResourceBuildItem; import io.quarkus.deployment.builditem.nativeimage.ReflectiveClassBuildItem; +import io.quarkus.deployment.builditem.nativeimage.ReflectiveMethodBuildItem; import io.quarkus.deployment.builditem.nativeimage.ServiceProviderBuildItem; import io.quarkus.deployment.index.IndexingUtil; import io.quarkus.deployment.pkg.steps.NativeOrNativeSourcesBuild; @@ -311,6 +312,7 @@ public void configurationDescriptorBuilding( BuildProducer nativeImageResources, BuildProducer hotDeploymentWatchedFiles, BuildProducer persistenceUnitDescriptors, + BuildProducer reflectiveMethods, List dbKindMetadataBuildItems) { if (!hasEntities(jpaModel)) { @@ -348,7 +350,7 @@ public void configurationDescriptorBuilding( handleHibernateORMWithNoPersistenceXml(hibernateOrmConfig, index, persistenceXmlDescriptors, jdbcDataSources, applicationArchivesBuildItem, launchMode.getLaunchMode(), jpaModel, capabilities, systemProperties, nativeImageResources, hotDeploymentWatchedFiles, persistenceUnitDescriptors, - dbKindMetadataBuildItems); + reflectiveMethods, dbKindMetadataBuildItems); } } @@ -824,6 +826,7 @@ private void handleHibernateORMWithNoPersistenceXml( BuildProducer nativeImageResources, BuildProducer hotDeploymentWatchedFiles, BuildProducer persistenceUnitDescriptors, + BuildProducer reflectiveMethods, List dbKindMetadataBuildItems) { if (!descriptors.isEmpty()) { if (hibernateOrmConfig.isAnyNonPersistenceXmlPropertySet()) { @@ -872,7 +875,7 @@ private void handleHibernateORMWithNoPersistenceXml( jpaModel.getXmlMappings(PersistenceUnitUtil.DEFAULT_PERSISTENCE_UNIT_NAME), jdbcDataSources, applicationArchivesBuildItem, launchMode, capabilities, systemProperties, nativeImageResources, hotDeploymentWatchedFiles, persistenceUnitDescriptors, - storageEngineCollector, dbKindMetadataBuildItems); + reflectiveMethods, storageEngineCollector, dbKindMetadataBuildItems); } else if (!modelClassesAndPackagesForDefaultPersistenceUnit.isEmpty() && (!hibernateOrmConfig.defaultPersistenceUnit().datasource().isPresent() || DataSourceUtil.isDefault(hibernateOrmConfig.defaultPersistenceUnit().datasource().get())) @@ -892,7 +895,7 @@ private void handleHibernateORMWithNoPersistenceXml( jpaModel.getXmlMappings(persistenceUnitEntry.getKey()), jdbcDataSources, applicationArchivesBuildItem, launchMode, capabilities, systemProperties, nativeImageResources, hotDeploymentWatchedFiles, persistenceUnitDescriptors, - storageEngineCollector, dbKindMetadataBuildItems); + reflectiveMethods, 
storageEngineCollector, dbKindMetadataBuildItems); } if (storageEngineCollector.size() > 1) { @@ -915,6 +918,7 @@ private static void producePersistenceUnitDescriptorFromConfig( BuildProducer nativeImageResources, BuildProducer hotDeploymentWatchedFiles, BuildProducer persistenceUnitDescriptors, + BuildProducer reflectiveMethods, Set storageEngineCollector, List dbKindMetadataBuildItems) { Optional jdbcDataSource = findJdbcDataSource(persistenceUnitName, persistenceUnitConfig, @@ -943,7 +947,7 @@ private static void producePersistenceUnitDescriptorFromConfig( MultiTenancyStrategy multiTenancyStrategy = getMultiTenancyStrategy(persistenceUnitConfig.multitenant()); collectDialectConfig(persistenceUnitName, persistenceUnitConfig, dbKindMetadataBuildItems, jdbcDataSource, multiTenancyStrategy, - systemProperties, descriptor.getProperties()::setProperty, storageEngineCollector); + systemProperties, reflectiveMethods, descriptor.getProperties()::setProperty, storageEngineCollector); // Physical Naming Strategy persistenceUnitConfig.physicalNamingStrategy().ifPresent( @@ -1134,6 +1138,7 @@ private static void collectDialectConfig(String persistenceUnitName, List dbKindMetadataBuildItems, Optional jdbcDataSource, MultiTenancyStrategy multiTenancyStrategy, BuildProducer systemProperties, + BuildProducer reflectiveMethods, BiConsumer puPropertiesCollector, Set storageEngineCollector) { Optional explicitDialect = persistenceUnitConfig.dialect().dialect(); Optional dbKind = jdbcDataSource.map(JdbcDataSourceBuildItem::getDbKind); @@ -1210,6 +1215,14 @@ private static void collectDialectConfig(String persistenceUnitName, if (dbProductVersion.isPresent()) { puPropertiesCollector.accept(AvailableSettings.JAKARTA_HBM2DDL_DB_VERSION, dbProductVersion.get()); } + + if ((dbKind.isPresent() && DatabaseKind.isPostgreSQL(dbKind.get()) + || (dialect.isPresent() && dialect.get().toLowerCase(Locale.ROOT).contains("postgres")))) { + // Workaround for https://hibernate.atlassian.net/browse/HHH-19063 + reflectiveMethods.produce(new ReflectiveMethodBuildItem( + "Accessed in org.hibernate.engine.jdbc.env.internal.DefaultSchemaNameResolver.determineAppropriateResolverDelegate", + true, "org.postgresql.jdbc.PgConnection", "getSchema")); + } } private static void collectDialectConfigForPersistenceXml(String persistenceUnitName, diff --git a/extensions/hibernate-orm/deployment/src/main/resources/dev-ui/hibernate-orm-entity-types.js b/extensions/hibernate-orm/deployment/src/main/resources/dev-ui/hibernate-orm-entity-types.js index ddea65e2cba42..d1b5c041ce7fa 100644 --- a/extensions/hibernate-orm/deployment/src/main/resources/dev-ui/hibernate-orm-entity-types.js +++ b/extensions/hibernate-orm/deployment/src/main/resources/dev-ui/hibernate-orm-entity-types.js @@ -1,15 +1,23 @@ -import { LitElement, html, css} from 'lit'; +import { QwcHotReloadElement, html, css} from 'qwc-hot-reload-element'; import { JsonRpc } from 'jsonrpc'; import '@vaadin/icon'; import '@vaadin/button'; import '@vaadin/grid'; +import '@vaadin/progress-bar'; import { columnBodyRenderer } from '@vaadin/grid/lit.js'; +import { notifier } from 'notifier'; -export class HibernateOrmEntityTypesComponent extends LitElement { +export class HibernateOrmEntityTypesComponent extends QwcHotReloadElement { static styles = css` + :host { + display: flex; + padding-left: 10px; + padding-right: 10px; + } .full-height { - height: 100%; + height: 100%; + width: 100%; } `; @@ -19,10 +27,23 @@ export class HibernateOrmEntityTypesComponent extends LitElement { 
_persistenceUnits: {state: true, type: Array} } + constructor() { + super(); + this._persistenceUnits = []; + } + connectedCallback() { super.connectedCallback(); + this.hotReload(); + } + + hotReload(){ this.jsonRpc.getInfo().then(response => { this._persistenceUnits = response.result.persistenceUnits; + }).catch(error => { + console.error("Failed to fetch persistence units:", error); + this._persistenceUnits = []; + notifier.showErrorMessage("Failed to fetch persistence units: " + error, "bottom-start", 30); }); } @@ -30,13 +51,16 @@ export class HibernateOrmEntityTypesComponent extends LitElement { if (this._persistenceUnits) { return this._renderAllPUs(); } else { - return html`Loading...`; + return html`

    +
    Fetching persistence units...
    + +
    `; } } _renderAllPUs() { return this._persistenceUnits.length == 0 - ? html`

    No persistence units were found.

    ` + ? html`

    No persistence units were found. Check again

    ` : html` Persistence Unit diff --git a/extensions/hibernate-orm/deployment/src/main/resources/dev-ui/hibernate-orm-named-queries.js b/extensions/hibernate-orm/deployment/src/main/resources/dev-ui/hibernate-orm-named-queries.js index ed664565c59c8..1a97259d47a5f 100644 --- a/extensions/hibernate-orm/deployment/src/main/resources/dev-ui/hibernate-orm-named-queries.js +++ b/extensions/hibernate-orm/deployment/src/main/resources/dev-ui/hibernate-orm-named-queries.js @@ -1,15 +1,23 @@ -import { LitElement, html, css} from 'lit'; +import { QwcHotReloadElement, html, css} from 'qwc-hot-reload-element'; import { JsonRpc } from 'jsonrpc'; import '@vaadin/icon'; import '@vaadin/button'; import '@vaadin/grid'; +import '@vaadin/progress-bar'; import { columnBodyRenderer } from '@vaadin/grid/lit.js'; +import { notifier } from 'notifier'; -export class HibernateOrmNamedQueriesComponent extends LitElement { +export class HibernateOrmNamedQueriesComponent extends QwcHotReloadElement { static styles = css` + :host { + display: flex; + padding-left: 10px; + padding-right: 10px; + } .full-height { - height: 100%; + height: 100%; + width: 100%; } `; @@ -19,10 +27,23 @@ export class HibernateOrmNamedQueriesComponent extends LitElement { _persistenceUnits: {state: true, type: Array} } + constructor() { + super(); + this._persistenceUnits = []; + } + connectedCallback() { super.connectedCallback(); + this.hotReload(); + } + + hotReload(){ this.jsonRpc.getInfo().then(response => { this._persistenceUnits = response.result.persistenceUnits; + }).catch(error => { + console.error("Failed to fetch persistence units:", error); + this._persistenceUnits = []; + notifier.showErrorMessage("Failed to fetch persistence units: " + error, "bottom-start", 30); }); } @@ -30,13 +51,16 @@ export class HibernateOrmNamedQueriesComponent extends LitElement { if (this._persistenceUnits) { return this._renderAllPUs(); } else { - return html`Loading...`; + return html`
    +
    Fetching persistence units...
    + +
    `; } } _renderAllPUs() { return this._persistenceUnits.length == 0 - ? html`

    No persistence units were found.

    ` + ? html`

    No persistence units were found. Check again

    ` : html` Persistence Unit @@ -59,7 +83,7 @@ export class HibernateOrmNamedQueriesComponent extends LitElement { _renderNamedQueriesTable(pu) { if (pu.namedQueries.length == 0) { - return html`

    No named queries were found.

    ` + return html`

    No named queries were found. Check again

    ` } return html` diff --git a/extensions/hibernate-orm/deployment/src/main/resources/dev-ui/hibernate-orm-persistence-units.js b/extensions/hibernate-orm/deployment/src/main/resources/dev-ui/hibernate-orm-persistence-units.js index 0d6e245238ad1..9c0ef7ef961fc 100644 --- a/extensions/hibernate-orm/deployment/src/main/resources/dev-ui/hibernate-orm-persistence-units.js +++ b/extensions/hibernate-orm/deployment/src/main/resources/dev-ui/hibernate-orm-persistence-units.js @@ -1,19 +1,26 @@ -import { LitElement, html, css} from 'lit'; +import { QwcHotReloadElement, html, css} from 'qwc-hot-reload-element'; import { JsonRpc } from 'jsonrpc'; import '@vaadin/icon'; import '@vaadin/button'; import '@vaadin/grid'; +import '@vaadin/progress-bar'; import { columnBodyRenderer } from '@vaadin/grid/lit.js'; import { notifier } from 'notifier'; import { observeState } from 'lit-element-state'; import { themeState } from 'theme-state'; import '@quarkus-webcomponents/codeblock'; -export class HibernateOrmPersistenceUnitsComponent extends observeState(LitElement) { +export class HibernateOrmPersistenceUnitsComponent extends observeState(QwcHotReloadElement) { static styles = css` + :host { + display: flex; + padding-left: 10px; + padding-right: 10px; + } .full-height { - height: 100%; + height: 100%; + width: 100%; } a.script-heading { display: block; @@ -29,10 +36,23 @@ export class HibernateOrmPersistenceUnitsComponent extends observeState(LitEleme _persistenceUnits: {state: true, type: Array} } + constructor() { + super(); + this._persistenceUnits = []; + } + connectedCallback() { super.connectedCallback(); + this.hotReload(); + } + + hotReload(){ this.jsonRpc.getInfo().then(response => { this._persistenceUnits = response.result.persistenceUnits; + }).catch(error => { + console.error("Failed to fetch persistence units:", error); + this._persistenceUnits = []; + notifier.showErrorMessage("Failed to fetch persistence units: " + error, "bottom-start", 30); }); } @@ -40,13 +60,18 @@ export class HibernateOrmPersistenceUnitsComponent extends observeState(LitEleme if (this._persistenceUnits) { return this._renderAllPUs(); } else { - return html`Loading...`; + return html`
    +
    Fetching persistence units...
    + +
    `; } } _renderAllPUs() { return this._persistenceUnits.length == 0 - ? html`

    No persistence units were found.

    ` + ? html`

    No persistence units were found. + Check again +

    ` : html` Persistence Unit diff --git a/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/JPAFastBootingTest.java b/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/JPAFastBootingTest.java index 4c7ecd850e049..3160135b9f437 100644 --- a/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/JPAFastBootingTest.java +++ b/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/JPAFastBootingTest.java @@ -5,11 +5,11 @@ import org.hibernate.Session; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; -import org.wildfly.common.Assert; import io.quarkus.bootstrap.classloading.ClassLoaderLimiter; import io.quarkus.hibernate.orm.enhancer.Address; import io.quarkus.test.QuarkusUnitTest; +import io.smallrye.common.constraint.Assert; /** * Let's run some checks to verify that the optimisations we have diff --git a/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/stateless/StatelessSessionWithinRequestScopeDisabledTest.java b/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/stateless/StatelessSessionWithinRequestScopeDisabledTest.java new file mode 100644 index 0000000000000..29fd58ab240a3 --- /dev/null +++ b/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/stateless/StatelessSessionWithinRequestScopeDisabledTest.java @@ -0,0 +1,55 @@ +package io.quarkus.hibernate.orm.stateless; + +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import jakarta.inject.Inject; + +import org.hibernate.StatelessSession; +import org.jboss.shrinkwrap.api.asset.EmptyAsset; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.arc.Arc; +import io.quarkus.hibernate.orm.MyEntity; +import io.quarkus.hibernate.orm.naming.PrefixPhysicalNamingStrategy; +import io.quarkus.test.QuarkusUnitTest; + +public class StatelessSessionWithinRequestScopeDisabledTest { + + @RegisterExtension + static QuarkusUnitTest runner = new QuarkusUnitTest() + .withApplicationRoot((jar) -> jar + .addClasses(MyEntity.class, PrefixPhysicalNamingStrategy.class) + .addAsResource(EmptyAsset.INSTANCE, "import.sql")) + .overrideConfigKey("quarkus.hibernate-orm.request-scoped.enabled", "false"); + + @Inject + StatelessSession statelessSession; + + @BeforeEach + public void activateRequestContext() { + Arc.container().requestContext().activate(); + } + + @Test + public void read() { + assertThatThrownBy(() -> statelessSession + .createSelectionQuery("SELECT entity FROM MyEntity entity WHERE name IS NULL", MyEntity.class).getResultCount()) + .hasMessageContaining( + "Cannot use the StatelessSession because no transaction is active"); + } + + @Test + public void write() { + assertThatThrownBy(() -> statelessSession.insert(new MyEntity("john"))) + .hasMessageContaining( + "Cannot use the StatelessSession because no transaction is active"); + } + + @AfterEach + public void terminateRequestContext() { + Arc.container().requestContext().terminate(); + } +} diff --git a/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/stateless/StatelessSessionWithinRequestScopeTest.java b/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/stateless/StatelessSessionWithinRequestScopeTest.java index 04872f3c258b3..be8149e62d9cc 100644 --- 
a/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/stateless/StatelessSessionWithinRequestScopeTest.java +++ b/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/stateless/StatelessSessionWithinRequestScopeTest.java @@ -22,8 +22,7 @@ public class StatelessSessionWithinRequestScopeTest { static QuarkusUnitTest runner = new QuarkusUnitTest() .withApplicationRoot((jar) -> jar .addClasses(MyEntity.class, PrefixPhysicalNamingStrategy.class) - .addAsResource(EmptyAsset.INSTANCE, "import.sql") - .addAsResource("application-physical-naming-strategy.properties", "application.properties")); + .addAsResource(EmptyAsset.INSTANCE, "import.sql")); @Inject StatelessSession statelessSession; @@ -34,9 +33,22 @@ public void activateRequestContext() { } @Test - public void test() throws Exception { - Number result = (Number) statelessSession.createNativeQuery("SELECT COUNT(*) FROM TBL_MYENTITY").getSingleResult(); - assertEquals(0, result.intValue()); + public void read() { + assertEquals(0L, statelessSession + .createSelectionQuery("SELECT entity FROM MyEntity entity WHERE name IS NULL", MyEntity.class) + .getResultCount()); + } + + @Test + public void write() { + assertEquals(0L, statelessSession + .createSelectionQuery("SELECT entity FROM MyEntity entity", MyEntity.class) + .getResultCount()); + // TODO: Contrary to Session, it seems we don't prevent writes on StatelessSessions with no transaction active? + statelessSession.insert(new MyEntity("john")); + assertEquals(1L, statelessSession + .createSelectionQuery("SELECT entity FROM MyEntity entity", MyEntity.class) + .getResultCount()); } @AfterEach diff --git a/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/transaction/SessionWithinRequestScopeDisabledTest.java b/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/transaction/SessionWithinRequestScopeDisabledTest.java new file mode 100644 index 0000000000000..36d7d49762d02 --- /dev/null +++ b/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/transaction/SessionWithinRequestScopeDisabledTest.java @@ -0,0 +1,55 @@ +package io.quarkus.hibernate.orm.transaction; + +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import jakarta.inject.Inject; + +import org.hibernate.Session; +import org.jboss.shrinkwrap.api.asset.EmptyAsset; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.arc.Arc; +import io.quarkus.hibernate.orm.MyEntity; +import io.quarkus.hibernate.orm.naming.PrefixPhysicalNamingStrategy; +import io.quarkus.test.QuarkusUnitTest; + +public class SessionWithinRequestScopeDisabledTest { + + @RegisterExtension + static QuarkusUnitTest runner = new QuarkusUnitTest() + .withApplicationRoot((jar) -> jar + .addClasses(MyEntity.class, PrefixPhysicalNamingStrategy.class) + .addAsResource(EmptyAsset.INSTANCE, "import.sql")) + .overrideConfigKey("quarkus.hibernate-orm.request-scoped.enabled", "false"); + + @Inject + Session session; + + @BeforeEach + public void activateRequestContext() { + Arc.container().requestContext().activate(); + } + + @Test + public void read() { + assertThatThrownBy(() -> session + .createSelectionQuery("SELECT entity FROM MyEntity entity WHERE name IS NULL", MyEntity.class).getResultCount()) + .hasMessageContaining( + "Cannot use the EntityManager/Session because no transaction is active"); + } +
+ @Test + public void write() { + assertThatThrownBy(() -> session.persist(new MyEntity("john"))) + .hasMessageContaining( + "Cannot use the EntityManager/Session because no transaction is active"); + } + + @AfterEach + public void terminateRequestContext() { + Arc.container().requestContext().terminate(); + } +} diff --git a/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/transaction/SessionWithinRequestScopeTest.java b/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/transaction/SessionWithinRequestScopeTest.java new file mode 100644 index 0000000000000..1199fb2506fcb --- /dev/null +++ b/extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/transaction/SessionWithinRequestScopeTest.java @@ -0,0 +1,53 @@ +package io.quarkus.hibernate.orm.transaction; + +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static org.junit.jupiter.api.Assertions.assertEquals; + +import jakarta.inject.Inject; + +import org.hibernate.Session; +import org.jboss.shrinkwrap.api.asset.EmptyAsset; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.arc.Arc; +import io.quarkus.hibernate.orm.MyEntity; +import io.quarkus.hibernate.orm.naming.PrefixPhysicalNamingStrategy; +import io.quarkus.test.QuarkusUnitTest; + +public class SessionWithinRequestScopeTest { + + @RegisterExtension + static QuarkusUnitTest runner = new QuarkusUnitTest() + .withApplicationRoot((jar) -> jar + .addClasses(MyEntity.class, PrefixPhysicalNamingStrategy.class) + .addAsResource(EmptyAsset.INSTANCE, "import.sql")); + + @Inject + Session session; + + @BeforeEach + public void activateRequestContext() { + Arc.container().requestContext().activate(); + } + + @Test + public void read() { + assertEquals(0L, session + .createSelectionQuery("SELECT entity FROM MyEntity entity WHERE name IS NULL", MyEntity.class) + .getResultCount()); + } + + @Test + public void write() { + assertThatThrownBy(() -> session.persist(new MyEntity("john"))) + .hasMessageContaining("Transaction is not active"); + } + + @AfterEach + public void terminateRequestContext() { + Arc.container().requestContext().terminate(); + } +} diff --git a/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/HibernateOrmRuntimeConfig.java b/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/HibernateOrmRuntimeConfig.java index a7410b5a49a95..7b87a8f96158e 100644 --- a/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/HibernateOrmRuntimeConfig.java +++ b/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/HibernateOrmRuntimeConfig.java @@ -6,7 +6,9 @@ import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefault; import io.smallrye.config.WithDefaults; +import io.smallrye.config.WithName; import io.smallrye.config.WithParentName; import io.smallrye.config.WithUnnamedKey; @@ -23,6 +25,19 @@ public interface HibernateOrmRuntimeConfig { @ConfigDocMapKey("persistence-unit-name") Map persistenceUnits(); + /** + * Enable or disable access to a Hibernate ORM `EntityManager`/`Session`/`StatelessSession` + * *when no transaction is active* but a request scope is. + * + * When enabled, the corresponding sessions will be read-only. 
+ * + * Defaults to enabled for backwards compatibility, but disabling this is recommended, + * to avoid inconsistent results caused by queries running outside of transactions. + */ + @WithName("request-scoped.enabled") + @WithDefault("true") + boolean requestScopedSessionEnabled(); + static String extensionPropertyKey(String radical) { return "quarkus.hibernate-orm." + radical; } diff --git a/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/JPAConfig.java b/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/JPAConfig.java index fd55b3440d127..dec877dab0338 100644 --- a/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/JPAConfig.java +++ b/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/JPAConfig.java @@ -26,6 +26,7 @@ public class JPAConfig { private final Map persistenceUnits = new HashMap<>(); private final Set deactivatedPersistenceUnitNames = new HashSet<>(); + private final boolean requestScopedSessionEnabled; @Inject public JPAConfig(HibernateOrmRuntimeConfig hibernateOrmRuntimeConfig) { @@ -40,6 +41,7 @@ public JPAConfig(HibernateOrmRuntimeConfig hibernateOrmRuntimeConfig) { persistenceUnits.put(puName, new LazyPersistenceUnit(puName)); } } + this.requestScopedSessionEnabled = hibernateOrmRuntimeConfig.requestScopedSessionEnabled(); } void startAll() { @@ -120,6 +122,13 @@ public Set getDeactivatedPersistenceUnitNames() { return deactivatedPersistenceUnitNames; } + /** + * Returns whether request scoped sessions are enabled + */ + public boolean getRequestScopedSessionEnabled() { + return this.requestScopedSessionEnabled; + } + public static class Destroyer implements BeanDestroyer { @Override public void destroy(JPAConfig instance, CreationalContext creationalContext, Map params) { diff --git a/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/TransactionSessions.java b/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/TransactionSessions.java index 6d7bea5a538b0..246b8345de3d3 100644 --- a/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/TransactionSessions.java +++ b/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/TransactionSessions.java @@ -45,7 +45,7 @@ public Session getSession(String unitName) { return sessions.computeIfAbsent(unitName, (un) -> new TransactionScopedSession( getTransactionManager(), getTransactionSynchronizationRegistry(), jpaConfig.getEntityManagerFactory(un).unwrap(SessionFactory.class), un, - requestScopedSession)); + jpaConfig.getRequestScopedSessionEnabled(), requestScopedSession)); } public StatelessSession getStatelessSession(String unitName) { @@ -56,7 +56,7 @@ public StatelessSession getStatelessSession(String unitName) { return staleSessions.computeIfAbsent(unitName, (un) -> new TransactionScopedStatelessSession( getTransactionManager(), getTransactionSynchronizationRegistry(), jpaConfig.getEntityManagerFactory(un).unwrap(SessionFactory.class), un, - requestScopedStatelessSession)); + jpaConfig.getRequestScopedSessionEnabled(), requestScopedStatelessSession)); } private TransactionManager getTransactionManager() { diff --git a/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/session/TransactionScopedSession.java b/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/session/TransactionScopedSession.java index 5d616754d4a22..415e93fcf119d
100644 --- a/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/session/TransactionScopedSession.java +++ b/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/session/TransactionScopedSession.java @@ -53,6 +53,7 @@ import org.hibernate.stat.SessionStatistics; import io.quarkus.arc.Arc; +import io.quarkus.hibernate.orm.runtime.HibernateOrmRuntimeConfig; import io.quarkus.hibernate.orm.runtime.RequestScopedSessionHolder; import io.quarkus.runtime.BlockingOperationControl; import io.quarkus.runtime.BlockingOperationNotAllowedException; @@ -67,12 +68,14 @@ public class TransactionScopedSession implements Session { private final JTASessionOpener jtaSessionOpener; private final String unitName; private final String sessionKey; + private final boolean requestScopedSessionEnabled; private final Instance requestScopedSessions; public TransactionScopedSession(TransactionManager transactionManager, TransactionSynchronizationRegistry transactionSynchronizationRegistry, SessionFactory sessionFactory, String unitName, + boolean requestScopedSessionEnabled, Instance requestScopedSessions) { this.transactionManager = transactionManager; this.transactionSynchronizationRegistry = transactionSynchronizationRegistry; @@ -80,6 +83,7 @@ public TransactionScopedSession(TransactionManager transactionManager, this.jtaSessionOpener = JTASessionOpener.create(sessionFactory); this.unitName = unitName; this.sessionKey = this.getClass().getSimpleName() + "-" + unitName; + this.requestScopedSessionEnabled = requestScopedSessionEnabled; this.requestScopedSessions = requestScopedSessions; } @@ -100,15 +104,23 @@ SessionResult acquireSession() { // - org.hibernate.internal.SessionImpl.beforeTransactionCompletion // - org.hibernate.internal.SessionImpl.afterTransactionCompletion return new SessionResult(newSession, false, true); - } else if (Arc.container().requestContext().isActive()) { - RequestScopedSessionHolder requestScopedSessions = this.requestScopedSessions.get(); - return new SessionResult(requestScopedSessions.getOrCreateSession(unitName, sessionFactory), - false, false); + } else if (requestScopedSessionEnabled) { + if (Arc.container().requestContext().isActive()) { + RequestScopedSessionHolder requestScopedSessions = this.requestScopedSessions.get(); + return new SessionResult(requestScopedSessions.getOrCreateSession(unitName, sessionFactory), + false, false); + } else { + throw new ContextNotActiveException( + "Cannot use the EntityManager/Session because neither a transaction nor a CDI request context is active." + + " Consider adding @Transactional to your method to automatically activate a transaction," + + " or @ActivateRequestContext if you have valid reasons not to use transactions."); + } } else { throw new ContextNotActiveException( - "Cannot use the EntityManager/Session because neither a transaction nor a CDI request context is active." + "Cannot use the EntityManager/Session because no transaction is active." 
+ " Consider adding @Transactional to your method to automatically activate a transaction," - + " or @ActivateRequestContext if you have valid reasons not to use transactions."); + + " or set '" + HibernateOrmRuntimeConfig.extensionPropertyKey("request-scoped.enabled") + + "' to 'true' if you have valid reasons not to use transactions."); } } diff --git a/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/session/TransactionScopedStatelessSession.java b/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/session/TransactionScopedStatelessSession.java index 9509bf924c249..6830a57e069ae 100644 --- a/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/session/TransactionScopedStatelessSession.java +++ b/extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/session/TransactionScopedStatelessSession.java @@ -33,6 +33,7 @@ import org.hibernate.query.criteria.JpaCriteriaInsertSelect; import io.quarkus.arc.Arc; +import io.quarkus.hibernate.orm.runtime.HibernateOrmRuntimeConfig; import io.quarkus.hibernate.orm.runtime.RequestScopedStatelessSessionHolder; import io.quarkus.runtime.BlockingOperationControl; import io.quarkus.runtime.BlockingOperationNotAllowedException; @@ -47,12 +48,14 @@ public class TransactionScopedStatelessSession implements StatelessSession { private final JTAStatelessSessionOpener jtaSessionOpener; private final String unitName; private final String sessionKey; + private final boolean requestScopedSessionEnabled; private final Instance requestScopedSessions; public TransactionScopedStatelessSession(TransactionManager transactionManager, TransactionSynchronizationRegistry transactionSynchronizationRegistry, SessionFactory sessionFactory, String unitName, + boolean requestScopedSessionEnabled, Instance requestScopedSessions) { this.transactionManager = transactionManager; this.transactionSynchronizationRegistry = transactionSynchronizationRegistry; @@ -60,6 +63,7 @@ public TransactionScopedStatelessSession(TransactionManager transactionManager, this.jtaSessionOpener = JTAStatelessSessionOpener.create(sessionFactory); this.unitName = unitName; this.sessionKey = this.getClass().getSimpleName() + "-" + unitName; + this.requestScopedSessionEnabled = requestScopedSessionEnabled; this.requestScopedSessions = requestScopedSessions; } @@ -74,15 +78,23 @@ SessionResult acquireSession() { // The session has automatically joined the JTA transaction when it was constructed. transactionSynchronizationRegistry.putResource(sessionKey, newSession); return new SessionResult(newSession, false, true); - } else if (Arc.container().requestContext().isActive()) { - RequestScopedStatelessSessionHolder requestScopedSessions = this.requestScopedSessions.get(); - return new SessionResult(requestScopedSessions.getOrCreateSession(unitName, sessionFactory), - false, false); + } else if (requestScopedSessionEnabled) { + if (Arc.container().requestContext().isActive()) { + RequestScopedStatelessSessionHolder requestScopedSessions = this.requestScopedSessions.get(); + return new SessionResult(requestScopedSessions.getOrCreateSession(unitName, sessionFactory), + false, false); + } else { + throw new ContextNotActiveException( + "Cannot use the StatelessSession because neither a transaction nor a CDI request context is active." 
+ + " Consider adding @Transactional to your method to automatically activate a transaction," + + " or @ActivateRequestContext if you have valid reasons not to use transactions."); + } } else { throw new ContextNotActiveException( - "Cannot use the StatelessSession because neither a transaction nor a CDI request context is active." + "Cannot use the StatelessSession because no transaction is active." + " Consider adding @Transactional to your method to automatically activate a transaction," - + " or @ActivateRequestContext if you have valid reasons not to use transactions."); + + " or set '" + HibernateOrmRuntimeConfig.extensionPropertyKey("request-scoped.enabled") + + "' to 'true' if you have valid reasons not to use transactions."); } } diff --git a/extensions/infinispan-cache/deployment/pom.xml b/extensions/infinispan-cache/deployment/pom.xml index 5254fcbc79e3a..d75ddb30bb259 100644 --- a/extensions/infinispan-cache/deployment/pom.xml +++ b/extensions/infinispan-cache/deployment/pom.xml @@ -57,9 +57,6 @@ ${project.version}
    - - -AlegacyConfigRoot=true - diff --git a/extensions/infinispan-cache/deployment/src/main/java/io/quarkus/cache/infinispan/deployment/InfinispanCacheProcessor.java b/extensions/infinispan-cache/deployment/src/main/java/io/quarkus/cache/infinispan/deployment/InfinispanCacheProcessor.java index 02ce9ec545d96..85b297f443ce7 100644 --- a/extensions/infinispan-cache/deployment/src/main/java/io/quarkus/cache/infinispan/deployment/InfinispanCacheProcessor.java +++ b/extensions/infinispan-cache/deployment/src/main/java/io/quarkus/cache/infinispan/deployment/InfinispanCacheProcessor.java @@ -41,7 +41,7 @@ UnremovableBeanBuildItem ensureBeanLookupAvailable() { @BuildStep InfinispanClientNameBuildItem requestedInfinispanClientBuildItem(InfinispanCachesBuildTimeConfig buildConfig) { return new InfinispanClientNameBuildItem( - buildConfig.clientName.orElse(InfinispanClientUtil.DEFAULT_INFINISPAN_CLIENT_NAME)); + buildConfig.clientName().orElse(InfinispanClientUtil.DEFAULT_INFINISPAN_CLIENT_NAME)); } @BuildStep diff --git a/extensions/infinispan-cache/runtime/pom.xml b/extensions/infinispan-cache/runtime/pom.xml index 979be55c201a1..8c500552df19a 100644 --- a/extensions/infinispan-cache/runtime/pom.xml +++ b/extensions/infinispan-cache/runtime/pom.xml @@ -42,9 +42,6 @@ ${project.version}
    - - -AlegacyConfigRoot=true - diff --git a/extensions/infinispan-cache/runtime/src/main/java/io/quarkus/cache/infinispan/runtime/InfinispanCacheBuildRecorder.java b/extensions/infinispan-cache/runtime/src/main/java/io/quarkus/cache/infinispan/runtime/InfinispanCacheBuildRecorder.java index 19ad5ace35291..988fcd17dfbf3 100644 --- a/extensions/infinispan-cache/runtime/src/main/java/io/quarkus/cache/infinispan/runtime/InfinispanCacheBuildRecorder.java +++ b/extensions/infinispan-cache/runtime/src/main/java/io/quarkus/cache/infinispan/runtime/InfinispanCacheBuildRecorder.java @@ -57,7 +57,7 @@ public CacheManager get() { cacheInfo.name, cacheInfo.lifespan, cacheInfo.maxIdle); } - InfinispanCacheImpl cache = new InfinispanCacheImpl(cacheInfo, buildConfig.clientName); + InfinispanCacheImpl cache = new InfinispanCacheImpl(cacheInfo, buildConfig.clientName()); caches.put(cacheInfo.name, cache); } return new CacheManagerImpl(caches); diff --git a/extensions/infinispan-cache/runtime/src/main/java/io/quarkus/cache/infinispan/runtime/InfinispanCacheInfoBuilder.java b/extensions/infinispan-cache/runtime/src/main/java/io/quarkus/cache/infinispan/runtime/InfinispanCacheInfoBuilder.java index 7519e082563a7..e35de3fc99005 100644 --- a/extensions/infinispan-cache/runtime/src/main/java/io/quarkus/cache/infinispan/runtime/InfinispanCacheInfoBuilder.java +++ b/extensions/infinispan-cache/runtime/src/main/java/io/quarkus/cache/infinispan/runtime/InfinispanCacheInfoBuilder.java @@ -19,19 +19,19 @@ public static Set build(Set cacheNames, InfinispanC InfinispanCacheInfo cacheInfo = new InfinispanCacheInfo(); cacheInfo.name = cacheName; - InfinispanCacheRuntimeConfig defaultRuntimeConfig = runtimeConfig.defaultConfig; - InfinispanCacheRuntimeConfig namedRuntimeConfig = runtimeConfig.cachesConfig.get(cacheInfo.name); + InfinispanCacheRuntimeConfig defaultRuntimeConfig = runtimeConfig.defaultConfig(); + InfinispanCacheRuntimeConfig namedRuntimeConfig = runtimeConfig.cachesConfig().get(cacheInfo.name); - if (namedRuntimeConfig != null && namedRuntimeConfig.lifespan.isPresent()) { - cacheInfo.lifespan = namedRuntimeConfig.lifespan; - } else if (defaultRuntimeConfig.lifespan.isPresent()) { - cacheInfo.lifespan = defaultRuntimeConfig.lifespan; + if (namedRuntimeConfig != null && namedRuntimeConfig.lifespan().isPresent()) { + cacheInfo.lifespan = namedRuntimeConfig.lifespan(); + } else if (defaultRuntimeConfig.lifespan().isPresent()) { + cacheInfo.lifespan = defaultRuntimeConfig.lifespan(); } - if (namedRuntimeConfig != null && namedRuntimeConfig.maxIdle.isPresent()) { - cacheInfo.maxIdle = namedRuntimeConfig.maxIdle; - } else if (defaultRuntimeConfig.maxIdle.isPresent()) { - cacheInfo.maxIdle = defaultRuntimeConfig.maxIdle; + if (namedRuntimeConfig != null && namedRuntimeConfig.maxIdle().isPresent()) { + cacheInfo.maxIdle = namedRuntimeConfig.maxIdle(); + } else if (defaultRuntimeConfig.maxIdle().isPresent()) { + cacheInfo.maxIdle = defaultRuntimeConfig.maxIdle(); } result.add(cacheInfo); diff --git a/extensions/infinispan-cache/runtime/src/main/java/io/quarkus/cache/infinispan/runtime/InfinispanCacheRuntimeConfig.java b/extensions/infinispan-cache/runtime/src/main/java/io/quarkus/cache/infinispan/runtime/InfinispanCacheRuntimeConfig.java index 51b22ff1ed9c7..acf6d03bb2209 100644 --- a/extensions/infinispan-cache/runtime/src/main/java/io/quarkus/cache/infinispan/runtime/InfinispanCacheRuntimeConfig.java +++ 
b/extensions/infinispan-cache/runtime/src/main/java/io/quarkus/cache/infinispan/runtime/InfinispanCacheRuntimeConfig.java @@ -4,20 +4,17 @@ import java.util.Optional; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; @ConfigGroup -public class InfinispanCacheRuntimeConfig { +public interface InfinispanCacheRuntimeConfig { /** * The default lifespan of the item stored in the cache */ - @ConfigItem - public Optional lifespan; + Optional lifespan(); /** * The default max-idle of the item stored in the cache */ - @ConfigItem - public Optional maxIdle; + Optional maxIdle(); } diff --git a/extensions/infinispan-cache/runtime/src/main/java/io/quarkus/cache/infinispan/runtime/InfinispanCachesBuildTimeConfig.java b/extensions/infinispan-cache/runtime/src/main/java/io/quarkus/cache/infinispan/runtime/InfinispanCachesBuildTimeConfig.java index 66596bc5170a6..02e161f3b34af 100644 --- a/extensions/infinispan-cache/runtime/src/main/java/io/quarkus/cache/infinispan/runtime/InfinispanCachesBuildTimeConfig.java +++ b/extensions/infinispan-cache/runtime/src/main/java/io/quarkus/cache/infinispan/runtime/InfinispanCachesBuildTimeConfig.java @@ -4,16 +4,16 @@ import java.util.Optional; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; -@ConfigRoot(phase = BUILD_AND_RUN_TIME_FIXED, name = "cache.infinispan") -public class InfinispanCachesBuildTimeConfig { +@ConfigRoot(phase = BUILD_AND_RUN_TIME_FIXED) +@ConfigMapping(prefix = "quarkus.cache.infinispan") +public interface InfinispanCachesBuildTimeConfig { /** * The name of the named Infinispan client to be used for communicating with Infinispan. * If not set, use the default Infinispan client. 
*/ - @ConfigItem - public Optional clientName; + Optional clientName(); } diff --git a/extensions/infinispan-cache/runtime/src/main/java/io/quarkus/cache/infinispan/runtime/InfinispanCachesConfig.java b/extensions/infinispan-cache/runtime/src/main/java/io/quarkus/cache/infinispan/runtime/InfinispanCachesConfig.java index c2533936392c8..31837b32840f1 100644 --- a/extensions/infinispan-cache/runtime/src/main/java/io/quarkus/cache/infinispan/runtime/InfinispanCachesConfig.java +++ b/extensions/infinispan-cache/runtime/src/main/java/io/quarkus/cache/infinispan/runtime/InfinispanCachesConfig.java @@ -6,24 +6,26 @@ import io.quarkus.runtime.annotations.ConfigDocMapKey; import io.quarkus.runtime.annotations.ConfigDocSection; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithParentName; -@ConfigRoot(phase = RUN_TIME, name = "cache.infinispan") -public class InfinispanCachesConfig { +@ConfigRoot(phase = RUN_TIME) +@ConfigMapping(prefix = "quarkus.cache.infinispan") +public interface InfinispanCachesConfig { /** * Default configuration applied to all Infinispan caches (lowest precedence) */ - @ConfigItem(name = ConfigItem.PARENT) - public InfinispanCacheRuntimeConfig defaultConfig; + @WithParentName + InfinispanCacheRuntimeConfig defaultConfig(); /** * Additional configuration applied to a specific Infinispan cache (highest precedence) */ - @ConfigItem(name = ConfigItem.PARENT) + @WithParentName @ConfigDocMapKey("cache-name") @ConfigDocSection - Map cachesConfig; + Map cachesConfig(); } diff --git a/extensions/infinispan-client/deployment/pom.xml b/extensions/infinispan-client/deployment/pom.xml index d015e1389bbc9..31d373777421c 100644 --- a/extensions/infinispan-client/deployment/pom.xml +++ b/extensions/infinispan-client/deployment/pom.xml @@ -111,9 +111,6 @@ ${project.version}
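The Infinispan cache and client config roots in this patch all follow the same mechanical migration: class-based @ConfigRoot holders with public @ConfigItem fields become @ConfigMapping interfaces whose options are accessor methods, and every call site switches from field access to method calls (for example buildConfig.clientName to buildConfig.clientName()). A generic sketch of that shape, using a hypothetical quarkus.example prefix rather than any key from this patch:

import java.util.Optional;

import io.quarkus.runtime.annotations.ConfigPhase;
import io.quarkus.runtime.annotations.ConfigRoot;
import io.smallrye.config.ConfigMapping;
import io.smallrye.config.WithDefault;

// Previously this would have been a class with public fields, e.g.
//   @ConfigRoot(phase = ConfigPhase.RUN_TIME, name = "example")
//   public class ExampleConfig {
//       @ConfigItem
//       public Optional<String> clientName;
//   }
// With @ConfigMapping the prefix moves onto the annotation, each option becomes
// a method, and defaults are expressed with @WithDefault instead of field values.
@ConfigRoot(phase = ConfigPhase.RUN_TIME)
@ConfigMapping(prefix = "quarkus.example")
public interface ExampleConfig {

    /** The name of the client to use; unset means the default client. */
    Optional<String> clientName();

    /** A numeric option with a default value. */
    @WithDefault("0")
    int maxEntries();
}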
    - - -AlegacyConfigRoot=true - diff --git a/extensions/infinispan-client/deployment/src/main/java/io/quarkus/infinispan/client/deployment/InfinispanClientProcessor.java b/extensions/infinispan-client/deployment/src/main/java/io/quarkus/infinispan/client/deployment/InfinispanClientProcessor.java index 162a9bbb3c5cc..707cd73ca97c3 100644 --- a/extensions/infinispan-client/deployment/src/main/java/io/quarkus/infinispan/client/deployment/InfinispanClientProcessor.java +++ b/extensions/infinispan-client/deployment/src/main/java/io/quarkus/infinispan/client/deployment/InfinispanClientProcessor.java @@ -144,10 +144,10 @@ public void handleProtoStreamRequirements(BuildProducer pr Properties properties = new Properties(); Map marshallers = new HashMap<>(); initMarshaller(InfinispanClientUtil.DEFAULT_INFINISPAN_CLIENT_NAME, - infinispanClientsBuildTimeConfig.defaultInfinispanClient.marshallerClass, marshallers); + infinispanClientsBuildTimeConfig.defaultInfinispanClient().marshallerClass(), marshallers); for (String clientName : infinispanClientsBuildTimeConfig.getInfinispanNamedClientConfigNames()) { initMarshaller(clientName, - infinispanClientsBuildTimeConfig.getInfinispanClientBuildTimeConfig(clientName).marshallerClass, + infinispanClientsBuildTimeConfig.getInfinispanClientBuildTimeConfig(clientName).marshallerClass(), marshallers); } protostreamPropertiesBuildItem.produce(new MarshallingBuildItem(properties, marshallers)); @@ -201,8 +201,8 @@ InfinispanPropertiesBuildItem setup(ApplicationArchivesBuildItem applicationArch nativeImageSecurityProviders.produce(new NativeImageSecurityProviderBuildItem(SASL_SECURITY_PROVIDER)); // add per cache file config - handlePerCacheFileConfig(infinispanClientsBuildTimeConfig.defaultInfinispanClient, resourceBuildItem, hotDeployment); - for (InfinispanClientBuildTimeConfig config : infinispanClientsBuildTimeConfig.namedInfinispanClients.values()) { + handlePerCacheFileConfig(infinispanClientsBuildTimeConfig.defaultInfinispanClient(), resourceBuildItem, hotDeployment); + for (InfinispanClientBuildTimeConfig config : infinispanClientsBuildTimeConfig.namedInfinispanClients().values()) { handlePerCacheFileConfig(config, resourceBuildItem, hotDeployment); } @@ -307,6 +307,7 @@ InfinispanPropertiesBuildItem setup(ApplicationArchivesBuildItem applicationArch String[] elytronClasses = new String[] { "org.wildfly.security.sasl.plain.PlainSaslClientFactory", "org.wildfly.security.sasl.scram.ScramSaslClientFactory", + "org.wildfly.security.sasl.digest.DigestClientFactory", "org.wildfly.security.credential.BearerTokenCredential", "org.wildfly.security.credential.GSSKerberosCredential", "org.wildfly.security.credential.KeyPairCredential", @@ -314,8 +315,8 @@ InfinispanPropertiesBuildItem setup(ApplicationArchivesBuildItem applicationArch "org.wildfly.security.credential.PublicKeyCredential", "org.wildfly.security.credential.SecretKeyCredential", "org.wildfly.security.credential.SSHCredential", - "org.wildfly.security.credential.X509CertificateChainPrivateCredential", - "org.wildfly.security.credential.X509CertificateChainPublicCredential" + "org.wildfly.security.digest.SHA512_256MessageDigest", + "org.wildfly.security.credential.X509CertificateChainPrivateCredential" }; reflectiveClass.produce(ReflectiveClassBuildItem.builder(elytronClasses).reason(getClass().getName()).build()); @@ -325,10 +326,10 @@ InfinispanPropertiesBuildItem setup(ApplicationArchivesBuildItem applicationArch private void handlePerCacheFileConfig(InfinispanClientBuildTimeConfig config, 
BuildProducer resourceBuildItem, BuildProducer hotDeployment) { - for (InfinispanClientBuildTimeConfig.RemoteCacheConfig cacheConfig : config.cache.values()) { - if (cacheConfig.configurationResource.isPresent()) { - resourceBuildItem.produce(new NativeImageResourceBuildItem(cacheConfig.configurationResource.get())); - hotDeployment.produce(new HotDeploymentWatchedFileBuildItem(cacheConfig.configurationResource.get())); + for (InfinispanClientBuildTimeConfig.RemoteCacheConfig cacheConfig : config.cache().values()) { + if (cacheConfig.configurationResource().isPresent()) { + resourceBuildItem.produce(new NativeImageResourceBuildItem(cacheConfig.configurationResource().get())); + hotDeployment.produce(new HotDeploymentWatchedFileBuildItem(cacheConfig.configurationResource().get())); } } } @@ -339,8 +340,9 @@ BeanContainerListenerBuildItem build(InfinispanRecorder recorder, InfinispanProp Map propertiesMap = builderBuildItem.getProperties(); addMaxEntries(DEFAULT_INFINISPAN_CLIENT_NAME, - infinispanClientsBuildTimeConfig.defaultInfinispanClient, propertiesMap.get(DEFAULT_INFINISPAN_CLIENT_NAME)); - for (Map.Entry config : infinispanClientsBuildTimeConfig.namedInfinispanClients + infinispanClientsBuildTimeConfig.defaultInfinispanClient(), propertiesMap.get(DEFAULT_INFINISPAN_CLIENT_NAME)); + for (Map.Entry config : infinispanClientsBuildTimeConfig + .namedInfinispanClients() .entrySet()) { addMaxEntries(config.getKey(), config.getValue(), propertiesMap.get(config.getKey())); } @@ -382,8 +384,9 @@ private Set infinispanClientNames(CombinedIndexBuildItem indexBuildItem, clientNames.add(annotation.value().asString()); } // dev mode client name for default - 0 config - if (infinispanClientsBuildTimeConfig.defaultInfinispanClient.devService.devservices.enabled - && infinispanClientsBuildTimeConfig.defaultInfinispanClient.devService.devservices.createDefaultClient) { + if (infinispanClientsBuildTimeConfig.defaultInfinispanClient().devservices().devservices().enabled() + && infinispanClientsBuildTimeConfig.defaultInfinispanClient().devservices().devservices() + .createDefaultClient()) { clientNames.add(DEFAULT_INFINISPAN_CLIENT_NAME); } @@ -453,11 +456,11 @@ private void addMaxEntries(String clientName, InfinispanClientBuildTimeConfig co log.debugf("Applying micro profile configuration: %s", config); } // Only write the entries if it is a valid number and it isn't already configured - if (config.nearCacheMaxEntries > 0 && !properties.containsKey(ConfigurationProperties.NEAR_CACHE_MODE)) { + if (config.nearCacheMaxEntries() > 0 && !properties.containsKey(ConfigurationProperties.NEAR_CACHE_MODE)) { // This is already empty so no need for putIfAbsent if (InfinispanClientUtil.isDefault(clientName)) { properties.put(ConfigurationProperties.NEAR_CACHE_MODE, NearCacheMode.INVALIDATED.toString()); - properties.putIfAbsent(ConfigurationProperties.NEAR_CACHE_MAX_ENTRIES, config.nearCacheMaxEntries); + properties.putIfAbsent(ConfigurationProperties.NEAR_CACHE_MAX_ENTRIES, config.nearCacheMaxEntries()); } } } @@ -471,7 +474,7 @@ UnremovableBeanBuildItem ensureBeanLookupAvailable() { @BuildStep HealthBuildItem addHealthCheck(InfinispanClientsBuildTimeConfig buildTimeConfig) { return new HealthBuildItem("io.quarkus.infinispan.client.runtime.health.InfinispanHealthCheck", - buildTimeConfig.healthEnabled); + buildTimeConfig.healthEnabled()); } @BuildStep diff --git a/extensions/infinispan-client/deployment/src/main/java/io/quarkus/infinispan/client/deployment/devservices/InfinispanDevServiceProcessor.java 
b/extensions/infinispan-client/deployment/src/main/java/io/quarkus/infinispan/client/deployment/devservices/InfinispanDevServiceProcessor.java index 562ba2083eb8c..bf13b719bc387 100644 --- a/extensions/infinispan-client/deployment/src/main/java/io/quarkus/infinispan/client/deployment/devservices/InfinispanDevServiceProcessor.java +++ b/extensions/infinispan-client/deployment/src/main/java/io/quarkus/infinispan/client/deployment/devservices/InfinispanDevServiceProcessor.java @@ -83,7 +83,7 @@ public List startInfinispanContainers(LaunchModeBuil InfinispanClientBuildTimeConfig.DevServiceConfiguration devServiceConfig = capturedDevServicesConfiguration.get( devServiceName); restartRequired = restartRequired - || !config.getInfinispanClientBuildTimeConfig(devServiceName).devService.equals( + || !config.getInfinispanClientBuildTimeConfig(devServiceName).devservices().equals( devServiceConfig); } @@ -106,9 +106,9 @@ public List startInfinispanContainers(LaunchModeBuil capturedDevServicesConfiguration = new HashMap<>(); Map newDevServices = new HashMap<>(); capturedDevServicesConfiguration.put(InfinispanClientUtil.DEFAULT_INFINISPAN_CLIENT_NAME, - config.defaultInfinispanClient.devService); - for (Map.Entry entry : config.namedInfinispanClients.entrySet()) { - capturedDevServicesConfiguration.put(entry.getKey(), entry.getValue().devService); + config.defaultInfinispanClient().devservices()); + for (Map.Entry entry : config.namedInfinispanClients().entrySet()) { + capturedDevServicesConfiguration.put(entry.getKey(), entry.getValue().devservices()); } StartupLogCompressor compressor = new StartupLogCompressor( @@ -116,11 +116,11 @@ public List startInfinispanContainers(LaunchModeBuil loggingSetupBuildItem); runInfinispanDevService(InfinispanClientUtil.DEFAULT_INFINISPAN_CLIENT_NAME, launchMode, - compressor, dockerStatusBuildItem, devServicesSharedNetworkBuildItem, config.defaultInfinispanClient, + compressor, dockerStatusBuildItem, devServicesSharedNetworkBuildItem, config.defaultInfinispanClient(), devServicesConfig, newDevServices, properties); - config.namedInfinispanClients.entrySet().forEach(dServ -> { + config.namedInfinispanClients().entrySet().forEach(dServ -> { runInfinispanDevService(dServ.getKey(), launchMode, compressor, dockerStatusBuildItem, devServicesSharedNetworkBuildItem, dServ.getValue(), devServicesConfig, @@ -161,7 +161,7 @@ private void runInfinispanDevService(String clientName, Map properties) { try { - InfinispanDevServicesConfig namedDevServiceConfig = config.devService.devservices; + InfinispanDevServicesConfig namedDevServiceConfig = config.devservices().devservices(); RunningDevService devService = startContainer(clientName, dockerStatusBuildItem, namedDevServiceConfig, launchMode.getLaunchMode(), !devServicesSharedNetworkBuildItem.isEmpty(), devServicesConfig.timeout(), properties); @@ -182,7 +182,7 @@ private void runInfinispanDevService(String clientName, private RunningDevService startContainer(String clientName, DockerStatusBuildItem dockerStatusBuildItem, InfinispanDevServicesConfig devServicesConfig, LaunchMode launchMode, boolean useSharedNetwork, Optional timeout, Map properties) { - if (!devServicesConfig.enabled) { + if (!devServicesConfig.enabled()) { // explicitly disabled log.debug("Not starting Dev Services for Infinispan as it has been disabled in the config"); return null; @@ -211,7 +211,7 @@ private RunningDevService startContainer(String clientName, DockerStatusBuildIte launchMode, useSharedNetwork); 
timeout.ifPresent(infinispanContainer::withStartupTimeout); - infinispanContainer.withEnv(devServicesConfig.containerEnv); + infinispanContainer.withEnv(devServicesConfig.containerEnv()); infinispanContainer.start(); return getRunningDevService(clientName, infinispanContainer.getContainerId(), infinispanContainer::close, @@ -219,7 +219,8 @@ private RunningDevService startContainer(String clientName, DockerStatusBuildIte infinispanContainer.getUser(), infinispanContainer.getPassword(), properties); }; - return infinispanContainerLocator.locateContainer(devServicesConfig.serviceName, devServicesConfig.shared, launchMode) + return infinispanContainerLocator + .locateContainer(devServicesConfig.serviceName(), devServicesConfig.shared(), launchMode) .map(containerAddress -> getRunningDevService(clientName, containerAddress.getId(), null, containerAddress.getUrl(), DEFAULT_USERNAME, DEFAULT_PASSWORD, properties)) // TODO can this be always right ? .orElseGet(infinispanServerSupplier); @@ -245,10 +246,10 @@ private String runningServiceName(String clientName) { private String getConfigPrefix(String name) { if (name.equals(InfinispanClientUtil.DEFAULT_INFINISPAN_CLIENT_NAME)) { - return QUARKUS + InfinispanClientUtil.INFINISPAN_CLIENT_CONFIG_ROOT_NAME + DOT; + return InfinispanClientUtil.INFINISPAN_CLIENT_CONFIG_MAPPING_PREFIX + DOT; } - return QUARKUS + InfinispanClientUtil.INFINISPAN_CLIENT_CONFIG_ROOT_NAME + DOT + name + DOT; + return InfinispanClientUtil.INFINISPAN_CLIENT_CONFIG_MAPPING_PREFIX + DOT + name + DOT; } private static class QuarkusInfinispanContainer extends InfinispanContainer { @@ -259,11 +260,11 @@ private static class QuarkusInfinispanContainer extends InfinispanContainer { public QuarkusInfinispanContainer(String clientName, InfinispanDevServicesConfig config, LaunchMode launchMode, boolean useSharedNetwork) { - super(config.imageName.orElse(IMAGE_BASENAME + ":" + Version.getUnbrandedVersion())); - this.fixedExposedPort = config.port; + super(config.imageName().orElse(IMAGE_BASENAME + ":" + Version.getUnbrandedVersion())); + this.fixedExposedPort = config.port(); this.useSharedNetwork = useSharedNetwork; if (launchMode == DEVELOPMENT) { - String label = config.serviceName; + String label = config.serviceName(); if (InfinispanClientUtil.DEFAULT_INFINISPAN_DEV_SERVICE_NAME.equals(label) && !InfinispanClientUtil.isDefault(clientName)) { // Adds the client name suffix to create a different service name in named connections @@ -274,16 +275,16 @@ public QuarkusInfinispanContainer(String clientName, InfinispanDevServicesConfig withUser(DEFAULT_USERNAME); withPassword(InfinispanDevServiceProcessor.DEFAULT_PASSWORD); String command = "-c infinispan.xml"; - if (config.site.isPresent()) { - command = "-c infinispan-xsite.xml -Dinfinispan.site.name=" + config.site.get(); + if (config.site().isPresent()) { + command = "-c infinispan-xsite.xml -Dinfinispan.site.name=" + config.site().get(); } - command = command + config.configFiles.map(files -> files.stream().map(file -> { + command = command + config.configFiles().map(files -> files.stream().map(file -> { String userConfigFile = "/user-config/" + file; withClasspathResourceMapping(file, userConfigFile, BindMode.READ_ONLY); return " -c " + userConfigFile; }).collect(Collectors.joining())).orElse(""); - if (config.tracing.orElse(false)) { + if (config.tracing().orElse(false)) { log.warn( "Starting with Infinispan 15.0, Infinispan support for instrumentation of the server via OpenTelemetry has evolved. 
Enabling tracing by setting `quarkus.infinispan-client.devservices.tracing.enabled=true` doesn't work anymore.\n" + @@ -298,11 +299,11 @@ public QuarkusInfinispanContainer(String clientName, InfinispanDevServicesConfig " security: false"); } - if (config.mcastPort.isPresent()) { - command = command + " -Djgroups.mcast_port=" + config.mcastPort.getAsInt(); + if (config.mcastPort().isPresent()) { + command = command + " -Djgroups.mcast_port=" + config.mcastPort().getAsInt(); } - config.artifacts.ifPresent(a -> withArtifacts(a.toArray(new String[0]))); + config.artifacts().ifPresent(a -> withArtifacts(a.toArray(new String[0]))); withCommand(command); } diff --git a/extensions/infinispan-client/deployment/src/test/java/org/quarkus/infinispan/client/deployment/InfinispanDefaultMinimalConfigurationTest.java b/extensions/infinispan-client/deployment/src/test/java/org/quarkus/infinispan/client/deployment/InfinispanDefaultMinimalConfigurationTest.java index 61003f76eac8d..be409d8ff2635 100644 --- a/extensions/infinispan-client/deployment/src/test/java/org/quarkus/infinispan/client/deployment/InfinispanDefaultMinimalConfigurationTest.java +++ b/extensions/infinispan-client/deployment/src/test/java/org/quarkus/infinispan/client/deployment/InfinispanDefaultMinimalConfigurationTest.java @@ -32,7 +32,7 @@ public void infinispanConnectionConfiguration() { assertThat(configuration.clientIntelligence()).isEqualTo(ClientIntelligence.HASH_DISTRIBUTION_AWARE); assertThat(configuration.remoteCaches()).isEmpty(); assertThat(configuration.security().authentication().enabled()).isTrue(); - assertThat(configuration.security().authentication().saslMechanism()).isEqualTo("DIGEST-MD5"); + assertThat(configuration.security().authentication().saslMechanism()).isEqualTo("DIGEST-SHA-512"); assertThat(configuration.security().ssl().enabled()).isFalse(); } } diff --git a/extensions/infinispan-client/runtime/pom.xml b/extensions/infinispan-client/runtime/pom.xml index 46275ca6a714c..9a0b808987f3f 100644 --- a/extensions/infinispan-client/runtime/pom.xml +++ b/extensions/infinispan-client/runtime/pom.xml @@ -176,9 +176,6 @@ ${project.version}
    - - -AlegacyConfigRoot=true - diff --git a/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/InfinispanClientBuildTimeConfig.java b/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/InfinispanClientBuildTimeConfig.java index 6fe17807543c6..53ac19ac0eca5 100644 --- a/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/InfinispanClientBuildTimeConfig.java +++ b/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/InfinispanClientBuildTimeConfig.java @@ -1,43 +1,40 @@ package io.quarkus.infinispan.client.runtime; -import java.util.HashMap; import java.util.Map; -import java.util.Objects; import java.util.Optional; import io.quarkus.runtime.annotations.ConfigDocSection; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; +import io.smallrye.config.WithDefault; +import io.smallrye.config.WithParentName; /** * @author William Burns */ @ConfigGroup -public class InfinispanClientBuildTimeConfig { +public interface InfinispanClientBuildTimeConfig { /** * Sets the bounded entry count for near cache. If this value is 0 or less near cache is disabled. * * @deprecated use per cache configuration for near cache max entries */ - @ConfigItem @Deprecated - public int nearCacheMaxEntries; + @WithDefault("0") + int nearCacheMaxEntries(); /** * Sets the marshallerClass. Default is ProtoStreamMarshaller */ - @ConfigItem - public Optional marshallerClass; + Optional marshallerClass(); /** * Configures caches build time config from the client with the provided configuration. */ - @ConfigItem - public Map cache = new HashMap<>(); + Map cache(); @ConfigGroup - public static class RemoteCacheConfig { + public interface RemoteCacheConfig { // @formatter:off /** @@ -46,8 +43,7 @@ public static class RemoteCacheConfig { * quarkus.infinispan-client.cache.bookscache.configuration-resource=cacheConfig.xml */ // @formatter:on - @ConfigItem - public Optional configurationResource; + Optional configurationResource(); } /** @@ -56,39 +52,17 @@ public static class RemoteCacheConfig { * Dev Services allows Quarkus to automatically start an Infinispan Server in dev and test * mode. */ - @ConfigItem(name = ConfigItem.PARENT) - public DevServiceConfiguration devService; + @WithParentName + DevServiceConfiguration devservices(); @ConfigGroup - public static class DevServiceConfiguration { + public interface DevServiceConfiguration { /** * Dev Services *

    * Dev Services allows Quarkus to automatically start Infinispan in dev and test mode. */ - @ConfigItem @ConfigDocSection(generated = true) - public InfinispanDevServicesConfig devservices; - - @Override - public boolean equals(Object o) { - if (this == o) - return true; - if (o == null || getClass() != o.getClass()) - return false; - DevServiceConfiguration that = (DevServiceConfiguration) o; - return Objects.equals(devservices, that.devservices); - } - - @Override - public int hashCode() { - return Objects.hash(devservices); - } - } - - @Override - public String toString() { - return "InfinispanClientBuildTimeConfig{" + "nearCacheMaxEntries=" + nearCacheMaxEntries + ", marshallerClass=" - + marshallerClass + ", cache=" + cache + ", devService=" + devService + '}'; + InfinispanDevServicesConfig devservices(); } } diff --git a/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/InfinispanClientProducer.java b/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/InfinispanClientProducer.java index b1b170bc91465..7413575204194 100644 --- a/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/InfinispanClientProducer.java +++ b/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/InfinispanClientProducer.java @@ -78,8 +78,8 @@ private void registerSchemaInServer(String infinispanConfigName, } protobufMetadataCache.put(initializer.getProtoFileName(), initializer.getProtoFile()); } - runtimeConfig.backupCluster.entrySet().forEach(backup -> { - if (backup.getValue().useSchemaRegistration.orElse(true)) { + runtimeConfig.backupCluster().entrySet().forEach(backup -> { + if (backup.getValue().useSchemaRegistration().orElse(true)) { cacheManager.switchToCluster(backup.getKey()); for (SerializationContextInitializer initializer : initializers) { RemoteCache backupProtobufMetadataCache = null; @@ -110,8 +110,8 @@ private void registerSchemaInServer(String infinispanConfigName, } } - runtimeConfig.backupCluster.entrySet().forEach(backupConfigEntry -> { - if (backupConfigEntry.getValue().useSchemaRegistration.orElse(true)) { + runtimeConfig.backupCluster().entrySet().forEach(backupConfigEntry -> { + if (backupConfigEntry.getValue().useSchemaRegistration().orElse(true)) { cacheManager.switchToCluster(backupConfigEntry.getKey()); RemoteCache backupProtobufMetadataCache = null; for (Map.Entry property : namedProperties.entrySet()) { @@ -149,10 +149,10 @@ private void initialize(String infinispanConfigName, Map pro InfinispanClientsRuntimeConfig infinispanClientsRuntimeConfig = this.infinispanClientsRuntimeConfigHandle.get(); RemoteCacheManager cacheManager = new RemoteCacheManager(conf.build(), - infinispanClientsRuntimeConfig.startClient.orElse(Boolean.TRUE)); + infinispanClientsRuntimeConfig.startClient().orElse(Boolean.TRUE)); remoteCacheManagers.put(infinispanConfigName, cacheManager); - if (infinispanClientsRuntimeConfig.useSchemaRegistration.orElse(Boolean.TRUE)) { + if (infinispanClientsRuntimeConfig.useSchemaRegistration().orElse(Boolean.TRUE)) { registerSchemaInServer(infinispanConfigName, properties, cacheManager); } } @@ -188,185 +188,189 @@ private ConfigurationBuilder builderFromProperties(String infinispanClientName, return builder; } - if (infinispanClientRuntimeConfig.uri.isPresent()) { - properties.put(ConfigurationProperties.URI, infinispanClientRuntimeConfig.uri.get()); + if (infinispanClientRuntimeConfig.uri().isPresent()) { + 
properties.put(ConfigurationProperties.URI, infinispanClientRuntimeConfig.uri().get()); } else { - if (infinispanClientRuntimeConfig.serverList.isPresent()) { + if (infinispanClientRuntimeConfig.serverList().isPresent()) { log.warn( "Use 'quarkus.infinispan-client.hosts' instead of the deprecated 'quarkus.infinispan-client.server-list'"); - properties.put(ConfigurationProperties.SERVER_LIST, infinispanClientRuntimeConfig.serverList.get()); + properties.put(ConfigurationProperties.SERVER_LIST, infinispanClientRuntimeConfig.serverList().get()); } - if (infinispanClientRuntimeConfig.hosts.isPresent()) { - properties.put(ConfigurationProperties.SERVER_LIST, infinispanClientRuntimeConfig.hosts.get()); + if (infinispanClientRuntimeConfig.hosts().isPresent()) { + properties.put(ConfigurationProperties.SERVER_LIST, infinispanClientRuntimeConfig.hosts().get()); } - if (infinispanClientRuntimeConfig.authUsername.isPresent()) { + if (infinispanClientRuntimeConfig.authUsername().isPresent()) { log.warn( "Use 'quarkus.infinispan-client.username' instead of the deprecated 'quarkus.infinispan-client.auth-username'"); - properties.put(ConfigurationProperties.AUTH_USERNAME, infinispanClientRuntimeConfig.authUsername.get()); + properties.put(ConfigurationProperties.AUTH_USERNAME, infinispanClientRuntimeConfig.authUsername().get()); } - if (infinispanClientRuntimeConfig.username.isPresent()) { - properties.put(ConfigurationProperties.AUTH_USERNAME, infinispanClientRuntimeConfig.username.get()); + if (infinispanClientRuntimeConfig.username().isPresent()) { + properties.put(ConfigurationProperties.AUTH_USERNAME, infinispanClientRuntimeConfig.username().get()); } - if (infinispanClientRuntimeConfig.authPassword.isPresent()) { + if (infinispanClientRuntimeConfig.authPassword().isPresent()) { log.warn( "Use 'quarkus.infinispan-client.password' instead of the deprecated 'quarkus.infinispan-client.auth-password'"); - properties.put(ConfigurationProperties.AUTH_PASSWORD, infinispanClientRuntimeConfig.authPassword.get()); + properties.put(ConfigurationProperties.AUTH_PASSWORD, infinispanClientRuntimeConfig.authPassword().get()); } - if (infinispanClientRuntimeConfig.password.isPresent()) { - properties.put(ConfigurationProperties.AUTH_PASSWORD, infinispanClientRuntimeConfig.password.get()); + if (infinispanClientRuntimeConfig.password().isPresent()) { + properties.put(ConfigurationProperties.AUTH_PASSWORD, infinispanClientRuntimeConfig.password().get()); } } - if (infinispanClientRuntimeConfig.clientIntelligence.isPresent()) { - properties.put(ConfigurationProperties.CLIENT_INTELLIGENCE, infinispanClientRuntimeConfig.clientIntelligence.get()); + if (infinispanClientRuntimeConfig.clientIntelligence().isPresent()) { + properties.put(ConfigurationProperties.CLIENT_INTELLIGENCE, + infinispanClientRuntimeConfig.clientIntelligence().get()); } - if (infinispanClientRuntimeConfig.useAuth.isPresent()) { - properties.put(ConfigurationProperties.USE_AUTH, infinispanClientRuntimeConfig.useAuth.get()); + if (infinispanClientRuntimeConfig.useAuth().isPresent()) { + properties.put(ConfigurationProperties.USE_AUTH, infinispanClientRuntimeConfig.useAuth().get()); } - if (infinispanClientRuntimeConfig.authRealm.isPresent()) { - properties.put(ConfigurationProperties.AUTH_REALM, infinispanClientRuntimeConfig.authRealm.get()); + if (infinispanClientRuntimeConfig.authRealm().isPresent()) { + properties.put(ConfigurationProperties.AUTH_REALM, infinispanClientRuntimeConfig.authRealm().get()); } - if 
(infinispanClientRuntimeConfig.authServerName.isPresent()) { - properties.put(ConfigurationProperties.AUTH_SERVER_NAME, infinispanClientRuntimeConfig.authServerName.get()); + if (infinispanClientRuntimeConfig.authServerName().isPresent()) { + properties.put(ConfigurationProperties.AUTH_SERVER_NAME, infinispanClientRuntimeConfig.authServerName().get()); } - if (infinispanClientRuntimeConfig.saslMechanism.isPresent()) { - properties.put(ConfigurationProperties.SASL_MECHANISM, infinispanClientRuntimeConfig.saslMechanism.get()); + if (infinispanClientRuntimeConfig.saslMechanism().isPresent()) { + properties.put(ConfigurationProperties.SASL_MECHANISM, infinispanClientRuntimeConfig.saslMechanism().get()); } - if (infinispanClientRuntimeConfig.keyStore.isPresent()) { - properties.put(ConfigurationProperties.KEY_STORE_FILE_NAME, infinispanClientRuntimeConfig.keyStore.get()); + if (infinispanClientRuntimeConfig.keyStore().isPresent()) { + properties.put(ConfigurationProperties.KEY_STORE_FILE_NAME, infinispanClientRuntimeConfig.keyStore().get()); } - if (infinispanClientRuntimeConfig.keyStorePassword.isPresent()) { + if (infinispanClientRuntimeConfig.keyStorePassword().isPresent()) { properties.put(ConfigurationProperties.KEY_STORE_PASSWORD, - infinispanClientRuntimeConfig.keyStorePassword.get()); + infinispanClientRuntimeConfig.keyStorePassword().get()); } - if (infinispanClientRuntimeConfig.keyStoreType.isPresent()) { + if (infinispanClientRuntimeConfig.keyStoreType().isPresent()) { properties.put(ConfigurationProperties.KEY_STORE_TYPE, - infinispanClientRuntimeConfig.keyStoreType.get()); + infinispanClientRuntimeConfig.keyStoreType().get()); } - if (infinispanClientRuntimeConfig.keyAlias.isPresent()) { + if (infinispanClientRuntimeConfig.keyAlias().isPresent()) { properties.put(ConfigurationProperties.KEY_ALIAS, - infinispanClientRuntimeConfig.keyAlias.get()); + infinispanClientRuntimeConfig.keyAlias().get()); } - if (infinispanClientRuntimeConfig.trustStore.isPresent()) { - properties.put(ConfigurationProperties.TRUST_STORE_FILE_NAME, infinispanClientRuntimeConfig.trustStore.get()); + if (infinispanClientRuntimeConfig.trustStore().isPresent()) { + properties.put(ConfigurationProperties.TRUST_STORE_FILE_NAME, infinispanClientRuntimeConfig.trustStore().get()); } - if (infinispanClientRuntimeConfig.trustStorePassword.isPresent()) { + if (infinispanClientRuntimeConfig.trustStorePassword().isPresent()) { properties.put(ConfigurationProperties.TRUST_STORE_PASSWORD, - infinispanClientRuntimeConfig.trustStorePassword.get()); + infinispanClientRuntimeConfig.trustStorePassword().get()); } - if (infinispanClientRuntimeConfig.trustStoreType.isPresent()) { - properties.put(ConfigurationProperties.TRUST_STORE_TYPE, infinispanClientRuntimeConfig.trustStoreType.get()); + if (infinispanClientRuntimeConfig.trustStoreType().isPresent()) { + properties.put(ConfigurationProperties.TRUST_STORE_TYPE, infinispanClientRuntimeConfig.trustStoreType().get()); } - if (infinispanClientRuntimeConfig.sslProvider.isPresent()) { - properties.put(ConfigurationProperties.SSL_PROVIDER, infinispanClientRuntimeConfig.sslProvider.get()); + if (infinispanClientRuntimeConfig.sslProvider().isPresent()) { + properties.put(ConfigurationProperties.SSL_PROVIDER, infinispanClientRuntimeConfig.sslProvider().get()); } - if (infinispanClientRuntimeConfig.sslProtocol.isPresent()) { - properties.put(ConfigurationProperties.SSL_PROTOCOL, infinispanClientRuntimeConfig.sslProtocol.get()); + if (infinispanClientRuntimeConfig.sslProtocol().isPresent()) 
{ + properties.put(ConfigurationProperties.SSL_PROTOCOL, infinispanClientRuntimeConfig.sslProtocol().get()); } - if (infinispanClientRuntimeConfig.sslCiphers.isPresent()) { + if (infinispanClientRuntimeConfig.sslCiphers().isPresent()) { properties.put(ConfigurationProperties.SSL_CIPHERS, - infinispanClientRuntimeConfig.sslCiphers.get().stream().collect(Collectors.joining(" "))); + infinispanClientRuntimeConfig.sslCiphers().get().stream().collect(Collectors.joining(" "))); } - if (infinispanClientRuntimeConfig.sslHostNameValidation.isPresent()) { + if (infinispanClientRuntimeConfig.sslHostNameValidation().isPresent()) { properties.put(ConfigurationProperties.SSL_HOSTNAME_VALIDATION, - infinispanClientRuntimeConfig.sslHostNameValidation.get()); + infinispanClientRuntimeConfig.sslHostNameValidation().get()); } - if (infinispanClientRuntimeConfig.sniHostName.isPresent()) { - properties.put(ConfigurationProperties.SNI_HOST_NAME, infinispanClientRuntimeConfig.sniHostName.get()); + if (infinispanClientRuntimeConfig.sniHostName().isPresent()) { + properties.put(ConfigurationProperties.SNI_HOST_NAME, infinispanClientRuntimeConfig.sniHostName().get()); } - if (infinispanClientRuntimeConfig.socketTimeout.isPresent()) { - properties.put(ConfigurationProperties.SO_TIMEOUT, infinispanClientRuntimeConfig.socketTimeout.get()); + if (infinispanClientRuntimeConfig.socketTimeout().isPresent()) { + properties.put(ConfigurationProperties.SO_TIMEOUT, infinispanClientRuntimeConfig.socketTimeout().get()); } builder.withProperties(properties); - if (infinispanClientRuntimeConfig.tracingPropagationEnabled.isPresent()) { - if (!infinispanClientRuntimeConfig.tracingPropagationEnabled.get()) { + if (infinispanClientRuntimeConfig.tracingPropagationEnabled().isPresent()) { + if (!infinispanClientRuntimeConfig.tracingPropagationEnabled().get()) { builder.disableTracingPropagation(); } } if (infinispanClientBuildTimeConfig != null) { - for (Map.Entry buildCacheConfig : infinispanClientBuildTimeConfig.cache + for (Map.Entry buildCacheConfig : infinispanClientBuildTimeConfig + .cache() .entrySet()) { String cacheName = buildCacheConfig.getKey(); // Do this if the cache config is only present in the build time configuration - if (!infinispanClientRuntimeConfig.cache.containsKey(cacheName)) { + if (!infinispanClientRuntimeConfig.cache().containsKey(cacheName)) { InfinispanClientBuildTimeConfig.RemoteCacheConfig buildCacheConfigValue = buildCacheConfig.getValue(); - if (buildCacheConfig.getValue().configurationResource.isPresent()) { + if (buildCacheConfig.getValue().configurationResource().isPresent()) { URL configFile = Thread.currentThread().getContextClassLoader() - .getResource(buildCacheConfigValue.configurationResource.get()); + .getResource(buildCacheConfigValue.configurationResource().get()); configureRemoteCacheConfigurationURI(builder, cacheName, configFile); } } } } - for (Map.Entry cacheConfig : infinispanClientRuntimeConfig.cache + for (Map.Entry cacheConfig : infinispanClientRuntimeConfig + .cache() .entrySet()) { String cacheName = cacheConfig.getKey(); InfinispanClientRuntimeConfig.RemoteCacheConfig runtimeCacheConfig = cacheConfig.getValue(); URL configFile = null; // Check if the build time resource file configuration exists if (infinispanClientBuildTimeConfig != null) { - InfinispanClientBuildTimeConfig.RemoteCacheConfig buildtimeCacheConfig = infinispanClientBuildTimeConfig.cache + InfinispanClientBuildTimeConfig.RemoteCacheConfig buildtimeCacheConfig = infinispanClientBuildTimeConfig.cache() .get( 
cacheName); - if (buildtimeCacheConfig != null && buildtimeCacheConfig.configurationResource.isPresent()) { + if (buildtimeCacheConfig != null && buildtimeCacheConfig.configurationResource().isPresent()) { configFile = Thread.currentThread().getContextClassLoader() - .getResource(buildtimeCacheConfig.configurationResource.get()); + .getResource(buildtimeCacheConfig.configurationResource().get()); } } // Override build time resource if configuration-uri runtime resource is provided - if (runtimeCacheConfig.configurationUri.isPresent()) { + if (runtimeCacheConfig.configurationUri().isPresent()) { configFile = Thread.currentThread().getContextClassLoader() - .getResource(runtimeCacheConfig.configurationUri.get()); + .getResource(runtimeCacheConfig.configurationUri().get()); } if (configFile != null) { configureRemoteCacheConfigurationURI(builder, cacheName, configFile); } else { // Inline configuration - if (runtimeCacheConfig.configuration.isPresent()) { - builder.remoteCache(cacheName).configuration(runtimeCacheConfig.configuration.get()); + if (runtimeCacheConfig.configuration().isPresent()) { + builder.remoteCache(cacheName).configuration(runtimeCacheConfig.configuration().get()); } } // Configures near caching - if (runtimeCacheConfig.nearCacheMaxEntries.isPresent()) { - builder.remoteCache(cacheName).nearCacheMaxEntries(runtimeCacheConfig.nearCacheMaxEntries.get()); + if (runtimeCacheConfig.nearCacheMaxEntries().isPresent()) { + builder.remoteCache(cacheName).nearCacheMaxEntries(runtimeCacheConfig.nearCacheMaxEntries().get()); } - if (runtimeCacheConfig.nearCacheMode.isPresent()) { - builder.remoteCache(cacheName).nearCacheMode(runtimeCacheConfig.nearCacheMode.get()); + if (runtimeCacheConfig.nearCacheMode().isPresent()) { + builder.remoteCache(cacheName).nearCacheMode(runtimeCacheConfig.nearCacheMode().get()); } - if (runtimeCacheConfig.nearCacheUseBloomFilter.isPresent()) { - builder.remoteCache(cacheName).nearCacheUseBloomFilter(runtimeCacheConfig.nearCacheUseBloomFilter.get()); + if (runtimeCacheConfig.nearCacheUseBloomFilter().isPresent()) { + builder.remoteCache(cacheName).nearCacheUseBloomFilter(runtimeCacheConfig.nearCacheUseBloomFilter().get()); } } - for (Map.Entry backupCluster : infinispanClientRuntimeConfig.backupCluster + for (Map.Entry backupCluster : infinispanClientRuntimeConfig + .backupCluster() .entrySet()) { InfinispanClientRuntimeConfig.BackupClusterConfig backupClusterConfig = backupCluster.getValue(); ClusterConfigurationBuilder clusterConfigurationBuilder = builder.addCluster(backupCluster.getKey()); - clusterConfigurationBuilder.addClusterNodes(backupClusterConfig.hosts); - if (backupClusterConfig.clientIntelligence.isPresent()) { + clusterConfigurationBuilder.addClusterNodes(backupClusterConfig.hosts()); + if (backupClusterConfig.clientIntelligence().isPresent()) { clusterConfigurationBuilder.clusterClientIntelligence( - ClientIntelligence.valueOf(backupClusterConfig.clientIntelligence.get())); + ClientIntelligence.valueOf(backupClusterConfig.clientIntelligence().get())); } } diff --git a/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/InfinispanClientRuntimeConfig.java b/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/InfinispanClientRuntimeConfig.java index c16d708f3bb6a..c54488510923f 100644 --- a/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/InfinispanClientRuntimeConfig.java +++ 
b/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/InfinispanClientRuntimeConfig.java @@ -9,13 +9,14 @@ import org.infinispan.client.hotrod.configuration.NearCacheMode; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; +import io.smallrye.config.WithDefault; +import io.smallrye.config.WithName; /** * @author Katia Aresti */ @ConfigGroup -public class InfinispanClientRuntimeConfig { +public interface InfinispanClientRuntimeConfig { // @formatter:off /** @@ -23,16 +24,14 @@ public class InfinispanClientRuntimeConfig { * If provided {@link #hosts}, {@link #username} and {@link #password} will be ignored. */ // @formatter:on - @ConfigItem - public Optional uri; + Optional uri(); // @formatter:off /** * Sets the host name/port to connect to. Each one is separated by a semicolon (eg. host1:11222;host2:11222). */ // @formatter:on - @ConfigItem - public Optional hosts; + Optional hosts(); // @formatter:off /** @@ -40,9 +39,8 @@ public class InfinispanClientRuntimeConfig { * @deprecated {@link #hosts} should be used to configure the list or uri for an uri connection string. */ // @formatter:on - @ConfigItem @Deprecated - public Optional serverList; + Optional serverList(); // @formatter:off /** @@ -57,8 +55,8 @@ public class InfinispanClientRuntimeConfig { * greatly. This is the default. */ // @formatter:on - @ConfigItem(defaultValue = "HASH_DISTRIBUTION_AWARE") - Optional clientIntelligence; + @WithDefault("HASH_DISTRIBUTION_AWARE") + Optional clientIntelligence(); // @formatter:off /** @@ -66,164 +64,147 @@ public class InfinispanClientRuntimeConfig { * deployments. Default is 'true'. */ // @formatter:on - @ConfigItem(defaultValue = "true") - Optional useAuth; + @WithDefault("true") + Optional useAuth(); /** * Sets username used by authentication. */ - @ConfigItem - Optional username; + Optional username(); /** * Sets username used by authentication. * * @deprecated {@link #username} should be used to configure the credentials username. */ - @ConfigItem @Deprecated - Optional authUsername; + Optional authUsername(); /** * Sets password used by authentication. */ - @ConfigItem - Optional password; + Optional password(); /** * Sets password used by authentication * * @deprecated {@link #password} should be used to configure the credentials password. */ - @ConfigItem @Deprecated - Optional authPassword; + Optional authPassword(); /** * Sets realm used by authentication */ - @ConfigItem(defaultValue = "default") - Optional authRealm; + @WithDefault("default") + Optional authRealm(); /** * Sets server name used by authentication */ - @ConfigItem(defaultValue = "infinispan") - Optional authServerName; + @WithDefault("infinispan") + Optional authServerName(); // @formatter:off /** * Sets SASL mechanism used by authentication. * Available values: - * * `DIGEST-MD5` - Uses the MD5 hashing algorithm in addition to nonces to encrypt credentials. This is the default. + * * `DIGEST-SHA-512` - Uses the SHA-512 hashing algorithm to securely hash and verify credentials. This is the default. + * * `DIGEST-MD5` - Uses the MD5 hashing algorithm in addition to nonces to encrypt credentials. * * `EXTERNAL` - Uses client certificates to provide valid identities to Infinispan Server and enable encryption. * * `PLAIN` - Sends credentials in plain text (unencrypted) over the wire in a way that is similar to HTTP BASIC * authentication. You should use `PLAIN` authentication only in combination with TLS encryption. 
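Context for the annotation swap in this hunk: `@ConfigItem(defaultValue = ...)` on a field becomes `@WithDefault(...)` on an interface method, and the default SASL mechanism moves from DIGEST-MD5 to DIGEST-SHA-512. A minimal sketch of the pattern follows; the prefix and interface name are hypothetical, only the annotation usage mirrors the change above.

```java
// Minimal sketch, not part of the patch: @WithDefault supplies the new
// DIGEST-SHA-512 default when the property is absent from every config source.
import java.util.Optional;

import io.smallrye.config.ConfigMapping;
import io.smallrye.config.WithDefault;

@ConfigMapping(prefix = "example.client") // hypothetical prefix
interface SaslMechanismSketch {

    // Resolves "example.client.sasl-mechanism"; falls back to DIGEST-SHA-512 when unset.
    @WithDefault("DIGEST-SHA-512")
    Optional<String> saslMechanism();
}
```

Applications that still talk to servers without SHA-512 support can keep the previous behaviour by setting the property explicitly, e.g. `quarkus.infinispan-client.sasl-mechanism=DIGEST-MD5`.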
*/ // @formatter:on - @ConfigItem(defaultValue = "DIGEST-MD5") - Optional saslMechanism; + @WithDefault("DIGEST-SHA-512") + Optional saslMechanism(); /** * Specifies the filename of a keystore to use to create the {@link SSLContext}. * You also need to specify a keyStorePassword. * Setting this property implicitly enables SSL/TLS. */ - @ConfigItem - Optional keyStore; + Optional keyStore(); /** * Specifies the password needed to open the keystore. You also need to specify a keyStore. * Setting this property implicitly enables SSL/TLS. */ - @ConfigItem - Optional keyStorePassword; + Optional keyStorePassword(); /** * Specifies the type of the keyStore, such as PKCS12. */ - @ConfigItem - Optional keyStoreType; + Optional keyStoreType(); /** * Sets the unique name used to identify a specific key pair in a keystore for secure connections. */ - @ConfigItem - Optional keyAlias; + Optional keyAlias(); /** * Specifies the filename of a truststore to use to create the {@link SSLContext}. * You also need to specify a trustStorePassword. * Setting this property implicitly enables SSL/TLS. */ - @ConfigItem - Optional trustStore; + Optional trustStore(); /** * Specifies the password needed to open the truststore You also need to specify a trustStore. * Setting this property implicitly enables SSL/TLS. */ - @ConfigItem - Optional trustStorePassword; + Optional trustStorePassword(); /** * Specifies the type of the truststore, such as JKS or JCEKS. Defaults to JKS if trustStore is enabled. */ - @ConfigItem - Optional trustStoreType; + Optional trustStoreType(); /** * Configures the secure socket protocol. * Setting this property implicitly enables SSL/TLS. */ - @ConfigItem - Optional sslProtocol; + Optional sslProtocol(); /** * Sets the ssl provider. For example BCFIPS * Setting this implicitly enables SSL/TLS. */ - @ConfigItem - Optional sslProvider; + Optional sslProvider(); /** * Configures the ciphers. * Setting this property implicitly enables SSL/TLS. */ - @ConfigItem - Optional> sslCiphers; + Optional> sslCiphers(); /** * Do SSL hostname validation. * Defaults to true. */ - @ConfigItem - Optional sslHostNameValidation; + Optional sslHostNameValidation(); /** * SNI host name. Mandatory when SSL is enabled and host name validation is true. */ - @ConfigItem - Optional sniHostName; + Optional sniHostName(); /** * Configures the socket timeout. */ - @ConfigItem - Optional socketTimeout; + Optional socketTimeout(); /** * Whether a tracing propagation is enabled in case the Opentelemetry extension is present. * By default the propagation of the context is propagated from the client to the Infinispan Server. */ - @ConfigItem(name = "tracing.propagation.enabled") - public Optional tracingPropagationEnabled; + @WithName("tracing.propagation.enabled") + Optional tracingPropagationEnabled(); /** * Configures caches from the client with the provided configuration. 
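The `cache()` and `backupCluster()` methods introduced here return maps keyed by cache or cluster name; with `@ConfigMapping`, each map key becomes a segment of the property name, which is what the recorder loops earlier in this diff iterate over. A small sketch of the shape, with a hypothetical prefix and type names:

```java
// Illustrative sketch only. Map keys become property-name segments, e.g.
//   example.client.cache.books.configuration-uri=booksCache.xml
//   example.client.cache."books".configuration-uri=booksCache.xml   (quotes are also accepted)
import java.util.Map;
import java.util.Optional;

import io.smallrye.config.ConfigMapping;

@ConfigMapping(prefix = "example.client") // hypothetical prefix
interface CacheMapSketch {

    // One nested group per configured cache name.
    Map<String, RemoteCache> cache();

    interface RemoteCache {
        Optional<String> configurationUri();

        Optional<Integer> nearCacheMaxEntries();
    }
}
```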
*/ - @ConfigItem - public Map cache; + Map cache(); /** * // @formatter:off @@ -235,11 +216,10 @@ public class InfinispanClientRuntimeConfig { * * // @formatter:on */ - @ConfigItem - public Map backupCluster; + Map backupCluster(); @ConfigGroup - public static class RemoteCacheConfig { + public interface RemoteCacheConfig { // @formatter:off /** @@ -249,8 +229,7 @@ public static class RemoteCacheConfig { * quarkus.infinispan-client.cache.bookscache.configuration= */ // @formatter:on - @ConfigItem - public Optional configuration; + Optional configuration(); // @formatter:off /** @@ -259,14 +238,12 @@ public static class RemoteCacheConfig { * quarkus.infinispan-client.cache.bookscache.configuration-uri=cacheConfig.xml */ // @formatter:on - @ConfigItem - public Optional configurationUri; + Optional configurationUri(); /** * The maximum number of entries to keep locally for the specified cache. */ - @ConfigItem - public Optional nearCacheMaxEntries; + Optional nearCacheMaxEntries(); // @formatter:off /** @@ -277,8 +254,7 @@ public static class RemoteCacheConfig { * invalidation messages will be sent to clients to remove them from the near cache. */ // @formatter:on - @ConfigItem - public Optional nearCacheMode; + Optional nearCacheMode(); // @formatter:off /** @@ -287,19 +263,17 @@ public static class RemoteCacheConfig { * invalidation messages. */ // @formatter:on - @ConfigItem - public Optional nearCacheUseBloomFilter; + Optional nearCacheUseBloomFilter(); } @ConfigGroup - public static class BackupClusterConfig { + public interface BackupClusterConfig { // @formatter:off /** * Sets the host name/port to connect to. Each one is separated by a semicolon (eg. hostA:11222;hostB:11222). */ // @formatter:on - @ConfigItem - public String hosts; + String hosts(); // @formatter:off /** @@ -314,8 +288,8 @@ public static class BackupClusterConfig { * greatly. This is the default. */ // @formatter:on - @ConfigItem(defaultValue = "HASH_DISTRIBUTION_AWARE") - Optional clientIntelligence; + @WithDefault("HASH_DISTRIBUTION_AWARE") + Optional clientIntelligence(); // @formatter:off /** @@ -325,14 +299,7 @@ public static class BackupClusterConfig { * This setting will be ignored if the Global Setting is set up to false. 
*/ // @formatter:on - @ConfigItem(defaultValue = "true") - Optional useSchemaRegistration; - } - - @Override - public String toString() { - return "InfinispanClientRuntimeConfig{" + - "hosts=" + hosts + - '}'; + @WithDefault("true") + Optional useSchemaRegistration(); } } diff --git a/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/InfinispanClientUtil.java b/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/InfinispanClientUtil.java index 99c6b72c42c69..0cd0c34af1096 100644 --- a/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/InfinispanClientUtil.java +++ b/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/InfinispanClientUtil.java @@ -7,7 +7,7 @@ public final class InfinispanClientUtil { public static final String DEFAULT_INFINISPAN_DEV_SERVICE_NAME = "infinispan"; public static final String DEFAULT_INFINISPAN_CLIENT_NAME = ""; - public static final String INFINISPAN_CLIENT_CONFIG_ROOT_NAME = "infinispan-client"; + public static final String INFINISPAN_CLIENT_CONFIG_MAPPING_PREFIX = "quarkus.infinispan-client"; public static boolean isDefault(String infinispanClientName) { return DEFAULT_INFINISPAN_CLIENT_NAME.equals(infinispanClientName); @@ -19,12 +19,12 @@ public static boolean hasDefault(Collection infinispanClientNames) { public static List infinispanClientPropertyKeys(String infinispanClientName, String radical) { if (infinispanClientName == null || InfinispanClientUtil.isDefault(infinispanClientName)) { - return List.of("quarkus.infinispan-client." + radical); + return List.of(INFINISPAN_CLIENT_CONFIG_MAPPING_PREFIX + "." + radical); } else { // Two possible syntaxes: with or without quotes return List.of( - "quarkus.infinispan-client.\"" + infinispanClientName + "\"." + radical, - "quarkus.infinispan-client." + infinispanClientName + "." + radical); + INFINISPAN_CLIENT_CONFIG_MAPPING_PREFIX + ".\"" + infinispanClientName + "\"." + radical, + INFINISPAN_CLIENT_CONFIG_MAPPING_PREFIX + "." + infinispanClientName + "." 
+ radical); } } diff --git a/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/InfinispanClientsBuildTimeConfig.java b/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/InfinispanClientsBuildTimeConfig.java index 93e48925e9372..709f120daf3d3 100644 --- a/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/InfinispanClientsBuildTimeConfig.java +++ b/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/InfinispanClientsBuildTimeConfig.java @@ -7,42 +7,47 @@ import io.quarkus.runtime.annotations.ConfigDocMapKey; import io.quarkus.runtime.annotations.ConfigDocSection; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefault; +import io.smallrye.config.WithName; +import io.smallrye.config.WithParentName; -@ConfigRoot(name = InfinispanClientUtil.INFINISPAN_CLIENT_CONFIG_ROOT_NAME, phase = ConfigPhase.BUILD_AND_RUN_TIME_FIXED) -public class InfinispanClientsBuildTimeConfig { +@ConfigRoot(phase = ConfigPhase.BUILD_AND_RUN_TIME_FIXED) +@ConfigMapping(prefix = InfinispanClientUtil.INFINISPAN_CLIENT_CONFIG_MAPPING_PREFIX) +public interface InfinispanClientsBuildTimeConfig { /** * The default Infinispan Client. */ - @ConfigItem(name = ConfigItem.PARENT) - public InfinispanClientBuildTimeConfig defaultInfinispanClient; + @WithParentName + InfinispanClientBuildTimeConfig defaultInfinispanClient(); /** * Named clients. */ - @ConfigItem(name = ConfigItem.PARENT) + @WithParentName @ConfigDocMapKey("client-name") @ConfigDocSection - public Map namedInfinispanClients; + Map namedInfinispanClients(); /** * Whether or not a health check is published in case the smallrye-health extension is present. *

    * This is a global setting and is not specific to an Infinispan Client. */ - @ConfigItem(name = "health.enabled", defaultValue = "true") - public boolean healthEnabled; + @WithName("health.enabled") + @WithDefault("true") + boolean healthEnabled(); - public Set getInfinispanNamedClientConfigNames() { - return Collections.unmodifiableSet(new HashSet<>(namedInfinispanClients.keySet())); + default Set getInfinispanNamedClientConfigNames() { + return Collections.unmodifiableSet(new HashSet<>(namedInfinispanClients().keySet())); } - public InfinispanClientBuildTimeConfig getInfinispanClientBuildTimeConfig(String infinispanClientName) { + default InfinispanClientBuildTimeConfig getInfinispanClientBuildTimeConfig(String infinispanClientName) { if (InfinispanClientUtil.isDefault(infinispanClientName)) { - return defaultInfinispanClient; + return defaultInfinispanClient(); } - return namedInfinispanClients.get(infinispanClientName); + return namedInfinispanClients().get(infinispanClientName); } } diff --git a/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/InfinispanClientsRuntimeConfig.java b/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/InfinispanClientsRuntimeConfig.java index 751d39db83f13..8d5d0c5780d3f 100644 --- a/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/InfinispanClientsRuntimeConfig.java +++ b/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/InfinispanClientsRuntimeConfig.java @@ -5,26 +5,29 @@ import io.quarkus.runtime.annotations.ConfigDocMapKey; import io.quarkus.runtime.annotations.ConfigDocSection; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefault; +import io.smallrye.config.WithParentName; -@ConfigRoot(name = InfinispanClientUtil.INFINISPAN_CLIENT_CONFIG_ROOT_NAME, phase = ConfigPhase.RUN_TIME) -public class InfinispanClientsRuntimeConfig { +@ConfigRoot(phase = ConfigPhase.RUN_TIME) +@ConfigMapping(prefix = InfinispanClientUtil.INFINISPAN_CLIENT_CONFIG_MAPPING_PREFIX) +public interface InfinispanClientsRuntimeConfig { /** * The default Infinispan Client. */ - @ConfigItem(name = ConfigItem.PARENT) - public InfinispanClientRuntimeConfig defaultInfinispanClient; + @WithParentName + InfinispanClientRuntimeConfig defaultInfinispanClient(); /** * Named clients. */ - @ConfigItem(name = ConfigItem.PARENT) + @WithParentName @ConfigDocMapKey("client-name") @ConfigDocSection - public Map namedInfinispanClients; + Map namedInfinispanClients(); // @formatter:off /** @@ -34,19 +37,19 @@ public class InfinispanClientsRuntimeConfig { * This is a global setting and is not specific to a Infinispan Client. */ // @formatter:on - @ConfigItem(defaultValue = "true") - Optional useSchemaRegistration; + @WithDefault("true") + Optional useSchemaRegistration(); /** * Starts the client and connects to the server. If set to false, you'll need to start it yourself. 
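Both config roots converted in this stretch use the same idiom: a `@WithParentName` group for the default client, a `@WithParentName` map for named clients, and plain `default` methods replacing the old helper methods. Roughly, under a hypothetical prefix and with placeholder names:

```java
// Sketch of the idiom used by the build-time and runtime config roots above.
// With @WithParentName both members share the root prefix, so
// "example.client.hosts" targets the default client and
// "example.client.site-a.hosts" (or the quoted form) targets a named one.
import java.util.Map;
import java.util.Optional;

import io.smallrye.config.ConfigMapping;
import io.smallrye.config.WithParentName;

@ConfigMapping(prefix = "example.client") // hypothetical prefix
interface ClientsSketch {

    @WithParentName
    Client defaultClient();

    @WithParentName
    Map<String, Client> namedClients();

    // Helper logic lives in default methods now that the root is an interface.
    default Client select(String name) {
        return name == null || name.isEmpty() ? defaultClient() : namedClients().get(name);
    }

    interface Client {
        Optional<String> hosts();
    }
}
```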
*/ - @ConfigItem(defaultValue = "true") - public Optional startClient; + @WithDefault("true") + Optional startClient(); - public InfinispanClientRuntimeConfig getInfinispanClientRuntimeConfig(String infinispanClientName) { + default InfinispanClientRuntimeConfig getInfinispanClientRuntimeConfig(String infinispanClientName) { if (InfinispanClientUtil.isDefault(infinispanClientName)) { - return defaultInfinispanClient; + return defaultInfinispanClient(); } - return namedInfinispanClients.get(infinispanClientName); + return namedInfinispanClients().get(infinispanClientName); } } diff --git a/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/InfinispanDevServicesConfig.java b/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/InfinispanDevServicesConfig.java index 5296d468953d6..c541232951532 100644 --- a/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/InfinispanDevServicesConfig.java +++ b/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/InfinispanDevServicesConfig.java @@ -2,16 +2,16 @@ import java.util.List; import java.util.Map; -import java.util.Objects; import java.util.Optional; import java.util.OptionalInt; import io.quarkus.runtime.annotations.ConfigDocMapKey; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; +import io.smallrye.config.WithDefault; +import io.smallrye.config.WithName; @ConfigGroup -public class InfinispanDevServicesConfig { +public interface InfinispanDevServicesConfig { /** * If DevServices has been explicitly enabled or disabled. DevServices is generally enabled @@ -20,8 +20,8 @@ public class InfinispanDevServicesConfig { * When DevServices is enabled Quarkus will attempt to automatically configure and start * a database when running in Dev or Test mode and when Docker is running. */ - @ConfigItem(defaultValue = "true") - public boolean enabled; + @WithDefault("true") + boolean enabled(); /** * When the configuration is empty, an Infinispan default client is automatically created to connect @@ -29,16 +29,15 @@ public class InfinispanDevServicesConfig { * yet we still need to spin up an Infinispan Server. In such cases, this property serves to determine * whether the client should be created by default or not by the extension. */ - @ConfigItem(defaultValue = "true") - public boolean createDefaultClient; + @WithDefault("true") + boolean createDefaultClient(); /** * Optional fixed port the dev service will listen to. *

    * If not defined, the port will be chosen randomly. */ - @ConfigItem - public OptionalInt port; + OptionalInt port(); /** * Indicates if the Infinispan server managed by Quarkus Dev Services is shared. @@ -51,8 +50,8 @@ public class InfinispanDevServicesConfig { *

    * Container sharing is only used in dev mode. */ - @ConfigItem(defaultValue = "true") - public boolean shared; + @WithDefault("true") + boolean shared(); /** * The value of the {@code quarkus-dev-service-infinispan} label attached to the started container. @@ -64,15 +63,14 @@ public class InfinispanDevServicesConfig { *

    * This property is used when you need multiple shared Infinispan servers. */ - @ConfigItem(defaultValue = InfinispanClientUtil.DEFAULT_INFINISPAN_DEV_SERVICE_NAME) - public String serviceName; + @WithDefault(InfinispanClientUtil.DEFAULT_INFINISPAN_DEV_SERVICE_NAME) + String serviceName(); /** * The image to use. * Note that only official Infinispan images are supported. */ - @ConfigItem - public Optional imageName = Optional.empty(); + Optional imageName(); /** * List of the artifacts to automatically download and add to the Infinispan server libraries. @@ -82,8 +80,7 @@ public class InfinispanDevServicesConfig { *

    * If an invalid value is passed, the Infinispan server will throw an error when trying to start. */ - @ConfigItem - public Optional> artifacts; + Optional> artifacts(); /** * Add a site name to start the Infinispan Server Container with Cross Site Replication enabled (ex. lon). @@ -94,8 +91,7 @@ public class InfinispanDevServicesConfig { * https://infinispan.org/docs/stable/titles/xsite/xsite.html * Configure {@link #mcastPort} to avoid forming a cluster with any other running Infinispan Server container. */ - @ConfigItem - public Optional site; + Optional site(); /** * If you are running an Infinispan Server already in docker, if the containers use the same mcastPort they will form a @@ -106,62 +102,32 @@ public class InfinispanDevServicesConfig { * see * https://github.com/infinispan/infinispan-simple-tutorials/blob/main/infinispan-remote/cross-site-replication/docker-compose/ */ - @ConfigItem - public OptionalInt mcastPort; + OptionalInt mcastPort(); /** * Runs the Infinispan Server container with tracing enabled. Traces are disabled by default */ - @ConfigItem(name = "tracing.enabled", defaultValue = "false") + @WithName("tracing.enabled") + @WithDefault("false") @Deprecated(forRemoval = true) - public Optional tracing; + Optional tracing(); /** * Sets Infinispan Server otlp endpoint. Default value is http://localhost:4317 */ - @ConfigItem(name = "tracing.exporter.otlp.endpoint", defaultValue = "http://localhost:4317") + @WithName("tracing.exporter.otlp.endpoint") + @WithDefault("http://localhost:4317") @Deprecated(forRemoval = true) - public Optional exporterOtlpEndpoint; + Optional exporterOtlpEndpoint(); /** * Environment variables that are passed to the container. */ - @ConfigItem @ConfigDocMapKey("environment-variable-name") - public Map containerEnv; + Map containerEnv(); /** * Infinispan Server configuration chunks to be passed to the container. 
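The Dev Services group keeps its Optional, OptionalInt, List and Map members across the conversion; with the mapping approach, list-valued properties such as the artifacts and config-files entries above are parsed from comma-separated values, and map-valued ones take their keys from the trailing property segment. A small, self-contained sketch with a hypothetical prefix and placeholder values:

```java
// Illustrative sketch; e.g. example.devservices.artifacts=com.example:lib-a:1.0.0,com.example:lib-b:1.0.0
// would yield a two-element list, while leaving the property unset yields Optional.empty().
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalInt;

import io.smallrye.config.ConfigMapping;

@ConfigMapping(prefix = "example.devservices") // hypothetical prefix
interface DevServicesSketch {

    Optional<List<String>> artifacts();

    OptionalInt port();

    // Keys after "container-env." become map keys, values stay plain strings.
    Map<String, String> containerEnv();
}
```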
*/ - @ConfigItem - public Optional> configFiles; - - @Override - public boolean equals(Object o) { - if (this == o) - return true; - if (o == null || getClass() != o.getClass()) - return false; - InfinispanDevServicesConfig that = (InfinispanDevServicesConfig) o; - return enabled == that.enabled && - Objects.equals(port, that.port) && - Objects.equals(shared, that.shared) && - Objects.equals(serviceName, that.serviceName) && - Objects.equals(imageName, that.imageName) && - Objects.equals(artifacts, that.artifacts) && - Objects.equals(containerEnv, that.containerEnv); - } - - @Override - public int hashCode() { - return Objects.hash(enabled, port, shared, serviceName, imageName, artifacts, containerEnv); - } - - @Override - public String toString() { - return "InfinispanDevServicesConfig{" + "enabled=" + enabled + ", port=" + port + ", shared=" + shared - + ", serviceName='" + serviceName + '\'' + ", imageName=" + imageName + ", artifacts=" + artifacts - + ", site=" + site + ", mcastPort=" + mcastPort + ", tracing=" + tracing + ", exporterOtlpEndpoint=" - + exporterOtlpEndpoint + '}'; - } + Optional> configFiles(); } diff --git a/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/health/InfinispanHealthCheck.java b/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/health/InfinispanHealthCheck.java index 782d2f40449ec..1addd0ed96833 100644 --- a/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/health/InfinispanHealthCheck.java +++ b/extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/runtime/health/InfinispanHealthCheck.java @@ -46,15 +46,15 @@ public void configure(InfinispanClientsRuntimeConfig config) { .select(RemoteCacheManager.class, Any.Literal.INSTANCE) .handles(); - if (config.defaultInfinispanClient != null) { + if (config.defaultInfinispanClient() != null) { RemoteCacheManager client = getClient(handle, null); if (client != null) { checks.add(new InfinispanClientCheck(InfinispanClientUtil.DEFAULT_INFINISPAN_CLIENT_NAME, client, - config.defaultInfinispanClient)); + config.defaultInfinispanClient())); } } - config.namedInfinispanClients.entrySet().forEach(new Consumer>() { + config.namedInfinispanClients().entrySet().forEach(new Consumer>() { @Override public void accept(Map.Entry namedInfinispanClientConfig) { RemoteCacheManager client = getClient(handle, namedInfinispanClientConfig.getKey()); diff --git a/extensions/info/deployment/src/main/java/io/quarkus/info/deployment/InfoDevUIProcessor.java b/extensions/info/deployment/src/main/java/io/quarkus/info/deployment/InfoDevUIProcessor.java index 47a8e6d6bc25b..8681b2dbb948d 100644 --- a/extensions/info/deployment/src/main/java/io/quarkus/info/deployment/InfoDevUIProcessor.java +++ b/extensions/info/deployment/src/main/java/io/quarkus/info/deployment/InfoDevUIProcessor.java @@ -20,11 +20,11 @@ public class InfoDevUIProcessor { void create(BuildProducer cardPageProducer, NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem, InfoBuildTimeConfig config, - ManagementInterfaceBuildTimeConfig managementInterfaceBuildTimeConfig, + ManagementInterfaceBuildTimeConfig managementBuildTimeConfig, LaunchModeBuildItem launchModeBuildItem) { var path = nonApplicationRootPathBuildItem.resolveManagementPath(config.path(), - managementInterfaceBuildTimeConfig, launchModeBuildItem); + managementBuildTimeConfig, launchModeBuildItem); WebComponentPageBuilder infoPage = Page.webComponentPageBuilder() 
.title("Information") diff --git a/extensions/jackson/deployment/pom.xml b/extensions/jackson/deployment/pom.xml index a67b293e16e3a..36d5510452b79 100644 --- a/extensions/jackson/deployment/pom.xml +++ b/extensions/jackson/deployment/pom.xml @@ -56,9 +56,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/jackson/deployment/src/main/java/io/quarkus/jackson/deployment/JacksonProcessor.java b/extensions/jackson/deployment/src/main/java/io/quarkus/jackson/deployment/JacksonProcessor.java index bbb443b535381..0406ab7204afb 100644 --- a/extensions/jackson/deployment/src/main/java/io/quarkus/jackson/deployment/JacksonProcessor.java +++ b/extensions/jackson/deployment/src/main/java/io/quarkus/jackson/deployment/JacksonProcessor.java @@ -513,10 +513,10 @@ public SyntheticBeanBuildItem jacksonSupport(JacksonSupportRecorder recorder, } private Optional determinePropertyNamingStrategyClassName(JacksonBuildTimeConfig jacksonBuildTimeConfig) { - if (jacksonBuildTimeConfig.propertyNamingStrategy.isEmpty()) { + if (jacksonBuildTimeConfig.propertyNamingStrategy().isEmpty()) { return Optional.empty(); } - var propertyNamingStrategy = jacksonBuildTimeConfig.propertyNamingStrategy.get(); + var propertyNamingStrategy = jacksonBuildTimeConfig.propertyNamingStrategy().get(); Field field; try { diff --git a/extensions/jackson/deployment/src/test/java/io/quarkus/jackson/deployment/JacksonErroneousTimeZonePropertiesTest.java b/extensions/jackson/deployment/src/test/java/io/quarkus/jackson/deployment/JacksonErroneousTimeZonePropertiesTest.java index db53d96ddedd8..f874a5e84edd0 100644 --- a/extensions/jackson/deployment/src/test/java/io/quarkus/jackson/deployment/JacksonErroneousTimeZonePropertiesTest.java +++ b/extensions/jackson/deployment/src/test/java/io/quarkus/jackson/deployment/JacksonErroneousTimeZonePropertiesTest.java @@ -2,7 +2,6 @@ import static org.junit.jupiter.api.Assertions.fail; -import java.time.zone.ZoneRulesException; import java.util.Date; import jakarta.inject.Inject; @@ -15,6 +14,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import io.quarkus.test.QuarkusUnitTest; +import io.smallrye.config.ConfigValidationException; public class JacksonErroneousTimeZonePropertiesTest { @@ -22,7 +22,7 @@ public class JacksonErroneousTimeZonePropertiesTest { static final QuarkusUnitTest config = new QuarkusUnitTest() .withApplicationRoot((jar) -> jar.addClasses(Pojo.class, SomeBean.class)) .withConfigurationResource("application-erroneous-timezone-properties.properties") - .setExpectedException(ZoneRulesException.class); + .setExpectedException(ConfigValidationException.class); @Test public void test() { diff --git a/extensions/jackson/runtime/pom.xml b/extensions/jackson/runtime/pom.xml index fafc5dde6bc8e..3316ce40d38ff 100644 --- a/extensions/jackson/runtime/pom.xml +++ b/extensions/jackson/runtime/pom.xml @@ -64,9 +64,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/jackson/runtime/src/main/java/io/quarkus/jackson/runtime/ConfigurationCustomizer.java b/extensions/jackson/runtime/src/main/java/io/quarkus/jackson/runtime/ConfigurationCustomizer.java index 3aa6ef532010b..3f63a61bf821f 100644 --- a/extensions/jackson/runtime/src/main/java/io/quarkus/jackson/runtime/ConfigurationCustomizer.java +++ b/extensions/jackson/runtime/src/main/java/io/quarkus/jackson/runtime/ConfigurationCustomizer.java @@ -24,31 +24,31 @@ public class ConfigurationCustomizer implements ObjectMapperCustomizer { @Override public void customize(ObjectMapper objectMapper) 
{ - if (!jacksonBuildTimeConfig.failOnUnknownProperties) { + if (!jacksonBuildTimeConfig.failOnUnknownProperties()) { // this feature is enabled by default, so we disable it objectMapper.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES); } - if (!jacksonBuildTimeConfig.failOnEmptyBeans) { + if (!jacksonBuildTimeConfig.failOnEmptyBeans()) { // this feature is enabled by default, so we disable it objectMapper.disable(SerializationFeature.FAIL_ON_EMPTY_BEANS); } - if (!jacksonBuildTimeConfig.writeDatesAsTimestamps) { + if (!jacksonBuildTimeConfig.writeDatesAsTimestamps()) { // this feature is enabled by default, so we disable it objectMapper.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS); } - if (!jacksonBuildTimeConfig.writeDurationsAsTimestamps) { + if (!jacksonBuildTimeConfig.writeDurationsAsTimestamps()) { // this feature is enabled by default, so we disable it objectMapper.disable(SerializationFeature.WRITE_DURATIONS_AS_TIMESTAMPS); } - if (jacksonBuildTimeConfig.acceptCaseInsensitiveEnums) { + if (jacksonBuildTimeConfig.acceptCaseInsensitiveEnums()) { objectMapper.enable(MapperFeature.ACCEPT_CASE_INSENSITIVE_ENUMS); } - JsonInclude.Include serializationInclusion = jacksonBuildTimeConfig.serializationInclusion.orElse(null); + JsonInclude.Include serializationInclusion = jacksonBuildTimeConfig.serializationInclusion().orElse(null); if (serializationInclusion != null) { objectMapper.setSerializationInclusion(serializationInclusion); } - ZoneId zoneId = jacksonBuildTimeConfig.timezone.orElse(null); - if ((zoneId != null) && !zoneId.getId().equals("UTC")) { // Jackson uses UTC as the default, so let's not reset it + ZoneId zoneId = jacksonBuildTimeConfig.timezone(); + if (!zoneId.getId().equals("UTC")) { // Jackson uses UTC as the default, so let's not reset it objectMapper.setTimeZone(TimeZone.getTimeZone(zoneId)); } if (jacksonSupport.configuredNamingStrategy().isPresent()) { diff --git a/extensions/jackson/runtime/src/main/java/io/quarkus/jackson/runtime/JacksonBuildTimeConfig.java b/extensions/jackson/runtime/src/main/java/io/quarkus/jackson/runtime/JacksonBuildTimeConfig.java index 4dc633c8ac07f..47931ebaba869 100644 --- a/extensions/jackson/runtime/src/main/java/io/quarkus/jackson/runtime/JacksonBuildTimeConfig.java +++ b/extensions/jackson/runtime/src/main/java/io/quarkus/jackson/runtime/JacksonBuildTimeConfig.java @@ -4,74 +4,72 @@ import java.util.Optional; import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.databind.PropertyNamingStrategies; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefault; @ConfigRoot(phase = ConfigPhase.BUILD_AND_RUN_TIME_FIXED) -public class JacksonBuildTimeConfig { +@ConfigMapping(prefix = "quarkus.jackson") +public interface JacksonBuildTimeConfig { /** * If enabled, Jackson will fail when encountering unknown properties. *

    * You can still override it locally with {@code @JsonIgnoreProperties(ignoreUnknown = false)}. */ - @ConfigItem(defaultValue = "false") - public boolean failOnUnknownProperties; + @WithDefault("false") + boolean failOnUnknownProperties(); /** * If enabled, Jackson will fail when no accessors are found for a type. * This is enabled by default to match the default Jackson behavior. */ - @ConfigItem(defaultValue = "true") - public boolean failOnEmptyBeans; + @WithDefault("true") + boolean failOnEmptyBeans(); /** * If enabled, Jackson will serialize dates as numeric value(s). * When disabled, they are serialized in ISO 8601 format. */ - @ConfigItem(defaultValue = "false") - public boolean writeDatesAsTimestamps; + @WithDefault("false") + boolean writeDatesAsTimestamps(); /** * If enabled, Jackson will serialize durations as numeric value(s). * When disabled, they are serialized in ISO 8601 format. * This is enabled by default to match the default Jackson behavior. */ - @ConfigItem(defaultValue = "true") - public boolean writeDurationsAsTimestamps; + @WithDefault("true") + boolean writeDurationsAsTimestamps(); /** * If enabled, Jackson will ignore case during Enum deserialization. */ - @ConfigItem(defaultValue = "false") - public boolean acceptCaseInsensitiveEnums; + @WithDefault("false") + boolean acceptCaseInsensitiveEnums(); /** * If set, Jackson will default to using the specified timezone when formatting dates. * Some examples values are "Asia/Jakarta" and "GMT+3". - * If not set, Jackson will use its own default. */ - @ConfigItem(defaultValue = "UTC") - public Optional timezone; + @WithDefault("UTC") + ZoneId timezone(); /** * Define which properties of Java Beans are to be included in serialization. */ - @ConfigItem - public Optional serializationInclusion; + Optional serializationInclusion(); /** * Defines how names of JSON properties ("external names") are derived * from names of POJO methods and fields ("internal names"). * The value can be one of the one of the constants in {@link com.fasterxml.jackson.databind.PropertyNamingStrategies}, * so for example, {@code LOWER_CAMEL_CASE} or {@code UPPER_CAMEL_CASE}. - * + *

    * The value can also be a fully qualified class name of a {@link com.fasterxml.jackson.databind.PropertyNamingStrategy} * subclass. */ - @ConfigItem - public Optional propertyNamingStrategy; + Optional propertyNamingStrategy(); } diff --git a/extensions/jdbc/jdbc-oracle/deployment/src/main/java/io/quarkus/jdbc/oracle/deployment/OracleMetadataOverrides.java b/extensions/jdbc/jdbc-oracle/deployment/src/main/java/io/quarkus/jdbc/oracle/deployment/OracleMetadataOverrides.java index fb18910d4c80d..d173820860bd8 100644 --- a/extensions/jdbc/jdbc-oracle/deployment/src/main/java/io/quarkus/jdbc/oracle/deployment/OracleMetadataOverrides.java +++ b/extensions/jdbc/jdbc-oracle/deployment/src/main/java/io/quarkus/jdbc/oracle/deployment/OracleMetadataOverrides.java @@ -130,6 +130,8 @@ void runtimeInitializeDriver(BuildProducer run //referring to various other types which aren't allowed in a captured heap. runtimeInitialized.produce(new RuntimeInitializedClassBuildItem("oracle.jdbc.diagnostics.Diagnostic")); runtimeInitialized.produce(new RuntimeInitializedClassBuildItem("oracle.jdbc.replay.driver.FailoverManagerImpl")); + runtimeInitialized.produce(new RuntimeInitializedClassBuildItem("oracle.jdbc.diagnostics.AbstractDiagnosable")); + runtimeInitialized.produce(new RuntimeInitializedClassBuildItem("oracle.jdbc.driver.AbstractTrueCacheConnectionPools")); runtimeInitialized.produce(new RuntimeInitializedClassBuildItem("oracle.jdbc.diagnostics.CommonDiagnosable")); runtimeInitialized.produce(new RuntimeInitializedClassBuildItem("oracle.jdbc.replay.driver.TxnFailoverManagerImpl")); runtimeInitialized.produce(new RuntimeInitializedClassBuildItem("oracle.jdbc.diagnostics.OracleDiagnosticsMXBean")); @@ -152,7 +154,7 @@ NativeImageAllowIncompleteClasspathBuildItem naughtyDriver() { @BuildStep RemovedResourceBuildItem enhancedCharsetSubstitutions() { - return new RemovedResourceBuildItem(ArtifactKey.fromString("com.oracle.database.jdbc:ojdbc11"), + return new RemovedResourceBuildItem(ArtifactKey.fromString("com.oracle.database.jdbc:ojdbc17"), Collections.singleton("oracle/nativeimage/CharacterSetFeature.class")); } diff --git a/extensions/jdbc/jdbc-oracle/deployment/src/test/java/io/quarkus/jdbc/oracle/deployment/RegexMatchTest.java b/extensions/jdbc/jdbc-oracle/deployment/src/test/java/io/quarkus/jdbc/oracle/deployment/RegexMatchTest.java index 8bef768890a50..49361b3a09086 100644 --- a/extensions/jdbc/jdbc-oracle/deployment/src/test/java/io/quarkus/jdbc/oracle/deployment/RegexMatchTest.java +++ b/extensions/jdbc/jdbc-oracle/deployment/src/test/java/io/quarkus/jdbc/oracle/deployment/RegexMatchTest.java @@ -19,7 +19,7 @@ public class RegexMatchTest { @Test public void jarRegexIsMatching() { - final String EXAMPLE_CLASSPATH = "/home/sanne/sources/quarkus/integration-tests/jpa-oracle/target/quarkus-integration-test-jpa-oracle-999-SNAPSHOT-native-image-source-jar/lib/com.oracle.database.jdbc.ojdbc11-21.3.0.0.jar"; + final String EXAMPLE_CLASSPATH = "/home/sanne/sources/quarkus/integration-tests/jpa-oracle/target/quarkus-integration-test-jpa-oracle-999-SNAPSHOT-native-image-source-jar/lib/com.oracle.database.jdbc.ojdbc17-23.6.0.24.10.jar"; final Pattern pattern = Pattern.compile(OracleMetadataOverrides.DRIVER_JAR_MATCH_REGEX); final Matcher matcher = pattern.matcher(EXAMPLE_CLASSPATH); Assert.assertTrue(matcher.find()); diff --git a/extensions/jdbc/jdbc-oracle/runtime/pom.xml b/extensions/jdbc/jdbc-oracle/runtime/pom.xml index 1bcb3dc8a4802..0e9fb513a07ea 100644 --- a/extensions/jdbc/jdbc-oracle/runtime/pom.xml 
+++ b/extensions/jdbc/jdbc-oracle/runtime/pom.xml @@ -23,7 +23,7 @@ com.oracle.database.jdbc - ojdbc11 + ojdbc17 com.oracle.database.nls diff --git a/extensions/jdbc/jdbc-postgresql/deployment/src/main/java/io/quarkus/jdbc/postgresql/deployment/JDBCPostgreSQLProcessor.java b/extensions/jdbc/jdbc-postgresql/deployment/src/main/java/io/quarkus/jdbc/postgresql/deployment/JDBCPostgreSQLProcessor.java index abc4f56e90893..b88ce76447f7e 100644 --- a/extensions/jdbc/jdbc-postgresql/deployment/src/main/java/io/quarkus/jdbc/postgresql/deployment/JDBCPostgreSQLProcessor.java +++ b/extensions/jdbc/jdbc-postgresql/deployment/src/main/java/io/quarkus/jdbc/postgresql/deployment/JDBCPostgreSQLProcessor.java @@ -14,6 +14,7 @@ import io.quarkus.deployment.builditem.FeatureBuildItem; import io.quarkus.deployment.builditem.NativeImageFeatureBuildItem; import io.quarkus.deployment.builditem.SslNativeConfigBuildItem; +import io.quarkus.deployment.builditem.nativeimage.NativeImageResourceBuildItem; import io.quarkus.deployment.builditem.nativeimage.RuntimeReinitializedClassBuildItem; import io.quarkus.deployment.builditem.nativeimage.ServiceProviderBuildItem; import io.quarkus.deployment.pkg.steps.NativeOrNativeSourcesBuild; @@ -40,9 +41,12 @@ RuntimeReinitializedClassBuildItem runtimeReinitialize() { @BuildStep void registerDriver(BuildProducer jdbcDriver, + BuildProducer resources, SslNativeConfigBuildItem sslNativeConfigBuildItem) { jdbcDriver.produce(new JdbcDriverBuildItem(DatabaseKind.POSTGRESQL, "org.postgresql.Driver", "org.postgresql.xa.PGXADataSource")); + // Accessed in org.postgresql.Driver.loadDefaultProperties + resources.produce(new NativeImageResourceBuildItem("org/postgresql/driverconfig.properties")); } @BuildStep diff --git a/extensions/kafka-client/deployment/pom.xml b/extensions/kafka-client/deployment/pom.xml index 936dd84eaf808..1f153248d8ad3 100644 --- a/extensions/kafka-client/deployment/pom.xml +++ b/extensions/kafka-client/deployment/pom.xml @@ -84,9 +84,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/DevServicesKafkaProcessor.java b/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/DevServicesKafkaProcessor.java index f6a3c35aa8f3d..67b1cbbb63b38 100644 --- a/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/DevServicesKafkaProcessor.java +++ b/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/DevServicesKafkaProcessor.java @@ -305,7 +305,7 @@ private boolean hasKafkaChannelWithoutBootstrapServers() { } private KafkaDevServiceCfg getConfiguration(KafkaBuildTimeConfig cfg) { - KafkaDevServicesBuildTimeConfig devServicesConfig = cfg.devservices; + KafkaDevServicesBuildTimeConfig devServicesConfig = cfg.devservices(); return new KafkaDevServiceCfg(devServicesConfig); } @@ -324,17 +324,17 @@ private static final class KafkaDevServiceCfg { private final RedpandaBuildTimeConfig redpanda; public KafkaDevServiceCfg(KafkaDevServicesBuildTimeConfig config) { - this.devServicesEnabled = config.enabled.orElse(true); - this.provider = config.provider; - this.imageName = config.imageName.orElseGet(provider::getDefaultImageName); - this.fixedExposedPort = config.port.orElse(0); - this.shared = config.shared; - this.serviceName = config.serviceName; - this.topicPartitions = config.topicPartitions; - this.topicPartitionsTimeout = config.topicPartitionsTimeout; - this.containerEnv = 
config.containerEnv; - - this.redpanda = config.redpanda; + this.devServicesEnabled = config.enabled().orElse(true); + this.provider = config.provider(); + this.imageName = config.imageName().orElseGet(provider::getDefaultImageName); + this.fixedExposedPort = config.port().orElse(0); + this.shared = config.shared(); + this.serviceName = config.serviceName(); + this.topicPartitions = config.topicPartitions(); + this.topicPartitionsTimeout = config.topicPartitionsTimeout(); + this.containerEnv = config.containerEnv(); + + this.redpanda = config.redpanda(); } @Override diff --git a/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/KafkaBuildTimeConfig.java b/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/KafkaBuildTimeConfig.java index e84db5c16bc44..c6f88c55ff670 100644 --- a/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/KafkaBuildTimeConfig.java +++ b/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/KafkaBuildTimeConfig.java @@ -1,19 +1,23 @@ package io.quarkus.kafka.client.deployment; import io.quarkus.runtime.annotations.ConfigDocSection; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefault; +import io.smallrye.config.WithName; -@ConfigRoot(name = "kafka", phase = ConfigPhase.BUILD_TIME) -public class KafkaBuildTimeConfig { +@ConfigMapping(prefix = "quarkus.kafka") +@ConfigRoot(phase = ConfigPhase.BUILD_TIME) +public interface KafkaBuildTimeConfig { /** * Whether a health check is published in case the smallrye-health extension is present. *

    * If you enable the health check, you must specify the `kafka.bootstrap.servers` property. */ - @ConfigItem(name = "health.enabled", defaultValue = "false") - public boolean healthEnabled; + @WithName("health.enabled") + @WithDefault("false") + boolean healthEnabled(); /** * Whether to enable Snappy in native mode. @@ -21,24 +25,25 @@ public class KafkaBuildTimeConfig { * Note that Snappy requires GraalVM 21+ and embeds a native library in the native executable. * This library is unpacked and loaded when the application starts. */ - @ConfigItem(name = "snappy.enabled", defaultValue = "false") - public boolean snappyEnabled; + @WithName("snappy.enabled") + @WithDefault("false") + boolean snappyEnabled(); /** * Whether to load the Snappy native library from the shared classloader. * This setting is only used in tests if the tests are using different profiles, which would lead to * unsatisfied link errors when loading Snappy. */ - @ConfigItem(name = "snappy.load-from-shared-classloader", defaultValue = "false") - public boolean snappyLoadFromSharedClassLoader; + @WithName("snappy.load-from-shared-classloader") + @WithDefault("false") + boolean snappyLoadFromSharedClassLoader(); /** * Dev Services. *

    * Dev Services allows Quarkus to automatically start Kafka in dev and test mode. */ - @ConfigItem @ConfigDocSection(generated = true) - public KafkaDevServicesBuildTimeConfig devservices; + KafkaDevServicesBuildTimeConfig devservices(); } diff --git a/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/KafkaDevServicesBuildTimeConfig.java b/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/KafkaDevServicesBuildTimeConfig.java index 4207faf0f521b..bfc9fa1492b55 100644 --- a/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/KafkaDevServicesBuildTimeConfig.java +++ b/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/KafkaDevServicesBuildTimeConfig.java @@ -5,11 +5,9 @@ import java.util.Optional; import io.quarkus.runtime.annotations.ConfigDocMapKey; -import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; +import io.smallrye.config.WithDefault; -@ConfigGroup -public class KafkaDevServicesBuildTimeConfig { +public interface KafkaDevServicesBuildTimeConfig { /** * If Dev Services for Kafka has been explicitly enabled or disabled. Dev Services are generally enabled @@ -17,16 +15,14 @@ public class KafkaDevServicesBuildTimeConfig { * {@code kafka.bootstrap.servers} is set or if all the Reactive Messaging Kafka channel are configured with a * {@code bootstrap.servers}. */ - @ConfigItem - public Optional enabled = Optional.empty(); + Optional enabled(); /** * Optional fixed port the dev service will listen to. *

    * If not defined, the port will be chosen randomly. */ - @ConfigItem - public Optional port; + Optional port(); /** * Kafka dev service container type. @@ -44,10 +40,10 @@ public class KafkaDevServicesBuildTimeConfig { *

    * Note that Strimzi and Kafka Native images are launched in Kraft mode. */ - @ConfigItem(defaultValue = "redpanda") - public Provider provider = Provider.REDPANDA; + @WithDefault("redpanda") + Provider provider(); - public enum Provider { + enum Provider { REDPANDA("docker.io/redpandadata/redpanda:v24.1.2"), STRIMZI("quay.io/strimzi-test-container/test-container:latest-kafka-3.7.0"), KAFKA_NATIVE("quay.io/ogunalp/kafka-native:latest"); @@ -68,8 +64,7 @@ public String getDefaultImageName() { *

    * Dependent on the provider. */ - @ConfigItem - public Optional imageName; + Optional imageName(); /** * Indicates if the Kafka broker managed by Quarkus Dev Services is shared. @@ -82,8 +77,8 @@ public String getDefaultImageName() { *

    * Container sharing is only used in dev mode. */ - @ConfigItem(defaultValue = "true") - public boolean shared; + @WithDefault("true") + boolean shared(); /** * The value of the {@code quarkus-dev-service-kafka} label attached to the started container. @@ -95,8 +90,8 @@ public String getDefaultImageName() { *

    * This property is used when you need multiple shared Kafka brokers. */ - @ConfigItem(defaultValue = "kafka") - public String serviceName; + @WithDefault("kafka") + String serviceName(); /** * The topic-partition pairs to create in the Dev Services Kafka broker. @@ -106,29 +101,26 @@ public String getDefaultImageName() { *

    * The topic creation will not try to re-partition existing topics with different number of partitions. */ - @ConfigItem @ConfigDocMapKey("topic-name") - public Map topicPartitions; + Map topicPartitions(); /** * Timeout for admin client calls used in topic creation. *

    * Defaults to 2 seconds. */ - @ConfigItem(defaultValue = "2S") - public Duration topicPartitionsTimeout; + @WithDefault("2S") + Duration topicPartitionsTimeout(); /** * Environment variables that are passed to the container. */ - @ConfigItem @ConfigDocMapKey("environment-variable-name") - public Map containerEnv; + Map containerEnv(); /** * Allows configuring the Redpanda broker. */ - @ConfigItem - public RedpandaBuildTimeConfig redpanda; + RedpandaBuildTimeConfig redpanda(); } diff --git a/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/KafkaProcessor.java b/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/KafkaProcessor.java index 70c2cd59df207..8ab081e9d304e 100644 --- a/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/KafkaProcessor.java +++ b/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/KafkaProcessor.java @@ -325,7 +325,7 @@ public void handleSnappyInNative(BuildProducer reflect void loadSnappyIfEnabled(LaunchModeBuildItem launch, SnappyRecorder recorder, KafkaBuildTimeConfig config) { boolean loadFromSharedClassLoader = false; if (launch.isTest()) { - loadFromSharedClassLoader = config.snappyLoadFromSharedClassLoader; + loadFromSharedClassLoader = config.snappyLoadFromSharedClassLoader(); } recorder.loadSnappy(loadFromSharedClassLoader); } @@ -482,7 +482,7 @@ public void accept(ClassInfo c) { @BuildStep HealthBuildItem addHealthCheck(KafkaBuildTimeConfig buildTimeConfig) { return new HealthBuildItem("io.quarkus.kafka.client.health.KafkaHealthCheck", - buildTimeConfig.healthEnabled); + buildTimeConfig.healthEnabled()); } @BuildStep @@ -546,7 +546,7 @@ public HasSnappy(KafkaBuildTimeConfig config) { @Override public boolean getAsBoolean() { - return QuarkusClassLoader.isClassPresentAtRuntime("org.xerial.snappy.OSInfo") && config.snappyEnabled; + return QuarkusClassLoader.isClassPresentAtRuntime("org.xerial.snappy.OSInfo") && config.snappyEnabled(); } } diff --git a/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/RedpandaBuildTimeConfig.java b/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/RedpandaBuildTimeConfig.java index ab9d1345f8633..13d35908fb26e 100644 --- a/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/RedpandaBuildTimeConfig.java +++ b/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/RedpandaBuildTimeConfig.java @@ -2,8 +2,7 @@ import java.util.Optional; -import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; +import io.smallrye.config.WithDefault; /** * Allows configuring the Redpanda broker. @@ -11,8 +10,7 @@ * * Find more info about Redpanda on https://redpanda.com/. */ -@ConfigGroup -public class RedpandaBuildTimeConfig { +public interface RedpandaBuildTimeConfig { /** * Enables transaction support. @@ -28,14 +26,13 @@ public class RedpandaBuildTimeConfig { * KIP-360 (Improve reliability of * idempotent/transactional producer) are not supported. */ - @ConfigItem(defaultValue = "true") - public boolean transactionEnabled; + @WithDefault("true") + boolean transactionEnabled(); /** * Port to access the Redpanda HTTP Proxy (pandaproxy). *

    * If not defined, the port will be chosen randomly. */ - @ConfigItem - public Optional proxyPort; + Optional proxyPort(); } diff --git a/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/RedpandaKafkaContainer.java b/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/RedpandaKafkaContainer.java index 8dca5de6d1c12..ae8ff79af7b95 100644 --- a/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/RedpandaKafkaContainer.java +++ b/extensions/kafka-client/deployment/src/main/java/io/quarkus/kafka/client/deployment/RedpandaKafkaContainer.java @@ -60,7 +60,7 @@ protected void containerIsStarting(InspectContainerResponse containerInfo, boole command += String.format("--kafka-addr %s ", getKafkaAddresses()); command += String.format("--advertise-kafka-addr %s ", getKafkaAdvertisedAddresses()); command += "--set redpanda.auto_create_topics_enabled=true "; - if (redpandaConfig.transactionEnabled) { + if (redpandaConfig.transactionEnabled()) { command += "--set redpanda.enable_idempotence=true "; command += "--set redpanda.enable_transactions=true "; } @@ -104,8 +104,8 @@ protected void configure() { addFixedExposedPort(fixedExposedPort, DevServicesKafkaProcessor.KAFKA_PORT); } - if (redpandaConfig.proxyPort.isPresent()) { - addFixedExposedPort(redpandaConfig.proxyPort.get(), PANDAPROXY_PORT); + if (redpandaConfig.proxyPort().isPresent()) { + addFixedExposedPort(redpandaConfig.proxyPort().get(), PANDAPROXY_PORT); } else { addExposedPort(PANDAPROXY_PORT); } diff --git a/extensions/kafka-client/runtime/pom.xml b/extensions/kafka-client/runtime/pom.xml index 6571aa714bb34..de94c0e1ac346 100644 --- a/extensions/kafka-client/runtime/pom.xml +++ b/extensions/kafka-client/runtime/pom.xml @@ -101,9 +101,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/kafka-streams/deployment/pom.xml b/extensions/kafka-streams/deployment/pom.xml index 392701b2a2a75..d391443cf2f51 100644 --- a/extensions/kafka-streams/deployment/pom.xml +++ b/extensions/kafka-streams/deployment/pom.xml @@ -66,9 +66,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/kafka-streams/deployment/src/main/java/io/quarkus/kafka/streams/deployment/KafkaStreamsBuildTimeConfig.java b/extensions/kafka-streams/deployment/src/main/java/io/quarkus/kafka/streams/deployment/KafkaStreamsBuildTimeConfig.java index ef449bef60cfd..a1e931cc1c95f 100644 --- a/extensions/kafka-streams/deployment/src/main/java/io/quarkus/kafka/streams/deployment/KafkaStreamsBuildTimeConfig.java +++ b/extensions/kafka-streams/deployment/src/main/java/io/quarkus/kafka/streams/deployment/KafkaStreamsBuildTimeConfig.java @@ -1,15 +1,19 @@ package io.quarkus.kafka.streams.deployment; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefault; +import io.smallrye.config.WithName; -@ConfigRoot(name = "kafka-streams", phase = ConfigPhase.BUILD_TIME) -public class KafkaStreamsBuildTimeConfig { +@ConfigMapping(prefix = "quarkus.kafka-streams") +@ConfigRoot(phase = ConfigPhase.BUILD_TIME) +public interface KafkaStreamsBuildTimeConfig { /** * Whether a health check is published in case the smallrye-health extension is present (defaults to true). 
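For readers not familiar with the interface-based configuration style this patch migrates to, the following is a minimal sketch of the pattern, not part of the change itself: the interface name, prefix, and properties are invented, and only the annotations (@ConfigMapping, @ConfigRoot, @WithDefault, @WithName) mirror the ones used in the hunks above. A build step then reads values through accessor calls such as buildTimeConfig.healthEnabled(), as the KafkaProcessor changes show.

package org.acme.example;

import java.time.Duration;
import java.util.Map;
import java.util.Optional;

import io.quarkus.runtime.annotations.ConfigPhase;
import io.quarkus.runtime.annotations.ConfigRoot;
import io.smallrye.config.ConfigMapping;
import io.smallrye.config.WithDefault;
import io.smallrye.config.WithName;

/**
 * Hypothetical build-time configuration written in the @ConfigMapping style:
 * an interface with one accessor per property instead of a class with
 * public @ConfigItem fields.
 */
@ConfigMapping(prefix = "quarkus.example")
@ConfigRoot(phase = ConfigPhase.BUILD_TIME)
public interface ExampleBuildTimeConfig {

    /**
     * Whether the example feature is enabled (maps to quarkus.example.health.enabled).
     */
    @WithName("health.enabled")
    @WithDefault("true")
    boolean healthEnabled();

    /**
     * Timeout used by the example feature; the default is given in string form
     * and parsed as a Duration of two seconds.
     */
    @WithDefault("2S")
    Duration timeout();

    /**
     * Free-form key/value pairs, e.g. quarkus.example.env.FOO=bar.
     */
    @WithName("env")
    Map<String, String> env();

    /**
     * A property with no default; absent unless the user sets it.
     */
    Optional<Integer> proxyPort();
}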
*/ - @ConfigItem(name = "health.enabled", defaultValue = "true") - public boolean healthEnabled; + @WithName("health.enabled") + @WithDefault("true") + boolean healthEnabled(); } diff --git a/extensions/kafka-streams/deployment/src/main/java/io/quarkus/kafka/streams/deployment/KafkaStreamsProcessor.java b/extensions/kafka-streams/deployment/src/main/java/io/quarkus/kafka/streams/deployment/KafkaStreamsProcessor.java index 71ff4ebc25cf9..d12f73a7c6c27 100644 --- a/extensions/kafka-streams/deployment/src/main/java/io/quarkus/kafka/streams/deployment/KafkaStreamsProcessor.java +++ b/extensions/kafka-streams/deployment/src/main/java/io/quarkus/kafka/streams/deployment/KafkaStreamsProcessor.java @@ -9,7 +9,9 @@ import org.apache.kafka.common.serialization.Serdes.ByteArraySerde; import org.apache.kafka.streams.StreamsConfig; import org.apache.kafka.streams.errors.DefaultProductionExceptionHandler; +import org.apache.kafka.streams.errors.LogAndContinueProcessingExceptionHandler; import org.apache.kafka.streams.errors.LogAndFailExceptionHandler; +import org.apache.kafka.streams.errors.LogAndFailProcessingExceptionHandler; import org.apache.kafka.streams.processor.FailOnInvalidTimestamp; import org.apache.kafka.streams.processor.internals.DefaultKafkaClientSupplier; import org.apache.kafka.streams.processor.internals.StreamsPartitionAssignor; @@ -82,10 +84,15 @@ private void registerCompulsoryClasses(BuildProducer r .build()); reflectiveClasses.produce(ReflectiveClassBuildItem.builder( org.apache.kafka.streams.processor.internals.assignment.HighAvailabilityTaskAssignor.class, - org.apache.kafka.streams.processor.internals.assignment.StickyTaskAssignor.class, + org.apache.kafka.streams.processor.internals.assignment.LegacyStickyTaskAssignor.class, org.apache.kafka.streams.processor.internals.assignment.FallbackPriorTaskAssignor.class) .reason(getClass().getName()) .methods().fields().build()); + // for backwards compatibility with < Kafka 3.9.0 + reflectiveClasses.produce(ReflectiveClassBuildItem.builder( + "org.apache.kafka.streams.processor.internals.assignment.StickyTaskAssignor") + .reason(getClass().getName()) + .methods().fields().build()); // See https://github.com/quarkusio/quarkus/issues/23404 reflectiveClasses.produce(ReflectiveClassBuildItem .builder("org.apache.kafka.streams.processor.internals.StateDirectory$StateDirectoryProcessFile") @@ -98,12 +105,19 @@ private void registerCompulsoryClasses(BuildProducer r org.apache.kafka.streams.state.BuiltInDslStoreSuppliers.InMemoryDslStoreSuppliers.class) .reason(getClass().getName()) .build()); + reflectiveClasses.produce(ReflectiveClassBuildItem + .builder(org.apache.kafka.streams.errors.LogAndFailProcessingExceptionHandler.class, + org.apache.kafka.streams.errors.LogAndContinueProcessingExceptionHandler.class) + .reason(getClass().getName()) + .methods().fields().build()); } private void registerClassesThatClientMaySpecify(BuildProducer reflectiveClasses, LaunchModeBuildItem launchMode) { Properties properties = buildKafkaStreamsProperties(launchMode.getLaunchMode()); - registerExceptionHandler(reflectiveClasses, properties); + registerDeserializationExceptionHandler(reflectiveClasses, properties); + registerProcessingExceptionHandler(reflectiveClasses, properties); + registerProductionExceptionHandler(reflectiveClasses, properties); registerDefaultSerdes(reflectiveClasses, properties); registerDslStoreSupplier(reflectiveClasses, properties); } @@ -121,13 +135,15 @@ private void registerDslStoreSupplier(BuildProducer re } } - private void 
registerExceptionHandler(BuildProducer reflectiveClasses, + private void registerDeserializationExceptionHandler(BuildProducer reflectiveClasses, Properties kafkaStreamsProperties) { String exceptionHandlerClassName = kafkaStreamsProperties .getProperty(StreamsConfig.DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG); if (exceptionHandlerClassName == null) { - registerDefaultExceptionHandler(reflectiveClasses); + reflectiveClasses.produce(ReflectiveClassBuildItem.builder(LogAndFailExceptionHandler.class) + .reason(getClass().getName()) + .build()); } else { reflectiveClasses.produce( ReflectiveClassBuildItem.builder(exceptionHandlerClassName) @@ -136,10 +152,41 @@ private void registerExceptionHandler(BuildProducer re } } - private void registerDefaultExceptionHandler(BuildProducer reflectiveClasses) { - reflectiveClasses.produce(ReflectiveClassBuildItem.builder(LogAndFailExceptionHandler.class) - .reason(getClass().getName()) - .build()); + private void registerProcessingExceptionHandler(BuildProducer reflectiveClasses, + Properties kafkaStreamsProperties) { + String processingExceptionHandlerClassName = kafkaStreamsProperties + .getProperty(StreamsConfig.PROCESSING_EXCEPTION_HANDLER_CLASS_CONFIG); + + if (processingExceptionHandlerClassName == null) { + reflectiveClasses.produce( + ReflectiveClassBuildItem.builder(LogAndFailProcessingExceptionHandler.class, + LogAndContinueProcessingExceptionHandler.class) + .reason(getClass().getName()) + .build()); + } else { + reflectiveClasses.produce( + ReflectiveClassBuildItem.builder(processingExceptionHandlerClassName) + .reason(getClass().getName()) + .build()); + } + } + + private void registerProductionExceptionHandler(BuildProducer reflectiveClasses, + Properties kafkaStreamsProperties) { + String productionExceptionHandlerClassName = kafkaStreamsProperties + .getProperty(StreamsConfig.DEFAULT_PRODUCTION_EXCEPTION_HANDLER_CLASS_CONFIG); + + if (productionExceptionHandlerClassName == null) { + reflectiveClasses.produce( + ReflectiveClassBuildItem.builder(DefaultProductionExceptionHandler.class) + .reason(getClass().getName()) + .build()); + } else { + reflectiveClasses.produce( + ReflectiveClassBuildItem.builder(productionExceptionHandlerClassName) + .reason(getClass().getName()) + .build()); + } } private void registerDefaultSerdes(BuildProducer reflectiveClasses, @@ -213,11 +260,11 @@ void addHealthChecks(KafkaStreamsBuildTimeConfig buildTimeConfig, BuildProducer< healthChecks.produce( new HealthBuildItem( "io.quarkus.kafka.streams.runtime.health.KafkaStreamsTopicsHealthCheck", - buildTimeConfig.healthEnabled)); + buildTimeConfig.healthEnabled())); healthChecks.produce( new HealthBuildItem( "io.quarkus.kafka.streams.runtime.health.KafkaStreamsStateHealthCheck", - buildTimeConfig.healthEnabled)); + buildTimeConfig.healthEnabled())); } @BuildStep diff --git a/extensions/kafka-streams/runtime/pom.xml b/extensions/kafka-streams/runtime/pom.xml index e89e81725e8f4..f3c807cec5ee4 100644 --- a/extensions/kafka-streams/runtime/pom.xml +++ b/extensions/kafka-streams/runtime/pom.xml @@ -76,9 +76,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/KafkaStreamsProducer.java b/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/KafkaStreamsProducer.java index 078fa15e338f7..04974867961bc 100644 --- a/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/KafkaStreamsProducer.java +++ 
b/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/KafkaStreamsProducer.java @@ -93,7 +93,7 @@ public KafkaStreamsProducer(KafkaStreamsSupport kafkaStreamsSupport, KafkaStream Properties buildTimeProperties = kafkaStreamsSupport.getProperties(); - String bootstrapServersConfig = asString(runtimeConfig.bootstrapServers); + String bootstrapServersConfig = asString(runtimeConfig.bootstrapServers()); if (DEFAULT_KAFKA_BROKER.equalsIgnoreCase(bootstrapServersConfig)) { // Try to see if kafka.bootstrap.servers is set, if so, use that value, if not, keep localhost:9092 bootstrapServersConfig = ConfigProvider.getConfig().getOptionalValue("kafka.bootstrap.servers", String.class) @@ -109,7 +109,7 @@ public KafkaStreamsProducer(KafkaStreamsSupport kafkaStreamsSupport, KafkaStream this.executorService = executorService; - this.topicsTimeout = runtimeConfig.topicsTimeout; + this.topicsTimeout = runtimeConfig.topicsTimeout(); this.trimmedTopics = isTopicsCheckEnabled() ? runtimeConfig.getTrimmedTopics() : Collections.emptyList(); this.streamsConfig = new StreamsConfig(kafkaStreamsProperties); this.kafkaStreams = initializeKafkaStreams(streamsConfig, topology.get(), @@ -217,66 +217,68 @@ private static Properties getStreamsProperties(Properties properties, // add runtime options streamsProperties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServersConfig); - streamsProperties.put(StreamsConfig.APPLICATION_ID_CONFIG, runtimeConfig.applicationId); + streamsProperties.put(StreamsConfig.APPLICATION_ID_CONFIG, runtimeConfig.applicationId()); // app id - if (runtimeConfig.applicationServer.isPresent()) { - streamsProperties.put(StreamsConfig.APPLICATION_SERVER_CONFIG, runtimeConfig.applicationServer.get()); + if (runtimeConfig.applicationServer().isPresent()) { + streamsProperties.put(StreamsConfig.APPLICATION_SERVER_CONFIG, runtimeConfig.applicationServer().get()); } // schema registry - if (runtimeConfig.schemaRegistryUrl.isPresent()) { - streamsProperties.put(runtimeConfig.schemaRegistryKey, runtimeConfig.schemaRegistryUrl.get()); + if (runtimeConfig.schemaRegistryUrl().isPresent()) { + streamsProperties.put(runtimeConfig.schemaRegistryKey(), runtimeConfig.schemaRegistryUrl().get()); } // set the security protocol (in case we are doing PLAIN_TEXT) - setProperty(runtimeConfig.securityProtocol, streamsProperties, CommonClientConfigs.SECURITY_PROTOCOL_CONFIG); + setProperty(runtimeConfig.securityProtocol(), streamsProperties, CommonClientConfigs.SECURITY_PROTOCOL_CONFIG); // sasl - SaslConfig sc = runtimeConfig.sasl; + SaslConfig sc = runtimeConfig.sasl(); if (sc != null) { - setProperty(sc.mechanism, streamsProperties, SaslConfigs.SASL_MECHANISM); + setProperty(sc.mechanism(), streamsProperties, SaslConfigs.SASL_MECHANISM); - setProperty(sc.jaasConfig, streamsProperties, SaslConfigs.SASL_JAAS_CONFIG); + setProperty(sc.jaasConfig(), streamsProperties, SaslConfigs.SASL_JAAS_CONFIG); - setProperty(sc.clientCallbackHandlerClass, streamsProperties, SaslConfigs.SASL_CLIENT_CALLBACK_HANDLER_CLASS); + setProperty(sc.clientCallbackHandlerClass(), streamsProperties, SaslConfigs.SASL_CLIENT_CALLBACK_HANDLER_CLASS); - setProperty(sc.loginCallbackHandlerClass, streamsProperties, SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS); - setProperty(sc.loginClass, streamsProperties, SaslConfigs.SASL_LOGIN_CLASS); + setProperty(sc.loginCallbackHandlerClass(), streamsProperties, SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS); + setProperty(sc.loginClass(), streamsProperties, 
SaslConfigs.SASL_LOGIN_CLASS); - setProperty(sc.kerberosServiceName, streamsProperties, SaslConfigs.SASL_KERBEROS_SERVICE_NAME); - setProperty(sc.kerberosKinitCmd, streamsProperties, SaslConfigs.SASL_KERBEROS_KINIT_CMD); - setProperty(sc.kerberosTicketRenewWindowFactor, streamsProperties, + setProperty(sc.kerberosServiceName(), streamsProperties, SaslConfigs.SASL_KERBEROS_SERVICE_NAME); + setProperty(sc.kerberosKinitCmd(), streamsProperties, SaslConfigs.SASL_KERBEROS_KINIT_CMD); + setProperty(sc.kerberosTicketRenewWindowFactor(), streamsProperties, SaslConfigs.SASL_KERBEROS_TICKET_RENEW_WINDOW_FACTOR); - setProperty(sc.kerberosTicketRenewJitter, streamsProperties, SaslConfigs.SASL_KERBEROS_TICKET_RENEW_JITTER); - setProperty(sc.kerberosMinTimeBeforeRelogin, streamsProperties, SaslConfigs.SASL_KERBEROS_MIN_TIME_BEFORE_RELOGIN); + setProperty(sc.kerberosTicketRenewJitter(), streamsProperties, SaslConfigs.SASL_KERBEROS_TICKET_RENEW_JITTER); + setProperty(sc.kerberosMinTimeBeforeRelogin(), streamsProperties, + SaslConfigs.SASL_KERBEROS_MIN_TIME_BEFORE_RELOGIN); - setProperty(sc.loginRefreshWindowFactor, streamsProperties, SaslConfigs.SASL_LOGIN_REFRESH_WINDOW_FACTOR); - setProperty(sc.loginRefreshWindowJitter, streamsProperties, SaslConfigs.SASL_LOGIN_REFRESH_WINDOW_JITTER); + setProperty(sc.loginRefreshWindowFactor(), streamsProperties, SaslConfigs.SASL_LOGIN_REFRESH_WINDOW_FACTOR); + setProperty(sc.loginRefreshWindowJitter(), streamsProperties, SaslConfigs.SASL_LOGIN_REFRESH_WINDOW_JITTER); - setProperty(sc.loginRefreshMinPeriod, streamsProperties, SaslConfigs.SASL_LOGIN_REFRESH_MIN_PERIOD_SECONDS, + setProperty(sc.loginRefreshMinPeriod(), streamsProperties, SaslConfigs.SASL_LOGIN_REFRESH_MIN_PERIOD_SECONDS, DurationToSecondsFunction.INSTANCE); - setProperty(sc.loginRefreshBuffer, streamsProperties, SaslConfigs.SASL_LOGIN_REFRESH_BUFFER_SECONDS, + setProperty(sc.loginRefreshBuffer(), streamsProperties, SaslConfigs.SASL_LOGIN_REFRESH_BUFFER_SECONDS, DurationToSecondsFunction.INSTANCE); } // ssl - SslConfig ssl = runtimeConfig.ssl; + SslConfig ssl = runtimeConfig.ssl(); if (ssl != null) { - setProperty(ssl.protocol, streamsProperties, SslConfigs.SSL_PROTOCOL_CONFIG); - setProperty(ssl.provider, streamsProperties, SslConfigs.SSL_PROVIDER_CONFIG); - setProperty(ssl.cipherSuites, streamsProperties, SslConfigs.SSL_CIPHER_SUITES_CONFIG); - setProperty(ssl.enabledProtocols, streamsProperties, SslConfigs.SSL_ENABLED_PROTOCOLS_CONFIG); - - setTrustStoreConfig(ssl.truststore, streamsProperties); - setKeyStoreConfig(ssl.keystore, streamsProperties); - setKeyConfig(ssl.key, streamsProperties); - - setProperty(ssl.keymanagerAlgorithm, streamsProperties, SslConfigs.SSL_KEYMANAGER_ALGORITHM_CONFIG); - setProperty(ssl.trustmanagerAlgorithm, streamsProperties, SslConfigs.SSL_TRUSTMANAGER_ALGORITHM_CONFIG); - Optional eia = Optional.of(ssl.endpointIdentificationAlgorithm.orElse("")); + setProperty(ssl.protocol(), streamsProperties, SslConfigs.SSL_PROTOCOL_CONFIG); + setProperty(ssl.provider(), streamsProperties, SslConfigs.SSL_PROVIDER_CONFIG); + setProperty(ssl.cipherSuites(), streamsProperties, SslConfigs.SSL_CIPHER_SUITES_CONFIG); + setProperty(ssl.enabledProtocols(), streamsProperties, SslConfigs.SSL_ENABLED_PROTOCOLS_CONFIG); + + setTrustStoreConfig(ssl.truststore(), streamsProperties); + setKeyStoreConfig(ssl.keystore(), streamsProperties); + setKeyConfig(ssl.key(), streamsProperties); + + setProperty(ssl.keymanagerAlgorithm(), streamsProperties, SslConfigs.SSL_KEYMANAGER_ALGORITHM_CONFIG); + 
setProperty(ssl.trustmanagerAlgorithm(), streamsProperties, SslConfigs.SSL_TRUSTMANAGER_ALGORITHM_CONFIG); + Optional eia = Optional.of(ssl.endpointIdentificationAlgorithm().orElse("")); setProperty(eia, streamsProperties, SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG); - setProperty(ssl.secureRandomImplementation, streamsProperties, SslConfigs.SSL_SECURE_RANDOM_IMPLEMENTATION_CONFIG); + setProperty(ssl.secureRandomImplementation(), streamsProperties, + SslConfigs.SSL_SECURE_RANDOM_IMPLEMENTATION_CONFIG); } return streamsProperties; @@ -284,26 +286,26 @@ private static Properties getStreamsProperties(Properties properties, private static void setTrustStoreConfig(TrustStoreConfig tsc, Properties properties) { if (tsc != null) { - setProperty(tsc.type, properties, SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG); - setProperty(tsc.location, properties, SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG); - setProperty(tsc.password, properties, SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG); - setProperty(tsc.certificates, properties, SslConfigs.SSL_TRUSTSTORE_CERTIFICATES_CONFIG); + setProperty(tsc.type(), properties, SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG); + setProperty(tsc.location(), properties, SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG); + setProperty(tsc.password(), properties, SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG); + setProperty(tsc.certificates(), properties, SslConfigs.SSL_TRUSTSTORE_CERTIFICATES_CONFIG); } } private static void setKeyStoreConfig(KeyStoreConfig ksc, Properties properties) { if (ksc != null) { - setProperty(ksc.type, properties, SslConfigs.SSL_KEYSTORE_TYPE_CONFIG); - setProperty(ksc.location, properties, SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG); - setProperty(ksc.password, properties, SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG); - setProperty(ksc.key, properties, SslConfigs.SSL_KEYSTORE_KEY_CONFIG); - setProperty(ksc.certificateChain, properties, SslConfigs.SSL_KEYSTORE_CERTIFICATE_CHAIN_CONFIG); + setProperty(ksc.type(), properties, SslConfigs.SSL_KEYSTORE_TYPE_CONFIG); + setProperty(ksc.location(), properties, SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG); + setProperty(ksc.password(), properties, SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG); + setProperty(ksc.key(), properties, SslConfigs.SSL_KEYSTORE_KEY_CONFIG); + setProperty(ksc.certificateChain(), properties, SslConfigs.SSL_KEYSTORE_CERTIFICATE_CHAIN_CONFIG); } } private static void setKeyConfig(KeyConfig kc, Properties properties) { if (kc != null) { - setProperty(kc.password, properties, SslConfigs.SSL_KEY_PASSWORD_CONFIG); + setProperty(kc.password(), properties, SslConfigs.SSL_KEY_PASSWORD_CONFIG); } } diff --git a/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/KafkaStreamsRuntimeConfig.java b/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/KafkaStreamsRuntimeConfig.java index 7a5b58d54a580..70006ea43849e 100644 --- a/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/KafkaStreamsRuntimeConfig.java +++ b/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/KafkaStreamsRuntimeConfig.java @@ -6,52 +6,53 @@ import java.util.Optional; import java.util.stream.Collectors; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefault; +import io.smallrye.config.WithName; -@ConfigRoot(name = "kafka-streams", phase = ConfigPhase.RUN_TIME) -public 
class KafkaStreamsRuntimeConfig { +@ConfigMapping(prefix = "quarkus.kafka-streams") +@ConfigRoot(phase = ConfigPhase.RUN_TIME) +public interface KafkaStreamsRuntimeConfig { /** * Default Kafka bootstrap server. */ - public static final String DEFAULT_KAFKA_BROKER = "localhost:9092"; + String DEFAULT_KAFKA_BROKER = "localhost:9092"; /** * A unique identifier for this Kafka Streams application. * If not set, defaults to quarkus.application.name. */ - @ConfigItem(defaultValue = "${quarkus.application.name}") - public String applicationId; + @WithDefault("${quarkus.application.name}") + String applicationId(); /** * A comma-separated list of host:port pairs identifying the Kafka bootstrap server(s). * If not set, fallback to {@code kafka.bootstrap.servers}, and if not set either use {@code localhost:9092}. */ - @ConfigItem(defaultValue = DEFAULT_KAFKA_BROKER) - public List bootstrapServers; + @WithDefault(DEFAULT_KAFKA_BROKER) + List bootstrapServers(); /** * A unique identifier of this application instance, typically in the form host:port. */ - @ConfigItem - public Optional applicationServer; + Optional applicationServer(); /** * A comma-separated list of topic names. * The pipeline will only be started once all these topics are present in the Kafka cluster * and {@code ignore.topics} is set to false. */ - @ConfigItem - public Optional> topics; + Optional> topics(); /** * Timeout to wait for topic names to be returned from admin client. * If set to 0 (or negative), {@code topics} check is ignored. */ - @ConfigItem(defaultValue = "10S") - public Duration topicsTimeout; + @WithDefault("10S") + Duration topicsTimeout(); /** * The schema registry key. @@ -62,48 +63,33 @@ public class KafkaStreamsRuntimeConfig { * For Apicurio Registry, use {@code apicurio.registry.url}. * For Confluent schema registry, use {@code schema.registry.url}. */ - @ConfigItem(defaultValue = "schema.registry.url") - public String schemaRegistryKey; + @WithDefault("schema.registry.url") + String schemaRegistryKey(); /** * The schema registry URL. */ - @ConfigItem - public Optional schemaRegistryUrl; + Optional schemaRegistryUrl(); /** * The security protocol to use * See https://docs.confluent.io/current/streams/developer-guide/security.html#security-example */ - @ConfigItem(name = "security.protocol") - public Optional securityProtocol; + @WithName("security.protocol") + Optional securityProtocol(); /** * The SASL JAAS config. 
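The KafkaStreamsProducer changes above only swap field reads for accessor calls; the surrounding pattern of copying optional config values into the Kafka Properties stays the same. Below is a simplified, self-contained stand-in for that pattern; the setProperty helper and the property keys shown here are illustrations, not the extension's actual implementation.

import java.time.Duration;
import java.util.Optional;
import java.util.Properties;
import java.util.function.Function;

final class ConfigToProperties {

    // Copy an optional config value into the Properties under the given Kafka key.
    static <T> void setProperty(Optional<T> value, Properties props, String key) {
        value.ifPresent(v -> props.put(key, v.toString()));
    }

    // Variant that converts the value first, e.g. a Duration into whole seconds.
    static <T> void setProperty(Optional<T> value, Properties props, String key,
            Function<T, String> converter) {
        value.ifPresent(v -> props.put(key, converter.apply(v)));
    }

    public static void main(String[] args) {
        Properties props = new Properties();

        // Hypothetical accessor results, standing in for runtimeConfig.sasl().mechanism() etc.
        Optional<String> mechanism = Optional.of("PLAIN");
        Optional<Duration> refreshBuffer = Optional.of(Duration.ofSeconds(300));

        setProperty(mechanism, props, "sasl.mechanism");
        setProperty(refreshBuffer, props, "sasl.login.refresh.buffer.seconds",
                d -> String.valueOf(d.getSeconds()));

        // Prints the two entries, e.g. {sasl.mechanism=PLAIN, sasl.login.refresh.buffer.seconds=300}
        System.out.println(props);
    }
}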
*/ - public SaslConfig sasl; + SaslConfig sasl(); /** * Kafka SSL config */ - public SslConfig ssl; + SslConfig ssl(); - @Override - public String toString() { - return "KafkaStreamsRuntimeConfig{" + - "applicationId='" + applicationId + '\'' + - ", bootstrapServers=" + bootstrapServers + - ", applicationServer=" + applicationServer + - ", topics=" + topics + - ", schemaRegistryKey='" + schemaRegistryKey + '\'' + - ", schemaRegistryUrl=" + schemaRegistryUrl + - ", sasl=" + sasl + - ", ssl=" + ssl + - '}'; - } - - public List getTrimmedTopics() { - return topics.orElseThrow(() -> new IllegalArgumentException("Missing list of topics")) + default List getTrimmedTopics() { + return topics().orElseThrow(() -> new IllegalArgumentException("Missing list of topics")) .stream().map(String::trim).collect(Collectors.toList()); } } diff --git a/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/KeyConfig.java b/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/KeyConfig.java index dc8d9f88eb828..449f4f796e65d 100644 --- a/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/KeyConfig.java +++ b/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/KeyConfig.java @@ -3,13 +3,11 @@ import java.util.Optional; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; @ConfigGroup -public class KeyConfig { +public interface KeyConfig { /** * Password of the private key in the key store */ - @ConfigItem - public Optional password; + Optional password(); } diff --git a/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/KeyStoreConfig.java b/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/KeyStoreConfig.java index 94be06c846141..3a25173c8b184 100644 --- a/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/KeyStoreConfig.java +++ b/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/KeyStoreConfig.java @@ -3,37 +3,31 @@ import java.util.Optional; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; @ConfigGroup -public class KeyStoreConfig { +public interface KeyStoreConfig { /** * Key store type */ - @ConfigItem - public Optional type; + Optional type(); /** * Key store location */ - @ConfigItem - public Optional location; + Optional location(); /** * Key store password */ - @ConfigItem - public Optional password; + Optional password(); /** * Key store private key */ - @ConfigItem - public Optional key; + Optional key(); /** * Key store certificate chain */ - @ConfigItem - public Optional certificateChain; + Optional certificateChain(); } diff --git a/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/SaslConfig.java b/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/SaslConfig.java index 79dd6ac72efe5..b1b6f2e415718 100644 --- a/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/SaslConfig.java +++ b/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/SaslConfig.java @@ -4,94 +4,79 @@ import java.util.Optional; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; @ConfigGroup -public class SaslConfig { +public interface SaslConfig { /** * SASL mechanism used for client connections */ - @ConfigItem - public Optional mechanism; + 
Optional mechanism(); /** * JAAS login context parameters for SASL connections in the format used by JAAS configuration files */ - @ConfigItem - public Optional jaasConfig; + Optional jaasConfig(); /** * The fully qualified name of a SASL client callback handler class */ - @ConfigItem - public Optional clientCallbackHandlerClass; + Optional clientCallbackHandlerClass(); /** * The fully qualified name of a SASL login callback handler class */ - @ConfigItem - public Optional loginCallbackHandlerClass; + Optional loginCallbackHandlerClass(); /** * The fully qualified name of a class that implements the Login interface */ - @ConfigItem - public Optional loginClass; + Optional loginClass(); /** * The Kerberos principal name that Kafka runs as */ - @ConfigItem - public Optional kerberosServiceName; + Optional kerberosServiceName(); /** * Kerberos kinit command path */ - @ConfigItem - public Optional kerberosKinitCmd; + Optional kerberosKinitCmd(); /** * Login thread will sleep until the specified window factor of time from last refresh */ - @ConfigItem - public Optional kerberosTicketRenewWindowFactor; + Optional kerberosTicketRenewWindowFactor(); /** * Percentage of random jitter added to the renewal time */ - @ConfigItem - public Optional kerberosTicketRenewJitter; + Optional kerberosTicketRenewJitter(); /** * Percentage of random jitter added to the renewal time */ - @ConfigItem - public Optional kerberosMinTimeBeforeRelogin; + Optional kerberosMinTimeBeforeRelogin(); /** * Login refresh thread will sleep until the specified window factor relative to the * credential's lifetime has been reached- */ - @ConfigItem - public Optional loginRefreshWindowFactor; + Optional loginRefreshWindowFactor(); /** * The maximum amount of random jitter relative to the credential's lifetime */ - @ConfigItem - public Optional loginRefreshWindowJitter; + Optional loginRefreshWindowJitter(); /** * The desired minimum duration for the login refresh thread to wait before refreshing a credential */ - @ConfigItem - public Optional loginRefreshMinPeriod; + Optional loginRefreshMinPeriod(); /** * The amount of buffer duration before credential expiration to maintain when refreshing a credential */ - @ConfigItem - public Optional loginRefreshBuffer; + Optional loginRefreshBuffer(); } diff --git a/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/SslConfig.java b/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/SslConfig.java index 654dec481084c..8027024e9eee5 100644 --- a/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/SslConfig.java +++ b/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/SslConfig.java @@ -3,71 +3,64 @@ import java.util.Optional; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; +import io.smallrye.config.WithDefault; @ConfigGroup -public class SslConfig { +public interface SslConfig { /** * The SSL protocol used to generate the SSLContext */ - @ConfigItem - public Optional protocol; + Optional protocol(); /** * The name of the security provider used for SSL connections */ - @ConfigItem - public Optional provider; + Optional provider(); /** * A list of cipher suites */ - @ConfigItem - public Optional cipherSuites; + Optional cipherSuites(); /** * The list of protocols enabled for SSL connections */ - @ConfigItem - public Optional enabledProtocols; + Optional enabledProtocols(); /** * Truststore config */ - public TrustStoreConfig 
truststore; + TrustStoreConfig truststore(); /** * Keystore config */ - public KeyStoreConfig keystore; + KeyStoreConfig keystore(); /** * Key config */ - public KeyConfig key; + KeyConfig key(); /** * The algorithm used by key manager factory for SSL connections */ - @ConfigItem - public Optional keymanagerAlgorithm; + Optional keymanagerAlgorithm(); /** * The algorithm used by trust manager factory for SSL connections */ - @ConfigItem - public Optional trustmanagerAlgorithm; + Optional trustmanagerAlgorithm(); /** * The endpoint identification algorithm to validate server hostname using server certificate */ - @ConfigItem(defaultValue = "https") - public Optional endpointIdentificationAlgorithm; + @WithDefault("https") + Optional endpointIdentificationAlgorithm(); /** * The SecureRandom PRNG implementation to use for SSL cryptography operations */ - @ConfigItem - public Optional secureRandomImplementation; + Optional secureRandomImplementation(); } diff --git a/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/TrustStoreConfig.java b/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/TrustStoreConfig.java index d98e140266fe7..6eae5512c88d0 100644 --- a/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/TrustStoreConfig.java +++ b/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/TrustStoreConfig.java @@ -3,31 +3,26 @@ import java.util.Optional; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; @ConfigGroup -public class TrustStoreConfig { +public interface TrustStoreConfig { /** * Trust store type */ - @ConfigItem - public Optional type; + Optional type(); /** * Trust store location */ - @ConfigItem - public Optional location; + Optional location(); /** * Trust store password */ - @ConfigItem - public Optional password; + Optional password(); /** * Trust store certificates */ - @ConfigItem - public Optional certificates; + Optional certificates(); } diff --git a/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/health/KafkaStreamsTopicsHealthCheck.java b/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/health/KafkaStreamsTopicsHealthCheck.java index 84c23f440df87..032f048c439fc 100644 --- a/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/health/KafkaStreamsTopicsHealthCheck.java +++ b/extensions/kafka-streams/runtime/src/main/java/io/quarkus/kafka/streams/runtime/health/KafkaStreamsTopicsHealthCheck.java @@ -35,8 +35,8 @@ public class KafkaStreamsTopicsHealthCheck implements HealthCheck { @PostConstruct public void init() { - if (kafkaStreamsRuntimeConfig.topicsTimeout.compareTo(Duration.ZERO) > 0) { - trimmedTopics = kafkaStreamsRuntimeConfig.topics + if (kafkaStreamsRuntimeConfig.topicsTimeout().compareTo(Duration.ZERO) > 0) { + trimmedTopics = kafkaStreamsRuntimeConfig.topics() .orElseThrow(() -> new IllegalArgumentException("Missing list of topics")) .stream() .map(String::trim) @@ -49,7 +49,7 @@ public HealthCheckResponse call() { HealthCheckResponseBuilder builder = HealthCheckResponse.named("Kafka Streams topics health check").up(); if (trimmedTopics != null) { try { - Set missingTopics = manager.getMissingTopics(trimmedTopics, kafkaStreamsRuntimeConfig.topicsTimeout); + Set missingTopics = manager.getMissingTopics(trimmedTopics, kafkaStreamsRuntimeConfig.topicsTimeout()); List availableTopics = new 
ArrayList<>(trimmedTopics); availableTopics.removeAll(missingTopics); diff --git a/extensions/kafka-streams/runtime/src/test/java/io/quarkus/kafka/streams/runtime/health/KafkaStreamsTopicsHealthCheckTest.java b/extensions/kafka-streams/runtime/src/test/java/io/quarkus/kafka/streams/runtime/health/KafkaStreamsTopicsHealthCheckTest.java index 9fa11bb265702..0820b29e8334c 100644 --- a/extensions/kafka-streams/runtime/src/test/java/io/quarkus/kafka/streams/runtime/health/KafkaStreamsTopicsHealthCheckTest.java +++ b/extensions/kafka-streams/runtime/src/test/java/io/quarkus/kafka/streams/runtime/health/KafkaStreamsTopicsHealthCheckTest.java @@ -31,10 +31,13 @@ public class KafkaStreamsTopicsHealthCheckTest { public void setUp() { MockitoAnnotations.initMocks(this); - KafkaStreamsRuntimeConfig kafkaStreamsRuntimeConfig = new KafkaStreamsRuntimeConfig(); - kafkaStreamsRuntimeConfig.topics = Optional.of(Collections.singletonList("topic")); - kafkaStreamsRuntimeConfig.topicsTimeout = Duration.ofSeconds(10); - healthCheck.kafkaStreamsRuntimeConfig = kafkaStreamsRuntimeConfig; + KafkaStreamsRuntimeConfig configMock = Mockito.mock(KafkaStreamsRuntimeConfig.class); + Mockito.doReturn(Optional.of(Collections.singletonList("topic"))) + .when(configMock).topics(); + + Mockito.doReturn(Duration.ofSeconds(10)) + .when(configMock).topicsTimeout(); + healthCheck.kafkaStreamsRuntimeConfig = configMock; healthCheck.init(); } diff --git a/extensions/keycloak-admin-client-common/deployment/src/main/java/io/quarkus/keycloak/admin/client/common/KeycloakAdminClientBuildTimeConfig.java b/extensions/keycloak-admin-client-common/deployment/src/main/java/io/quarkus/keycloak/admin/client/common/deployment/KeycloakAdminClientBuildTimeConfig.java similarity index 89% rename from extensions/keycloak-admin-client-common/deployment/src/main/java/io/quarkus/keycloak/admin/client/common/KeycloakAdminClientBuildTimeConfig.java rename to extensions/keycloak-admin-client-common/deployment/src/main/java/io/quarkus/keycloak/admin/client/common/deployment/KeycloakAdminClientBuildTimeConfig.java index 7a2ff95e2bbfe..21cb0803a4c6e 100644 --- a/extensions/keycloak-admin-client-common/deployment/src/main/java/io/quarkus/keycloak/admin/client/common/KeycloakAdminClientBuildTimeConfig.java +++ b/extensions/keycloak-admin-client-common/deployment/src/main/java/io/quarkus/keycloak/admin/client/common/deployment/KeycloakAdminClientBuildTimeConfig.java @@ -1,4 +1,4 @@ -package io.quarkus.keycloak.admin.client.common; +package io.quarkus.keycloak.admin.client.common.deployment; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; diff --git a/extensions/keycloak-admin-client-common/deployment/src/main/java/io/quarkus/keycloak/admin/client/common/KeycloakAdminClientInjectionEnabled.java b/extensions/keycloak-admin-client-common/deployment/src/main/java/io/quarkus/keycloak/admin/client/common/deployment/KeycloakAdminClientInjectionEnabled.java similarity index 81% rename from extensions/keycloak-admin-client-common/deployment/src/main/java/io/quarkus/keycloak/admin/client/common/KeycloakAdminClientInjectionEnabled.java rename to extensions/keycloak-admin-client-common/deployment/src/main/java/io/quarkus/keycloak/admin/client/common/deployment/KeycloakAdminClientInjectionEnabled.java index e94bbbfdd8e4b..ea3bae2f399c9 100644 --- a/extensions/keycloak-admin-client-common/deployment/src/main/java/io/quarkus/keycloak/admin/client/common/KeycloakAdminClientInjectionEnabled.java +++ 
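Because the runtime config is now an interface produced by the config system, tests can no longer instantiate it and assign public fields; the health check test above therefore switches to a Mockito mock. A minimal example of that approach follows; ExampleRuntimeConfig is a hypothetical stand-in for a @ConfigMapping interface such as KafkaStreamsRuntimeConfig.

import java.time.Duration;
import java.util.Collections;
import java.util.List;
import java.util.Optional;

import org.mockito.Mockito;

// Stand-in for a @ConfigMapping interface; only the accessors the test needs.
interface ExampleRuntimeConfig {
    Optional<List<String>> topics();

    Duration topicsTimeout();
}

public class ConfigMockExample {
    public static void main(String[] args) {
        ExampleRuntimeConfig config = Mockito.mock(ExampleRuntimeConfig.class);

        // Stub the accessors the code under test will call.
        Mockito.doReturn(Optional.of(Collections.singletonList("topic"))).when(config).topics();
        Mockito.doReturn(Duration.ofSeconds(10)).when(config).topicsTimeout();

        System.out.println(config.topics());        // Optional[[topic]]
        System.out.println(config.topicsTimeout()); // PT10S
    }
}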
b/extensions/keycloak-admin-client-common/deployment/src/main/java/io/quarkus/keycloak/admin/client/common/deployment/KeycloakAdminClientInjectionEnabled.java @@ -1,4 +1,4 @@ -package io.quarkus.keycloak.admin.client.common; +package io.quarkus.keycloak.admin.client.common.deployment; import java.util.function.BooleanSupplier; diff --git a/extensions/keycloak-admin-client-common/deployment/src/test/java/io/quarkus/keycloak/admin/client/common/ConfigValidationTest.java b/extensions/keycloak-admin-client-common/deployment/src/test/java/io/quarkus/keycloak/admin/client/common/deployment/test/ConfigValidationTest.java similarity index 91% rename from extensions/keycloak-admin-client-common/deployment/src/test/java/io/quarkus/keycloak/admin/client/common/ConfigValidationTest.java rename to extensions/keycloak-admin-client-common/deployment/src/test/java/io/quarkus/keycloak/admin/client/common/deployment/test/ConfigValidationTest.java index 43cd6ad06e27e..9a79c4972c111 100644 --- a/extensions/keycloak-admin-client-common/deployment/src/test/java/io/quarkus/keycloak/admin/client/common/ConfigValidationTest.java +++ b/extensions/keycloak-admin-client-common/deployment/src/test/java/io/quarkus/keycloak/admin/client/common/deployment/test/ConfigValidationTest.java @@ -1,12 +1,15 @@ -package io.quarkus.keycloak.admin.client.common; +package io.quarkus.keycloak.admin.client.common.deployment.test; -import static io.quarkus.keycloak.admin.client.common.KeycloakAdminClientConfigUtil.validate; +import static io.quarkus.keycloak.admin.client.common.runtime.KeycloakAdminClientConfigUtil.validate; import java.util.Optional; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; +import io.quarkus.keycloak.admin.client.common.runtime.KeycloakAdminClientConfig; +import io.quarkus.keycloak.admin.client.common.runtime.KeycloakAdminClientConfigUtil; + public class ConfigValidationTest { @Test diff --git a/extensions/keycloak-admin-client-common/runtime/src/main/java/io/quarkus/keycloak/admin/client/common/KeycloakAdminClientConfig.java b/extensions/keycloak-admin-client-common/runtime/src/main/java/io/quarkus/keycloak/admin/client/common/runtime/KeycloakAdminClientConfig.java similarity index 97% rename from extensions/keycloak-admin-client-common/runtime/src/main/java/io/quarkus/keycloak/admin/client/common/KeycloakAdminClientConfig.java rename to extensions/keycloak-admin-client-common/runtime/src/main/java/io/quarkus/keycloak/admin/client/common/runtime/KeycloakAdminClientConfig.java index aab53d23fda50..d94f463b4492d 100644 --- a/extensions/keycloak-admin-client-common/runtime/src/main/java/io/quarkus/keycloak/admin/client/common/KeycloakAdminClientConfig.java +++ b/extensions/keycloak-admin-client-common/runtime/src/main/java/io/quarkus/keycloak/admin/client/common/runtime/KeycloakAdminClientConfig.java @@ -1,4 +1,4 @@ -package io.quarkus.keycloak.admin.client.common; +package io.quarkus.keycloak.admin.client.common.runtime; import java.util.Optional; diff --git a/extensions/keycloak-admin-client-common/runtime/src/main/java/io/quarkus/keycloak/admin/client/common/KeycloakAdminClientConfigUtil.java b/extensions/keycloak-admin-client-common/runtime/src/main/java/io/quarkus/keycloak/admin/client/common/runtime/KeycloakAdminClientConfigUtil.java similarity index 86% rename from extensions/keycloak-admin-client-common/runtime/src/main/java/io/quarkus/keycloak/admin/client/common/KeycloakAdminClientConfigUtil.java rename to 
extensions/keycloak-admin-client-common/runtime/src/main/java/io/quarkus/keycloak/admin/client/common/runtime/KeycloakAdminClientConfigUtil.java index 79c2191222f01..f9210a38ff8a7 100644 --- a/extensions/keycloak-admin-client-common/runtime/src/main/java/io/quarkus/keycloak/admin/client/common/KeycloakAdminClientConfigUtil.java +++ b/extensions/keycloak-admin-client-common/runtime/src/main/java/io/quarkus/keycloak/admin/client/common/runtime/KeycloakAdminClientConfigUtil.java @@ -1,6 +1,6 @@ -package io.quarkus.keycloak.admin.client.common; +package io.quarkus.keycloak.admin.client.common.runtime; -import static io.quarkus.keycloak.admin.client.common.KeycloakAdminClientConfig.GrantType.PASSWORD; +import static io.quarkus.keycloak.admin.client.common.runtime.KeycloakAdminClientConfig.GrantType.PASSWORD; import org.jboss.logging.Logger; @@ -32,7 +32,7 @@ public static void validate(KeycloakAdminClientConfig config) { } } - static final class KeycloakAdminClientException extends RuntimeException { + public static final class KeycloakAdminClientException extends RuntimeException { private KeycloakAdminClientException(String message) { super(String.format("Failed to create Keycloak admin client: %s.", message)); diff --git a/extensions/keycloak-admin-rest-client/deployment/src/main/java/io/quarkus/keycloak/admin/client/reactive/KeycloakAdminClientReactiveProcessor.java b/extensions/keycloak-admin-rest-client/deployment/src/main/java/io/quarkus/keycloak/admin/rest/client/deployment/KeycloakAdminRestClientProcessor.java similarity index 81% rename from extensions/keycloak-admin-rest-client/deployment/src/main/java/io/quarkus/keycloak/admin/client/reactive/KeycloakAdminClientReactiveProcessor.java rename to extensions/keycloak-admin-rest-client/deployment/src/main/java/io/quarkus/keycloak/admin/rest/client/deployment/KeycloakAdminRestClientProcessor.java index d233e7aa78ceb..04bbf20cd8b3a 100644 --- a/extensions/keycloak-admin-rest-client/deployment/src/main/java/io/quarkus/keycloak/admin/client/reactive/KeycloakAdminClientReactiveProcessor.java +++ b/extensions/keycloak-admin-rest-client/deployment/src/main/java/io/quarkus/keycloak/admin/rest/client/deployment/KeycloakAdminRestClientProcessor.java @@ -1,4 +1,4 @@ -package io.quarkus.keycloak.admin.client.reactive; +package io.quarkus.keycloak.admin.rest.client.deployment; import jakarta.enterprise.context.RequestScoped; @@ -22,12 +22,12 @@ import io.quarkus.deployment.builditem.nativeimage.ReflectiveClassBuildItem; import io.quarkus.deployment.builditem.nativeimage.ReflectiveHierarchyIgnoreWarningBuildItem; import io.quarkus.deployment.builditem.nativeimage.ServiceProviderBuildItem; -import io.quarkus.keycloak.admin.client.common.KeycloakAdminClientInjectionEnabled; -import io.quarkus.keycloak.admin.client.reactive.runtime.ResteasyReactiveClientProvider; -import io.quarkus.keycloak.admin.client.reactive.runtime.ResteasyReactiveKeycloakAdminClientRecorder; +import io.quarkus.keycloak.admin.client.common.deployment.KeycloakAdminClientInjectionEnabled; +import io.quarkus.keycloak.admin.rest.client.runtime.KeycloakAdminRestClientProvider; +import io.quarkus.keycloak.admin.rest.client.runtime.KeycloakAdminRestClientRecorder; import io.quarkus.tls.TlsRegistryBuildItem; -public class KeycloakAdminClientReactiveProcessor { +public class KeycloakAdminRestClientProcessor { @BuildStep void marker(BuildProducer producer) { @@ -40,7 +40,7 @@ public void nativeImage(BuildProducer serviceProviderP BuildProducer reflectiveClassProducer, BuildProducer 
reflectiveHierarchyProducer) { serviceProviderProducer.produce(new ServiceProviderBuildItem(ResteasyClientProvider.class.getName(), - ResteasyReactiveClientProvider.class.getName())); + KeycloakAdminRestClientProvider.class.getName())); reflectiveClassProducer.produce(ReflectiveClassBuildItem.builder( StringListMapDeserializer.class, StringOrArrayDeserializer.class, @@ -54,20 +54,20 @@ public void nativeImage(BuildProducer serviceProviderP @Record(ExecutionTime.STATIC_INIT) @BuildStep - void avoidRuntimeInitIssueInClientBuilderWrapper(ResteasyReactiveKeycloakAdminClientRecorder recorder) { + void avoidRuntimeInitIssueInClientBuilderWrapper(KeycloakAdminRestClientRecorder recorder) { recorder.avoidRuntimeInitIssueInClientBuilderWrapper(); } @Record(ExecutionTime.RUNTIME_INIT) @Produce(ServiceStartBuildItem.class) @BuildStep - public void integrate(ResteasyReactiveKeycloakAdminClientRecorder recorder, TlsRegistryBuildItem tlsRegistryBuildItem) { + public void integrate(KeycloakAdminRestClientRecorder recorder, TlsRegistryBuildItem tlsRegistryBuildItem) { recorder.setClientProvider(tlsRegistryBuildItem.registry()); } @Record(ExecutionTime.RUNTIME_INIT) @BuildStep(onlyIf = KeycloakAdminClientInjectionEnabled.class) - public void registerKeycloakAdminClientBeans(ResteasyReactiveKeycloakAdminClientRecorder recorder, + public void registerKeycloakAdminClientBeans(KeycloakAdminRestClientRecorder recorder, BuildProducer syntheticBeanBuildItemBuildProducer) { syntheticBeanBuildItemBuildProducer.produce(SyntheticBeanBuildItem .configure(Keycloak.class) diff --git a/extensions/keycloak-admin-resteasy-client/deployment/src/main/java/io/quarkus/keycloak/adminclient/deployment/devservices/KeycloakDevServiceRequiredBuildStep.java b/extensions/keycloak-admin-rest-client/deployment/src/main/java/io/quarkus/keycloak/admin/rest/client/deployment/devservices/KeycloakDevServiceRequiredBuildStep.java similarity index 88% rename from extensions/keycloak-admin-resteasy-client/deployment/src/main/java/io/quarkus/keycloak/adminclient/deployment/devservices/KeycloakDevServiceRequiredBuildStep.java rename to extensions/keycloak-admin-rest-client/deployment/src/main/java/io/quarkus/keycloak/admin/rest/client/deployment/devservices/KeycloakDevServiceRequiredBuildStep.java index c458742cb7f17..f35ad136e2d9e 100644 --- a/extensions/keycloak-admin-resteasy-client/deployment/src/main/java/io/quarkus/keycloak/adminclient/deployment/devservices/KeycloakDevServiceRequiredBuildStep.java +++ b/extensions/keycloak-admin-rest-client/deployment/src/main/java/io/quarkus/keycloak/admin/rest/client/deployment/devservices/KeycloakDevServiceRequiredBuildStep.java @@ -1,4 +1,4 @@ -package io.quarkus.keycloak.adminclient.deployment.devservices; +package io.quarkus.keycloak.admin.rest.client.deployment.devservices; import java.util.Map; @@ -10,7 +10,7 @@ import io.quarkus.devservices.keycloak.KeycloakAdminPageBuildItem; import io.quarkus.devservices.keycloak.KeycloakDevServicesRequiredBuildItem; import io.quarkus.devui.spi.page.CardPageBuildItem; -import io.quarkus.keycloak.admin.client.common.KeycloakAdminClientInjectionEnabled; +import io.quarkus.keycloak.admin.client.common.deployment.KeycloakAdminClientInjectionEnabled; @BuildSteps(onlyIfNot = IsNormal.class, onlyIf = { DevServicesConfig.Enabled.class, KeycloakAdminClientInjectionEnabled.class }) diff --git a/extensions/keycloak-admin-rest-client/deployment/src/test/java/io/quarkus/keycloak/admin/client/reactive/KeycloakAdminClientInjectionDevServicesTest.java 
b/extensions/keycloak-admin-rest-client/deployment/src/test/java/io/quarkus/keycloak/admin/rest/client/deployment/test/KeycloakAdminClientInjectionDevServicesTest.java similarity index 97% rename from extensions/keycloak-admin-rest-client/deployment/src/test/java/io/quarkus/keycloak/admin/client/reactive/KeycloakAdminClientInjectionDevServicesTest.java rename to extensions/keycloak-admin-rest-client/deployment/src/test/java/io/quarkus/keycloak/admin/rest/client/deployment/test/KeycloakAdminClientInjectionDevServicesTest.java index 1551f3dfb1577..eae06ca6b310b 100644 --- a/extensions/keycloak-admin-rest-client/deployment/src/test/java/io/quarkus/keycloak/admin/client/reactive/KeycloakAdminClientInjectionDevServicesTest.java +++ b/extensions/keycloak-admin-rest-client/deployment/src/test/java/io/quarkus/keycloak/admin/rest/client/deployment/test/KeycloakAdminClientInjectionDevServicesTest.java @@ -1,4 +1,4 @@ -package io.quarkus.keycloak.admin.client.reactive; +package io.quarkus.keycloak.admin.rest.client.deployment.test; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; diff --git a/extensions/keycloak-admin-rest-client/deployment/src/test/java/io/quarkus/keycloak/admin/client/reactive/KeycloakAdminClientMutualTlsDevServicesTest.java b/extensions/keycloak-admin-rest-client/deployment/src/test/java/io/quarkus/keycloak/admin/rest/client/deployment/test/KeycloakAdminClientMutualTlsDevServicesTest.java similarity index 98% rename from extensions/keycloak-admin-rest-client/deployment/src/test/java/io/quarkus/keycloak/admin/client/reactive/KeycloakAdminClientMutualTlsDevServicesTest.java rename to extensions/keycloak-admin-rest-client/deployment/src/test/java/io/quarkus/keycloak/admin/rest/client/deployment/test/KeycloakAdminClientMutualTlsDevServicesTest.java index 753eabfe19bc3..fe2849d28c4f3 100644 --- a/extensions/keycloak-admin-rest-client/deployment/src/test/java/io/quarkus/keycloak/admin/client/reactive/KeycloakAdminClientMutualTlsDevServicesTest.java +++ b/extensions/keycloak-admin-rest-client/deployment/src/test/java/io/quarkus/keycloak/admin/rest/client/deployment/test/KeycloakAdminClientMutualTlsDevServicesTest.java @@ -1,4 +1,4 @@ -package io.quarkus.keycloak.admin.client.reactive; +package io.quarkus.keycloak.admin.rest.client.deployment.test; import java.io.File; import java.util.ArrayList; diff --git a/extensions/keycloak-admin-rest-client/deployment/src/test/java/io/quarkus/keycloak/admin/client/reactive/KeycloakAdminClientZeroConfigDevServicesTest.java b/extensions/keycloak-admin-rest-client/deployment/src/test/java/io/quarkus/keycloak/admin/rest/client/deployment/test/KeycloakAdminClientZeroConfigDevServicesTest.java similarity index 96% rename from extensions/keycloak-admin-rest-client/deployment/src/test/java/io/quarkus/keycloak/admin/client/reactive/KeycloakAdminClientZeroConfigDevServicesTest.java rename to extensions/keycloak-admin-rest-client/deployment/src/test/java/io/quarkus/keycloak/admin/rest/client/deployment/test/KeycloakAdminClientZeroConfigDevServicesTest.java index fe3853598015a..27776ee3ef311 100644 --- a/extensions/keycloak-admin-rest-client/deployment/src/test/java/io/quarkus/keycloak/admin/client/reactive/KeycloakAdminClientZeroConfigDevServicesTest.java +++ b/extensions/keycloak-admin-rest-client/deployment/src/test/java/io/quarkus/keycloak/admin/rest/client/deployment/test/KeycloakAdminClientZeroConfigDevServicesTest.java @@ -1,4 +1,4 @@ -package 
io.quarkus.keycloak.admin.client.reactive; +package io.quarkus.keycloak.admin.rest.client.deployment.test; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; diff --git a/extensions/keycloak-admin-rest-client/runtime/src/main/java/io/quarkus/keycloak/admin/client/reactive/runtime/ResteasyReactiveClientProvider.java b/extensions/keycloak-admin-rest-client/runtime/src/main/java/io/quarkus/keycloak/admin/rest/client/runtime/KeycloakAdminRestClientProvider.java similarity index 95% rename from extensions/keycloak-admin-rest-client/runtime/src/main/java/io/quarkus/keycloak/admin/client/reactive/runtime/ResteasyReactiveClientProvider.java rename to extensions/keycloak-admin-rest-client/runtime/src/main/java/io/quarkus/keycloak/admin/rest/client/runtime/KeycloakAdminRestClientProvider.java index aa08f7edb66c5..5d8669b5ad131 100644 --- a/extensions/keycloak-admin-rest-client/runtime/src/main/java/io/quarkus/keycloak/admin/client/reactive/runtime/ResteasyReactiveClientProvider.java +++ b/extensions/keycloak-admin-rest-client/runtime/src/main/java/io/quarkus/keycloak/admin/rest/client/runtime/KeycloakAdminRestClientProvider.java @@ -1,4 +1,4 @@ -package io.quarkus.keycloak.admin.client.reactive.runtime; +package io.quarkus.keycloak.admin.rest.client.runtime; import java.security.KeyStore; import java.util.List; @@ -31,7 +31,7 @@ import io.vertx.core.net.SSLOptions; import io.vertx.core.net.TrustOptions; -public class ResteasyReactiveClientProvider implements ResteasyClientProvider { +public class KeycloakAdminRestClientProvider implements ResteasyClientProvider { private static final List HANDLED_MEDIA_TYPES = List.of(MediaType.APPLICATION_JSON); private static final int WRITER_PROVIDER_PRIORITY = Priorities.USER + 100; // ensures that it will be used first @@ -40,12 +40,12 @@ public class ResteasyReactiveClientProvider implements ResteasyClientProvider { private final boolean tlsTrustAll; private final TlsConfig tlsConfig; - public ResteasyReactiveClientProvider(boolean tlsTrustAll) { + public KeycloakAdminRestClientProvider(boolean tlsTrustAll) { this.tlsTrustAll = tlsTrustAll; this.tlsConfig = null; } - public ResteasyReactiveClientProvider(TlsConfiguration tlsConfiguration) { + public KeycloakAdminRestClientProvider(TlsConfiguration tlsConfiguration) { tlsTrustAll = tlsConfiguration.isTrustAll(); this.tlsConfig = createTlsConfig(tlsConfiguration); } diff --git a/extensions/keycloak-admin-rest-client/runtime/src/main/java/io/quarkus/keycloak/admin/client/reactive/runtime/ResteasyReactiveKeycloakAdminClientRecorder.java b/extensions/keycloak-admin-rest-client/runtime/src/main/java/io/quarkus/keycloak/admin/rest/client/runtime/KeycloakAdminRestClientRecorder.java similarity index 84% rename from extensions/keycloak-admin-rest-client/runtime/src/main/java/io/quarkus/keycloak/admin/client/reactive/runtime/ResteasyReactiveKeycloakAdminClientRecorder.java rename to extensions/keycloak-admin-rest-client/runtime/src/main/java/io/quarkus/keycloak/admin/rest/client/runtime/KeycloakAdminRestClientRecorder.java index 0d99d1ec027f0..3fe3fc72acce2 100644 --- a/extensions/keycloak-admin-rest-client/runtime/src/main/java/io/quarkus/keycloak/admin/client/reactive/runtime/ResteasyReactiveKeycloakAdminClientRecorder.java +++ b/extensions/keycloak-admin-rest-client/runtime/src/main/java/io/quarkus/keycloak/admin/rest/client/runtime/KeycloakAdminRestClientRecorder.java @@ -1,24 +1,24 @@ -package io.quarkus.keycloak.admin.client.reactive.runtime; 
+package io.quarkus.keycloak.admin.rest.client.runtime; -import static io.quarkus.keycloak.admin.client.common.KeycloakAdminClientConfigUtil.validate; +import static io.quarkus.keycloak.admin.client.common.runtime.KeycloakAdminClientConfigUtil.validate; import java.util.function.Supplier; import org.keycloak.admin.client.Keycloak; import org.keycloak.admin.client.KeycloakBuilder; -import io.quarkus.keycloak.admin.client.common.KeycloakAdminClientConfig; +import io.quarkus.keycloak.admin.client.common.runtime.KeycloakAdminClientConfig; import io.quarkus.runtime.RuntimeValue; import io.quarkus.runtime.annotations.Recorder; import io.quarkus.tls.TlsConfiguration; import io.quarkus.tls.TlsConfigurationRegistry; @Recorder -public class ResteasyReactiveKeycloakAdminClientRecorder { +public class KeycloakAdminRestClientRecorder { private final RuntimeValue keycloakAdminClientConfigRuntimeValue; - public ResteasyReactiveKeycloakAdminClientRecorder( + public KeycloakAdminRestClientRecorder( RuntimeValue keycloakAdminClientConfigRuntimeValue) { this.keycloakAdminClientConfigRuntimeValue = keycloakAdminClientConfigRuntimeValue; } @@ -28,7 +28,7 @@ public void setClientProvider(Supplier registrySupplie var namedTlsConfig = TlsConfiguration.from(registry, keycloakAdminClientConfigRuntimeValue.getValue().tlsConfigurationName()); if (namedTlsConfig.isPresent()) { - Keycloak.setClientProvider(new ResteasyReactiveClientProvider(namedTlsConfig.get())); + Keycloak.setClientProvider(new KeycloakAdminRestClientProvider(namedTlsConfig.get())); } else { final boolean trustAll; if (registry.getDefault().isPresent()) { @@ -36,7 +36,7 @@ public void setClientProvider(Supplier registrySupplie } else { trustAll = false; } - Keycloak.setClientProvider(new ResteasyReactiveClientProvider(trustAll)); + Keycloak.setClientProvider(new KeycloakAdminRestClientProvider(trustAll)); } } diff --git a/extensions/keycloak-admin-rest-client/runtime/src/main/java/resources/META-INF/quarkus-extension.yaml b/extensions/keycloak-admin-rest-client/runtime/src/main/java/resources/META-INF/quarkus-extension.yaml deleted file mode 100644 index 92570a014faf2..0000000000000 --- a/extensions/keycloak-admin-rest-client/runtime/src/main/java/resources/META-INF/quarkus-extension.yaml +++ /dev/null @@ -1,14 +0,0 @@ ---- -artifact: ${project.groupId}:${project.artifactId}:${project.version} -name: "Reactive Keycloak Admin Client" -metadata: - keywords: - - "keycloak" - - "keycloak-admin-client" - - "admin" - - "openid-connect" - - "reactive" - categories: - - "security" - - "reactive" - status: "stable" \ No newline at end of file diff --git a/extensions/keycloak-admin-resteasy-client/deployment/src/main/java/io/quarkus/keycloak/adminclient/deployment/KeycloakAdminClientProcessor.java b/extensions/keycloak-admin-resteasy-client/deployment/src/main/java/io/quarkus/keycloak/admin/resteasy/client/deployment/KeycloakAdminResteasyClientProcessor.java similarity index 85% rename from extensions/keycloak-admin-resteasy-client/deployment/src/main/java/io/quarkus/keycloak/adminclient/deployment/KeycloakAdminClientProcessor.java rename to extensions/keycloak-admin-resteasy-client/deployment/src/main/java/io/quarkus/keycloak/admin/resteasy/client/deployment/KeycloakAdminResteasyClientProcessor.java index 981e6dc89cd9a..3f2b94eb8c0d5 100644 --- a/extensions/keycloak-admin-resteasy-client/deployment/src/main/java/io/quarkus/keycloak/adminclient/deployment/KeycloakAdminClientProcessor.java +++ 
b/extensions/keycloak-admin-resteasy-client/deployment/src/main/java/io/quarkus/keycloak/admin/resteasy/client/deployment/KeycloakAdminResteasyClientProcessor.java @@ -1,4 +1,4 @@ -package io.quarkus.keycloak.adminclient.deployment; +package io.quarkus.keycloak.admin.resteasy.client.deployment; import jakarta.enterprise.context.RequestScoped; @@ -23,11 +23,11 @@ import io.quarkus.deployment.builditem.ServiceStartBuildItem; import io.quarkus.deployment.builditem.nativeimage.ReflectiveClassBuildItem; import io.quarkus.deployment.builditem.nativeimage.ReflectiveHierarchyIgnoreWarningBuildItem; -import io.quarkus.keycloak.admin.client.common.KeycloakAdminClientInjectionEnabled; -import io.quarkus.keycloak.adminclient.ResteasyKeycloakAdminClientRecorder; +import io.quarkus.keycloak.admin.client.common.deployment.KeycloakAdminClientInjectionEnabled; +import io.quarkus.keycloak.admin.resteasy.client.runtime.KeycloakAdminResteasyClientRecorder; import io.quarkus.tls.TlsRegistryBuildItem; -public class KeycloakAdminClientProcessor { +public class KeycloakAdminResteasyClientProcessor { @BuildStep ReflectiveHierarchyIgnoreWarningBuildItem marker(BuildProducer prod) { @@ -49,20 +49,20 @@ ReflectiveClassBuildItem reflect() { @Record(ExecutionTime.STATIC_INIT) @BuildStep - void avoidRuntimeInitIssueInClientBuilderWrapper(ResteasyKeycloakAdminClientRecorder recorder) { + void avoidRuntimeInitIssueInClientBuilderWrapper(KeycloakAdminResteasyClientRecorder recorder) { recorder.avoidRuntimeInitIssueInClientBuilderWrapper(); } @Record(ExecutionTime.RUNTIME_INIT) @Produce(ServiceStartBuildItem.class) @BuildStep - public void integrate(ResteasyKeycloakAdminClientRecorder recorder, TlsRegistryBuildItem tlsRegistryBuildItem) { + public void integrate(KeycloakAdminResteasyClientRecorder recorder, TlsRegistryBuildItem tlsRegistryBuildItem) { recorder.setClientProvider(tlsRegistryBuildItem.registry()); } @Record(ExecutionTime.RUNTIME_INIT) @BuildStep(onlyIf = KeycloakAdminClientInjectionEnabled.class) - public void registerKeycloakAdminClientBeans(ResteasyKeycloakAdminClientRecorder recorder, + public void registerKeycloakAdminClientBeans(KeycloakAdminResteasyClientRecorder recorder, BuildProducer syntheticBeanBuildItemBuildProducer) { syntheticBeanBuildItemBuildProducer.produce(SyntheticBeanBuildItem .configure(Keycloak.class) diff --git a/extensions/keycloak-admin-rest-client/deployment/src/main/java/io/quarkus/keycloak/admin/client/reactive/devservices/KeycloakDevServiceRequiredBuildStep.java b/extensions/keycloak-admin-resteasy-client/deployment/src/main/java/io/quarkus/keycloak/admin/resteasy/client/deployment/devservices/KeycloakDevServiceRequiredBuildStep.java similarity index 87% rename from extensions/keycloak-admin-rest-client/deployment/src/main/java/io/quarkus/keycloak/admin/client/reactive/devservices/KeycloakDevServiceRequiredBuildStep.java rename to extensions/keycloak-admin-resteasy-client/deployment/src/main/java/io/quarkus/keycloak/admin/resteasy/client/deployment/devservices/KeycloakDevServiceRequiredBuildStep.java index 08c67909510c8..8bc7a20f2675b 100644 --- a/extensions/keycloak-admin-rest-client/deployment/src/main/java/io/quarkus/keycloak/admin/client/reactive/devservices/KeycloakDevServiceRequiredBuildStep.java +++ b/extensions/keycloak-admin-resteasy-client/deployment/src/main/java/io/quarkus/keycloak/admin/resteasy/client/deployment/devservices/KeycloakDevServiceRequiredBuildStep.java @@ -1,4 +1,4 @@ -package io.quarkus.keycloak.admin.client.reactive.devservices; +package 
io.quarkus.keycloak.admin.resteasy.client.deployment.devservices; import java.util.Map; @@ -10,7 +10,7 @@ import io.quarkus.devservices.keycloak.KeycloakAdminPageBuildItem; import io.quarkus.devservices.keycloak.KeycloakDevServicesRequiredBuildItem; import io.quarkus.devui.spi.page.CardPageBuildItem; -import io.quarkus.keycloak.admin.client.common.KeycloakAdminClientInjectionEnabled; +import io.quarkus.keycloak.admin.client.common.deployment.KeycloakAdminClientInjectionEnabled; @BuildSteps(onlyIfNot = IsNormal.class, onlyIf = { DevServicesConfig.Enabled.class, KeycloakAdminClientInjectionEnabled.class }) diff --git a/extensions/keycloak-admin-resteasy-client/deployment/src/test/java/io/quarkus/keycloak/adminclient/deployment/KeycloakAdminClientInjectionDevServicesTest.java b/extensions/keycloak-admin-resteasy-client/deployment/src/test/java/io/quarkus/keycloak/admin/resteasy/client/deployment/test/KeycloakAdminClientInjectionDevServicesTest.java similarity index 97% rename from extensions/keycloak-admin-resteasy-client/deployment/src/test/java/io/quarkus/keycloak/adminclient/deployment/KeycloakAdminClientInjectionDevServicesTest.java rename to extensions/keycloak-admin-resteasy-client/deployment/src/test/java/io/quarkus/keycloak/admin/resteasy/client/deployment/test/KeycloakAdminClientInjectionDevServicesTest.java index a87d149b61ca5..9d7f895acbb3f 100644 --- a/extensions/keycloak-admin-resteasy-client/deployment/src/test/java/io/quarkus/keycloak/adminclient/deployment/KeycloakAdminClientInjectionDevServicesTest.java +++ b/extensions/keycloak-admin-resteasy-client/deployment/src/test/java/io/quarkus/keycloak/admin/resteasy/client/deployment/test/KeycloakAdminClientInjectionDevServicesTest.java @@ -1,4 +1,4 @@ -package io.quarkus.keycloak.adminclient.deployment; +package io.quarkus.keycloak.admin.resteasy.client.deployment.test; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; diff --git a/extensions/keycloak-admin-resteasy-client/deployment/src/test/java/io/quarkus/keycloak/adminclient/deployment/KeycloakAdminClientMutualTlsDevServicesTest.java b/extensions/keycloak-admin-resteasy-client/deployment/src/test/java/io/quarkus/keycloak/admin/resteasy/client/deployment/test/KeycloakAdminClientMutualTlsDevServicesTest.java similarity index 98% rename from extensions/keycloak-admin-resteasy-client/deployment/src/test/java/io/quarkus/keycloak/adminclient/deployment/KeycloakAdminClientMutualTlsDevServicesTest.java rename to extensions/keycloak-admin-resteasy-client/deployment/src/test/java/io/quarkus/keycloak/admin/resteasy/client/deployment/test/KeycloakAdminClientMutualTlsDevServicesTest.java index 33eaac20f98ad..e5ca6cf15fa5f 100644 --- a/extensions/keycloak-admin-resteasy-client/deployment/src/test/java/io/quarkus/keycloak/adminclient/deployment/KeycloakAdminClientMutualTlsDevServicesTest.java +++ b/extensions/keycloak-admin-resteasy-client/deployment/src/test/java/io/quarkus/keycloak/admin/resteasy/client/deployment/test/KeycloakAdminClientMutualTlsDevServicesTest.java @@ -1,4 +1,4 @@ -package io.quarkus.keycloak.adminclient.deployment; +package io.quarkus.keycloak.admin.resteasy.client.deployment.test; import java.io.File; import java.util.ArrayList; diff --git a/extensions/keycloak-admin-resteasy-client/deployment/src/test/java/io/quarkus/keycloak/adminclient/deployment/KeycloakAdminClientZeroConfigDevServicesTest.java 
b/extensions/keycloak-admin-resteasy-client/deployment/src/test/java/io/quarkus/keycloak/admin/resteasy/client/deployment/test/KeycloakAdminClientZeroConfigDevServicesTest.java similarity index 96% rename from extensions/keycloak-admin-resteasy-client/deployment/src/test/java/io/quarkus/keycloak/adminclient/deployment/KeycloakAdminClientZeroConfigDevServicesTest.java rename to extensions/keycloak-admin-resteasy-client/deployment/src/test/java/io/quarkus/keycloak/admin/resteasy/client/deployment/test/KeycloakAdminClientZeroConfigDevServicesTest.java index 440ed7514621d..fcf0fe1415538 100644 --- a/extensions/keycloak-admin-resteasy-client/deployment/src/test/java/io/quarkus/keycloak/adminclient/deployment/KeycloakAdminClientZeroConfigDevServicesTest.java +++ b/extensions/keycloak-admin-resteasy-client/deployment/src/test/java/io/quarkus/keycloak/admin/resteasy/client/deployment/test/KeycloakAdminClientZeroConfigDevServicesTest.java @@ -1,4 +1,4 @@ -package io.quarkus.keycloak.adminclient.deployment; +package io.quarkus.keycloak.admin.resteasy.client.deployment.test; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; diff --git a/extensions/keycloak-admin-resteasy-client/runtime/src/main/java/io/quarkus/keycloak/adminclient/ResteasyKeycloakAdminClientRecorder.java b/extensions/keycloak-admin-resteasy-client/runtime/src/main/java/io/quarkus/keycloak/admin/resteasy/client/runtime/KeycloakAdminResteasyClientRecorder.java similarity index 94% rename from extensions/keycloak-admin-resteasy-client/runtime/src/main/java/io/quarkus/keycloak/adminclient/ResteasyKeycloakAdminClientRecorder.java rename to extensions/keycloak-admin-resteasy-client/runtime/src/main/java/io/quarkus/keycloak/admin/resteasy/client/runtime/KeycloakAdminResteasyClientRecorder.java index 6cfc88870ff58..ac1127505385f 100644 --- a/extensions/keycloak-admin-resteasy-client/runtime/src/main/java/io/quarkus/keycloak/adminclient/ResteasyKeycloakAdminClientRecorder.java +++ b/extensions/keycloak-admin-resteasy-client/runtime/src/main/java/io/quarkus/keycloak/admin/resteasy/client/runtime/KeycloakAdminResteasyClientRecorder.java @@ -1,6 +1,6 @@ -package io.quarkus.keycloak.adminclient; +package io.quarkus.keycloak.admin.resteasy.client.runtime; -import static io.quarkus.keycloak.admin.client.common.KeycloakAdminClientConfigUtil.validate; +import static io.quarkus.keycloak.admin.client.common.runtime.KeycloakAdminClientConfigUtil.validate; import java.util.function.Supplier; @@ -22,7 +22,7 @@ import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; -import io.quarkus.keycloak.admin.client.common.KeycloakAdminClientConfig; +import io.quarkus.keycloak.admin.client.common.runtime.KeycloakAdminClientConfig; import io.quarkus.resteasy.common.runtime.jackson.QuarkusJacksonSerializer; import io.quarkus.runtime.RuntimeValue; import io.quarkus.runtime.annotations.Recorder; @@ -30,11 +30,11 @@ import io.quarkus.tls.TlsConfigurationRegistry; @Recorder -public class ResteasyKeycloakAdminClientRecorder { +public class KeycloakAdminResteasyClientRecorder { private final RuntimeValue keycloakAdminClientConfigRuntimeValue; - public ResteasyKeycloakAdminClientRecorder( + public KeycloakAdminResteasyClientRecorder( RuntimeValue keycloakAdminClientConfigRuntimeValue) { this.keycloakAdminClientConfigRuntimeValue = keycloakAdminClientConfigRuntimeValue; } diff --git 
a/extensions/keycloak-authorization/runtime/src/main/java/io/quarkus/keycloak/pep/runtime/DefaultPolicyEnforcerResolver.java b/extensions/keycloak-authorization/runtime/src/main/java/io/quarkus/keycloak/pep/runtime/DefaultPolicyEnforcerResolver.java index 71f8b9afe237b..01de4a56b45f7 100644 --- a/extensions/keycloak-authorization/runtime/src/main/java/io/quarkus/keycloak/pep/runtime/DefaultPolicyEnforcerResolver.java +++ b/extensions/keycloak-authorization/runtime/src/main/java/io/quarkus/keycloak/pep/runtime/DefaultPolicyEnforcerResolver.java @@ -22,7 +22,7 @@ import io.quarkus.oidc.runtime.TenantConfigBean; import io.quarkus.security.spi.runtime.BlockingSecurityExecutor; import io.quarkus.tls.TlsConfigurationRegistry; -import io.quarkus.vertx.http.runtime.HttpConfiguration; +import io.quarkus.vertx.http.runtime.VertxHttpConfig; import io.smallrye.mutiny.Uni; import io.vertx.ext.web.RoutingContext; @@ -37,10 +37,10 @@ public class DefaultPolicyEnforcerResolver implements PolicyEnforcerResolver { private final OidcTlsSupport tlsSupport; DefaultPolicyEnforcerResolver(TenantConfigBean tenantConfigBean, KeycloakPolicyEnforcerConfig config, - HttpConfiguration httpConfiguration, BlockingSecurityExecutor blockingSecurityExecutor, + VertxHttpConfig httpConfig, BlockingSecurityExecutor blockingSecurityExecutor, Instance configResolver, InjectableInstance tlsConfigRegistryInstance) { - this.readTimeout = httpConfiguration.readTimeout.toMillis(); + this.readTimeout = httpConfig.readTimeout().toMillis(); if (tlsConfigRegistryInstance.isResolvable()) { this.tlsSupport = OidcTlsSupport.of(tlsConfigRegistryInstance.get()); diff --git a/extensions/kubernetes-client/deployment/src/main/java/io/quarkus/kubernetes/client/deployment/DevServicesKubernetesProcessor.java b/extensions/kubernetes-client/deployment/src/main/java/io/quarkus/kubernetes/client/deployment/DevServicesKubernetesProcessor.java index 397c8aa44dbd8..2ccabb4846d11 100644 --- a/extensions/kubernetes-client/deployment/src/main/java/io/quarkus/kubernetes/client/deployment/DevServicesKubernetesProcessor.java +++ b/extensions/kubernetes-client/deployment/src/main/java/io/quarkus/kubernetes/client/deployment/DevServicesKubernetesProcessor.java @@ -91,7 +91,11 @@ public DevServicesResultBuildItem setupKubernetesDevService( LoggingSetupBuildItem loggingSetupBuildItem, DevServicesConfig devServicesConfig) { + System.out.println("HOLLY KUBE setting up Kubernetes DevServices existing " + devService); + System.out.println("HOLLY KUBE ds config " + devServicesConfig.enabled()); + KubernetesDevServiceCfg configuration = getConfiguration(kubernetesClientBuildTimeConfig); + System.out.println("HOLLY KUBE config enabled = " + configuration.devServicesEnabled); if (devService != null) { boolean shouldShutdownTheCluster = !configuration.equals(cfg); @@ -106,6 +110,7 @@ public DevServicesResultBuildItem setupKubernetesDevService( (launchMode.isTest() ? 
"(test) " : "") + "Kubernetes Dev Services Starting:", consoleInstalledBuildItem, loggingSetupBuildItem); try { + System.out.println("HOLLY KUBE about to call the method start kube which doesn't actually start it"); devService = startKubernetes(dockerStatusBuildItem, configuration, launchMode, !devServicesSharedNetworkBuildItem.isEmpty(), devServicesConfig.timeout()); @@ -144,6 +149,7 @@ public DevServicesResultBuildItem setupKubernetesDevService( + "cluster automatically."); } + System.out.println("HOLLY KUBE made the dev services result " + devService); return devService.toBuildItem(); } @@ -162,6 +168,7 @@ private void shutdownCluster() { @SuppressWarnings("unchecked") private RunningDevService startKubernetes(DockerStatusBuildItem dockerStatusBuildItem, KubernetesDevServiceCfg config, LaunchModeBuildItem launchMode, boolean useSharedNetwork, Optional timeout) { + System.out.println("HOLLY KUBE about to start " + config.devServicesEnabled); if (!config.devServicesEnabled) { // explicitly disabled log.debug("Not starting Dev Services for Kubernetes, as it has been disabled in the config."); @@ -173,10 +180,15 @@ private RunningDevService startKubernetes(DockerStatusBuildItem dockerStatusBuil log.debug("Not starting Dev Services for Kubernetes, the " + KUBERNETES_CLIENT_MASTER_URL + " is configured."); return null; } + System.out.println("HOLLY KUBE config override " + config.overrideKubeconfig); if (!config.overrideKubeconfig) { var autoConfigMasterUrl = Config.autoConfigure(null).getMasterUrl(); + System.out.println("HOLLY KUBE comparing " + DEFAULT_MASTER_URL_ENDING_WITH_SLASH + " and " + + autoConfigMasterUrl); if (!DEFAULT_MASTER_URL_ENDING_WITH_SLASH.equals(autoConfigMasterUrl)) { + System.out.println("HOLLY KUBE bailing because " + DEFAULT_MASTER_URL_ENDING_WITH_SLASH + " is not " + + autoConfigMasterUrl); log.debug( "Not starting Dev Services for Kubernetes, the Kubernetes client is auto-configured. Set " + KUBERNETES_CLIENT_DEVSERVICES_OVERRIDE_KUBECONFIG @@ -184,18 +196,22 @@ private RunningDevService startKubernetes(DockerStatusBuildItem dockerStatusBuil return null; } } + System.out.println("HOLLY KUBE got past config override "); if (!dockerStatusBuildItem.isContainerRuntimeAvailable()) { log.warn( "Docker isn't working, please configure the Kubernetes client."); return null; } + System.out.println("HOLLY KUBE got past runtime check "); final Optional maybeContainerAddress = KubernetesContainerLocator.locateContainer(config.serviceName, config.shared, launchMode.getLaunchMode()); + System.out.println("HOLLY KUBE container address " + maybeContainerAddress); final Supplier defaultKubernetesClusterSupplier = () -> { + System.out.println("HOLLY KUBE default supplier doing its thing! " + config.flavor); KubernetesContainer container; switch (config.flavor) { case api_only: @@ -228,6 +244,7 @@ private RunningDevService startKubernetes(DockerStatusBuildItem dockerStatusBuil timeout.ifPresent(container::withStartupTimeout); container.withEnv(config.containerEnv); + System.out.println("HOLLY KUBE really starting the container! 
"); container.start(); diff --git a/extensions/kubernetes-service-binding/runtime/pom.xml b/extensions/kubernetes-service-binding/runtime/pom.xml index bfbaf1d045e8a..fcb6283056199 100644 --- a/extensions/kubernetes-service-binding/runtime/pom.xml +++ b/extensions/kubernetes-service-binding/runtime/pom.xml @@ -67,5 +67,55 @@ + + + disable-test-compile-on-windows + + + windows + + + + + + maven-compiler-plugin + + + default-testCompile + test-compile + + testCompile + + + true + + + + + + maven-resources-plugin + + + default-testResources + test-resources + + testResources + + + true + + + + + + maven-surefire-plugin + + true + + + + + + diff --git a/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/KubernetesDeploy.java b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/KubernetesDeploy.java index d2acba0da7915..a60da68078049 100644 --- a/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/KubernetesDeploy.java +++ b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/KubernetesDeploy.java @@ -22,7 +22,7 @@ private KubernetesDeploy() { } /** - * @return {@code true} if @{code quarkus.kubernetes.deploy=true} AND the target Kubernetes API server is reachable, + * @return {@code true} if {@code quarkus.kubernetes.deploy=true} AND the target Kubernetes API server is reachable, * {@code false} otherwise * * @throws RuntimeException if there was an error while communicating with the Kubernetes API server @@ -38,7 +38,7 @@ public boolean check(KubernetesClientBuildItem clientBuilder) { } /** - * @return {@code true} if @{code quarkus.kubernetes.deploy=true} AND the target Kubernetes API server is reachable + * @return {@code true} if {@code quarkus.kubernetes.deploy=true} AND the target Kubernetes API server is reachable * {@code false} otherwise or if there was an error while communicating with the Kubernetes API server */ public boolean checkSilently(KubernetesClientBuildItem clientBuilder) { diff --git a/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/OpenShiftConfig.java b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/OpenShiftConfig.java index 533389dc0b9b4..f58c57bc42e22 100644 --- a/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/OpenShiftConfig.java +++ b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/OpenShiftConfig.java @@ -136,7 +136,7 @@ enum OpenshiftFlavor { static boolean isOpenshiftBuildEnabled(ContainerImageConfig containerImageConfig, Capabilities capabilities) { boolean implicitlyEnabled = ContainerImageCapabilitiesUtil.getActiveContainerImageCapability(capabilities) .filter(c -> c.contains(OPENSHIFT) || c.contains(S2I)).isPresent(); - return containerImageConfig.builder.map(b -> b.equals(OPENSHIFT) || b.equals(S2I)).orElse(implicitlyEnabled); + return containerImageConfig.builder().map(b -> b.equals(OPENSHIFT) || b.equals(S2I)).orElse(implicitlyEnabled); } default DeploymentResourceKind getDeploymentResourceKind(Capabilities capabilities) { diff --git a/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/OpenshiftProcessor.java b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/OpenshiftProcessor.java index 4dab0ccd0d55c..b0af39ae116e7 100644 --- 
a/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/OpenshiftProcessor.java +++ b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/OpenshiftProcessor.java @@ -104,7 +104,7 @@ public void populateInternalRegistry( BuildProducer containerImageRegistry, BuildProducer singleSegmentContainerImageRequest) { - if (containerImageConfig.registry.isEmpty() && containerImageConfig.image.isEmpty()) { + if (containerImageConfig.registry().isEmpty() && containerImageConfig.image().isEmpty()) { DeploymentResourceKind deploymentResourceKind = openshiftConfig.getDeploymentResourceKind(capabilities); if (deploymentResourceKind != DeploymentResourceKind.DeploymentConfig) { if (OpenShiftConfig.isOpenshiftBuildEnabled(containerImageConfig, capabilities)) { @@ -323,7 +323,7 @@ public List createDecorators(ApplicationInfoBuildItem applic result.add(new DecoratorBuildItem(OPENSHIFT, new RemoveBuilderImageResourceDecorator(DEFAULT_S2I_IMAGE_NAME))); } - if (containerImageConfig.builder.isEmpty() + if (containerImageConfig.builder().isEmpty() || OpenShiftConfig.isOpenshiftBuildEnabled(containerImageConfig, capabilities)) { result.add(new DecoratorBuildItem(OPENSHIFT, new ApplyBuilderImageDecorator(name, builderImage))); ImageReference imageRef = ImageReference.parse(builderImage); @@ -367,7 +367,7 @@ public List createDecorators(ApplicationInfoBuildItem applic image.ifPresent(i -> { String registry = i.registry - .or(() -> containerImageConfig.registry) + .or(() -> containerImageConfig.registry()) .orElse(fallbackRegistry.map(FallbackContainerImageRegistryBuildItem::getRegistry) .orElse(DOCKERIO_REGISTRY)); String repositoryWithRegistry = registry + "/" + i.getRepository(); diff --git a/extensions/liquibase-mongodb/deployment/pom.xml b/extensions/liquibase-mongodb/deployment/pom.xml index cc4f53053719e..33d685500b95d 100644 --- a/extensions/liquibase-mongodb/deployment/pom.xml +++ b/extensions/liquibase-mongodb/deployment/pom.xml @@ -48,9 +48,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/liquibase-mongodb/deployment/src/main/java/io/quarkus/liquibase/mongodb/deployment/LiquibaseMongodbProcessor.java b/extensions/liquibase-mongodb/deployment/src/main/java/io/quarkus/liquibase/mongodb/deployment/LiquibaseMongodbProcessor.java index 345a99cc7be1c..863e6f6914372 100644 --- a/extensions/liquibase-mongodb/deployment/src/main/java/io/quarkus/liquibase/mongodb/deployment/LiquibaseMongodbProcessor.java +++ b/extensions/liquibase-mongodb/deployment/src/main/java/io/quarkus/liquibase/mongodb/deployment/LiquibaseMongodbProcessor.java @@ -265,7 +265,7 @@ private List getChangeLogs(LiquibaseMongodbBuildTimeConfig liquibaseBuil Thread.currentThread().getContextClassLoader())) { Set resources = new LinkedHashSet<>( - findAllChangeLogFiles(liquibaseBuildConfig.changeLog, changeLogParserFactory, + findAllChangeLogFiles(liquibaseBuildConfig.changeLog(), changeLogParserFactory, classLoaderResourceAccessor, changeLogParameters)); LOGGER.debugf("Liquibase changeLogs: %s", resources); diff --git a/extensions/liquibase-mongodb/runtime/pom.xml b/extensions/liquibase-mongodb/runtime/pom.xml index df90bef0fcdeb..856a9fd6f3bde 100644 --- a/extensions/liquibase-mongodb/runtime/pom.xml +++ b/extensions/liquibase-mongodb/runtime/pom.xml @@ -81,9 +81,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/liquibase-mongodb/runtime/src/main/java/io/quarkus/liquibase/mongodb/LiquibaseMongodbFactory.java 
b/extensions/liquibase-mongodb/runtime/src/main/java/io/quarkus/liquibase/mongodb/LiquibaseMongodbFactory.java index 0286f923a9614..50da6e7374b28 100644 --- a/extensions/liquibase-mongodb/runtime/src/main/java/io/quarkus/liquibase/mongodb/LiquibaseMongodbFactory.java +++ b/extensions/liquibase-mongodb/runtime/src/main/java/io/quarkus/liquibase/mongodb/LiquibaseMongodbFactory.java @@ -44,20 +44,20 @@ private ResourceAccessor resolveResourceAccessor() throws FileNotFoundException compositeResourceAccessor .addResourceAccessor(new ClassLoaderResourceAccessor(Thread.currentThread().getContextClassLoader())); - if (!liquibaseMongodbBuildTimeConfig.changeLog.startsWith("filesystem:") - && liquibaseMongodbBuildTimeConfig.searchPath.isEmpty()) { + if (!liquibaseMongodbBuildTimeConfig.changeLog().startsWith("filesystem:") + && liquibaseMongodbBuildTimeConfig.searchPath().isEmpty()) { return compositeResourceAccessor; } - if (liquibaseMongodbBuildTimeConfig.searchPath.isEmpty()) { + if (liquibaseMongodbBuildTimeConfig.searchPath().isEmpty()) { compositeResourceAccessor.addResourceAccessor( new DirectoryResourceAccessor( - Paths.get(StringUtil.changePrefix(liquibaseMongodbBuildTimeConfig.changeLog, "filesystem:", "")) + Paths.get(StringUtil.changePrefix(liquibaseMongodbBuildTimeConfig.changeLog(), "filesystem:", "")) .getParent())); return compositeResourceAccessor; } - for (String searchPath : liquibaseMongodbBuildTimeConfig.searchPath.get()) { + for (String searchPath : liquibaseMongodbBuildTimeConfig.searchPath().get()) { compositeResourceAccessor.addResourceAccessor(new DirectoryResourceAccessor(Paths.get(searchPath))); } @@ -66,7 +66,7 @@ private ResourceAccessor resolveResourceAccessor() throws FileNotFoundException private String parseChangeLog(String changeLog) { - if (changeLog.startsWith("filesystem:") && liquibaseMongodbBuildTimeConfig.searchPath.isEmpty()) { + if (changeLog.startsWith("filesystem:") && liquibaseMongodbBuildTimeConfig.searchPath().isEmpty()) { return Paths.get(StringUtil.changePrefix(changeLog, "filesystem:", "")).getFileName().toString(); } @@ -83,8 +83,8 @@ private String parseChangeLog(String changeLog) { public Liquibase createLiquibase() { try (ResourceAccessor resourceAccessor = resolveResourceAccessor()) { - String parsedChangeLog = parseChangeLog(liquibaseMongodbBuildTimeConfig.changeLog); - String connectionString = this.mongoClientConfig.connectionString.orElse("mongodb://localhost:27017"); + String parsedChangeLog = parseChangeLog(liquibaseMongodbBuildTimeConfig.changeLog()); + String connectionString = this.mongoClientConfig.connectionString().orElse("mongodb://localhost:27017"); // Every MongoDB client configuration must be added to the connection string, we didn't add all as it would be too much to support. // For reference, all connections string options can be found here: https://www.mongodb.com/docs/manual/reference/connection-string/#connection-string-options. 
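[Editor's note] The accessor-style calls introduced in this file (changeLog(), connectionString(), credentials().authSource(), and so on) follow from the migration of these config roots from @ConfigItem field classes to SmallRye @ConfigMapping interfaces, shown further down in this diff. Below is a minimal sketch of that pattern, using a hypothetical ExampleConfig rather than the real Quarkus config classes; property names and the prefix "quarkus.example" are illustrative assumptions, not part of this change.

import java.util.Optional;

import io.quarkus.runtime.annotations.ConfigPhase;
import io.quarkus.runtime.annotations.ConfigRoot;
import io.smallrye.config.ConfigMapping;
import io.smallrye.config.WithDefault;

// Hypothetical mapping interface: values are read through generated accessors
// (exampleConfig.changeLog()) instead of public fields (exampleConfig.changeLog).
@ConfigRoot(phase = ConfigPhase.RUN_TIME)
@ConfigMapping(prefix = "quarkus.example")
public interface ExampleConfig {

    // Bound to quarkus.example.change-log, with a build-in default value.
    @WithDefault("db/changeLog.xml")
    String changeLog();

    // Bound to quarkus.example.label; empty unless the user sets it.
    Optional<String> label();
}

A consumer injects the interface and reads values through the accessors, e.g. exampleConfig.label().orElse(null), which is why field references such as liquibaseMongodbBuildTimeConfig.changeLog become method calls like liquibaseMongodbBuildTimeConfig.changeLog() throughout the hunks that follow.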
@@ -93,48 +93,48 @@ public Liquibase createLiquibase() { if (!matcher.matches() || matcher.group("db") == null || matcher.group("db").isEmpty()) { connectionString = matcher.replaceFirst( "${prefix}${hosts}/" - + this.mongoClientConfig.database + + this.mongoClientConfig.database() .orElseThrow(() -> new IllegalArgumentException("Config property " + "'quarkus.mongodb.database' must be defined when no database exist in the connection string")) + "${options}"); } - if (mongoClientConfig.credentials.authSource.isPresent()) { + if (mongoClientConfig.credentials().authSource().isPresent()) { boolean alreadyHasQueryParams = connectionString.contains("?"); connectionString += (alreadyHasQueryParams ? "&" : "?") + "authSource=" - + mongoClientConfig.credentials.authSource.get(); + + mongoClientConfig.credentials().authSource().get(); } - if (mongoClientConfig.credentials.authMechanism.isPresent()) { + if (mongoClientConfig.credentials().authMechanism().isPresent()) { boolean alreadyHasQueryParams = connectionString.contains("?"); connectionString += (alreadyHasQueryParams ? "&" : "?") + "authMechanism=" - + mongoClientConfig.credentials.authMechanism.get(); + + mongoClientConfig.credentials().authMechanism().get(); } - if (!mongoClientConfig.credentials.authMechanismProperties.isEmpty()) { + if (!mongoClientConfig.credentials().authMechanismProperties().isEmpty()) { boolean alreadyHasQueryParams = connectionString.contains("?"); connectionString += (alreadyHasQueryParams ? "&" : "?") + "authMechanismProperties=" - + mongoClientConfig.credentials.authMechanismProperties.entrySet().stream() + + mongoClientConfig.credentials().authMechanismProperties().entrySet().stream() .map(prop -> prop.getKey() + ":" + prop.getValue()).collect(Collectors.joining(",")); } Database database = DatabaseFactory.getInstance().openDatabase(connectionString, - this.mongoClientConfig.credentials.username.orElse(null), - this.mongoClientConfig.credentials.password.orElse(null), + this.mongoClientConfig.credentials().username().orElse(null), + this.mongoClientConfig.credentials().password().orElse(null), null, resourceAccessor); if (database != null) { - liquibaseMongodbConfig.liquibaseCatalogName.ifPresent(database::setLiquibaseCatalogName); - liquibaseMongodbConfig.liquibaseSchemaName.ifPresent(database::setLiquibaseSchemaName); - liquibaseMongodbConfig.liquibaseTablespaceName.ifPresent(database::setLiquibaseTablespaceName); + liquibaseMongodbConfig.liquibaseCatalogName().ifPresent(database::setLiquibaseCatalogName); + liquibaseMongodbConfig.liquibaseSchemaName().ifPresent(database::setLiquibaseSchemaName); + liquibaseMongodbConfig.liquibaseTablespaceName().ifPresent(database::setLiquibaseTablespaceName); - if (liquibaseMongodbConfig.defaultCatalogName.isPresent()) { - database.setDefaultCatalogName(liquibaseMongodbConfig.defaultCatalogName.get()); + if (liquibaseMongodbConfig.defaultCatalogName().isPresent()) { + database.setDefaultCatalogName(liquibaseMongodbConfig.defaultCatalogName().get()); } - if (liquibaseMongodbConfig.defaultSchemaName.isPresent()) { - database.setDefaultSchemaName(liquibaseMongodbConfig.defaultSchemaName.get()); + if (liquibaseMongodbConfig.defaultSchemaName().isPresent()) { + database.setDefaultSchemaName(liquibaseMongodbConfig.defaultSchemaName().get()); } } Liquibase liquibase = new Liquibase(parsedChangeLog, resourceAccessor, database); - for (Map.Entry entry : liquibaseMongodbConfig.changeLogParameters.entrySet()) { + for (Map.Entry entry : 
liquibaseMongodbConfig.changeLogParameters().entrySet()) { liquibase.getChangeLogParameters().set(entry.getKey(), entry.getValue()); } @@ -155,7 +155,7 @@ public LiquibaseMongodbConfig getConfiguration() { * @return the label expression */ public LabelExpression createLabels() { - return new LabelExpression(liquibaseMongodbConfig.labels.orElse(null)); + return new LabelExpression(liquibaseMongodbConfig.labels().orElse(null)); } /** @@ -164,6 +164,6 @@ public LabelExpression createLabels() { * @return the contexts */ public Contexts createContexts() { - return new Contexts(liquibaseMongodbConfig.contexts.orElse(null)); + return new Contexts(liquibaseMongodbConfig.contexts().orElse(null)); } } diff --git a/extensions/liquibase-mongodb/runtime/src/main/java/io/quarkus/liquibase/mongodb/runtime/LiquibaseMongodbBuildTimeConfig.java b/extensions/liquibase-mongodb/runtime/src/main/java/io/quarkus/liquibase/mongodb/runtime/LiquibaseMongodbBuildTimeConfig.java index a06832603b4af..c483d268003a8 100644 --- a/extensions/liquibase-mongodb/runtime/src/main/java/io/quarkus/liquibase/mongodb/runtime/LiquibaseMongodbBuildTimeConfig.java +++ b/extensions/liquibase-mongodb/runtime/src/main/java/io/quarkus/liquibase/mongodb/runtime/LiquibaseMongodbBuildTimeConfig.java @@ -3,25 +3,26 @@ import java.util.List; import java.util.Optional; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefault; /** * The liquibase configuration */ -@ConfigRoot(name = "liquibase-mongodb", phase = ConfigPhase.BUILD_TIME) -public class LiquibaseMongodbBuildTimeConfig { +@ConfigRoot(phase = ConfigPhase.BUILD_AND_RUN_TIME_FIXED) +@ConfigMapping(prefix = "quarkus.liquibase-mongodb") +public interface LiquibaseMongodbBuildTimeConfig { /** * The change log file */ - @ConfigItem(defaultValue = "db/changeLog.xml") - public String changeLog; + @WithDefault("db/changeLog.xml") + String changeLog(); /** * The search path for DirectoryResourceAccessor */ - @ConfigItem - public Optional> searchPath; + Optional> searchPath(); } diff --git a/extensions/liquibase-mongodb/runtime/src/main/java/io/quarkus/liquibase/mongodb/runtime/LiquibaseMongodbConfig.java b/extensions/liquibase-mongodb/runtime/src/main/java/io/quarkus/liquibase/mongodb/runtime/LiquibaseMongodbConfig.java index 00a3c8d7e3eaa..41f36265787c2 100644 --- a/extensions/liquibase-mongodb/runtime/src/main/java/io/quarkus/liquibase/mongodb/runtime/LiquibaseMongodbConfig.java +++ b/extensions/liquibase-mongodb/runtime/src/main/java/io/quarkus/liquibase/mongodb/runtime/LiquibaseMongodbConfig.java @@ -1,91 +1,84 @@ package io.quarkus.liquibase.mongodb.runtime; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefault; /** * The liquibase configuration */ -@ConfigRoot(name = "liquibase-mongodb", phase = ConfigPhase.RUN_TIME) -public class LiquibaseMongodbConfig { +@ConfigRoot(phase = ConfigPhase.RUN_TIME) +@ConfigMapping(prefix = "quarkus.liquibase-mongodb") +public interface LiquibaseMongodbConfig { /** * Flag to enable / disable Liquibase. 
* */ - @ConfigItem(defaultValue = "true") - public boolean enabled; + @WithDefault("true") + boolean enabled(); /** * The migrate at start flag */ - @ConfigItem - public boolean migrateAtStart; + @WithDefault("false") + boolean migrateAtStart(); /** * The validate on update flag */ - @ConfigItem(defaultValue = "true") - public boolean validateOnMigrate; + @WithDefault("true") + boolean validateOnMigrate(); /** * The clean at start flag */ - @ConfigItem - public boolean cleanAtStart; + @WithDefault("false") + boolean cleanAtStart(); /** * The parameters to be passed to the changelog. * Defined as key value pairs. */ - @ConfigItem - public Map changeLogParameters = new HashMap<>();; + Map changeLogParameters(); /** * The list of contexts */ - @ConfigItem - public Optional> contexts = Optional.empty(); + Optional> contexts(); /** * The list of labels */ - @ConfigItem - public Optional> labels = Optional.empty(); + Optional> labels(); /** * The default catalog name */ - @ConfigItem - public Optional defaultCatalogName = Optional.empty(); + Optional defaultCatalogName(); /** * The default schema name */ - @ConfigItem - public Optional defaultSchemaName = Optional.empty(); + Optional defaultSchemaName(); /** * The liquibase tables catalog name */ - @ConfigItem - public Optional liquibaseCatalogName = Optional.empty(); + Optional liquibaseCatalogName(); /** * The liquibase tables schema name */ - @ConfigItem - public Optional liquibaseSchemaName = Optional.empty(); + Optional liquibaseSchemaName(); /** * The liquibase tables tablespace name */ - @ConfigItem - public Optional liquibaseTablespaceName = Optional.empty(); + Optional liquibaseTablespaceName(); } diff --git a/extensions/liquibase-mongodb/runtime/src/main/java/io/quarkus/liquibase/mongodb/runtime/LiquibaseMongodbRecorder.java b/extensions/liquibase-mongodb/runtime/src/main/java/io/quarkus/liquibase/mongodb/runtime/LiquibaseMongodbRecorder.java index 226c068640d49..c1f377450a9a6 100644 --- a/extensions/liquibase-mongodb/runtime/src/main/java/io/quarkus/liquibase/mongodb/runtime/LiquibaseMongodbRecorder.java +++ b/extensions/liquibase-mongodb/runtime/src/main/java/io/quarkus/liquibase/mongodb/runtime/LiquibaseMongodbRecorder.java @@ -28,13 +28,13 @@ public Supplier liquibaseSupplier(LiquibaseMongodbConfi return new Supplier() { @Override public LiquibaseMongodbFactory get() { - return new LiquibaseMongodbFactory(config, buildTimeConfig, mongodbConfig.defaultMongoClientConfig); + return new LiquibaseMongodbFactory(config, buildTimeConfig, mongodbConfig.defaultMongoClientConfig()); } }; } public void doStartActions() { - if (!config.getValue().enabled) { + if (!config.getValue().enabled()) { return; } try { @@ -48,18 +48,18 @@ public void doStartActions() { try { LiquibaseMongodbFactory liquibaseFactory = liquibaseFactoryHandle.get(); - if (!liquibaseFactory.getConfiguration().cleanAtStart - && !liquibaseFactory.getConfiguration().migrateAtStart) { + if (!liquibaseFactory.getConfiguration().cleanAtStart() + && !liquibaseFactory.getConfiguration().migrateAtStart()) { // Don't initialize if no clean or migration required at start return; } try (Liquibase liquibase = liquibaseFactory.createLiquibase()) { - if (liquibaseFactory.getConfiguration().cleanAtStart) { + if (liquibaseFactory.getConfiguration().cleanAtStart()) { liquibase.dropAll(); } - if (liquibaseFactory.getConfiguration().migrateAtStart) { - if (liquibaseFactory.getConfiguration().validateOnMigrate) { + if (liquibaseFactory.getConfiguration().migrateAtStart()) { + if 
(liquibaseFactory.getConfiguration().validateOnMigrate()) { liquibase.validate(); } liquibase.update(liquibaseFactory.createContexts(), liquibaseFactory.createLabels()); diff --git a/extensions/liquibase/deployment/pom.xml b/extensions/liquibase/deployment/pom.xml index c41fc61187a3e..7d7ea5f1933b2 100644 --- a/extensions/liquibase/deployment/pom.xml +++ b/extensions/liquibase/deployment/pom.xml @@ -74,9 +74,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/liquibase/deployment/src/main/java/io/quarkus/liquibase/deployment/LiquibaseProcessor.java b/extensions/liquibase/deployment/src/main/java/io/quarkus/liquibase/deployment/LiquibaseProcessor.java index b6587fb184fc3..c733f25229c21 100644 --- a/extensions/liquibase/deployment/src/main/java/io/quarkus/liquibase/deployment/LiquibaseProcessor.java +++ b/extensions/liquibase/deployment/src/main/java/io/quarkus/liquibase/deployment/LiquibaseProcessor.java @@ -336,14 +336,8 @@ private List getChangeLogs(Collection dataSourceNames, Liquibase List liquibaseDataSources = new ArrayList<>(); - if (DataSourceUtil.hasDefault(dataSourceNames)) { - liquibaseDataSources.add(liquibaseBuildConfig.defaultDataSource); - } - for (String dataSourceName : dataSourceNames) { - if (!DataSourceUtil.isDefault(dataSourceName)) { - liquibaseDataSources.add(liquibaseBuildConfig.getConfigForDataSourceName(dataSourceName)); - } + liquibaseDataSources.add(liquibaseBuildConfig.datasources().get(dataSourceName)); } ChangeLogParameters changeLogParameters = new ChangeLogParameters(); @@ -351,8 +345,8 @@ private List getChangeLogs(Collection dataSourceNames, Liquibase Set resources = new LinkedHashSet<>(); for (LiquibaseDataSourceBuildTimeConfig liquibaseDataSourceConfig : liquibaseDataSources) { - Optional> oSearchPaths = liquibaseDataSourceConfig.searchPath; - String changeLog = liquibaseDataSourceConfig.changeLog; + Optional> oSearchPaths = liquibaseDataSourceConfig.searchPath(); + String changeLog = liquibaseDataSourceConfig.changeLog(); String parsedChangeLog = parseChangeLog(oSearchPaths, changeLog); try (ResourceAccessor resourceAccessor = resolveResourceAccessor(oSearchPaths, changeLog)) { diff --git a/extensions/liquibase/deployment/src/test/java/io/quarkus/liquibase/test/LiquibaseExtensionConfigFixture.java b/extensions/liquibase/deployment/src/test/java/io/quarkus/liquibase/test/LiquibaseExtensionConfigFixture.java index 1e4f916ab50d9..e33cc9cb867fd 100644 --- a/extensions/liquibase/deployment/src/test/java/io/quarkus/liquibase/test/LiquibaseExtensionConfigFixture.java +++ b/extensions/liquibase/deployment/src/test/java/io/quarkus/liquibase/test/LiquibaseExtensionConfigFixture.java @@ -48,7 +48,7 @@ public void assertAllConfigurationSettings(LiquibaseConfig configuration, String public void assertDefaultConfigurationSettings(LiquibaseConfig configuration) { - assertEquals(configuration.changeLog, LiquibaseDataSourceBuildTimeConfig.defaultConfig().changeLog); + assertEquals(configuration.changeLog, LiquibaseDataSourceBuildTimeConfig.DEFAULT_CHANGE_LOG); assertEquals(configuration.databaseChangeLogTableName, GlobalConfiguration.DATABASECHANGELOG_TABLE_NAME.getCurrentValue()); diff --git a/extensions/liquibase/runtime/pom.xml b/extensions/liquibase/runtime/pom.xml index 2e094d6c9d4e3..7cf7e6533a7bc 100644 --- a/extensions/liquibase/runtime/pom.xml +++ b/extensions/liquibase/runtime/pom.xml @@ -93,9 +93,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git 
a/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseBuildTimeConfig.java b/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseBuildTimeConfig.java index b153b8dcd4c20..a0290bfc8691c 100644 --- a/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseBuildTimeConfig.java +++ b/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseBuildTimeConfig.java @@ -1,52 +1,31 @@ package io.quarkus.liquibase.runtime; -import java.util.Collections; import java.util.Map; import io.quarkus.datasource.common.runtime.DataSourceUtil; import io.quarkus.runtime.annotations.ConfigDocMapKey; import io.quarkus.runtime.annotations.ConfigDocSection; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefaults; +import io.smallrye.config.WithParentName; +import io.smallrye.config.WithUnnamedKey; /** * The liquibase build time configuration */ -@ConfigRoot(name = "liquibase", phase = ConfigPhase.BUILD_AND_RUN_TIME_FIXED) -public final class LiquibaseBuildTimeConfig { +@ConfigRoot(phase = ConfigPhase.BUILD_AND_RUN_TIME_FIXED) +@ConfigMapping(prefix = "quarkus.liquibase") +public interface LiquibaseBuildTimeConfig { /** - * Gets the default build time configuration - * - * @return the liquibase build time default configuration + * Datasources. */ - public static LiquibaseBuildTimeConfig defaultConfig() { - LiquibaseBuildTimeConfig defaultConfig = new LiquibaseBuildTimeConfig(); - defaultConfig.defaultDataSource = LiquibaseDataSourceBuildTimeConfig.defaultConfig(); - return defaultConfig; - } - - /** - * Gets the {@link LiquibaseBuildTimeConfig} for the given datasource name. - */ - public LiquibaseDataSourceBuildTimeConfig getConfigForDataSourceName(String dataSourceName) { - return DataSourceUtil.isDefault(dataSourceName) - ? defaultDataSource - : namedDataSources.getOrDefault(dataSourceName, LiquibaseDataSourceBuildTimeConfig.defaultConfig()); - } - - /** - * Liquibase configuration for the default datasource. - */ - @ConfigItem(name = ConfigItem.PARENT) - public LiquibaseDataSourceBuildTimeConfig defaultDataSource; - - /** - * Named datasources. 
- */ - @ConfigItem(name = ConfigItem.PARENT) @ConfigDocMapKey("datasource-name") @ConfigDocSection - public Map namedDataSources = Collections.emptyMap(); + @WithParentName + @WithUnnamedKey(DataSourceUtil.DEFAULT_DATASOURCE_NAME) + @WithDefaults + public Map datasources(); } diff --git a/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseConfig.java b/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseConfig.java index 55d036b556281..a9fc7b70e7469 100644 --- a/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseConfig.java +++ b/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseConfig.java @@ -8,8 +8,6 @@ import java.util.Map; import java.util.Optional; -import io.quarkus.runtime.annotations.ConfigDocMapKey; - /** * The liquibase configuration */ @@ -50,7 +48,6 @@ public class LiquibaseConfig { */ public List labels = null; - @ConfigDocMapKey("parameter-name") public Map changeLogParameters = null; /** diff --git a/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseCreator.java b/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseCreator.java index 5dfae13603e4f..5915420bc1e7d 100644 --- a/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseCreator.java +++ b/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseCreator.java @@ -17,33 +17,29 @@ public LiquibaseCreator(LiquibaseDataSourceRuntimeConfig liquibaseRuntimeConfig, public LiquibaseFactory createLiquibaseFactory(DataSource dataSource, String dataSourceName) { LiquibaseConfig config = new LiquibaseConfig(); - config.changeLog = liquibaseBuildTimeConfig.changeLog; - config.searchPath = liquibaseBuildTimeConfig.searchPath; - config.changeLogParameters = liquibaseRuntimeConfig.changeLogParameters; + config.changeLog = liquibaseBuildTimeConfig.changeLog(); + config.searchPath = liquibaseBuildTimeConfig.searchPath(); + config.changeLogParameters = liquibaseRuntimeConfig.changeLogParameters(); - if (liquibaseRuntimeConfig.labels.isPresent()) { - config.labels = liquibaseRuntimeConfig.labels.get(); + if (liquibaseRuntimeConfig.labels().isPresent()) { + config.labels = liquibaseRuntimeConfig.labels().get(); } - if (liquibaseRuntimeConfig.contexts.isPresent()) { - config.contexts = liquibaseRuntimeConfig.contexts.get(); + if (liquibaseRuntimeConfig.contexts().isPresent()) { + config.contexts = liquibaseRuntimeConfig.contexts().get(); } - if (liquibaseRuntimeConfig.databaseChangeLogLockTableName.isPresent()) { - config.databaseChangeLogLockTableName = liquibaseRuntimeConfig.databaseChangeLogLockTableName.get(); - } - if (liquibaseRuntimeConfig.databaseChangeLogTableName.isPresent()) { - config.databaseChangeLogTableName = liquibaseRuntimeConfig.databaseChangeLogTableName.get(); - } - config.password = liquibaseRuntimeConfig.password; - config.username = liquibaseRuntimeConfig.username; - config.defaultSchemaName = liquibaseRuntimeConfig.defaultSchemaName; - config.defaultCatalogName = liquibaseRuntimeConfig.defaultCatalogName; - config.liquibaseTablespaceName = liquibaseRuntimeConfig.liquibaseTablespaceName; - config.liquibaseSchemaName = liquibaseRuntimeConfig.liquibaseSchemaName; - config.liquibaseCatalogName = liquibaseRuntimeConfig.liquibaseCatalogName; - config.migrateAtStart = liquibaseRuntimeConfig.migrateAtStart; - config.cleanAtStart = liquibaseRuntimeConfig.cleanAtStart; - config.validateOnMigrate = 
liquibaseRuntimeConfig.validateOnMigrate; - config.allowDuplicatedChangesetIdentifiers = liquibaseRuntimeConfig.allowDuplicatedChangesetIdentifiers; + config.databaseChangeLogLockTableName = liquibaseRuntimeConfig.databaseChangeLogLockTableName(); + config.databaseChangeLogTableName = liquibaseRuntimeConfig.databaseChangeLogTableName(); + config.password = liquibaseRuntimeConfig.password(); + config.username = liquibaseRuntimeConfig.username(); + config.defaultSchemaName = liquibaseRuntimeConfig.defaultSchemaName(); + config.defaultCatalogName = liquibaseRuntimeConfig.defaultCatalogName(); + config.liquibaseTablespaceName = liquibaseRuntimeConfig.liquibaseTablespaceName(); + config.liquibaseSchemaName = liquibaseRuntimeConfig.liquibaseSchemaName(); + config.liquibaseCatalogName = liquibaseRuntimeConfig.liquibaseCatalogName(); + config.migrateAtStart = liquibaseRuntimeConfig.migrateAtStart(); + config.cleanAtStart = liquibaseRuntimeConfig.cleanAtStart(); + config.validateOnMigrate = liquibaseRuntimeConfig.validateOnMigrate(); + config.allowDuplicatedChangesetIdentifiers = liquibaseRuntimeConfig.allowDuplicatedChangesetIdentifiers(); return new LiquibaseFactory(config, dataSource, dataSourceName); } } diff --git a/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseDataSourceBuildTimeConfig.java b/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseDataSourceBuildTimeConfig.java index f4d9c03c3ae44..a9818c6de7d06 100644 --- a/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseDataSourceBuildTimeConfig.java +++ b/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseDataSourceBuildTimeConfig.java @@ -4,37 +4,24 @@ import java.util.Optional; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; +import io.smallrye.config.WithDefault; /** * The liquibase data source build time configuration */ @ConfigGroup -public final class LiquibaseDataSourceBuildTimeConfig { +public interface LiquibaseDataSourceBuildTimeConfig { static final String DEFAULT_CHANGE_LOG = "db/changeLog.xml"; - /** - * Creates a {@link LiquibaseDataSourceBuildTimeConfig} with default settings. - * - * @return {@link LiquibaseDataSourceBuildTimeConfig} - */ - public static final LiquibaseDataSourceBuildTimeConfig defaultConfig() { - LiquibaseDataSourceBuildTimeConfig defaultConfig = new LiquibaseDataSourceBuildTimeConfig(); - defaultConfig.changeLog = DEFAULT_CHANGE_LOG; - defaultConfig.searchPath = Optional.empty(); - return defaultConfig; - } - /** * The liquibase change log file. All included change log files in this file are scanned and add to the projects. 
*/ - @ConfigItem(defaultValue = DEFAULT_CHANGE_LOG) - public String changeLog; + @WithDefault(DEFAULT_CHANGE_LOG) + String changeLog(); /** * The search path for DirectoryResourceAccessor */ - @ConfigItem - public Optional> searchPath; + Optional> searchPath(); } diff --git a/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseDataSourceRuntimeConfig.java b/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseDataSourceRuntimeConfig.java index 6dcc0f87f97d0..5556b09e99a90 100644 --- a/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseDataSourceRuntimeConfig.java +++ b/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseDataSourceRuntimeConfig.java @@ -1,19 +1,18 @@ package io.quarkus.liquibase.runtime; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import io.quarkus.runtime.annotations.ConfigDocMapKey; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; +import io.smallrye.config.WithDefault; /** * The liquibase data source runtime time configuration */ @ConfigGroup -public final class LiquibaseDataSourceRuntimeConfig { +public interface LiquibaseDataSourceRuntimeConfig { /** * The default liquibase lock table @@ -25,120 +24,97 @@ public final class LiquibaseDataSourceRuntimeConfig { */ static final String DEFAULT_LOG_TABLE = "DATABASECHANGELOG"; - /** - * Creates a {@link LiquibaseDataSourceRuntimeConfig} with default settings. - * - * @return {@link LiquibaseDataSourceRuntimeConfig} - */ - public static final LiquibaseDataSourceRuntimeConfig defaultConfig() { - LiquibaseDataSourceRuntimeConfig config = new LiquibaseDataSourceRuntimeConfig(); - config.databaseChangeLogLockTableName = Optional.of(DEFAULT_LOCK_TABLE); - config.databaseChangeLogTableName = Optional.of(DEFAULT_LOG_TABLE); - return config; - } - /** * {@code true} to execute Liquibase automatically when the application starts, {@code false} otherwise. * */ - @ConfigItem - public boolean migrateAtStart; + @WithDefault("false") + boolean migrateAtStart(); /** * {@code true} to validate the applied changes against the available ones, {@code false} otherwise. It is only used if * {@code migration-at-start} is {@code true} * */ - @ConfigItem(defaultValue = "true") - public boolean validateOnMigrate; + @WithDefault("true") + boolean validateOnMigrate(); /** * {@code true} to execute Liquibase clean command automatically when the application starts, {@code false} otherwise. * */ - @ConfigItem - public boolean cleanAtStart; + @WithDefault("false") + boolean cleanAtStart(); /** * Comma-separated case-sensitive list of ChangeSet contexts to execute for liquibase. */ - @ConfigItem - public Optional> contexts = Optional.empty(); + Optional> contexts(); /** * Comma-separated case-sensitive list of expressions defining labeled ChangeSet to execute for liquibase. */ - @ConfigItem - public Optional> labels = Optional.empty(); + Optional> labels(); /** * Map of parameters that can be used inside Liquibase changeLog files. */ - @ConfigItem @ConfigDocMapKey("parameter-name") - public Map changeLogParameters = new HashMap<>(); + Map changeLogParameters(); /** * The liquibase change log lock table name. Name of table to use for tracking concurrent Liquibase usage. 
*/ - @ConfigItem(defaultValue = DEFAULT_LOCK_TABLE) - public Optional databaseChangeLogLockTableName = Optional.empty(); + @WithDefault(DEFAULT_LOCK_TABLE) + String databaseChangeLogLockTableName(); /** * The liquibase change log table name. Name of table to use for tracking change history. */ - @ConfigItem(defaultValue = DEFAULT_LOG_TABLE) - public Optional databaseChangeLogTableName = Optional.empty(); + @WithDefault(DEFAULT_LOG_TABLE) + String databaseChangeLogTableName(); /** * The name of Liquibase's default catalog. */ - @ConfigItem - public Optional defaultCatalogName = Optional.empty(); + Optional defaultCatalogName(); /** * The name of Liquibase's default schema. Overwrites the default schema name * (returned by the RDBMS) with a different database schema. */ - @ConfigItem - public Optional defaultSchemaName = Optional.empty(); + Optional defaultSchemaName(); /** * The username that Liquibase uses to connect to the database. * If no specific username is configured, falls back to the datasource username and password. */ - @ConfigItem - public Optional username = Optional.empty(); + Optional username(); /** * The password that Liquibase uses to connect to the database. * If no specific password is configured, falls back to the datasource username and password. */ - @ConfigItem - public Optional password = Optional.empty(); + Optional password(); /** * The name of the catalog with the liquibase tables. */ - @ConfigItem - public Optional liquibaseCatalogName = Optional.empty(); + Optional liquibaseCatalogName(); /** * The name of the schema with the liquibase tables. */ - @ConfigItem - public Optional liquibaseSchemaName = Optional.empty(); + Optional liquibaseSchemaName(); /** * The name of the tablespace where the -LOG and -LOCK tables will be created (if they do not exist yet). */ - @ConfigItem - public Optional liquibaseTablespaceName = Optional.empty(); + Optional liquibaseTablespaceName(); /** * Allows duplicated changeset identifiers without failing Liquibase execution. 
*/ - @ConfigItem - public Optional allowDuplicatedChangesetIdentifiers = Optional.empty(); + Optional allowDuplicatedChangesetIdentifiers(); } diff --git a/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseFactoryProducer.java b/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseFactoryProducer.java index ae368b0ac4b95..4ab86056dc588 100644 --- a/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseFactoryProducer.java +++ b/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseFactoryProducer.java @@ -27,10 +27,10 @@ public LiquibaseFactoryProducer(LiquibaseBuildTimeConfig liquibaseBuildTimeConfi } public LiquibaseFactory createLiquibaseFactory(DataSource dataSource, String dataSourceName) { - LiquibaseDataSourceBuildTimeConfig matchingBuildTimeConfig = liquibaseBuildTimeConfig - .getConfigForDataSourceName(dataSourceName); - LiquibaseDataSourceRuntimeConfig matchingRuntimeConfig = liquibaseRuntimeConfig - .getConfigForDataSourceName(dataSourceName); + LiquibaseDataSourceBuildTimeConfig matchingBuildTimeConfig = liquibaseBuildTimeConfig.datasources() + .get(dataSourceName); + LiquibaseDataSourceRuntimeConfig matchingRuntimeConfig = liquibaseRuntimeConfig.datasources() + .get(dataSourceName); return new LiquibaseCreator(matchingRuntimeConfig, matchingBuildTimeConfig) .createLiquibaseFactory(dataSource, dataSourceName); } diff --git a/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseRecorder.java b/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseRecorder.java index 8334c79ae0aaa..c0f217bb77b42 100644 --- a/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseRecorder.java +++ b/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseRecorder.java @@ -68,12 +68,12 @@ public LiquibaseFactory apply(SyntheticCreationalContext conte } public void doStartActions(String dataSourceName) { - if (!config.getValue().enabled) { + if (!config.getValue().enabled()) { return; } - var dataSourceConfig = config.getValue().getConfigForDataSourceName(dataSourceName); - if (!dataSourceConfig.cleanAtStart && !dataSourceConfig.migrateAtStart) { + var dataSourceConfig = config.getValue().datasources().get(dataSourceName); + if (!dataSourceConfig.cleanAtStart() && !dataSourceConfig.migrateAtStart()) { return; } @@ -88,15 +88,15 @@ public void doStartActions(String dataSourceName) { try (Liquibase liquibase = liquibaseFactory.createLiquibase(); ResettableSystemProperties resettableSystemProperties = liquibaseFactory .createResettableSystemProperties()) { - if (dataSourceConfig.cleanAtStart) { + if (dataSourceConfig.cleanAtStart()) { liquibase.dropAll(); } - if (dataSourceConfig.migrateAtStart) { + if (dataSourceConfig.migrateAtStart()) { var lockService = LockServiceFactory.getInstance() .getLockService(liquibase.getDatabase()); lockService.waitForLock(); try { - if (dataSourceConfig.validateOnMigrate) { + if (dataSourceConfig.validateOnMigrate()) { liquibase.validate(); } liquibase.update(liquibaseFactory.createContexts(), liquibaseFactory.createLabels()); diff --git a/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseRuntimeConfig.java b/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseRuntimeConfig.java index 20f904d8c6629..0775f636a04e9 100644 --- 
a/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseRuntimeConfig.java +++ b/extensions/liquibase/runtime/src/main/java/io/quarkus/liquibase/runtime/LiquibaseRuntimeConfig.java @@ -1,57 +1,39 @@ package io.quarkus.liquibase.runtime; -import java.util.Collections; import java.util.Map; import io.quarkus.datasource.common.runtime.DataSourceUtil; import io.quarkus.runtime.annotations.ConfigDocMapKey; import io.quarkus.runtime.annotations.ConfigDocSection; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefault; +import io.smallrye.config.WithDefaults; +import io.smallrye.config.WithParentName; +import io.smallrye.config.WithUnnamedKey; /** * Liquibase runtime configuration. */ -@ConfigRoot(name = "liquibase", phase = ConfigPhase.RUN_TIME) -public final class LiquibaseRuntimeConfig { - - /** - * Gets the default runtime configuration - * - * @return the liquibase runtime default configuration - */ - public static LiquibaseRuntimeConfig defaultConfig() { - return new LiquibaseRuntimeConfig(); - } - - /** - * Gets the {@link LiquibaseDataSourceRuntimeConfig} for the given datasource name. - */ - public LiquibaseDataSourceRuntimeConfig getConfigForDataSourceName(String dataSourceName) { - return DataSourceUtil.isDefault(dataSourceName) - ? defaultDataSource - : namedDataSources.getOrDefault(dataSourceName, LiquibaseDataSourceRuntimeConfig.defaultConfig()); - } +@ConfigRoot(phase = ConfigPhase.RUN_TIME) +@ConfigMapping(prefix = "quarkus.liquibase") +public interface LiquibaseRuntimeConfig { /** * Flag to enable / disable Liquibase. * */ - @ConfigItem(defaultValue = "true") - public boolean enabled; - - /** - * Liquibase configuration for the default datasource. - */ - @ConfigItem(name = ConfigItem.PARENT) - public LiquibaseDataSourceRuntimeConfig defaultDataSource = LiquibaseDataSourceRuntimeConfig.defaultConfig(); + @WithDefault("true") + boolean enabled(); /** - * Named datasources. + * Datasources. */ - @ConfigItem(name = ConfigItem.PARENT) @ConfigDocMapKey("datasource-name") @ConfigDocSection - public Map namedDataSources = Collections.emptyMap(); + @WithParentName + @WithUnnamedKey(DataSourceUtil.DEFAULT_DATASOURCE_NAME) + @WithDefaults + public Map datasources(); } diff --git a/extensions/liquibase/runtime/src/test/java/io/quarkus/liquibase/runtime/LiquibaseCreatorTest.java b/extensions/liquibase/runtime/src/test/java/io/quarkus/liquibase/runtime/LiquibaseCreatorTest.java deleted file mode 100644 index e0b841d0a2e4a..0000000000000 --- a/extensions/liquibase/runtime/src/test/java/io/quarkus/liquibase/runtime/LiquibaseCreatorTest.java +++ /dev/null @@ -1,208 +0,0 @@ -package io.quarkus.liquibase.runtime; - -import static org.junit.jupiter.api.Assertions.*; - -import java.util.Collections; -import java.util.Optional; - -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -import io.quarkus.datasource.common.runtime.DataSourceUtil; - -class LiquibaseCreatorTest { - - private LiquibaseDataSourceRuntimeConfig runtimeConfig = LiquibaseDataSourceRuntimeConfig.defaultConfig(); - private LiquibaseDataSourceBuildTimeConfig buildConfig = LiquibaseDataSourceBuildTimeConfig.defaultConfig(); - private LiquibaseConfig defaultConfig = new LiquibaseConfig(); - - /** - * class under test. 
- */ - private LiquibaseCreator creator; - - @Test - @DisplayName("changeLog default matches liquibase default") - void testChangeLogDefault() { - creator = new LiquibaseCreator(runtimeConfig, buildConfig); - assertEquals(defaultConfig.changeLog, createdLiquibaseConfig().changeLog); - } - - @Test - @DisplayName("changeLog carried over from configuration") - void testChangeLogOverridden() { - buildConfig.changeLog = "/db/test.xml"; - creator = new LiquibaseCreator(runtimeConfig, buildConfig); - assertEquals(buildConfig.changeLog, createdLiquibaseConfig().changeLog); - } - - @Test - @DisplayName("migrateAtStart default matches liquibase default") - void testMigrateAtStartDefault() { - creator = new LiquibaseCreator(runtimeConfig, buildConfig); - assertEquals(defaultConfig.migrateAtStart, createdLiquibaseConfig().migrateAtStart); - } - - @Test - @DisplayName("migrateAtStart carried over from configuration") - void testMigrateAtStartOverridden() { - runtimeConfig.migrateAtStart = true; - creator = new LiquibaseCreator(runtimeConfig, buildConfig); - assertTrue(createdLiquibaseConfig().migrateAtStart); - } - - @Test - @DisplayName("cleanAtStart default matches liquibase default") - void testCleanAtStartDefault() { - creator = new LiquibaseCreator(runtimeConfig, buildConfig); - assertEquals(defaultConfig.cleanAtStart, createdLiquibaseConfig().cleanAtStart); - } - - @Test - @DisplayName("cleanAtStart carried over from configuration") - void testCleanAtStartOverridden() { - runtimeConfig.cleanAtStart = true; - creator = new LiquibaseCreator(runtimeConfig, buildConfig); - assertTrue(createdLiquibaseConfig().cleanAtStart); - } - - @Test - @DisplayName("databaseChangeLogLockTableName default matches liquibase default") - void testDatabaseChangeLogLockTableNameDefault() { - creator = new LiquibaseCreator(runtimeConfig, buildConfig); - assertEquals(defaultConfig.databaseChangeLogLockTableName, createdLiquibaseConfig().databaseChangeLogLockTableName); - } - - @Test - @DisplayName("DatabaseChangeLogLockTableName carried over from configuration") - void testDatabaseChangeLogLockTableNameOverridden() { - runtimeConfig.databaseChangeLogLockTableName = Optional.of("TEST_LOCK"); - creator = new LiquibaseCreator(runtimeConfig, buildConfig); - assertEquals(runtimeConfig.databaseChangeLogLockTableName.get(), - createdLiquibaseConfig().databaseChangeLogLockTableName); - } - - @Test - @DisplayName("databaseChangeLogTableName default matches liquibase default") - void testDatabaseChangeLogTableNameDefault() { - creator = new LiquibaseCreator(runtimeConfig, buildConfig); - assertEquals(defaultConfig.databaseChangeLogTableName, createdLiquibaseConfig().databaseChangeLogTableName); - } - - @Test - @DisplayName("databaseChangeLogTableName carried over from configuration") - void testDatabaseChangeLogTableNameOverridden() { - runtimeConfig.databaseChangeLogLockTableName = Optional.of("TEST_LOG"); - creator = new LiquibaseCreator(runtimeConfig, buildConfig); - assertEquals(runtimeConfig.databaseChangeLogTableName.get(), createdLiquibaseConfig().databaseChangeLogTableName); - } - - @Test - @DisplayName("defaultCatalogName default matches liquibase default") - void testDefaultCatalogNameDefault() { - creator = new LiquibaseCreator(runtimeConfig, buildConfig); - assertEquals(defaultConfig.defaultCatalogName, createdLiquibaseConfig().defaultCatalogName); - } - - @Test - @DisplayName("defaultCatalogName carried over from configuration") - void testDefaultCatalogNameOverridden() { - runtimeConfig.defaultCatalogName = 
Optional.of("CATALOG1,CATALOG2"); - creator = new LiquibaseCreator(runtimeConfig, buildConfig); - assertEquals(runtimeConfig.defaultCatalogName, createdLiquibaseConfig().defaultCatalogName); - } - - @Test - @DisplayName("defaultSchemaName default matches liquibase default") - void testDefaultSchemaNameDefault() { - creator = new LiquibaseCreator(runtimeConfig, buildConfig); - assertEquals(defaultConfig.defaultSchemaName, createdLiquibaseConfig().defaultSchemaName); - } - - @Test - @DisplayName("defaultSchemaName carried over from configuration") - void testDefaultSchemaNameOverridden() { - runtimeConfig.defaultSchemaName = Optional.of("SCHEMA1"); - creator = new LiquibaseCreator(runtimeConfig, buildConfig); - assertEquals(runtimeConfig.defaultSchemaName, createdLiquibaseConfig().defaultSchemaName); - } - - @Test - @DisplayName("contexts default matches liquibase default") - void testContextsDefault() { - creator = new LiquibaseCreator(runtimeConfig, buildConfig); - assertEquals(defaultConfig.contexts, createdLiquibaseConfig().contexts); - } - - @Test - @DisplayName("contexts carried over from configuration") - void testContextsOverridden() { - runtimeConfig.contexts = Optional.of(Collections.singletonList("CONTEXT1,CONTEXT2")); - creator = new LiquibaseCreator(runtimeConfig, buildConfig); - assertIterableEquals(runtimeConfig.contexts.get(), createdLiquibaseConfig().contexts); - } - - @Test - @DisplayName("labels default matches liquibase default") - void testLabelsDefault() { - creator = new LiquibaseCreator(runtimeConfig, buildConfig); - assertEquals(defaultConfig.labels, createdLiquibaseConfig().labels); - } - - @Test - @DisplayName("labels carried over from configuration") - void testLabelsOverridden() { - runtimeConfig.labels = Optional.of(Collections.singletonList("LABEL1,LABEL2")); - creator = new LiquibaseCreator(runtimeConfig, buildConfig); - assertIterableEquals(runtimeConfig.labels.get(), createdLiquibaseConfig().labels); - } - - @Test - @DisplayName("liquibaseCatalogName default matches liquibase default") - void testLiquibaseCatalogNameDefault() { - creator = new LiquibaseCreator(runtimeConfig, buildConfig); - assertEquals(defaultConfig.liquibaseCatalogName, createdLiquibaseConfig().liquibaseCatalogName); - } - - @Test - @DisplayName("defaultCatalogName carried over from configuration") - void testLiquibaseCatalogNameOverridden() { - runtimeConfig.liquibaseCatalogName = Optional.of("LIQUIBASE_CATALOG"); - creator = new LiquibaseCreator(runtimeConfig, buildConfig); - assertEquals(runtimeConfig.liquibaseCatalogName, createdLiquibaseConfig().liquibaseCatalogName); - } - - @Test - @DisplayName("liquibaseSchemaName default matches liquibase default") - void testLiquibaseSchemaNameDefault() { - creator = new LiquibaseCreator(runtimeConfig, buildConfig); - assertEquals(defaultConfig.liquibaseSchemaName, createdLiquibaseConfig().liquibaseSchemaName); - } - - @Test - @DisplayName("liquibaseSchemaName carried over from configuration") - void testLiquibaseSchemaNameOverridden() { - runtimeConfig.liquibaseSchemaName = Optional.of("LIQUIBASE_SCHEMA"); - creator = new LiquibaseCreator(runtimeConfig, buildConfig); - assertEquals(runtimeConfig.liquibaseSchemaName, createdLiquibaseConfig().liquibaseSchemaName); - } - - @Test - @DisplayName("liquibaseTablespaceName default matches liquibase default") - void testLiquibaseTablespaceNameDefault() { - creator = new LiquibaseCreator(runtimeConfig, buildConfig); - assertEquals(defaultConfig.liquibaseTablespaceName, 
createdLiquibaseConfig().liquibaseTablespaceName); - } - - @Test - @DisplayName("liquibaseTablespaceName carried over from configuration") - void testLiquibaseTablespaceNameOverridden() { - runtimeConfig.liquibaseSchemaName = Optional.of("LIQUIBASE_SPACE"); - creator = new LiquibaseCreator(runtimeConfig, buildConfig); - assertEquals(runtimeConfig.liquibaseTablespaceName, createdLiquibaseConfig().liquibaseTablespaceName); - } - - private LiquibaseConfig createdLiquibaseConfig() { - return creator.createLiquibaseFactory(null, DataSourceUtil.DEFAULT_DATASOURCE_NAME).getConfiguration(); - } -} diff --git a/extensions/load-shedding/runtime/src/main/java/io/quarkus/load/shedding/runtime/ManagementRequestPrioritizer.java b/extensions/load-shedding/runtime/src/main/java/io/quarkus/load/shedding/runtime/ManagementRequestPrioritizer.java index ee011ccd35f31..380f873a6630d 100644 --- a/extensions/load-shedding/runtime/src/main/java/io/quarkus/load/shedding/runtime/ManagementRequestPrioritizer.java +++ b/extensions/load-shedding/runtime/src/main/java/io/quarkus/load/shedding/runtime/ManagementRequestPrioritizer.java @@ -5,7 +5,7 @@ import io.quarkus.load.shedding.RequestPrioritizer; import io.quarkus.load.shedding.RequestPriority; -import io.quarkus.vertx.http.runtime.HttpBuildTimeConfig; +import io.quarkus.vertx.http.runtime.VertxHttpBuildTimeConfig; import io.quarkus.vertx.http.runtime.management.ManagementInterfaceBuildTimeConfig; import io.vertx.core.http.HttpServerRequest; @@ -14,21 +14,22 @@ public class ManagementRequestPrioritizer implements RequestPrioritizer${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/logging-gelf/runtime/pom.xml b/extensions/logging-gelf/runtime/pom.xml index d81bc6d6c6650..834499b925e3d 100644 --- a/extensions/logging-gelf/runtime/pom.xml +++ b/extensions/logging-gelf/runtime/pom.xml @@ -49,9 +49,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/logging-gelf/runtime/src/main/java/io/quarkus/logging/gelf/AdditionalFieldConfig.java b/extensions/logging-gelf/runtime/src/main/java/io/quarkus/logging/gelf/AdditionalFieldConfig.java index 70a852bcba223..710016e56be6a 100644 --- a/extensions/logging-gelf/runtime/src/main/java/io/quarkus/logging/gelf/AdditionalFieldConfig.java +++ b/extensions/logging-gelf/runtime/src/main/java/io/quarkus/logging/gelf/AdditionalFieldConfig.java @@ -1,24 +1,23 @@ package io.quarkus.logging.gelf; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; +import io.smallrye.config.WithDefault; /** * Post additional fields. E.g. `fieldName1=value1,fieldName2=value2`. */ @ConfigGroup -public class AdditionalFieldConfig { +public interface AdditionalFieldConfig { /** * Additional field value. */ - @ConfigItem - public String value; + String value(); /** * Additional field type specification. * Supported types: String, long, Long, double, Double and discover. * Discover is the default if not specified, it discovers field type based on parseability. 
*/ - @ConfigItem(defaultValue = "discover") - public String type; + @WithDefault("discover") + String type(); } diff --git a/extensions/logging-gelf/runtime/src/main/java/io/quarkus/logging/gelf/GelfConfig.java b/extensions/logging-gelf/runtime/src/main/java/io/quarkus/logging/gelf/GelfConfig.java index 0d604ae5c3b9f..c9ae24559e843 100644 --- a/extensions/logging-gelf/runtime/src/main/java/io/quarkus/logging/gelf/GelfConfig.java +++ b/extensions/logging-gelf/runtime/src/main/java/io/quarkus/logging/gelf/GelfConfig.java @@ -6,44 +6,46 @@ import io.quarkus.runtime.annotations.ConfigDocMapKey; import io.quarkus.runtime.annotations.ConfigDocSection; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefault; -@ConfigRoot(phase = ConfigPhase.RUN_TIME, name = "log.handler.gelf") -public class GelfConfig { +@ConfigMapping(prefix = "quarkus.log.handler.gelf") +@ConfigRoot(phase = ConfigPhase.RUN_TIME) +public interface GelfConfig { /** * Determine whether to enable the GELF logging handler */ - @ConfigItem - public boolean enabled; + @WithDefault("false") + boolean enabled(); /** * Hostname/IP-Address of the Logstash/Graylog Host * By default it uses UDP, prepend tcp: to the hostname to switch to TCP, example: "tcp:localhost" */ - @ConfigItem(defaultValue = "localhost") - public String host; + @WithDefault("localhost") + String host(); /** * The port */ - @ConfigItem(defaultValue = "12201") - public int port; + @WithDefault("12201") + int port(); /** * GELF version: 1.0 or 1.1 */ - @ConfigItem(defaultValue = "1.1") - public String version; + @WithDefault("1.1") + String version(); /** * Whether to post Stack-Trace to StackTrace field. * * @see #stackTraceThrowableReference to customize the way the Stack-Trace is handled. */ - @ConfigItem(defaultValue = "true") - public boolean extractStackTrace; + @WithDefault("true") + boolean extractStackTrace(); /** * Only used when `extractStackTrace` is `true`. @@ -53,32 +55,32 @@ public class GelfConfig { * Negative throwable reference walk the exception chain from the root cause side: -1 will extract the root cause, * -2 the exception wrapping the root cause, ... */ - @ConfigItem - public int stackTraceThrowableReference; + @WithDefault("0") + int stackTraceThrowableReference(); /** * Whether to perform Stack-Trace filtering */ - @ConfigItem - public boolean filterStackTrace; + @WithDefault("false") + boolean filterStackTrace(); /** * Java date pattern, see {@link java.text.SimpleDateFormat} */ - @ConfigItem(defaultValue = "yyyy-MM-dd HH:mm:ss,SSS") - public String timestampPattern; + @WithDefault("yyyy-MM-dd HH:mm:ss,SSS") + String timestampPattern(); /** * The logging-gelf log level. */ - @ConfigItem(defaultValue = "ALL") - public Level level; + @WithDefault("ALL") + Level level(); /** * Name of the facility. */ - @ConfigItem(defaultValue = "jboss-logmanager") - public String facility; + @WithDefault("jboss-logmanager") + String facility(); /** * Post additional fields. @@ -89,67 +91,62 @@ public class GelfConfig { * quarkus.log.handler.gelf.additional-field.field1.type=String * */ - @ConfigItem - @ConfigDocMapKey("field-name") @ConfigDocSection - public Map additionalField; + @ConfigDocMapKey("field-name") + Map additionalField(); /** * Whether to include all fields from the MDC. 
*/ - @ConfigItem - public boolean includeFullMdc; + @WithDefault("false") + boolean includeFullMdc(); /** * Send additional fields whose values are obtained from MDC. Name of the Fields are comma-separated. Example: * mdcFields=Application,Version,SomeOtherFieldName */ - @ConfigItem() - public Optional mdcFields; + Optional mdcFields(); /** * Dynamic MDC Fields allows you to extract MDC values based on one or more regular expressions. Multiple regexes are * comma-separated. The name of the MDC entry is used as GELF field name. */ - @ConfigItem - public Optional dynamicMdcFields; + Optional dynamicMdcFields(); /** * Pattern-based type specification for additional and MDC fields. Key-value pairs are comma-separated. Example: * my_field.*=String,business\..*\.field=double */ - @ConfigItem - public Optional dynamicMdcFieldTypes; + Optional dynamicMdcFieldTypes(); /** * Maximum message size (in bytes). * If the message size is exceeded, the appender will submit the message in multiple chunks. */ - @ConfigItem(defaultValue = "8192") - public int maximumMessageSize; + @WithDefault("8192") + int maximumMessageSize(); /** * Include message parameters from the log event */ - @ConfigItem(defaultValue = "true") - public boolean includeLogMessageParameters; + @WithDefault("true") + boolean includeLogMessageParameters(); /** * Include source code location */ - @ConfigItem(defaultValue = "true") - public boolean includeLocation; + @WithDefault("true") + boolean includeLocation(); /** * Origin hostname */ - @ConfigItem - public Optional originHost; + Optional originHost(); /** * Bypass hostname resolution. If you didn't set the {@code originHost} property, and resolution is disabled, the value * “unknown” will be used as hostname */ - @ConfigItem - public boolean skipHostnameResolution; + @WithDefault("false") + boolean skipHostnameResolution(); } diff --git a/extensions/logging-gelf/runtime/src/main/java/io/quarkus/logging/gelf/GelfLogHandlerRecorder.java b/extensions/logging-gelf/runtime/src/main/java/io/quarkus/logging/gelf/GelfLogHandlerRecorder.java index 95d4a9b03e32d..432c37751d0c0 100644 --- a/extensions/logging-gelf/runtime/src/main/java/io/quarkus/logging/gelf/GelfLogHandlerRecorder.java +++ b/extensions/logging-gelf/runtime/src/main/java/io/quarkus/logging/gelf/GelfLogHandlerRecorder.java @@ -13,59 +13,59 @@ @Recorder public class GelfLogHandlerRecorder { public RuntimeValue> initializeHandler(final GelfConfig config) { - if (!config.enabled) { + if (!config.enabled()) { return new RuntimeValue<>(Optional.empty()); } String previousSkipHostnameResolution = null; - if (config.skipHostnameResolution) { + if (config.skipHostnameResolution()) { previousSkipHostnameResolution = System.setProperty(PROPERTY_LOGSTASH_GELF_SKIP_HOSTNAME_RESOLUTION, "true"); } final JBoss7GelfLogHandler handler = new JBoss7GelfLogHandler(); - if (config.skipHostnameResolution) { + if (config.skipHostnameResolution()) { if (previousSkipHostnameResolution == null) { System.clearProperty(PROPERTY_LOGSTASH_GELF_SKIP_HOSTNAME_RESOLUTION); } else { System.setProperty(PROPERTY_LOGSTASH_GELF_SKIP_HOSTNAME_RESOLUTION, previousSkipHostnameResolution); } } - handler.setVersion(config.version); - handler.setFacility(config.facility); - String extractStackTrace = String.valueOf(config.extractStackTrace); - if (config.extractStackTrace && config.stackTraceThrowableReference != 0) { - extractStackTrace = String.valueOf(config.stackTraceThrowableReference); + handler.setVersion(config.version()); + 
handler.setFacility(config.facility()); + String extractStackTrace = String.valueOf(config.extractStackTrace()); + if (config.extractStackTrace() && config.stackTraceThrowableReference() != 0) { + extractStackTrace = String.valueOf(config.stackTraceThrowableReference()); } handler.setExtractStackTrace(extractStackTrace); - handler.setFilterStackTrace(config.filterStackTrace); - handler.setTimestampPattern(config.timestampPattern); - handler.setIncludeFullMdc(config.includeFullMdc); - handler.setDynamicMdcFields(config.dynamicMdcFields.orElse(null)); - handler.setMdcFields(config.mdcFields.orElse(null)); - handler.setDynamicMdcFieldTypes(config.dynamicMdcFieldTypes.orElse(null)); - handler.setHost(config.host); - handler.setPort(config.port); - handler.setLevel(config.level); - handler.setMaximumMessageSize(config.maximumMessageSize); - handler.setIncludeLocation(config.includeLocation); - handler.setIncludeLogMessageParameters(config.includeLogMessageParameters); - if (config.originHost.isPresent()) { - handler.setOriginHost(config.originHost.get()); + handler.setFilterStackTrace(config.filterStackTrace()); + handler.setTimestampPattern(config.timestampPattern()); + handler.setIncludeFullMdc(config.includeFullMdc()); + handler.setDynamicMdcFields(config.dynamicMdcFields().orElse(null)); + handler.setMdcFields(config.mdcFields().orElse(null)); + handler.setDynamicMdcFieldTypes(config.dynamicMdcFieldTypes().orElse(null)); + handler.setHost(config.host()); + handler.setPort(config.port()); + handler.setLevel(config.level()); + handler.setMaximumMessageSize(config.maximumMessageSize()); + handler.setIncludeLocation(config.includeLocation()); + handler.setIncludeLogMessageParameters(config.includeLogMessageParameters()); + if (config.originHost().isPresent()) { + handler.setOriginHost(config.originHost().get()); } // handle additional fields - if (!config.additionalField.isEmpty()) { + if (!config.additionalField().isEmpty()) { StringBuilder additionalFieldsValue = new StringBuilder(); StringBuilder additionalFieldsType = new StringBuilder(); - for (Map.Entry additionalField : config.additionalField.entrySet()) { + for (Map.Entry additionalField : config.additionalField().entrySet()) { if (additionalFieldsValue.length() > 0) { additionalFieldsValue.append(','); } - additionalFieldsValue.append(additionalField.getKey()).append('=').append(additionalField.getValue().value); + additionalFieldsValue.append(additionalField.getKey()).append('=').append(additionalField.getValue().value()); if (additionalFieldsType.length() > 0) { additionalFieldsType.append(','); } - additionalFieldsType.append(additionalField.getKey()).append('=').append(additionalField.getValue().type); + additionalFieldsType.append(additionalField.getKey()).append('=').append(additionalField.getValue().type()); } handler.setAdditionalFields(additionalFieldsValue.toString()); diff --git a/extensions/logging-gelf/runtime/src/main/resources/META-INF/quarkus-extension.yaml b/extensions/logging-gelf/runtime/src/main/resources/META-INF/quarkus-extension.yaml index 940b2fe729696..eb1f05f2e7e38 100644 --- a/extensions/logging-gelf/runtime/src/main/resources/META-INF/quarkus-extension.yaml +++ b/extensions/logging-gelf/runtime/src/main/resources/META-INF/quarkus-extension.yaml @@ -9,6 +9,6 @@ metadata: guide: "https://quarkus.io/guides/centralized-log-management" categories: - "core" - status: "preview" + status: "deprecated" config: - "quarkus.log.handler.gelf." 
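The recurring change in the extension diffs above and below (Liquibase, logging-gelf, logging-json, mailer) is the migration from @ConfigItem classes with public fields to @ConfigMapping interfaces: defaults move into @WithDefault, optional values become plain Optional accessors, map-style config uses @WithParentName/@WithUnnamedKey/@WithDefaults, and call sites switch from field access to accessor calls. The sketch below is illustrative only; the prefix org.acme.example, the interface ExampleRuntimeConfig, and its members are hypothetical and not part of this patch (extension config roots in the patch additionally carry @ConfigRoot(phase = ...)).

    package org.acme.config;

    import java.util.Map;
    import java.util.Optional;

    import io.smallrye.config.ConfigMapping;
    import io.smallrye.config.WithDefault;
    import io.smallrye.config.WithDefaults;
    import io.smallrye.config.WithParentName;
    import io.smallrye.config.WithUnnamedKey;

    // Hypothetical mapping, shown only to illustrate the migration pattern used in this patch:
    // accessor methods replace public fields, @WithDefault replaces @ConfigItem(defaultValue = ...),
    // and a single @WithParentName/@WithUnnamedKey/@WithDefaults map replaces the former
    // defaultDataSource/namedDataSources pair.
    @ConfigMapping(prefix = "org.acme.example")
    public interface ExampleRuntimeConfig {

        // org.acme.example.enabled=false disables the feature; when unset it falls back to "true"
        @WithDefault("true")
        boolean enabled();

        // org.acme.example.origin-host=... ; no default, so the Optional is empty when unset
        Optional<String> originHost();

        // org.acme.example.migrate-at-start=... targets the unnamed "<default>" entry,
        // org.acme.example.users.migrate-at-start=... targets the entry keyed "users";
        // @WithDefaults yields a fully defaulted instance for any key never mentioned in config
        @WithParentName
        @WithUnnamedKey("<default>")
        @WithDefaults
        Map<String, NamedConfig> datasources();

        interface NamedConfig {
            @WithDefault("false")
            boolean migrateAtStart();
        }
    }

With such a mapping, org.acme.example.users.migrate-at-start=true is read as datasources().get("users").migrateAtStart(), and a bare org.acme.example.migrate-at-start=true lands under the unnamed "<default>" key, which is the lookup shape the LiquibaseFactoryProducer and LiquibaseRecorder changes above rely on.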
diff --git a/extensions/logging-json/deployment/pom.xml b/extensions/logging-json/deployment/pom.xml index 673743cabce09..2412f617653c1 100644 --- a/extensions/logging-json/deployment/pom.xml +++ b/extensions/logging-json/deployment/pom.xml @@ -69,9 +69,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/logging-json/deployment/src/test/java/io/quarkus/logging/json/ConsoleJsonFormatterCustomConfigTest.java b/extensions/logging-json/deployment/src/test/java/io/quarkus/logging/json/ConsoleJsonFormatterCustomConfigTest.java index e9113992293ad..29468cddb470e 100644 --- a/extensions/logging-json/deployment/src/test/java/io/quarkus/logging/json/ConsoleJsonFormatterCustomConfigTest.java +++ b/extensions/logging-json/deployment/src/test/java/io/quarkus/logging/json/ConsoleJsonFormatterCustomConfigTest.java @@ -15,8 +15,8 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; -import io.quarkus.logging.json.runtime.AdditionalFieldConfig; import io.quarkus.logging.json.runtime.JsonFormatter; +import io.quarkus.logging.json.runtime.JsonLogConfig.AdditionalFieldConfig; import io.quarkus.test.QuarkusUnitTest; public class ConsoleJsonFormatterCustomConfigTest { @@ -37,14 +37,14 @@ public void jsonFormatterCustomConfigurationTest() { .isEqualTo(StructuredFormatter.ExceptionOutputType.DETAILED_AND_FORMATTED); assertThat(jsonFormatter.getRecordDelimiter()).isEqualTo("\n;"); assertThat(jsonFormatter.isPrintDetails()).isTrue(); - assertThat(jsonFormatter.getExcludedKeys()).containsExactly("timestamp", "sequence"); + assertThat(jsonFormatter.getExcludedKeys()).containsExactlyInAnyOrder("timestamp", "sequence"); assertThat(jsonFormatter.getAdditionalFields().size()).isEqualTo(2); assertThat(jsonFormatter.getAdditionalFields().containsKey("foo")).isTrue(); - assertThat(jsonFormatter.getAdditionalFields().get("foo").type).isEqualTo(AdditionalFieldConfig.Type.INT); - assertThat(jsonFormatter.getAdditionalFields().get("foo").value).isEqualTo("42"); + assertThat(jsonFormatter.getAdditionalFields().get("foo").type()).isEqualTo(AdditionalFieldConfig.Type.INT); + assertThat(jsonFormatter.getAdditionalFields().get("foo").value()).isEqualTo("42"); assertThat(jsonFormatter.getAdditionalFields().containsKey("bar")).isTrue(); - assertThat(jsonFormatter.getAdditionalFields().get("bar").type).isEqualTo(AdditionalFieldConfig.Type.STRING); - assertThat(jsonFormatter.getAdditionalFields().get("bar").value).isEqualTo("baz"); + assertThat(jsonFormatter.getAdditionalFields().get("bar").type()).isEqualTo(AdditionalFieldConfig.Type.STRING); + assertThat(jsonFormatter.getAdditionalFields().get("bar").value()).isEqualTo("baz"); } @Test diff --git a/extensions/logging-json/deployment/src/test/java/io/quarkus/logging/json/FileJsonFormatterCustomConfigTest.java b/extensions/logging-json/deployment/src/test/java/io/quarkus/logging/json/FileJsonFormatterCustomConfigTest.java index 7f59c11bf0cb8..aa90ca7a1e7ce 100644 --- a/extensions/logging-json/deployment/src/test/java/io/quarkus/logging/json/FileJsonFormatterCustomConfigTest.java +++ b/extensions/logging-json/deployment/src/test/java/io/quarkus/logging/json/FileJsonFormatterCustomConfigTest.java @@ -15,8 +15,8 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; -import io.quarkus.logging.json.runtime.AdditionalFieldConfig; import io.quarkus.logging.json.runtime.JsonFormatter; +import io.quarkus.logging.json.runtime.JsonLogConfig.AdditionalFieldConfig; import 
io.quarkus.test.QuarkusUnitTest; public class FileJsonFormatterCustomConfigTest { @@ -37,14 +37,14 @@ public void jsonFormatterCustomConfigurationTest() { .isEqualTo(StructuredFormatter.ExceptionOutputType.DETAILED_AND_FORMATTED); assertThat(jsonFormatter.getRecordDelimiter()).isEqualTo("\n;"); assertThat(jsonFormatter.isPrintDetails()).isTrue(); - assertThat(jsonFormatter.getExcludedKeys()).containsExactly("timestamp", "sequence"); + assertThat(jsonFormatter.getExcludedKeys()).containsExactlyInAnyOrder("timestamp", "sequence"); assertThat(jsonFormatter.getAdditionalFields().size()).isEqualTo(2); assertThat(jsonFormatter.getAdditionalFields().containsKey("foo")).isTrue(); - assertThat(jsonFormatter.getAdditionalFields().get("foo").type).isEqualTo(AdditionalFieldConfig.Type.INT); - assertThat(jsonFormatter.getAdditionalFields().get("foo").value).isEqualTo("42"); + assertThat(jsonFormatter.getAdditionalFields().get("foo").type()).isEqualTo(AdditionalFieldConfig.Type.INT); + assertThat(jsonFormatter.getAdditionalFields().get("foo").value()).isEqualTo("42"); assertThat(jsonFormatter.getAdditionalFields().containsKey("bar")).isTrue(); - assertThat(jsonFormatter.getAdditionalFields().get("bar").type).isEqualTo(AdditionalFieldConfig.Type.STRING); - assertThat(jsonFormatter.getAdditionalFields().get("bar").value).isEqualTo("baz"); + assertThat(jsonFormatter.getAdditionalFields().get("bar").type()).isEqualTo(AdditionalFieldConfig.Type.STRING); + assertThat(jsonFormatter.getAdditionalFields().get("bar").value()).isEqualTo("baz"); } @Test diff --git a/extensions/logging-json/deployment/src/test/java/io/quarkus/logging/json/SocketJsonFormatterCustomConfigTest.java b/extensions/logging-json/deployment/src/test/java/io/quarkus/logging/json/SocketJsonFormatterCustomConfigTest.java index e6e0d8a9ac0a2..0615e731b7655 100644 --- a/extensions/logging-json/deployment/src/test/java/io/quarkus/logging/json/SocketJsonFormatterCustomConfigTest.java +++ b/extensions/logging-json/deployment/src/test/java/io/quarkus/logging/json/SocketJsonFormatterCustomConfigTest.java @@ -15,8 +15,8 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; -import io.quarkus.logging.json.runtime.AdditionalFieldConfig; import io.quarkus.logging.json.runtime.JsonFormatter; +import io.quarkus.logging.json.runtime.JsonLogConfig.AdditionalFieldConfig; import io.quarkus.test.QuarkusUnitTest; public class SocketJsonFormatterCustomConfigTest { @@ -37,14 +37,14 @@ public void jsonFormatterCustomConfigurationTest() { .isEqualTo(StructuredFormatter.ExceptionOutputType.DETAILED_AND_FORMATTED); assertThat(jsonFormatter.getRecordDelimiter()).isEqualTo("\n;"); assertThat(jsonFormatter.isPrintDetails()).isTrue(); - assertThat(jsonFormatter.getExcludedKeys()).containsExactly("timestamp", "sequence"); + assertThat(jsonFormatter.getExcludedKeys()).containsExactlyInAnyOrder("timestamp", "sequence"); assertThat(jsonFormatter.getAdditionalFields().size()).isEqualTo(2); assertThat(jsonFormatter.getAdditionalFields().containsKey("foo")).isTrue(); - assertThat(jsonFormatter.getAdditionalFields().get("foo").type).isEqualTo(AdditionalFieldConfig.Type.INT); - assertThat(jsonFormatter.getAdditionalFields().get("foo").value).isEqualTo("42"); + assertThat(jsonFormatter.getAdditionalFields().get("foo").type()).isEqualTo(AdditionalFieldConfig.Type.INT); + assertThat(jsonFormatter.getAdditionalFields().get("foo").value()).isEqualTo("42"); assertThat(jsonFormatter.getAdditionalFields().containsKey("bar")).isTrue(); - 
assertThat(jsonFormatter.getAdditionalFields().get("bar").type).isEqualTo(AdditionalFieldConfig.Type.STRING); - assertThat(jsonFormatter.getAdditionalFields().get("bar").value).isEqualTo("baz"); + assertThat(jsonFormatter.getAdditionalFields().get("bar").type()).isEqualTo(AdditionalFieldConfig.Type.STRING); + assertThat(jsonFormatter.getAdditionalFields().get("bar").value()).isEqualTo("baz"); } @Test diff --git a/extensions/logging-json/deployment/src/test/java/io/quarkus/logging/json/SyslogJsonFormatterCustomConfigTest.java b/extensions/logging-json/deployment/src/test/java/io/quarkus/logging/json/SyslogJsonFormatterCustomConfigTest.java index 809e036e3f78c..b7c6cadcde06c 100644 --- a/extensions/logging-json/deployment/src/test/java/io/quarkus/logging/json/SyslogJsonFormatterCustomConfigTest.java +++ b/extensions/logging-json/deployment/src/test/java/io/quarkus/logging/json/SyslogJsonFormatterCustomConfigTest.java @@ -15,8 +15,8 @@ import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; -import io.quarkus.logging.json.runtime.AdditionalFieldConfig; import io.quarkus.logging.json.runtime.JsonFormatter; +import io.quarkus.logging.json.runtime.JsonLogConfig.AdditionalFieldConfig; import io.quarkus.test.QuarkusUnitTest; public class SyslogJsonFormatterCustomConfigTest { @@ -37,14 +37,14 @@ public void jsonFormatterCustomConfigurationTest() { .isEqualTo(StructuredFormatter.ExceptionOutputType.DETAILED_AND_FORMATTED); assertThat(jsonFormatter.getRecordDelimiter()).isEqualTo("\n;"); assertThat(jsonFormatter.isPrintDetails()).isTrue(); - assertThat(jsonFormatter.getExcludedKeys()).containsExactly("timestamp", "sequence"); + assertThat(jsonFormatter.getExcludedKeys()).containsExactlyInAnyOrder("timestamp", "sequence"); assertThat(jsonFormatter.getAdditionalFields().size()).isEqualTo(2); assertThat(jsonFormatter.getAdditionalFields().containsKey("foo")).isTrue(); - assertThat(jsonFormatter.getAdditionalFields().get("foo").type).isEqualTo(AdditionalFieldConfig.Type.INT); - assertThat(jsonFormatter.getAdditionalFields().get("foo").value).isEqualTo("42"); + assertThat(jsonFormatter.getAdditionalFields().get("foo").type()).isEqualTo(AdditionalFieldConfig.Type.INT); + assertThat(jsonFormatter.getAdditionalFields().get("foo").value()).isEqualTo("42"); assertThat(jsonFormatter.getAdditionalFields().containsKey("bar")).isTrue(); - assertThat(jsonFormatter.getAdditionalFields().get("bar").type).isEqualTo(AdditionalFieldConfig.Type.STRING); - assertThat(jsonFormatter.getAdditionalFields().get("bar").value).isEqualTo("baz"); + assertThat(jsonFormatter.getAdditionalFields().get("bar").type()).isEqualTo(AdditionalFieldConfig.Type.STRING); + assertThat(jsonFormatter.getAdditionalFields().get("bar").value()).isEqualTo("baz"); } @Test diff --git a/extensions/logging-json/runtime/pom.xml b/extensions/logging-json/runtime/pom.xml index 5372151fe388e..5f6fa67995766 100644 --- a/extensions/logging-json/runtime/pom.xml +++ b/extensions/logging-json/runtime/pom.xml @@ -58,9 +58,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/logging-json/runtime/src/main/java/io/quarkus/logging/json/runtime/AdditionalField.java b/extensions/logging-json/runtime/src/main/java/io/quarkus/logging/json/runtime/AdditionalField.java new file mode 100644 index 0000000000000..0883196882059 --- /dev/null +++ b/extensions/logging-json/runtime/src/main/java/io/quarkus/logging/json/runtime/AdditionalField.java @@ -0,0 +1,7 @@ +package io.quarkus.logging.json.runtime; + 
+import io.quarkus.logging.json.runtime.JsonLogConfig.AdditionalFieldConfig; + +public record AdditionalField(String value, AdditionalFieldConfig.Type type) { + +} diff --git a/extensions/logging-json/runtime/src/main/java/io/quarkus/logging/json/runtime/AdditionalFieldConfig.java b/extensions/logging-json/runtime/src/main/java/io/quarkus/logging/json/runtime/AdditionalFieldConfig.java deleted file mode 100644 index dba33ea8e0d72..0000000000000 --- a/extensions/logging-json/runtime/src/main/java/io/quarkus/logging/json/runtime/AdditionalFieldConfig.java +++ /dev/null @@ -1,30 +0,0 @@ -package io.quarkus.logging.json.runtime; - -import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; - -/** - * Post additional fields. E.g. `fieldName1=value1,fieldName2=value2`. - */ -@ConfigGroup -public class AdditionalFieldConfig { - /** - * Additional field value. - */ - @ConfigItem - public String value; - - /** - * Additional field type specification. - * Supported types: {@code string}, {@code int}, and {@code long}. - * String is the default if not specified. - */ - @ConfigItem(defaultValue = "string") - public Type type; - - public enum Type { - STRING, - INT, - LONG, - } -} diff --git a/extensions/logging-json/runtime/src/main/java/io/quarkus/logging/json/runtime/JsonFormatter.java b/extensions/logging-json/runtime/src/main/java/io/quarkus/logging/json/runtime/JsonFormatter.java index 237ace899639c..3816d15fc208c 100644 --- a/extensions/logging-json/runtime/src/main/java/io/quarkus/logging/json/runtime/JsonFormatter.java +++ b/extensions/logging-json/runtime/src/main/java/io/quarkus/logging/json/runtime/JsonFormatter.java @@ -11,7 +11,7 @@ public class JsonFormatter extends org.jboss.logmanager.formatters.JsonFormatter { private Set excludedKeys; - private Map additionalFields; + private Map additionalFields; /** * Creates a new JSON formatter. 
@@ -46,7 +46,7 @@ public JsonFormatter(final String keyOverrides) { * @param additionalFields additionalFields to be added to the output */ public JsonFormatter(final String keyOverrides, final Set excludedKeys, - final Map additionalFields) { + final Map additionalFields) { super(keyOverrides); this.excludedKeys = excludedKeys; this.additionalFields = additionalFields; @@ -60,11 +60,11 @@ public void setExcludedKeys(Set excludedKeys) { this.excludedKeys = excludedKeys; } - public Map getAdditionalFields() { + public Map getAdditionalFields() { return this.additionalFields; } - public void setAdditionalFields(Map additionalFields) { + public void setAdditionalFields(Map additionalFields) { this.additionalFields = additionalFields; } @@ -77,15 +77,15 @@ protected Generator createGenerator(final Writer writer) { @Override protected void after(final Generator generator, final ExtLogRecord record) throws Exception { for (var entry : this.additionalFields.entrySet()) { - switch (entry.getValue().type) { + switch (entry.getValue().type()) { case STRING: - generator.add(entry.getKey(), entry.getValue().value); + generator.add(entry.getKey(), entry.getValue().value()); break; case INT: - generator.add(entry.getKey(), Integer.valueOf(entry.getValue().value)); + generator.add(entry.getKey(), Integer.valueOf(entry.getValue().value())); break; case LONG: - generator.add(entry.getKey(), Long.valueOf(entry.getValue().value)); + generator.add(entry.getKey(), Long.valueOf(entry.getValue().value())); break; } } diff --git a/extensions/logging-json/runtime/src/main/java/io/quarkus/logging/json/runtime/JsonLogConfig.java b/extensions/logging-json/runtime/src/main/java/io/quarkus/logging/json/runtime/JsonLogConfig.java index 3f1cd1fffd9d4..6123275b157d4 100644 --- a/extensions/logging-json/runtime/src/main/java/io/quarkus/logging/json/runtime/JsonLogConfig.java +++ b/extensions/logging-json/runtime/src/main/java/io/quarkus/logging/json/runtime/JsonLogConfig.java @@ -9,112 +9,152 @@ import io.quarkus.runtime.annotations.ConfigDocMapKey; import io.quarkus.runtime.annotations.ConfigDocSection; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefault; +import io.smallrye.config.WithName; +import io.smallrye.config.WithParentName; /** * Configuration for JSON log formatting. */ -@ConfigRoot(phase = ConfigPhase.RUN_TIME, name = "log") -public class JsonLogConfig { +@ConfigRoot(phase = ConfigPhase.RUN_TIME) +@ConfigMapping(prefix = "quarkus.log") +public interface JsonLogConfig { /** * Console logging. */ @ConfigDocSection - @ConfigItem(name = "console.json") - JsonConfig consoleJson; + @WithName("console.json") + JsonConfig consoleJson(); /** * File logging. */ @ConfigDocSection - @ConfigItem(name = "file.json") - JsonConfig fileJson; + @WithName("file.json") + JsonConfig fileJson(); /** * Syslog logging. */ @ConfigDocSection - @ConfigItem(name = "syslog.json") - JsonConfig syslogJson; + @WithName("syslog.json") + JsonConfig syslogJson(); /** * Socket logging. */ @ConfigDocSection - @ConfigItem(name = "socket.json") - JsonConfig socketJson; + @WithName("socket.json") + JsonConfig socketJson(); @ConfigGroup - public static class JsonConfig { + public interface JsonConfig { /** * Determine whether to enable the JSON console formatting extension, which disables "normal" console formatting. 
*/ - @ConfigItem(name = ConfigItem.PARENT, defaultValue = "true") - boolean enable; + @WithParentName + @WithDefault("true") + @Deprecated(forRemoval = true, since = "3.19") + boolean enable(); + + /** + * Determine whether to enable the JSON console formatting extension, which disables "normal" console formatting. + */ + // TODO make it non-optional with default true as soon as we drop the other config + Optional enabled(); + /** * Enable "pretty printing" of the JSON record. Note that some JSON parsers will fail to read the pretty printed output. */ - @ConfigItem - boolean prettyPrint; + @WithDefault("false") + boolean prettyPrint(); + /** * The date format to use. The special string "default" indicates that the default format should be used. */ - @ConfigItem(defaultValue = "default") - String dateFormat; + @WithDefault("default") + String dateFormat(); + /** * The special end-of-record delimiter to be used. By default, newline is used. */ - @ConfigItem - Optional recordDelimiter; + Optional recordDelimiter(); + /** * The zone ID to use. The special string "default" indicates that the default zone should be used. */ - @ConfigItem(defaultValue = "default") - String zoneId; + @WithDefault("default") + String zoneId(); + /** * The exception output type to specify. */ - @ConfigItem(defaultValue = "detailed") - StructuredFormatter.ExceptionOutputType exceptionOutputType; + @WithDefault("detailed") + StructuredFormatter.ExceptionOutputType exceptionOutputType(); + /** * Enable printing of more details in the log. *

    * Printing the details can be expensive as the values are retrieved from the caller. The details include the * source class name, source file name, source method name, and source line number. */ - @ConfigItem - boolean printDetails; + @WithDefault("false") + boolean printDetails(); + /** * Override keys with custom values. Omitting this value indicates that no key overrides will be applied. */ - @ConfigItem - Optional keyOverrides; + Optional keyOverrides(); /** * Keys to be excluded from the JSON output. */ - @ConfigItem - Optional> excludedKeys; + Optional> excludedKeys(); /** * Additional fields to be appended in the JSON logs. */ - @ConfigItem @ConfigDocMapKey("field-name") - Map additionalField; + Map additionalField(); /** * Specify the format of the produced JSON */ - @ConfigItem(defaultValue = "DEFAULT") - LogFormat logFormat; + @WithDefault("default") + LogFormat logFormat(); public enum LogFormat { DEFAULT, ECS } } + + /** + * Post additional fields. E.g. `fieldName1=value1,fieldName2=value2`. + */ + @ConfigGroup + public interface AdditionalFieldConfig { + /** + * Additional field value. + */ + public String value(); + + /** + * Additional field type specification. + * Supported types: {@code string}, {@code int}, and {@code long}. + * String is the default if not specified. + */ + @WithDefault("string") + public Type type(); + + public enum Type { + STRING, + INT, + LONG, + } + } } diff --git a/extensions/logging-json/runtime/src/main/java/io/quarkus/logging/json/runtime/LoggingJsonRecorder.java b/extensions/logging-json/runtime/src/main/java/io/quarkus/logging/json/runtime/LoggingJsonRecorder.java index d05745e2a76ca..5387653561141 100644 --- a/extensions/logging-json/runtime/src/main/java/io/quarkus/logging/json/runtime/LoggingJsonRecorder.java +++ b/extensions/logging-json/runtime/src/main/java/io/quarkus/logging/json/runtime/LoggingJsonRecorder.java @@ -2,16 +2,20 @@ import java.util.EnumMap; import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Map.Entry; import java.util.Optional; import java.util.Set; import java.util.logging.Formatter; +import java.util.stream.Collectors; import org.eclipse.microprofile.config.Config; import org.eclipse.microprofile.config.ConfigProvider; import org.jboss.logmanager.PropertyValues; import org.jboss.logmanager.formatters.StructuredFormatter.Key; -import io.quarkus.logging.json.runtime.AdditionalFieldConfig.Type; +import io.quarkus.logging.json.runtime.JsonLogConfig.AdditionalFieldConfig.Type; import io.quarkus.logging.json.runtime.JsonLogConfig.JsonConfig; import io.quarkus.runtime.RuntimeValue; import io.quarkus.runtime.annotations.Recorder; @@ -20,53 +24,68 @@ public class LoggingJsonRecorder { public RuntimeValue> initializeConsoleJsonLogging(final JsonLogConfig config) { - return getFormatter(config.consoleJson); + return getFormatter(config.consoleJson()); } public RuntimeValue> initializeFileJsonLogging(final JsonLogConfig config) { - return getFormatter(config.fileJson); + return getFormatter(config.fileJson()); } public RuntimeValue> initializeSyslogJsonLogging(JsonLogConfig config) { - return getFormatter(config.syslogJson); + return getFormatter(config.syslogJson()); } public RuntimeValue> initializeSocketJsonLogging(JsonLogConfig config) { - return getFormatter(config.socketJson); + return getFormatter(config.socketJson()); } private RuntimeValue> getFormatter(JsonConfig config) { - if (config.logFormat == JsonConfig.LogFormat.ECS) { - addEcsFieldOverrides(config); + String 
keyOverrides = config.keyOverrides().orElse(null); + Set excludedKeys = config.excludedKeys().orElse(Set.of()); + Map additionalFields = config.additionalField().entrySet().stream() + .collect(Collectors.toMap(Entry::getKey, e -> new AdditionalField(e.getValue().value(), e.getValue().type()), + (x, y) -> y, LinkedHashMap::new)); + + OverridableJsonConfig overridableJsonConfig = new OverridableJsonConfig(keyOverrides, excludedKeys, additionalFields); + + if (config.logFormat() == JsonConfig.LogFormat.ECS) { + overridableJsonConfig = addEcsFieldOverrides(overridableJsonConfig); } - return getDefaultFormatter(config); + return getDefaultFormatter(config, overridableJsonConfig); } - private RuntimeValue> getDefaultFormatter(JsonConfig config) { - if (!config.enable) { + private RuntimeValue> getDefaultFormatter(JsonConfig config, + OverridableJsonConfig overridableJsonConfig) { + if (!config.enabled().orElse(config.enable())) { return new RuntimeValue<>(Optional.empty()); } - final JsonFormatter formatter = config.keyOverrides.map(ko -> new JsonFormatter(ko)).orElse(new JsonFormatter()); - config.excludedKeys.ifPresent(ek -> formatter.setExcludedKeys(ek)); - Optional.ofNullable(config.additionalField).ifPresent(af -> formatter.setAdditionalFields(af)); - formatter.setPrettyPrint(config.prettyPrint); - final String dateFormat = config.dateFormat; + + final JsonFormatter formatter; + if (overridableJsonConfig.keyOverrides() == null) { + formatter = new JsonFormatter(); + } else { + formatter = new JsonFormatter(overridableJsonConfig.keyOverrides()); + } + formatter.setExcludedKeys(overridableJsonConfig.excludedKeys()); + formatter.setAdditionalFields(overridableJsonConfig.additionalFields()); + formatter.setPrettyPrint(config.prettyPrint()); + final String dateFormat = config.dateFormat(); if (!dateFormat.equals("default")) { formatter.setDateFormat(dateFormat); } - formatter.setExceptionOutputType(config.exceptionOutputType); - formatter.setPrintDetails(config.printDetails); - config.recordDelimiter.ifPresent(formatter::setRecordDelimiter); - final String zoneId = config.zoneId; + formatter.setExceptionOutputType(config.exceptionOutputType()); + formatter.setPrintDetails(config.printDetails()); + config.recordDelimiter().ifPresent(formatter::setRecordDelimiter); + final String zoneId = config.zoneId(); if (!zoneId.equals("default")) { formatter.setZoneId(zoneId); } return new RuntimeValue<>(Optional.of(formatter)); } - private void addEcsFieldOverrides(JsonConfig config) { - EnumMap keyOverrides = PropertyValues.stringToEnumMap(Key.class, config.keyOverrides.orElse(null)); + private OverridableJsonConfig addEcsFieldOverrides(OverridableJsonConfig overridableJsonConfig) { + EnumMap keyOverrides = PropertyValues.stringToEnumMap(Key.class, overridableJsonConfig.keyOverrides()); keyOverrides.putIfAbsent(Key.TIMESTAMP, "@timestamp"); keyOverrides.putIfAbsent(Key.LOGGER_NAME, "log.logger"); keyOverrides.putIfAbsent(Key.LEVEL, "log.level"); @@ -78,29 +97,27 @@ private void addEcsFieldOverrides(JsonConfig config) { keyOverrides.putIfAbsent(Key.SEQUENCE, "event.sequence"); keyOverrides.putIfAbsent(Key.EXCEPTION_MESSAGE, "error.message"); keyOverrides.putIfAbsent(Key.STACK_TRACE, "error.stack_trace"); - config.keyOverrides = Optional.of(PropertyValues.mapToString(keyOverrides)); - config.additionalField.computeIfAbsent("ecs.version", k -> buildFieldConfig("1.12.2", Type.STRING)); - config.additionalField.computeIfAbsent("data_stream.type", k -> buildFieldConfig("logs", Type.STRING)); + Set excludedKeys = 
new HashSet<>(overridableJsonConfig.excludedKeys()); + excludedKeys.add(Key.LOGGER_CLASS_NAME.getKey()); + excludedKeys.add(Key.RECORD.getKey()); + + Map additionalFields = new LinkedHashMap<>(overridableJsonConfig.additionalFields()); + additionalFields.computeIfAbsent("ecs.version", k -> new AdditionalField("1.12.2", Type.STRING)); + additionalFields.computeIfAbsent("data_stream.type", k -> new AdditionalField("logs", Type.STRING)); Config quarkusConfig = ConfigProvider.getConfig(); quarkusConfig.getOptionalValue("quarkus.application.name", String.class).ifPresent( - s -> config.additionalField.computeIfAbsent("service.name", k -> buildFieldConfig(s, Type.STRING))); + s -> additionalFields.computeIfAbsent("service.name", k -> new AdditionalField(s, Type.STRING))); quarkusConfig.getOptionalValue("quarkus.application.version", String.class).ifPresent( - s -> config.additionalField.computeIfAbsent("service.version", k -> buildFieldConfig(s, Type.STRING))); + s -> additionalFields.computeIfAbsent("service.version", k -> new AdditionalField(s, Type.STRING))); quarkusConfig.getOptionalValue("quarkus.profile", String.class).ifPresent( - s -> config.additionalField.computeIfAbsent("service.environment", k -> buildFieldConfig(s, Type.STRING))); + s -> additionalFields.computeIfAbsent("service.environment", k -> new AdditionalField(s, Type.STRING))); - Set excludedKeys = config.excludedKeys.orElseGet(HashSet::new); - excludedKeys.add(Key.LOGGER_CLASS_NAME.getKey()); - excludedKeys.add(Key.RECORD.getKey()); - config.excludedKeys = Optional.of(excludedKeys); + return new OverridableJsonConfig(PropertyValues.mapToString(keyOverrides), excludedKeys, additionalFields); } - private AdditionalFieldConfig buildFieldConfig(String value, Type type) { - AdditionalFieldConfig field = new AdditionalFieldConfig(); - field.type = type; - field.value = value; - return field; + private record OverridableJsonConfig(String keyOverrides, Set excludedKeys, + Map additionalFields) { } } diff --git a/extensions/mailer/deployment/pom.xml b/extensions/mailer/deployment/pom.xml index 5a1a7d602f460..5354e2b6fb068 100644 --- a/extensions/mailer/deployment/pom.xml +++ b/extensions/mailer/deployment/pom.xml @@ -64,9 +64,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/mailer/deployment/src/main/java/io/quarkus/mailer/deployment/MailerProcessor.java b/extensions/mailer/deployment/src/main/java/io/quarkus/mailer/deployment/MailerProcessor.java index 70f8bc8e54f0b..af68db9ac3336 100644 --- a/extensions/mailer/deployment/src/main/java/io/quarkus/mailer/deployment/MailerProcessor.java +++ b/extensions/mailer/deployment/src/main/java/io/quarkus/mailer/deployment/MailerProcessor.java @@ -81,7 +81,7 @@ public static class CacheAttachmentsEnabled implements BooleanSupplier { MailersBuildTimeConfig config; public boolean getAsBoolean() { - return config.cacheAttachments; + return config.cacheAttachments(); } } diff --git a/extensions/mailer/runtime/pom.xml b/extensions/mailer/runtime/pom.xml index d271031465011..cd2a87d365d3d 100644 --- a/extensions/mailer/runtime/pom.xml +++ b/extensions/mailer/runtime/pom.xml @@ -95,9 +95,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/mailer/runtime/src/main/java/io/quarkus/mailer/runtime/DkimSignOptionsConfig.java b/extensions/mailer/runtime/src/main/java/io/quarkus/mailer/runtime/DkimSignOptionsConfig.java index 21ca77c3666ab..5c55991474cba 100644 --- 
a/extensions/mailer/runtime/src/main/java/io/quarkus/mailer/runtime/DkimSignOptionsConfig.java +++ b/extensions/mailer/runtime/src/main/java/io/quarkus/mailer/runtime/DkimSignOptionsConfig.java @@ -6,88 +6,77 @@ import java.util.OptionalLong; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; +import io.smallrye.config.WithDefault; @ConfigGroup -public class DkimSignOptionsConfig { +public interface DkimSignOptionsConfig { /** * Enables DKIM signing. */ - @ConfigItem(defaultValue = "false") - public boolean enabled; + @WithDefault("false") + boolean enabled(); /** * Configures the PKCS#8 format private key used to sign the email. */ - @ConfigItem - public Optional privateKey = Optional.empty(); + Optional privateKey(); /** * Configures the PKCS#8 format private key file path. */ - @ConfigItem - public Optional privateKeyPath = Optional.empty(); + Optional privateKeyPath(); /** * Configures the Agent or User Identifier (AUID). */ - @ConfigItem - public Optional auid = Optional.empty(); + Optional auid(); /** * Configures the selector used to query the public key. */ - @ConfigItem - public Optional selector = Optional.empty(); + Optional selector(); /** * Configures the Signing Domain Identifier (SDID). */ - @ConfigItem - public Optional sdid = Optional.empty(); + Optional sdid(); /** * Configures the canonicalization algorithm for signed headers. */ - @ConfigItem - public Optional headerCanonAlgo = Optional.empty(); + Optional headerCanonAlgo(); /** * Configures the canonicalization algorithm for mail body. */ - @ConfigItem - public Optional bodyCanonAlgo = Optional.empty(); + Optional bodyCanonAlgo(); /** * Configures the body limit to sign. * * Must be greater than zero. */ - @ConfigItem - public OptionalInt bodyLimit = OptionalInt.empty(); + OptionalInt bodyLimit(); /** * Configures to enable or disable signature sign timestamp. */ - @ConfigItem - public Optional signatureTimestamp = Optional.empty(); + Optional signatureTimestamp(); /** * Configures the expire time in seconds when the signature sign will be expired. * * Must be greater than zero. */ - @ConfigItem - public OptionalLong expireTime = OptionalLong.empty(); + OptionalLong expireTime(); /** * Configures the signed headers in DKIM, separated by commas. * * The order in the list matters. */ - @ConfigItem - public Optional> signedHeaders = Optional.empty(); + Optional> signedHeaders(); public enum CanonicalizationAlgorithmOption { SIMPLE, diff --git a/extensions/mailer/runtime/src/main/java/io/quarkus/mailer/runtime/MailerRuntimeConfig.java b/extensions/mailer/runtime/src/main/java/io/quarkus/mailer/runtime/MailerRuntimeConfig.java index c38c447db3773..745366403c6c4 100644 --- a/extensions/mailer/runtime/src/main/java/io/quarkus/mailer/runtime/MailerRuntimeConfig.java +++ b/extensions/mailer/runtime/src/main/java/io/quarkus/mailer/runtime/MailerRuntimeConfig.java @@ -7,18 +7,17 @@ import java.util.regex.Pattern; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; -import io.quarkus.runtime.annotations.ConvertWith; +import io.smallrye.config.WithConverter; +import io.smallrye.config.WithDefault; @ConfigGroup -public class MailerRuntimeConfig { +public interface MailerRuntimeConfig { /** * Sets the default `from` attribute when not specified in the {@link io.quarkus.mailer.Mail} instance. * It's the sender email address. */ - @ConfigItem - public Optional from = Optional.empty(); + Optional from(); /** * Enables the mock mode. 
@@ -27,21 +26,19 @@ public class MailerRuntimeConfig { *

    * Disabled by default on PROD, enabled by default on DEV and TEST modes. */ - @ConfigItem - public Optional mock = Optional.empty(); + Optional mock(); /** * Sets the default bounce email address. * A bounced email, or bounce, is an email message that gets rejected by a mail server. */ - @ConfigItem - public Optional bounceAddress = Optional.empty(); + Optional bounceAddress(); /** * Sets the SMTP host name. */ - @ConfigItem(defaultValue = "localhost") - public String host = "localhost"; + @WithDefault("localhost") + String host(); /** * The SMTP port. @@ -55,20 +52,17 @@ public class MailerRuntimeConfig { * Note that the port 465 may be used by SMTP servers, however, IANA has reassigned a new service to this port, * and it should no longer be used for SMTP communications. */ - @ConfigItem - public OptionalInt port = OptionalInt.empty(); + OptionalInt port(); /** * Sets the username to connect to the SMTP server. */ - @ConfigItem - public Optional username = Optional.empty(); + Optional username(); /** * Sets the password to connect to the SMTP server. */ - @ConfigItem - public Optional password = Optional.empty(); + Optional password(); /** * The name of the TLS configuration to use. @@ -80,8 +74,7 @@ public class MailerRuntimeConfig { *

    * The default TLS configuration is not used by default. */ - @ConfigItem - public Optional tlsConfigurationName = Optional.empty(); + Optional tlsConfigurationName(); /** * Enables or disables the TLS/SSL. @@ -89,8 +82,8 @@ public class MailerRuntimeConfig { * @deprecated Use {{@link #tls}} */ @Deprecated - @ConfigItem(defaultValue = "false") - public boolean ssl; + @WithDefault("false") + boolean ssl(); /** * Whether the connection should be secured using TLS. @@ -104,8 +97,7 @@ public class MailerRuntimeConfig { * Note that if a TLS configuration is set, TLS is enabled automatically. So, setting this property to {@code false} is * required to not establish a connection with TLS. */ - @ConfigItem - public Optional tls; + Optional tls(); /** * Set whether all server certificates should be trusted. @@ -113,28 +105,26 @@ public class MailerRuntimeConfig { * * @deprecated Use the TLS registry instead. */ - @ConfigItem @Deprecated - public Optional trustAll = Optional.empty(); + Optional trustAll(); /** * Sets the max number of open connections to the mail server. */ - @ConfigItem(defaultValue = "10") - public int maxPoolSize = 10; + @WithDefault("10") + int maxPoolSize(); /** * Sets the hostname to be used for HELO/EHLO and the Message-ID. */ - @ConfigItem - public Optional ownHostName = Optional.empty(); + Optional ownHostName(); /** * Sets if connection pool is enabled. * If the connection pooling is disabled, the max number of sockets is enforced nevertheless. */ - @ConfigItem(defaultValue = "true") - public boolean keepAlive = true; + @WithDefault("true") + boolean keepAlive(); /** * Disable ESMTP. @@ -142,25 +132,24 @@ public class MailerRuntimeConfig { * The RFC-1869 states that clients should always attempt {@code EHLO} as first command to determine if ESMTP * is supported, if this returns an error code, {@code HELO} is tried to use the regular SMTP command. */ - @ConfigItem(defaultValue = "false") - public boolean disableEsmtp; + @WithDefault("false") + boolean disableEsmtp(); /** * Sets the TLS security mode for the connection. * Either {@code DISABLED}, {@code OPTIONAL} or {@code REQUIRED}. */ - @ConfigItem(defaultValue = "OPTIONAL") - public String startTLS = "OPTIONAL"; + @WithDefault("OPTIONAL") + String startTLS(); /** * Configures DKIM signature verification. */ - @ConfigItem - public DkimSignOptionsConfig dkim = new DkimSignOptionsConfig(); + DkimSignOptionsConfig dkim(); /** * Sets the login mode for the connection. - * Either {@code NONE}, @{code DISABLED}, {@code OPTIONAL}, {@code REQUIRED} or {@code XOAUTH2}. + * Either {@code NONE}, {@code DISABLED}, {@code OPTIONAL}, {@code REQUIRED} or {@code XOAUTH2}. *

     * <ul>
     * <li>DISABLED means no login will be attempted</li>
     * <li>NONE means a login will be attempted if the server supports it and login credentials are set</li>
@@ -169,8 +158,8 @@ public class MailerRuntimeConfig {
     * <li>XOAUTH2 means that a login will be attempted using Google Gmail Oauth2 tokens</li>
     * </ul>
    */ - @ConfigItem(defaultValue = "NONE") - public String login = "NONE"; + @WithDefault("NONE") + String login(); /** * Sets the allowed authentication methods. @@ -179,8 +168,7 @@ public class MailerRuntimeConfig { *

    * The list is given as a space separated list, such as {@code DIGEST-MD5 CRAM-SHA256 CRAM-SHA1 CRAM-MD5 PLAIN LOGIN}. */ - @ConfigItem - public Optional authMethods = Optional.empty(); + Optional authMethods(); /** * Set the trust store. @@ -188,67 +176,61 @@ public class MailerRuntimeConfig { * @deprecated Use the TLS registry instead. */ @Deprecated - @ConfigItem - public Optional keyStore = Optional.empty(); + Optional keyStore(); /** * Sets the trust store password if any. * * @deprecated Use the TLS registry instead. */ - @ConfigItem - @Deprecated - public Optional keyStorePassword = Optional.empty(); + Optional keyStorePassword(); /** * Configures the trust store. * * @deprecated Use the TLS registry instead. */ - @ConfigItem - @Deprecated - public TrustStoreConfig truststore = new TrustStoreConfig(); + TrustStoreConfig truststore(); /** * Whether the mail should always been sent as multipart even if they don't have attachments. * When sets to true, the mail message will be encoded as multipart even for simple mails without attachments. */ - @ConfigItem(defaultValue = "false") - public boolean multiPartOnly; + @WithDefault("false") + boolean multiPartOnly(); /** * Sets if sending allows recipients errors. * If set to true, the mail will be sent to the recipients that the server accepted, if any. */ - @ConfigItem(defaultValue = "false") - public boolean allowRcptErrors; + @WithDefault("false") + boolean allowRcptErrors(); /** * Enables or disables the pipelining capability if the SMTP server supports it. */ - @ConfigItem(defaultValue = "true") - public boolean pipelining = true; + @WithDefault("true") + boolean pipelining(); /** * Sets the connection pool cleaner period. * Zero disables expiration checks and connections will remain in the pool until they are closed. */ - @ConfigItem(defaultValue = "PT1S") - public Duration poolCleanerPeriod = Duration.ofSeconds(1L); + @WithDefault("PT1S") + Duration poolCleanerPeriod(); /** * Set the keep alive timeout for the SMTP connection. * This value determines how long a connection remains unused in the pool before being evicted and closed. * A timeout of 0 means there is no timeout. */ - @ConfigItem(defaultValue = "PT300S") - public Duration keepAliveTimeout = Duration.ofSeconds(300L); + @WithDefault("PT300S") + Duration keepAliveTimeout(); /** * Configures NTLM (Windows New Technology LAN Manager). */ - @ConfigItem - public NtlmConfig ntlm = new NtlmConfig(); + NtlmConfig ntlm(); /** * Allows sending emails to these recipients only. @@ -259,9 +241,7 @@ public class MailerRuntimeConfig { * * @see {@link #logRejectedRecipients} */ - @ConfigItem - @ConvertWith(TrimmedPatternConverter.class) - public Optional> approvedRecipients = Optional.empty(); + Optional> approvedRecipients(); /** * Log rejected recipients as warnings. @@ -270,8 +250,8 @@ public class MailerRuntimeConfig { * * @see {@link #approvedRecipients} */ - @ConfigItem(defaultValue = "false") - public boolean logRejectedRecipients = false; + @WithDefault("false") + boolean logRejectedRecipients(); /** * Log invalid recipients as warnings. @@ -279,6 +259,6 @@ public class MailerRuntimeConfig { * If false, the invalid recipients will not be logged and the thrown exception will not contain the invalid email address. 
* */ - @ConfigItem(defaultValue = "false") - public boolean logInvalidRecipients = false; + @WithDefault("false") + boolean logInvalidRecipients(); } diff --git a/extensions/mailer/runtime/src/main/java/io/quarkus/mailer/runtime/Mailers.java b/extensions/mailer/runtime/src/main/java/io/quarkus/mailer/runtime/Mailers.java index 2151d33d0396b..c8f792ea077f7 100644 --- a/extensions/mailer/runtime/src/main/java/io/quarkus/mailer/runtime/Mailers.java +++ b/extensions/mailer/runtime/src/main/java/io/quarkus/mailer/runtime/Mailers.java @@ -64,7 +64,9 @@ public Mailers(Vertx vertx, io.vertx.mutiny.core.Vertx mutinyVertx, MailersRunti Map localMutinyMailers = new HashMap<>(); if (mailerSupport.hasDefaultMailer) { - MailClient mailClient = createMailClient(vertx, DEFAULT_MAILER_NAME, mailersRuntimeConfig.defaultMailer, + MailerRuntimeConfig defaultMailerConfig = mailersRuntimeConfig.mailers().get(DEFAULT_MAILER_NAME); + + MailClient mailClient = createMailClient(vertx, DEFAULT_MAILER_NAME, defaultMailerConfig, tlsRegistry); io.vertx.mutiny.ext.mail.MailClient mutinyMailClient = io.vertx.mutiny.ext.mail.MailClient.newInstance(mailClient); MockMailboxImpl mockMailbox = new MockMailboxImpl(); @@ -73,18 +75,17 @@ public Mailers(Vertx vertx, io.vertx.mutiny.core.Vertx mutinyVertx, MailersRunti localMockMailboxes.put(DEFAULT_MAILER_NAME, mockMailbox); localMutinyMailers.put(DEFAULT_MAILER_NAME, new MutinyMailerImpl(mutinyVertx, mutinyMailClient, mockMailbox, - mailersRuntimeConfig.defaultMailer.from.orElse(null), - mailersRuntimeConfig.defaultMailer.bounceAddress.orElse(null), - mailersRuntimeConfig.defaultMailer.mock.orElse(launchMode.isDevOrTest()), - mailersRuntimeConfig.defaultMailer.approvedRecipients.orElse(List.of()).stream() + defaultMailerConfig.from().orElse(null), + defaultMailerConfig.bounceAddress().orElse(null), + defaultMailerConfig.mock().orElse(launchMode.isDevOrTest()), + defaultMailerConfig.approvedRecipients().orElse(List.of()).stream() .filter(Objects::nonNull).collect(Collectors.toList()), - mailersRuntimeConfig.defaultMailer.logRejectedRecipients, - mailersRuntimeConfig.defaultMailer.logInvalidRecipients, sentMailEvent)); + defaultMailerConfig.logRejectedRecipients(), + defaultMailerConfig.logInvalidRecipients(), sentMailEvent)); } for (String name : mailerSupport.namedMailers) { - MailerRuntimeConfig namedMailerRuntimeConfig = mailersRuntimeConfig.namedMailers - .getOrDefault(name, new MailerRuntimeConfig()); + MailerRuntimeConfig namedMailerRuntimeConfig = mailersRuntimeConfig.mailers().get(name); MailClient namedMailClient = createMailClient(vertx, name, namedMailerRuntimeConfig, tlsRegistry); @@ -96,13 +97,13 @@ public Mailers(Vertx vertx, io.vertx.mutiny.core.Vertx mutinyVertx, MailersRunti localMockMailboxes.put(name, namedMockMailbox); localMutinyMailers.put(name, new MutinyMailerImpl(mutinyVertx, namedMutinyMailClient, namedMockMailbox, - namedMailerRuntimeConfig.from.orElse(null), - namedMailerRuntimeConfig.bounceAddress.orElse(null), - namedMailerRuntimeConfig.mock.orElse(false), - namedMailerRuntimeConfig.approvedRecipients.orElse(List.of()).stream() + namedMailerRuntimeConfig.from().orElse(null), + namedMailerRuntimeConfig.bounceAddress().orElse(null), + namedMailerRuntimeConfig.mock().orElse(false), + namedMailerRuntimeConfig.approvedRecipients().orElse(List.of()).stream() .filter(p -> p != null).collect(Collectors.toList()), - namedMailerRuntimeConfig.logRejectedRecipients, - namedMailerRuntimeConfig.logInvalidRecipients, + 
namedMailerRuntimeConfig.logRejectedRecipients(), + namedMailerRuntimeConfig.logInvalidRecipients(), sentMailEvent)); } @@ -149,53 +150,54 @@ private MailClient createMailClient(Vertx vertx, String name, MailerRuntimeConfi private io.vertx.ext.mail.DKIMSignOptions toVertxDkimSignOptions(DkimSignOptionsConfig optionsConfig) { DKIMSignOptions vertxDkimOptions = new io.vertx.ext.mail.DKIMSignOptions(); - String sdid = optionsConfig.sdid + String sdid = optionsConfig.sdid() .orElseThrow(() -> { throw new ConfigurationException("Must provide the Signing Domain Identifier (sdid)."); }); vertxDkimOptions.setSdid(sdid); - String selector = optionsConfig.selector + String selector = optionsConfig.selector() .orElseThrow(() -> { throw new ConfigurationException("Must provide the selector."); }); vertxDkimOptions.setSelector(selector); - if (optionsConfig.auid.isPresent()) { - vertxDkimOptions.setAuid(optionsConfig.auid.get()); + if (optionsConfig.auid().isPresent()) { + vertxDkimOptions.setAuid(optionsConfig.auid().get()); } - if (optionsConfig.bodyLimit.isPresent()) { - int bodyLimit = optionsConfig.bodyLimit.getAsInt(); + if (optionsConfig.bodyLimit().isPresent()) { + int bodyLimit = optionsConfig.bodyLimit().getAsInt(); vertxDkimOptions.setBodyLimit(bodyLimit); } - if (optionsConfig.expireTime.isPresent()) { - long expireTime = optionsConfig.expireTime.getAsLong(); + if (optionsConfig.expireTime().isPresent()) { + long expireTime = optionsConfig.expireTime().getAsLong(); vertxDkimOptions.setExpireTime(expireTime); } - if (optionsConfig.bodyCanonAlgo.isPresent()) { - vertxDkimOptions.setBodyCanonAlgo(CanonicalizationAlgorithm.valueOf(optionsConfig.bodyCanonAlgo.get().toString())); + if (optionsConfig.bodyCanonAlgo().isPresent()) { + vertxDkimOptions + .setBodyCanonAlgo(CanonicalizationAlgorithm.valueOf(optionsConfig.bodyCanonAlgo().get().toString())); } - if (optionsConfig.headerCanonAlgo.isPresent()) { + if (optionsConfig.headerCanonAlgo().isPresent()) { vertxDkimOptions - .setHeaderCanonAlgo(CanonicalizationAlgorithm.valueOf(optionsConfig.headerCanonAlgo.get().toString())); + .setHeaderCanonAlgo(CanonicalizationAlgorithm.valueOf(optionsConfig.headerCanonAlgo().get().toString())); } - if (optionsConfig.privateKey.isPresent()) { - vertxDkimOptions.setPrivateKey(optionsConfig.privateKey.get()); - } else if (optionsConfig.privateKeyPath.isPresent()) { - vertxDkimOptions.setPrivateKeyPath(optionsConfig.privateKeyPath.get()); + if (optionsConfig.privateKey().isPresent()) { + vertxDkimOptions.setPrivateKey(optionsConfig.privateKey().get()); + } else if (optionsConfig.privateKeyPath().isPresent()) { + vertxDkimOptions.setPrivateKeyPath(optionsConfig.privateKeyPath().get()); } - if (optionsConfig.signatureTimestamp.isPresent()) { - vertxDkimOptions.setSignatureTimestamp(optionsConfig.signatureTimestamp.get()); + if (optionsConfig.signatureTimestamp().isPresent()) { + vertxDkimOptions.setSignatureTimestamp(optionsConfig.signatureTimestamp().get()); } - if (optionsConfig.signedHeaders.isPresent()) { - List headers = optionsConfig.signedHeaders.get(); + if (optionsConfig.signedHeaders().isPresent()) { + List headers = optionsConfig.signedHeaders().get(); if (headers.stream().noneMatch(header -> header.equalsIgnoreCase("from"))) { throw new ConfigurationException( @@ -211,43 +213,43 @@ private io.vertx.ext.mail.DKIMSignOptions toVertxDkimSignOptions(DkimSignOptions private io.vertx.ext.mail.MailConfig toVertxMailConfig(String name, MailerRuntimeConfig config, TlsConfigurationRegistry tlsRegistry) { 
io.vertx.ext.mail.MailConfig cfg = new io.vertx.ext.mail.MailConfig(); - if (config.authMethods.isPresent()) { - cfg.setAuthMethods(config.authMethods.get()); + if (config.authMethods().isPresent()) { + cfg.setAuthMethods(config.authMethods().get()); } - cfg.setDisableEsmtp(config.disableEsmtp); - cfg.setHostname(config.host); - cfg.setKeepAlive(config.keepAlive); - cfg.setLogin(LoginOption.valueOf(config.login.toUpperCase())); - cfg.setMaxPoolSize(config.maxPoolSize); - - if (config.ownHostName.isPresent()) { - cfg.setOwnHostname(config.ownHostName.get()); + cfg.setDisableEsmtp(config.disableEsmtp()); + cfg.setHostname(config.host()); + cfg.setKeepAlive(config.keepAlive()); + cfg.setLogin(LoginOption.valueOf(config.login().toUpperCase())); + cfg.setMaxPoolSize(config.maxPoolSize()); + + if (config.ownHostName().isPresent()) { + cfg.setOwnHostname(config.ownHostName().get()); } - if (config.username.isPresent()) { - cfg.setUsername(config.username.get()); + if (config.username().isPresent()) { + cfg.setUsername(config.username().get()); } - if (config.password.isPresent()) { - cfg.setPassword(config.password.get()); + if (config.password().isPresent()) { + cfg.setPassword(config.password().get()); } - if (config.port.isPresent()) { - cfg.setPort(config.port.getAsInt()); + if (config.port().isPresent()) { + cfg.setPort(config.port().getAsInt()); } - if (config.dkim != null && config.dkim.enabled) { + if (config.dkim() != null && config.dkim().enabled()) { cfg.setEnableDKIM(true); - cfg.addDKIMSignOption(toVertxDkimSignOptions(config.dkim)); + cfg.addDKIMSignOption(toVertxDkimSignOptions(config.dkim())); } - cfg.setStarttls(StartTLSOptions.valueOf(config.startTLS.toUpperCase())); - cfg.setMultiPartOnly(config.multiPartOnly); + cfg.setStarttls(StartTLSOptions.valueOf(config.startTLS().toUpperCase())); + cfg.setMultiPartOnly(config.multiPartOnly()); - cfg.setAllowRcptErrors(config.allowRcptErrors); - cfg.setPipelining(config.pipelining); - cfg.setPoolCleanerPeriod((int) config.poolCleanerPeriod.toMillis()); + cfg.setAllowRcptErrors(config.allowRcptErrors()); + cfg.setPipelining(config.pipelining()); + cfg.setPoolCleanerPeriod((int) config.poolCleanerPeriod().toMillis()); cfg.setPoolCleanerPeriodUnit(TimeUnit.MILLISECONDS); - cfg.setKeepAliveTimeout((int) config.keepAliveTimeout.toMillis()); + cfg.setKeepAliveTimeout((int) config.keepAliveTimeout().toMillis()); cfg.setKeepAliveTimeoutUnit(TimeUnit.MILLISECONDS); configureTLS(name, config, tlsRegistry, cfg); @@ -264,11 +266,11 @@ private io.vertx.ext.mail.MailConfig toVertxMailConfig(String name, MailerRuntim private void configureTLS(String name, MailerRuntimeConfig config, TlsConfigurationRegistry tlsRegistry, MailConfig cfg) { TlsConfiguration configuration = null; boolean defaultTrustAll = false; - if (config.tlsConfigurationName.isPresent()) { - Optional maybeConfiguration = tlsRegistry.get(config.tlsConfigurationName.get()); + if (config.tlsConfigurationName().isPresent()) { + Optional maybeConfiguration = tlsRegistry.get(config.tlsConfigurationName().get()); if (!maybeConfiguration.isPresent()) { throw new IllegalStateException("Unable to find the TLS configuration " - + config.tlsConfigurationName.get() + " for the mailer " + name + "."); + + config.tlsConfigurationName().get() + " for the mailer " + name + "."); } configuration = maybeConfiguration.get(); } else if (tlsRegistry.getDefault().isPresent() && tlsRegistry.getDefault().get().isTrustAll()) { @@ -282,7 +284,7 @@ private void configureTLS(String name, MailerRuntimeConfig config, 
TlsConfigurat if (configuration != null) { // SMTP is a bit convoluted here. // You can start a non-TLS connection and then upgrade to TLS (using the STARTTLS command). - cfg.setSsl(config.tls.orElse(true)); + cfg.setSsl(config.tls().orElse(true)); if (configuration.getTrustStoreOptions() != null) { cfg.setTrustOptions(configuration.getTrustStoreOptions()); @@ -313,8 +315,8 @@ private void configureTLS(String name, MailerRuntimeConfig config, TlsConfigurat } } else { - boolean trustAll = config.trustAll.isPresent() ? config.trustAll.get() : defaultTrustAll; - cfg.setSsl(config.ssl || config.tls.orElse(trustAll)); + boolean trustAll = config.trustAll().isPresent() ? config.trustAll().get() : defaultTrustAll; + cfg.setSsl(config.ssl() || config.tls().orElse(trustAll)); cfg.setTrustAll(trustAll); applyTruststore(name, config, cfg); } @@ -322,26 +324,26 @@ private void configureTLS(String name, MailerRuntimeConfig config, TlsConfigurat private void applyTruststore(String name, MailerRuntimeConfig config, io.vertx.ext.mail.MailConfig cfg) { // Handle deprecated config - if (config.keyStore.isPresent()) { + if (config.keyStore().isPresent()) { LOGGER.warn("`quarkus.mailer.key-store` is deprecated, use `quarkus.mailer.trust-store.path` instead"); JksOptions options = new JksOptions(); - options.setPath(config.keyStore.get()); - if (config.keyStorePassword.isPresent()) { + options.setPath(config.keyStore().get()); + if (config.keyStorePassword().isPresent()) { LOGGER.warn( "`quarkus.mailer.key-store-password` is deprecated, use `quarkus.mailer.trust-store.password` instead"); - options.setPassword(config.keyStorePassword.get()); + options.setPassword(config.keyStorePassword().get()); } cfg.setTrustOptions(options); return; } - TrustStoreConfig truststore = config.truststore; + TrustStoreConfig truststore = config.truststore(); if (truststore.isConfigured()) { if (cfg.isTrustAll()) { // Use the value configured before. LOGGER.warn( "SMTP is configured with a trust store and also with trust-all, disable trust-all to enforce the trust store usage"); } - cfg.setTrustOptions(getTrustOptions(name, truststore.password, truststore.paths, truststore.type)); + cfg.setTrustOptions(getTrustOptions(name, truststore.password(), truststore.paths(), truststore.type())); } } diff --git a/extensions/mailer/runtime/src/main/java/io/quarkus/mailer/runtime/MailersBuildTimeConfig.java b/extensions/mailer/runtime/src/main/java/io/quarkus/mailer/runtime/MailersBuildTimeConfig.java index 02ca61227598c..7eff94298a52c 100644 --- a/extensions/mailer/runtime/src/main/java/io/quarkus/mailer/runtime/MailersBuildTimeConfig.java +++ b/extensions/mailer/runtime/src/main/java/io/quarkus/mailer/runtime/MailersBuildTimeConfig.java @@ -1,16 +1,18 @@ package io.quarkus.mailer.runtime; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefault; -@ConfigRoot(name = "mailer", phase = ConfigPhase.BUILD_AND_RUN_TIME_FIXED) -public class MailersBuildTimeConfig { +@ConfigRoot(phase = ConfigPhase.BUILD_AND_RUN_TIME_FIXED) +@ConfigMapping(prefix = "quarkus.mailer") +public interface MailersBuildTimeConfig { /** * Caches data from attachment's Stream to a temporary file. * It tries to delete it after sending email. 
*/ - @ConfigItem(defaultValue = "false") - public boolean cacheAttachments; + @WithDefault("false") + boolean cacheAttachments(); } diff --git a/extensions/mailer/runtime/src/main/java/io/quarkus/mailer/runtime/MailersRuntimeConfig.java b/extensions/mailer/runtime/src/main/java/io/quarkus/mailer/runtime/MailersRuntimeConfig.java index 0d5ca9c5b5677..f23c1a4c33488 100644 --- a/extensions/mailer/runtime/src/main/java/io/quarkus/mailer/runtime/MailersRuntimeConfig.java +++ b/extensions/mailer/runtime/src/main/java/io/quarkus/mailer/runtime/MailersRuntimeConfig.java @@ -4,24 +4,24 @@ import io.quarkus.runtime.annotations.ConfigDocMapKey; import io.quarkus.runtime.annotations.ConfigDocSection; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefaults; +import io.smallrye.config.WithParentName; +import io.smallrye.config.WithUnnamedKey; -@ConfigRoot(name = "mailer", phase = ConfigPhase.RUN_TIME) -public class MailersRuntimeConfig { - - /** - * The default mailer. - */ - @ConfigItem(name = ConfigItem.PARENT) - public MailerRuntimeConfig defaultMailer; +@ConfigRoot(phase = ConfigPhase.RUN_TIME) +@ConfigMapping(prefix = "quarkus.mailer") +public interface MailersRuntimeConfig { /** * Additional named mailers. */ @ConfigDocSection @ConfigDocMapKey("mailer-name") - @ConfigItem(name = ConfigItem.PARENT) - public Map namedMailers; + @WithParentName + @WithDefaults + @WithUnnamedKey(Mailers.DEFAULT_MAILER_NAME) + Map mailers(); } diff --git a/extensions/mailer/runtime/src/main/java/io/quarkus/mailer/runtime/NtlmConfig.java b/extensions/mailer/runtime/src/main/java/io/quarkus/mailer/runtime/NtlmConfig.java index 50f961475d3ff..65e2f278b23e3 100644 --- a/extensions/mailer/runtime/src/main/java/io/quarkus/mailer/runtime/NtlmConfig.java +++ b/extensions/mailer/runtime/src/main/java/io/quarkus/mailer/runtime/NtlmConfig.java @@ -3,21 +3,18 @@ import java.util.Optional; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; @ConfigGroup -public class NtlmConfig { +public interface NtlmConfig { /** * Sets the workstation used on NTLM authentication. */ - @ConfigItem - public Optional workstation = Optional.empty(); + public Optional workstation(); /** * Sets the domain used on NTLM authentication. */ - @ConfigItem - public Optional domain = Optional.empty(); + public Optional domain(); } diff --git a/extensions/mailer/runtime/src/main/java/io/quarkus/mailer/runtime/TrustStoreConfig.java b/extensions/mailer/runtime/src/main/java/io/quarkus/mailer/runtime/TrustStoreConfig.java index 819ff56cf60b6..eabc2f31b816f 100644 --- a/extensions/mailer/runtime/src/main/java/io/quarkus/mailer/runtime/TrustStoreConfig.java +++ b/extensions/mailer/runtime/src/main/java/io/quarkus/mailer/runtime/TrustStoreConfig.java @@ -4,19 +4,17 @@ import java.util.Optional; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; -import io.quarkus.runtime.annotations.ConvertWith; import io.quarkus.runtime.configuration.TrimmedStringConverter; +import io.smallrye.config.WithConverter; @ConfigGroup -public class TrustStoreConfig { +public interface TrustStoreConfig { /** * Sets the trust store password if any. * Note that the password is only used for JKS and PCK#12 trust stores. 
*/ - @ConfigItem - Optional password = Optional.empty(); + Optional password(); /** * Sets the location of the trust store files. @@ -25,9 +23,7 @@ public class TrustStoreConfig { *

    * The relative paths are relative to the application working directly. */ - @ConfigItem - @ConvertWith(TrimmedStringConverter.class) - Optional> paths = Optional.empty(); + Optional> paths(); /** * Sets the trust store type. @@ -37,13 +33,12 @@ public class TrustStoreConfig { * * Accepted values are: {@code JKS}, {@code PEM}, {@code PKCS}. */ - @ConfigItem - Optional type = Optional.empty(); + Optional type(); /** * @return {@code true} is the trust store is configured, {@code false otherwise} */ - public boolean isConfigured() { - return paths.isPresent() && !paths.get().isEmpty(); + default boolean isConfigured() { + return paths().isPresent() && !paths().get().isEmpty(); } } diff --git a/extensions/mailer/runtime/src/test/java/io/quarkus/mailer/runtime/FakeSmtpTestBase.java b/extensions/mailer/runtime/src/test/java/io/quarkus/mailer/runtime/FakeSmtpTestBase.java index fc8f0011b2a2a..df44313865aad 100644 --- a/extensions/mailer/runtime/src/test/java/io/quarkus/mailer/runtime/FakeSmtpTestBase.java +++ b/extensions/mailer/runtime/src/test/java/io/quarkus/mailer/runtime/FakeSmtpTestBase.java @@ -1,9 +1,12 @@ package io.quarkus.mailer.runtime; import java.time.Duration; +import java.util.List; +import java.util.Map; import java.util.Optional; import java.util.OptionalInt; import java.util.Set; +import java.util.regex.Pattern; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; @@ -112,33 +115,199 @@ public void register(String name, TlsConfiguration configuration) { return mailers.reactiveMailerFromName(Mailers.DEFAULT_MAILER_NAME); } - protected MailersRuntimeConfig getDefaultConfig() { - MailersRuntimeConfig mailersConfig = new MailersRuntimeConfig(); - mailersConfig.defaultMailer = new MailerRuntimeConfig(); - mailersConfig.defaultMailer.from = Optional.of(FROM); - mailersConfig.defaultMailer.host = "localhost"; - mailersConfig.defaultMailer.port = OptionalInt.of(FAKE_SMTP_PORT); - mailersConfig.defaultMailer.startTLS = "DISABLED"; - mailersConfig.defaultMailer.login = "DISABLED"; - mailersConfig.defaultMailer.ssl = false; - mailersConfig.defaultMailer.tls = Optional.empty(); - mailersConfig.defaultMailer.authMethods = Optional.empty(); - mailersConfig.defaultMailer.maxPoolSize = 10; - mailersConfig.defaultMailer.ownHostName = Optional.empty(); - mailersConfig.defaultMailer.username = Optional.empty(); - mailersConfig.defaultMailer.password = Optional.empty(); - mailersConfig.defaultMailer.poolCleanerPeriod = Duration.ofSeconds(1); - mailersConfig.defaultMailer.keepAlive = true; - mailersConfig.defaultMailer.keepAliveTimeout = Duration.ofMinutes(5); - mailersConfig.defaultMailer.trustAll = Optional.empty(); - mailersConfig.defaultMailer.keyStore = Optional.empty(); - mailersConfig.defaultMailer.keyStorePassword = Optional.empty(); - mailersConfig.defaultMailer.truststore = new TrustStoreConfig(); - mailersConfig.defaultMailer.truststore.paths = Optional.empty(); - mailersConfig.defaultMailer.truststore.password = Optional.empty(); - mailersConfig.defaultMailer.truststore.type = Optional.empty(); - - return mailersConfig; + static class DefaultMailersRuntimeConfig implements MailersRuntimeConfig { + + private DefaultMailerRuntimeConfig defaultMailerRuntimeConfig; + + DefaultMailersRuntimeConfig() { + this(new DefaultMailerRuntimeConfig()); + } + + DefaultMailersRuntimeConfig(DefaultMailerRuntimeConfig defaultMailerRuntimeConfig) { + this.defaultMailerRuntimeConfig = defaultMailerRuntimeConfig; + } + + @Override + public Map mailers() { + return 
Map.of(Mailers.DEFAULT_MAILER_NAME, defaultMailerRuntimeConfig); + } + + } + + static class DefaultMailerRuntimeConfig implements MailerRuntimeConfig { + + @Override + public Optional from() { + return Optional.of(FROM); + } + + @Override + public Optional mock() { + return Optional.empty(); + } + + @Override + public Optional bounceAddress() { + return Optional.empty(); + } + + @Override + public String host() { + return "localhost"; + } + + @Override + public OptionalInt port() { + return OptionalInt.of(FAKE_SMTP_PORT); + } + + @Override + public Optional username() { + return Optional.empty(); + } + + @Override + public Optional password() { + return Optional.empty(); + } + + @Override + public Optional tlsConfigurationName() { + return Optional.empty(); + } + + @Override + public boolean ssl() { + return false; + } + + @Override + public Optional tls() { + return Optional.empty(); + } + + @Override + public Optional trustAll() { + return Optional.empty(); + } + + @Override + public int maxPoolSize() { + return 10; + } + + @Override + public Optional ownHostName() { + return Optional.empty(); + } + + @Override + public boolean keepAlive() { + return false; + } + + @Override + public boolean disableEsmtp() { + return false; + } + + @Override + public String startTLS() { + return "DISABLED"; + } + + @Override + public DkimSignOptionsConfig dkim() { + return null; + } + + @Override + public String login() { + return "DISABLED"; + } + + @Override + public Optional authMethods() { + return Optional.empty(); + } + + @Override + public Optional keyStore() { + return Optional.empty(); + } + + @Override + public Optional keyStorePassword() { + return Optional.empty(); + } + + @Override + public TrustStoreConfig truststore() { + return new TrustStoreConfig() { + + @Override + public Optional type() { + return Optional.empty(); + } + + @Override + public Optional> paths() { + return Optional.empty(); + } + + @Override + public Optional password() { + return Optional.empty(); + } + }; + } + + @Override + public boolean multiPartOnly() { + // TODO Auto-generated method stub + return false; + } + + @Override + public boolean allowRcptErrors() { + return false; + } + + @Override + public boolean pipelining() { + return false; + } + + @Override + public Duration poolCleanerPeriod() { + return Duration.ofSeconds(1); + } + + @Override + public Duration keepAliveTimeout() { + return Duration.ofMinutes(5); + } + + @Override + public NtlmConfig ntlm() { + return null; + } + + @Override + public Optional> approvedRecipients() { + return Optional.empty(); + } + + @Override + public boolean logRejectedRecipients() { + return false; + } + + @Override + public boolean logInvalidRecipients() { + return false; + } + } } diff --git a/extensions/mailer/runtime/src/test/java/io/quarkus/mailer/runtime/MailerTLSRegistryTest.java b/extensions/mailer/runtime/src/test/java/io/quarkus/mailer/runtime/MailerTLSRegistryTest.java index 6e7d3dd843adc..198da225df2c9 100644 --- a/extensions/mailer/runtime/src/test/java/io/quarkus/mailer/runtime/MailerTLSRegistryTest.java +++ b/extensions/mailer/runtime/src/test/java/io/quarkus/mailer/runtime/MailerTLSRegistryTest.java @@ -18,8 +18,17 @@ public class MailerTLSRegistryTest extends FakeSmtpTestBase { @Test public void sendMailWithCorrectTrustStore() { - MailersRuntimeConfig mailersConfig = getDefaultConfig(); - mailersConfig.defaultMailer.tlsConfigurationName = Optional.of("my-mailer"); + MailersRuntimeConfig mailersConfig = new DefaultMailersRuntimeConfig(new 
DefaultMailerRuntimeConfig() { + @Override + public Optional tlsConfigurationName() { + return Optional.of("my-mailer"); + } + + @Override + public Optional tls() { + return Optional.of(true); + } + }); ReactiveMailer mailer = getMailer(mailersConfig, "my-mailer", new BaseTlsConfiguration() { @Override @@ -37,7 +46,7 @@ public TrustOptions getTrustStoreOptions() { @Test public void sendMailWithDefaultTrustAll() { - MailersRuntimeConfig mailersConfig = getDefaultConfig(); + MailersRuntimeConfig mailersConfig = new DefaultMailersRuntimeConfig(); ReactiveMailer mailer = getMailer(mailersConfig, null, new BaseTlsConfiguration() { @Override @@ -51,8 +60,12 @@ public boolean isTrustAll() { @Test public void sendMailWithNamedTrustAll() { - MailersRuntimeConfig mailersConfig = getDefaultConfig(); - mailersConfig.defaultMailer.tlsConfigurationName = Optional.of("my-mailer"); + MailersRuntimeConfig mailersConfig = new DefaultMailersRuntimeConfig(new DefaultMailerRuntimeConfig() { + @Override + public Optional tlsConfigurationName() { + return Optional.of("my-mailer"); + } + }); ReactiveMailer mailer = getMailer(mailersConfig, "my-mailer", new BaseTlsConfiguration() { @Override @@ -66,9 +79,17 @@ public boolean isTrustAll() { @Test public void sendMailWithoutTrustStore() { - MailersRuntimeConfig mailersConfig = getDefaultConfig(); - mailersConfig.defaultMailer.tlsConfigurationName = Optional.of("my-mailer"); - mailersConfig.defaultMailer.tls = Optional.of(true); + MailersRuntimeConfig mailersConfig = new DefaultMailersRuntimeConfig(new DefaultMailerRuntimeConfig() { + @Override + public Optional tlsConfigurationName() { + return Optional.of("my-mailer"); + } + + @Override + public Optional tls() { + return Optional.of(true); + } + }); startServer(SERVER_JKS); ReactiveMailer mailer = getMailer(mailersConfig, "my-mailer", new BaseTlsConfiguration() { @@ -81,8 +102,12 @@ public void sendMailWithoutTrustStore() { @Test public void testWithWrongTlsName() { - MailersRuntimeConfig mailersConfig = getDefaultConfig(); - mailersConfig.defaultMailer.tlsConfigurationName = Optional.of("missing-mailer-configuration"); + MailersRuntimeConfig mailersConfig = new DefaultMailersRuntimeConfig(new DefaultMailerRuntimeConfig() { + @Override + public Optional tlsConfigurationName() { + return Optional.of("missing-mailer-configuration"); + } + }); Assertions.assertThatThrownBy(() -> getMailer(mailersConfig, "my-mailer", new BaseTlsConfiguration() { @Override diff --git a/extensions/mailer/runtime/src/test/java/io/quarkus/mailer/runtime/MailerTruststoreTest.java b/extensions/mailer/runtime/src/test/java/io/quarkus/mailer/runtime/MailerTruststoreTest.java index 92c1c9c0cdf14..4d63de7b13be2 100644 --- a/extensions/mailer/runtime/src/test/java/io/quarkus/mailer/runtime/MailerTruststoreTest.java +++ b/extensions/mailer/runtime/src/test/java/io/quarkus/mailer/runtime/MailerTruststoreTest.java @@ -1,6 +1,6 @@ package io.quarkus.mailer.runtime; -import java.util.Collections; +import java.util.List; import java.util.Optional; import java.util.concurrent.CompletionException; @@ -15,10 +15,33 @@ public class MailerTruststoreTest extends FakeSmtpTestBase { @Test public void sendMailWithCorrectTrustStore() { - MailersRuntimeConfig mailersConfig = getDefaultConfig(); - mailersConfig.defaultMailer.ssl = true; - mailersConfig.defaultMailer.truststore.password = Optional.of("password"); - mailersConfig.defaultMailer.truststore.paths = Optional.of(Collections.singletonList(CLIENT_TRUSTSTORE)); + MailersRuntimeConfig mailersConfig = new 
DefaultMailersRuntimeConfig(new DefaultMailerRuntimeConfig() { + @Override + public boolean ssl() { + return true; + } + + @Override + public TrustStoreConfig truststore() { + return new TrustStoreConfig() { + + @Override + public Optional type() { + return Optional.empty(); + } + + @Override + public Optional> paths() { + return Optional.of(List.of(CLIENT_TRUSTSTORE)); + } + + @Override + public Optional password() { + return Optional.of("password"); + } + }; + } + }); ReactiveMailer mailer = getMailer(mailersConfig); startServer(SERVER_JKS); @@ -28,10 +51,22 @@ public void sendMailWithCorrectTrustStore() { @SuppressWarnings("deprecation") @Test public void sendMailWithCorrectButDeprecatedTrustStore() { - MailersRuntimeConfig mailersConfig = getDefaultConfig(); - mailersConfig.defaultMailer.ssl = true; - mailersConfig.defaultMailer.keyStorePassword = Optional.of("password"); - mailersConfig.defaultMailer.keyStore = Optional.of(CLIENT_TRUSTSTORE); + MailersRuntimeConfig mailersConfig = new DefaultMailersRuntimeConfig(new DefaultMailerRuntimeConfig() { + @Override + public boolean ssl() { + return true; + } + + @Override + public Optional keyStorePassword() { + return Optional.of("password"); + } + + @Override + public Optional keyStore() { + return Optional.of(CLIENT_TRUSTSTORE); + } + }); ReactiveMailer mailer = getMailer(mailersConfig); startServer(SERVER_JKS); @@ -40,9 +75,18 @@ public void sendMailWithCorrectButDeprecatedTrustStore() { @Test public void sendMailWithTrustAll() { - MailersRuntimeConfig mailersConfig = getDefaultConfig(); - mailersConfig.defaultMailer.ssl = true; - mailersConfig.defaultMailer.trustAll = Optional.of(true); + MailersRuntimeConfig mailersConfig = new DefaultMailersRuntimeConfig(new DefaultMailerRuntimeConfig() { + @Override + public boolean ssl() { + return true; + } + + @Override + public Optional trustAll() { + return Optional.of(true); + } + }); + ReactiveMailer mailer = getMailer(mailersConfig); startServer(SERVER_JKS); mailer.send(getMail()).await().indefinitely(); @@ -50,8 +94,13 @@ public void sendMailWithTrustAll() { @Test public void sendMailWithGlobalTrustAll() { - MailersRuntimeConfig mailersConfig = getDefaultConfig(); - mailersConfig.defaultMailer.ssl = true; + MailersRuntimeConfig mailersConfig = new DefaultMailersRuntimeConfig(new DefaultMailerRuntimeConfig() { + @Override + public boolean ssl() { + return true; + } + }); + ReactiveMailer mailer = getMailer(mailersConfig, true); startServer(SERVER_JKS); mailer.send(getMail()).await().indefinitely(); @@ -59,8 +108,12 @@ public void sendMailWithGlobalTrustAll() { @Test public void sendMailWithoutTrustStore() { - MailersRuntimeConfig mailersConfig = getDefaultConfig(); - mailersConfig.defaultMailer.ssl = true; + MailersRuntimeConfig mailersConfig = new DefaultMailersRuntimeConfig(new DefaultMailerRuntimeConfig() { + @Override + public boolean ssl() { + return true; + } + }); startServer(SERVER_JKS); ReactiveMailer mailer = getMailer(mailersConfig); diff --git a/extensions/micrometer-opentelemetry/deployment/pom.xml b/extensions/micrometer-opentelemetry/deployment/pom.xml new file mode 100644 index 0000000000000..a500f05fa846a --- /dev/null +++ b/extensions/micrometer-opentelemetry/deployment/pom.xml @@ -0,0 +1,134 @@ + + + 4.0.0 + + + io.quarkus + quarkus-micrometer-opentelemetry-parent + 999-SNAPSHOT + ../pom.xml + + + quarkus-micrometer-opentelemetry-deployment + Quarkus - Micrometer to OpenTelemetry Bridge - Deployment + Micrometer registry implemented by the OpenTelemetry SDK + + + + + 
io.quarkus + quarkus-micrometer-opentelemetry + ${project.version} + + + + io.quarkus + quarkus-micrometer-deployment + + + + io.quarkus + quarkus-opentelemetry-deployment + + + + + io.quarkus + quarkus-junit5-internal + test + + + + io.quarkus + quarkus-junit5 + test + + + + io.rest-assured + rest-assured + test + + + + org.awaitility + awaitility + test + + + + org.assertj + assertj-core + test + + + + io.opentelemetry + opentelemetry-sdk-testing + test + + + + io.smallrye.reactive + smallrye-mutiny-vertx-web-client + test + + + + io.quarkus + quarkus-rest-client-deployment + test + + + + io.quarkus + quarkus-rest-jackson-deployment + test + + + + io.quarkus + quarkus-vertx-http-deployment + test + + + + io.quarkus + quarkus-reactive-routes-deployment + test + + + + + + + maven-surefire-plugin + + + org.jboss.logmanager.LogManager + INFO + + + + + maven-compiler-plugin + + + default-compile + + + + io.quarkus + quarkus-extension-processor + ${project.version} + + + + + + + + + \ No newline at end of file diff --git a/extensions/micrometer-opentelemetry/deployment/src/main/java/io/quarkus/micrometer/opentelemetry/deployment/MicrometerOTelBridgeConfigBuilderCustomizer.java b/extensions/micrometer-opentelemetry/deployment/src/main/java/io/quarkus/micrometer/opentelemetry/deployment/MicrometerOTelBridgeConfigBuilderCustomizer.java new file mode 100644 index 0000000000000..fccb0b9c39bba --- /dev/null +++ b/extensions/micrometer-opentelemetry/deployment/src/main/java/io/quarkus/micrometer/opentelemetry/deployment/MicrometerOTelBridgeConfigBuilderCustomizer.java @@ -0,0 +1,18 @@ +package io.quarkus.micrometer.opentelemetry.deployment; + +import java.util.Map; + +import io.smallrye.config.PropertiesConfigSource; +import io.smallrye.config.SmallRyeConfigBuilder; +import io.smallrye.config.SmallRyeConfigBuilderCustomizer; + +public class MicrometerOTelBridgeConfigBuilderCustomizer implements SmallRyeConfigBuilderCustomizer { + @Override + public void configBuilder(final SmallRyeConfigBuilder builder) { + // use a priority of 50 to make sure that this is overridable by any of the standard methods + builder.withSources( + new PropertiesConfigSource(Map.of( + "quarkus.otel.metrics.enabled", "true", + "quarkus.otel.logs.enabled", "true"), "quarkus-micrometer-opentelemetry", 1)); + } +} diff --git a/extensions/micrometer-opentelemetry/deployment/src/main/java/io/quarkus/micrometer/opentelemetry/deployment/MicrometerOtelBridgeProcessor.java b/extensions/micrometer-opentelemetry/deployment/src/main/java/io/quarkus/micrometer/opentelemetry/deployment/MicrometerOtelBridgeProcessor.java new file mode 100644 index 0000000000000..c25a6664880c8 --- /dev/null +++ b/extensions/micrometer-opentelemetry/deployment/src/main/java/io/quarkus/micrometer/opentelemetry/deployment/MicrometerOtelBridgeProcessor.java @@ -0,0 +1,91 @@ +package io.quarkus.micrometer.opentelemetry.deployment; + +import java.util.Locale; +import java.util.function.BooleanSupplier; + +import jakarta.enterprise.inject.Instance; +import jakarta.inject.Singleton; + +import org.jboss.jandex.ClassType; +import org.jboss.jandex.DotName; +import org.jboss.jandex.ParameterizedType; +import org.jboss.jandex.Type; +import org.jboss.logmanager.Level; + +import io.micrometer.core.instrument.MeterRegistry; +import io.opentelemetry.api.OpenTelemetry; +import io.quarkus.arc.deployment.SyntheticBeanBuildItem; +import io.quarkus.deployment.annotations.BuildProducer; +import io.quarkus.deployment.annotations.BuildStep; +import 
io.quarkus.deployment.annotations.BuildSteps; +import io.quarkus.deployment.annotations.ExecutionTime; +import io.quarkus.deployment.annotations.Record; +import io.quarkus.deployment.builditem.LogCategoryBuildItem; +import io.quarkus.deployment.builditem.RunTimeConfigurationDefaultBuildItem; +import io.quarkus.micrometer.deployment.MicrometerProcessor; +import io.quarkus.micrometer.opentelemetry.runtime.MicrometerOtelBridgeRecorder; +import io.quarkus.opentelemetry.deployment.OpenTelemetryEnabled; +import io.quarkus.opentelemetry.runtime.config.build.OTelBuildConfig; +import io.quarkus.opentelemetry.runtime.config.runtime.OTelRuntimeConfig; + +@BuildSteps(onlyIf = { + MicrometerProcessor.MicrometerEnabled.class, + OpenTelemetryEnabled.class, + MicrometerOtelBridgeProcessor.OtlpMetricsExporterEnabled.class }) +public class MicrometerOtelBridgeProcessor { + + @BuildStep + public void disableOTelAutoInstrumentedMetrics(BuildProducer runtimeConfigProducer) { + runtimeConfigProducer.produce( + new RunTimeConfigurationDefaultBuildItem("quarkus.otel.instrument.http-server-metrics", "false")); + runtimeConfigProducer.produce( + new RunTimeConfigurationDefaultBuildItem("quarkus.otel.instrument.jvm-metrics", "false")); + } + + @BuildStep + public void tuneDefaultConfigs(BuildProducer logCategoryProducer) { + // Suppress noisy logs from Micrometer: + // ...A MeterFilter is being configured after a Meter has been registered to this registry... + logCategoryProducer.produce(new LogCategoryBuildItem( + "io.opentelemetry.instrumentation.micrometer.v1_5.OpenTelemetryMeterRegistry", + Level.ERROR)); + logCategoryProducer.produce(new LogCategoryBuildItem( + "io.micrometer.core.instrument.composite.CompositeMeterRegistry", + Level.ERROR)); + } + + @BuildStep + @Record(ExecutionTime.RUNTIME_INIT) + void createBridgeBean(OTelRuntimeConfig otelRuntimeConfig, + MicrometerOtelBridgeRecorder recorder, + BuildProducer syntheticBeanProducer) { + + if (otelRuntimeConfig.sdkDisabled()) { + return; // No point in creating the bridge if the SDK is disabled + } + + syntheticBeanProducer.produce(SyntheticBeanBuildItem.configure(MeterRegistry.class) + .defaultBean() + .setRuntimeInit() + .unremovable() + .scope(Singleton.class) + .addInjectionPoint(ParameterizedType.create(DotName.createSimple(Instance.class), + new Type[] { ClassType.create(DotName.createSimple(OpenTelemetry.class.getName())) }, null)) + .createWith(recorder.createBridge(otelRuntimeConfig)) + .done()); + } + + /** + * No point in activating the bridge if the OTel metrics if off or the exporter is none. 
+ */ + static class OtlpMetricsExporterEnabled implements BooleanSupplier { + OTelBuildConfig otelBuildConfig; + + public boolean getAsBoolean() { + return otelBuildConfig.metrics().enabled().orElse(Boolean.TRUE) && + !otelBuildConfig.metrics().exporter().stream() + .map(exporter -> exporter.toLowerCase(Locale.ROOT)) + .anyMatch(exporter -> exporter.contains("none")); + } + } +} diff --git a/extensions/micrometer-opentelemetry/deployment/src/main/resources/META-INF/services/io.smallrye.config.SmallRyeConfigBuilderCustomizer b/extensions/micrometer-opentelemetry/deployment/src/main/resources/META-INF/services/io.smallrye.config.SmallRyeConfigBuilderCustomizer new file mode 100644 index 0000000000000..51c9a69b4249d --- /dev/null +++ b/extensions/micrometer-opentelemetry/deployment/src/main/resources/META-INF/services/io.smallrye.config.SmallRyeConfigBuilderCustomizer @@ -0,0 +1 @@ +io.quarkus.micrometer.opentelemetry.deployment.MicrometerOTelBridgeConfigBuilderCustomizer \ No newline at end of file diff --git a/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/DistributionSummaryTest.java b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/DistributionSummaryTest.java new file mode 100644 index 0000000000000..741a99eafa8fd --- /dev/null +++ b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/DistributionSummaryTest.java @@ -0,0 +1,129 @@ +package io.quarkus.micrometer.opentelemetry.deployment; + +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.inject.Inject; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.micrometer.core.instrument.DistributionSummary; +import io.micrometer.core.instrument.MeterRegistry; +import io.opentelemetry.sdk.metrics.data.MetricData; +import io.quarkus.micrometer.opentelemetry.deployment.common.InMemoryMetricExporter; +import io.quarkus.micrometer.opentelemetry.deployment.common.InMemoryMetricExporterProvider; +import io.quarkus.test.QuarkusUnitTest; + +public class DistributionSummaryTest { + + @RegisterExtension + static final QuarkusUnitTest TEST = new QuarkusUnitTest() + .setArchiveProducer( + () -> ShrinkWrap.create(JavaArchive.class) + .addClasses(ManualHistogramBean.class) + .addClasses(InMemoryMetricExporter.class, InMemoryMetricExporterProvider.class) + .addAsResource(new StringAsset(InMemoryMetricExporterProvider.class.getCanonicalName()), + "META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.metrics.ConfigurableMetricExporterProvider") + .add(new StringAsset(""" + quarkus.otel.metrics.enabled=true\n + quarkus.otel.traces.exporter=none\n + quarkus.otel.logs.exporter=none\n + quarkus.otel.metrics.exporter=in-memory\n + quarkus.otel.metric.export.interval=300ms\n + quarkus.micrometer.binder-enabled-default=false\n + quarkus.micrometer.binder.http-client.enabled=true\n + quarkus.micrometer.binder.http-server.enabled=true\n + quarkus.micrometer.binder.http-server.match-patterns=/one=/two\n + 
quarkus.micrometer.binder.http-server.ignore-patterns=/two\n + quarkus.micrometer.binder.vertx.enabled=true\n + pingpong/mp-rest/url=${test.url}\n + quarkus.redis.devservices.enabled=false\n + """), + "application.properties")); + + @Inject + ManualHistogramBean manualHistogramBean; + + @Inject + InMemoryMetricExporter exporter; + + @Test + void histogramTest() { + manualHistogramBean.recordHistogram(); + + MetricData testSummary = exporter.getLastFinishedHistogramItem("testSummary", 4); + assertNotNull(testSummary); + assertThat(testSummary) + .hasDescription("This is a test distribution summary") + .hasUnit("things") + .hasHistogramSatisfying( + histogram -> histogram.hasPointsSatisfying( + points -> points + .hasSum(555.5) + .hasCount(4) + .hasAttributes(attributeEntry("tag", "value")))); + + MetricData textSummaryMax = exporter.getFinishedMetricItem("testSummary.max"); + assertNotNull(textSummaryMax); + assertThat(textSummaryMax) + .hasDescription("This is a test distribution summary") + .hasDoubleGaugeSatisfying( + gauge -> gauge.hasPointsSatisfying( + point -> point + .hasValue(500) + .hasAttributes(attributeEntry("tag", "value")))); + + MetricData testSummaryHistogram = exporter.getFinishedMetricItem("testSummary.histogram"); // present when SLOs are set + assertNotNull(testSummaryHistogram); + assertThat(testSummaryHistogram) + .hasDoubleGaugeSatisfying( + gauge -> gauge.hasPointsSatisfying( + point -> point + .hasValue(1) + .hasAttributes( + attributeEntry("le", "1"), + attributeEntry("tag", "value")), + point -> point + .hasValue(2) + .hasAttributes( + attributeEntry("le", "10"), + attributeEntry("tag", "value")), + point -> point + .hasValue(3) + .hasAttributes( + attributeEntry("le", "100"), + attributeEntry("tag", "value")), + point -> point + .hasValue(4) + .hasAttributes( + attributeEntry("le", "1000"), + attributeEntry("tag", "value")))); + } + + @ApplicationScoped + public static class ManualHistogramBean { + @Inject + MeterRegistry registry; + + public void recordHistogram() { + DistributionSummary summary = DistributionSummary.builder("testSummary") + .description("This is a test distribution summary") + .baseUnit("things") + .tags("tag", "value") + .serviceLevelObjectives(1, 10, 100, 1000) + .distributionStatisticBufferLength(10) + .register(registry); + + summary.record(0.5); + summary.record(5); + summary.record(50); + summary.record(500); + } + } +} diff --git a/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/MetricsDisabledTest.java b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/MetricsDisabledTest.java new file mode 100644 index 0000000000000..dc752e9f37384 --- /dev/null +++ b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/MetricsDisabledTest.java @@ -0,0 +1,77 @@ +package io.quarkus.micrometer.opentelemetry.deployment; + +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import static io.restassured.RestAssured.when; + +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import jakarta.inject.Inject; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import 
io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.sdk.metrics.data.MetricData; +import io.quarkus.micrometer.opentelemetry.deployment.common.InMemoryMetricExporter; +import io.quarkus.micrometer.opentelemetry.deployment.common.InMemoryMetricExporterProvider; +import io.quarkus.micrometer.opentelemetry.deployment.common.PingPongResource; +import io.quarkus.micrometer.opentelemetry.deployment.common.Util; +import io.quarkus.test.QuarkusUnitTest; +import io.restassured.RestAssured; + +public class MetricsDisabledTest { + @RegisterExtension + static final QuarkusUnitTest TEST = new QuarkusUnitTest() + .setArchiveProducer( + () -> ShrinkWrap.create(JavaArchive.class) + .addClasses(Util.class, + PingPongResource.class) + .addClasses(InMemoryMetricExporter.class, InMemoryMetricExporterProvider.class) + .addAsResource(new StringAsset(InMemoryMetricExporterProvider.class.getCanonicalName()), + "META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.metrics.ConfigurableMetricExporterProvider") + .add(new StringAsset(""" + quarkus.otel.sdk.disabled=true\n + quarkus.otel.metrics.enabled=true\n + quarkus.otel.traces.exporter=none\n + quarkus.otel.logs.exporter=none\n + quarkus.otel.metrics.exporter=in-memory\n + quarkus.otel.metric.export.interval=300ms\n + quarkus.micrometer.binder.http-client.enabled=true\n + quarkus.micrometer.binder.http-server.enabled=true\n + pingpong/mp-rest/url=${test.url}\n + quarkus.redis.devservices.enabled=false\n + """), + "application.properties")); + + @Inject + protected InMemoryMetricExporter metricExporter; + + protected static String mapToString(Map, ?> map) { + return (String) map.keySet().stream() + .map(key -> "" + key.getKey() + "=" + map.get(key)) + .collect(Collectors.joining(", ", "{", "}")); + } + + @BeforeEach + void setUp() { + metricExporter.reset(); + } + + @Test + void disabledTest() throws InterruptedException { + // The otel metrics are disabled + RestAssured.basePath = "/"; + when().get("/ping/one").then().statusCode(200); + + Thread.sleep(200); + + List metricData = metricExporter.getFinishedMetricItems(); + assertThat(metricData).isEmpty(); + } +} diff --git a/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/common/CountedResource.java b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/common/CountedResource.java new file mode 100644 index 0000000000000..29c772a5a403e --- /dev/null +++ b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/common/CountedResource.java @@ -0,0 +1,64 @@ +package io.quarkus.micrometer.opentelemetry.deployment.common; + +import static io.quarkus.micrometer.opentelemetry.deployment.compatibility.MicrometerCounterInterceptorTest.*; +import static java.util.concurrent.CompletableFuture.supplyAsync; + +import java.util.concurrent.CompletableFuture; + +import jakarta.enterprise.context.ApplicationScoped; + +import io.micrometer.core.annotation.Counted; +import io.micrometer.core.aop.MeterTag; +import io.smallrye.mutiny.Uni; + +@ApplicationScoped +public class CountedResource { + @Counted(value = "metric.none", recordFailuresOnly = true) + public void onlyCountFailures() { + } + + @Counted(value = "metric.all", extraTags = { "extra", "tag" }) + public void countAllInvocations(@MeterTag(key = "do_fail", resolver = TestValueResolver.class) boolean fail) { + if (fail) { + throw new NullPointerException("Failed on purpose"); + } + } + + 
@Counted(description = "nice description") + public void emptyMetricName(@MeterTag boolean fail) { + if (fail) { + throw new NullPointerException("Failed on purpose"); + } + } + + @Counted(value = "async.none", recordFailuresOnly = true) + public CompletableFuture onlyCountAsyncFailures(GuardedResult guardedResult) { + return supplyAsync(guardedResult::get); + } + + @Counted(value = "async.all", extraTags = { "extra", "tag" }) + public CompletableFuture countAllAsyncInvocations(GuardedResult guardedResult) { + return supplyAsync(guardedResult::get); + } + + @Counted + public CompletableFuture emptyAsyncMetricName(GuardedResult guardedResult) { + return supplyAsync(guardedResult::get); + } + + @Counted(value = "uni.none", recordFailuresOnly = true) + public Uni onlyCountUniFailures(GuardedResult guardedResult) { + return Uni.createFrom().item(guardedResult::get); + } + + @Counted(value = "uni.all", extraTags = { "extra", "tag" }) + public Uni countAllUniInvocations(GuardedResult guardedResult) { + return Uni.createFrom().item(guardedResult::get); + } + + @Counted + public Uni emptyUniMetricName(GuardedResult guardedResult) { + return Uni.createFrom().item(guardedResult::get); + } + +} diff --git a/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/common/GuardedResult.java b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/common/GuardedResult.java new file mode 100644 index 0000000000000..642bde50ba8ff --- /dev/null +++ b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/common/GuardedResult.java @@ -0,0 +1,34 @@ +package io.quarkus.micrometer.opentelemetry.deployment.common; + +public class GuardedResult { + + private boolean complete; + private NullPointerException withException; + + public synchronized Object get() { + while (!complete) { + try { + wait(); + } catch (InterruptedException e) { + // Intentionally empty + } + } + + if (withException == null) { + return new Object(); + } + + throw withException; + } + + public synchronized void complete() { + complete(null); + } + + public synchronized void complete(NullPointerException withException) { + this.complete = true; + this.withException = withException; + notifyAll(); + } + +} diff --git a/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/common/HelloResource.java b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/common/HelloResource.java new file mode 100644 index 0000000000000..a7d949f2ddac2 --- /dev/null +++ b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/common/HelloResource.java @@ -0,0 +1,23 @@ +package io.quarkus.micrometer.opentelemetry.deployment.common; + +import jakarta.inject.Singleton; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.OPTIONS; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; + +@Path("/hello") +@Singleton +public class HelloResource { + @GET + @Path("{message}") + public String hello(@PathParam("message") String message) { + return "hello " + message; + } + + @OPTIONS + @Path("{message}") + public String helloOptions(@PathParam("message") String message) { + return "hello " + message; + } +} diff --git 
a/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/common/InMemoryMetricExporter.java b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/common/InMemoryMetricExporter.java new file mode 100644 index 0000000000000..2a01838c480df --- /dev/null +++ b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/common/InMemoryMetricExporter.java @@ -0,0 +1,180 @@ +package io.quarkus.micrometer.opentelemetry.deployment.common; + +import static java.util.concurrent.TimeUnit.SECONDS; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Queue; +import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.function.Supplier; +import java.util.stream.Collectors; + +import jakarta.enterprise.context.ApplicationScoped; + +import org.awaitility.Awaitility; +import org.junit.jupiter.api.Assertions; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.metrics.InstrumentType; +import io.opentelemetry.sdk.metrics.data.AggregationTemporality; +import io.opentelemetry.sdk.metrics.data.MetricData; +import io.opentelemetry.sdk.metrics.data.PointData; +import io.opentelemetry.sdk.metrics.export.MetricExporter; +import io.quarkus.arc.Unremovable; + +@Unremovable +@ApplicationScoped +public class InMemoryMetricExporter implements MetricExporter { + + private final Queue finishedMetricItems = new ConcurrentLinkedQueue<>(); + private final AggregationTemporality aggregationTemporality = AggregationTemporality.CUMULATIVE; + private boolean isStopped = false; + + public MetricDataFilter metrics(final String name) { + return new MetricDataFilter(this, name); + } + + public MetricDataFilter get(final String name) { + return new MetricDataFilter(this, name); + } + + public MetricDataFilter find(final String name) { + return new MetricDataFilter(this, name); + } + + /* + * ignore points with /export in the route + */ + private static boolean notExporterPointData(PointData pointData) { + return pointData.getAttributes().asMap().entrySet().stream() + .noneMatch(entry -> entry.getKey().getKey().equals("uri") && + entry.getValue().toString().contains("/export")); + } + + private static boolean isPathFound(String path, Attributes attributes) { + if (path == null) { + return true;// any match + } + Object value = attributes.asMap().get(AttributeKey.stringKey("uri")); + if (value == null) { + return false; + } + return value.toString().equals(path); + } + + public MetricData getLastFinishedHistogramItem(String testSummary, int count) { + Awaitility.await().atMost(5, SECONDS) + .untilAsserted(() -> Assertions.assertEquals(count, getFinishedMetricItems(testSummary, null).size())); + List metricData = getFinishedMetricItems(testSummary, null); + return metricData.get(metricData.size() - 1);// get last added entry which will be the most recent + } + + public void assertCountDataPointsAtLeast(final String name, final String target, final int count) { + Awaitility.await().atMost(5, SECONDS) + .untilAsserted(() -> Assertions.assertTrue(count < countMaxPoints(name, target))); + } + + public void assertCountDataPointsAtLeastOrEqual(final String name, final String target, final int count) { + Awaitility.await().atMost(5, SECONDS) + .untilAsserted(() -> 
Assertions.assertTrue(count <= countMaxPoints(name, target))); + } + + public void assertCountDataPointsAtLeastOrEqual(Supplier<MetricDataFilter> tag, int count) { + Awaitility.await().atMost(50, SECONDS) + .untilAsserted(() -> Assertions.assertTrue(count <= tag.get().lastReadingPointsSize())); + } + + private Integer countMaxPoints(String name, String target) { + List<MetricData> metricData = getFinishedMetricItems(name, target); + if (metricData.isEmpty()) { + return 0; + } + int size = metricData.get(metricData.size() - 1).getData().getPoints().size(); + return size; + } + + /** + * Returns a {@code List} of the finished {@code Metric}s, represented by {@code MetricData}. + * + * @return a {@code List} of the finished {@code Metric}s. + */ + public List<MetricData> getFinishedMetricItems() { + return Collections.unmodifiableList(new ArrayList<>(finishedMetricItems)); + } + + public MetricData getFinishedMetricItem(String metricName) { + List<MetricData> metricData = getFinishedMetricItems(metricName, null); + if (metricData.isEmpty()) { + return null; + } + return metricData.get(metricData.size() - 1);// get last added entry which will be the most recent + } + + public List<MetricData> getFinishedMetricItems(final String name, final String target) { + return Collections.unmodifiableList(new ArrayList<>( + finishedMetricItems.stream() + .filter(metricData -> metricData.getName().equals(name)) + .filter(metricData -> metricData.getData().getPoints().stream() + .anyMatch(point -> isPathFound(target, point.getAttributes()))) + .collect(Collectors.toList()))); + } + + /** + * Clears the internal {@code List} of finished {@code Metric}s. + * + *
<p>
    + * Does not reset the state of this exporter if already shutdown. + */ + public void reset() { + finishedMetricItems.clear(); + } + + @Override + public AggregationTemporality getAggregationTemporality(InstrumentType instrumentType) { + return aggregationTemporality; + } + + /** + * Exports the collection of {@code Metric}s into the inmemory queue. + * + *
<p>
    + * If this is called after {@code shutdown}, this will return {@code ResultCode.FAILURE}. + */ + @Override + public CompletableResultCode export(Collection<MetricData> metrics) { + if (isStopped) { + return CompletableResultCode.ofFailure(); + } + finishedMetricItems.addAll(metrics); + return CompletableResultCode.ofSuccess(); + } + + /** + * The InMemory exporter does not batch metrics, so this method will immediately return with + * success. + * + * @return always Success + */ + @Override + public CompletableResultCode flush() { + return CompletableResultCode.ofSuccess(); + } + + /** + * Clears the internal {@code List} of finished {@code Metric}s. + * + *
<p>
    + * Any subsequent call to export() function on this MetricExporter, will return {@code + * CompletableResultCode.ofFailure()} + */ + @Override + public CompletableResultCode shutdown() { + isStopped = true; + finishedMetricItems.clear(); + return CompletableResultCode.ofSuccess(); + } +} diff --git a/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/common/InMemoryMetricExporterProvider.java b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/common/InMemoryMetricExporterProvider.java new file mode 100644 index 0000000000000..44aba77c9976b --- /dev/null +++ b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/common/InMemoryMetricExporterProvider.java @@ -0,0 +1,19 @@ +package io.quarkus.micrometer.opentelemetry.deployment.common; + +import jakarta.enterprise.inject.spi.CDI; + +import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties; +import io.opentelemetry.sdk.autoconfigure.spi.metrics.ConfigurableMetricExporterProvider; +import io.opentelemetry.sdk.metrics.export.MetricExporter; + +public class InMemoryMetricExporterProvider implements ConfigurableMetricExporterProvider { + @Override + public MetricExporter createExporter(ConfigProperties configProperties) { + return CDI.current().select(InMemoryMetricExporter.class).get(); + } + + @Override + public String getName() { + return "in-memory"; + } +} diff --git a/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/common/MetricDataFilter.java b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/common/MetricDataFilter.java new file mode 100644 index 0000000000000..f7db4076f8739 --- /dev/null +++ b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/common/MetricDataFilter.java @@ -0,0 +1,247 @@ +package io.quarkus.micrometer.opentelemetry.deployment.common; + +import static io.opentelemetry.semconv.HttpAttributes.HTTP_ROUTE; +import static io.opentelemetry.semconv.UrlAttributes.URL_PATH; + +import java.util.Collection; +import java.util.List; +import java.util.function.Function; +import java.util.function.Predicate; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.sdk.common.InstrumentationScopeInfo; +import io.opentelemetry.sdk.metrics.data.Data; +import io.opentelemetry.sdk.metrics.data.MetricData; +import io.opentelemetry.sdk.metrics.data.MetricDataType; +import io.opentelemetry.sdk.metrics.data.PointData; +import io.opentelemetry.sdk.resources.Resource; + +public class MetricDataFilter { + private Stream metricData; + + public MetricDataFilter(final InMemoryMetricExporter metricExporter, final String name) { + metricData = metricExporter.getFinishedMetricItems() + .stream() + .filter(metricData -> metricData.getName().equals(name)); + } + + public MetricDataFilter route(final String route) { + metricData = metricData.map(new Function() { + @Override + public MetricData apply(final MetricData metricData) { + return new MetricData() { + @Override + public Resource getResource() { + return metricData.getResource(); + } + + @Override + public InstrumentationScopeInfo getInstrumentationScopeInfo() { + return metricData.getInstrumentationScopeInfo(); + } + + @Override + public String getName() { + 
return metricData.getName(); + } + + @Override + public String getDescription() { + return metricData.getDescription(); + } + + @Override + public String getUnit() { + return metricData.getUnit(); + } + + @Override + public MetricDataType getType() { + return metricData.getType(); + } + + @Override + public Data getData() { + return new Data() { + @Override + public Collection getPoints() { + return metricData.getData().getPoints().stream().filter(new Predicate() { + @Override + public boolean test(final PointData pointData) { + String value = pointData.getAttributes().get(HTTP_ROUTE); + return value != null && value.equals(route); + } + }).collect(Collectors.toSet()); + } + }; + } + }; + } + }); + return this; + } + + public MetricDataFilter path(final String path) { + metricData = metricData.map(new Function() { + @Override + public MetricData apply(final MetricData metricData) { + return new MetricData() { + @Override + public Resource getResource() { + return metricData.getResource(); + } + + @Override + public InstrumentationScopeInfo getInstrumentationScopeInfo() { + return metricData.getInstrumentationScopeInfo(); + } + + @Override + public String getName() { + return metricData.getName(); + } + + @Override + public String getDescription() { + return metricData.getDescription(); + } + + @Override + public String getUnit() { + return metricData.getUnit(); + } + + @Override + public MetricDataType getType() { + return metricData.getType(); + } + + @Override + public Data getData() { + return new Data() { + @Override + public Collection getPoints() { + return metricData.getData().getPoints().stream().filter(new Predicate() { + @Override + public boolean test(final PointData pointData) { + String value = pointData.getAttributes().get(URL_PATH); + return value != null && value.equals(path); + } + }).collect(Collectors.toSet()); + } + }; + } + }; + } + }); + return this; + } + + public MetricDataFilter tag(final String key, final String value) { + return stringAttribute(key, value); + } + + public MetricDataFilter stringAttribute(final String key, final String value) { + metricData = metricData.map(new Function() { + @Override + public MetricData apply(final MetricData metricData) { + return new MetricData() { + @Override + public Resource getResource() { + return metricData.getResource(); + } + + @Override + public InstrumentationScopeInfo getInstrumentationScopeInfo() { + return metricData.getInstrumentationScopeInfo(); + } + + @Override + public String getName() { + return metricData.getName(); + } + + @Override + public String getDescription() { + return metricData.getDescription(); + } + + @Override + public String getUnit() { + return metricData.getUnit(); + } + + @Override + public MetricDataType getType() { + return metricData.getType(); + } + + @Override + public Data getData() { + return new Data() { + @Override + public Collection getPoints() { + return metricData.getData().getPoints().stream().filter(new Predicate() { + @Override + public boolean test(final PointData pointData) { + String v = pointData.getAttributes().get(AttributeKey.stringKey(key)); + boolean result = v != null && v.equals(value); + if (!result) { + System.out.println( + "\nNot Matching. 
Expected: " + key + " = " + value + " -> Found: " + v); + } + return result; + } + }).collect(Collectors.toSet()); + } + }; + } + }; + } + }); + return this; + } + + public List getAll() { + return metricData.collect(Collectors.toList()); + } + + public MetricData lastReading() { + return metricData.reduce((first, second) -> second) + .orElseThrow(() -> new IllegalArgumentException("Stream has no elements")); + } + + public int lastReadingPointsSize() { + return metricData.reduce((first, second) -> second) + .map(data -> data.getData().getPoints().size()) + .orElseThrow(() -> new IllegalArgumentException("Stream has no elements")); + } + + /** + * Returns the first point data of the last reading. + * Assumes only one data point can be present. + * + * @param pointDataClass + * @param + * @return + */ + public T lastReadingDataPoint(Class pointDataClass) { + List list = lastReading().getData().getPoints().stream() + .map(pointData -> (T) pointData) + .toList(); + + if (list.size() == 0) { + throw new IllegalArgumentException("Stream has no elements"); + } + if (list.size() > 1) { + throw new IllegalArgumentException("Stream has more than one element"); + } + return list.get(0); + } + + public int countPoints(final MetricData metricData) { + return metricData.getData().getPoints().size(); + } +} diff --git a/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/common/PingPongResource.java b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/common/PingPongResource.java new file mode 100644 index 0000000000000..6b2c5faba1d17 --- /dev/null +++ b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/common/PingPongResource.java @@ -0,0 +1,75 @@ +package io.quarkus.micrometer.opentelemetry.deployment.common; + +import java.util.concurrent.CompletionStage; + +import jakarta.inject.Inject; +import jakarta.inject.Singleton; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.core.Response; + +import org.eclipse.microprofile.rest.client.inject.RegisterRestClient; +import org.eclipse.microprofile.rest.client.inject.RestClient; + +@Path("/") +@Singleton +public class PingPongResource { + @RegisterRestClient(configKey = "pingpong") + public interface PingPongRestClient { + @GET + @Path("pong/{message}") + String pingpong(@PathParam("message") String message); + + @GET + @Path("pong/{message}") + CompletionStage asyncPingPong(@PathParam("message") String message); + } + + @Inject + @RestClient + PingPongRestClient pingRestClient; + + @GET + @Path("pong/{message}") + public Response pong(@PathParam("message") String message) { + if (message.equals("500")) { + return Response.status(500).build(); + } else if (message.equals("400")) { + return Response.status(400).build(); + } + return Response.ok(message, "text/plain").build(); + } + + @GET + @Path("ping/{message}") + public String ping(@PathParam("message") String message) { + try { + return pingRestClient.pingpong(message); + } catch (Exception ex) { + if (!"400".equals(message) && !"500".equals(message)) { + throw ex; + } + // expected exception + } + return message; + } + + @GET + @Path("async-ping/{message}") + public CompletionStage asyncPing(@PathParam("message") String message) { + return pingRestClient.asyncPingPong(message); + } + + @GET + @Path("one") + public String one() { + return "OK"; + } + + @GET + @Path("two") + 
public String two() { + return "OK"; + } +} diff --git a/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/common/ServletEndpoint.java b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/common/ServletEndpoint.java new file mode 100644 index 0000000000000..9a685085dd991 --- /dev/null +++ b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/common/ServletEndpoint.java @@ -0,0 +1,18 @@ +package io.quarkus.micrometer.opentelemetry.deployment.common; + +import java.io.IOException; + +import jakarta.servlet.ServletException; +import jakarta.servlet.annotation.WebServlet; +import jakarta.servlet.http.HttpServlet; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +@WebServlet(name = "ServletEndpoint", urlPatterns = "/servlet/*") +public class ServletEndpoint extends HttpServlet { + @Override + protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { + resp.setContentType("text/plain"); + resp.getWriter().println("OK"); + } +} diff --git a/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/common/TimedResource.java b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/common/TimedResource.java new file mode 100644 index 0000000000000..63b7bad1f375e --- /dev/null +++ b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/common/TimedResource.java @@ -0,0 +1,66 @@ +package io.quarkus.micrometer.opentelemetry.deployment.common; + +import static java.util.concurrent.CompletableFuture.supplyAsync; + +import java.time.Duration; +import java.time.temporal.ChronoUnit; +import java.util.concurrent.CompletableFuture; + +import jakarta.enterprise.context.ApplicationScoped; + +import io.micrometer.core.annotation.Timed; +import io.smallrye.mutiny.Uni; + +@ApplicationScoped +public class TimedResource { + @Timed(value = "call", extraTags = { "extra", "tag" }) + public void call(boolean fail) { + if (fail) { + throw new NullPointerException("Failed on purpose"); + } + + } + + @Timed(value = "longCall", extraTags = { "extra", "tag" }, longTask = true) + public void longCall(boolean fail) { + try { + Thread.sleep(3); + } catch (InterruptedException e) { + } + if (fail) { + throw new NullPointerException("Failed on purpose"); + } + } + + @Timed(value = "async.call", extraTags = { "extra", "tag" }) + public CompletableFuture asyncCall(GuardedResult guardedResult) { + return supplyAsync(guardedResult::get); + } + + @Timed(value = "uni.call", extraTags = { "extra", "tag" }) + public Uni uniCall(GuardedResult guardedResult) { + return Uni.createFrom().item(guardedResult::get); + } + + @Timed(value = "async.longCall", extraTags = { "extra", "tag" }, longTask = true) + public CompletableFuture longAsyncCall(GuardedResult guardedResult) { + try { + Thread.sleep(3); + } catch (InterruptedException e) { + } + return supplyAsync(guardedResult::get); + } + + @Timed(value = "uni.longCall", extraTags = { "extra", "tag" }, longTask = true) + public Uni longUniCall(GuardedResult guardedResult) { + return Uni.createFrom().item(guardedResult::get).onItem().delayIt().by(Duration.of(3, ChronoUnit.MILLIS)); + } + + @Timed(value = "alpha", extraTags = { "extra", "tag" }) + @Timed(value = "bravo", 
extraTags = { "extra", "tag" }) + public void repeatableCall(boolean fail) { + if (fail) { + throw new NullPointerException("Failed on purpose"); + } + } +} diff --git a/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/common/Util.java b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/common/Util.java new file mode 100644 index 0000000000000..41e193deb05af --- /dev/null +++ b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/common/Util.java @@ -0,0 +1,75 @@ +package io.quarkus.micrometer.opentelemetry.deployment.common; + +import static org.assertj.core.api.AssertionsForClassTypes.assertThat; + +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.logging.LogRecord; +import java.util.stream.Collectors; + +import org.junit.jupiter.api.Assertions; + +import io.micrometer.core.instrument.Meter; +import io.micrometer.core.instrument.MeterRegistry; +import io.micrometer.core.instrument.Tag; + +public class Util { + private Util() { + } + + static void assertMessage(String attribute, List records) { + // look through log records and make sure there is a message about the specific attribute + long i = records.stream().filter(x -> Arrays.stream(x.getParameters()).anyMatch(y -> y.equals(attribute))) + .count(); + Assertions.assertEquals(1, i); + } + + static String stackToString(Throwable t) { + StringBuilder sb = new StringBuilder().append("\n"); + while (t.getCause() != null) { + t = t.getCause(); + } + sb.append(t.getClass()).append(": ").append(t.getMessage()).append("\n"); + Arrays.asList(t.getStackTrace()).forEach(x -> sb.append("\t").append(x.toString()).append("\n")); + return sb.toString(); + } + + public static String foundServerRequests(MeterRegistry registry, String message) { + return message + "\nFound:\n" + Util.listMeters(registry, "http.server.requests"); + } + + public static String foundClientRequests(MeterRegistry registry, String message) { + return message + "\nFound:\n" + Util.listMeters(registry, "http.client.requests"); + } + + public static String listMeters(MeterRegistry registry, String meterName) { + return registry.find(meterName).meters().stream() + .map(x -> { + return x.getId().toString(); + }) + .collect(Collectors.joining("\n")); + } + + public static String listMeters(MeterRegistry registry, String meterName, final String tag) { + return registry.find(meterName).meters().stream() + .map(x -> { + return x.getId().getTag(tag); + }) + .collect(Collectors.joining(",")); + } + + public static void waitForMeters(Collection collection, int count) throws InterruptedException { + int i = 0; + do { + Thread.sleep(3); + } while (collection.size() < count && i++ < 10); + } + + public static void assertTags(Tag tag, Meter... 
meters) { + for (Meter meter : meters) { + assertThat(meter.getId().getTags().contains(tag)); + } + } + +} diff --git a/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/common/VertxWebEndpoint.java b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/common/VertxWebEndpoint.java new file mode 100644 index 0000000000000..3e573f2189470 --- /dev/null +++ b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/common/VertxWebEndpoint.java @@ -0,0 +1,24 @@ +package io.quarkus.micrometer.opentelemetry.deployment.common; + +import io.quarkus.vertx.web.Param; +import io.quarkus.vertx.web.Route; +import io.quarkus.vertx.web.Route.HttpMethod; +import io.quarkus.vertx.web.RouteBase; + +@RouteBase(path = "/vertx") +public class VertxWebEndpoint { + @Route(path = "item/:id", methods = HttpMethod.GET) + public String item(@Param("id") Integer id) { + return "message with id " + id; + } + + @Route(path = "item/:id/:sub", methods = HttpMethod.GET) + public String item(@Param("id") Integer id, @Param("sub") Integer sub) { + return "message with id " + id + " and sub " + sub; + } + + @Route(path = "echo/:msg", methods = { HttpMethod.HEAD, HttpMethod.GET, HttpMethod.OPTIONS }) + public String echo(@Param("msg") String msg) { + return "echo " + msg; + } +} diff --git a/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/compatibility/HttpCompatibilityTest.java b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/compatibility/HttpCompatibilityTest.java new file mode 100644 index 0000000000000..f91b4ffc96bce --- /dev/null +++ b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/compatibility/HttpCompatibilityTest.java @@ -0,0 +1,241 @@ +package io.quarkus.micrometer.opentelemetry.deployment.compatibility; + +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.equalTo; +import static io.restassured.RestAssured.when; +import static java.util.concurrent.TimeUnit.SECONDS; + +import java.util.Comparator; +import java.util.List; + +import jakarta.inject.Inject; + +import org.awaitility.Awaitility; +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.sdk.metrics.data.MetricData; +import io.quarkus.micrometer.opentelemetry.deployment.common.HelloResource; +import io.quarkus.micrometer.opentelemetry.deployment.common.InMemoryMetricExporter; +import io.quarkus.micrometer.opentelemetry.deployment.common.InMemoryMetricExporterProvider; +import io.quarkus.micrometer.opentelemetry.deployment.common.PingPongResource; +import io.quarkus.micrometer.opentelemetry.deployment.common.ServletEndpoint; +import io.quarkus.micrometer.opentelemetry.deployment.common.Util; +import io.quarkus.micrometer.opentelemetry.deployment.common.VertxWebEndpoint; +import io.quarkus.test.QuarkusUnitTest; +import io.restassured.RestAssured; + +/** + * Copy of 
io.quarkus.micrometer.deployment.binder.UriTagTest + */ +public class HttpCompatibilityTest { + @RegisterExtension + static final QuarkusUnitTest TEST = new QuarkusUnitTest() + .setArchiveProducer( + () -> ShrinkWrap.create(JavaArchive.class) + .addClasses(Util.class, + PingPongResource.class, + PingPongResource.PingPongRestClient.class, + ServletEndpoint.class, + VertxWebEndpoint.class, + HelloResource.class) + .addClasses(InMemoryMetricExporter.class, InMemoryMetricExporterProvider.class) + .addAsResource(new StringAsset(InMemoryMetricExporterProvider.class.getCanonicalName()), + "META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.metrics.ConfigurableMetricExporterProvider") + .add(new StringAsset(""" + quarkus.otel.metrics.exporter=in-memory\n + quarkus.otel.metric.export.interval=100ms\n + quarkus.micrometer.binder-enabled-default=false\n + quarkus.micrometer.binder.http-client.enabled=true\n + quarkus.micrometer.binder.http-server.enabled=true\n + quarkus.micrometer.binder.http-server.match-patterns=/one=/two\n + quarkus.micrometer.binder.http-server.ignore-patterns=/two\n + quarkus.micrometer.binder.vertx.enabled=true\n + pingpong/mp-rest/url=${test.url}\n + quarkus.redis.devservices.enabled=false\n + """), + "application.properties")); + public static final AttributeKey URI = AttributeKey.stringKey("uri"); + public static final AttributeKey METHOD = AttributeKey.stringKey("method"); + public static final AttributeKey STATUS = AttributeKey.stringKey("status"); + + @Inject + protected InMemoryMetricExporter metricExporter; + + @BeforeEach + void setUp() { + metricExporter.reset(); + } + + /** + * Same as io.quarkus.micrometer.deployment.binder.UriTagTest. + * Makes sure we are getting equivalent results in OTel. + * Micrometer uses timers and OTel uses histograms. + */ + @Test + void testHttpTimerToHistogramCompatibility() { + RestAssured.basePath = "/"; + + // Server GET vs. 
HEAD methods -- templated + when().get("/hello/one").then().statusCode(200); + when().get("/hello/two").then().statusCode(200); + when().head("/hello/three").then().statusCode(200); + when().head("/hello/four").then().statusCode(200); + when().get("/vertx/echo/thing1").then().statusCode(200); + when().get("/vertx/echo/thing2").then().statusCode(200); + when().head("/vertx/echo/thing3").then().statusCode(200); + when().head("/vertx/echo/thing4").then().statusCode(200); + + // Server -> Rest client -> Server (templated) + when().get("/ping/one").then().statusCode(200); + when().get("/ping/two").then().statusCode(200); + when().get("/ping/three").then().statusCode(200); + when().get("/ping/400").then().statusCode(200); + when().get("/ping/500").then().statusCode(200); + when().get("/async-ping/one").then().statusCode(200); + when().get("/async-ping/two").then().statusCode(200); + when().get("/async-ping/three").then().statusCode(200); + + // Server paths (templated) + when().get("/one").then().statusCode(200); + when().get("/two").then().statusCode(200); + when().get("/vertx/item/123").then().statusCode(200); + when().get("/vertx/item/1/123").then().statusCode(200); + // when().get("/servlet/12345").then().statusCode(200); + + Awaitility.await().atMost(10, SECONDS) + .untilAsserted(() -> { + final List metricDataList = metricExporter.getFinishedMetricItems("http.server.requests", null); + final MetricData metricData = metricDataList.get(metricDataList.size() - 1); // get last collected + assertServerMetrics(metricData); + }); + + final List metricDataList = metricExporter.getFinishedMetricItems("http.server.requests", null); + final MetricData metricData = metricDataList.stream() + .max(Comparator.comparingInt(data -> data.getData().getPoints().size())) + .get(); + + assertThat(metricData.getInstrumentationScopeInfo().getName()) + .isEqualTo("io.opentelemetry.micrometer-1.5"); + + // /one should map to /two, which is ignored. + // Neither should exist w/ timers because they were disabled in the configuration. + assertThat(metricData.getHistogramData().getPoints().stream() + .anyMatch(point -> point.getAttributes().get(URI).equals("/one") || + point.getAttributes().get(URI).equals("/two"))) + .isFalse(); + + // OTel metrics are not enabled + assertThat(metricExporter.getFinishedMetricItem("http.server.request.duration")).isNull(); + + metricExporter.assertCountDataPointsAtLeast("http.client.requests", null, 2); + final List clientMetricDataList = metricExporter.getFinishedMetricItems("http.client.requests", null); + + Awaitility.await().atMost(10, SECONDS) + .untilAsserted(() -> { + final MetricData clientMetricData = clientMetricDataList.get(clientMetricDataList.size() - 1); // get last collected + assertThat(clientMetricData.getInstrumentationScopeInfo().getName()) + .isEqualTo("io.opentelemetry.micrometer-1.5"); + assertThat(clientMetricData) + .hasName("http.client.requests") // in OTel it should be "http.server.request.duration" + .hasDescription("") // in OTel it should be "Duration of HTTP client requests." 
+ .hasUnit("ms") // OTel has seconds + .hasHistogramSatisfying(histogram -> histogram.isCumulative() + .hasPointsSatisfying( + // valid entries + point -> point.hasCount(1) + .hasAttributesSatisfying( + // "uri" not following conventions and should be "http.route" + equalTo(URI, "/pong/{message}"), + // Method not following conventions and should be "http.request.method" + equalTo(METHOD, "GET"), + // status_code not following conventions and should be + // "http.response.status_code" and it should use a long key and not a string key + equalTo(STATUS, "400")), + point -> point.hasCount(1) + .hasAttributesSatisfying( + equalTo(URI, "/pong/{message}"), + equalTo(METHOD, "GET"), + equalTo(STATUS, "500")), + point -> point.hasCount(6) // 3 sync requests and 3 async requests + .hasAttributesSatisfying( + equalTo(URI, "/pong/{message}"), + equalTo(METHOD, "GET"), + equalTo(STATUS, "200")))); + }); + } + + private static void assertServerMetrics(MetricData metricData) { + assertThat(metricData) + .hasName("http.server.requests") // in OTel it should be "http.server.request.duration" + .hasDescription("HTTP server request processing time") // in OTel it should be "Duration of HTTP server requests." + .hasUnit("ms") // OTel has seconds + .hasHistogramSatisfying(histogram -> histogram.isCumulative() + .hasPointsSatisfying( + // valid entries + point -> point.hasCount(1) + .hasAttributesSatisfying( + // "uri" not following conventions and should be "http.route" + equalTo(URI, "/vertx/item/{id}"), + // method not following conventions and should be "http.request.method" + equalTo(METHOD, "GET"), + // status_code not following conventions and should be + // "http.response.status_code" and it should use a long key and not a string key + equalTo(STATUS, "200")), + point -> point.hasCount(1) + .hasAttributesSatisfying( + equalTo(URI, "/vertx/item/{id}/{sub}"), + equalTo(METHOD, "GET"), + equalTo(STATUS, "200")), + point -> point.hasCount(2) + .hasAttributesSatisfying( + equalTo(URI, "/hello/{message}"), + equalTo(METHOD, "HEAD"), + equalTo(STATUS, "200")), + point -> point.hasCount(2) + .hasAttributesSatisfying( + equalTo(URI, "/hello/{message}"), + equalTo(METHOD, "GET"), + equalTo(STATUS, "200")), + point -> point.hasCount(2) + .hasAttributesSatisfying( + equalTo(URI, "/vertx/echo/{msg}"), + equalTo(METHOD, "HEAD"), + equalTo(STATUS, "200")), + point -> point.hasCount(2) + .hasAttributesSatisfying( + equalTo(URI, "/vertx/echo/{msg}"), + equalTo(METHOD, "GET"), + equalTo(STATUS, "200")), + point -> point.hasCount(5) // 3 x 200 + 400 + 500 status codes + .hasAttributesSatisfying( + equalTo(URI, "/ping/{message}"), + equalTo(METHOD, "GET"), + equalTo(STATUS, "200")), + point -> point.hasCount(3) + .hasAttributesSatisfying( + equalTo(URI, "/async-ping/{message}"), + equalTo(METHOD, "GET"), + equalTo(STATUS, "200")), + point -> point.hasCount(6) // 3 sync requests and 3 async requests + .hasAttributesSatisfying( + equalTo(URI, "/pong/{message}"), + equalTo(METHOD, "GET"), + equalTo(STATUS, "200")), + point -> point.hasCount(1) + .hasAttributesSatisfying( + equalTo(URI, "/pong/{message}"), + equalTo(METHOD, "GET"), + equalTo(STATUS, "500")), + point -> point.hasCount(1) + .hasAttributesSatisfying( + equalTo(URI, "/pong/{message}"), + equalTo(METHOD, "GET"), + equalTo(STATUS, "400")))); + } +} diff --git a/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/compatibility/JvmCompatibilityTest.java 
b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/compatibility/JvmCompatibilityTest.java new file mode 100644 index 0000000000000..5a4eba710f378 --- /dev/null +++ b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/compatibility/JvmCompatibilityTest.java @@ -0,0 +1,97 @@ +package io.quarkus.micrometer.opentelemetry.deployment.compatibility; + +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; + +import java.util.Comparator; +import java.util.List; + +import jakarta.inject.Inject; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.opentelemetry.sdk.metrics.data.MetricData; +import io.quarkus.micrometer.opentelemetry.deployment.common.InMemoryMetricExporter; +import io.quarkus.micrometer.opentelemetry.deployment.common.InMemoryMetricExporterProvider; +import io.quarkus.micrometer.opentelemetry.deployment.common.PingPongResource; +import io.quarkus.micrometer.opentelemetry.deployment.common.Util; +import io.quarkus.test.QuarkusUnitTest; + +public class JvmCompatibilityTest { + @RegisterExtension + static final QuarkusUnitTest TEST = new QuarkusUnitTest() + .setArchiveProducer( + () -> ShrinkWrap.create(JavaArchive.class) + .addClasses(Util.class, + PingPongResource.class, + PingPongResource.PingPongRestClient.class) + .addClasses(InMemoryMetricExporter.class, InMemoryMetricExporterProvider.class) + .addAsResource(new StringAsset(InMemoryMetricExporterProvider.class.getCanonicalName()), + "META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.metrics.ConfigurableMetricExporterProvider") + .add(new StringAsset(""" + quarkus.otel.metrics.exporter=in-memory\n + quarkus.otel.metric.export.interval=300ms\n + quarkus.micrometer.binder-enabled-default=false\n + quarkus.micrometer.binder.jvm=true\n + quarkus.redis.devservices.enabled=false\n + """), + "application.properties")); + + @Inject + protected InMemoryMetricExporter metricExporter; + + // No need to reset tests for JVM + + @Test + void testDoubleSum() { + metricExporter.assertCountDataPointsAtLeastOrEqual("jvm.threads.started", null, 1); + final List metricDataList = metricExporter.getFinishedMetricItems("jvm.threads.started", null); + + metricDataList.forEach(System.out::println); + + final MetricData metricData = metricDataList.stream() + .max(Comparator.comparingInt(data -> data.getData().getPoints().size())) + .get(); + + assertThat(metricData.getInstrumentationScopeInfo().getName()) + .isEqualTo("io.opentelemetry.micrometer-1.5"); + + assertThat(metricData) + .hasName("jvm.threads.started") + .hasDescription("The total number of application threads started in the JVM") + .hasUnit("threads") + .hasDoubleSumSatisfying(doubleSumAssert -> doubleSumAssert + .isMonotonic() + .isCumulative() + .hasPointsSatisfying(point -> point + .satisfies(actual -> assertThat(actual.getValue()).isGreaterThanOrEqualTo(1.0)) + .hasAttributesSatisfying(attributes -> attributes.isEmpty()))); + } + + @Test + void testDoubleGauge() { + metricExporter.assertCountDataPointsAtLeastOrEqual("jvm.classes.loaded", null, 1); + final List metricDataList = metricExporter.getFinishedMetricItems("jvm.classes.loaded", null); + + metricDataList.forEach(System.out::println); + + final MetricData metricData = 
metricDataList.stream() + .max(Comparator.comparingInt(data -> data.getData().getPoints().size())) + .get(); + + assertThat(metricData.getInstrumentationScopeInfo().getName()) + .isEqualTo("io.opentelemetry.micrometer-1.5"); + + assertThat(metricData) + .hasName("jvm.classes.loaded") + .hasDescription("The number of classes that are currently loaded in the Java virtual machine") + .hasUnit("classes") + .hasDoubleGaugeSatisfying(doubleSumAssert -> doubleSumAssert + .hasPointsSatisfying(point -> point + .satisfies(actual -> assertThat(actual.getValue()).isGreaterThanOrEqualTo(1.0)) + .hasAttributesSatisfying(attributes -> attributes.isEmpty()))); + } +} diff --git a/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/compatibility/MicrometerCounterInterceptorTest.java b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/compatibility/MicrometerCounterInterceptorTest.java new file mode 100644 index 0000000000000..5e7c557c7d99f --- /dev/null +++ b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/compatibility/MicrometerCounterInterceptorTest.java @@ -0,0 +1,125 @@ +package io.quarkus.micrometer.opentelemetry.deployment.compatibility; + +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; +import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; + +import jakarta.enterprise.context.ApplicationScoped; +import jakarta.inject.Inject; +import jakarta.inject.Singleton; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.micrometer.common.annotation.ValueResolver; +import io.micrometer.core.annotation.Counted; +import io.micrometer.core.aop.MeterTag; +import io.opentelemetry.sdk.metrics.data.MetricData; +import io.quarkus.micrometer.opentelemetry.deployment.common.InMemoryMetricExporter; +import io.quarkus.micrometer.opentelemetry.deployment.common.InMemoryMetricExporterProvider; +import io.quarkus.micrometer.opentelemetry.deployment.common.Util; +import io.quarkus.test.QuarkusUnitTest; + +/** + * Copy of io.quarkus.micrometer.runtime.MicrometerCounterInterceptorTest + */ +public class MicrometerCounterInterceptorTest { + + @RegisterExtension + static final QuarkusUnitTest TEST = new QuarkusUnitTest() + .setArchiveProducer( + () -> ShrinkWrap.create(JavaArchive.class) + .addClasses(Util.class, CountedBean.class, TestValueResolver.class) + .addClasses(InMemoryMetricExporter.class, InMemoryMetricExporterProvider.class) + .addAsResource(new StringAsset(InMemoryMetricExporterProvider.class.getCanonicalName()), + "META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.metrics.ConfigurableMetricExporterProvider") + .add(new StringAsset(""" + quarkus.otel.metrics.exporter=in-memory\n + quarkus.otel.metric.export.interval=300ms\n + quarkus.micrometer.binder.http-client.enabled=true\n + quarkus.micrometer.binder.http-server.enabled=true\n + quarkus.redis.devservices.enabled=false\n + """), + "application.properties")); + + @Inject + CountedBean countedBean; + + @Inject + InMemoryMetricExporter exporter; + + @BeforeEach + void setup() { + exporter.reset(); + } + + @Test + void 
testCountAllMetrics() { + countedBean.countAllInvocations(false); + Assertions.assertThrows(NullPointerException.class, () -> countedBean.countAllInvocations(true)); + + exporter.assertCountDataPointsAtLeastOrEqual("metric.all", null, 2); + + MetricData metricAll = exporter.getFinishedMetricItem("metric.all"); + assertThat(metricAll) + .isNotNull() + .hasName("metric.all") + .hasDescription("")// currently empty + .hasUnit("")// currently empty + .hasDoubleSumSatisfying(sum -> sum.hasPointsSatisfying( + point -> point + .hasValue(1d) + .hasAttributes(attributeEntry( + "class", + "io.quarkus.micrometer.opentelemetry.deployment.compatibility.MicrometerCounterInterceptorTest$CountedBean"), + attributeEntry("method", "countAllInvocations"), + attributeEntry("extra", "tag"), + attributeEntry("do_fail", "prefix_false"), + attributeEntry("exception", "none"), + attributeEntry("result", "success")), + point -> point + .hasValue(1d) + .hasAttributes(attributeEntry( + "class", + "io.quarkus.micrometer.opentelemetry.deployment.compatibility.MicrometerCounterInterceptorTest$CountedBean"), + attributeEntry("method", "countAllInvocations"), + attributeEntry("extra", "tag"), + attributeEntry("do_fail", "prefix_true"), + attributeEntry("exception", "NullPointerException"), + attributeEntry("result", "failure")))); + } + + @ApplicationScoped + public static class CountedBean { + @Counted(value = "metric.none", recordFailuresOnly = true) + public void onlyCountFailures() { + } + + @Counted(value = "metric.all", extraTags = { "extra", "tag" }) + public void countAllInvocations(@MeterTag(key = "do_fail", resolver = TestValueResolver.class) boolean fail) { + if (fail) { + throw new NullPointerException("Failed on purpose"); + } + } + + @Counted(description = "nice description") + public void emptyMetricName(@MeterTag boolean fail) { + if (fail) { + throw new NullPointerException("Failed on purpose"); + } + } + } + + @Singleton + public static class TestValueResolver implements ValueResolver { + @Override + public String resolve(Object parameter) { + return "prefix_" + parameter; + } + } + +} diff --git a/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/compatibility/MicrometerTimedInterceptorTest.java b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/compatibility/MicrometerTimedInterceptorTest.java new file mode 100644 index 0000000000000..41fec6cf81aab --- /dev/null +++ b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/compatibility/MicrometerTimedInterceptorTest.java @@ -0,0 +1,321 @@ +package io.quarkus.micrometer.opentelemetry.deployment.compatibility; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; + +import java.util.concurrent.CompletableFuture; +import java.util.function.Supplier; + +import jakarta.inject.Inject; + +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.opentelemetry.sdk.metrics.data.DoublePointData; +import io.opentelemetry.sdk.metrics.data.HistogramPointData; +import io.opentelemetry.sdk.metrics.data.LongPointData; +import io.quarkus.micrometer.opentelemetry.deployment.common.CountedResource; +import 
io.quarkus.micrometer.opentelemetry.deployment.common.GuardedResult; +import io.quarkus.micrometer.opentelemetry.deployment.common.InMemoryMetricExporter; +import io.quarkus.micrometer.opentelemetry.deployment.common.InMemoryMetricExporterProvider; +import io.quarkus.micrometer.opentelemetry.deployment.common.MetricDataFilter; +import io.quarkus.micrometer.opentelemetry.deployment.common.TimedResource; +import io.quarkus.test.QuarkusUnitTest; +import io.smallrye.mutiny.Uni; + +/** + * Copy of io.quarkus.micrometer.runtime.MicrometerTimedInterceptorTest + */ +public class MicrometerTimedInterceptorTest { + @RegisterExtension + static final QuarkusUnitTest config = new QuarkusUnitTest() + .withConfigurationResource("test-logging.properties") + .overrideConfigKey("quarkus.otel.metrics.exporter", "in-memory") + .overrideConfigKey("quarkus.otel.metric.export.interval", "100ms") + .overrideConfigKey("quarkus.micrometer.binder.mp-metrics.enabled", "false") + .overrideConfigKey("quarkus.micrometer.binder.vertx.enabled", "false") + .overrideConfigKey("quarkus.micrometer.registry-enabled-default", "false") + .withApplicationRoot((jar) -> jar + .addClass(CountedResource.class) + .addClass(TimedResource.class) + .addClass(GuardedResult.class) + .addClasses(InMemoryMetricExporter.class, InMemoryMetricExporterProvider.class, MetricDataFilter.class) + .addAsResource(new StringAsset(InMemoryMetricExporterProvider.class.getCanonicalName()), + "META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.metrics.ConfigurableMetricExporterProvider")); + + @Inject + TimedResource timed; + + @Inject + InMemoryMetricExporter metricExporter; + + @BeforeEach + void setUp() { + metricExporter.reset(); + } + + @Test + void testTimeMethod() { + timed.call(false); + + metricExporter.assertCountDataPointsAtLeastOrEqual("call", null, 1); + assertEquals(1, metricExporter.get("call") + .tag("method", "call") + .tag("class", "io.quarkus.micrometer.opentelemetry.deployment.common.TimedResource") + .tag("exception", "none") + .tag("extra", "tag") + .lastReadingDataPoint(HistogramPointData.class).getCount()); + + assertThat(metricExporter.get("call.max") + .tag("method", "call") + .tag("class", "io.quarkus.micrometer.opentelemetry.deployment.common.TimedResource") + .tag("exception", "none") + .tag("extra", "tag") + .lastReadingDataPoint(DoublePointData.class).getValue()) + .isGreaterThan(0); + } + + @Test + void testTimeMethod_Failed() { + assertThrows(NullPointerException.class, () -> timed.call(true)); + + Supplier metricFilterSupplier = () -> metricExporter.get("call") + .tag("method", "call") + .tag("class", "io.quarkus.micrometer.opentelemetry.deployment.common.TimedResource") + .tag("exception", "NullPointerException") + .tag("extra", "tag"); + + metricExporter.assertCountDataPointsAtLeastOrEqual(metricFilterSupplier, 1); + assertEquals(1, metricFilterSupplier.get() + .lastReadingDataPoint(HistogramPointData.class).getCount()); + + assertThat(metricExporter.get("call.max") + .tag("method", "call") + .tag("class", "io.quarkus.micrometer.opentelemetry.deployment.common.TimedResource") + .tag("exception", "NullPointerException") + .tag("extra", "tag") + .lastReadingDataPoint(DoublePointData.class).getValue()) + .isGreaterThan(0); + } + + @Test + void testTimeMethod_Async() { + GuardedResult guardedResult = new GuardedResult(); + CompletableFuture completableFuture = timed.asyncCall(guardedResult); + guardedResult.complete(); + completableFuture.join(); + + Supplier metricFilterSupplier = () -> 
metricExporter.get("async.call") + .tag("method", "asyncCall") + .tag("class", "io.quarkus.micrometer.opentelemetry.deployment.common.TimedResource") + .tag("exception", "none") + .tag("extra", "tag"); + + metricExporter.assertCountDataPointsAtLeastOrEqual(metricFilterSupplier, 1); + assertEquals(1, metricFilterSupplier.get() + .lastReadingDataPoint(HistogramPointData.class).getCount()); + + assertThat(metricExporter.get("async.call.max") + .tag("method", "asyncCall") + .tag("class", "io.quarkus.micrometer.opentelemetry.deployment.common.TimedResource") + .tag("exception", "none") + .tag("extra", "tag") + .lastReadingDataPoint(DoublePointData.class).getValue()) + .isGreaterThan(0); + } + + @Test + void testTimeMethod_AsyncFailed() { + GuardedResult guardedResult = new GuardedResult(); + CompletableFuture completableFuture = timed.asyncCall(guardedResult); + guardedResult.complete(new NullPointerException()); + assertThrows(java.util.concurrent.CompletionException.class, () -> completableFuture.join()); + + metricExporter.assertCountDataPointsAtLeastOrEqual("async.call", null, 1); + assertEquals(1, metricExporter.get("async.call") + .tag("method", "asyncCall") + .tag("class", "io.quarkus.micrometer.opentelemetry.deployment.common.TimedResource") + .tag("exception", "NullPointerException") + .tag("extra", "tag") + .lastReadingDataPoint(HistogramPointData.class).getCount()); + + assertThat(metricExporter.get("async.call.max") + .tag("method", "asyncCall") + .tag("class", "io.quarkus.micrometer.opentelemetry.deployment.common.TimedResource") + .tag("exception", "NullPointerException") + .tag("extra", "tag") + .lastReadingDataPoint(DoublePointData.class).getValue()) + .isGreaterThan(0); + } + + @Test + void testTimeMethod_Uni() { + GuardedResult guardedResult = new GuardedResult(); + Uni uni = timed.uniCall(guardedResult); + guardedResult.complete(); + uni.subscribe().asCompletionStage().join(); + + metricExporter.assertCountDataPointsAtLeastOrEqual("uni.call", null, 1); + assertEquals(1, metricExporter.get("uni.call") + .tag("method", "uniCall") + .tag("class", "io.quarkus.micrometer.opentelemetry.deployment.common.TimedResource") + .tag("exception", "none") + .tag("extra", "tag") + .lastReadingDataPoint(HistogramPointData.class).getCount()); + + assertThat(metricExporter.get("uni.call.max") + .tag("method", "uniCall") + .tag("class", "io.quarkus.micrometer.opentelemetry.deployment.common.TimedResource") + .tag("exception", "none") + .tag("extra", "tag") + .lastReadingDataPoint(DoublePointData.class).getValue()) + .isGreaterThan(0); + } + + @Test + void testTimeMethod_UniFailed() throws InterruptedException { + GuardedResult guardedResult = new GuardedResult(); + Uni uni = timed.uniCall(guardedResult); + guardedResult.complete(new NullPointerException()); + assertThrows(java.util.concurrent.CompletionException.class, + () -> uni.subscribe().asCompletionStage().join()); + + // this needs to be executed inline, otherwise the results will be old. 
+ Supplier metricFilterSupplier = () -> metricExporter.get("uni.call") + .tag("method", "uniCall") + .tag("class", "io.quarkus.micrometer.opentelemetry.deployment.common.TimedResource") + .tag("exception", "NullPointerException") + .tag("extra", "tag"); + + metricExporter.assertCountDataPointsAtLeastOrEqual(metricFilterSupplier, 1); + assertEquals(1, metricFilterSupplier.get() + .lastReadingDataPoint(HistogramPointData.class).getCount()); + + assertThat(metricExporter.get("uni.call.max") + .tag("method", "uniCall") + .tag("class", "io.quarkus.micrometer.opentelemetry.deployment.common.TimedResource") + .tag("exception", "NullPointerException") + .tag("extra", "tag") + .lastReadingDataPoint(DoublePointData.class).getValue()) + .isGreaterThan(0); + } + + @Test + void testTimeMethod_LongTaskTimer() { + timed.longCall(false); + metricExporter.assertCountDataPointsAtLeastOrEqual("longCall.active", null, 1); + assertEquals(0, metricExporter.get("longCall.active") + .tag("method", "longCall") + .tag("class", "io.quarkus.micrometer.opentelemetry.deployment.common.TimedResource") + .tag("extra", "tag") + .lastReadingDataPoint(LongPointData.class).getValue()); + } + + @Test + void testTimeMethod_LongTaskTimer_Failed() { + assertThrows(NullPointerException.class, () -> timed.longCall(true)); + + metricExporter.assertCountDataPointsAtLeastOrEqual("longCall.active", null, 1); + assertEquals(0, metricExporter.get("longCall.active") + .tag("method", "longCall") + .tag("class", "io.quarkus.micrometer.opentelemetry.deployment.common.TimedResource") + .tag("extra", "tag") + .lastReadingDataPoint(LongPointData.class).getValue()); + } + + @Test + void testTimeMethod_LongTaskTimer_Async() { + GuardedResult guardedResult = new GuardedResult(); + CompletableFuture completableFuture = timed.longAsyncCall(guardedResult); + guardedResult.complete(); + completableFuture.join(); + + metricExporter.assertCountDataPointsAtLeastOrEqual("async.longCall.active", null, 1); + assertEquals(0, metricExporter.get("async.longCall.active") + .tag("method", "longAsyncCall") + .tag("class", "io.quarkus.micrometer.opentelemetry.deployment.common.TimedResource") + .tag("extra", "tag") + .lastReadingDataPoint(LongPointData.class).getValue()); + } + + @Test + void testTimeMethod_LongTaskTimer_AsyncFailed() { + GuardedResult guardedResult = new GuardedResult(); + CompletableFuture completableFuture = timed.longAsyncCall(guardedResult); + guardedResult.complete(new NullPointerException()); + assertThrows(java.util.concurrent.CompletionException.class, () -> completableFuture.join()); + + metricExporter.assertCountDataPointsAtLeastOrEqual("async.longCall.active", null, 1); + assertEquals(0, metricExporter.get("async.longCall.active") + .tag("method", "longAsyncCall") + .tag("class", "io.quarkus.micrometer.opentelemetry.deployment.common.TimedResource") + .tag("extra", "tag") + .lastReadingDataPoint(LongPointData.class).getValue()); + } + + @Test + void testTimeMethod_LongTaskTimer_Uni() { + GuardedResult guardedResult = new GuardedResult(); + Uni uni = timed.longUniCall(guardedResult); + guardedResult.complete(); + uni.subscribe().asCompletionStage().join(); + + metricExporter.assertCountDataPointsAtLeastOrEqual("uni.longCall.active", null, 1); + assertEquals(0, metricExporter.get("uni.longCall.active") + .tag("method", "longUniCall") + .tag("class", "io.quarkus.micrometer.opentelemetry.deployment.common.TimedResource") + .tag("extra", "tag") + .lastReadingDataPoint(LongPointData.class).getValue()); + } + + @Test + void 
testTimeMethod_LongTaskTimer_UniFailed() throws InterruptedException { + GuardedResult guardedResult = new GuardedResult(); + Uni uni = timed.longUniCall(guardedResult); + guardedResult.complete(new NullPointerException()); + assertThrows(java.util.concurrent.CompletionException.class, + () -> uni.subscribe().asCompletionStage().join()); + + // Was "uni.longCall" Now is "uni.longCall.active" and "uni.longCall.duration" + // Metric was executed but now there are no active tasks + + metricExporter.assertCountDataPointsAtLeastOrEqual("uni.longCall.active", null, 1); + assertEquals(0, metricExporter.get("uni.longCall.active") + .tag("method", "longUniCall") + .tag("class", "io.quarkus.micrometer.opentelemetry.deployment.common.TimedResource") + .tag("extra", "tag") + .lastReadingDataPoint(LongPointData.class).getValue()); + + assertEquals(0, metricExporter.get("uni.longCall.duration") + .tag("method", "longUniCall") + .tag("class", "io.quarkus.micrometer.opentelemetry.deployment.common.TimedResource") + .tag("extra", "tag") + .lastReadingDataPoint(DoublePointData.class).getValue()); + + } + + @Test + void testTimeMethod_repeatable() { + timed.repeatableCall(false); + + metricExporter.assertCountDataPointsAtLeastOrEqual("alpha", null, 1); + + assertEquals(1, metricExporter.get("alpha") + .tag("method", "repeatableCall") + .tag("class", "io.quarkus.micrometer.opentelemetry.deployment.common.TimedResource") + .tag("exception", "none") + .tag("extra", "tag") + .lastReadingPointsSize()); + + assertEquals(1, metricExporter.get("bravo") + .tag("method", "repeatableCall") + .tag("class", "io.quarkus.micrometer.opentelemetry.deployment.common.TimedResource") + .tag("exception", "none") + .tag("extra", "tag") + .lastReadingPointsSize()); + } + +} diff --git a/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/compatibility/RestClientUriParameterTest.java b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/compatibility/RestClientUriParameterTest.java new file mode 100644 index 0000000000000..2b7a0609d878d --- /dev/null +++ b/extensions/micrometer-opentelemetry/deployment/src/test/java/io/quarkus/micrometer/opentelemetry/deployment/compatibility/RestClientUriParameterTest.java @@ -0,0 +1,94 @@ +package io.quarkus.micrometer.opentelemetry.deployment.compatibility; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import jakarta.inject.Inject; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.Produces; +import jakarta.ws.rs.core.MediaType; + +import org.eclipse.microprofile.config.inject.ConfigProperty; +import org.eclipse.microprofile.rest.client.inject.RegisterRestClient; +import org.eclipse.microprofile.rest.client.inject.RestClient; +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.opentelemetry.sdk.metrics.data.HistogramPointData; +import io.quarkus.micrometer.opentelemetry.deployment.common.InMemoryMetricExporter; +import io.quarkus.micrometer.opentelemetry.deployment.common.InMemoryMetricExporterProvider; +import io.quarkus.micrometer.opentelemetry.deployment.common.MetricDataFilter; +import io.quarkus.rest.client.reactive.Url; +import io.quarkus.test.QuarkusUnitTest; + +public class RestClientUriParameterTest { + + @RegisterExtension + static final QuarkusUnitTest TEST = new QuarkusUnitTest() + 
.withApplicationRoot( + jar -> jar.addClasses(Resource.class, Client.class) + .addClasses(InMemoryMetricExporter.class, + InMemoryMetricExporterProvider.class, + MetricDataFilter.class) + .addClasses(InMemoryMetricExporter.class, InMemoryMetricExporterProvider.class, + MetricDataFilter.class) + .addAsResource(new StringAsset(InMemoryMetricExporterProvider.class.getCanonicalName()), + "META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.metrics.ConfigurableMetricExporterProvider")) + .withConfigurationResource("test-logging.properties") + .overrideConfigKey("quarkus.otel.metrics.exporter", "in-memory") + .overrideConfigKey("quarkus.otel.metric.export.interval", "300ms") + .overrideConfigKey("quarkus.redis.devservices.enabled", "false") + .overrideConfigKey("quarkus.rest-client.\"client\".url", "http://does-not-exist.io"); + + @Inject + InMemoryMetricExporter metricExporter; + + @RestClient + Client client; + + @ConfigProperty(name = "quarkus.http.test-port") + Integer testPort; + + @Test + public void testOverride() { + String result = client.getById("http://localhost:" + testPort, "bar"); + assertEquals("bar", result); + + metricExporter.assertCountDataPointsAtLeastOrEqual("http.client.requests", null, 1); + assertEquals(1, metricExporter.find("http.client.requests") + .tag("uri", "/example/{id}") + .lastReadingDataPoint(HistogramPointData.class).getCount()); + } + + @Path("/example") + @RegisterRestClient(baseUri = "http://dummy") + public interface Client { + + @GET + @Path("/{id}") + String getById(@Url String baseUri, @PathParam("id") String id); + } + + @Path("/example") + public static class Resource { + + @RestClient + Client client; + + @GET + @Path("/{id}") + @Produces(MediaType.TEXT_PLAIN) + public String example() { + return "bar"; + } + + @GET + @Path("/call") + @Produces(MediaType.TEXT_PLAIN) + public String call() { + return client.getById("http://localhost:8080", "1"); + } + } +} diff --git a/extensions/observability-devservices/testcontainers/src/main/resources/empty.json b/extensions/micrometer-opentelemetry/deployment/src/test/resources/application.properties similarity index 100% rename from extensions/observability-devservices/testcontainers/src/main/resources/empty.json rename to extensions/micrometer-opentelemetry/deployment/src/test/resources/application.properties diff --git a/extensions/micrometer-opentelemetry/deployment/src/test/resources/test-logging.properties b/extensions/micrometer-opentelemetry/deployment/src/test/resources/test-logging.properties new file mode 100644 index 0000000000000..6eed6ab2596da --- /dev/null +++ b/extensions/micrometer-opentelemetry/deployment/src/test/resources/test-logging.properties @@ -0,0 +1,4 @@ +#quarkus.log.category."io.quarkus.micrometer".level=DEBUG +quarkus.log.category."io.quarkus.bootstrap".level=INFO +#quarkus.log.category."io.quarkus.arc".level=DEBUG +quarkus.log.category."io.netty".level=INFO diff --git a/extensions/micrometer-opentelemetry/pom.xml b/extensions/micrometer-opentelemetry/pom.xml new file mode 100644 index 0000000000000..5c5333580541a --- /dev/null +++ b/extensions/micrometer-opentelemetry/pom.xml @@ -0,0 +1,23 @@ + + + 4.0.0 + + + io.quarkus + quarkus-extensions-parent + 999-SNAPSHOT + ../pom.xml + + + quarkus-micrometer-opentelemetry-parent + Quarkus - Micrometer to OpenTelemetry Bridge - Parent + pom + + + runtime + deployment + + + \ No newline at end of file diff --git a/extensions/micrometer-opentelemetry/runtime/pom.xml b/extensions/micrometer-opentelemetry/runtime/pom.xml new file mode 
100644 index 0000000000000..15ae91441b756 --- /dev/null +++ b/extensions/micrometer-opentelemetry/runtime/pom.xml @@ -0,0 +1,76 @@ + + + 4.0.0 + + + io.quarkus + quarkus-micrometer-opentelemetry-parent + 999-SNAPSHOT + ../pom.xml + + + quarkus-micrometer-opentelemetry + Quarkus - Micrometer to OpenTelemetry Bridge - Runtime + Micrometer registry implemented by the OpenTelemetry SDK + + + + io.quarkus + quarkus-core + + + + io.quarkus + quarkus-arc + + + + io.quarkus + quarkus-micrometer + + + + io.quarkus + quarkus-opentelemetry + + + + io.opentelemetry.instrumentation + opentelemetry-micrometer-1.5 + + + io.micrometer + micrometer-core + + + + + + + + + io.quarkus + quarkus-extension-maven-plugin + + + maven-compiler-plugin + + + default-compile + + + + io.quarkus + quarkus-extension-processor + ${project.version} + + + + + + + + + \ No newline at end of file diff --git a/extensions/micrometer-opentelemetry/runtime/src/main/java/io/quarkus/micrometer/opentelemetry/runtime/MicrometerOtelBridgeRecorder.java b/extensions/micrometer-opentelemetry/runtime/src/main/java/io/quarkus/micrometer/opentelemetry/runtime/MicrometerOtelBridgeRecorder.java new file mode 100644 index 0000000000000..017a32d2cc281 --- /dev/null +++ b/extensions/micrometer-opentelemetry/runtime/src/main/java/io/quarkus/micrometer/opentelemetry/runtime/MicrometerOtelBridgeRecorder.java @@ -0,0 +1,45 @@ +package io.quarkus.micrometer.opentelemetry.runtime; + +import java.util.concurrent.TimeUnit; +import java.util.function.Function; + +import jakarta.enterprise.inject.Instance; +import jakarta.enterprise.util.TypeLiteral; + +import io.micrometer.core.instrument.Clock; +import io.micrometer.core.instrument.MeterRegistry; +import io.micrometer.core.instrument.Metrics; +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.instrumentation.micrometer.v1_5.OpenTelemetryMeterRegistry; +import io.quarkus.arc.SyntheticCreationalContext; +import io.quarkus.opentelemetry.runtime.config.runtime.OTelRuntimeConfig; +import io.quarkus.runtime.annotations.Recorder; + +@Recorder +public class MicrometerOtelBridgeRecorder { + + public Function, MeterRegistry> createBridge( + OTelRuntimeConfig otelRuntimeConfig) { + + return new Function<>() { + @Override + public MeterRegistry apply(SyntheticCreationalContext context) { + Instance openTelemetry = context.getInjectedReference(new TypeLiteral<>() { + }); + + if (openTelemetry.isUnsatisfied()) { + throw new IllegalStateException("OpenTelemetry instance not found"); + } + + MeterRegistry meterRegistry = OpenTelemetryMeterRegistry.builder(openTelemetry.get()) + .setPrometheusMode(false) + .setMicrometerHistogramGaugesEnabled(true) + .setBaseTimeUnit(TimeUnit.MILLISECONDS) + .setClock(Clock.SYSTEM) + .build(); + Metrics.addRegistry(meterRegistry); + return meterRegistry; + } + }; + } +} diff --git a/extensions/micrometer-opentelemetry/runtime/src/main/resources/META-INF/quarkus-extension.yaml b/extensions/micrometer-opentelemetry/runtime/src/main/resources/META-INF/quarkus-extension.yaml new file mode 100644 index 0000000000000..4e45880a891fe --- /dev/null +++ b/extensions/micrometer-opentelemetry/runtime/src/main/resources/META-INF/quarkus-extension.yaml @@ -0,0 +1,17 @@ +name: "Micrometer OpenTelemetry Bridge" +artifact: ${project.groupId}:${project.artifactId}:${project.version} +metadata: + keywords: + - "micrometer" + - "metrics" + - "metric" + - "opentelemetry" + - "tracing" + - "logging" + - "monitoring" + guide: "https://quarkus.io/guides/telemetry-micrometer-to-opentelemetry" 
+ categories: + - "observability" + status: "preview" + config: + - "quarkus.micrometer.otel." diff --git a/extensions/micrometer/deployment/pom.xml b/extensions/micrometer/deployment/pom.xml index 6cd5d423c2c9b..d91b14055188d 100644 --- a/extensions/micrometer/deployment/pom.xml +++ b/extensions/micrometer/deployment/pom.xml @@ -171,9 +171,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/MicrometerProcessor.java b/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/MicrometerProcessor.java index 0fa56ac6a302f..ec0d31cc7618a 100644 --- a/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/MicrometerProcessor.java +++ b/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/MicrometerProcessor.java @@ -13,6 +13,7 @@ import org.jboss.jandex.DotName; import org.jboss.jandex.IndexView; import org.jboss.jandex.MethodInfo; +import org.jboss.logmanager.Level; import io.micrometer.core.annotation.Counted; import io.micrometer.core.annotation.Timed; @@ -36,9 +37,11 @@ import io.quarkus.deployment.annotations.ExecutionTime; import io.quarkus.deployment.annotations.Record; import io.quarkus.deployment.builditem.CombinedIndexBuildItem; +import io.quarkus.deployment.builditem.LogCategoryBuildItem; import io.quarkus.deployment.builditem.ShutdownContextBuildItem; -import io.quarkus.deployment.builditem.SystemPropertyBuildItem; import io.quarkus.deployment.builditem.nativeimage.ReflectiveClassBuildItem; +import io.quarkus.deployment.builditem.nativeimage.ReflectiveMethodBuildItem; +import io.quarkus.deployment.logging.LoggingSetupBuildItem; import io.quarkus.deployment.metrics.MetricsCapabilityBuildItem; import io.quarkus.deployment.metrics.MetricsFactoryConsumerBuildItem; import io.quarkus.devui.spi.page.CardPageBuildItem; @@ -78,11 +81,16 @@ public class MicrometerProcessor { private static final DotName TIMED_INTERCEPTOR = DotName.createSimple(MicrometerTimedInterceptor.class.getName()); private static final DotName METER_TAG_SUPPORT = DotName.createSimple(MeterTagsSupport.class.getName()); + private static final List OPERATING_SYSTEM_BEAN_CLASS_NAMES = List.of( + "com.ibm.lang.management.OperatingSystemMXBean", // J9 + "com.sun.management.OperatingSystemMXBean" // HotSpot + ); + public static class MicrometerEnabled implements BooleanSupplier { MicrometerConfig mConfig; public boolean getAsBoolean() { - return mConfig.enabled; + return mConfig.enabled(); } } @@ -107,7 +115,7 @@ void registerEmptyExamplarProvider( MetricsCapabilityBuildItem metricsCapabilityPrometheusBuildItem( NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem) { return new MetricsCapabilityBuildItem(MetricsFactory.MICROMETER::equals, - nonApplicationRootPathBuildItem.resolvePath(mConfig.export.prometheus.path)); + nonApplicationRootPathBuildItem.resolvePath(mConfig.export().prometheus().path())); } @BuildStep @@ -115,7 +123,8 @@ UnremovableBeanBuildItem registerAdditionalBeans(CombinedIndexBuildItem indexBui BuildProducer providerClasses, BuildProducer reflectiveClasses, BuildProducer additionalBeans, - BuildProducer interceptorBindings) { + BuildProducer interceptorBindings, + BuildProducer reflectiveMethods) { // Create and keep some basic Providers additionalBeans.produce(AdditionalBeanBuildItem.builder() @@ -157,6 +166,14 @@ public List getAdditionalBindings() { "org.HdrHistogram.ConcurrentHistogram") .build()); + for (String beanClassName : 
OPERATING_SYSTEM_BEAN_CLASS_NAMES) { + String reason = "Accessed by io.micrometer.core.instrument.binder.system.ProcessorMetrics.ProcessorMetrics(java.lang.Iterable)"; + reflectiveMethods.produce(new ReflectiveMethodBuildItem(reason, false, beanClassName, "getCpuLoad")); + reflectiveMethods.produce(new ReflectiveMethodBuildItem(reason, false, beanClassName, "getSystemCpuLoad")); + reflectiveMethods.produce(new ReflectiveMethodBuildItem(reason, false, beanClassName, "getProcessCpuLoad")); + reflectiveMethods.produce(new ReflectiveMethodBuildItem(reason, false, beanClassName, "getProcessCpuTime")); + } + return UnremovableBeanBuildItem.beanTypes(METER_REGISTRY, METER_BINDER, METER_FILTER, METER_REGISTRY_CUSTOMIZER, NAMING_CONVENTION); } @@ -185,6 +202,18 @@ public void transform(TransformationContext ctx) { }); } + @BuildStep + void configLoggingLevel(BuildProducer logCategoryProducer) { + // Avoid users from receiving: + // [io.mic.cor.ins.com.CompositeMeterRegistry] (main) A MeterFilter is being configured after a Meter has been + // registered to this registry... + // It's unavoidable because of how Quarkus startup works and users cannot do anything about it. + // see: https://github.com/micrometer-metrics/micrometer/issues/4920#issuecomment-2298348202 + logCategoryProducer.produce(new LogCategoryBuildItem( + "io.micrometer.core.instrument.composite.CompositeMeterRegistry", + Level.ERROR)); + } + @BuildStep @Consume(BeanContainerBuildItem.class) @Record(ExecutionTime.STATIC_INIT) @@ -213,23 +242,13 @@ void registerExtensionMetrics(MicrometerRecorder recorder, @BuildStep @Consume(RootMeterRegistryBuildItem.class) + @Consume(LoggingSetupBuildItem.class) @Record(ExecutionTime.RUNTIME_INIT) void configureRegistry(MicrometerRecorder recorder, MicrometerConfig config, List providerClassItems, List metricsFactoryConsumerBuildItems, - ShutdownContextBuildItem shutdownContextBuildItem, - BuildProducer systemProperty) { - - // Avoid users from receiving: - // [io.mic.cor.ins.com.CompositeMeterRegistry] (main) A MeterFilter is being configured after a Meter has been - // registered to this registry... - // It's unavoidable because of how Quarkus startup works and users cannot do anything about it. 
- // see: https://github.com/micrometer-metrics/micrometer/issues/4920#issuecomment-2298348202 - systemProperty.produce( - new SystemPropertyBuildItem( - "quarkus.log.category.\"io.micrometer.core.instrument.composite.CompositeMeterRegistry\".level", - "ERROR")); + ShutdownContextBuildItem shutdownContextBuildItem) { Set> typeClasses = new HashSet<>(); for (MicrometerRegistryProviderBuildItem item : providerClassItems) { diff --git a/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/GrpcBinderProcessor.java b/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/GrpcBinderProcessor.java index fe5199205e449..b8ca6a67043ea 100644 --- a/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/GrpcBinderProcessor.java +++ b/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/GrpcBinderProcessor.java @@ -27,7 +27,7 @@ static class GrpcClientSupportEnabled implements BooleanSupplier { public boolean getAsBoolean() { return QuarkusClassLoader.isClassPresentAtRuntime(CLIENT_INTERCEPTOR) - && mConfig.checkBinderEnabledWithDefault(mConfig.binder.grpcClient); + && mConfig.checkBinderEnabledWithDefault(mConfig.binder().grpcClient()); } } @@ -36,7 +36,7 @@ static class GrpcServerSupportEnabled implements BooleanSupplier { public boolean getAsBoolean() { return QuarkusClassLoader.isClassPresentAtRuntime(SERVER_INTERCEPTOR) - && mConfig.checkBinderEnabledWithDefault(mConfig.binder.grpcServer); + && mConfig.checkBinderEnabledWithDefault(mConfig.binder().grpcServer()); } } diff --git a/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/HttpBinderProcessor.java b/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/HttpBinderProcessor.java index 1e48b656481b0..722a801f7b74c 100644 --- a/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/HttpBinderProcessor.java +++ b/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/HttpBinderProcessor.java @@ -42,8 +42,8 @@ static class HttpServerBinderEnabled implements BooleanSupplier { MicrometerConfig mConfig; public boolean getAsBoolean() { - return mConfig.checkBinderEnabledWithDefault(mConfig.binder.vertx) - && mConfig.checkBinderEnabledWithDefault(mConfig.binder.httpServer); + return mConfig.checkBinderEnabledWithDefault(mConfig.binder().vertx()) + && mConfig.checkBinderEnabledWithDefault(mConfig.binder().httpServer()); } } @@ -52,7 +52,7 @@ static class HttpClientBinderEnabled implements BooleanSupplier { public boolean getAsBoolean() { return QuarkusClassLoader.isClassPresentAtRuntime(REST_CLIENT_REQUEST_FILTER) - && mConfig.checkBinderEnabledWithDefault(mConfig.binder.httpClient); + && mConfig.checkBinderEnabledWithDefault(mConfig.binder().httpClient()); } } @@ -65,8 +65,8 @@ SyntheticBeanBuildItem enableHttpBinders(MicrometerRecorder recorder, VertxConfig vertxConfig, BuildProducer additionalBeans) { - boolean clientEnabled = buildTimeConfig.checkBinderEnabledWithDefault(buildTimeConfig.binder.httpClient); - boolean serverEnabled = buildTimeConfig.checkBinderEnabledWithDefault(buildTimeConfig.binder.httpServer); + boolean clientEnabled = buildTimeConfig.checkBinderEnabledWithDefault(buildTimeConfig.binder().httpClient()); + boolean serverEnabled = buildTimeConfig.checkBinderEnabledWithDefault(buildTimeConfig.binder().httpServer()); if (clientEnabled || serverEnabled) { // Protect from uri 
tag flood diff --git a/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/KafkaBinderProcessor.java b/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/KafkaBinderProcessor.java index 34e16a373375d..3e45cc8eac0d8 100644 --- a/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/KafkaBinderProcessor.java +++ b/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/KafkaBinderProcessor.java @@ -27,7 +27,7 @@ static class KafkaSupportEnabled implements BooleanSupplier { MicrometerConfig mConfig; public boolean getAsBoolean() { - return KAFKA_CONSUMER_CLASS_CLASS != null && mConfig.checkBinderEnabledWithDefault(mConfig.binder.kafka); + return KAFKA_CONSUMER_CLASS_CLASS != null && mConfig.checkBinderEnabledWithDefault(mConfig.binder().kafka()); } } @@ -35,7 +35,7 @@ static class KafkaStreamsSupportEnabled implements BooleanSupplier { MicrometerConfig mConfig; public boolean getAsBoolean() { - return KAFKA_STREAMS_CLASS_CLASS != null && mConfig.checkBinderEnabledWithDefault(mConfig.binder.kafka); + return KAFKA_STREAMS_CLASS_CLASS != null && mConfig.checkBinderEnabledWithDefault(mConfig.binder().kafka()); } } diff --git a/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/NettyBinderProcessor.java b/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/NettyBinderProcessor.java index 5c8b58b1890d4..9a666fef07f11 100644 --- a/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/NettyBinderProcessor.java +++ b/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/NettyBinderProcessor.java @@ -51,7 +51,7 @@ Class metricsClass() { public boolean getAsBoolean() { return metricsClass() != null && getCheckClass() != null - && getMicrometerConfig().checkBinderEnabledWithDefault(getMicrometerConfig().binder.netty); + && getMicrometerConfig().checkBinderEnabledWithDefault(getMicrometerConfig().binder().netty()); } } diff --git a/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/ReactiveMessagingProcessor.java b/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/ReactiveMessagingProcessor.java index 9736592cbc5aa..f99454d43262f 100644 --- a/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/ReactiveMessagingProcessor.java +++ b/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/ReactiveMessagingProcessor.java @@ -19,7 +19,7 @@ static class ReactiveMessagingSupportEnabled implements BooleanSupplier { public boolean getAsBoolean() { return MESSAGE_OBSERVATION_COLLECTOR_CLASS != null && - mConfig.checkBinderEnabledWithDefault(mConfig.binder.messaging); + mConfig.checkBinderEnabledWithDefault(mConfig.binder().messaging()); } } diff --git a/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/RedisBinderProcessor.java b/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/RedisBinderProcessor.java index 10f8a9da11707..d469e14e09941 100644 --- a/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/RedisBinderProcessor.java +++ b/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/RedisBinderProcessor.java @@ -18,7 +18,7 @@ static class RedisMetricsSupportEnabled 
implements BooleanSupplier { MicrometerConfig mConfig; public boolean getAsBoolean() { - return OBSERVABLE_CLIENT_CLASS != null && mConfig.checkBinderEnabledWithDefault(mConfig.binder.redis); + return OBSERVABLE_CLIENT_CLASS != null && mConfig.checkBinderEnabledWithDefault(mConfig.binder().redis()); } } diff --git a/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/StorkBinderProcessor.java b/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/StorkBinderProcessor.java index 06ed79c3ea715..d92e943b50fa1 100644 --- a/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/StorkBinderProcessor.java +++ b/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/StorkBinderProcessor.java @@ -18,7 +18,7 @@ static class StorkMetricsSupportEnabled implements BooleanSupplier { MicrometerConfig mConfig; public boolean getAsBoolean() { - return OBSERVABLE_CLIENT_CLASS != null && mConfig.checkBinderEnabledWithDefault(mConfig.binder.stork); + return OBSERVABLE_CLIENT_CLASS != null && mConfig.checkBinderEnabledWithDefault(mConfig.binder().stork()); } } diff --git a/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/VertxBinderProcessor.java b/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/VertxBinderProcessor.java index 4733a7ae6e161..eced952b0d0e3 100644 --- a/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/VertxBinderProcessor.java +++ b/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/VertxBinderProcessor.java @@ -33,7 +33,7 @@ static class VertxBinderEnabled implements BooleanSupplier { MicrometerConfig mConfig; public boolean getAsBoolean() { - return METRIC_OPTIONS_CLASS != null && mConfig.checkBinderEnabledWithDefault(mConfig.binder.vertx); + return METRIC_OPTIONS_CLASS != null && mConfig.checkBinderEnabledWithDefault(mConfig.binder().vertx()); } } diff --git a/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/VirtualThreadBinderProcessor.java b/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/VirtualThreadBinderProcessor.java index 1571bd95cd0ac..4c5cf8f9657c2 100644 --- a/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/VirtualThreadBinderProcessor.java +++ b/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/VirtualThreadBinderProcessor.java @@ -24,7 +24,7 @@ static class VirtualThreadSupportEnabled implements BooleanSupplier { public boolean getAsBoolean() { return VIRTUAL_THREAD_BINDER_CLASS != null // The binder is in another Micrometer artifact - && mConfig.checkBinderEnabledWithDefault(mConfig.binder.virtualThreads); + && mConfig.checkBinderEnabledWithDefault(mConfig.binder().virtualThreads()); } } diff --git a/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/mpmetrics/MicroprofileMetricsProcessor.java b/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/mpmetrics/MicroprofileMetricsProcessor.java index f31d1377b8f1b..f9ef7975c70f4 100644 --- a/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/mpmetrics/MicroprofileMetricsProcessor.java +++ 
b/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/binder/mpmetrics/MicroprofileMetricsProcessor.java @@ -53,7 +53,7 @@ static class MicroprofileMetricsEnabled implements BooleanSupplier { MicrometerConfig mConfig; public boolean getAsBoolean() { - return METRIC_ANNOTATION_CLASS != null && mConfig.checkBinderEnabledWithDefault(mConfig.binder.mpMetrics); + return METRIC_ANNOTATION_CLASS != null && mConfig.checkBinderEnabledWithDefault(mConfig.binder().mpMetrics()); } } diff --git a/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/export/JsonRegistryProcessor.java b/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/export/JsonRegistryProcessor.java index 125c2b5505239..e1a19b3b928ac 100644 --- a/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/export/JsonRegistryProcessor.java +++ b/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/export/JsonRegistryProcessor.java @@ -29,7 +29,7 @@ public static class JsonRegistryEnabled implements BooleanSupplier { MicrometerConfig mConfig; public boolean getAsBoolean() { - return mConfig.checkRegistryEnabledWithDefault(mConfig.export.json); + return mConfig.checkRegistryEnabledWithDefault(mConfig.export().json()); } } @@ -41,7 +41,7 @@ public void initializeJsonRegistry(MicrometerConfig config, BuildProducer additionalBeans, BuildProducer registries, NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem, - ManagementInterfaceBuildTimeConfig managementInterfaceBuildTimeConfig, + ManagementInterfaceBuildTimeConfig managementBuildTimeConfig, LaunchModeBuildItem launchModeBuildItem, JsonRecorder recorder) { additionalBeans.produce(AdditionalBeanBuildItem.builder() @@ -51,14 +51,14 @@ public void initializeJsonRegistry(MicrometerConfig config, routes.produce(nonApplicationRootPathBuildItem.routeBuilder() .management() - .routeFunction(config.export.json.path, recorder.route()) + .routeFunction(config.export().json().path(), recorder.route()) .routeConfigKey("quarkus.micrometer.export.json.path") .handler(recorder.getHandler()) .blockingRoute() .build()); - var path = nonApplicationRootPathBuildItem.resolveManagementPath(config.export.json.path, - managementInterfaceBuildTimeConfig, launchModeBuildItem); + var path = nonApplicationRootPathBuildItem.resolveManagementPath(config.export().json().path(), + managementBuildTimeConfig, launchModeBuildItem); log.debug("Initialized a JSON meter registry on path=" + path); registries.produce(new RegistryBuildItem("JSON", path)); diff --git a/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/export/PrometheusRegistryProcessor.java b/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/export/PrometheusRegistryProcessor.java index 405ebd2be7dca..cd15eccefee01 100644 --- a/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/export/PrometheusRegistryProcessor.java +++ b/extensions/micrometer/deployment/src/main/java/io/quarkus/micrometer/deployment/export/PrometheusRegistryProcessor.java @@ -42,7 +42,7 @@ public static class PrometheusEnabled implements BooleanSupplier { public boolean getAsBoolean() { return (REGISTRY_CLASS != null) && QuarkusClassLoader.isClassPresentAtRuntime(REGISTRY_CLASS_NAME) - && mConfig.checkRegistryEnabledWithDefault(mConfig.export.prometheus); + && mConfig.checkRegistryEnabledWithDefault(mConfig.export().prometheus()); } } @@ -61,7 +61,7 @@ 
MicrometerRegistryProviderBuildItem createPrometheusRegistry(MicrometerConfig co AdditionalBeanBuildItem.Builder builder = AdditionalBeanBuildItem.builder() .addBeanClass("io.quarkus.micrometer.runtime.export.PrometheusMeterRegistryProvider") .setUnremovable(); - if (config.export.prometheus.defaultRegistry) { + if (config.export().prometheus().defaultRegistry()) { builder.addBeanClass("io.quarkus.micrometer.runtime.export.PrometheusMeterRegistryProducer"); } additionalBeans.produce(builder.build()); @@ -96,17 +96,17 @@ void createPrometheusRoute(BuildProducer routes, BuildProducer registries, MicrometerConfig mConfig, NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem, - ManagementInterfaceBuildTimeConfig managementInterfaceBuildTimeConfig, + ManagementInterfaceBuildTimeConfig managementBuildTimeConfig, LaunchModeBuildItem launchModeBuildItem, PrometheusRecorder recorder) { - PrometheusConfigGroup pConfig = mConfig.export.prometheus; + PrometheusConfigGroup pConfig = mConfig.export().prometheus(); log.debug("PROMETHEUS CONFIG: " + pConfig); // Exact match for resources matched to the root path routes.produce(nonApplicationRootPathBuildItem.routeBuilder() .management() - .routeFunction(pConfig.path, recorder.route()) + .routeFunction(pConfig.path(), recorder.route()) .routeConfigKey("quarkus.micrometer.export.prometheus.path") .handler(recorder.getHandler()) .displayOnNotFoundPage("Metrics") @@ -116,7 +116,7 @@ void createPrometheusRoute(BuildProducer routes, // Match paths that begin with the deployment path routes.produce(nonApplicationRootPathBuildItem.routeBuilder() .management() - .routeFunction(pConfig.path + (pConfig.path.endsWith("/") ? "*" : "/*"), recorder.route()) + .routeFunction(pConfig.path() + (pConfig.path().endsWith("/") ? "*" : "/*"), recorder.route()) .handler(recorder.getHandler()) .blockingRoute() .build()); @@ -124,17 +124,17 @@ void createPrometheusRoute(BuildProducer routes, // Fallback paths (for non text/plain requests) routes.produce(nonApplicationRootPathBuildItem.routeBuilder() .management() - .routeFunction(pConfig.path, recorder.fallbackRoute()) + .routeFunction(pConfig.path(), recorder.fallbackRoute()) .handler(recorder.getFallbackHandler()) .build()); routes.produce(nonApplicationRootPathBuildItem.routeBuilder() .management() - .routeFunction(pConfig.path + (pConfig.path.endsWith("/") ? "*" : "/*"), recorder.fallbackRoute()) + .routeFunction(pConfig.path() + (pConfig.path().endsWith("/") ? 
"*" : "/*"), recorder.fallbackRoute()) .handler(recorder.getFallbackHandler()) .build()); - var path = nonApplicationRootPathBuildItem.resolveManagementPath(pConfig.path, - managementInterfaceBuildTimeConfig, launchModeBuildItem); + var path = nonApplicationRootPathBuildItem.resolveManagementPath(pConfig.path(), + managementBuildTimeConfig, launchModeBuildItem); registries.produce(new RegistryBuildItem("Prometheus", path)); } } diff --git a/extensions/micrometer/deployment/src/test/java/io/quarkus/micrometer/deployment/binder/NettyMetricsTest.java b/extensions/micrometer/deployment/src/test/java/io/quarkus/micrometer/deployment/binder/NettyMetricsTest.java index 49b15fa6b36a2..fc4ae7d9c18ae 100644 --- a/extensions/micrometer/deployment/src/test/java/io/quarkus/micrometer/deployment/binder/NettyMetricsTest.java +++ b/extensions/micrometer/deployment/src/test/java/io/quarkus/micrometer/deployment/binder/NettyMetricsTest.java @@ -28,19 +28,18 @@ import io.micrometer.core.instrument.Tag; import io.micrometer.core.instrument.Tags; import io.micrometer.core.instrument.binder.MeterBinder; -import io.micrometer.core.instrument.binder.netty4.NettyAllocatorMetrics; import io.micrometer.core.instrument.binder.netty4.NettyEventExecutorMetrics; import io.micrometer.core.instrument.simple.SimpleMeterRegistry; -import io.netty.buffer.PooledByteBufAllocator; -import io.netty.buffer.UnpooledByteBufAllocator; import io.netty.channel.EventLoopGroup; import io.netty.util.concurrent.EventExecutor; import io.netty.util.concurrent.SingleThreadEventExecutor; +import io.quarkus.micrometer.runtime.binder.netty.NettyAllocatorMetrics; +import io.quarkus.micrometer.runtime.binder.netty.NettyMetricsProvider; +import io.quarkus.micrometer.runtime.binder.netty.VertxNettyAllocatorMetricsProvider; import io.quarkus.micrometer.test.HelloResource; import io.quarkus.test.QuarkusUnitTest; import io.restassured.RestAssured; import io.vertx.core.Vertx; -import io.vertx.core.buffer.impl.VertxByteBufAllocator; import io.vertx.core.impl.VertxInternal; public class NettyMetricsTest { @@ -72,25 +71,25 @@ static void removeRegistry() { Vertx vertx; private static final Set NAM_PBBA_TAGS = Tags.of( - "id", String.valueOf(PooledByteBufAllocator.DEFAULT.hashCode()), + "name", NettyMetricsProvider.NETTY_DEFAULT_POOLED_ALLOCATOR_NAME, "allocator.type", "PooledByteBufAllocator") .stream() .collect(Collectors.toSet()); private static final Set NAM_UNPBBA_TAGS = Tags.of( - "id", String.valueOf(UnpooledByteBufAllocator.DEFAULT.hashCode()), + "name", NettyMetricsProvider.NETTY_DEFAULT_UNPOOLED_ALLOCATOR_NAME, "allocator.type", "UnpooledByteBufAllocator") .stream() .collect(Collectors.toSet()); private static final Set VX_NAM_PBBA_TAGS = Tags.of( - "id", String.valueOf(VertxByteBufAllocator.POOLED_ALLOCATOR.hashCode()), + "name", VertxNettyAllocatorMetricsProvider.VERTX_POOLED_ALLOCATOR_NAME, "allocator.type", "PooledByteBufAllocator") .stream() .collect(Collectors.toSet()); private static final Set VX_NAM_UNPBBA_TAGS = Tags.of( - "id", String.valueOf(VertxByteBufAllocator.UNPOOLED_ALLOCATOR.hashCode()), + "name", VertxNettyAllocatorMetricsProvider.VERTX_UNPOOLED_ALLOCATOR_NAME, "allocator.type", "UnpooledByteBufAllocator") .stream() .collect(Collectors.toSet()); @@ -98,19 +97,6 @@ static void removeRegistry() { private static final Tag HEAP_MEMORY = Tag.of(AllocatorMemoryKeyNames.MEMORY_TYPE.asString(), "heap"); private static final Tag DIRECT_MEMORY = Tag.of(AllocatorMemoryKeyNames.MEMORY_TYPE.asString(), "direct"); - enum AllocatorKeyNames 
implements KeyName { - ID { - public String asString() { - return "id"; - } - }, - ALLOCATOR_TYPE { - public String asString() { - return "allocator.type"; - } - }; - } - enum AllocatorMemoryKeyNames implements KeyName { MEMORY_TYPE { public String asString() { diff --git a/extensions/micrometer/deployment/src/test/java/io/quarkus/micrometer/deployment/binder/UriWithMaxTagMeterFilterTest.java b/extensions/micrometer/deployment/src/test/java/io/quarkus/micrometer/deployment/binder/UriWithMaxTagMeterFilterTest.java index 48cddb4fea857..9e96088b9d2b5 100644 --- a/extensions/micrometer/deployment/src/test/java/io/quarkus/micrometer/deployment/binder/UriWithMaxTagMeterFilterTest.java +++ b/extensions/micrometer/deployment/src/test/java/io/quarkus/micrometer/deployment/binder/UriWithMaxTagMeterFilterTest.java @@ -41,8 +41,8 @@ public class UriWithMaxTagMeterFilterTest { @Test public void test() throws Exception { - Assertions.assertEquals(1, httpServerConfig.maxUriTags); - Assertions.assertEquals(1, httpClientConfig.maxUriTags); + Assertions.assertEquals(1, httpServerConfig.maxUriTags()); + Assertions.assertEquals(1, httpClientConfig.maxUriTags()); // Server limit is constrained to 1 when().get("/ping/one").then().statusCode(200); diff --git a/extensions/micrometer/runtime/pom.xml b/extensions/micrometer/runtime/pom.xml index e1013832188d3..3a11dfd82aaf7 100644 --- a/extensions/micrometer/runtime/pom.xml +++ b/extensions/micrometer/runtime/pom.xml @@ -175,9 +175,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/HttpClientMetricsTagsContributor.java b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/HttpClientMetricsTagsContributor.java index 54fc08a5ffe1b..c6c01ba80f54f 100644 --- a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/HttpClientMetricsTagsContributor.java +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/HttpClientMetricsTagsContributor.java @@ -3,6 +3,7 @@ import io.micrometer.core.instrument.Tags; import io.micrometer.core.instrument.config.MeterFilter; import io.vertx.core.spi.observability.HttpRequest; +import io.vertx.core.spi.observability.HttpResponse; /** * Allows code to add additional Micrometer {@link Tags} to the metrics collected for completed HTTP client requests. @@ -20,5 +21,7 @@ public interface HttpClientMetricsTagsContributor { interface Context { HttpRequest request(); + + HttpResponse response(); } } diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/HttpServerMetricsTagsContributor.java b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/HttpServerMetricsTagsContributor.java index 88651e171b3d8..0dec6727f65f9 100644 --- a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/HttpServerMetricsTagsContributor.java +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/HttpServerMetricsTagsContributor.java @@ -3,6 +3,7 @@ import io.micrometer.core.instrument.Tags; import io.micrometer.core.instrument.config.MeterFilter; import io.vertx.core.http.HttpServerRequest; +import io.vertx.core.spi.observability.HttpResponse; /** * Allows code to add additional Micrometer {@link Tags} to the metrics collected for completed HTTP server requests. 
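For context on how the new HttpResponse response() accessor on the contributor Context is meant to be consumed: contributor implementations are CDI beans whose contribute(Context) result is merged into the tags of the completed request (see the VertxHttpServerMetrics.responseEnd change further below). A minimal sketch, assuming a hypothetical package, bean name and "status.class" tag key; only contribute(), request()/response() and HttpResponse#statusCode() come from this change:

package org.acme.observability; // hypothetical package

import jakarta.inject.Singleton;

import io.micrometer.core.instrument.Tags;
import io.quarkus.micrometer.runtime.HttpServerMetricsTagsContributor;

// Sketch: tag completed HTTP server requests with their status class (2xx, 4xx, ...)
// derived from the response now exposed on the contributor Context.
@Singleton
public class StatusClassTagContributor implements HttpServerMetricsTagsContributor {

    @Override
    public Tags contribute(Context context) {
        // statusCode() comes from io.vertx.core.spi.observability.HttpResponse,
        // the same type used by VertxHttpServerMetrics in this change
        int status = context.response().statusCode();
        return Tags.of("status.class", (status / 100) + "xx");
    }
}

The same shape would apply to HttpClientMetricsTagsContributor, whose Context gains the identical response() accessor.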
@@ -20,5 +21,7 @@ public interface HttpServerMetricsTagsContributor { interface Context { HttpServerRequest request(); + + HttpResponse response(); } } diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/MicrometerMetricsFactory.java b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/MicrometerMetricsFactory.java index e237dfc60abcf..09263379bbfb9 100644 --- a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/MicrometerMetricsFactory.java +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/MicrometerMetricsFactory.java @@ -25,7 +25,7 @@ public MicrometerMetricsFactory(MicrometerConfig config, MeterRegistry globalReg @Override public boolean metricsSystemSupported(String name) { return MetricsFactory.MICROMETER.equals(name) || - (MetricsFactory.MP_METRICS.equals(name) && config.binder.mpMetrics.enabled.orElse(false)); + (MetricsFactory.MP_METRICS.equals(name) && config.binder().mpMetrics().enabled().orElse(false)); } /** diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/MicrometerRecorder.java b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/MicrometerRecorder.java index 3ee697ae5835d..e5ce7edf68a20 100644 --- a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/MicrometerRecorder.java +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/MicrometerRecorder.java @@ -115,7 +115,7 @@ public void configureRegistries(MicrometerConfig config, List autoCloseables = new ArrayList<>(); // Base JVM Metrics - if (config.checkBinderEnabledWithDefault(() -> config.binder.jvm)) { + if (config.checkBinderEnabledWithDefault(() -> config.binder().jvm())) { new ClassLoaderMetrics().bindTo(Metrics.globalRegistry); JvmHeapPressureMetrics jvmHeapPressureMetrics = new JvmHeapPressureMetrics(); jvmHeapPressureMetrics.bindTo(Metrics.globalRegistry); @@ -131,7 +131,7 @@ public void configureRegistries(MicrometerConfig config, } // System metrics - if (config.checkBinderEnabledWithDefault(() -> config.binder.system)) { + if (config.checkBinderEnabledWithDefault(() -> config.binder().system())) { new UptimeMetrics().bindTo(Metrics.globalRegistry); new ProcessorMetrics().bindTo(Metrics.globalRegistry); new FileDescriptorMetrics().bindTo(Metrics.globalRegistry); diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/HttpBinderConfiguration.java b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/HttpBinderConfiguration.java index e9b3138571cf4..076589fd59253 100644 --- a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/HttpBinderConfiguration.java +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/HttpBinderConfiguration.java @@ -50,8 +50,8 @@ public HttpBinderConfiguration(boolean httpServerMetrics, boolean httpClientMetr serverEnabled = httpServerMetrics; clientEnabled = httpClientMetrics; - serverSuppress4xxErrors = serverConfig.suppress4xxErrors; - clientSuppress4xxErrors = clientConfig.suppress4xxErrors; + serverSuppress4xxErrors = serverConfig.suppress4xxErrors(); + clientSuppress4xxErrors = clientConfig.suppress4xxErrors(); if (serverEnabled) { Pattern defaultIgnore = null; @@ -59,7 +59,7 @@ public HttpBinderConfiguration(boolean httpServerMetrics, boolean httpClientMetr if (MicrometerRecorder.httpRootUri.equals(MicrometerRecorder.nonApplicationUri)) { // 
we can't set the default ignore in this case, as the paths overlap - } else if (serverConfig.suppressNonApplicationUris) { + } else if (serverConfig.suppressNonApplicationUris()) { defaultIgnore = Pattern.compile(MicrometerRecorder.nonApplicationUri + ".*"); } @@ -72,16 +72,16 @@ public HttpBinderConfiguration(boolean httpServerMetrics, boolean httpClientMetr // Handle deprecated/previous vertx properties as well serverIgnorePatterns = getIgnorePatterns( - serverConfig.ignorePatterns.isPresent() ? serverConfig.ignorePatterns : vertxConfig.ignorePatterns, + serverConfig.ignorePatterns().isPresent() ? serverConfig.ignorePatterns() : vertxConfig.ignorePatterns(), defaultIgnore); serverMatchPatterns = getMatchPatterns( - serverConfig.matchPatterns.isPresent() ? serverConfig.matchPatterns : vertxConfig.matchPatterns, + serverConfig.matchPatterns().isPresent() ? serverConfig.matchPatterns() : vertxConfig.matchPatterns(), defaultMatch); } if (clientEnabled) { - clientIgnorePatterns = getIgnorePatterns(clientConfig.ignorePatterns, null); - clientMatchPatterns = getMatchPatterns(clientConfig.matchPatterns, null); + clientIgnorePatterns = getIgnorePatterns(clientConfig.ignorePatterns(), null); + clientMatchPatterns = getMatchPatterns(clientConfig.matchPatterns(), null); } } diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/HttpMeterFilterProvider.java b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/HttpMeterFilterProvider.java index ee2c3ef86b7ab..e2939a3a4c9b0 100644 --- a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/HttpMeterFilterProvider.java +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/HttpMeterFilterProvider.java @@ -22,7 +22,7 @@ public class HttpMeterFilterProvider { public MeterFilter metricsHttpClientUriTagFilter(HttpClientConfig httpClientConfig) { if (binderConfiguration.isClientEnabled()) { return maximumAllowableUriTagsFilter(binderConfiguration.getHttpClientRequestsName(), - httpClientConfig.maxUriTags); + httpClientConfig.maxUriTags()); } return null; } @@ -32,7 +32,7 @@ public MeterFilter metricsHttpClientUriTagFilter(HttpClientConfig httpClientConf public MeterFilter metricsHttpServerUriTagFilter(HttpServerConfig httpServerConfig) { if (binderConfiguration.isServerEnabled()) { return maximumAllowableUriTagsFilter(binderConfiguration.getHttpServerRequestsName(), - httpServerConfig.maxUriTags); + httpServerConfig.maxUriTags()); } return null; } @@ -42,7 +42,7 @@ public MeterFilter metricsHttpServerUriTagFilter(HttpServerConfig httpServerConf public MeterFilter metricsHttpPushUriTagFilter(HttpServerConfig httpServerConfig) { if (binderConfiguration.isServerEnabled()) { return maximumAllowableUriTagsFilter(binderConfiguration.getHttpServerPushName(), - httpServerConfig.maxUriTags); + httpServerConfig.maxUriTags()); } return null; } @@ -52,7 +52,7 @@ public MeterFilter metricsHttpPushUriTagFilter(HttpServerConfig httpServerConfig public MeterFilter metricsHttpWebSocketsUriTagFilter(HttpServerConfig httpServerConfig) { if (binderConfiguration.isServerEnabled()) { return maximumAllowableUriTagsFilter(binderConfiguration.getHttpServerWebSocketConnectionsName(), - httpServerConfig.maxUriTags); + httpServerConfig.maxUriTags()); } return null; } diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/netty/NettyAllocatorMetrics.java 
b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/netty/NettyAllocatorMetrics.java new file mode 100644 index 0000000000000..12a9063c37df0 --- /dev/null +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/netty/NettyAllocatorMetrics.java @@ -0,0 +1,105 @@ +package io.quarkus.micrometer.runtime.binder.netty; + +import io.micrometer.core.instrument.Gauge; +import io.micrometer.core.instrument.MeterRegistry; +import io.micrometer.core.instrument.Tags; +import io.micrometer.core.instrument.binder.MeterBinder; +import io.netty.buffer.ByteBufAllocatorMetric; +import io.netty.buffer.ByteBufAllocatorMetricProvider; +import io.netty.buffer.PooledByteBufAllocator; +import io.netty.buffer.PooledByteBufAllocatorMetric; + +/** + * {@link MeterBinder} for Netty memory allocators. + *

    + * This class is based on the Micrometer NettyAllocatorMetrics class, but removes the "id" from the tags, as it is + * computed from the `hashCode`, which does not allow aggregation across processes. + * Instead, it gets a {@code name} label indicating a unique name for the allocator. + */ +public class NettyAllocatorMetrics implements MeterBinder { + + private final ByteBufAllocatorMetricProvider allocator; + private final String name; + + /** + * Create a binder instance for the given allocator. + * + * @param name the unique name for the allocator + * @param allocator the {@code ByteBuf} allocator to instrument + */ + public NettyAllocatorMetrics(String name, ByteBufAllocatorMetricProvider allocator) { + this.name = name; + this.allocator = allocator; + } + + @Override + public void bindTo(MeterRegistry registry) { + ByteBufAllocatorMetric allocatorMetric = this.allocator.metric(); + Tags tags = Tags.of( + NettyMeters.AllocatorKeyNames.NAME.asString(), this.name, + NettyMeters.AllocatorKeyNames.ALLOCATOR_TYPE.asString(), this.allocator.getClass().getSimpleName()); + + Gauge + .builder(NettyMeters.ALLOCATOR_MEMORY_USED.getName(), allocatorMetric, + ByteBufAllocatorMetric::usedHeapMemory) + .tags(tags.and(NettyMeters.AllocatorMemoryKeyNames.MEMORY_TYPE.asString(), "heap")) + .register(registry); + + Gauge + .builder(NettyMeters.ALLOCATOR_MEMORY_USED.getName(), allocatorMetric, + ByteBufAllocatorMetric::usedDirectMemory) + .tags(tags.and(NettyMeters.AllocatorMemoryKeyNames.MEMORY_TYPE.asString(), "direct")) + .register(registry); + + if (this.allocator instanceof PooledByteBufAllocator pooledByteBufAllocator) { + PooledByteBufAllocatorMetric pooledAllocatorMetric = pooledByteBufAllocator.metric(); + + Gauge + .builder(NettyMeters.ALLOCATOR_MEMORY_PINNED.getName(), pooledByteBufAllocator, + PooledByteBufAllocator::pinnedHeapMemory) + .tags(tags.and(NettyMeters.AllocatorMemoryKeyNames.MEMORY_TYPE.asString(), "heap")) + .register(registry); + + Gauge + .builder(NettyMeters.ALLOCATOR_MEMORY_PINNED.getName(), pooledByteBufAllocator, + PooledByteBufAllocator::pinnedDirectMemory) + .tags(tags.and(NettyMeters.AllocatorMemoryKeyNames.MEMORY_TYPE.asString(), "direct")) + .register(registry); + + Gauge + .builder(NettyMeters.ALLOCATOR_POOLED_ARENAS.getName(), pooledAllocatorMetric, + PooledByteBufAllocatorMetric::numHeapArenas) + .tags(tags.and(NettyMeters.AllocatorMemoryKeyNames.MEMORY_TYPE.asString(), "heap")) + .register(registry); + Gauge + .builder(NettyMeters.ALLOCATOR_POOLED_ARENAS.getName(), pooledAllocatorMetric, + PooledByteBufAllocatorMetric::numDirectArenas) + .tags(tags.and(NettyMeters.AllocatorMemoryKeyNames.MEMORY_TYPE.asString(), "direct")) + .register(registry); + + Gauge + .builder(NettyMeters.ALLOCATOR_POOLED_CACHE_SIZE.getName(), pooledAllocatorMetric, + PooledByteBufAllocatorMetric::normalCacheSize) + .tags(tags.and(NettyMeters.AllocatorPooledCacheKeyNames.CACHE_TYPE.asString(), "normal")) + .register(registry); + Gauge + .builder(NettyMeters.ALLOCATOR_POOLED_CACHE_SIZE.getName(), pooledAllocatorMetric, + PooledByteBufAllocatorMetric::smallCacheSize) + .tags(tags.and(NettyMeters.AllocatorPooledCacheKeyNames.CACHE_TYPE.asString(), "small")) + .register(registry); + + Gauge + .builder(NettyMeters.ALLOCATOR_POOLED_THREADLOCAL_CACHES.getName(), pooledAllocatorMetric, + PooledByteBufAllocatorMetric::numThreadLocalCaches) + .tags(tags) + .register(registry); + + Gauge + .builder(NettyMeters.ALLOCATOR_POOLED_CHUNK_SIZE.getName(), pooledAllocatorMetric, 
PooledByteBufAllocatorMetric::chunkSize) + .tags(tags) + .register(registry); + } + } + +} diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/netty/NettyMeters.java b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/netty/NettyMeters.java new file mode 100644 index 0000000000000..9a2ab393c0a68 --- /dev/null +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/netty/NettyMeters.java @@ -0,0 +1,239 @@ +package io.quarkus.micrometer.runtime.binder.netty; + +import io.micrometer.common.docs.KeyName; +import io.micrometer.core.instrument.Meter; +import io.micrometer.core.instrument.binder.BaseUnits; +import io.micrometer.core.instrument.docs.MeterDocumentation; + +/** + * Copy of the {@link NettyMeters} enum from the Micrometer NettyMetrics class in order to replace the {@code ID} tag + * with {@code NAME}. + */ +public enum NettyMeters implements MeterDocumentation { + + /** + * Size of memory used by the allocator, in bytes. + */ + ALLOCATOR_MEMORY_USED { + @Override + public String getName() { + return "netty.allocator.memory.used"; + } + + @Override + public Meter.Type getType() { + return Meter.Type.GAUGE; + } + + @Override + public String getBaseUnit() { + return BaseUnits.BYTES; + } + + @Override + public KeyName[] getKeyNames() { + return KeyName.merge(AllocatorKeyNames.values(), AllocatorMemoryKeyNames.values()); + } + }, + + /** + * Size of memory used by allocated buffers, in bytes. + */ + ALLOCATOR_MEMORY_PINNED { + @Override + public String getName() { + return "netty.allocator.memory.pinned"; + } + + @Override + public Meter.Type getType() { + return Meter.Type.GAUGE; + } + + @Override + public String getBaseUnit() { + return BaseUnits.BYTES; + } + + @Override + public KeyName[] getKeyNames() { + return KeyName.merge(AllocatorKeyNames.values(), AllocatorMemoryKeyNames.values()); + } + }, + + /** + * Number of arenas for a pooled allocator. + */ + ALLOCATOR_POOLED_ARENAS { + @Override + public String getName() { + return "netty.allocator.pooled.arenas"; + } + + @Override + public Meter.Type getType() { + return Meter.Type.GAUGE; + } + + @Override + public KeyName[] getKeyNames() { + return KeyName.merge(AllocatorKeyNames.values(), AllocatorMemoryKeyNames.values()); + } + }, + + /** + * Size of the cache for a pooled allocator, in bytes. + */ + ALLOCATOR_POOLED_CACHE_SIZE { + @Override + public String getName() { + return "netty.allocator.pooled.cache.size"; + } + + @Override + public Meter.Type getType() { + return Meter.Type.GAUGE; + } + + @Override + public String getBaseUnit() { + return BaseUnits.BYTES; + } + + @Override + public KeyName[] getKeyNames() { + return KeyName.merge(AllocatorKeyNames.values(), AllocatorPooledCacheKeyNames.values()); + } + }, + + /** + * Number of ThreadLocal caches for a pooled allocator. + */ + ALLOCATOR_POOLED_THREADLOCAL_CACHES { + @Override + public String getName() { + return "netty.allocator.pooled.threadlocal.caches"; + } + + @Override + public Meter.Type getType() { + return Meter.Type.GAUGE; + } + + @Override + public KeyName[] getKeyNames() { + return AllocatorKeyNames.values(); + } + }, + + /** + * Size of memory chunks for a pooled allocator, in bytes. 
+ */ + ALLOCATOR_POOLED_CHUNK_SIZE { + @Override + public String getName() { + return "netty.allocator.pooled.chunk.size"; + } + + @Override + public Meter.Type getType() { + return Meter.Type.GAUGE; + } + + @Override + public String getBaseUnit() { + return BaseUnits.BYTES; + } + + @Override + public KeyName[] getKeyNames() { + return AllocatorKeyNames.values(); + } + }, + + /** + * Number of pending tasks in the event executor. + */ + EVENT_EXECUTOR_TASKS_PENDING { + @Override + public String getName() { + return "netty.eventexecutor.tasks.pending"; + } + + @Override + public Meter.Type getType() { + return Meter.Type.GAUGE; + } + + @Override + public KeyName[] getKeyNames() { + return EventExecutorTasksPendingKeyNames.values(); + } + }; + + enum AllocatorKeyNames implements KeyName { + + /** + * Unique runtime identifier for the allocator. + */ + NAME { + @Override + public String asString() { + return "name"; + } + }, + /** + * Allocator's class simple name. + */ + ALLOCATOR_TYPE { + @Override + public String asString() { + return "allocator.type"; + } + } + + } + + enum AllocatorMemoryKeyNames implements KeyName { + + /** + * Type of memory allocated: {@code "heap"} memory or {@code "direct"} memory. + */ + MEMORY_TYPE { + @Override + public String asString() { + return "memory.type"; + } + } + + } + + enum AllocatorPooledCacheKeyNames implements KeyName { + + /** + * Type of cache pages for this cache. + */ + CACHE_TYPE { + @Override + public String asString() { + return "cache.type"; + } + } + + } + + enum EventExecutorTasksPendingKeyNames implements KeyName { + + /** + * Event loop name. + */ + NAME { + @Override + public String asString() { + return "name"; + } + } + + } + +} \ No newline at end of file diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/netty/NettyMetricsProvider.java b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/netty/NettyMetricsProvider.java index a7c2000d67071..64826256d02b4 100644 --- a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/netty/NettyMetricsProvider.java +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/netty/NettyMetricsProvider.java @@ -4,23 +4,26 @@ import jakarta.inject.Singleton; import io.micrometer.core.instrument.binder.MeterBinder; -import io.micrometer.core.instrument.binder.netty4.NettyAllocatorMetrics; import io.netty.buffer.PooledByteBufAllocator; import io.netty.buffer.UnpooledByteBufAllocator; @Singleton public class NettyMetricsProvider { + public static final String NETTY_DEFAULT_POOLED_ALLOCATOR_NAME = "pooled"; + + public static final String NETTY_DEFAULT_UNPOOLED_ALLOCATOR_NAME = "unpooled"; + @Produces @Singleton public MeterBinder pooledByteBufAllocatorMetrics() { - return new NettyAllocatorMetrics(PooledByteBufAllocator.DEFAULT); + return new NettyAllocatorMetrics(NETTY_DEFAULT_POOLED_ALLOCATOR_NAME, PooledByteBufAllocator.DEFAULT); } @Produces @Singleton public MeterBinder unpooledByteBufAllocatorMetrics() { - return new NettyAllocatorMetrics(UnpooledByteBufAllocator.DEFAULT); + return new NettyAllocatorMetrics(NETTY_DEFAULT_UNPOOLED_ALLOCATOR_NAME, UnpooledByteBufAllocator.DEFAULT); } } diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/netty/ReactiveNettyMetricsProvider.java b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/netty/ReactiveNettyMetricsProvider.java index 1fb55ef56c729..ddc09cc6511e4 
100644 --- a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/netty/ReactiveNettyMetricsProvider.java +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/netty/ReactiveNettyMetricsProvider.java @@ -6,12 +6,13 @@ import jakarta.inject.Singleton; import io.micrometer.core.instrument.binder.MeterBinder; -import io.micrometer.core.instrument.binder.netty4.NettyAllocatorMetrics; import io.netty.buffer.ByteBufAllocatorMetricProvider; @Singleton public class ReactiveNettyMetricsProvider { + public static final String MULTIPART_ALLOCATOR_NAME = "quarkus-multipart-form-upload"; + @Produces @Singleton public MeterBinder reactiveAllocatorMetrics() throws Exception { @@ -20,7 +21,7 @@ public MeterBinder reactiveAllocatorMetrics() throws Exception { Field af = clazz.getDeclaredField("ALLOC"); af.setAccessible(true); ByteBufAllocatorMetricProvider provider = (ByteBufAllocatorMetricProvider) af.get(null); - return new NettyAllocatorMetrics(provider); + return new NettyAllocatorMetrics(MULTIPART_ALLOCATOR_NAME, provider); } } diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/netty/VertxNettyAllocatorMetricsProvider.java b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/netty/VertxNettyAllocatorMetricsProvider.java index dbefe9bc812f5..e8c60c745bf09 100644 --- a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/netty/VertxNettyAllocatorMetricsProvider.java +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/netty/VertxNettyAllocatorMetricsProvider.java @@ -4,23 +4,34 @@ import jakarta.inject.Singleton; import io.micrometer.core.instrument.binder.MeterBinder; -import io.micrometer.core.instrument.binder.netty4.NettyAllocatorMetrics; import io.netty.buffer.ByteBufAllocatorMetricProvider; import io.vertx.core.buffer.impl.VertxByteBufAllocator; @Singleton public class VertxNettyAllocatorMetricsProvider { + /** + * The name of the Vert.x pooled allocator. + */ + public static final String VERTX_POOLED_ALLOCATOR_NAME = "vertx-pooled"; + + /** + * The name of the Vert.x unpooled allocator. 
+ */ + public static final String VERTX_UNPOOLED_ALLOCATOR_NAME = "vertx-unpooled"; + @Produces @Singleton public MeterBinder vertxPooledByteBufAllocatorMetrics() { - return new NettyAllocatorMetrics((ByteBufAllocatorMetricProvider) VertxByteBufAllocator.POOLED_ALLOCATOR); + return new NettyAllocatorMetrics(VERTX_POOLED_ALLOCATOR_NAME, + (ByteBufAllocatorMetricProvider) VertxByteBufAllocator.POOLED_ALLOCATOR); } @Produces @Singleton public MeterBinder vertxUnpooledByteBufAllocatorMetrics() { - return new NettyAllocatorMetrics((ByteBufAllocatorMetricProvider) VertxByteBufAllocator.UNPOOLED_ALLOCATOR); + return new NettyAllocatorMetrics(VERTX_UNPOOLED_ALLOCATOR_NAME, + (ByteBufAllocatorMetricProvider) VertxByteBufAllocator.UNPOOLED_ALLOCATOR); } } diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/vertx/VertxHttpClientMetrics.java b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/vertx/VertxHttpClientMetrics.java index 34cc38ef6878b..159d3598f1c6d 100644 --- a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/vertx/VertxHttpClientMetrics.java +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/vertx/VertxHttpClientMetrics.java @@ -172,7 +172,7 @@ public void responseEnd(RequestTracker tracker, long bytesRead) { .and(HttpCommonTags.status(tracker.response.statusCode())) .and(HttpCommonTags.outcome(tracker.response.statusCode())); if (!httpClientMetricsTagsContributors.isEmpty()) { - HttpClientMetricsTagsContributor.Context context = new DefaultContext(tracker.request); + HttpClientMetricsTagsContributor.Context context = new DefaultContext(tracker.request, tracker.response); for (int i = 0; i < httpClientMetricsTagsContributors.size(); i++) { try { Tags additionalTags = httpClientMetricsTagsContributors.get(i).contribute(context); @@ -254,6 +254,7 @@ public String getNormalizedUriPath(Map serverMatchPatterns, Lis } } - private record DefaultContext(HttpRequest request) implements HttpClientMetricsTagsContributor.Context { + private record DefaultContext(HttpRequest request, + HttpResponse response) implements HttpClientMetricsTagsContributor.Context { } } diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/vertx/VertxHttpServerMetrics.java b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/vertx/VertxHttpServerMetrics.java index e7148767cc0ee..8714275850068 100644 --- a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/vertx/VertxHttpServerMetrics.java +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/vertx/VertxHttpServerMetrics.java @@ -205,7 +205,7 @@ public void responseEnd(HttpRequestMetric requestMetric, HttpResponse response, VertxMetricsTags.outcome(response), HttpCommonTags.status(response.statusCode())); if (!httpServerMetricsTagsContributors.isEmpty()) { - HttpServerMetricsTagsContributor.Context context = new DefaultContext(requestMetric.request()); + HttpServerMetricsTagsContributor.Context context = new DefaultContext(requestMetric.request(), response); for (int i = 0; i < httpServerMetricsTagsContributors.size(); i++) { try { Tags additionalTags = httpServerMetricsTagsContributors.get(i).contribute(context); @@ -258,6 +258,7 @@ public void disconnected(LongTaskTimer.Sample websocketMetric) { } } - private record DefaultContext(HttpServerRequest request) implements 
HttpServerMetricsTagsContributor.Context { + private record DefaultContext(HttpServerRequest request, + HttpResponse response) implements HttpServerMetricsTagsContributor.Context { } } diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/virtualthreads/VirtualThreadCollector.java b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/virtualthreads/VirtualThreadCollector.java index 8503e03669842..ff017f918e39b 100644 --- a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/virtualthreads/VirtualThreadCollector.java +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/binder/virtualthreads/VirtualThreadCollector.java @@ -40,12 +40,12 @@ public class VirtualThreadCollector { @Inject public VirtualThreadCollector(MicrometerConfig mc) { - var config = mc.binder.virtualThreads; - this.enabled = JavaVersionUtil.isJava21OrHigher() && config.enabled.orElse(true); + var config = mc.binder().virtualThreads(); + this.enabled = JavaVersionUtil.isJava21OrHigher() && config.enabled().orElse(true); MeterBinder instantiated = null; if (enabled) { - if (config.tags.isPresent()) { - List list = config.tags.get(); + if (config.tags().isPresent()) { + List list = config.tags().get(); this.tags = list.stream().map(this::createTagFromEntry).collect(Collectors.toList()); } else { this.tags = List.of(); diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/GrpcClientConfigGroup.java b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/GrpcClientConfigGroup.java index c83302385a213..c0a817f76da30 100644 --- a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/GrpcClientConfigGroup.java +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/GrpcClientConfigGroup.java @@ -3,13 +3,12 @@ import java.util.Optional; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; /** * Build / static runtime config for gRPC Client. */ @ConfigGroup -public class GrpcClientConfigGroup implements MicrometerConfig.CapabilityEnabled { +public interface GrpcClientConfigGroup extends MicrometerConfig.CapabilityEnabled { /** * gRPC Client metrics support. *

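Illustrative aside, not part of this patch: once a config group like the one above becomes an interface, callers read values through accessor methods. A minimal sketch of resolving the gRPC client binder flag, assuming an injected MicrometerConfig (whose checkBinderEnabledWithDefault method appears further down in this diff):

import io.quarkus.micrometer.runtime.config.MicrometerConfig;

public class GrpcClientBinderExample {

    // quarkus.micrometer.binder.grpc-client.enabled wins when set; otherwise
    // quarkus.micrometer.binder-enabled-default is used as the fallback.
    static boolean grpcClientMetricsEnabled(MicrometerConfig config) {
        return config.checkBinderEnabledWithDefault(config.binder().grpcClient());
    }
}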
    @@ -18,18 +17,6 @@ public class GrpcClientConfigGroup implements MicrometerConfig.CapabilityEnabled * and either this value is true, or this value is unset and * {@code quarkus.micrometer.binder-enabled-default} is true. */ - @ConfigItem - public Optional enabled; - - @Override - public Optional getEnabled() { - return enabled; - } - @Override - public String toString() { - return this.getClass().getSimpleName() - + "{enabled=" + enabled - + '}'; - } + Optional enabled(); } diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/GrpcServerConfigGroup.java b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/GrpcServerConfigGroup.java index 383e8f6a89410..6ebcd86ddb450 100644 --- a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/GrpcServerConfigGroup.java +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/GrpcServerConfigGroup.java @@ -3,13 +3,12 @@ import java.util.Optional; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; /** * Build / static runtime config for gRPC Server. */ @ConfigGroup -public class GrpcServerConfigGroup implements MicrometerConfig.CapabilityEnabled { +public interface GrpcServerConfigGroup extends MicrometerConfig.CapabilityEnabled { /** * gRPC Server metrics support. *

    @@ -18,18 +17,6 @@ public class GrpcServerConfigGroup implements MicrometerConfig.CapabilityEnabled * and either this value is true, or this value is unset and * {@code quarkus.micrometer.binder-enabled-default} is true. */ - @ConfigItem - public Optional enabled; - - @Override - public Optional getEnabled() { - return enabled; - } - @Override - public String toString() { - return this.getClass().getSimpleName() - + "{enabled=" + enabled - + '}'; - } + Optional enabled(); } diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/HttpClientConfigGroup.java b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/HttpClientConfigGroup.java index e650df04f9712..e5e612a591e72 100644 --- a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/HttpClientConfigGroup.java +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/HttpClientConfigGroup.java @@ -3,13 +3,12 @@ import java.util.Optional; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; /** * Build / static runtime config for outbound HTTP requests */ @ConfigGroup -public class HttpClientConfigGroup implements MicrometerConfig.CapabilityEnabled { +public interface HttpClientConfigGroup extends MicrometerConfig.CapabilityEnabled { /** * Outbound HTTP request metrics support. *

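Illustrative sketch, not part of this patch: since the client contributor context now also carries the HttpResponse (see the VertxHttpClientMetrics change above), a custom tags contributor could tag outbound request metrics by response status. The package of HttpClientMetricsTagsContributor and its registration as a CDI bean are assumptions; statusCode() is the accessor already used in this diff.

import io.micrometer.core.instrument.Tags;
import io.quarkus.micrometer.runtime.HttpClientMetricsTagsContributor; // package assumed
import jakarta.inject.Singleton;

@Singleton // assumed to be discovered as a CDI bean
public class StatusClassTagContributor implements HttpClientMetricsTagsContributor {

    @Override
    public Tags contribute(Context context) {
        // context.response() is the accessor added in this change
        int status = context.response().statusCode();
        return Tags.of("status.class", status >= 500 ? "5xx" : status >= 400 ? "4xx" : "2xx-3xx");
    }
}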
    @@ -18,18 +17,6 @@ public class HttpClientConfigGroup implements MicrometerConfig.CapabilityEnabled * and either this value is true, or this value is unset and * {@code quarkus.micrometer.binder-enabled-default} is true. */ - @ConfigItem - public Optional enabled; - - @Override - public Optional getEnabled() { - return enabled; - } - @Override - public String toString() { - return this.getClass().getSimpleName() - + "{enabled=" + enabled - + '}'; - } + Optional enabled(); } diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/HttpServerConfigGroup.java b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/HttpServerConfigGroup.java index e63e50f2e6d06..b93e65857a210 100644 --- a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/HttpServerConfigGroup.java +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/HttpServerConfigGroup.java @@ -3,13 +3,12 @@ import java.util.Optional; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; /** * Build / static runtime config for inbound HTTP traffic */ @ConfigGroup -public class HttpServerConfigGroup implements MicrometerConfig.CapabilityEnabled { +public interface HttpServerConfigGroup extends MicrometerConfig.CapabilityEnabled { /** * Inbound HTTP metrics support. *

    @@ -18,18 +17,6 @@ public class HttpServerConfigGroup implements MicrometerConfig.CapabilityEnabled * and either this value is true, or this value is unset and * {@code quarkus.micrometer.binder-enabled-default} is true. */ - @ConfigItem - public Optional enabled; - - @Override - public Optional getEnabled() { - return enabled; - } - @Override - public String toString() { - return this.getClass().getSimpleName() - + "{enabled=" + enabled - + '}'; - } + Optional enabled(); } diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/JsonConfigGroup.java b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/JsonConfigGroup.java index 05997b5f534c6..1c6b7adee0d58 100644 --- a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/JsonConfigGroup.java +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/JsonConfigGroup.java @@ -4,15 +4,16 @@ import java.util.Optional; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; +import io.smallrye.config.WithDefault; @ConfigGroup -public class JsonConfigGroup implements MicrometerConfig.CapabilityEnabled { +public interface JsonConfigGroup extends MicrometerConfig.CapabilityEnabled { /** * Support for export to JSON format. Off by default. */ - @ConfigItem(defaultValue = "false") - public Optional enabled; + @WithDefault("false") + @Override + Optional enabled(); /** * The path for the JSON metrics endpoint. @@ -22,35 +23,22 @@ public class JsonConfigGroup implements MicrometerConfig.CapabilityEnabled { * If the management interface is enabled, the value will be resolved as a path relative to * `${quarkus.management.root-path}`. */ - @ConfigItem(defaultValue = "metrics") - public String path; + @WithDefault("metrics") + String path(); /** * Statistics like max, percentiles, and histogram counts decay over time to give greater weight to recent * samples. Samples are accumulated to such statistics in ring buffers which rotate after * the expiry, with this buffer length. */ - @ConfigItem(defaultValue = "3") - public Integer bufferLength; + @WithDefault("3") + Integer bufferLength(); /** * Statistics like max, percentiles, and histogram counts decay over time to give greater weight to recent * samples. Samples are accumulated to such statistics in ring buffers which rotate after * this expiry, with a particular buffer length. 
*/ - @ConfigItem(defaultValue = "P3D") - public Duration expiry; - - @Override - public Optional getEnabled() { - return enabled; - } - - @Override - public String toString() { - return this.getClass().getSimpleName() - + "{path='" + path - + ",enabled=" + enabled - + '}'; - } + @WithDefault("P3D") + Duration expiry(); } diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/KafkaConfigGroup.java b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/KafkaConfigGroup.java index 46cb547a8f933..840de9ddc8e7f 100644 --- a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/KafkaConfigGroup.java +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/KafkaConfigGroup.java @@ -3,13 +3,12 @@ import java.util.Optional; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; /** * Build / static runtime config for Kafka Binders */ @ConfigGroup -public class KafkaConfigGroup implements MicrometerConfig.CapabilityEnabled { +public interface KafkaConfigGroup extends MicrometerConfig.CapabilityEnabled { /** * Kafka metrics support. *

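A hypothetical, minimal sketch (not taken from this patch) of the SmallRye Config annotations the migrated groups rely on: @WithDefault supplies a value when the property is absent, @WithName overrides the derived property name. The prefix and property names below are invented for illustration.

import java.util.Optional;

import io.smallrye.config.ConfigMapping;
import io.smallrye.config.WithDefault;
import io.smallrye.config.WithName;

@ConfigMapping(prefix = "example.metrics") // hypothetical prefix
public interface ExampleMetricsConfig {

    @WithDefault("true") // example.metrics.enabled defaults to true when unset
    boolean enabled();

    @WithName("export.path") // maps to example.metrics.export.path
    Optional<String> exportPath();
}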
    @@ -18,18 +17,6 @@ public class KafkaConfigGroup implements MicrometerConfig.CapabilityEnabled { * and either this value is true, or this value is unset and * {@code quarkus.micrometer.binder-enabled-default} is true. */ - @ConfigItem - public Optional enabled; - - @Override - public Optional getEnabled() { - return enabled; - } - @Override - public String toString() { - return this.getClass().getSimpleName() - + "{enabled=" + enabled - + '}'; - } + Optional enabled(); } diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/MPMetricsConfigGroup.java b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/MPMetricsConfigGroup.java index a34b86e679f88..57295c1688232 100644 --- a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/MPMetricsConfigGroup.java +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/MPMetricsConfigGroup.java @@ -3,13 +3,12 @@ import java.util.Optional; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; /** * Build / static runtime config for the Microprofile Metrics Binder */ @ConfigGroup -public class MPMetricsConfigGroup implements MicrometerConfig.CapabilityEnabled { +public interface MPMetricsConfigGroup extends MicrometerConfig.CapabilityEnabled { // @formatter:off /** * Eclipse MicroProfile Metrics support. @@ -32,18 +31,6 @@ public class MPMetricsConfigGroup implements MicrometerConfig.CapabilityEnabled * @asciidoclet */ // @formatter:on - @ConfigItem - public Optional enabled; - - @Override - public Optional getEnabled() { - return enabled; - } - @Override - public String toString() { - return this.getClass().getSimpleName() - + "{enabled=" + enabled - + '}'; - } + Optional enabled(); } diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/MicrometerConfig.java b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/MicrometerConfig.java index b5c51a3f002fb..3896532012a54 100644 --- a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/MicrometerConfig.java +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/MicrometerConfig.java @@ -3,23 +3,25 @@ import java.util.Optional; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefault; /** * Global configuration for the Micrometer extension */ -@ConfigRoot(name = "micrometer", phase = ConfigPhase.BUILD_AND_RUN_TIME_FIXED) -public final class MicrometerConfig { +@ConfigMapping(prefix = "quarkus.micrometer") +@ConfigRoot(phase = ConfigPhase.BUILD_AND_RUN_TIME_FIXED) +public interface MicrometerConfig { /** * Micrometer metrics support. *

    * Micrometer metrics support is enabled by default. */ - @ConfigItem(defaultValue = "true") - public boolean enabled; + @WithDefault("true") + boolean enabled(); /** * Micrometer MeterRegistry discovery. @@ -27,8 +29,8 @@ public final class MicrometerConfig { * Micrometer MeterRegistry implementations discovered on the classpath * will be enabled automatically by default. */ - @ConfigItem(defaultValue = "true") - public boolean registryEnabledDefault; + @WithDefault("true") + boolean registryEnabledDefault(); /** * Micrometer MeterBinder discovery. @@ -36,24 +38,24 @@ public final class MicrometerConfig { * Micrometer MeterBinder implementations discovered on the classpath * will be enabled automatically by default. */ - @ConfigItem(defaultValue = "true") - public boolean binderEnabledDefault; + @WithDefault("true") + boolean binderEnabledDefault(); /** Build / static runtime config for binders */ - public BinderConfig binder; + BinderConfig binder(); /** Build / static runtime config for exporters */ - public ExportConfig export; + ExportConfig export(); /** * For MeterRegistry configurations with optional 'enabled' attributes, * determine whether the registry is enabled using {@link #registryEnabledDefault} * as the default value. */ - public boolean checkRegistryEnabledWithDefault(CapabilityEnabled config) { - if (enabled) { - Optional configValue = config.getEnabled(); - return configValue.orElseGet(() -> registryEnabledDefault); + default boolean checkRegistryEnabledWithDefault(CapabilityEnabled config) { + if (enabled()) { + Optional configValue = config.enabled(); + return configValue.orElseGet(this::registryEnabledDefault); } return false; } @@ -63,28 +65,20 @@ public boolean checkRegistryEnabledWithDefault(CapabilityEnabled config) { * determine whether the binder is enabled using {@link #binderEnabledDefault} * as the default value. */ - public boolean checkBinderEnabledWithDefault(CapabilityEnabled config) { - if (enabled) { - Optional configValue = config.getEnabled(); - return configValue.orElseGet(() -> binderEnabledDefault); + default boolean checkBinderEnabledWithDefault(CapabilityEnabled config) { + if (enabled()) { + Optional configValue = config.enabled(); + return configValue.orElseGet(this::binderEnabledDefault); } return false; } - @Override - public String toString() { - return this.getClass().getSimpleName() - + "{enabled=" + enabled - + ",binderEnabledDefault=" + binderEnabledDefault - + ",registryEnabledDefault=" + registryEnabledDefault - + '}'; - } - /** Build / static runtime config for binders */ @ConfigGroup - public static class BinderConfig { - public HttpClientConfigGroup httpClient; - public HttpServerConfigGroup httpServer; + interface BinderConfig { + HttpClientConfigGroup httpClient(); + + HttpServerConfigGroup httpServer(); /** * Micrometer JVM metrics support. @@ -93,23 +87,23 @@ public static class BinderConfig { * support is enabled, and either this value is true, or this * value is unset and {@code quarkus.micrometer.binder-enabled-default} is true. 
*/ - @ConfigItem - public Optional jvm; + Optional jvm(); + + KafkaConfigGroup kafka(); - public KafkaConfigGroup kafka; + RedisConfigGroup redis(); - public RedisConfigGroup redis; - public StorkConfigGroup stork; + StorkConfigGroup stork(); - public GrpcServerConfigGroup grpcServer; + GrpcServerConfigGroup grpcServer(); - public GrpcClientConfigGroup grpcClient; + GrpcClientConfigGroup grpcClient(); - public ReactiveMessagingConfigGroup messaging; + ReactiveMessagingConfigGroup messaging(); - public MPMetricsConfigGroup mpMetrics; + MPMetricsConfigGroup mpMetrics(); - public VirtualThreadsConfigGroup virtualThreads; + VirtualThreadsConfigGroup virtualThreads(); /** * Micrometer System metrics support. @@ -118,22 +112,28 @@ public static class BinderConfig { * support is enabled, and either this value is true, or this * value is unset and {@code quarkus.micrometer.binder-enabled-default} is true. */ - @ConfigItem - public Optional system; + Optional system(); - public VertxConfigGroup vertx; + VertxConfigGroup vertx(); - public NettyConfigGroup netty; + NettyConfigGroup netty(); } /** Build / static runtime config for exporters */ @ConfigGroup - public static class ExportConfig { - public JsonConfigGroup json; - public PrometheusConfigGroup prometheus; + interface ExportConfig { + JsonConfigGroup json(); + + PrometheusConfigGroup prometheus(); } - public interface CapabilityEnabled { - Optional getEnabled(); + interface CapabilityEnabled { + + /** + * Gets the enabled value. + * + * @return the {@link Optional} enabled value + */ + Optional enabled(); } } diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/NettyConfigGroup.java b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/NettyConfigGroup.java index d93df52cdb632..28fd69a64eab1 100644 --- a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/NettyConfigGroup.java +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/NettyConfigGroup.java @@ -3,13 +3,12 @@ import java.util.Optional; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; /** * Build / static runtime config for Netty Binders */ @ConfigGroup -public class NettyConfigGroup implements MicrometerConfig.CapabilityEnabled { +public interface NettyConfigGroup extends MicrometerConfig.CapabilityEnabled { /** * Netty metrics support. *

    @@ -18,18 +17,6 @@ public class NettyConfigGroup implements MicrometerConfig.CapabilityEnabled { * and either this value is true, or this value is unset and * {@code quarkus.micrometer.binder-enabled-default} is true. */ - @ConfigItem - public Optional enabled; - - @Override - public Optional getEnabled() { - return enabled; - } - @Override - public String toString() { - return this.getClass().getSimpleName() - + "{enabled=" + enabled - + '}'; - } + Optional enabled(); } diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/PrometheusConfigGroup.java b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/PrometheusConfigGroup.java index 6f1f80ab251da..45774493ce77c 100644 --- a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/PrometheusConfigGroup.java +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/PrometheusConfigGroup.java @@ -3,10 +3,11 @@ import java.util.Optional; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; +import io.smallrye.config.WithDefault; @ConfigGroup -public class PrometheusConfigGroup implements MicrometerConfig.CapabilityEnabled { +public interface PrometheusConfigGroup extends MicrometerConfig.CapabilityEnabled { + /** * Support for export to Prometheus. *

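Sketch only, not part of this patch: the same default-resolution pattern applies to registries. A provider could decide whether to create the Prometheus registry roughly like this, using the accessors introduced in MicrometerConfig above:

import io.quarkus.micrometer.runtime.config.MicrometerConfig;

public class PrometheusEnablementExample {

    // quarkus.micrometer.export.prometheus.enabled wins when set; otherwise
    // quarkus.micrometer.registry-enabled-default decides (see checkRegistryEnabledWithDefault).
    static boolean prometheusRegistryEnabled(MicrometerConfig config) {
        return config.checkRegistryEnabledWithDefault(config.export().prometheus());
    }
}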
    @@ -15,8 +16,8 @@ public class PrometheusConfigGroup implements MicrometerConfig.CapabilityEnabled * and either this value is true, or this value is unset and * {@code quarkus.micrometer.registry-enabled-default} is true. */ - @ConfigItem - public Optional enabled; + @Override + Optional enabled(); /** * The path for the prometheus metrics endpoint (produces text/plain). The default value is @@ -33,28 +34,14 @@ public class PrometheusConfigGroup implements MicrometerConfig.CapabilityEnabled * * @asciidoclet */ - @ConfigItem(defaultValue = "metrics") - public String path; + @WithDefault("metrics") + String path(); /** * By default, this extension will create a Prometheus MeterRegistry instance. *

    * Use this attribute to veto the creation of the default Prometheus MeterRegistry. */ - @ConfigItem(defaultValue = "true") - public boolean defaultRegistry; - - @Override - public Optional getEnabled() { - return enabled; - } - - @Override - public String toString() { - return this.getClass().getSimpleName() - + "{path='" + path - + ",enabled=" + enabled - + ",defaultRegistry=" + defaultRegistry - + '}'; - } + @WithDefault("true") + boolean defaultRegistry(); } diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/ReactiveMessagingConfigGroup.java b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/ReactiveMessagingConfigGroup.java index c4580a6ae9555..7637b2caa49c1 100644 --- a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/ReactiveMessagingConfigGroup.java +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/ReactiveMessagingConfigGroup.java @@ -3,13 +3,12 @@ import java.util.Optional; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; /** * Build / static runtime config for Reactive Messaging Binders */ @ConfigGroup -public class ReactiveMessagingConfigGroup implements MicrometerConfig.CapabilityEnabled { +public interface ReactiveMessagingConfigGroup extends MicrometerConfig.CapabilityEnabled { /** * Kafka metrics support. *

    @@ -18,18 +17,6 @@ public class ReactiveMessagingConfigGroup implements MicrometerConfig.Capability * and either this value is true, or this value is unset and * {@code quarkus.micrometer.binder-enabled-default} is true. */ - @ConfigItem - public Optional enabled; - - @Override - public Optional getEnabled() { - return enabled; - } - @Override - public String toString() { - return this.getClass().getSimpleName() - + "{enabled=" + enabled - + '}'; - } + Optional enabled(); } diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/RedisConfigGroup.java b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/RedisConfigGroup.java index b79ee7b9c2350..5a938e3f94a30 100644 --- a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/RedisConfigGroup.java +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/RedisConfigGroup.java @@ -3,13 +3,12 @@ import java.util.Optional; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; /** * Build / static runtime config for Redis metrics */ @ConfigGroup -public class RedisConfigGroup implements MicrometerConfig.CapabilityEnabled { +public interface RedisConfigGroup extends MicrometerConfig.CapabilityEnabled { /** * Redis client metrics support. *

    @@ -18,18 +17,6 @@ public class RedisConfigGroup implements MicrometerConfig.CapabilityEnabled { * and either this value is true, or this value is unset and * {@code quarkus.micrometer.binder-enabled-default} is true. */ - @ConfigItem - public Optional enabled; - - @Override - public Optional getEnabled() { - return enabled; - } - @Override - public String toString() { - return this.getClass().getSimpleName() - + "{enabled=" + enabled - + '}'; - } + Optional enabled(); } diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/StorkConfigGroup.java b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/StorkConfigGroup.java index cca42c194fbb2..5e0e7a187d8de 100644 --- a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/StorkConfigGroup.java +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/StorkConfigGroup.java @@ -3,10 +3,9 @@ import java.util.Optional; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; @ConfigGroup -public class StorkConfigGroup implements MicrometerConfig.CapabilityEnabled { +public interface StorkConfigGroup extends MicrometerConfig.CapabilityEnabled { /** * Stork metrics support. *

    @@ -15,19 +14,6 @@ public class StorkConfigGroup implements MicrometerConfig.CapabilityEnabled { * and either this value is true, or this value is unset and * {@code quarkus.micrometer.binder-enabled-default} is true. */ - @ConfigItem - public Optional enabled; - - @Override - public Optional getEnabled() { - return enabled; - } - @Override - public String toString() { - return this.getClass().getSimpleName() - + "{enabled=" + enabled - + '}'; - } - + Optional enabled(); } diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/VertxConfigGroup.java b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/VertxConfigGroup.java index 11aa7b8f9cd55..0f84d390c6b85 100644 --- a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/VertxConfigGroup.java +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/VertxConfigGroup.java @@ -3,13 +3,12 @@ import java.util.Optional; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; /** * Build / static runtime config for the Vert.x Binder */ @ConfigGroup -public class VertxConfigGroup implements MicrometerConfig.CapabilityEnabled { +public interface VertxConfigGroup extends MicrometerConfig.CapabilityEnabled { /** * Vert.x metrics support. *

    @@ -19,18 +18,6 @@ public class VertxConfigGroup implements MicrometerConfig.CapabilityEnabled { * {@code quarkus.micrometer.binder-enabled-default} is true. * */ - @ConfigItem - public Optional enabled; - - @Override - public Optional getEnabled() { - return enabled; - } - @Override - public String toString() { - return this.getClass().getSimpleName() - + "{enabled=" + enabled - + '}'; - } + Optional enabled(); } diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/VirtualThreadsConfigGroup.java b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/VirtualThreadsConfigGroup.java index e739b78471163..17019aab90adb 100644 --- a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/VirtualThreadsConfigGroup.java +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/VirtualThreadsConfigGroup.java @@ -4,13 +4,12 @@ import java.util.Optional; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; /** * Build / static runtime config for the virtual thread metric collection. */ @ConfigGroup -public class VirtualThreadsConfigGroup implements MicrometerConfig.CapabilityEnabled { +public interface VirtualThreadsConfigGroup extends MicrometerConfig.CapabilityEnabled { /** * Virtual Threads metrics support. *

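Illustrative sketch, not part of this patch: the tags() list documented above uses the key1=value1,key2=value2 form. Converting such entries into Micrometer tags could look roughly like the following; VirtualThreadCollector does the equivalent internally via createTagFromEntry.

import java.util.List;

import io.micrometer.core.instrument.Tag;

public class VirtualThreadTagParsingExample {

    // Turns entries such as ["region=eu", "app=orders"] into Micrometer Tag instances.
    static List<Tag> parseTags(List<String> entries) {
        return entries.stream()
                .map(entry -> entry.split("=", 2))
                .map(kv -> Tag.of(kv[0].trim(), kv.length > 1 ? kv[1].trim() : ""))
                .toList();
    }
}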
    @@ -18,18 +17,13 @@ public class VirtualThreadsConfigGroup implements MicrometerConfig.CapabilityEna * this value is set to {@code true} (default), the JVM supports virtual threads (Java 21+) and the * {@code quarkus.micrometer.binder-enabled-default} property is true. */ - @ConfigItem - public Optional enabled; + @Override + Optional enabled(); + /** * The tags to be added to the metrics. * Empty by default. * When set, tags are passed as: {@code key1=value1,key2=value2}. */ - @ConfigItem - public Optional> tags; - - @Override - public Optional getEnabled() { - return enabled; - } + Optional> tags(); } diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/runtime/HttpClientConfig.java b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/runtime/HttpClientConfig.java index 1adb307cad8c9..283cff3ba74ad 100644 --- a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/runtime/HttpClientConfig.java +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/runtime/HttpClientConfig.java @@ -3,12 +3,14 @@ import java.util.List; import java.util.Optional; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefault; -@ConfigRoot(name = "micrometer.binder.http-client", phase = ConfigPhase.RUN_TIME) -public class HttpClientConfig { +@ConfigMapping(prefix = "quarkus.micrometer.binder.http-client") +@ConfigRoot(phase = ConfigPhase.RUN_TIME) +public interface HttpClientConfig { /** * Comma-separated list of regular expressions used to specify uri * labels in http metrics. @@ -28,15 +30,13 @@ public class HttpClientConfig { * * @asciidoclet */ - @ConfigItem - public Optional> matchPatterns = Optional.empty(); + Optional> matchPatterns(); /** * Comma-separated list of regular expressions defining uri paths * that should be ignored (not measured). */ - @ConfigItem - public Optional> ignorePatterns = Optional.empty(); + Optional> ignorePatterns(); /** * Suppress 4xx errors from metrics collection for unmatched templates. @@ -46,14 +46,14 @@ public class HttpClientConfig { * * @asciidoclet */ - @ConfigItem(defaultValue = "false") - public boolean suppress4xxErrors; + @WithDefault("false") + boolean suppress4xxErrors(); /** * Maximum number of unique URI tag values allowed. After the max number of * tag values is reached, metrics with additional tag values are denied by * filter. 
*/ - @ConfigItem(defaultValue = "100") - public int maxUriTags; + @WithDefault("100") + int maxUriTags(); } diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/runtime/HttpServerConfig.java b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/runtime/HttpServerConfig.java index f9342aa1dc5d6..2416b4d645a34 100644 --- a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/runtime/HttpServerConfig.java +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/runtime/HttpServerConfig.java @@ -3,12 +3,14 @@ import java.util.List; import java.util.Optional; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefault; -@ConfigRoot(name = "micrometer.binder.http-server", phase = ConfigPhase.RUN_TIME) -public class HttpServerConfig { +@ConfigMapping(prefix = "quarkus.micrometer.binder.http-server") +@ConfigRoot(phase = ConfigPhase.RUN_TIME) +public interface HttpServerConfig { /** * Comma-separated list of regular expressions used to specify uri * labels in http metrics. @@ -28,15 +30,13 @@ public class HttpServerConfig { * * @asciidoclet */ - @ConfigItem - public Optional> matchPatterns = Optional.empty(); + Optional> matchPatterns(); /** * Comma-separated list of regular expressions defining uri paths * that should be ignored (not measured). */ - @ConfigItem - public Optional> ignorePatterns = Optional.empty(); + Optional> ignorePatterns(); /** * Suppress non-application uris from metrics collection. @@ -47,8 +47,8 @@ public class HttpServerConfig { * * @asciidoclet */ - @ConfigItem(defaultValue = "true") - public boolean suppressNonApplicationUris; + @WithDefault("true") + boolean suppressNonApplicationUris(); /** * Suppress 4xx errors from metrics collection for unmatched templates. @@ -58,23 +58,14 @@ public class HttpServerConfig { * * @asciidoclet */ - @ConfigItem(defaultValue = "false") - public boolean suppress4xxErrors; + @WithDefault("false") + boolean suppress4xxErrors(); /** * Maximum number of unique URI tag values allowed. After the max number of * tag values is reached, metrics with additional tag values are denied by * filter. 
*/ - @ConfigItem(defaultValue = "100") - public int maxUriTags; - - public void mergeDeprecatedConfig(VertxConfig config) { - if (!ignorePatterns.isPresent()) { - ignorePatterns = config.ignorePatterns; - } - if (!matchPatterns.isPresent()) { - matchPatterns = config.matchPatterns; - } - } + @WithDefault("100") + int maxUriTags(); } diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/runtime/PrometheusRuntimeConfig.java b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/runtime/PrometheusRuntimeConfig.java index c4ce90f86b6d1..dd7ab7c41f122 100644 --- a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/runtime/PrometheusRuntimeConfig.java +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/runtime/PrometheusRuntimeConfig.java @@ -3,16 +3,18 @@ import java.util.Map; import io.quarkus.runtime.annotations.ConfigDocMapKey; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithParentName; /** * Runtime configuration for Micrometer meter registries. */ @SuppressWarnings("unused") -@ConfigRoot(name = "micrometer.export.prometheus", phase = ConfigPhase.RUN_TIME) -public class PrometheusRuntimeConfig { +@ConfigMapping(prefix = "quarkus.micrometer.export.prometheus") +@ConfigRoot(phase = ConfigPhase.RUN_TIME) +public interface PrometheusRuntimeConfig { // @formatter:off /** * Prometheus registry configuration properties. @@ -23,7 +25,7 @@ public class PrometheusRuntimeConfig { * @asciidoclet */ // @formatter:on - @ConfigItem(name = ConfigItem.PARENT) + @WithParentName @ConfigDocMapKey("configuration-property-name") - public Map prometheus; + Map prometheus(); } diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/runtime/VertxConfig.java b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/runtime/VertxConfig.java index cbe2a88135ec1..1174bf4270a6d 100644 --- a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/runtime/VertxConfig.java +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/runtime/VertxConfig.java @@ -3,18 +3,18 @@ import java.util.List; import java.util.Optional; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; -@ConfigRoot(name = "micrometer.binder.vertx", phase = ConfigPhase.RUN_TIME) -public class VertxConfig { +@ConfigMapping(prefix = "quarkus.micrometer.binder.vertx") +@ConfigRoot(phase = ConfigPhase.RUN_TIME) +public interface VertxConfig { /** * @deprecated use {@code quarkus.micrometer.binder.http-server.match-patterns} */ @Deprecated - @ConfigItem - public Optional> matchPatterns = Optional.empty(); + Optional> matchPatterns(); /** * Comma-separated list of regular expressions defining uri paths @@ -23,6 +23,5 @@ public class VertxConfig { * @deprecated use {@code quarkus.micrometer.binder.http-server.ignore-patterns} */ @Deprecated - @ConfigItem - public Optional> ignorePatterns = Optional.empty(); + Optional> ignorePatterns(); } diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/export/JsonMeterRegistryProvider.java 
b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/export/JsonMeterRegistryProvider.java index 1c0d662130d98..586e69e191455 100644 --- a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/export/JsonMeterRegistryProvider.java +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/export/JsonMeterRegistryProvider.java @@ -12,6 +12,6 @@ public class JsonMeterRegistryProvider { @Produces @Singleton public JsonMeterRegistry registry(Clock clock, io.quarkus.micrometer.runtime.config.MicrometerConfig config) { - return new JsonMeterRegistry(clock, config.export.json.bufferLength, config.export.json.expiry); + return new JsonMeterRegistry(clock, config.export().json().bufferLength(), config.export().json().expiry()); } } diff --git a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/export/PrometheusMeterRegistryProvider.java b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/export/PrometheusMeterRegistryProvider.java index b1cef90873d27..641932a08463d 100644 --- a/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/export/PrometheusMeterRegistryProvider.java +++ b/extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/export/PrometheusMeterRegistryProvider.java @@ -21,7 +21,7 @@ public class PrometheusMeterRegistryProvider { @Singleton @DefaultBean public PrometheusConfig configure(PrometheusRuntimeConfig config) { - final Map properties = ConfigAdapter.captureProperties(config.prometheus, PREFIX); + final Map properties = ConfigAdapter.captureProperties(config.prometheus(), PREFIX); return ConfigAdapter.validate(properties::get); } diff --git a/extensions/micrometer/runtime/src/test/java/io/quarkus/micrometer/runtime/binder/HttpBinderConfigurationTest.java b/extensions/micrometer/runtime/src/test/java/io/quarkus/micrometer/runtime/binder/HttpBinderConfigurationTest.java index a9e7722b85922..0fdddb63e1d50 100644 --- a/extensions/micrometer/runtime/src/test/java/io/quarkus/micrometer/runtime/binder/HttpBinderConfigurationTest.java +++ b/extensions/micrometer/runtime/src/test/java/io/quarkus/micrometer/runtime/binder/HttpBinderConfigurationTest.java @@ -10,6 +10,7 @@ import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; +import org.mockito.Mockito; import io.quarkus.micrometer.runtime.config.runtime.HttpClientConfig; import io.quarkus.micrometer.runtime.config.runtime.HttpServerConfig; @@ -18,12 +19,15 @@ public class HttpBinderConfigurationTest { @Test public void testHttpServerMetricsIgnorePatterns() { - HttpServerConfig serverConfig = new HttpServerConfig(); - serverConfig.ignorePatterns = Optional.of(new ArrayList<>(Arrays.asList(" /item/.* ", " /oranges/.* "))); - + HttpServerConfig httpServerConfig = Mockito.mock(HttpServerConfig.class); + Mockito.doReturn(Optional.of(new ArrayList<>(Arrays.asList(" /item/.* ", " /oranges/.* ")))) + .when(httpServerConfig) + .ignorePatterns(); + VertxConfig vertxConfig = Mockito.mock(VertxConfig.class); + HttpClientConfig httpClientConfig = Mockito.mock(HttpClientConfig.class); HttpBinderConfiguration binderConfig = new HttpBinderConfiguration( true, false, - serverConfig, new HttpClientConfig(), new VertxConfig()); + httpServerConfig, httpClientConfig, vertxConfig); List ignorePatterns = binderConfig.getServerIgnorePatterns(); Assertions.assertEquals(2, ignorePatterns.size()); @@ -39,13 +43,15 @@ public void testHttpServerMetricsIgnorePatterns() { @Test public void 
testHttpServerMetricsMatchPatterns() { - HttpServerConfig serverConfig = new HttpServerConfig(); - serverConfig.matchPatterns = Optional - .of(new ArrayList<>(Arrays.asList(" /item/\\d+=/item/{id} ", " /msg/\\d+=/msg/{other} "))); - + HttpServerConfig httpServerConfig = Mockito.mock(HttpServerConfig.class); + Mockito.doReturn(Optional + .of(new ArrayList<>(Arrays.asList(" /item/\\d+=/item/{id} ", " /msg/\\d+=/msg/{other} ")))) + .when(httpServerConfig).matchPatterns(); + HttpClientConfig httpClientConfig = Mockito.mock(HttpClientConfig.class); + VertxConfig vertxConfig = Mockito.mock(VertxConfig.class); HttpBinderConfiguration binderConfig = new HttpBinderConfiguration( true, false, - serverConfig, new HttpClientConfig(), new VertxConfig()); + httpServerConfig, httpClientConfig, vertxConfig); Map matchPatterns = binderConfig.getServerMatchPatterns(); Assertions.assertFalse(matchPatterns.isEmpty()); diff --git a/extensions/micrometer/runtime/src/test/java/io/quarkus/micrometer/runtime/binder/RequestMetricInfoTest.java b/extensions/micrometer/runtime/src/test/java/io/quarkus/micrometer/runtime/binder/RequestMetricInfoTest.java index cc2b63d2da680..d3c85ea0184e3 100644 --- a/extensions/micrometer/runtime/src/test/java/io/quarkus/micrometer/runtime/binder/RequestMetricInfoTest.java +++ b/extensions/micrometer/runtime/src/test/java/io/quarkus/micrometer/runtime/binder/RequestMetricInfoTest.java @@ -13,6 +13,7 @@ import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.mockito.Mockito; import io.quarkus.micrometer.runtime.config.runtime.HttpClientConfig; import io.quarkus.micrometer.runtime.config.runtime.HttpServerConfig; @@ -59,13 +60,14 @@ public void testParsePathIgnoreNoLeadingSlash() { @Test public void testHttpServerMetricsIgnorePatterns() { - HttpServerConfig serverConfig = new HttpServerConfig(); - serverConfig.ignorePatterns = Optional.of(new ArrayList<>(Arrays.asList(" /item/.* ", " /oranges/.* "))); - + HttpServerConfig httpServerConfig = Mockito.mock(HttpServerConfig.class); + Mockito.doReturn(Optional.of(new ArrayList<>(Arrays.asList(" /item/.* ", " /oranges/.* ")))) + .when(httpServerConfig).ignorePatterns(); + HttpClientConfig httpClientConfig = Mockito.mock(HttpClientConfig.class); + VertxConfig vertxConfig = Mockito.mock(VertxConfig.class); HttpBinderConfiguration binderConfig = new HttpBinderConfiguration( true, false, - serverConfig, new HttpClientConfig(), new VertxConfig()); - + httpServerConfig, httpClientConfig, vertxConfig); Assertions.assertEquals(2, binderConfig.serverIgnorePatterns.size()); Pattern p = binderConfig.serverIgnorePatterns.get(0); @@ -97,13 +99,16 @@ public void testParsePathMatchReplaceLeadingSlash() { @Test public void testHttpServerMetricsMatchPatterns() { - HttpServerConfig serverConfig = new HttpServerConfig(); - serverConfig.matchPatterns = Optional - .of(new ArrayList<>(Arrays.asList(" /item/\\d+=/item/{id} ", " /msg/\\d+=/msg/{other} "))); + HttpServerConfig httpServerConfig = Mockito.mock(HttpServerConfig.class); + Mockito.doReturn(Optional + .of(new ArrayList<>(Arrays.asList(" /item/\\d+=/item/{id} ", " /msg/\\d+=/msg/{other} ")))) + .when(httpServerConfig).matchPatterns(); + HttpClientConfig httpClientConfig = Mockito.mock(HttpClientConfig.class); + VertxConfig vertxConfig = Mockito.mock(VertxConfig.class); HttpBinderConfiguration binderConfig = new HttpBinderConfiguration( true, false, - serverConfig, new HttpClientConfig(), new VertxConfig()); + httpServerConfig, 
httpClientConfig, vertxConfig); Assertions.assertFalse(binderConfig.serverMatchPatterns.isEmpty()); Iterator> i = binderConfig.serverMatchPatterns.entrySet().iterator(); diff --git a/extensions/mongodb-client/deployment/pom.xml b/extensions/mongodb-client/deployment/pom.xml index bafad10a55454..3bffa4334d1e8 100644 --- a/extensions/mongodb-client/deployment/pom.xml +++ b/extensions/mongodb-client/deployment/pom.xml @@ -134,9 +134,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/mongodb-client/deployment/src/main/java/io/quarkus/mongodb/deployment/DevServicesBuildTimeConfig.java b/extensions/mongodb-client/deployment/src/main/java/io/quarkus/mongodb/deployment/DevServicesBuildTimeConfig.java index c5557fdde24bb..d693fca198cfd 100644 --- a/extensions/mongodb-client/deployment/src/main/java/io/quarkus/mongodb/deployment/DevServicesBuildTimeConfig.java +++ b/extensions/mongodb-client/deployment/src/main/java/io/quarkus/mongodb/deployment/DevServicesBuildTimeConfig.java @@ -5,10 +5,10 @@ import io.quarkus.runtime.annotations.ConfigDocMapKey; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; +import io.smallrye.config.WithDefault; @ConfigGroup -public class DevServicesBuildTimeConfig { +public interface DevServicesBuildTimeConfig { /** * If DevServices has been explicitly enabled or disabled. DevServices is generally enabled @@ -17,36 +17,31 @@ public class DevServicesBuildTimeConfig { * When DevServices is enabled Quarkus will attempt to automatically configure and start * a database when running in Dev or Test mode. */ - @ConfigItem - public Optional enabled = Optional.empty(); + Optional enabled(); /** * The container image name to use, for container based DevServices providers. */ - @ConfigItem - public Optional imageName; + Optional imageName(); /** * Optional fixed port the dev service will listen to. *

    * If not defined, the port will be chosen randomly. */ - @ConfigItem - public Optional port; + Optional port(); /** * Generic properties that are added to the connection URL. */ - @ConfigItem @ConfigDocMapKey("property-key") - public Map properties; + Map properties(); /** * Environment variables that are passed to the container. */ - @ConfigItem @ConfigDocMapKey("environment-variable-name") - public Map containerEnv; + Map containerEnv(); /** * Indicates if the MongoDB server managed by Quarkus Dev Services is shared. @@ -59,8 +54,8 @@ public class DevServicesBuildTimeConfig { *

    * Container sharing is only used in dev mode. */ - @ConfigItem(defaultValue = "true") - public boolean shared; + @WithDefault("true") + boolean shared(); /** * The value of the {@code quarkus-dev-service-mongodb} label attached to the started container. @@ -71,7 +66,7 @@ public class DevServicesBuildTimeConfig { * starts a new container with the {@code quarkus-dev-service-mongodb} label set to the specified value. *

    */ - @ConfigItem(defaultValue = "mongodb") - public String serviceName; + @WithDefault("mongodb") + String serviceName(); } diff --git a/extensions/mongodb-client/deployment/src/main/java/io/quarkus/mongodb/deployment/DevServicesMongoProcessor.java b/extensions/mongodb-client/deployment/src/main/java/io/quarkus/mongodb/deployment/DevServicesMongoProcessor.java index 2bab44a74c0ab..a94f159cd6677 100644 --- a/extensions/mongodb-client/deployment/src/main/java/io/quarkus/mongodb/deployment/DevServicesMongoProcessor.java +++ b/extensions/mongodb-client/deployment/src/main/java/io/quarkus/mongodb/deployment/DevServicesMongoProcessor.java @@ -247,12 +247,12 @@ private CapturedProperties captureProperties(String connectionName, MongoClientB String connectionString = ConfigProvider.getConfig().getOptionalValue(configPrefix + "connection-string", String.class) .orElse(null); //TODO: update for multiple connections - DevServicesBuildTimeConfig devServicesConfig = mongoClientBuildTimeConfig.devservices; - boolean devServicesEnabled = devServicesConfig.enabled.orElse(true); + DevServicesBuildTimeConfig devServicesConfig = mongoClientBuildTimeConfig.devservices(); + boolean devServicesEnabled = devServicesConfig.enabled().orElse(true); return new CapturedProperties(databaseName, connectionString, devServicesEnabled, - devServicesConfig.imageName.orElseGet(() -> ConfigureUtil.getDefaultImageNameFor("mongo")), - devServicesConfig.port.orElse(null), devServicesConfig.properties, devServicesConfig.containerEnv, - devServicesConfig.shared, devServicesConfig.serviceName); + devServicesConfig.imageName().orElseGet(() -> ConfigureUtil.getDefaultImageNameFor("mongo")), + devServicesConfig.port().orElse(null), devServicesConfig.properties(), devServicesConfig.containerEnv(), + devServicesConfig.shared(), devServicesConfig.serviceName()); } private record CapturedProperties(String database, String connectionString, boolean devServicesEnabled, diff --git a/extensions/mongodb-client/deployment/src/main/java/io/quarkus/mongodb/deployment/MongoClientBuildTimeConfig.java b/extensions/mongodb-client/deployment/src/main/java/io/quarkus/mongodb/deployment/MongoClientBuildTimeConfig.java index 4f81fe077ca05..cc85b3f65f756 100644 --- a/extensions/mongodb-client/deployment/src/main/java/io/quarkus/mongodb/deployment/MongoClientBuildTimeConfig.java +++ b/extensions/mongodb-client/deployment/src/main/java/io/quarkus/mongodb/deployment/MongoClientBuildTimeConfig.java @@ -1,42 +1,48 @@ package io.quarkus.mongodb.deployment; import io.quarkus.runtime.annotations.ConfigDocSection; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefault; +import io.smallrye.config.WithName; -@ConfigRoot(name = "mongodb", phase = ConfigPhase.BUILD_TIME) -public class MongoClientBuildTimeConfig { +@ConfigMapping(prefix = "quarkus.mongodb") +@ConfigRoot(phase = ConfigPhase.BUILD_TIME) +public interface MongoClientBuildTimeConfig { /** * Whether a health check is published in case the smallrye-health extension is present. */ - @ConfigItem(name = "health.enabled", defaultValue = "true") - public boolean healthEnabled; + @WithDefault("true") + @WithName("health.enabled") + boolean healthEnabled(); /** * Whether metrics are published in case a metrics extension is present. 
*/ - @ConfigItem(name = "metrics.enabled") - public boolean metricsEnabled; + @WithName("metrics.enabled") + @WithDefault("false") + boolean metricsEnabled(); /** * If set to true, the default clients will always be created even if there are no injection points that use them */ - @ConfigItem(name = "force-default-clients") - public boolean forceDefaultClients; + @WithName("force-default-clients") + @WithDefault("false") + boolean forceDefaultClients(); /** - * Whether or not tracing spans of driver commands are sent in case the quarkus-opentelemetry extension is present. + * Whether tracing spans of driver commands are sent in case the quarkus-opentelemetry extension is present. */ - @ConfigItem(name = "tracing.enabled") - public boolean tracingEnabled; + @WithName("tracing.enabled") + @WithDefault("false") + boolean tracingEnabled(); /** * Dev Services. *

    * Dev Services allows Quarkus to automatically start MongoDB in dev and test mode. */ - @ConfigItem @ConfigDocSection(generated = true) - public DevServicesBuildTimeConfig devservices; + DevServicesBuildTimeConfig devservices(); } diff --git a/extensions/mongodb-client/deployment/src/main/java/io/quarkus/mongodb/deployment/MongoClientProcessor.java b/extensions/mongodb-client/deployment/src/main/java/io/quarkus/mongodb/deployment/MongoClientProcessor.java index 68003f57ea613..cf0bdf514f63c 100644 --- a/extensions/mongodb-client/deployment/src/main/java/io/quarkus/mongodb/deployment/MongoClientProcessor.java +++ b/extensions/mongodb-client/deployment/src/main/java/io/quarkus/mongodb/deployment/MongoClientProcessor.java @@ -117,7 +117,7 @@ AdditionalIndexedClassesBuildItem includeDnsTypesToIndex() { @BuildStep AdditionalIndexedClassesBuildItem includeMongoCommandListener(MongoClientBuildTimeConfig buildTimeConfig) { - if (buildTimeConfig.tracingEnabled) { + if (buildTimeConfig.tracingEnabled()) { return new AdditionalIndexedClassesBuildItem( MongoTracingCommandListener.class.getName(), MongoReactiveContextProvider.class.getName()); @@ -130,7 +130,7 @@ void includeMongoCommandMetricListener( BuildProducer additionalIndexedClasses, MongoClientBuildTimeConfig buildTimeConfig, Optional metricsCapability) { - if (!buildTimeConfig.metricsEnabled) { + if (!buildTimeConfig.metricsEnabled()) { return; } boolean withMicrometer = metricsCapability.map(cap -> cap.metricsSupported(MetricsFactory.MICROMETER)) @@ -259,7 +259,7 @@ MongoConnectionPoolListenerBuildItem setupMetrics( // Construction of MongoClient isn't compatible with the MetricsFactoryConsumer pattern. // Use a supplier to defer construction of the pool listener for the supported metrics system - if (buildTimeConfig.metricsEnabled && metricsCapability.isPresent()) { + if (buildTimeConfig.metricsEnabled() && metricsCapability.isPresent()) { if (metricsCapability.get().metricsSupported(MetricsFactory.MICROMETER)) { return new MongoConnectionPoolListenerBuildItem(recorder.createMicrometerConnectionPoolListener()); } else { @@ -347,7 +347,7 @@ void generateClientBeans(MongoClientRecorder recorder, boolean createDefaultBlockingMongoClient = false; boolean createDefaultReactiveMongoClient = false; - if (makeUnremovable || mongoClientBuildTimeConfig.forceDefaultClients) { + if (makeUnremovable || mongoClientBuildTimeConfig.forceDefaultClients()) { // all clients are expected to exist in this case createDefaultBlockingMongoClient = true; createDefaultReactiveMongoClient = true; @@ -369,12 +369,12 @@ void generateClientBeans(MongoClientRecorder recorder, if (createDefaultBlockingMongoClient) { syntheticBeanBuildItemBuildProducer.produce(createBlockingSyntheticBean(recorder, mongodbConfig, - makeUnremovable || mongoClientBuildTimeConfig.forceDefaultClients, + makeUnremovable || mongoClientBuildTimeConfig.forceDefaultClients(), MongoClientBeanUtil.DEFAULT_MONGOCLIENT_NAME, false)); } if (createDefaultReactiveMongoClient) { syntheticBeanBuildItemBuildProducer.produce(createReactiveSyntheticBean(recorder, mongodbConfig, - makeUnremovable || mongoClientBuildTimeConfig.forceDefaultClients, + makeUnremovable || mongoClientBuildTimeConfig.forceDefaultClients(), MongoClientBeanUtil.DEFAULT_MONGOCLIENT_NAME, false)); } @@ -471,7 +471,7 @@ MongoUnremovableClientsBuildItem unremovable(@SuppressWarnings("unused") BuildPr @BuildStep HealthBuildItem addHealthCheck(MongoClientBuildTimeConfig buildTimeConfig) { return new 
HealthBuildItem("io.quarkus.mongodb.health.MongoHealthCheck", - buildTimeConfig.healthEnabled); + buildTimeConfig.healthEnabled()); } @BuildStep diff --git a/extensions/mongodb-client/deployment/src/test/java/io/quarkus/mongodb/deployment/MongoClientProcessorTest.java b/extensions/mongodb-client/deployment/src/test/java/io/quarkus/mongodb/deployment/MongoClientProcessorTest.java index ad437dc2dc882..b9d3a387c3579 100644 --- a/extensions/mongodb-client/deployment/src/test/java/io/quarkus/mongodb/deployment/MongoClientProcessorTest.java +++ b/extensions/mongodb-client/deployment/src/test/java/io/quarkus/mongodb/deployment/MongoClientProcessorTest.java @@ -14,6 +14,7 @@ import io.quarkus.deployment.builditem.AdditionalIndexedClassesBuildItem; import io.quarkus.deployment.metrics.MetricsCapabilityBuildItem; import io.quarkus.runtime.metrics.MetricsFactory; +import io.smallrye.config.SmallRyeConfigBuilder; class MongoClientProcessorTest { private final MongoClientProcessor buildStep = new MongoClientProcessor(); @@ -51,9 +52,34 @@ private static Optional capability(boolean metricsEn } private static MongoClientBuildTimeConfig config(boolean metricsEnabled) { - MongoClientBuildTimeConfig buildTimeConfig = new MongoClientBuildTimeConfig(); - buildTimeConfig.metricsEnabled = metricsEnabled; - return buildTimeConfig; + return new MongoClientBuildTimeConfig() { + @Override + public boolean healthEnabled() { + return true; + } + + @Override + public boolean metricsEnabled() { + return metricsEnabled; + } + + @Override + public boolean forceDefaultClients() { + return false; + } + + @Override + public boolean tracingEnabled() { + return false; + } + + @Override + public DevServicesBuildTimeConfig devservices() { + return new SmallRyeConfigBuilder().addDiscoveredConverters() + .withMapping(DevServicesBuildTimeConfig.class) + .build().getConfigMapping(DevServicesBuildTimeConfig.class); + } + }; } } diff --git a/extensions/mongodb-client/runtime/pom.xml b/extensions/mongodb-client/runtime/pom.xml index 0f6b273f5ad2a..71f2a5190f6fd 100644 --- a/extensions/mongodb-client/runtime/pom.xml +++ b/extensions/mongodb-client/runtime/pom.xml @@ -156,9 +156,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/health/MongoHealthCheck.java b/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/health/MongoHealthCheck.java index 382e3fb699238..0bd849a6f61ff 100644 --- a/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/health/MongoHealthCheck.java +++ b/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/health/MongoHealthCheck.java @@ -48,31 +48,31 @@ public MongoHealthCheck(MongodbConfig config) { Iterable> reactiveHandlers = Arc.container() .select(ReactiveMongoClient.class, Any.Literal.INSTANCE).handles(); - if (config.defaultMongoClientConfig != null) { + if (config.defaultMongoClientConfig() != null) { MongoClient client = getClient(handle, null); ReactiveMongoClient reactiveClient = getReactiveClient(reactiveHandlers, null); if (client != null) { - checks.add(new MongoClientCheck(CLIENT_DEFAULT, client, config.defaultMongoClientConfig)); + checks.add(new MongoClientCheck(CLIENT_DEFAULT, client, config.defaultMongoClientConfig())); } if (reactiveClient != null) { checks.add(new ReactiveMongoClientCheck(CLIENT_DEFAULT_REACTIVE, reactiveClient, - config.defaultMongoClientConfig)); + config.defaultMongoClientConfig())); } } - config.mongoClientConfigs.forEach(new BiConsumer<>() { + 
config.mongoClientConfigs().forEach(new BiConsumer<>() { @Override public void accept(String name, MongoClientConfig cfg) { MongoClient client = getClient(handle, name); ReactiveMongoClient reactiveClient = getReactiveClient(reactiveHandlers, name); if (client != null) { checks.add(new MongoClientCheck(name, client, - config.defaultMongoClientConfig)); + config.defaultMongoClientConfig())); } if (reactiveClient != null) { checks.add(new ReactiveMongoClientCheck(name, reactiveClient, - config.defaultMongoClientConfig)); + config.defaultMongoClientConfig())); } } }); @@ -143,7 +143,7 @@ public HealthCheckResponse call() { return Uni.combine().all().unis(unis) .collectFailures() // We collect all failures to avoid partial responses. - .combinedWith(new Function, HealthCheckResponse>() { + .with(new Function, HealthCheckResponse>() { @Override public HealthCheckResponse apply(List list) { return MongoHealthCheck.this.combine(list, builder); @@ -181,11 +181,11 @@ public Uni> get() { return Uni.createFrom().item(new Supplier() { @Override public Document get() { - return client.getDatabase(config.healthDatabase).runCommand(COMMAND); + return client.getDatabase(config.healthDatabase()).runCommand(COMMAND); } }) .runSubscriptionOn(Infrastructure.getDefaultExecutor()) - .ifNoItem().after(config.readTimeout.orElse(DEFAULT_TIMEOUT)).fail() + .ifNoItem().after(config.readTimeout().orElse(DEFAULT_TIMEOUT)).fail() .onItemOrFailure().transform(toResult(name)); } } @@ -202,8 +202,8 @@ private class ReactiveMongoClientCheck implements Supplier> get() { - return client.getDatabase(config.healthDatabase).runCommand(COMMAND) - .ifNoItem().after(config.readTimeout.orElse(DEFAULT_TIMEOUT)).fail() + return client.getDatabase(config.healthDatabase()).runCommand(COMMAND) + .ifNoItem().after(config.readTimeout().orElse(DEFAULT_TIMEOUT)).fail() .onItemOrFailure().transform(toResult(name)); } } diff --git a/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/CredentialConfig.java b/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/CredentialConfig.java index 004ae28900d33..58bc99212e3f2 100644 --- a/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/CredentialConfig.java +++ b/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/CredentialConfig.java @@ -5,27 +5,24 @@ import io.quarkus.runtime.annotations.ConfigDocMapKey; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; -import io.quarkus.runtime.annotations.ConvertWith; import io.quarkus.runtime.configuration.TrimmedStringConverter; +import io.smallrye.config.WithConverter; /** * Configures the credentials and authentication mechanism to connect to the MongoDB server. */ @ConfigGroup -public class CredentialConfig { +public interface CredentialConfig { /** * Configures the username. */ - @ConfigItem - public Optional username; + Optional username(); /** * Configures the password. */ - @ConfigItem - public Optional password; + Optional password(); /** * Configures the authentication mechanism to use if a credential was supplied. @@ -33,8 +30,7 @@ public class CredentialConfig { * sever version. For the GSSAPI and MONGODB-X509 mechanisms, no password is accepted, only the username. * Supported values: null or {@code GSSAPI|PLAIN|MONGODB-X509|SCRAM_SHA_1|SCRAM_SHA_256|MONGODB_AWS} */ - @ConfigItem - public Optional authMechanism; + Optional authMechanism(); /** * Configures the source of the authentication credentials. 
@@ -44,22 +40,18 @@ public class CredentialConfig { * If the database is specified in neither place, the default value is {@code admin}. This option is only * respected when using the MONGO-CR mechanism (the default). */ - @ConfigItem - public Optional authSource; + Optional authSource(); /** * Allows passing authentication mechanism properties. */ - @ConfigItem @ConfigDocMapKey("property-key") - public Map authMechanismProperties; + Map authMechanismProperties(); /** * The credentials provider name */ - @ConfigItem - @ConvertWith(TrimmedStringConverter.class) - public Optional credentialsProvider = Optional.empty(); + Optional<@WithConverter(TrimmedStringConverter.class) String> credentialsProvider(); /** * The credentials provider bean name. @@ -70,7 +62,5 @@ public class CredentialConfig { *

    * For Vault, the credentials provider bean name is {@code vault-credentials-provider}. */ - @ConfigItem - @ConvertWith(TrimmedStringConverter.class) - public Optional credentialsProviderName = Optional.empty(); + Optional<@WithConverter(TrimmedStringConverter.class) String> credentialsProviderName(); } diff --git a/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/MongoClientConfig.java b/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/MongoClientConfig.java index b69f2dca419ae..c68a4e8c49bdf 100644 --- a/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/MongoClientConfig.java +++ b/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/MongoClientConfig.java @@ -9,10 +9,11 @@ import io.quarkus.runtime.annotations.ConfigDocSection; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; +import io.smallrye.config.WithDefault; +import io.smallrye.config.WithName; @ConfigGroup -public class MongoClientConfig { +public interface MongoClientConfig { /** * Configures the connection string. @@ -60,150 +61,133 @@ public class MongoClientConfig { * override any that are provided via TXT records. * */ - @ConfigItem - public Optional connectionString; + Optional connectionString(); /** * Configures the MongoDB server addresses (one if single mode). * The addresses are passed as {@code host:port}. */ - @ConfigItem(defaultValue = "127.0.0.1:27017") - public List hosts; + @WithDefault("127.0.0.1:27017") + List hosts(); /** * Configure the database name. */ - @ConfigItem - public Optional database; + Optional database(); /** * Configures the application name. */ - @ConfigItem - public Optional applicationName; + Optional applicationName(); /** * Configures the maximum number of connections in the connection pool. */ - @ConfigItem - public OptionalInt maxPoolSize; + OptionalInt maxPoolSize(); /** * Configures the minimum number of connections in the connection pool. */ - @ConfigItem - public OptionalInt minPoolSize; + OptionalInt minPoolSize(); /** * Maximum idle time of a pooled connection. A connection that exceeds this limit will be closed. */ - @ConfigItem - public Optional maxConnectionIdleTime; + Optional maxConnectionIdleTime(); /** * Maximum lifetime of a pooled connection. A connection that exceeds this limit will be closed. */ - @ConfigItem - public Optional maxConnectionLifeTime; + Optional maxConnectionLifeTime(); /** * Configures the time period between runs of the maintenance job. */ - @ConfigItem - public Optional maintenanceFrequency; + Optional maintenanceFrequency(); /** * Configures period of time to wait before running the first maintenance job on the connection pool. */ - @ConfigItem - public Optional maintenanceInitialDelay; + Optional maintenanceInitialDelay(); /** * How long a connection can take to be opened before timing out. */ - @ConfigItem - public Optional connectTimeout; + Optional connectTimeout(); /** * How long a socket read can take before timing out. */ - @ConfigItem - public Optional readTimeout; + Optional readTimeout(); /** * If connecting with TLS, this option enables insecure TLS connections. */ - @ConfigItem - public boolean tlsInsecure; + @WithDefault("false") + boolean tlsInsecure(); /** * Whether to connect using TLS. */ - @ConfigItem - public boolean tls; + @WithDefault("false") + boolean tls(); /** * Implies that the hosts given are a seed list, and the driver will attempt to find all members of the set. 
*/ - @ConfigItem - public Optional replicaSetName; + Optional replicaSetName(); /** * How long the driver will wait for server selection to succeed before throwing an exception. */ - @ConfigItem - public Optional serverSelectionTimeout; + Optional serverSelectionTimeout(); /** * When choosing among multiple MongoDB servers to send a request, the driver will only send that request to a * server whose ping time is less than or equal to the server with the fastest ping time plus the local threshold. */ - @ConfigItem - public Optional localThreshold; + Optional localThreshold(); /** * The frequency that the driver will attempt to determine the current state of each server in the cluster. */ - @ConfigItem - public Optional heartbeatFrequency; + Optional heartbeatFrequency(); /** * Write concern */ @ConfigDocSection - public WriteConcernConfig writeConcern; + WriteConcernConfig writeConcern(); /** * Configures the read concern. * Supported values are: {@code local|majority|linearizable|snapshot|available} */ - @ConfigItem - public Optional readConcern; + Optional readConcern(); /** * Configures the read preference. * Supported values are: {@code primary|primaryPreferred|secondary|secondaryPreferred|nearest} */ - @ConfigItem - public Optional readPreference; + Optional readPreference(); /** * Credentials and authentication mechanism */ @ConfigDocSection - public CredentialConfig credentials; + CredentialConfig credentials(); /** * The database used during the readiness health checks */ - @ConfigItem(name = "health.database", defaultValue = "admin") - public String healthDatabase; + @WithName("health.database") + @WithDefault("admin") + String healthDatabase(); /** * Configures the UUID representation to use when encoding instances of {@link java.util.UUID} * and when decoding BSON binary values with subtype of 3. 
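Note on the call-site impact of this migration: every former public field read becomes an interface accessor call, and the Optional/OptionalInt return types compose with ifPresent instead of null checks. A minimal sketch of the idea (the helper class and method below are illustrative, not code from this change):

    import java.util.concurrent.TimeUnit;
    import com.mongodb.connection.ConnectionPoolSettings;
    import io.quarkus.mongodb.runtime.MongoClientConfig;

    class PoolSettingsSketch {
        static void apply(MongoClientConfig config, ConnectionPoolSettings.Builder builder) {
            // formerly config.maxPoolSize (a public field), now an accessor method
            config.maxPoolSize().ifPresent(builder::maxSize);
            config.minPoolSize().ifPresent(builder::minSize);
            config.maxConnectionIdleTime()
                    .ifPresent(idle -> builder.maxConnectionIdleTime(idle.toMillis(), TimeUnit.MILLISECONDS));
        }
    }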
*/ - @ConfigItem - public Optional uuidRepresentation; + Optional uuidRepresentation(); } diff --git a/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/MongoClientRecorder.java b/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/MongoClientRecorder.java index 01fa1bf4caebd..0564474305dd4 100644 --- a/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/MongoClientRecorder.java +++ b/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/MongoClientRecorder.java @@ -115,17 +115,17 @@ public ConnectionPoolListener get() { */ public void performInitialization(MongodbConfig config, RuntimeValue vertx) { MongoDnsClientProvider.vertx = vertx.getValue(); - initializeDNSLookup(config.defaultMongoClientConfig); - for (MongoClientConfig mongoClientConfig : config.mongoClientConfigs.values()) { + initializeDNSLookup(config.defaultMongoClientConfig()); + for (MongoClientConfig mongoClientConfig : config.mongoClientConfigs().values()) { initializeDNSLookup(mongoClientConfig); } } private void initializeDNSLookup(MongoClientConfig mongoClientConfig) { - if (mongoClientConfig.connectionString.isEmpty()) { + if (mongoClientConfig.connectionString().isEmpty()) { return; } // this ensures that DNS resolution will take place if necessary - new ConnectionString(mongoClientConfig.connectionString.get()); + new ConnectionString(mongoClientConfig.connectionString().get()); } } diff --git a/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/MongoClients.java b/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/MongoClients.java index 09cc31391f018..876ed9d8e9a78 100644 --- a/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/MongoClients.java +++ b/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/MongoClients.java @@ -130,8 +130,8 @@ public ReactiveMongoClient createReactiveMongoClient(String clientName) } public MongoClientConfig getMatchingMongoClientConfig(String clientName) { - return MongoClientBeanUtil.isDefault(clientName) ? mongodbConfig.defaultMongoClientConfig - : mongodbConfig.mongoClientConfigs.get(clientName); + return MongoClientBeanUtil.isDefault(clientName) ? 
mongodbConfig.defaultMongoClientConfig() + : mongodbConfig.mongoClientConfigs().get(clientName); } private static class ClusterSettingBuilder implements Block { @@ -143,26 +143,26 @@ public ClusterSettingBuilder(MongoClientConfig config) { @Override public void apply(ClusterSettings.Builder builder) { - Optional maybeConnectionString = config.connectionString; + Optional maybeConnectionString = config.connectionString(); if (maybeConnectionString.isEmpty()) { // Parse hosts - List hosts = parseHosts(config.hosts); + List hosts = parseHosts(config.hosts()); builder.hosts(hosts); - if (hosts.size() == 1 && config.replicaSetName.isEmpty()) { + if (hosts.size() == 1 && config.replicaSetName().isEmpty()) { builder.mode(ClusterConnectionMode.SINGLE); } else { builder.mode(ClusterConnectionMode.MULTIPLE); } } - if (config.localThreshold.isPresent()) { - builder.localThreshold(config.localThreshold.get().toMillis(), TimeUnit.MILLISECONDS); + if (config.localThreshold().isPresent()) { + builder.localThreshold(config.localThreshold().get().toMillis(), TimeUnit.MILLISECONDS); } - config.replicaSetName.ifPresent(builder::requiredReplicaSetName); + config.replicaSetName().ifPresent(builder::requiredReplicaSetName); - if (config.serverSelectionTimeout.isPresent()) { - builder.serverSelectionTimeout(config.serverSelectionTimeout.get().toMillis(), TimeUnit.MILLISECONDS); + if (config.serverSelectionTimeout().isPresent()) { + builder.serverSelectionTimeout(config.serverSelectionTimeout().get().toMillis(), TimeUnit.MILLISECONDS); } } } @@ -178,19 +178,19 @@ public ConnectionPoolSettingsBuilder(MongoClientConfig config, List maybeConnectionString = config.connectionString; + Optional maybeConnectionString = config.connectionString(); if (maybeConnectionString.isPresent()) { connectionString = new ConnectionString(maybeConnectionString.get()); settings.applyConnectionString(connectionString); @@ -273,25 +273,25 @@ private MongoClientSettings createMongoConfiguration(String name, MongoClientCon } settings.commandListenerList(commandListenerList); - config.applicationName.ifPresent(settings::applicationName); + config.applicationName().ifPresent(settings::applicationName); - if (config.credentials != null) { + if (config.credentials() != null) { MongoCredential credential = createMongoCredential(config); if (credential != null) { settings.credential(credential); } } - if (config.writeConcern != null) { - WriteConcernConfig wc = config.writeConcern; - WriteConcern concern = (wc.safe ? WriteConcern.ACKNOWLEDGED : WriteConcern.UNACKNOWLEDGED) - .withJournal(wc.journal); + if (config.writeConcern() != null) { + WriteConcernConfig wc = config.writeConcern(); + WriteConcern concern = (wc.safe() ? 
WriteConcern.ACKNOWLEDGED : WriteConcern.UNACKNOWLEDGED) + .withJournal(wc.journal()); - if (wc.wTimeout.isPresent()) { - concern = concern.withWTimeout(wc.wTimeout.get().toMillis(), TimeUnit.MILLISECONDS); + if (wc.wTimeout().isPresent()) { + concern = concern.withWTimeout(wc.wTimeout().get().toMillis(), TimeUnit.MILLISECONDS); } - Optional maybeW = wc.w; + Optional maybeW = wc.w(); if (maybeW.isPresent()) { String w = maybeW.get(); if ("majority".equalsIgnoreCase(w)) { @@ -302,9 +302,9 @@ private MongoClientSettings createMongoConfiguration(String name, MongoClientCon } } settings.writeConcern(concern); - settings.retryWrites(wc.retryWrites); + settings.retryWrites(wc.retryWrites()); } - if (config.tls) { + if (config.tls()) { settings.applyToSslSettings(new SslSettingsBuilder(config, mongoClientSupport.isDisableSslSupport())); } settings.applyToClusterSettings(new ClusterSettingBuilder(config)); @@ -313,15 +313,15 @@ private MongoClientSettings createMongoConfiguration(String name, MongoClientCon settings.applyToServerSettings(new ServerSettingsBuilder(config)); settings.applyToSocketSettings(new SocketSettingsBuilder(config)); - if (config.readPreference.isPresent()) { - settings.readPreference(ReadPreference.valueOf(config.readPreference.get())); + if (config.readPreference().isPresent()) { + settings.readPreference(ReadPreference.valueOf(config.readPreference().get())); } - if (config.readConcern.isPresent()) { - settings.readConcern(new ReadConcern(ReadConcernLevel.fromString(config.readConcern.get()))); + if (config.readConcern().isPresent()) { + settings.readConcern(new ReadConcern(ReadConcernLevel.fromString(config.readConcern().get()))); } - if (config.uuidRepresentation.isPresent()) { - settings.uuidRepresentation(config.uuidRepresentation.get()); + if (config.uuidRepresentation().isPresent()) { + settings.uuidRepresentation(config.uuidRepresentation().get()); } settings = customize(name, settings); @@ -423,15 +423,15 @@ private MongoCredential createMongoCredential(MongoClientConfig config) { // get the authsource, or the database from the config, or 'admin' as it is the default auth source in mongo // and null is not allowed - String authSource = config.credentials.authSource.orElse(config.database.orElse("admin")); + String authSource = config.credentials().authSource().orElse(config.database().orElse("admin")); // AuthMechanism AuthenticationMechanism mechanism = null; - Optional maybeMechanism = config.credentials.authMechanism; + Optional maybeMechanism = config.credentials().authMechanism(); if (maybeMechanism.isPresent()) { mechanism = getAuthenticationMechanism(maybeMechanism.get()); } - UsernamePassword usernamePassword = determineUserNamePassword(config.credentials); + UsernamePassword usernamePassword = determineUserNamePassword(config.credentials()); if (usernamePassword == null) { if (mechanism == null) { return null; @@ -461,8 +461,8 @@ private MongoCredential createMongoCredential(MongoClientConfig config) { } //add the properties - if (!config.credentials.authMechanismProperties.isEmpty()) { - for (Map.Entry entry : config.credentials.authMechanismProperties.entrySet()) { + if (!config.credentials().authMechanismProperties().isEmpty()) { + for (Map.Entry entry : config.credentials().authMechanismProperties().entrySet()) { credential = credential.withMechanismProperty(entry.getKey(), entry.getValue()); } } @@ -471,20 +471,20 @@ private MongoCredential createMongoCredential(MongoClientConfig config) { } private UsernamePassword 
determineUserNamePassword(CredentialConfig config) { - if (config.credentialsProvider.isPresent()) { - String beanName = config.credentialsProviderName.orElse(null); + if (config.credentialsProvider().isPresent()) { + String beanName = config.credentialsProviderName().orElse(null); CredentialsProvider credentialsProvider = CredentialsProviderFinder.find(beanName); - String name = config.credentialsProvider.get(); + String name = config.credentialsProvider().get(); Map credentials = credentialsProvider.getCredentials(name); String user = credentials.get(USER_PROPERTY_NAME); String password = credentials.get(PASSWORD_PROPERTY_NAME); return new UsernamePassword(user, password.toCharArray()); } else { - String username = config.username.orElse(null); + String username = config.username().orElse(null); if (username == null) { return null; } - char[] password = config.password.map(String::toCharArray).orElse(null); + char[] password = config.password().map(String::toCharArray).orElse(null); return new UsernamePassword(username, password); } } diff --git a/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/MongodbConfig.java b/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/MongodbConfig.java index 0fdcc60dd7aa1..44001e6f50057 100644 --- a/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/MongodbConfig.java +++ b/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/MongodbConfig.java @@ -5,31 +5,35 @@ import java.util.Optional; import java.util.OptionalInt; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefault; +import io.smallrye.config.WithName; +import io.smallrye.config.WithParentName; -@ConfigRoot(name = MongodbConfig.CONFIG_NAME, phase = ConfigPhase.RUN_TIME) -public class MongodbConfig { - public static final String CONFIG_NAME = "mongodb"; +@ConfigMapping(prefix = "quarkus.mongodb") +@ConfigRoot(phase = ConfigPhase.RUN_TIME) +public interface MongodbConfig { + String CONFIG_NAME = "mongodb"; @Deprecated - public static final String NATIVE_DNS_LOG_ACTIVITY = "native.dns.log-activity"; - public static final String DNS_LOG_ACTIVITY = "dns.log-activity"; + String NATIVE_DNS_LOG_ACTIVITY = "native.dns.log-activity"; + String DNS_LOG_ACTIVITY = "dns.log-activity"; @Deprecated - public static final String NATIVE_DNS_SERVER_HOST = "native.dns.server-host"; - public static final String DNS_SERVER_HOST = "dns.server-host"; + String NATIVE_DNS_SERVER_HOST = "native.dns.server-host"; + String DNS_SERVER_HOST = "dns.server-host"; @Deprecated - public static final String NATIVE_DNS_SERVER_PORT = "native.dns.server-port"; - public static final String DNS_SERVER_PORT = "dns.server-port"; + String NATIVE_DNS_SERVER_PORT = "native.dns.server-port"; + String DNS_SERVER_PORT = "dns.server-port"; @Deprecated - public static final String NATIVE_DNS_LOOKUP_TIMEOUT = "native.dns.lookup-timeout"; - public static final String DNS_LOOKUP_TIMEOUT = "dns.lookup-timeout"; + String NATIVE_DNS_LOOKUP_TIMEOUT = "native.dns.lookup-timeout"; + String DNS_LOOKUP_TIMEOUT = "dns.lookup-timeout"; /** * The default mongo client connection. */ - @ConfigItem(name = ConfigItem.PARENT) - public MongoClientConfig defaultMongoClientConfig; + @WithParentName + MongoClientConfig defaultMongoClientConfig(); /** * Configures additional mongo client connections. 
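Because both the default client and the map of named clients are annotated with @WithParentName, they share the quarkus.mongodb prefix: quarkus.mongodb.connection-string binds to defaultMongoClientConfig(), while quarkus.mongodb.<client-name>.connection-string ends up in the mongoClientConfigs() map. A minimal sketch of resolving a client configuration programmatically (the helper and the fallback logic shown here are illustrative only):

    import org.eclipse.microprofile.config.ConfigProvider;
    import io.smallrye.config.SmallRyeConfig;
    import io.quarkus.mongodb.runtime.MongoClientConfig;
    import io.quarkus.mongodb.runtime.MongodbConfig;

    class MongoConfigLookupSketch {
        static MongoClientConfig forClient(String clientName) {
            SmallRyeConfig config = ConfigProvider.getConfig().unwrap(SmallRyeConfig.class);
            MongodbConfig mongodb = config.getConfigMapping(MongodbConfig.class);
            // e.g. "analytics" -> quarkus.mongodb.analytics.*; absent -> fall back to the default client
            MongoClientConfig named = mongodb.mongoClientConfigs().get(clientName);
            return named != null ? named : mongodb.defaultMongoClientConfig();
        }
    }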
@@ -54,8 +58,8 @@ public class MongodbConfig { * } * */ - @ConfigItem(name = ConfigItem.PARENT) - public Map mongoClientConfigs; + @WithParentName + Map mongoClientConfigs(); /** * The default DNS resolver used to handle {@code mongo+srv://} urls cannot be used in a native executable. @@ -67,8 +71,9 @@ public class MongodbConfig { * @deprecated This resolver is always used */ @Deprecated - @ConfigItem(name = "native.dns.use-vertx-dns-resolver", defaultValue = "false") - public boolean useVertxDnsResolverInNativeMode; + @WithName("native.dns.use-vertx-dns-resolver") + @WithDefault("false") + boolean useVertxDnsResolverInNativeMode(); /** * If {@code native.dns.use-vertx-dns-resolver} is set to {@code true}, this property configures the DNS server. @@ -78,15 +83,15 @@ public class MongodbConfig { * @deprecated this property has been deprecated in favor of {@link #dnsServer} */ @Deprecated - @ConfigItem(name = NATIVE_DNS_SERVER_HOST) - public Optional dnsServerInNativeMode; + @WithName(NATIVE_DNS_SERVER_HOST) + Optional dnsServerInNativeMode(); /** * This property configures the DNS server. If the server is not set, it tries to read the first {@code nameserver} from * {@code /etc /resolv.conf} (if the file exists), otherwise fallback to the default. */ - @ConfigItem(name = DNS_SERVER_HOST) - public Optional dnsServer; + @WithName(DNS_SERVER_HOST) + Optional dnsServer(); /** * If {@code native.dns.use-vertx-dns-resolver} is set to {@code true}, this property configures the DNS server port. @@ -94,13 +99,16 @@ public class MongodbConfig { * @deprecated this property has been deprecated in favor of {@link #dnsServerPort} */ @Deprecated - @ConfigItem(name = NATIVE_DNS_SERVER_PORT, defaultValue = "53") - public OptionalInt dnsServerPortInNativeMode; + @WithName(NATIVE_DNS_SERVER_PORT) + @WithDefault("53") + OptionalInt dnsServerPortInNativeMode(); + /** * This property configures the DNS server port. */ - @ConfigItem(name = DNS_SERVER_PORT, defaultValue = "53") - public OptionalInt dnsServerPort; + @WithName(DNS_SERVER_PORT) + @WithDefault("53") + OptionalInt dnsServerPort(); /** * If {@code native.dns.use-vertx-dns-resolver} is set to {@code true}, this property configures the DNS lookup timeout @@ -109,15 +117,17 @@ public class MongodbConfig { * @deprecated this property has been deprecated in favor of {@link #dnsLookupTimeout} */ @Deprecated - @ConfigItem(name = NATIVE_DNS_LOOKUP_TIMEOUT, defaultValue = "5s") - public Duration dnsLookupTimeoutInNativeMode; + @WithName(NATIVE_DNS_LOOKUP_TIMEOUT) + @WithDefault("5s") + Duration dnsLookupTimeoutInNativeMode(); /** * If {@code native.dns.use-vertx-dns-resolver} is set to {@code true}, this property configures the DNS lookup timeout * duration. */ - @ConfigItem(name = DNS_LOOKUP_TIMEOUT, defaultValue = "5s") - public Duration dnsLookupTimeout; + @WithName(DNS_LOOKUP_TIMEOUT) + @WithDefault("5s") + Duration dnsLookupTimeout(); /** * If {@code native.dns.use-vertx-dns-resolver} is set to {@code true}, this property enables the logging ot the @@ -126,12 +136,14 @@ public class MongodbConfig { * @deprecated this property has been deprecated in favor of {@link #dnsLookupLogActivity} */ @Deprecated - @ConfigItem(name = NATIVE_DNS_LOG_ACTIVITY, defaultValue = "false") - public Optional dnsLookupLogActivityInNativeMode; + @WithDefault("false") + @WithName(NATIVE_DNS_LOG_ACTIVITY) + Optional dnsLookupLogActivityInNativeMode(); /** * This property enables the logging ot the DNS lookup. It can be useful to understand why the lookup fails. 
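Property names are unchanged by this migration: with the quarkus.mongodb prefix, @WithName(DNS_SERVER_HOST) still resolves to quarkus.mongodb.dns.server-host, and the deprecated @WithName(NATIVE_DNS_SERVER_HOST) variant to quarkus.mongodb.native.dns.server-host, exactly as the old @ConfigItem(name = ...) declarations did.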
*/ - @ConfigItem(name = DNS_LOG_ACTIVITY, defaultValue = "false") - public Optional dnsLookupLogActivity; + @WithDefault("false") + @WithName(DNS_LOG_ACTIVITY) + Optional dnsLookupLogActivity(); } diff --git a/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/WriteConcernConfig.java b/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/WriteConcernConfig.java index 49b05bd00181d..3409c094d6620 100644 --- a/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/WriteConcernConfig.java +++ b/extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/WriteConcernConfig.java @@ -4,13 +4,13 @@ import java.util.Optional; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; +import io.smallrye.config.WithDefault; /** * Configures the write concern. */ @ConfigGroup -public class WriteConcernConfig { +public interface WriteConcernConfig { /** * Configures the safety. @@ -19,35 +19,33 @@ public class WriteConcernConfig { * If set fo *
  • {@code true}: the driver ensures that all writes are acknowledged by the MongoDB server, or else throws an exception.
  • {@code false}: the driver does not ensure that all writes are acknowledged by the MongoDB server. */ - @ConfigItem(defaultValue = "true") - public boolean safe; + @WithDefault("true") + boolean safe(); /** * Configures the journal writing aspect. * If set to {@code true}: the driver waits for the server to group commit to the journal file on disk. * If set to {@code false}: the driver does not wait for the server to group commit to the journal file on disk. */ - @ConfigItem(defaultValue = "true") - public boolean journal; + @WithDefault("true") + boolean journal(); /** * When set, the driver adds {@code w: wValue} to all write commands. It requires {@code safe} to be {@code true}. * The value is typically a number, but can also be the {@code majority} string. */ - @ConfigItem - public Optional w; + Optional w(); /** * If set to {@code true}, the driver will retry supported write operations if they fail due to a network error. */ - @ConfigItem - public boolean retryWrites; + @WithDefault("false") + boolean retryWrites(); /** * When set, the driver adds {@code wtimeout : ms } to all write commands. It requires {@code safe} to be * {@code true}. */ - @ConfigItem - public Optional wTimeout; + Optional wTimeout(); } diff --git a/extensions/narayana-jta/deployment/pom.xml b/extensions/narayana-jta/deployment/pom.xml index cd8deb7598f39..9cb051259de8a 100644 --- a/extensions/narayana-jta/deployment/pom.xml +++ b/extensions/narayana-jta/deployment/pom.xml @@ -55,9 +55,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/narayana-jta/deployment/src/main/java/io/quarkus/narayana/jta/deployment/NarayanaJtaProcessor.java b/extensions/narayana-jta/deployment/src/main/java/io/quarkus/narayana/jta/deployment/NarayanaJtaProcessor.java index c821bacf9572f..c82802a526d22 100644 --- a/extensions/narayana-jta/deployment/src/main/java/io/quarkus/narayana/jta/deployment/NarayanaJtaProcessor.java +++ b/extensions/narayana-jta/deployment/src/main/java/io/quarkus/narayana/jta/deployment/NarayanaJtaProcessor.java @@ -151,7 +151,7 @@ public void build(NarayanaJtaRecorder recorder, builder.addBeanClass(TransactionalInterceptorNotSupported.class); additionalBeans.produce(builder.build()); - transactionManagerBuildTimeConfig.unsafeMultipleLastResources.ifPresent(mode -> { + transactionManagerBuildTimeConfig.unsafeMultipleLastResources().ifPresent(mode -> { if (!mode.equals(UnsafeMultipleLastResourcesMode.FAIL)) { recorder.logUnsafeMultipleLastResourcesOnStartup(mode); } @@ -178,7 +178,7 @@ public void build(NarayanaJtaRecorder recorder, @BuildStep(onlyIf = NativeOrNativeSourcesBuild.class) public void nativeImageFeature(TransactionManagerBuildTimeConfig transactionManagerBuildTimeConfig, BuildProducer nativeImageFeatures) { - switch (transactionManagerBuildTimeConfig.unsafeMultipleLastResources + switch (transactionManagerBuildTimeConfig.unsafeMultipleLastResources() .orElse(UnsafeMultipleLastResourcesMode.DEFAULT)) { case ALLOW, WARN_FIRST, WARN_EACH -> { nativeImageFeatures.produce(new NativeImageFeatureBuildItem(DisableLoggingFeature.class)); @@ -264,7 +264,7 @@ private void allowUnsafeMultipleLastResources(NarayanaJtaRecorder recorder, TransactionManagerBuildTimeConfig transactionManagerBuildTimeConfig, Capabilities capabilities, BuildProducer logCleanupFilters, BuildProducer nativeImageFeatures) { - switch (transactionManagerBuildTimeConfig.unsafeMultipleLastResources + switch (transactionManagerBuildTimeConfig.unsafeMultipleLastResources() 
.orElse(UnsafeMultipleLastResourcesMode.DEFAULT)) { case ALLOW -> { recorder.allowUnsafeMultipleLastResources(capabilities.isPresent(Capability.AGROAL), true); diff --git a/extensions/narayana-jta/runtime/pom.xml b/extensions/narayana-jta/runtime/pom.xml index efd5c6fed0f5f..56552a061022d 100644 --- a/extensions/narayana-jta/runtime/pom.xml +++ b/extensions/narayana-jta/runtime/pom.xml @@ -131,9 +131,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/QuarkusTransactionImpl.java b/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/QuarkusTransactionImpl.java index 0384ead6f5ccf..e1ef05163511b 100644 --- a/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/QuarkusTransactionImpl.java +++ b/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/QuarkusTransactionImpl.java @@ -210,7 +210,7 @@ private static void begin(RunOptionsBase options) { try { getUserTransaction().setTransactionTimeout( (int) Arc.container().instance(TransactionManagerConfiguration.class) - .get().defaultTransactionTimeout.toSeconds()); + .get().defaultTransactionTimeout().toSeconds()); } catch (SystemException e) { log.error("Failed to reset transaction timeout", e); } diff --git a/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/RequestScopedTransaction.java b/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/RequestScopedTransaction.java index 9421b0c964ea7..f9bd1fbf35140 100644 --- a/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/RequestScopedTransaction.java +++ b/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/RequestScopedTransaction.java @@ -69,7 +69,7 @@ void begin(BeginOptions options) { if (timeout > 0) { try { userTransaction.setTransactionTimeout( - (int) transactionManagerConfiguration.defaultTransactionTimeout.toSeconds()); + (int) transactionManagerConfiguration.defaultTransactionTimeout().toSeconds()); } catch (SystemException e) { throw new QuarkusTransactionException(e); } diff --git a/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/runtime/NarayanaJtaProducers.java b/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/runtime/NarayanaJtaProducers.java index 327acd72af42c..3981627cb838a 100644 --- a/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/runtime/NarayanaJtaProducers.java +++ b/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/runtime/NarayanaJtaProducers.java @@ -7,7 +7,6 @@ import jakarta.transaction.TransactionSynchronizationRegistry; import jakarta.transaction.UserTransaction; -import org.jboss.logging.Logger; import org.jboss.tm.JBossXATerminator; import org.jboss.tm.XAResourceRecoveryRegistry; import org.jboss.tm.usertx.UserTransactionRegistry; @@ -20,7 +19,6 @@ @Dependent public class NarayanaJtaProducers { - private static final Logger log = Logger.getLogger(NarayanaJtaProducers.class); @Produces @ApplicationScoped diff --git a/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/runtime/NarayanaJtaRecorder.java b/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/runtime/NarayanaJtaRecorder.java index 3363fb1276547..21ee50812284d 100644 --- a/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/runtime/NarayanaJtaRecorder.java +++ 
b/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/runtime/NarayanaJtaRecorder.java @@ -6,7 +6,6 @@ import java.security.NoSuchAlgorithmException; import java.util.Arrays; import java.util.Base64; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Properties; @@ -43,20 +42,20 @@ public class NarayanaJtaRecorder { public void setNodeName(final TransactionManagerConfiguration transactions) { try { - if (transactions.nodeName.getBytes(StandardCharsets.UTF_8).length > 28 - && transactions.shortenNodeNameIfNecessary) { - shortenNodeName(transactions); + String nodeName = transactions.nodeName(); + if (nodeName.getBytes(StandardCharsets.UTF_8).length > 28 + && transactions.shortenNodeNameIfNecessary()) { + nodeName = shortenNodeName(transactions.nodeName()); } - arjPropertyManager.getCoreEnvironmentBean().setNodeIdentifier(transactions.nodeName); - jtaPropertyManager.getJTAEnvironmentBean().setXaRecoveryNodes(Collections.singletonList(transactions.nodeName)); - TxControl.setXANodeName(transactions.nodeName); + arjPropertyManager.getCoreEnvironmentBean().setNodeIdentifier(nodeName); + jtaPropertyManager.getJTAEnvironmentBean().setXaRecoveryNodes(List.of(nodeName)); + TxControl.setXANodeName(nodeName); } catch (CoreEnvironmentBeanException | NoSuchAlgorithmException e) { log.error("Could not set node name", e); } } - private static void shortenNodeName(TransactionManagerConfiguration transactions) throws NoSuchAlgorithmException { - String originalNodeName = transactions.nodeName; + String shortenNodeName(String originalNodeName) throws NoSuchAlgorithmException { log.warnf("Node name \"%s\" is longer than 28 bytes, shortening it by using %s.", originalNodeName, HASH_ALGORITHM_FOR_SHORTENING); final byte[] nodeNameAsBytes = originalNodeName.getBytes(); @@ -69,8 +68,9 @@ private static void shortenNodeName(TransactionManagerConfiguration transactions //truncate the array byte[] slice = Arrays.copyOfRange(base64Result, 0, 28); - transactions.nodeName = new String(slice, StandardCharsets.UTF_8); - log.warnf("New node name is \"%s\"", transactions.nodeName); + String shorterNodeName = new String(slice, StandardCharsets.UTF_8); + log.warnf("New node name is \"%s\"", shorterNodeName); + return shorterNodeName; } public void setDefaultProperties(Properties properties) { @@ -92,8 +92,8 @@ public void setDefaultProperties(Properties properties) { public void setDefaultTimeout(TransactionManagerConfiguration transactions) { arjPropertyManager.getCoordinatorEnvironmentBean() - .setDefaultTimeout((int) transactions.defaultTransactionTimeout.getSeconds()); - TxControl.setDefaultTimeout((int) transactions.defaultTransactionTimeout.getSeconds()); + .setDefaultTimeout((int) transactions.defaultTransactionTimeout().getSeconds()); + TxControl.setDefaultTimeout((int) transactions.defaultTransactionTimeout().getSeconds()); } public static Properties getDefaultProperties() { @@ -107,17 +107,17 @@ public void disableTransactionStatusManager() { public void setConfig(final TransactionManagerConfiguration transactions) { List objectStores = Arrays.asList(null, "communicationStore", "stateStore"); - if (transactions.objectStore.type.equals(ObjectStoreType.File_System)) { + if (transactions.objectStore().type().equals(ObjectStoreType.File_System)) { objectStores.forEach(name -> setObjectStoreDir(name, transactions)); - } else if (transactions.objectStore.type.equals(ObjectStoreType.JDBC)) { + } else if 
(transactions.objectStore().type().equals(ObjectStoreType.JDBC)) { objectStores.forEach(name -> setJDBCObjectStore(name, transactions)); } BeanPopulator.getDefaultInstance(RecoveryEnvironmentBean.class) - .setRecoveryModuleClassNames(transactions.recoveryModules); + .setRecoveryModuleClassNames(transactions.recoveryModules()); BeanPopulator.getDefaultInstance(RecoveryEnvironmentBean.class) - .setExpiryScannerClassNames(transactions.expiryScanners); + .setExpiryScannerClassNames(transactions.expiryScanners()); BeanPopulator.getDefaultInstance(JTAEnvironmentBean.class) - .setXaResourceOrphanFilterClassNames(transactions.xaResourceOrphanFilters); + .setXaResourceOrphanFilterClassNames(transactions.xaResourceOrphanFilters()); } /** @@ -145,25 +145,26 @@ public void logUnsafeMultipleLastResourcesOnStartup( } private void setObjectStoreDir(String name, TransactionManagerConfiguration config) { - BeanPopulator.getNamedInstance(ObjectStoreEnvironmentBean.class, name).setObjectStoreDir(config.objectStore.directory); + BeanPopulator.getNamedInstance(ObjectStoreEnvironmentBean.class, name) + .setObjectStoreDir(config.objectStore().directory()); } private void setJDBCObjectStore(String name, TransactionManagerConfiguration config) { final ObjectStoreEnvironmentBean instance = BeanPopulator.getNamedInstance(ObjectStoreEnvironmentBean.class, name); instance.setObjectStoreType(JDBCStore.class.getName()); - instance.setJdbcDataSource(new QuarkusDataSource(config.objectStore.datasource)); - instance.setCreateTable(config.objectStore.createTable); - instance.setDropTable(config.objectStore.dropTable); - instance.setTablePrefix(config.objectStore.tablePrefix); + instance.setJdbcDataSource(new QuarkusDataSource(config.objectStore().datasource())); + instance.setCreateTable(config.objectStore().createTable()); + instance.setDropTable(config.objectStore().dropTable()); + instance.setTablePrefix(config.objectStore().tablePrefix()); } public void startRecoveryService(final TransactionManagerConfiguration transactions, Map configuredDataSourcesConfigKeys, Set dataSourcesWithTransactionIntegration) { - if (transactions.objectStore.type.equals(ObjectStoreType.JDBC)) { + if (transactions.objectStore().type().equals(ObjectStoreType.JDBC)) { final String objectStoreDataSourceName; - if (transactions.objectStore.datasource.isEmpty()) { + if (transactions.objectStore().datasource().isEmpty()) { if (!DataSourceUtil.hasDefault(configuredDataSourcesConfigKeys.keySet())) { throw new ConfigurationException( "The Narayana JTA extension does not have a datasource configured as the JDBC object store," @@ -176,7 +177,7 @@ public void startRecoveryService(final TransactionManagerConfiguration transacti } objectStoreDataSourceName = DataSourceUtil.DEFAULT_DATASOURCE_NAME; } else { - objectStoreDataSourceName = transactions.objectStore.datasource.get(); + objectStoreDataSourceName = transactions.objectStore().datasource().get(); if (!configuredDataSourcesConfigKeys.keySet().contains(objectStoreDataSourceName)) { throw new ConfigurationException( @@ -200,7 +201,7 @@ public void startRecoveryService(final TransactionManagerConfiguration transacti objectStoreDataSourceName, configuredDataSourcesConfigKeys.get(objectStoreDataSourceName))); } } - if (transactions.enableRecovery) { + if (transactions.enableRecovery()) { QuarkusRecoveryService.getInstance().create(); QuarkusRecoveryService.getInstance().start(); } @@ -208,7 +209,7 @@ public void startRecoveryService(final TransactionManagerConfiguration transacti public void 
handleShutdown(ShutdownContext context, TransactionManagerConfiguration transactions) { context.addShutdownTask(() -> { - if (transactions.enableRecovery) { + if (transactions.enableRecovery()) { try { QuarkusRecoveryService.getInstance().stop(); } catch (Exception e) { diff --git a/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/runtime/TransactionManagerBuildTimeConfig.java b/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/runtime/TransactionManagerBuildTimeConfig.java index 91cbeccf433d6..aabb4941dd334 100644 --- a/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/runtime/TransactionManagerBuildTimeConfig.java +++ b/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/runtime/TransactionManagerBuildTimeConfig.java @@ -2,12 +2,14 @@ import java.util.Optional; -import io.quarkus.runtime.annotations.ConfigItem; +import io.quarkus.runtime.annotations.ConfigDocDefault; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; @ConfigRoot(phase = ConfigPhase.BUILD_TIME) -public final class TransactionManagerBuildTimeConfig { +@ConfigMapping(prefix = "quarkus.transaction-manager") +public interface TransactionManagerBuildTimeConfig { /** * Define the behavior when using multiple XA unaware resources in the same transactional demarcation. *

    @@ -31,8 +33,8 @@ public final class TransactionManagerBuildTimeConfig { * @deprecated This property is planned for removal in a future version. */ @Deprecated(forRemoval = true) - @ConfigItem(defaultValueDocumentation = "fail") - public Optional unsafeMultipleLastResources; + @ConfigDocDefault("fail") + public Optional unsafeMultipleLastResources(); public enum UnsafeMultipleLastResourcesMode { /** diff --git a/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/runtime/TransactionManagerConfiguration.java b/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/runtime/TransactionManagerConfiguration.java index 5f55cb5b6f8f4..941d280ee8700 100644 --- a/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/runtime/TransactionManagerConfiguration.java +++ b/extensions/narayana-jta/runtime/src/main/java/io/quarkus/narayana/jta/runtime/TransactionManagerConfiguration.java @@ -5,23 +5,22 @@ import java.util.Optional; import io.quarkus.runtime.annotations.ConfigGroup; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefault; -/** - * - */ @ConfigRoot(phase = ConfigPhase.RUN_TIME) -public final class TransactionManagerConfiguration { +@ConfigMapping(prefix = "quarkus.transaction-manager") +public interface TransactionManagerConfiguration { /** * The node name used by the transaction manager. * Must not exceed a length of 28 bytes. * * @see #shortenNodeNameIfNecessary */ - @ConfigItem(defaultValue = "quarkus") - public String nodeName; + @WithDefault("quarkus") + String nodeName(); /** * Whether the node name should be shortened if necessary. @@ -31,88 +30,86 @@ public final class TransactionManagerConfiguration { * * @see #nodeName */ - @ConfigItem(defaultValue = "false") - public boolean shortenNodeNameIfNecessary; + @WithDefault("false") + boolean shortenNodeNameIfNecessary(); /** * The default transaction timeout. */ - @ConfigItem(defaultValue = "60") - public Duration defaultTransactionTimeout; + @WithDefault("60") + Duration defaultTransactionTimeout(); /** * Start the recovery service on startup. */ - @ConfigItem(defaultValue = "false") - public boolean enableRecovery; + @WithDefault("false") + boolean enableRecovery(); /** * The list of recovery modules. */ - @ConfigItem(defaultValue = "com.arjuna.ats.internal.arjuna.recovery.AtomicActionRecoveryModule," + + @WithDefault("com.arjuna.ats.internal.arjuna.recovery.AtomicActionRecoveryModule," + "com.arjuna.ats.internal.jta.recovery.arjunacore.XARecoveryModule") - public List recoveryModules; + List recoveryModules(); /** * The list of expiry scanners. */ - @ConfigItem(defaultValue = "com.arjuna.ats.internal.arjuna.recovery.ExpiredTransactionStatusManagerScanner") - public List expiryScanners; + @WithDefault("com.arjuna.ats.internal.arjuna.recovery.ExpiredTransactionStatusManagerScanner") + List expiryScanners(); /** * The list of orphan filters. 
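For unit tests that need a real instance of one of these interface-based roots with its defaults populated (rather than an anonymous stub like the one added to MongoClientProcessorTest above), the SmallRyeConfigBuilder pattern used there for DevServicesBuildTimeConfig generalises. A minimal sketch, assuming the converters required by the defaults (for example Quarkus' Duration converter for the "60" timeout) are discoverable on the test class path:

    import io.smallrye.config.SmallRyeConfig;
    import io.smallrye.config.SmallRyeConfigBuilder;
    import io.quarkus.narayana.jta.runtime.TransactionManagerConfiguration;

    class TransactionManagerConfigSketch {
        static TransactionManagerConfiguration withDefaults() {
            SmallRyeConfig config = new SmallRyeConfigBuilder()
                    .addDiscoveredConverters()   // picks up the converters registered on the class path
                    .withMapping(TransactionManagerConfiguration.class)
                    .build();
            return config.getConfigMapping(TransactionManagerConfiguration.class);
        }
    }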
*/ - @ConfigItem(defaultValue = "com.arjuna.ats.internal.jta.recovery.arjunacore.JTATransactionLogXAResourceOrphanFilter," + + @WithDefault("com.arjuna.ats.internal.jta.recovery.arjunacore.JTATransactionLogXAResourceOrphanFilter," + "com.arjuna.ats.internal.jta.recovery.arjunacore.JTANodeNameXAResourceOrphanFilter," + "com.arjuna.ats.internal.jta.recovery.arjunacore.JTAActionStatusServiceXAResourceOrphanFilter") - public List xaResourceOrphanFilters; + List xaResourceOrphanFilters(); /** * The object store configuration. */ - @ConfigItem - public ObjectStoreConfig objectStore; + ObjectStoreConfig objectStore(); @ConfigGroup - public static class ObjectStoreConfig { + public interface ObjectStoreConfig { /** * The name of the directory where the transaction logs will be stored when using the {@code file-system} object store. * If the value is not absolute then the directory is relative * to the user.dir system property. */ - @ConfigItem(defaultValue = "ObjectStore") - public String directory; + @WithDefault("ObjectStore") + String directory(); /** * The type of object store. */ - @ConfigItem(defaultValue = "file-system") - public ObjectStoreType type; + @WithDefault("file-system") + ObjectStoreType type(); /** * The name of the datasource where the transaction logs will be stored when using the {@code jdbc} object store. *

    * If undefined, it will use the default datasource. */ - @ConfigItem - public Optional datasource = Optional.empty(); + Optional datasource(); /** * Whether to create the table if it does not exist. */ - @ConfigItem(defaultValue = "false") - public boolean createTable; + @WithDefault("false") + boolean createTable(); /** * Whether to drop the table on startup. */ - @ConfigItem(defaultValue = "false") - public boolean dropTable; + @WithDefault("false") + boolean dropTable(); /** * The prefix to apply to the table. */ - @ConfigItem(defaultValue = "quarkus_") - public String tablePrefix; + @WithDefault("quarkus_") + String tablePrefix(); } } diff --git a/extensions/narayana-jta/runtime/src/test/java/io/quarkus/narayana/jta/runtime/NarayanaJtaRecorderTest.java b/extensions/narayana-jta/runtime/src/test/java/io/quarkus/narayana/jta/runtime/NarayanaJtaRecorderTest.java index 633948c6ab901..d0cd4a8078ec4 100644 --- a/extensions/narayana-jta/runtime/src/test/java/io/quarkus/narayana/jta/runtime/NarayanaJtaRecorderTest.java +++ b/extensions/narayana-jta/runtime/src/test/java/io/quarkus/narayana/jta/runtime/NarayanaJtaRecorderTest.java @@ -4,6 +4,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import java.nio.charset.StandardCharsets; +import java.security.NoSuchAlgorithmException; import org.junit.jupiter.api.Test; @@ -13,35 +14,26 @@ public class NarayanaJtaRecorderTest { public static final String NODE_NAME_TO_SHORTEN = "dfe2420d-b12e-4ec3-92c0-ee7c4"; @Test - void testByteLengthWithLongerString() { - TransactionManagerConfiguration transactions = new TransactionManagerConfiguration(); - transactions.shortenNodeNameIfNecessary = true; + void testByteLengthWithLongerString() throws NoSuchAlgorithmException { // create nodeNames larger than 28 bytes assertTrue(NODE_NAME_TO_SHORTEN.getBytes(StandardCharsets.UTF_8).length > 28); - NarayanaJtaRecorder r = new NarayanaJtaRecorder(); - transactions.nodeName = NODE_NAME_TO_SHORTEN; - r.setNodeName(transactions); - int numberOfBytes = transactions.nodeName.getBytes(StandardCharsets.UTF_8).length; + NarayanaJtaRecorder recorder = new NarayanaJtaRecorder(); + String shorterNodeName = recorder.shortenNodeName(NODE_NAME_TO_SHORTEN); + int numberOfBytes = shorterNodeName.getBytes(StandardCharsets.UTF_8).length; assertEquals(28, numberOfBytes, "node name bytes was not 28 bytes limit, number of bytes is " + numberOfBytes); } @Test - void testPredictableConversion() { - TransactionManagerConfiguration transactions = new TransactionManagerConfiguration(); - transactions.shortenNodeNameIfNecessary = true; + void testPredictableConversion() throws NoSuchAlgorithmException { assertTrue(NODE_NAME_TO_SHORTEN.getBytes(StandardCharsets.UTF_8).length > 28); - NarayanaJtaRecorder r = new NarayanaJtaRecorder(); - transactions.nodeName = NODE_NAME_TO_SHORTEN; - r.setNodeName(transactions); - int numberOfBytes = transactions.nodeName.getBytes(StandardCharsets.UTF_8).length; + NarayanaJtaRecorder recorder = new NarayanaJtaRecorder(); + String firstConversion = recorder.shortenNodeName(NODE_NAME_TO_SHORTEN); + int numberOfBytes = firstConversion.getBytes(StandardCharsets.UTF_8).length; assertEquals(28, numberOfBytes, "node name bytes was not 28 bytes limit, number of bytes is " + numberOfBytes); - String firstConversion = transactions.nodeName; - transactions.nodeName = NODE_NAME_TO_SHORTEN; - r.setNodeName(transactions); - String secondConversion = transactions.nodeName; - numberOfBytes = transactions.nodeName.getBytes(StandardCharsets.UTF_8).length; + 
String secondConversion = recorder.shortenNodeName(NODE_NAME_TO_SHORTEN); + numberOfBytes = secondConversion.getBytes(StandardCharsets.UTF_8).length; assertEquals(28, numberOfBytes, "node name bytes was not 28 bytes limit, number of bytes is " + numberOfBytes); assertEquals(firstConversion, secondConversion, diff --git a/extensions/narayana-lra/deployment/pom.xml b/extensions/narayana-lra/deployment/pom.xml index ff54319bb966b..61e90ac03ed26 100644 --- a/extensions/narayana-lra/deployment/pom.xml +++ b/extensions/narayana-lra/deployment/pom.xml @@ -67,9 +67,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/narayana-lra/deployment/src/main/java/io/quarkus/narayana/lra/deployment/LRABuildTimeConfiguration.java b/extensions/narayana-lra/deployment/src/main/java/io/quarkus/narayana/lra/deployment/LRABuildTimeConfiguration.java index b694109c91e6c..9191e00127b50 100644 --- a/extensions/narayana-lra/deployment/src/main/java/io/quarkus/narayana/lra/deployment/LRABuildTimeConfiguration.java +++ b/extensions/narayana-lra/deployment/src/main/java/io/quarkus/narayana/lra/deployment/LRABuildTimeConfiguration.java @@ -1,18 +1,22 @@ package io.quarkus.narayana.lra.deployment; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefault; +import io.smallrye.config.WithName; /** * LRA build time configuration properties */ @ConfigRoot(phase = ConfigPhase.BUILD_TIME) -public final class LRABuildTimeConfiguration { +@ConfigMapping(prefix = "quarkus.lra") +public interface LRABuildTimeConfiguration { /** * Whether to include LRA proxy endpoints in the generated OpenAPI document */ - @ConfigItem(name = "openapi.included", defaultValue = "false") - public boolean openapiIncluded; + @WithName("openapi.included") + @WithDefault("false") + boolean openapiIncluded(); } diff --git a/extensions/narayana-lra/deployment/src/main/java/io/quarkus/narayana/lra/deployment/NarayanaLRAProcessor.java b/extensions/narayana-lra/deployment/src/main/java/io/quarkus/narayana/lra/deployment/NarayanaLRAProcessor.java index 5a702ddd986b4..103a0b99d5228 100644 --- a/extensions/narayana-lra/deployment/src/main/java/io/quarkus/narayana/lra/deployment/NarayanaLRAProcessor.java +++ b/extensions/narayana-lra/deployment/src/main/java/io/quarkus/narayana/lra/deployment/NarayanaLRAProcessor.java @@ -160,7 +160,7 @@ public void filterOpenAPIEndpoint(BuildProducer Capabilities capabilities, LRABuildTimeConfiguration lraBuildTimeConfig) { if (capabilities.isPresent(Capability.SMALLRYE_OPENAPI)) { - NarayanaLRAOpenAPIFilter lraOpenAPIFilter = new NarayanaLRAOpenAPIFilter(lraBuildTimeConfig.openapiIncluded); + NarayanaLRAOpenAPIFilter lraOpenAPIFilter = new NarayanaLRAOpenAPIFilter(lraBuildTimeConfig.openapiIncluded()); openAPIProducer.produce(new AddToOpenAPIDefinitionBuildItem(lraOpenAPIFilter)); } } diff --git a/extensions/narayana-lra/runtime/pom.xml b/extensions/narayana-lra/runtime/pom.xml index 5d0deec48dcd2..71cd38a0e1bb0 100644 --- a/extensions/narayana-lra/runtime/pom.xml +++ b/extensions/narayana-lra/runtime/pom.xml @@ -92,9 +92,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/narayana-lra/runtime/src/main/java/io/quarkus/narayana/lra/runtime/LRAConfiguration.java b/extensions/narayana-lra/runtime/src/main/java/io/quarkus/narayana/lra/runtime/LRAConfiguration.java index 5539ecd1e56bb..7af69d0a0fefd 100644 --- 
a/extensions/narayana-lra/runtime/src/main/java/io/quarkus/narayana/lra/runtime/LRAConfiguration.java +++ b/extensions/narayana-lra/runtime/src/main/java/io/quarkus/narayana/lra/runtime/LRAConfiguration.java @@ -1,14 +1,16 @@ package io.quarkus.narayana.lra.runtime; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; +import io.smallrye.config.WithDefault; /** * Configuration properties for controlling LRA participants */ @ConfigRoot(phase = ConfigPhase.RUN_TIME) -public final class LRAConfiguration { +@ConfigMapping(prefix = "quarkus.lra") +public interface LRAConfiguration { /** * The REST endpoint on which a coordinator is running. * In order for an LRA to begin and end successfully and in order to @@ -18,6 +20,6 @@ public final class LRAConfiguration { * In this version of the extension, a failed coordinator with * LRAs that have not yet finished must be restarted. */ - @ConfigItem(defaultValue = "http://localhost:50000/lra-coordinator") - String coordinatorURL; + @WithDefault("http://localhost:50000/lra-coordinator") + String coordinatorURL(); } diff --git a/extensions/narayana-lra/runtime/src/main/java/io/quarkus/narayana/lra/runtime/NarayanaLRARecorder.java b/extensions/narayana-lra/runtime/src/main/java/io/quarkus/narayana/lra/runtime/NarayanaLRARecorder.java index 1f3a4868a77dd..3fdea0a20a3bd 100644 --- a/extensions/narayana-lra/runtime/src/main/java/io/quarkus/narayana/lra/runtime/NarayanaLRARecorder.java +++ b/extensions/narayana-lra/runtime/src/main/java/io/quarkus/narayana/lra/runtime/NarayanaLRARecorder.java @@ -19,7 +19,7 @@ public class NarayanaLRARecorder { public void setConfig(final LRAConfiguration config) { if (System.getProperty(NarayanaLRAClient.LRA_COORDINATOR_URL_KEY) == null) { - System.setProperty(NarayanaLRAClient.LRA_COORDINATOR_URL_KEY, config.coordinatorURL); + System.setProperty(NarayanaLRAClient.LRA_COORDINATOR_URL_KEY, config.coordinatorURL()); } } diff --git a/extensions/netty/deployment/pom.xml b/extensions/netty/deployment/pom.xml index ac6d6301d2c56..367d575a6684a 100644 --- a/extensions/netty/deployment/pom.xml +++ b/extensions/netty/deployment/pom.xml @@ -42,9 +42,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/netty/deployment/src/main/java/io/quarkus/netty/deployment/NettyBuildTimeConfig.java b/extensions/netty/deployment/src/main/java/io/quarkus/netty/deployment/NettyBuildTimeConfig.java index 64fdf514433bc..39d4681cff9b2 100644 --- a/extensions/netty/deployment/src/main/java/io/quarkus/netty/deployment/NettyBuildTimeConfig.java +++ b/extensions/netty/deployment/src/main/java/io/quarkus/netty/deployment/NettyBuildTimeConfig.java @@ -2,12 +2,13 @@ import java.util.OptionalInt; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; -@ConfigRoot(name = "netty", phase = ConfigPhase.BUILD_TIME) -public class NettyBuildTimeConfig { +@ConfigRoot(phase = ConfigPhase.BUILD_TIME) +@ConfigMapping(prefix = "quarkus.netty") +public interface NettyBuildTimeConfig { /** * The value configuring the {@code io.netty.allocator.maxOrder} system property of Netty. @@ -19,6 +20,5 @@ public class NettyBuildTimeConfig { * It must be used carefully. * More details on https://programmer.group/pool-area-of-netty-memory-pool.html. 
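For context on the values involved (standard Netty pooled-allocator behaviour, not something introduced by this change): an arena chunk is pageSize << maxOrder bytes, so with Netty's default 8 KiB page size a maxOrder of 11 (the historical Netty default; newer Netty releases default to 9, i.e. 4 MiB) gives 16 MiB chunks, while the small value Quarkus falls back to, 3, gives 64 KiB chunks. Setting quarkus.netty.allocator-max-order=5, for example, would mean 256 KiB chunks.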
*/ - @ConfigItem - public OptionalInt allocatorMaxOrder; + OptionalInt allocatorMaxOrder(); } diff --git a/extensions/netty/deployment/src/main/java/io/quarkus/netty/deployment/NettyProcessor.java b/extensions/netty/deployment/src/main/java/io/quarkus/netty/deployment/NettyProcessor.java index 98a0d370ffcaf..b2131c1837563 100644 --- a/extensions/netty/deployment/src/main/java/io/quarkus/netty/deployment/NettyProcessor.java +++ b/extensions/netty/deployment/src/main/java/io/quarkus/netty/deployment/NettyProcessor.java @@ -58,7 +58,7 @@ public NativeImageSystemPropertyBuildItem limitMem() { @BuildStep public SystemPropertyBuildItem limitArenaSize(NettyBuildTimeConfig config, List minMaxOrderBuildItems) { - String maxOrder = calculateMaxOrder(config.allocatorMaxOrder, minMaxOrderBuildItems, true); + String maxOrder = calculateMaxOrder(config.allocatorMaxOrder(), minMaxOrderBuildItems, true); //in native mode we limit the size of the epoll array //if the array overflows the selector just moves the overflow to a map @@ -114,7 +114,7 @@ NativeImageConfigBuildItem build( .produce(ReflectiveClassBuildItem.builder("java.util.LinkedHashMap").build()); reflectiveClass.produce(ReflectiveClassBuildItem.builder("sun.nio.ch.SelectorImpl").methods().fields().build()); - String maxOrder = calculateMaxOrder(config.allocatorMaxOrder, minMaxOrderBuildItems, false); + String maxOrder = calculateMaxOrder(config.allocatorMaxOrder(), minMaxOrderBuildItems, false); NativeImageConfigBuildItem.Builder builder = NativeImageConfigBuildItem.builder() // Use small chunks to avoid a lot of wasted space. Default is 16mb * arenas (derived from core count) diff --git a/extensions/netty/runtime/pom.xml b/extensions/netty/runtime/pom.xml index a0c59483ce243..47420416d286b 100644 --- a/extensions/netty/runtime/pom.xml +++ b/extensions/netty/runtime/pom.xml @@ -82,9 +82,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/observability-devservices/common/src/main/java/io/quarkus/observability/common/ContainerConstants.java b/extensions/observability-devservices/common/src/main/java/io/quarkus/observability/common/ContainerConstants.java index 55576ccd82b1e..489378273d323 100644 --- a/extensions/observability-devservices/common/src/main/java/io/quarkus/observability/common/ContainerConstants.java +++ b/extensions/observability-devservices/common/src/main/java/io/quarkus/observability/common/ContainerConstants.java @@ -15,4 +15,12 @@ public final class ContainerConstants { public static final String OTEL_GRPC_PROTOCOL = "grpc"; public static final String OTEL_HTTP_PROTOCOL = "http/protobuf"; + + // Overrides + + public static final int SCRAPING_INTERVAL = 10; + public static final String OTEL_METRIC_EXPORT_INTERVAL = "10s"; + public static final String OTEL_BSP_SCHEDULE_DELAY = "3s"; + public static final String OTEL_BLRP_SCHEDULE_DELAY = "1s"; + } diff --git a/extensions/observability-devservices/common/src/main/java/io/quarkus/observability/common/config/AbstractGrafanaConfig.java b/extensions/observability-devservices/common/src/main/java/io/quarkus/observability/common/config/AbstractGrafanaConfig.java index b73cea0232961..0066c878b9fe7 100644 --- a/extensions/observability-devservices/common/src/main/java/io/quarkus/observability/common/config/AbstractGrafanaConfig.java +++ b/extensions/observability-devservices/common/src/main/java/io/quarkus/observability/common/config/AbstractGrafanaConfig.java @@ -46,6 +46,6 @@ public int grafanaPort() { @Override public Duration timeout() { - return 
Duration.ofMinutes(1); + return Duration.ofMinutes(3); } } diff --git a/extensions/observability-devservices/common/src/main/java/io/quarkus/observability/common/config/ContainerConfigUtil.java b/extensions/observability-devservices/common/src/main/java/io/quarkus/observability/common/config/ContainerConfigUtil.java index 8d3ce6d1a0b42..2f468afd12b50 100644 --- a/extensions/observability-devservices/common/src/main/java/io/quarkus/observability/common/config/ContainerConfigUtil.java +++ b/extensions/observability-devservices/common/src/main/java/io/quarkus/observability/common/config/ContainerConfigUtil.java @@ -2,7 +2,10 @@ import java.lang.reflect.Method; import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; import java.util.Objects; +import java.util.Optional; public class ContainerConfigUtil { /** @@ -16,11 +19,7 @@ public static boolean isEqual(ContainerConfig cc1, ContainerConfig cc2) { return false; } - Class<?> i = Arrays.stream(c1.getInterfaces()) - .filter(ContainerConfig.class::isAssignableFrom) - .findFirst() - .orElseThrow(() -> new IllegalArgumentException("Missing ContainerConfig based interface")); - Method[] methods = i.getMethods(); // should get all config methods + Method[] methods = getMethods(c1); for (Method m : methods) { Object v1 = invoke(m, cc1); Object v2 = invoke(m, cc2); @@ -31,6 +30,38 @@ public static boolean isEqual(ContainerConfig cc1, ContainerConfig cc2) { return true; } + /** + * Get all properties to override from a container config instance. + * + * @param config the container config + * @return map of properties to override + */ + public static Map<String, Object> propertiesToOverride(ContainerConfig config) { + Map<String, Object> map = new HashMap<>(); + for (Method m : getMethods(config.getClass())) { + OverrideProperty override = m.getAnnotation(OverrideProperty.class); + if (override != null) { + String key = override.value(); + Object value = invoke(m, config); + if (value instanceof Optional) { + Optional<?> optional = (Optional<?>) value; + optional.ifPresent(o -> map.put(key, o)); + } else if (value != null) { + map.put(key, value); + } + } + } + return map; + } + + private static Method[] getMethods(Class<?> c1) { + Class<?> i = Arrays.stream(c1.getInterfaces()) + .filter(ContainerConfig.class::isAssignableFrom) + .findFirst() + .orElseThrow(() -> new IllegalArgumentException("Missing ContainerConfig based interface")); + return i.getMethods(); + } + private static Object invoke(Method m, Object target) { try { return m.invoke(target); diff --git a/extensions/observability-devservices/common/src/main/java/io/quarkus/observability/common/config/GrafanaConfig.java b/extensions/observability-devservices/common/src/main/java/io/quarkus/observability/common/config/GrafanaConfig.java index 93cccae20c832..0a41330202244 100644 --- a/extensions/observability-devservices/common/src/main/java/io/quarkus/observability/common/config/GrafanaConfig.java +++ b/extensions/observability-devservices/common/src/main/java/io/quarkus/observability/common/config/GrafanaConfig.java @@ -29,6 +29,6 @@ public interface GrafanaConfig extends ContainerConfig { /** * The timeout.
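 * Accepts the duration syntax that SmallRye Config supports for {@code java.time.Duration}, e.g. the ISO-8601 form {@code PT3M} used as the default below.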
*/ - @WithDefault("PT1M") + @WithDefault("PT3M") Duration timeout(); } diff --git a/extensions/observability-devservices/common/src/main/java/io/quarkus/observability/common/config/LgtmConfig.java b/extensions/observability-devservices/common/src/main/java/io/quarkus/observability/common/config/LgtmConfig.java index 227948a7938a1..62270762f6dbd 100644 --- a/extensions/observability-devservices/common/src/main/java/io/quarkus/observability/common/config/LgtmConfig.java +++ b/extensions/observability-devservices/common/src/main/java/io/quarkus/observability/common/config/LgtmConfig.java @@ -4,6 +4,7 @@ import java.util.Set; import io.quarkus.observability.common.ContainerConstants; +import io.quarkus.runtime.annotations.ConfigDocIgnore; import io.quarkus.runtime.annotations.ConfigGroup; import io.smallrye.config.WithDefault; @@ -37,4 +38,39 @@ public interface LgtmConfig extends GrafanaConfig { */ @WithDefault(ContainerConstants.OTEL_HTTP_PROTOCOL) String otlpProtocol(); + + /** + * The (Prometheus) scraping interval, in seconds. + */ + @WithDefault(ContainerConstants.SCRAPING_INTERVAL + "") + int scrapingInterval(); + + /** + * Whether to force Prometheus scraping. + */ + Optional<Boolean> forceScraping(); + + /** + * A way to override the `quarkus.otel.metric.export.interval` property's default value. + */ + @OverrideProperty("quarkus.otel.metric.export.interval") + @WithDefault(ContainerConstants.OTEL_METRIC_EXPORT_INTERVAL) + @ConfigDocIgnore + String otelMetricExportInterval(); + + /** + * A way to override the `quarkus.otel.bsp.schedule.delay` property's default value. + */ + @OverrideProperty("quarkus.otel.bsp.schedule.delay") + @WithDefault(ContainerConstants.OTEL_BSP_SCHEDULE_DELAY) + @ConfigDocIgnore + String otelBspScheduleDelay(); + + /** + * A way to override the `quarkus.otel.blrp.schedule.delay` property's default value. + */ + @OverrideProperty("quarkus.otel.blrp.schedule.delay") + @WithDefault(ContainerConstants.OTEL_BLRP_SCHEDULE_DELAY) + @ConfigDocIgnore + String otelBlrpScheduleDelay(); } diff --git a/extensions/observability-devservices/common/src/main/java/io/quarkus/observability/common/config/OverrideProperty.java b/extensions/observability-devservices/common/src/main/java/io/quarkus/observability/common/config/OverrideProperty.java new file mode 100644 index 0000000000000..67d094fae29e8 --- /dev/null +++ b/extensions/observability-devservices/common/src/main/java/io/quarkus/observability/common/config/OverrideProperty.java @@ -0,0 +1,21 @@ +package io.quarkus.observability.common.config; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Overrides the configuration property named in {@link #value()} + * with the return value of the annotated config method. + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.METHOD }) +public @interface OverrideProperty { + /** + * The property key to override.
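+ * <p> + * For example, {@code LgtmConfig#otelMetricExportInterval()} is annotated with {@code @OverrideProperty("quarkus.otel.metric.export.interval")}; {@code ContainerConfigUtil#propertiesToOverride(ContainerConfig)} collects such annotated methods and the dev-services processor registers each returned entry as a {@code RunTimeConfigurationDefaultBuildItem}.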
+ * + * @return the property key + */ + String value(); +} diff --git a/extensions/observability-devservices/deployment/src/main/java/io/quarkus/observability/deployment/ObservabilityDevServiceProcessor.java b/extensions/observability-devservices/deployment/src/main/java/io/quarkus/observability/deployment/ObservabilityDevServiceProcessor.java index d2093a51decc1..655862baa9dae 100644 --- a/extensions/observability-devservices/deployment/src/main/java/io/quarkus/observability/deployment/ObservabilityDevServiceProcessor.java +++ b/extensions/observability-devservices/deployment/src/main/java/io/quarkus/observability/deployment/ObservabilityDevServiceProcessor.java @@ -28,6 +28,7 @@ import io.quarkus.deployment.builditem.DockerStatusBuildItem; import io.quarkus.deployment.builditem.FeatureBuildItem; import io.quarkus.deployment.builditem.LaunchModeBuildItem; +import io.quarkus.deployment.builditem.RunTimeConfigurationDefaultBuildItem; import io.quarkus.deployment.console.ConsoleInstalledBuildItem; import io.quarkus.deployment.console.StartupLogCompressor; import io.quarkus.deployment.dev.devservices.DevServicesConfig; @@ -84,6 +85,7 @@ public void startContainers(LaunchModeBuildItem launchMode, LoggingSetupBuildItem loggingSetupBuildItem, DevServicesConfig devServicesConfig, BuildProducer services, + BuildProducer properties, Capabilities capabilities, Optional metricsConfiguration, BuildProducer configBuildProducer) { @@ -118,6 +120,8 @@ public void startContainers(LaunchModeBuildItem launchMode, ContainerConfig currentDevServicesConfiguration = dev.config( configuration, new ExtensionsCatalog( + QuarkusClassLoader::isResourcePresentAtRuntime, + QuarkusClassLoader::isClassPresentAtRuntime, capabilities.isPresent(Capability.OPENTELEMETRY_TRACER), hasMicrometerOtlp(metricsConfiguration))); @@ -140,6 +144,13 @@ public void startContainers(LaunchModeBuildItem launchMode, devServices.remove(devId); // clean-up capturedDevServicesConfigurations.put(devId, currentDevServicesConfiguration); + // override some OTel, etc defaults - rates, intervals, delays, ... + Map propertiesToOverride = ContainerConfigUtil + .propertiesToOverride(currentDevServicesConfiguration); + propertiesToOverride + .forEach((k, v) -> properties.produce(new RunTimeConfigurationDefaultBuildItem(k, v.toString()))); + log.infof("Dev Service %s properties override: %s", devId, propertiesToOverride); + StartupLogCompressor compressor = new StartupLogCompressor( (launchMode.isTest() ? 
"(test) " : "") + devId + " Dev Services Starting:", consoleInstalledBuildItem, diff --git a/extensions/observability-devservices/testcontainers/pom.xml b/extensions/observability-devservices/testcontainers/pom.xml index 5f4a15c826d64..cb61bca191604 100644 --- a/extensions/observability-devservices/testcontainers/pom.xml +++ b/extensions/observability-devservices/testcontainers/pom.xml @@ -13,6 +13,10 @@ Quarkus - Observability Dev Services - Testcontainers + + io.quarkus + quarkus-devtools-utilities + io.quarkus quarkus-devservices-common diff --git a/extensions/observability-devservices/testcontainers/src/main/java/io/quarkus/observability/testcontainers/LgtmContainer.java b/extensions/observability-devservices/testcontainers/src/main/java/io/quarkus/observability/testcontainers/LgtmContainer.java index bbf77c722d08a..114a8aea2bb4b 100644 --- a/extensions/observability-devservices/testcontainers/src/main/java/io/quarkus/observability/testcontainers/LgtmContainer.java +++ b/extensions/observability-devservices/testcontainers/src/main/java/io/quarkus/observability/testcontainers/LgtmContainer.java @@ -15,11 +15,14 @@ import io.quarkus.observability.common.ContainerConstants; import io.quarkus.observability.common.config.AbstractGrafanaConfig; import io.quarkus.observability.common.config.LgtmConfig; +import io.quarkus.runtime.LaunchMode; +import io.quarkus.utilities.OS; +@SuppressWarnings("resource") public class LgtmContainer extends GrafanaContainer { protected static final String LGTM_NETWORK_ALIAS = "ltgm.testcontainer.docker"; - protected static final String PROMETHEUS_CONFIG = """ + protected static final String PROMETHEUS_CONFIG_DEFAULT = """ --- otlp: # Recommended attributes to be promoted to labels. @@ -47,12 +50,15 @@ public class LgtmContainer extends GrafanaContainer { # A 10min time window is enough because it can easily absorb retries and network delays. 
out_of_order_time_window: 10m global: - scrape_interval: 5s + scrape_interval: %s evaluation_interval: 5s + """; + + protected static final String PROMETHEUS_CONFIG_SCRAPE = """ scrape_configs: - job_name: '%s' metrics_path: '%s%s' - scrape_interval: 5s + scrape_interval: %s static_configs: - targets: ['%s:%d'] """; @@ -61,24 +67,38 @@ public class LgtmContainer extends GrafanaContainer { apiVersion: 1 providers: - - name: "Quarkus Micrometer Prometheus" + - name: "Quarkus Micrometer Prometheus registry" type: file options: path: /otel-lgtm/grafana-dashboard-quarkus-micrometer-prometheus.json foldersFromFilesStructure: false - - name: "Quarkus Micrometer with OTLP output" + - name: "Quarkus Micrometer OTLP registry" type: file options: path: /otel-lgtm/grafana-dashboard-quarkus-micrometer-otlp.json foldersFromFilesStructure: false + - name: "Quarkus Micrometer OpenTelemetry" + type: file + options: + path: /otel-lgtm/grafana-dashboard-quarkus-micrometer-opentelemetry.json + foldersFromFilesStructure: false + - name: "Quarkus OpenTelemetry logging" + type: file + options: + path: /otel-lgtm/grafana-dashboard-opentelemetry-logging.json + foldersFromFilesStructure: false """; - public LgtmContainer() { - this(new LgtmConfigImpl()); + private final boolean scrapingRequired; + + public LgtmContainer(boolean scrapingRequired) { + this(new LgtmConfigImpl(), scrapingRequired); } - public LgtmContainer(LgtmConfig config) { + public LgtmContainer(LgtmConfig config, boolean scrapingRequired) { super(config); + // do we require scraping + this.scrapingRequired = scrapingRequired; // always expose both -- since the LGTM image already does that as well addExposedPorts(ContainerConstants.OTEL_GRPC_EXPORTER_PORT, ContainerConstants.OTEL_HTTP_EXPORTER_PORT); @@ -91,8 +111,14 @@ public LgtmContainer(LgtmConfig config) { withCopyFileToContainer( MountableFile.forClasspathResource("/grafana-dashboard-quarkus-micrometer-otlp.json"), "/otel-lgtm/grafana-dashboard-quarkus-micrometer-otlp.json"); - addFileToContainer(getPrometheusConfig().getBytes(), "/otel-lgtm/prometheus.yaml"); + withCopyFileToContainer( + MountableFile.forClasspathResource("/grafana-dashboard-quarkus-micrometer-opentelemetry.json"), + "/otel-lgtm/grafana-dashboard-quarkus-micrometer-opentelemetry.json"); + withCopyFileToContainer( + MountableFile.forClasspathResource("/grafana-dashboard-opentelemetry-logging.json"), + "/otel-lgtm/grafana-dashboard-opentelemetry-logging.json"); + addFileToContainer(getPrometheusConfig().getBytes(), "/otel-lgtm/prometheus.yaml"); } @Override @@ -119,29 +145,47 @@ public String getOtlpProtocol() { return config.otlpProtocol(); } - public int getOtlpPort() { - int port = getOtlpPortInternal(); - return getMappedPort(port); + private int getPrivateOtlpPort() { + return getPrivateOtlpPort(getOtlpProtocol()); } - private int getOtlpPortInternal() { + public static int getPrivateOtlpPort(String otlpProtocol) { // use ignore-case here; grpc == gRPC - if (ContainerConstants.OTEL_GRPC_PROTOCOL.equalsIgnoreCase(getOtlpProtocol())) { + if (ContainerConstants.OTEL_GRPC_PROTOCOL.equalsIgnoreCase(otlpProtocol)) { return ContainerConstants.OTEL_GRPC_EXPORTER_PORT; - } else if (ContainerConstants.OTEL_HTTP_PROTOCOL.equals(getOtlpProtocol())) { + } else if (ContainerConstants.OTEL_HTTP_PROTOCOL.equals(otlpProtocol)) { return ContainerConstants.OTEL_HTTP_EXPORTER_PORT; } else { - throw new IllegalArgumentException("Unsupported OTEL protocol: " + getOtlpProtocol()); + throw new IllegalArgumentException("Unsupported OTEL protocol: 
" + otlpProtocol); } } private String getPrometheusConfig() { - Config runtimeConfig = ConfigProvider.getConfig(); - String rootPath = runtimeConfig.getOptionalValue("quarkus.management.root-path", String.class).orElse("/q"); - String metricsPath = runtimeConfig.getOptionalValue("quarkus.management.metrics.path", String.class).orElse("/metrics"); - int httpPort = runtimeConfig.getOptionalValue("quarkus.http.port", Integer.class).orElse(8080); // when not set use default - - return String.format(PROMETHEUS_CONFIG, config.serviceName(), rootPath, metricsPath, "host.docker.internal", httpPort); + String scraping = config.scrapingInterval() + "s"; + String prometheusConfig = String.format(PROMETHEUS_CONFIG_DEFAULT, scraping); + if (config.forceScraping().orElse(scrapingRequired)) { + boolean isTest = LaunchMode.current() == LaunchMode.TEST; + Config runtimeConfig = ConfigProvider.getConfig(); + String rootPath = runtimeConfig.getOptionalValue("quarkus.management.root-path", String.class).orElse("/q"); + String metricsPath = runtimeConfig.getOptionalValue("quarkus.management.metrics.path", String.class) + .orElse("/metrics"); + String httpPortKey = isTest ? "quarkus.http.test-port" : "quarkus.http.port"; + Optional optionalValue = runtimeConfig.getOptionalValue(httpPortKey, Integer.class); + int httpPort = optionalValue.orElse(isTest ? 8081 : 8080); // when not set use default + + // On Linux, you can’t automatically resolve host.docker.internal, + // you need to provide the following run flag when you start the container: + //--add-host=host.docker.internal:host-gateway + if (OS.determineOS() == OS.LINUX) { + withCreateContainerCmdModifier(cmd -> cmd + .getHostConfig() + .withExtraHosts("host.docker.internal:host-gateway")); + } + + prometheusConfig += String.format(PROMETHEUS_CONFIG_SCRAPE, config.serviceName(), rootPath, metricsPath, scraping, + "host.docker.internal", httpPort); + } + return prometheusConfig; } protected static class LgtmConfigImpl extends AbstractGrafanaConfig implements LgtmConfig { @@ -162,6 +206,31 @@ public Optional> networkAliases() { public String otlpProtocol() { return ContainerConstants.OTEL_HTTP_PROTOCOL; } + + @Override + public int scrapingInterval() { + return ContainerConstants.SCRAPING_INTERVAL; + } + + @Override + public Optional forceScraping() { + return Optional.empty(); + } + + @Override + public String otelMetricExportInterval() { + return ContainerConstants.OTEL_METRIC_EXPORT_INTERVAL; + } + + @Override + public String otelBspScheduleDelay() { + return ContainerConstants.OTEL_BSP_SCHEDULE_DELAY; + } + + @Override + public String otelBlrpScheduleDelay() { + return ContainerConstants.OTEL_BLRP_SCHEDULE_DELAY; + } } protected static class LgtmLoggingFilter implements Predicate { diff --git a/extensions/observability-devservices/testcontainers/src/main/resources/grafana-dashboard-opentelemetry-logging.json b/extensions/observability-devservices/testcontainers/src/main/resources/grafana-dashboard-opentelemetry-logging.json new file mode 100644 index 0000000000000..796784e15fa52 --- /dev/null +++ b/extensions/observability-devservices/testcontainers/src/main/resources/grafana-dashboard-opentelemetry-logging.json @@ -0,0 +1,969 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { + "type": "datasource", + "uid": "grafana" + }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "target": { + "limit": 100, + "matchAny": false, + "tags": [], + "type": "dashboard" + }, + "type": 
"dashboard" + } + ] + }, + "description": "Universal and flexible dashboard for logging", + "editable": true, + "fiscalYearStartMonth": 0, + "gnetId": 12611, + "graphTooltip": 0, + "id": 1162, + "links": [], + "liveNow": false, + "panels": [ + { + "datasource": { + "type": "loki", + "uid": "loki" + }, + "description": "Total Count of log lines in the specified time range", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [ + { + "options": { + "match": "null", + "result": { + "color": "rgb(31, 255, 7)", + "text": "0" + } + }, + "type": "special" + } + ], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "rgb(31, 255, 7)", + "value": null + }, + { + "color": "rgb(31, 255, 7)", + "value": 10 + }, + { + "color": "rgb(31, 255, 7)", + "value": 50 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 3, + "w": 12, + "x": 0, + "y": 0 + }, + "id": 11, + "links": [], + "maxDataPoints": 100, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "horizontal", + "reduceOptions": { + "calcs": [ + "sum" + ], + "fields": "", + "values": false + }, + "textMode": "auto" + }, + "pluginVersion": "9.3.6", + "targets": [ + { + "datasource": { + "type": "loki", + "uid": "loki" + }, + "editorMode": "code", + "expr": "sum(count_over_time(({service_name=~\"$service_name\"})[$__interval]))", + "hide": false, + "queryType": "range", + "refId": "A" + } + ], + "title": "Total Count of logs", + "type": "stat" + }, + { + "datasource": { + "type": "loki", + "uid": "loki" + }, + "description": "Total Count of \"$searchable_pattern\" in the specified time range", + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [ + { + "options": { + "match": "null", + "result": { + "color": "rgb(222, 15, 43)", + "text": "0" + } + }, + "type": "special" + } + ], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "rgb(222, 15, 43)", + "value": null + }, + { + "color": "rgb(222, 15, 43)", + "value": 10 + }, + { + "color": "rgb(222, 15, 43)", + "value": 50 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 3, + "w": 12, + "x": 12, + "y": 0 + }, + "id": 6, + "links": [], + "maxDataPoints": 100, + "options": { + "colorMode": "value", + "graphMode": "area", + "justifyMode": "auto", + "orientation": "horizontal", + "reduceOptions": { + "calcs": [ + "sum" + ], + "fields": "", + "values": false + }, + "textMode": "auto" + }, + "pluginVersion": "9.3.6", + "targets": [ + { + "datasource": { + "type": "loki", + "uid": "loki" + }, + "editorMode": "code", + "expr": "sum(count_over_time(({service_name=~\"$service_name\"} |~ \"(?i)$searchable_pattern\")[$__interval]))", + "hide": false, + "queryType": "range", + "refId": "A" + } + ], + "title": "Total Count of \"$searchable_pattern\"", + "type": "stat" + }, + { + "datasource": { + "type": "loki", + "uid": "loki" + }, + "description": "Live logs is a like 'tail -f' in a real time", + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 3 + }, + "id": 2, + "options": { + "dedupStrategy": "none", + "enableLogDetails": true, + "prettifyLogMessage": false, + "showCommonLabels": false, + "showLabels": false, + "showTime": false, + "sortOrder": "Descending", + "wrapLogMessage": false + }, + "targets": [ + { + "datasource": { + "type": "loki", + "uid": "loki" + }, + "editorMode": "code", + "expr": "{service_name=~\"$service_name\"} | severity_text=~\"$log_level\" |~ \"(?i)$searchable_pattern\" | 
line_format `{{date \"2006-01-02 12:04:05\" __timestamp__ | alignLeft 21}} {{alignLeft 30 .service_name}} {{upper .detected_level | alignLeft 9}} {{__line__}}`", + "hide": false, + "queryType": "range", + "refId": "A" + } + ], + "title": "Live logs", + "type": "logs" + }, + { + "datasource": { + "uid": "${DS_NY-ALERTING2}" + }, + "gridPos": { + "h": 2, + "w": 24, + "x": 0, + "y": 12 + }, + "id": 15, + "options": { + "code": { + "language": "plaintext", + "showLineNumbers": false, + "showMiniMap": false + }, + "content": "", + "mode": "html" + }, + "pluginVersion": "9.3.6", + "targets": [ + { + "datasource": { + "uid": "${DS_NY-ALERTING2}" + }, + "refId": "A", + "target": "" + } + ], + "type": "text" + }, + { + "datasource": { + "type": "loki", + "uid": "loki" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + } + }, + "mappings": [] + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 7, + "x": 0, + "y": 14 + }, + "id": 19, + "links": [], + "maxDataPoints": 100, + "options": { + "legend": { + "displayMode": "table", + "placement": "bottom", + "showLegend": true, + "values": [ + "value" + ] + }, + "pieType": "pie", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "7.0.4", + "targets": [ + { + "datasource": { + "type": "loki", + "uid": "loki" + }, + "editorMode": "code", + "expr": "sum (count_over_time(({service_name=~\"$service_name\"} | label_format level=detected_level)[$__interval])) by (level)", + "hide": false, + "legendFormat": "{{stream}}", + "queryType": "range", + "refId": "A" + } + ], + "title": "Total count of stderr / stdout pie", + "type": "piechart" + }, + { + "datasource": { + "type": "loki", + "uid": "loki" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + } + }, + "mappings": [] + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 12, + "x": 7, + "y": 14 + }, + "id": 20, + "interval": "1m", + "links": [], + "maxDataPoints": "", + "options": { + "legend": { + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "pieType": "pie", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "6.4.3", + "targets": [ + { + "datasource": { + "type": "loki", + "uid": "loki" + }, + "editorMode": "code", + "expr": "sum (count_over_time(({service_name=~\"$service_name\"} | label_format level=detected_level |~ \"(?i)$searchable_pattern\")[$__interval])) by (level)", + "legendFormat": "{{pod}}", + "queryType": "range", + "refId": "A" + } + ], + "title": "Matched word \"$searchable_pattern\" donut", + "type": "piechart" + }, + { + "datasource": { + "type": "loki", + "uid": "loki" + }, + "fieldConfig": { + "defaults": { + "mappings": [ + { + "options": { + "match": "null", + "result": { + "color": "#299c46", + "text": "0" + } + }, + "type": "special" + } + ], + "max": 100, + "min": 0, + "noValue": "0", + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "#299c46", + "value": null + }, + { + "color": "rgba(237, 129, 40, 0.89)", + "value": 10 + }, + { + "color": "#C4162A", + "value": 50 + } + ] + }, + "unit": "percent" + }, + "overrides": [] + }, + "gridPos": { + 
"h": 7, + "w": 5, + "x": 19, + "y": 14 + }, + "id": 9, + "links": [], + "maxDataPoints": 100, + "options": { + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "mean" + ], + "fields": "", + "values": false + }, + "showThresholdLabels": false, + "showThresholdMarkers": false + }, + "pluginVersion": "9.3.6", + "targets": [ + { + "datasource": { + "type": "loki", + "uid": "loki" + }, + "editorMode": "code", + "expr": "sum(count_over_time(({service_name=~\"$service_name\"} |~ \"(?i)$searchable_pattern\")[$__interval])) * 100 / sum(count_over_time(({service_name=~\"$service_name\"})[$__interval]))", + "hide": false, + "queryType": "range", + "refId": "A" + } + ], + "title": "\"$searchable_pattern\" Percentage for specified time", + "type": "gauge" + }, + { + "datasource": { + "type": "loki", + "uid": "loki" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "Count", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "bars", + "fillOpacity": 100, + "gradientMode": "opacity", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "linear", + "lineWidth": 3, + "pointSize": 1, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "decimals": 0, + "links": [], + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 9, + "w": 12, + "x": 0, + "y": 21 + }, + "id": 18, + "interval": "1m", + "links": [], + "maxDataPoints": "", + "options": { + "legend": { + "calcs": [ + "lastNotNull" + ], + "displayMode": "table", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "desc" + } + }, + "pluginVersion": "9.3.6", + "targets": [ + { + "datasource": { + "type": "loki", + "uid": "loki" + }, + "editorMode": "code", + "expr": "sum(count_over_time(({service_name=~\"$service_name\"} |~ \"(?i)$searchable_pattern\")[$__interval])) by (service_name)", + "legendFormat": "{{pod}}", + "queryType": "range", + "refId": "A" + } + ], + "title": "Matched word \"$searchable_pattern\" historical", + "type": "timeseries" + }, + { + "datasource": { + "type": "loki", + "uid": "loki" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 100, + "gradientMode": "opacity", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 1, + "scaleDistribution": { + "log": 10, + "type": "log" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "links": [], + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 9, + "w": 12, + "x": 12, + "y": 21 + }, + "id": 10, + "links": [], + "maxDataPoints": 100, + "options": { + "legend": { + "calcs": 
[ + "lastNotNull" + ], + "displayMode": "table", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "desc" + } + }, + "pluginVersion": "9.3.6", + "targets": [ + { + "datasource": { + "type": "loki", + "uid": "loki" + }, + "editorMode": "code", + "expr": "sum(rate(({service_name=~\"$service_name\"} |~ \"(?i)$searchable_pattern\")[30s])) by (service_name)", + "hide": false, + "legendFormat": "{{pod}}", + "queryType": "range", + "refId": "A" + } + ], + "title": "\"$searchable_pattern\" Rate per Pod", + "type": "timeseries" + }, + { + "datasource": { + "type": "loki", + "uid": "loki" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 40, + "gradientMode": "opacity", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 1, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "always", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "links": [], + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "none" + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "{stream=\"stderr\"} stderr" + }, + "properties": [ + { + "id": "color", + "value": { + "fixedColor": "#C4162A", + "mode": "fixed" + } + }, + { + "id": "custom.lineWidth", + "value": 2 + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "{stream=\"stdout\"} stdout" + }, + "properties": [ + { + "id": "color", + "value": { + "fixedColor": "#56A64B", + "mode": "fixed" + } + }, + { + "id": "custom.lineWidth", + "value": 2 + } + ] + } + ] + }, + "gridPos": { + "h": 8, + "w": 24, + "x": 0, + "y": 30 + }, + "id": 7, + "links": [], + "maxDataPoints": 100, + "options": { + "legend": { + "calcs": [ + "lastNotNull" + ], + "displayMode": "table", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "multi", + "sort": "desc" + } + }, + "pluginVersion": "9.3.6", + "targets": [ + { + "datasource": { + "type": "loki", + "uid": "loki" + }, + "editorMode": "code", + "expr": "sum(count_over_time(({service_name=~\"$service_name\"})[$__interval])) by (service_name)", + "hide": false, + "legendFormat": "{{stream}}", + "queryType": "range", + "refId": "A" + } + ], + "title": "Count of stderr / stdout historical", + "type": "timeseries" + } + ], + "refresh": "10s", + "schemaVersion": 37, + "style": "dark", + "tags": [ + "Quarkus", + "OpenTelemetry", + "logging" + ], + "templating": { + "list": [ + { + "allValue": ".+", + "current": { + "selected": true, + "text": [ + "All" + ], + "value": [ + ".+" + ] + }, + "datasource": { + "type": "loki", + "uid": "loki" + }, + "definition": "", + "hide": 0, + "includeAll": true, + "label": "Service", + "multi": true, + "name": "service_name", + "options": [], + "query": { + "label": "service_name", + "refId": "LokiVariableQueryEditor-VariableQuery", + "stream": "{service_name=~\".+\"}", + "type": 1 + }, + "refresh": 1, + "regex": "", + "skipUrlSync": false, + "sort": 0, + "type": "query" + }, + { + "allValue": ".+", + "current": { + "text": [ + "All" + ], + "value": [ + ".+" + ] + }, + "includeAll": true, + "label": "Log Level", + 
"multi": true, + "name": "log_level", + "options": [ + { + "selected": false, + "text": "FATAL", + "value": "FATAL" + }, + { + "selected": true, + "text": "ERROR", + "value": "ERROR" + }, + { + "selected": false, + "text": "WARN", + "value": "WARN" + }, + { + "selected": false, + "text": "INFO", + "value": "INFO" + }, + { + "selected": false, + "text": "DEBUG", + "value": "DEBUG" + }, + { + "selected": false, + "text": "TRACE", + "value": "TRACE" + } + ], + "query": "FATAL,ERROR,WARN,INFO,DEBUG,TRACE", + "type": "custom" + }, + { + "current": { + "selected": true, + "text": "", + "value": "" + }, + "hide": 0, + "label": "Search (case insensitive)", + "name": "searchable_pattern", + "options": [ + { + "selected": false, + "text": "", + "value": "" + } + ], + "query": "", + "skipUrlSync": false, + "type": "textbox" + } + ] + }, + "time": { + "from": "now-5m", + "to": "now" + }, + "timepicker": { + "refresh_intervals": [ + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ] + }, + "timezone": "", + "title": "Quarkus OpenTelemetry Logging", + "uid": "fRIvzUZMzTES1", + "version": 1, + "weekStart": "" +} diff --git a/extensions/observability-devservices/testcontainers/src/main/resources/grafana-dashboard-quarkus-micrometer-opentelemetry.json b/extensions/observability-devservices/testcontainers/src/main/resources/grafana-dashboard-quarkus-micrometer-opentelemetry.json new file mode 100644 index 0000000000000..36f29b890eee6 --- /dev/null +++ b/extensions/observability-devservices/testcontainers/src/main/resources/grafana-dashboard-quarkus-micrometer-opentelemetry.json @@ -0,0 +1,4016 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { + "type": "datasource", + "uid": "grafana" + }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "target": { + "limit": 100, + "matchAny": false, + "tags": [], + "type": "dashboard" + }, + "type": "dashboard" + } + ] + }, + "description": "Dashboard for Quarkus applications instrumented with the Micrometer to OpenTelemetry bridge.", + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 1, + "id": 2, + "links": [], + "liveNow": true, + "panels": [ + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 0 + }, + "id": 125, + "panels": [], + "title": "Quick Facts", + "type": "row" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "decimals": 1, + "mappings": [ + { + "options": { + "match": "null", + "result": { + "text": "N/A" + } + }, + "type": "special" + } + ], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 1 + }, + { + "color": "#EAB839", + "value": 300 + }, + { + "color": "semi-dark-green", + "value": 600 + } + ] + }, + "unit": "s" + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 6, + "x": 0, + "y": 1 + }, + "id": 63, + "maxDataPoints": 100, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "horizontal", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": 
"process_uptime_milliseconds{ job = \"$instance\" } / 1000", + "format": "time_series", + "intervalFactor": 1, + "legendFormat": "", + "metric": "", + "range": true, + "refId": "A", + "step": 14400 + } + ], + "title": "Uptime", + "type": "stat" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "mappings": [ + { + "options": { + "match": "null", + "result": { + "text": "N/A" + } + }, + "type": "special" + } + ], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + }, + { + "color": "#EAB839", + "value": 90 + } + ] + }, + "unit": "dateTimeAsIso" + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 6, + "x": 6, + "y": 1 + }, + "id": 92, + "maxDataPoints": 100, + "options": { + "colorMode": "value", + "graphMode": "none", + "justifyMode": "auto", + "orientation": "horizontal", + "percentChangeColorMode": "standard", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showPercentChange": false, + "textMode": "auto", + "wideLayout": true + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "process_start_time_milliseconds{ job = \"$instance\" }", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "", + "metric": "", + "range": true, + "refId": "A", + "step": 14400 + } + ], + "title": "Start time", + "type": "stat" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "decimals": 2, + "mappings": [ + { + "options": { + "match": "null", + "result": { + "text": "N/A" + } + }, + "type": "special" + } + ], + "max": 100, + "min": 0, + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "rgba(50, 172, 45, 0.97)", + "value": null + }, + { + "color": "rgba(237, 129, 40, 0.89)", + "value": 70 + }, + { + "color": "rgba(245, 54, 54, 0.9)", + "value": 90 + } + ] + }, + "unit": "percent" + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 6, + "x": 12, + "y": 1 + }, + "id": 65, + "maxDataPoints": 100, + "options": { + "minVizHeight": 75, + "minVizWidth": 75, + "orientation": "horizontal", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showThresholdLabels": false, + "showThresholdMarkers": true, + "sizing": "auto" + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "expr": "sum(jvm_memory_used_bytes{ area = \"heap\", job = \"$instance\"}) * 100 / sum(jvm_memory_max_bytes{ area = \"heap\", job = \"$instance\" })", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "", + "refId": "A", + "step": 14400 + } + ], + "title": "Heap used", + "type": "gauge" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "decimals": 2, + "mappings": [ + { + "options": { + "match": "null", + "result": { + "text": "N/A" + } + }, + "type": "special" + }, + { + "options": { + "from": -1e+32, + "result": { + "text": "N/A" + }, + "to": 0 + }, + "type": "range" + } + ], + "max": 100, + "min": 0, + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "rgba(50, 172, 45, 0.97)", + "value": null + }, + { + "color": "rgba(237, 129, 40, 0.89)", + "value": 70 
+ }, + { + "color": "rgba(245, 54, 54, 0.9)", + "value": 90 + } + ] + }, + "unit": "percent" + }, + "overrides": [] + }, + "gridPos": { + "h": 4, + "w": 6, + "x": 18, + "y": 1 + }, + "id": 75, + "maxDataPoints": 100, + "options": { + "minVizHeight": 75, + "minVizWidth": 75, + "orientation": "horizontal", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showThresholdLabels": false, + "showThresholdMarkers": true, + "sizing": "auto" + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "expr": "sum(jvm_memory_used_bytes{ area = \"nonheap\", job = \"$instance\" }) * 100 / sum(jvm_memory_max_bytes{ area = \"nonheap\", job = \"$instance\" })", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "", + "refId": "A", + "step": 14400 + } + ], + "title": "Non-Heap used", + "type": "gauge" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 5 + }, + "id": 163, + "panels": [], + "title": "HTTP Edpoints", + "type": "row" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "ops" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 6 + }, + "id": 143, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "exemplar": true, + "expr": "rate( http_server_requests_milliseconds_count{ job = \"$instance\" }[2m]) * 120", + "interval": "", + "legendFormat": "{{method}} - {{uri}} - {{status}}", + "range": true, + "refId": "A" + } + ], + "title": "Total number of requests", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": 
"off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "s" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 6 + }, + "id": 155, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "rate(http_server_requests_milliseconds_sum{ job = \"$instance\" }[4m]) / rate(http_server_requests_milliseconds_count{ job = \"$instance\" }[4m]) / 1000", + "interval": "", + "legendFormat": "{{method}} - {{uri}} - {{status}}", + "range": true, + "refId": "A" + } + ], + "title": "Average inbound request duration", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "s" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 14 + }, + "id": 153, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "exemplar": true, + "expr": "http_server_requests_max_milliseconds{ job = \"$instance\" } / 1000", + "interval": "", + "legendFormat": "{{method}} - {{uri}} - {{status}}", + "range": true, + "refId": "A" + } + ], + "title": "Maximum inbound request duration", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "fieldMinMax": false, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": 
[ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "s" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 14 + }, + "id": 151, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "sum(increase(http_server_requests_milliseconds_sum{ job = \"$instance\" }[2m])) / 1000", + "interval": "", + "legendFormat": "{{method}} - {{uri}} - {{status}}", + "range": true, + "refId": "A" + } + ], + "title": "Sum of the duration of every request", + "type": "timeseries" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 22 + }, + "id": 127, + "panels": [], + "title": "JVM Memory", + "type": "row" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "min": 0, + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "bytes" + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 8, + "x": 0, + "y": 23 + }, + "id": 26, + "options": { + "legend": { + "calcs": [ + "lastNotNull", + "max" + ], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "expr": "sum(jvm_memory_used_bytes{ job = \"$instance\" })", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "used", + "metric": "", + "refId": "A", + "step": 2400 + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "expr": "sum(jvm_memory_committed_bytes{ job = \"$instance\" })", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "committed", + "refId": "B", + "step": 2400 + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "expr": "sum(jvm_memory_max_bytes{ job = \"$instance\" })", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "max", + "refId": "C", + "step": 2400 + } + ], + "title": "JVM Total", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + 
"fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "min": 0, + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "bytes" + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 8, + "x": 8, + "y": 23 + }, + "id": 24, + "options": { + "legend": { + "calcs": [ + "lastNotNull", + "max" + ], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "expr": "sum(jvm_memory_used_bytes{ area = \"heap\", job = \"$instance\" })", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "used", + "metric": "", + "refId": "A", + "step": 2400 + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "expr": "sum(jvm_memory_committed_bytes{ area = \"heap\", job = \"$instance\" })", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "committed", + "refId": "B", + "step": 2400 + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "expr": "sum(jvm_memory_max_bytes{ area = \"heap\", job = \"$instance\" })", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "max", + "refId": "C", + "step": 2400 + } + ], + "title": "JVM Heap", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "min": 0, + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "bytes" + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 23 + }, + "id": 25, + "options": { + "legend": { + "calcs": [ + "lastNotNull", + "max" + ], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "expr": "sum(jvm_memory_used_bytes{ area = \"nonheap\", job = \"$instance\" })", + "format": "time_series", + "interval": "", + "intervalFactor": 2, + "legendFormat": "used", + "metric": "", + "refId": "A", + "step": 2400 + }, + { + "datasource": { + "type": "prometheus", + 
"uid": "prometheus" + }, + "expr": "sum(jvm_memory_committed_bytes{ area=\"nonheap\", job = \"$instance\" })", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "committed", + "refId": "B", + "step": 2400 + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "expr": "sum(jvm_memory_max_bytes{ area=\"nonheap\", job = \"$instance\" })", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "max", + "refId": "C", + "step": 2400 + } + ], + "title": "JVM Non-Heap", + "type": "timeseries" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 30 + }, + "id": 128, + "panels": [], + "title": "JVM Misc", + "type": "row" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "decimals": 1, + "mappings": [], + "max": 1, + "min": 0, + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "percentunit" + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 11, + "x": 0, + "y": 31 + }, + "id": 106, + "options": { + "legend": { + "calcs": [ + "lastNotNull", + "max" + ], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "expr": "system_cpu_usage{ job = \"$instance\" }", + "format": "time_series", + "hide": false, + "intervalFactor": 1, + "legendFormat": "system", + "metric": "", + "refId": "A", + "step": 2400 + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "expr": "process_cpu_usage{ job = \"$instance\" }", + "format": "time_series", + "hide": false, + "intervalFactor": 1, + "legendFormat": "process", + "refId": "B" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "expr": "avg_over_time(process_cpu_usage{ job = \"$instance\" }[1h])", + "format": "time_series", + "hide": false, + "intervalFactor": 1, + "legendFormat": "process-1h", + "refId": "C" + } + ], + "title": "CPU Usage", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": 
"linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "decimals": 1, + "mappings": [], + "min": 0, + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 13, + "x": 11, + "y": 31 + }, + "id": 93, + "options": { + "legend": { + "calcs": [ + "lastNotNull", + "max" + ], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "expr": "system_load_average_1m{ job = \"$instance\" }", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "system-1m", + "metric": "", + "refId": "A", + "step": 2400 + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "expr": "system_cpu_count{ job = \"$instance\" }", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "cpus", + "refId": "B" + } + ], + "title": "Load", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [ + { + "matcher": { + "id": "byName", + "options": "blocked" + }, + "properties": [ + { + "id": "color", + "value": { + "fixedColor": "#bf1b00", + "mode": "fixed" + } + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "new" + }, + "properties": [ + { + "id": "color", + "value": { + "fixedColor": "#fce2de", + "mode": "fixed" + } + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "runnable" + }, + "properties": [ + { + "id": "color", + "value": { + "fixedColor": "#7eb26d", + "mode": "fixed" + } + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "terminated" + }, + "properties": [ + { + "id": "color", + "value": { + "fixedColor": "#511749", + "mode": "fixed" + } + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "timed-waiting" + }, + "properties": [ + { + "id": "color", + "value": { + "fixedColor": "#c15c17", + "mode": "fixed" + } + } + ] + }, + { + "matcher": { + "id": "byName", + "options": "waiting" + }, + "properties": [ + { + "id": "color", + "value": { + "fixedColor": "#eab839", + "mode": "fixed" + } + } + ] + } + ] + }, + "gridPos": { + "h": 7, + "w": 8, + "x": 0, + "y": 38 + }, + "id": 124, + "options": { + "legend": { + "calcs": [ + "lastNotNull", + "max" + ], + "displayMode": "list", + "placement": "bottom", 
+ "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "jvm_threads_states{ job = \"$instance\" }", + "format": "time_series", + "interval": "", + "intervalFactor": 2, + "legendFormat": "{{state}}", + "range": true, + "refId": "A" + } + ], + "title": "Thread States", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "decimals": 0, + "mappings": [], + "min": 0, + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 8, + "x": 8, + "y": 38 + }, + "id": 32, + "options": { + "legend": { + "calcs": [ + "lastNotNull", + "max" + ], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "jvm_threads_live{ job = \"$instance\" }", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "live", + "metric": "", + "range": true, + "refId": "A", + "step": 2400 + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "jvm_threads_daemon{ job = \"$instance\" }", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "daemon", + "metric": "", + "range": true, + "refId": "B", + "step": 2400 + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "jvm_threads_peak{ job = \"$instance\" }", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "peak", + "range": true, + "refId": "C", + "step": 2400 + } + ], + "title": "Threads", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "log": 10, + "type": "log" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + 
"mode": "off" + } + }, + "decimals": 0, + "mappings": [], + "min": 0, + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 38 + }, + "id": 61, + "options": { + "legend": { + "calcs": [ + "lastNotNull", + "max" + ], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "process_files_open{ job = \"$instance\" }", + "format": "time_series", + "hide": false, + "intervalFactor": 2, + "legendFormat": "open", + "metric": "", + "range": true, + "refId": "A", + "step": 2400 + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "process_files_max{ job = \"$instance\" }", + "format": "time_series", + "hide": false, + "intervalFactor": 2, + "legendFormat": "max", + "metric": "", + "range": true, + "refId": "B", + "step": 2400 + } + ], + "title": "File Descriptors", + "type": "timeseries" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 45 + }, + "id": 192, + "panels": [], + "title": "JDBC", + "type": "row" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic", + "seriesBy": "last" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineStyle": { + "fill": "solid" + }, + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 100 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 46 + }, + "id": 193, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "agroal_active_count{ job = \"$instance\" }", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "Active Connections", + "range": true, + "refId": "A", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "agroal_available_count{ job = \"$instance\" }", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "Available Connections", + "range": true, + "refId": "B", + "useBackend": false + }, + { 
+ "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "rate(agroal_leak_detection_count_total{ job = \"$instance\" }[$__rate_interval])", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": false, + "instant": false, + "legendFormat": "Leaked Connections", + "range": true, + "refId": "C", + "useBackend": false + } + ], + "title": "JDBC Connections", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "continuous-GrYlRd" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 46 + }, + "id": 194, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "agroal_blocking_time_average_milliseconds{ job = \"$instance\" }", + "fullMetaSearch": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "Average (ms)", + "range": true, + "refId": "A", + "useBackend": false + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "disableTextWrap": false, + "editorMode": "builder", + "expr": "agroal_blocking_time_max_milliseconds{ job = \"$instance\" }", + "fullMetaSearch": false, + "hide": false, + "includeNullMetadata": true, + "instant": false, + "legendFormat": "Max (ms)", + "range": true, + "refId": "B", + "useBackend": false + } + ], + "title": "JDBC Blocking Time", + "type": "timeseries" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 54 + }, + "id": 129, + "panels": [], + "repeat": "persistence_counts", + "title": "JVM Memory Pools (Heap)", + "type": "row" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + 
"thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "min": 0, + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "bytes" + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 24, + "x": 0, + "y": 55 + }, + "id": 3, + "options": { + "legend": { + "calcs": [ + "lastNotNull", + "max" + ], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "repeat": "jvm_memory_pool_heap", + "repeatDirection": "h", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "expr": "jvm_memory_used_bytes{ id =~ \"$jvm_memory_pool_heap\", job = \"$instance\" }", + "format": "time_series", + "hide": false, + "interval": "", + "intervalFactor": 2, + "legendFormat": "used", + "metric": "", + "refId": "A", + "step": 1800 + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "expr": "jvm_memory_committed_bytes{ id =~ \"$jvm_memory_pool_heap\", job = \"$instance\" }", + "format": "time_series", + "hide": false, + "interval": "", + "intervalFactor": 2, + "legendFormat": "commited", + "metric": "", + "refId": "B", + "step": 1800 + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "expr": "jvm_memory_max_bytes{ id =~ \"$jvm_memory_pool_heap\", job = \"$instance\" }", + "format": "time_series", + "hide": false, + "interval": "", + "intervalFactor": 2, + "legendFormat": "max", + "metric": "", + "refId": "C", + "step": 1800 + } + ], + "title": "$jvm_memory_pool_heap", + "type": "timeseries" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 62 + }, + "id": 130, + "panels": [], + "title": "JVM Memory Pools (Non-Heap)", + "type": "row" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "min": 0, + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "bytes" + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 24, + "x": 0, + "y": 63 + }, + "id": 78, + "options": { + "legend": { + "calcs": [ + "lastNotNull", + "max" + ], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "multi", + "sort": "none" + } + }, + "repeat": "jvm_memory_pool_nonheap", + "repeatDirection": "h", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "expr": "jvm_memory_used_bytes{ id =~ \"$jvm_memory_pool_nonheap\", job = \"$instance\" }", + "format": "time_series", + "hide": false, + "interval": "", + "intervalFactor": 2, + "legendFormat": "used", + 
"metric": "", + "refId": "A", + "step": 1800 + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "expr": "jvm_memory_committed_bytes{ id =~ \"$jvm_memory_pool_nonheap\", job = \"$instance\" }", + "format": "time_series", + "hide": false, + "interval": "", + "intervalFactor": 2, + "legendFormat": "commited", + "metric": "", + "refId": "B", + "step": 1800 + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "expr": "jvm_memory_max_bytes{ id =~ \"$jvm_memory_pool_nonheap\", job = \"$instance\" }", + "format": "time_series", + "hide": false, + "interval": "", + "intervalFactor": 2, + "legendFormat": "max", + "metric": "", + "refId": "C", + "step": 1800 + } + ], + "title": "$jvm_memory_pool_nonheap", + "type": "timeseries" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 70 + }, + "id": 131, + "panels": [], + "title": "Garbage Collection", + "type": "row" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "min": 0, + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "ops" + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 8, + "x": 0, + "y": 71 + }, + "id": 98, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "rate(jvm_gc_pause_milliseconds_count{ job = \"$instance\" }[2m]) * 120", + "format": "time_series", + "hide": false, + "interval": "", + "intervalFactor": 1, + "legendFormat": "{{action}} ({{cause}})", + "range": true, + "refId": "A" + } + ], + "title": "Collections", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "bars", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "min": 0, + "thresholds": { + "mode": "absolute", + "steps": [ + { + 
"color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "s" + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 8, + "x": 8, + "y": 71 + }, + "id": 101, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "(rate(jvm_gc_pause_milliseconds_sum{ job = \"$instance\" }[2m]) / rate(jvm_gc_pause_milliseconds_count{ job = \"$instance\" }[2m])) / 1000", + "format": "time_series", + "hide": false, + "instant": false, + "intervalFactor": 1, + "legendFormat": "avg {{action}} ({{cause}})", + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "jvm_gc_pause_milliseconds_max{ job = \"$instance\" } / 1000", + "format": "time_series", + "hide": false, + "instant": false, + "intervalFactor": 1, + "legendFormat": "max {{action}} ({{cause}})", + "refId": "B" + } + ], + "title": "Pause Durations", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "min": 0, + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "Bps" + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 71 + }, + "id": 99, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "rate(jvm_gc_memory_allocated_bytes_total{ job = \"$instance\" }[2m]) * 120", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "allocated", + "range": true, + "refId": "A" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "rate(jvm_gc_memory_promoted_bytes_total{ job = \"$instance\" }[2m]) * 120", + "format": "time_series", + "interval": "", + "intervalFactor": 1, + "legendFormat": "promoted", + "range": true, + "refId": "B" + } + ], + "title": "Allocated/Promoted", + "type": "timeseries" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 78 + }, + "id": 132, + "panels": [], + "title": "Classloading", + "type": "row" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": 
"palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "min": 0, + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 12, + "x": 0, + "y": 79 + }, + "id": 37, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "jvm_classes_loaded{ job = \"$instance\" }", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "loaded", + "metric": "", + "range": true, + "refId": "A", + "step": 1200 + } + ], + "title": "Classes loaded", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 12, + "x": 12, + "y": 79 + }, + "id": 38, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "code", + "expr": "delta(jvm_classes_loaded{ job = \"$instance\" }[2m])", + "format": "time_series", + "hide": false, + "interval": "", + "intervalFactor": 1, + "legendFormat": "delta-2m", + "metric": "", + "range": true, + "refId": "A", + "step": 1200 + } + ], + "title": "Class delta", + "type": "timeseries" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 86 + }, + "id": 133, + "panels": [], + "title": "Buffer Pools", + "type": "row" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + 
"mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "min": 0, + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "bytes" + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 6, + "x": 0, + "y": 87 + }, + "id": 33, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "expr": "jvm_buffer_memory_used_bytes{ id = \"direct\", job = \"$instance\" }", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "used", + "metric": "", + "refId": "A", + "step": 2400 + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "expr": "jvm_buffer_total_capacity_bytes{ id = \"direct\", job = \"$instance\" }", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "capacity", + "metric": "", + "refId": "B", + "step": 2400 + } + ], + "title": "Direct Buffers", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "decimals": 0, + "mappings": [], + "min": 0, + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 6, + "x": 6, + "y": 87 + }, + "id": 83, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "expr": "jvm_buffer_count_buffers{ id = \"direct\", job = \"$instance\" }", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "count", + "metric": "", + "refId": "A", + "step": 2400 + } + ], + "title": "Direct Buffers", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + 
"uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "min": 0, + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "bytes" + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 6, + "x": 12, + "y": 87 + }, + "id": 85, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "expr": "jvm_buffer_memory_used_bytes{ id = \"mapped\", job = \"$instance\" }", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "used", + "metric": "", + "refId": "A", + "step": 2400 + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "expr": "jvm_buffer_total_capacity_bytes{ id = \"mapped\", job = \"$instance\" }", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "capacity", + "metric": "", + "refId": "B", + "step": 2400 + } + ], + "title": "Mapped Buffers", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "barWidthFactor": 0.6, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "decimals": 0, + "mappings": [], + "min": 0, + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "short" + }, + "overrides": [] + }, + "gridPos": { + "h": 7, + "w": 6, + "x": 18, + "y": 87 + }, + "id": 84, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "multi", + "sort": "none" + } + }, + "pluginVersion": "11.4.0", + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "expr": "jvm_buffer_count_buffers{ id = \"mapped\", job = \"$instance\" }", + "format": "time_series", + "intervalFactor": 2, + "legendFormat": "count", + "metric": "", + "refId": "A", + "step": 2400 + } + ], + "title": "Mapped Buffers", 
+ "type": "timeseries" + }, + { + "collapsed": true, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 94 + }, + "id": 173, + "panels": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "ops" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 117 + }, + "id": 171, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "multi", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "expr": "rate(mp_graphql_seconds_count{ job = \"$instance\" }[2m])", + "interval": "", + "legendFormat": "{{type}} {{name}}", + "refId": "A" + } + ], + "title": "Total number of GraphQL requests", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "min": 0, + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "s" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 117 + }, + "id": 181, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "multi", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "expr": "rate(mp_graphql_seconds_sum{ job = \"$instance\" }[4m]) / rate(mp_graphql_seconds_count{ job = \"$instance\" }[4m])", + "interval": "", + "legendFormat": "{{type}} {{name}}", + "refId": "A" + } + ], + "title": "Average inbound request duration", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + 
"axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "s" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 125 + }, + "id": 183, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "multi", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "expr": "mp_graphql_seconds_max{ job = \"$instance\" }", + "interval": "", + "legendFormat": "{{type}} {{name}}", + "refId": "A" + } + ], + "title": "Maximum inbound GraphQL request duration", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisBorderShow": false, + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unit": "s" + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 125 + }, + "id": 185, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "maxHeight": 600, + "mode": "multi", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "expr": "rate(mp_graphql_seconds_sum{ job = \"$instance\" }[2m])", + "interval": "", + "legendFormat": "{{type}} {{name}}", + "refId": "A" + } + ], + "title": "Sum of the duration of every GraphQL request", + "type": "timeseries" + } + ], + "title": "GraphQL requests", + "type": "row" + } + ], + "preload": false, + "refresh": "auto", + "schemaVersion": 40, + "tags": [ + "Quarkus", + "Micrometer", + "OpenTelemetry", + "metrics" + ], + "templating": { + "list": [ + { + "current": { + "text": "host.docker.internal:8080", + "value": "host.docker.internal:8080" + }, + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "definition": "", + "includeAll": false, + "label": "Instance", + "name": "instance", + "options": [], + "query": "label_values(jvm_memory_used_bytes{},job)", + "refresh": 2, + "regex": "", + "type": "query" + }, + { + "current": { + "text": "All", + "value": "$__all" + }, + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + 
"definition": "", + "includeAll": true, + "label": "JVM Memory Pools Heap", + "name": "jvm_memory_pool_heap", + "options": [], + "query": "label_values(jvm_memory_used_bytes{ area=\"heap\"},id)", + "refresh": 1, + "regex": "", + "sort": 1, + "type": "query" + }, + { + "current": { + "text": "All", + "value": "$__all" + }, + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "definition": "", + "includeAll": true, + "label": "JVM Memory Pools Non-Heap", + "name": "jvm_memory_pool_nonheap", + "options": [], + "query": "label_values(jvm_memory_used_bytes{ area=\"nonheap\"},id)", + "refresh": 1, + "regex": "", + "sort": 2, + "type": "query" + } + ] + }, + "time": { + "from": "now-5m", + "to": "now" + }, + "timepicker": { + "now": true, + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "browser", + "title": "Quarkus Micrometer OpenTelemetry", + "uid": "edy6473ay1vk0a", + "version": 1, + "weekStart": "" +} diff --git a/extensions/observability-devservices/testcontainers/src/main/resources/grafana-dashboard-quarkus-micrometer-otlp.json b/extensions/observability-devservices/testcontainers/src/main/resources/grafana-dashboard-quarkus-micrometer-otlp.json index 3d91e07220a07..1bffddbe9a327 100644 --- a/extensions/observability-devservices/testcontainers/src/main/resources/grafana-dashboard-quarkus-micrometer-otlp.json +++ b/extensions/observability-devservices/testcontainers/src/main/resources/grafana-dashboard-quarkus-micrometer-otlp.json @@ -24,18 +24,13 @@ "description": "Dashboard for Micrometer instrumented applications in Quarkus using the OTLP registry", "editable": true, "fiscalYearStartMonth": 0, - "gnetId": 14370, "graphTooltip": 1, - "id": 4, + "id": 2, "links": [], "liveNow": true, "panels": [ { "collapsed": false, - "datasource": { - "type": "prometheus", - "uid": "prometheus" - }, "gridPos": { "h": 1, "w": 24, @@ -44,15 +39,6 @@ }, "id": 125, "panels": [], - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "prometheus" - }, - "refId": "A" - } - ], "title": "Quick Facts", "type": "row" }, @@ -128,7 +114,7 @@ "textMode": "auto", "wideLayout": true }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -213,7 +199,7 @@ "textMode": "auto", "wideLayout": true }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -302,7 +288,7 @@ "showThresholdMarkers": true, "sizing": "auto" }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -398,7 +384,7 @@ "showThresholdMarkers": true, "sizing": "auto" }, - "pluginVersion": "11.0.0", + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -418,10 +404,6 @@ }, { "collapsed": false, - "datasource": { - "type": "prometheus", - "uid": "prometheus" - }, "gridPos": { "h": 1, "w": 24, @@ -430,15 +412,6 @@ }, "id": 163, "panels": [], - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "prometheus" - }, - "refId": "A" - } - ], "title": "HTTP Edpoints", "type": "row" }, @@ -459,6 +432,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -504,7 +478,7 @@ }, "gridPos": { "h": 8, - "w": 12, + "w": 24, "x": 0, "y": 6 }, @@ -522,6 +496,7 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { 
"datasource": { @@ -529,7 +504,9 @@ "uid": "prometheus" }, "editorMode": "code", - "expr": "rate(http_server_requests_milliseconds_count{}[2m]) * 1000", + "exemplar": false, + "expr": "rate(http_server_requests_milliseconds_count[2m])*120", + "instant": false, "interval": "", "legendFormat": "{{method}} - {{uri}} - {{status}}", "range": true, @@ -556,6 +533,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -602,8 +580,8 @@ "gridPos": { "h": 8, "w": 12, - "x": 12, - "y": 6 + "x": 0, + "y": 14 }, "id": 155, "options": { @@ -619,6 +597,7 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -653,101 +632,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, - "drawStyle": "line", - "fillOpacity": 10, - "gradientMode": "none", - "hideFrom": { - "legend": false, - "tooltip": false, - "viz": false - }, - "insertNulls": false, - "lineInterpolation": "linear", - "lineWidth": 1, - "pointSize": 5, - "scaleDistribution": { - "type": "linear" - }, - "showPoints": "never", - "spanNulls": false, - "stacking": { - "group": "A", - "mode": "none" - }, - "thresholdsStyle": { - "mode": "off" - } - }, - "mappings": [], - "thresholds": { - "mode": "absolute", - "steps": [ - { - "color": "green", - "value": null - }, - { - "color": "red", - "value": 80 - } - ] - }, - "unit": "s" - }, - "overrides": [] - }, - "gridPos": { - "h": 8, - "w": 12, - "x": 0, - "y": 14 - }, - "id": 153, - "options": { - "legend": { - "calcs": [], - "displayMode": "list", - "placement": "bottom", - "showLegend": true - }, - "tooltip": { - "maxHeight": 600, - "mode": "multi", - "sort": "none" - } - }, - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "prometheus" - }, - "expr": "http_server_requests_seconds_max{ }", - "interval": "", - "legendFormat": "{{method}} - {{uri}} - {{status}}", - "refId": "A" - } - ], - "title": "Maximum inbound request duration", - "type": "timeseries" - }, - { - "datasource": { - "type": "prometheus", - "uid": "prometheus" - }, - "fieldConfig": { - "defaults": { - "color": { - "mode": "palette-classic" - }, - "custom": { - "axisBorderShow": false, - "axisCenteredZero": false, - "axisColorMode": "text", - "axisLabel": "", - "axisPlacement": "auto", - "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -811,15 +696,18 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { "type": "prometheus", "uid": "prometheus" }, - "expr": "rate(http_server_requests_seconds_sum{ }[2m])", + "editorMode": "code", + "expr": "rate(http_server_requests_milliseconds_sum{ }[2m])/1000", "interval": "", "legendFormat": "{{method}} - {{uri}} - {{status}}", + "range": true, "refId": "A" } ], @@ -828,10 +716,6 @@ }, { "collapsed": false, - "datasource": { - "type": "prometheus", - "uid": "prometheus" - }, "gridPos": { "h": 1, "w": 24, @@ -840,15 +724,6 @@ }, "id": 127, "panels": [], - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "prometheus" - }, - "refId": "A" - } - ], "title": "JVM Memory", "type": "row" }, @@ -869,6 +744,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -936,6 +812,7 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -995,6 +872,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + 
"barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -1062,6 +940,7 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -1121,6 +1000,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -1188,6 +1068,7 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -1233,10 +1114,6 @@ }, { "collapsed": false, - "datasource": { - "type": "prometheus", - "uid": "prometheus" - }, "gridPos": { "h": 1, "w": 24, @@ -1245,15 +1122,6 @@ }, "id": 128, "panels": [], - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "prometheus" - }, - "refId": "A" - } - ], "title": "JVM Misc", "type": "row" }, @@ -1274,6 +1142,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -1343,6 +1212,7 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -1403,6 +1273,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -1471,6 +1342,7 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -1517,6 +1389,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -1674,6 +1547,7 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -1710,6 +1584,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -1778,6 +1653,7 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -1844,6 +1720,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -1913,6 +1790,7 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -1981,6 +1859,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 0, "gradientMode": "none", @@ -2046,6 +1925,7 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -2118,6 +1998,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 0, "gradientMode": "none", @@ -2180,6 +2061,7 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -2220,10 +2102,6 @@ }, { "collapsed": false, - "datasource": { - "type": "prometheus", - "uid": "prometheus" - }, "gridPos": { "h": 1, "w": 24, @@ -2233,15 +2111,6 @@ "id": 129, "panels": [], "repeat": "persistence_counts", - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "prometheus" - }, - "refId": "A" - } - ], "title": "JVM Memory Pools (Heap)", "type": "row" }, @@ -2262,6 +2131,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -2293,7 +2163,8 @@ "mode": "absolute", "steps": [ { - "color": "green" + "color": "green", + "value": null }, { "color": "red", @@ -2307,7 +2178,7 @@ }, "gridPos": { "h": 7, - "w": 8, + "w": 12, "x": 0, "y": 55 }, @@ -2328,22 +2199,30 @@ "sort": "none" } }, + 
"pluginVersion": "11.4.0", "repeat": "jvm_memory_pool_heap", + "repeatDirection": "h", "targets": [ { "datasource": { "type": "prometheus", "uid": "prometheus" }, - "expr": "jvm_memory_used_bytes{ id=~\"$jvm_memory_pool_heap\"}", + "disableTextWrap": false, + "editorMode": "builder", + "expr": "jvm_memory_used_bytes{area=~\"heap\"}", "format": "time_series", + "fullMetaSearch": false, "hide": false, + "includeNullMetadata": true, "interval": "", "intervalFactor": 2, "legendFormat": "used", "metric": "", + "range": true, "refId": "A", - "step": 1800 + "step": 1800, + "useBackend": false }, { "datasource": { @@ -2379,32 +2258,6 @@ "title": "$jvm_memory_pool_heap", "type": "timeseries" }, - { - "collapsed": false, - "datasource": { - "type": "prometheus", - "uid": "prometheus" - }, - "gridPos": { - "h": 1, - "w": 24, - "x": 0, - "y": 62 - }, - "id": 130, - "panels": [], - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "prometheus" - }, - "refId": "A" - } - ], - "title": "JVM Memory Pools (Non-Heap)", - "type": "row" - }, { "datasource": { "type": "prometheus", @@ -2422,6 +2275,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -2453,7 +2307,8 @@ "mode": "absolute", "steps": [ { - "color": "green" + "color": "green", + "value": null }, { "color": "red", @@ -2467,9 +2322,9 @@ }, "gridPos": { "h": 7, - "w": 8, - "x": 0, - "y": 63 + "w": 12, + "x": 12, + "y": 55 }, "id": 78, "options": { @@ -2488,7 +2343,9 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "repeat": "jvm_memory_pool_nonheap", + "repeatDirection": "h", "targets": [ { "datasource": { @@ -2541,27 +2398,27 @@ }, { "collapsed": false, - "datasource": { - "type": "prometheus", - "uid": "prometheus" + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 62 }, + "id": 130, + "panels": [], + "title": "JVM Memory Pools (Non-Heap)", + "type": "row" + }, + { + "collapsed": false, "gridPos": { "h": 1, "w": 24, "x": 0, - "y": 70 + "y": 63 }, "id": 131, "panels": [], - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "prometheus" - }, - "refId": "A" - } - ], "title": "Garbage Collection", "type": "row" }, @@ -2582,6 +2439,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -2613,7 +2471,8 @@ "mode": "absolute", "steps": [ { - "color": "green" + "color": "green", + "value": null }, { "color": "red", @@ -2629,7 +2488,7 @@ "h": 7, "w": 8, "x": 0, - "y": 71 + "y": 64 }, "id": 98, "options": { @@ -2645,6 +2504,7 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -2682,7 +2542,8 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, - "drawStyle": "line", + "barWidthFactor": 0.6, + "drawStyle": "bars", "fillOpacity": 10, "gradientMode": "none", "hideFrom": { @@ -2713,7 +2574,8 @@ "mode": "absolute", "steps": [ { - "color": "green" + "color": "green", + "value": null }, { "color": "red", @@ -2729,7 +2591,7 @@ "h": 7, "w": 8, "x": 8, - "y": 71 + "y": 64 }, "id": 101, "options": { @@ -2745,13 +2607,15 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { "type": "prometheus", "uid": "prometheus" }, - "expr": "rate(jvm_gc_pause_seconds_sum{ }[1m])/rate(jvm_gc_pause_seconds_count{ }[1m])", + "editorMode": "code", + "expr": "(rate(jvm_gc_pause_milliseconds_sum{ }[2m])/rate(jvm_gc_pause_milliseconds_count{ }[2m]))/1000", "format": 
"time_series", "hide": false, "instant": false, @@ -2764,7 +2628,8 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "jvm_gc_pause_seconds_max{ }", + "editorMode": "code", + "expr": "jvm_gc_pause_milliseconds_max{ }", "format": "time_series", "hide": false, "instant": false, @@ -2793,6 +2658,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -2824,7 +2690,8 @@ "mode": "absolute", "steps": [ { - "color": "green" + "color": "green", + "value": null }, { "color": "red", @@ -2840,7 +2707,7 @@ "h": 7, "w": 8, "x": 16, - "y": 71 + "y": 64 }, "id": 99, "options": { @@ -2856,6 +2723,7 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -2887,27 +2755,14 @@ }, { "collapsed": false, - "datasource": { - "type": "prometheus", - "uid": "prometheus" - }, "gridPos": { "h": 1, "w": 24, "x": 0, - "y": 78 + "y": 71 }, "id": 132, "panels": [], - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "prometheus" - }, - "refId": "A" - } - ], "title": "Classloading", "type": "row" }, @@ -2928,6 +2783,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -2959,7 +2815,8 @@ "mode": "absolute", "steps": [ { - "color": "green" + "color": "green", + "value": null }, { "color": "red", @@ -2975,7 +2832,7 @@ "h": 7, "w": 12, "x": 0, - "y": 79 + "y": 72 }, "id": 37, "options": { @@ -2991,6 +2848,7 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -3028,6 +2886,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -3058,7 +2917,8 @@ "mode": "absolute", "steps": [ { - "color": "green" + "color": "green", + "value": null }, { "color": "red", @@ -3074,7 +2934,7 @@ "h": 7, "w": 12, "x": 12, - "y": 79 + "y": 72 }, "id": 38, "options": { @@ -3090,6 +2950,7 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -3114,27 +2975,14 @@ }, { "collapsed": false, - "datasource": { - "type": "prometheus", - "uid": "prometheus" - }, "gridPos": { "h": 1, "w": 24, "x": 0, - "y": 86 + "y": 79 }, "id": 133, "panels": [], - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "prometheus" - }, - "refId": "A" - } - ], "title": "Buffer Pools", "type": "row" }, @@ -3155,6 +3003,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -3186,7 +3035,8 @@ "mode": "absolute", "steps": [ { - "color": "green" + "color": "green", + "value": null }, { "color": "red", @@ -3202,7 +3052,7 @@ "h": 7, "w": 6, "x": 0, - "y": 87 + "y": 80 }, "id": 33, "options": { @@ -3218,6 +3068,7 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -3266,6 +3117,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -3298,7 +3150,8 @@ "mode": "absolute", "steps": [ { - "color": "green" + "color": "green", + "value": null }, { "color": "red", @@ -3314,7 +3167,7 @@ "h": 7, "w": 6, "x": 6, - "y": 87 + "y": 80 }, "id": 83, "options": { @@ -3330,6 +3183,7 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -3365,6 +3219,7 @@ "axisLabel": "", "axisPlacement": "auto", 
"barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -3396,7 +3251,8 @@ "mode": "absolute", "steps": [ { - "color": "green" + "color": "green", + "value": null }, { "color": "red", @@ -3412,7 +3268,7 @@ "h": 7, "w": 6, "x": 12, - "y": 87 + "y": 80 }, "id": 85, "options": { @@ -3428,6 +3284,7 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -3476,6 +3333,7 @@ "axisLabel": "", "axisPlacement": "auto", "barAlignment": 0, + "barWidthFactor": 0.6, "drawStyle": "line", "fillOpacity": 10, "gradientMode": "none", @@ -3508,7 +3366,8 @@ "mode": "absolute", "steps": [ { - "color": "green" + "color": "green", + "value": null }, { "color": "red", @@ -3524,7 +3383,7 @@ "h": 7, "w": 6, "x": 18, - "y": 87 + "y": 80 }, "id": 84, "options": { @@ -3540,6 +3399,7 @@ "sort": "none" } }, + "pluginVersion": "11.4.0", "targets": [ { "datasource": { @@ -3560,15 +3420,11 @@ }, { "collapsed": true, - "datasource": { - "type": "prometheus", - "uid": "prometheus" - }, "gridPos": { "h": 1, "w": 24, "x": 0, - "y": 94 + "y": 87 }, "id": 173, "panels": [ @@ -3950,28 +3806,24 @@ "type": "timeseries" } ], - "targets": [ - { - "datasource": { - "type": "prometheus", - "uid": "prometheus" - }, - "refId": "A" - } - ], "title": "GraphQL requests", "type": "row" } ], + "preload": false, "refresh": "auto", - "schemaVersion": 39, - "tags": [], + "schemaVersion": 40, + "tags": [ + "Quarkus", + "Micrometer", + "OTLP", + "metrics" + ], "templating": { "list": [ { "current": { "isNone": true, - "selected": false, "text": "None", "value": "" }, @@ -3980,26 +3832,17 @@ "uid": "prometheus" }, "definition": "", - "hide": 0, "includeAll": false, "label": "Application", - "multi": false, "name": "application", "options": [], "query": "label_values(application)", "refresh": 2, "regex": "", - "skipUrlSync": false, - "sort": 0, - "tagValuesQuery": "", - "tagsQuery": "", - "type": "query", - "useTags": false + "type": "query" }, { - "allFormat": "glob", "current": { - "selected": false, "text": "host.docker.internal:8080", "value": "host.docker.internal:8080" }, @@ -4008,27 +3851,17 @@ "uid": "prometheus" }, "definition": "", - "hide": 0, "includeAll": false, "label": "Instance", - "multi": false, - "multiFormat": "glob", "name": "instance", "options": [], "query": "label_values(jvm_memory_used_bytes{)", "refresh": 2, "regex": "", - "skipUrlSync": false, - "sort": 0, - "tagValuesQuery": "", - "tagsQuery": "", - "type": "query", - "useTags": false + "type": "query" }, { - "allFormat": "glob", "current": { - "selected": false, "text": "All", "value": "$__all" }, @@ -4036,28 +3869,23 @@ "type": "prometheus", "uid": "prometheus" }, - "definition": "", - "hide": 0, + "definition": "label_values(jvm_memory_used_bytes{area=\"heap\"},area)", "includeAll": true, "label": "JVM Memory Pools Heap", - "multi": false, - "multiFormat": "glob", "name": "jvm_memory_pool_heap", "options": [], - "query": "label_values(jvm_memory_used_bytes{ area=\"heap\"},id)", + "query": { + "qryType": 1, + "query": "label_values(jvm_memory_used_bytes{area=\"heap\"},area)", + "refId": "PrometheusVariableQueryEditor-VariableQuery" + }, "refresh": 1, "regex": "", - "skipUrlSync": false, "sort": 1, - "tagValuesQuery": "", - "tagsQuery": "", - "type": "query", - "useTags": false + "type": "query" }, { - "allFormat": "glob", "current": { - "selected": false, "text": "All", "value": "$__all" }, @@ -4066,59 +3894,26 @@ "uid": "prometheus" }, "definition": "", - "hide": 0, "includeAll": true, 
"label": "JVM Memory Pools Non-Heap", - "multi": false, - "multiFormat": "glob", "name": "jvm_memory_pool_nonheap", "options": [], "query": "label_values(jvm_memory_used_bytes{ area=\"nonheap\"},id)", "refresh": 1, "regex": "", - "skipUrlSync": false, "sort": 2, - "tagValuesQuery": "", - "tagsQuery": "", - "type": "query", - "useTags": false + "type": "query" } ] }, "time": { - "from": "now-3h", + "from": "now-5m", "to": "now" }, - "timeRangeUpdatedDuringEditOrView": false, - "timepicker": { - "now": true, - "refresh_intervals": [ - "5s", - "10s", - "30s", - "1m", - "5m", - "15m", - "30m", - "1h", - "2h", - "1d" - ], - "time_options": [ - "5m", - "15m", - "1h", - "6h", - "12h", - "24h", - "2d", - "7d", - "30d" - ] - }, + "timepicker": {}, "timezone": "browser", - "title": "Quarkus Micrometer Metrics - OTLP", + "title": "Quarkus Micrometer OTLP registry", "uid": "edy6473ay1vk0c", - "version": 1, + "version": 3, "weekStart": "" } \ No newline at end of file diff --git a/extensions/observability-devservices/testcontainers/src/main/resources/grafana-dashboard-quarkus-micrometer-prometheus.json b/extensions/observability-devservices/testcontainers/src/main/resources/grafana-dashboard-quarkus-micrometer-prometheus.json index 9270212d30169..27d9a232c8738 100644 --- a/extensions/observability-devservices/testcontainers/src/main/resources/grafana-dashboard-quarkus-micrometer-prometheus.json +++ b/extensions/observability-devservices/testcontainers/src/main/resources/grafana-dashboard-quarkus-micrometer-prometheus.json @@ -14,7 +14,12 @@ "target": { "limit": 100, "matchAny": false, - "tags": [], + "tags": [ + "Quarkus", + "Micrometer", + "Prometheus", + "metrics" + ], "type": "dashboard" }, "type": "dashboard" @@ -135,7 +140,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "process_uptime_seconds", + "expr": "process_uptime_seconds{ instance = \"$instance\" }", "format": "time_series", "intervalFactor": 1, "legendFormat": "", @@ -218,7 +223,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "process_start_time_seconds * 1000", + "expr": "process_start_time_seconds{ instance = \"$instance\" } * 1000", "format": "time_series", "intervalFactor": 2, "legendFormat": "", @@ -305,7 +310,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "sum(jvm_memory_used_bytes{area=\"heap\"})*100/sum(jvm_memory_max_bytes{area=\"heap\"})", + "expr": "sum(jvm_memory_used_bytes{ area = \"heap\", instance = \"$instance\" }) * 100 / sum(jvm_memory_max_bytes{ area = \"heap\", instance = \"$instance\" })", "format": "time_series", "intervalFactor": 2, "legendFormat": "", @@ -401,7 +406,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "sum(jvm_memory_used_bytes{area=\"nonheap\"})*100/sum(jvm_memory_max_bytes{area=\"nonheap\"})", + "expr": "sum(jvm_memory_used_bytes{ area = \"nonheap\", instance = \"$instance\" }) * 100 / sum(jvm_memory_max_bytes{ area = \"nonheap\", instance = \"$instance\" })", "format": "time_series", "intervalFactor": 2, "legendFormat": "", @@ -435,7 +440,7 @@ "refId": "A" } ], - "title": "HTTP endpoints", + "title": "HTTP Edpoints", "type": "row" }, { @@ -524,7 +529,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "rate(http_server_requests_seconds_count{ }[2m])", + "expr": "rate(http_server_requests_seconds_count{ instance = \"$instance\" }[2m])", "interval": "", "legendFormat": "{{method}} - {{uri}} - {{status}}", "refId": "A" @@ -619,7 +624,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "rate(http_server_requests_seconds_sum{ }[4m]) / 
rate(http_server_requests_seconds_count{ }[4m])", + "expr": "rate(http_server_requests_seconds_sum{ instance = \"$instance\" }[4m]) / rate(http_server_requests_seconds_count{ instance = \"$instance\" }[4m])", "interval": "", "legendFormat": "{{method}} - {{uri}} - {{status}}", "refId": "A" @@ -714,7 +719,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "http_server_requests_seconds_max{ }", + "expr": "http_server_requests_seconds_max{ instance = \"$instance\" }", "interval": "", "legendFormat": "{{method}} - {{uri}} - {{status}}", "refId": "A" @@ -809,7 +814,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "rate(http_server_requests_seconds_sum{ }[2m])", + "expr": "rate(http_server_requests_seconds_sum{ instance = \"$instance\" }[2m])", "interval": "", "legendFormat": "{{method}} - {{uri}} - {{status}}", "refId": "A" @@ -934,7 +939,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "sum(jvm_memory_used_bytes{ area=\"heap\"})", + "expr": "sum(jvm_memory_used_bytes{ area = \"heap\", instance = \"$instance\" })", "format": "time_series", "intervalFactor": 2, "legendFormat": "used", @@ -947,7 +952,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "sum(jvm_memory_committed_bytes{ area=\"heap\"})", + "expr": "sum(jvm_memory_committed_bytes{ area = \"heap\", instance = \"$instance\" })", "format": "time_series", "intervalFactor": 2, "legendFormat": "committed", @@ -959,7 +964,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "sum(jvm_memory_max_bytes{ area=\"heap\"})", + "expr": "sum(jvm_memory_max_bytes{ area = \"heap\", instance = \"$instance\" })", "format": "time_series", "intervalFactor": 2, "legendFormat": "max", @@ -1060,7 +1065,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "sum(jvm_memory_used_bytes{ area=\"nonheap\"})", + "expr": "sum(jvm_memory_used_bytes{ area=\"nonheap\", instance = \"$instance\" })", "format": "time_series", "interval": "", "intervalFactor": 2, @@ -1074,7 +1079,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "sum(jvm_memory_committed_bytes{ area=\"nonheap\"})", + "expr": "sum(jvm_memory_committed_bytes{ area=\"nonheap\", instance = \"$instance\" })", "format": "time_series", "intervalFactor": 2, "legendFormat": "committed", @@ -1086,7 +1091,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "sum(jvm_memory_max_bytes{ area=\"nonheap\"})", + "expr": "sum(jvm_memory_max_bytes{ area=\"nonheap\", instance = \"$instance\" })", "format": "time_series", "intervalFactor": 2, "legendFormat": "max", @@ -1187,7 +1192,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "sum(jvm_memory_used_bytes{ })", + "expr": "sum(jvm_memory_used_bytes{ instance = \"$instance\" })", "format": "time_series", "intervalFactor": 2, "legendFormat": "used", @@ -1200,7 +1205,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "sum(jvm_memory_committed_bytes{ })", + "expr": "sum(jvm_memory_committed_bytes{ instance = \"$instance\" })", "format": "time_series", "intervalFactor": 2, "legendFormat": "committed", @@ -1212,7 +1217,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "sum(jvm_memory_max_bytes{ })", + "expr": "sum(jvm_memory_max_bytes{ instance = \"$instance\" })", "format": "time_series", "intervalFactor": 2, "legendFormat": "max", @@ -1313,7 +1318,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "process_memory_vss_bytes{ }", + "expr": "process_memory_vss_bytes{ instance = \"$instance\" }", "format": "time_series", "hide": true, "interval": "", @@ -1328,7 +1333,7 @@ 
"type": "prometheus", "uid": "prometheus" }, - "expr": "process_memory_rss_bytes{ }", + "expr": "process_memory_rss_bytes{ instance = \"$instance\" }", "format": "time_series", "interval": "", "intervalFactor": 2, @@ -1340,7 +1345,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "process_memory_swap_bytes{ }", + "expr": "process_memory_swap_bytes{ instance = \"$instance\" }", "format": "time_series", "interval": "", "intervalFactor": 2, @@ -1352,7 +1357,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "process_memory_rss_bytes{ } + process_memory_swap_bytes{ }", + "expr": "process_memory_rss_bytes{ instance = \"$instance\" } + process_memory_swap_bytes{ instance = \"$instance\" }", "format": "time_series", "intervalFactor": 2, "legendFormat": "total", @@ -1480,7 +1485,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "system_cpu_usage{ }", + "expr": "system_cpu_usage{ instance = \"$instance\" }", "format": "time_series", "hide": false, "intervalFactor": 1, @@ -1494,7 +1499,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "process_cpu_usage{ }", + "expr": "process_cpu_usage{ instance = \"$instance\" }", "format": "time_series", "hide": false, "intervalFactor": 1, @@ -1506,7 +1511,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "avg_over_time(process_cpu_usage{ }[1h])", + "expr": "avg_over_time(process_cpu_usage{ instance = \"$instance\" }[1h])", "format": "time_series", "hide": false, "intervalFactor": 1, @@ -1608,7 +1613,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "system_load_average_1m{ }", + "expr": "system_load_average_1m{ instance = \"$instance\" }", "format": "time_series", "intervalFactor": 2, "legendFormat": "system-1m", @@ -1621,7 +1626,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "system_cpu_count{ }", + "expr": "system_cpu_count{ instance = \"$instance\" }", "format": "time_series", "intervalFactor": 2, "legendFormat": "cpus", @@ -1811,7 +1816,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "jvm_threads_states_threads{ }", + "expr": "jvm_threads_states_threads{ instance = \"$instance\" }", "format": "time_series", "interval": "", "intervalFactor": 2, @@ -1913,7 +1918,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "jvm_threads_live_threads{ }", + "expr": "jvm_threads_live_threads{ instance = \"$instance\" }", "format": "time_series", "intervalFactor": 2, "legendFormat": "live", @@ -1926,7 +1931,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "jvm_threads_daemon_threads{ }", + "expr": "jvm_threads_daemon_threads{ instance = \"$instance\" }", "format": "time_series", "intervalFactor": 2, "legendFormat": "daemon", @@ -1939,7 +1944,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "jvm_threads_peak_threads{ }", + "expr": "jvm_threads_peak_threads{ instance = \"$instance\" }", "format": "time_series", "intervalFactor": 2, "legendFormat": "peak", @@ -1951,7 +1956,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "process_threads{ }", + "expr": "process_threads{ instance = \"$instance\" }", "format": "time_series", "interval": "", "intervalFactor": 2, @@ -2055,7 +2060,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "process_files_open_files{ }", + "expr": "process_files_open_files{ instance = \"$instance\" }", "format": "time_series", "hide": false, "intervalFactor": 2, @@ -2069,7 +2074,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "process_files_max_files{ }", + "expr": "process_files_max_files{ instance = \"$instance\" 
}", "format": "time_series", "hide": false, "intervalFactor": 2, @@ -2185,7 +2190,7 @@ }, "disableTextWrap": false, "editorMode": "builder", - "expr": "agroal_active_count", + "expr": "agroal_active_count{ instance = \"$instance\" }", "fullMetaSearch": false, "includeNullMetadata": true, "instant": false, @@ -2201,7 +2206,7 @@ }, "disableTextWrap": false, "editorMode": "builder", - "expr": "agroal_available_count", + "expr": "agroal_available_count{ instance = \"$instance\" }", "fullMetaSearch": false, "hide": false, "includeNullMetadata": true, @@ -2218,7 +2223,7 @@ }, "disableTextWrap": false, "editorMode": "builder", - "expr": "rate(agroal_leak_detection_count_total[$__rate_interval])", + "expr": "rate(agroal_leak_detection_count_total{ instance = \"$instance\" }[$__rate_interval])", "fullMetaSearch": false, "hide": false, "includeNullMetadata": false, @@ -2318,7 +2323,7 @@ }, "disableTextWrap": false, "editorMode": "builder", - "expr": "agroal_blocking_time_average_milliseconds", + "expr": "agroal_blocking_time_average_milliseconds{ instance = \"$instance\" }", "fullMetaSearch": false, "includeNullMetadata": true, "instant": false, @@ -2334,7 +2339,7 @@ }, "disableTextWrap": false, "editorMode": "builder", - "expr": "agroal_blocking_time_max_milliseconds", + "expr": "agroal_blocking_time_max_milliseconds{ instance = \"$instance\" }", "fullMetaSearch": false, "hide": false, "includeNullMetadata": true, @@ -2462,7 +2467,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "sum(rate(http_server_requests_seconds_count{ }[2m]))", + "expr": "sum(rate(http_server_requests_seconds_count{ instance = \"$instance\" }[2m]))", "format": "time_series", "interval": "", "intervalFactor": 1, @@ -2561,7 +2566,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "sum(rate(http_server_requests_seconds_sum{ status!~\"5..\"}[2m]))/sum(rate(http_server_requests_seconds_count{ status!~\"5..\"}[2m]))", + "expr": "sum(rate(http_server_requests_seconds_sum{ status!~\"5..\", instance = \"$instance\"}[2m])) / sum(rate(http_server_requests_seconds_count{ status!~\"5..\", instance = \"$instance\"}[2m]))", "format": "time_series", "hide": false, "interval": "", @@ -2574,7 +2579,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "max(http_server_requests_seconds_max{ status!~\"5..\"})", + "expr": "max(http_server_requests_seconds_max{ status!~\"5..\", instance = \"$instance\" })", "format": "time_series", "hide": false, "interval": "", @@ -2705,7 +2710,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "sum(rate(http_server_requests_seconds_count{ status=~\"5..\"}[5m]))", + "expr": "sum(rate(http_server_requests_seconds_count{ status=~\"5..\", instance = \"$instance\"}[5m]))", "format": "time_series", "interval": "", "intervalFactor": 1, @@ -2820,7 +2825,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "jvm_memory_used_bytes{ id=~\"$jvm_memory_pool_heap\"}", + "expr": "jvm_memory_used_bytes{ id=~\"$jvm_memory_pool_heap\", instance = \"$instance\" }", "format": "time_series", "hide": false, "interval": "", @@ -2835,7 +2840,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "jvm_memory_committed_bytes{ id=~\"$jvm_memory_pool_heap\"}", + "expr": "jvm_memory_committed_bytes{ id=~\"$jvm_memory_pool_heap\", instance = \"$instance\" }", "format": "time_series", "hide": false, "interval": "", @@ -2850,7 +2855,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "jvm_memory_max_bytes{ id=~\"$jvm_memory_pool_heap\"}", + "expr": "jvm_memory_max_bytes{ 
id=~\"$jvm_memory_pool_heap\", instance = \"$instance\" }", "format": "time_series", "hide": false, "interval": "", @@ -2982,7 +2987,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "jvm_memory_used_bytes{ id=~\"$jvm_memory_pool_nonheap\"}", + "expr": "jvm_memory_used_bytes{ id=~\"$jvm_memory_pool_nonheap\", instance = \"$instance\" }", "format": "time_series", "hide": false, "interval": "", @@ -2997,7 +3002,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "jvm_memory_committed_bytes{ id=~\"$jvm_memory_pool_nonheap\"}", + "expr": "jvm_memory_committed_bytes{ id=~\"$jvm_memory_pool_nonheap\", instance = \"$instance\" }", "format": "time_series", "hide": false, "interval": "", @@ -3012,7 +3017,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "jvm_memory_max_bytes{ id=~\"$jvm_memory_pool_nonheap\"}", + "expr": "jvm_memory_max_bytes{ id=~\"$jvm_memory_pool_nonheap\", instance = \"$instance\" }", "format": "time_series", "hide": false, "interval": "", @@ -3139,7 +3144,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "rate(jvm_gc_pause_seconds_count{ }[2m])", + "expr": "rate(jvm_gc_pause_seconds_count{ instance = \"$instance\" }[2m])", "format": "time_series", "hide": false, "interval": "", @@ -3237,7 +3242,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "rate(jvm_gc_pause_seconds_sum{ }[1m])/rate(jvm_gc_pause_seconds_count{ }[1m])", + "expr": "rate(jvm_gc_pause_seconds_sum{ instance = \"$instance\" }[1m]) / rate(jvm_gc_pause_seconds_count{ instance = \"$instance\" }[1m])", "format": "time_series", "hide": false, "instant": false, @@ -3250,7 +3255,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "jvm_gc_pause_seconds_max{ }", + "expr": "jvm_gc_pause_seconds_max{ instance = \"$instance\" }", "format": "time_series", "hide": false, "instant": false, @@ -3348,7 +3353,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "rate(jvm_gc_memory_allocated_bytes_total{ }[2m])", + "expr": "rate(jvm_gc_memory_allocated_bytes_total{ instance = \"$instance\" }[2m])", "format": "time_series", "interval": "", "intervalFactor": 1, @@ -3360,7 +3365,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "rate(jvm_gc_memory_promoted_bytes_total{ }[2m])", + "expr": "rate(jvm_gc_memory_promoted_bytes_total{ instance = \"$instance\" }[2m])", "format": "time_series", "interval": "", "intervalFactor": 1, @@ -3484,7 +3489,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "jvm_classes_loaded_classes{ }", + "expr": "jvm_classes_loaded_classes{ instance = \"$instance\" }", "format": "time_series", "intervalFactor": 2, "legendFormat": "loaded", @@ -3581,12 +3586,12 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "delta(jvm_classes_loaded_classes{}[2m])", + "expr": "delta(jvm_classes_loaded_classes{ instance = \"$instance\" }[2m])", "format": "time_series", "hide": false, "interval": "", "intervalFactor": 1, - "legendFormat": "delta-1m", + "legendFormat": "delta-2m", "metric": "", "refId": "A", "step": 1200 @@ -3708,7 +3713,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "jvm_buffer_memory_used_bytes{ id=\"direct\"}", + "expr": "jvm_buffer_memory_used_bytes{ id=\"direct\", instance = \"$instance\" }", "format": "time_series", "intervalFactor": 2, "legendFormat": "used", @@ -3721,7 +3726,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "jvm_buffer_total_capacity_bytes{ id=\"direct\"}", + "expr": "jvm_buffer_total_capacity_bytes{ id=\"direct\", instance = \"$instance\" }", "format": "time_series", 
"intervalFactor": 2, "legendFormat": "capacity", @@ -3820,7 +3825,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "jvm_buffer_count_buffers{ id=\"direct\"}", + "expr": "jvm_buffer_count_buffers{ id=\"direct\", instance = \"$instance\" }", "format": "time_series", "intervalFactor": 2, "legendFormat": "count", @@ -3918,7 +3923,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "jvm_buffer_memory_used_bytes{ id=\"mapped\"}", + "expr": "jvm_buffer_memory_used_bytes{ id=\"mapped\", instance = \"$instance\" }", "format": "time_series", "intervalFactor": 2, "legendFormat": "used", @@ -3931,7 +3936,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "jvm_buffer_total_capacity_bytes{ id=\"mapped\"}", + "expr": "jvm_buffer_total_capacity_bytes{ id=\"mapped\", instance = \"$instance\" }", "format": "time_series", "intervalFactor": 2, "legendFormat": "capacity", @@ -4030,7 +4035,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "jvm_buffer_count_buffers{ id=\"mapped\"}", + "expr": "jvm_buffer_count_buffers{ id=\"mapped\", instance = \"$instance\" }", "format": "time_series", "intervalFactor": 2, "legendFormat": "count", @@ -4154,7 +4159,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "rate(mp_graphql_seconds_count{ }[2m])", + "expr": "rate(mp_graphql_seconds_count{ instance = \"$instance\" }[2m])", "interval": "", "legendFormat": "{{type}} {{name}}", "refId": "A" @@ -4249,7 +4254,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "rate(mp_graphql_seconds_sum{ }[4m]) / rate(mp_graphql_seconds_count{ }[4m])", + "expr": "rate(mp_graphql_seconds_sum{ instance = \"$instance\" }[4m]) / rate(mp_graphql_seconds_count{ instance = \"$instance\" }[4m])", "interval": "", "legendFormat": "{{type}} {{name}}", "refId": "A" @@ -4343,7 +4348,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "mp_graphql_seconds_max{ }", + "expr": "mp_graphql_seconds_max{ instance = \"$instance\" }", "interval": "", "legendFormat": "{{type}} {{name}}", "refId": "A" @@ -4437,7 +4442,7 @@ "type": "prometheus", "uid": "prometheus" }, - "expr": "rate(mp_graphql_seconds_sum{ }[2m])", + "expr": "rate(mp_graphql_seconds_sum{ instance = \"$instance\" }[2m])", "interval": "", "legendFormat": "{{type}} {{name}}", "refId": "A" @@ -4462,37 +4467,14 @@ ], "refresh": "auto", "schemaVersion": 39, - "tags": [], + "tags": [ + "Quarkus", + "Micrometer", + "Prometheus", + "metrics" + ], "templating": { "list": [ - { - "current": { - "isNone": true, - "selected": false, - "text": "None", - "value": "" - }, - "datasource": { - "type": "prometheus", - "uid": "prometheus" - }, - "definition": "", - "hide": 0, - "includeAll": false, - "label": "Application", - "multi": false, - "name": "application", - "options": [], - "query": "label_values(application)", - "refresh": 2, - "regex": "", - "skipUrlSync": false, - "sort": 0, - "tagValuesQuery": "", - "tagsQuery": "", - "type": "query", - "useTags": false - }, { "allFormat": "glob", "current": { @@ -4512,7 +4494,7 @@ "multiFormat": "glob", "name": "instance", "options": [], - "query": "label_values(jvm_memory_used_bytes{)", + "query": "label_values(jvm_memory_used_bytes{},instance)", "refresh": 2, "regex": "", "skipUrlSync": false, @@ -4583,7 +4565,7 @@ ] }, "time": { - "from": "now-3h", + "from": "now-5m", "to": "now" }, "timepicker": { @@ -4613,8 +4595,8 @@ ] }, "timezone": "browser", - "title": "Quarkus Micrometer Metrics - Prometheus", + "title": "Quarkus Micrometer Prometheus registry", "uid": "edy6473ay1vk0b", - "version": 4, + "version": 
5, "weekStart": "" -} \ No newline at end of file +} diff --git a/extensions/observability-devservices/testlibs/devresource-common/src/main/java/io/quarkus/observability/devresource/DevResourceLifecycleManager.java b/extensions/observability-devservices/testlibs/devresource-common/src/main/java/io/quarkus/observability/devresource/DevResourceLifecycleManager.java index 57d7f2008a37e..cecc7bf97e392 100644 --- a/extensions/observability-devservices/testlibs/devresource-common/src/main/java/io/quarkus/observability/devresource/DevResourceLifecycleManager.java +++ b/extensions/observability-devservices/testlibs/devresource-common/src/main/java/io/quarkus/observability/devresource/DevResourceLifecycleManager.java @@ -75,8 +75,8 @@ default Container container(T config, ModulesConfiguration root) { } /** - * Deduct current config from params. - * If port are too dynamic / configured, it's hard to deduct, + * Deduce current config from params. + * If port are too dynamic / configured, it's hard to deduce, * since configuration is not part of the devservice state. * e.g. different ports then usual - Grafana UI is 3000, if you do not use 3000, * it's hard or impossible to know which port belongs to certain property. diff --git a/extensions/observability-devservices/testlibs/devresource-common/src/main/java/io/quarkus/observability/devresource/DevResources.java b/extensions/observability-devservices/testlibs/devresource-common/src/main/java/io/quarkus/observability/devresource/DevResources.java index b94090c40e64b..8200ce129517f 100644 --- a/extensions/observability-devservices/testlibs/devresource-common/src/main/java/io/quarkus/observability/devresource/DevResources.java +++ b/extensions/observability-devservices/testlibs/devresource-common/src/main/java/io/quarkus/observability/devresource/DevResources.java @@ -18,7 +18,8 @@ public class DevResources { private static final Logger log = Logger.getLogger(DevResources.class); private static List resources; - private static Map map; + private static Map map = Map.of(); + private static volatile boolean started = false; /** * @return list of found dev resources. @@ -45,7 +46,8 @@ public static synchronized List resources() { * @return a map of config properties to be returned by {@link DevResourcesConfigSource} */ static synchronized Map ensureStarted() { - if (map == null) { + if (!started) { + started = true; try { for (var res : resources()) { res.initDev(); diff --git a/extensions/observability-devservices/testlibs/devresource-common/src/main/java/io/quarkus/observability/devresource/ExtensionsCatalog.java b/extensions/observability-devservices/testlibs/devresource-common/src/main/java/io/quarkus/observability/devresource/ExtensionsCatalog.java index 00af2105c6c0b..2a2ccadb367cd 100644 --- a/extensions/observability-devservices/testlibs/devresource-common/src/main/java/io/quarkus/observability/devresource/ExtensionsCatalog.java +++ b/extensions/observability-devservices/testlibs/devresource-common/src/main/java/io/quarkus/observability/devresource/ExtensionsCatalog.java @@ -1,8 +1,13 @@ package io.quarkus.observability.devresource; +import java.util.function.Function; + /** * Relevant Observability extensions present. 
*/ -public record ExtensionsCatalog(boolean hasOpenTelemetry, +public record ExtensionsCatalog( + Function resourceChecker, + Function classChecker, + boolean hasOpenTelemetry, boolean hasMicrometerOtlp) { } diff --git a/extensions/observability-devservices/testlibs/devresource-lgtm/src/main/java/io/quarkus/observability/devresource/lgtm/LgtmResource.java b/extensions/observability-devservices/testlibs/devresource-lgtm/src/main/java/io/quarkus/observability/devresource/lgtm/LgtmResource.java index 2c835036c789d..b41947d4791a5 100644 --- a/extensions/observability-devservices/testlibs/devresource-lgtm/src/main/java/io/quarkus/observability/devresource/lgtm/LgtmResource.java +++ b/extensions/observability-devservices/testlibs/devresource-lgtm/src/main/java/io/quarkus/observability/devresource/lgtm/LgtmResource.java @@ -2,6 +2,10 @@ import java.util.HashMap; import java.util.Map; +import java.util.Set; +import java.util.function.Function; + +import org.jboss.logging.Logger; import io.quarkus.observability.common.ContainerConstants; import io.quarkus.observability.common.config.LgtmConfig; @@ -14,11 +18,30 @@ public class LgtmResource extends ContainerResource { + private static final Logger log = Logger.getLogger(LgtmResource.class.getName()); + + protected static final Set SCRAPING_REGISTRIES = Set.of( + "io.micrometer.prometheus.PrometheusMeterRegistry"); + + protected static final Function TCCL_FN = s -> { + ClassLoader cl = Thread.currentThread().getContextClassLoader(); + try { + cl.loadClass(s); + return true; + } catch (Exception e) { + // any exception + return false; + } + }; + private ExtensionsCatalog catalog; + private LgtmConfig config; @Override public LgtmConfig config(ModulesConfiguration configuration) { - return configuration.lgtm(); + LgtmConfig config = configuration.lgtm(); + this.config = config; + return config; } @Override @@ -29,47 +52,92 @@ public LgtmConfig config(ModulesConfiguration configuration, ExtensionsCatalog c @Override public Container container(LgtmConfig config, ModulesConfiguration root) { - return set(new LgtmContainer(config)); + return set(new LgtmContainer(config, isScrapingRequired(catalog.classChecker()))); + } + + private boolean isScrapingRequired(Function checker) { + boolean result = false; + String foundRegistry = null; + for (String clazz : SCRAPING_REGISTRIES) { + if (checker.apply(clazz)) { + foundRegistry = clazz; + result = true; + break; + } + } + + if (result && (catalog != null && catalog.hasMicrometerOtlp())) { + log.warnf("Multiple Micrometer registries found - OTLP and %s, no Prometheus scrapping required.", foundRegistry); + return false; + } + + return result; + } + + private int getPrivateOtlpPort() { + if (config != null) { + return LgtmContainer.getPrivateOtlpPort(config.otlpProtocol()); + } else { + return -1; + } + } + + private Map config(int privatePort, String host) { + return config(privatePort, host, container.getMappedPort(privatePort)); } - // FIXME consolidate config methods. 
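Editor's aside: a hedged summary of the property mapping implemented by the three-argument config method that follows; the concrete port numbers are assumptions, since the patch only refers to the ContainerConstants names.

// Sketch of the mappings produced by config(privatePort, host, publicPort) below, assuming the
// usual defaults GRAFANA_PORT = 3000, OTEL_GRPC_EXPORTER_PORT = 4317, OTEL_HTTP_EXPORTER_PORT = 4318:
//   GRAFANA_PORT            -> grafana.endpoint = http://<host>:<publicPort>
//   OTEL_HTTP_EXPORTER_PORT -> quarkus.micrometer.export.otlp.url = http://<host>:<publicPort>/v1/metrics
//                              (only when Micrometer OTLP is present), then falls through to
//                              otel-collector.url = <host>:<publicPort>
//   OTEL_GRPC_EXPORTER_PORT -> otel-collector.url = <host>:<publicPort>
// Additionally, when OpenTelemetry is present and privatePort matches the port of the configured
// OTLP protocol, quarkus.otel.exporter.otlp.endpoint and quarkus.otel.exporter.otlp.protocol are set.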
@Override public Map config(int privatePort, String host, int publicPort) { + + Map containerConfigs = new HashMap<>(); + switch (privatePort) { case ContainerConstants.GRAFANA_PORT: - return Map.of("grafana.endpoint", String.format("http://%s:%s", host, publicPort)); - case ContainerConstants.OTEL_GRPC_EXPORTER_PORT: + containerConfigs.put("grafana.endpoint", String.format("http://%s:%s", host, publicPort)); + break; case ContainerConstants.OTEL_HTTP_EXPORTER_PORT: - return Map.of("otel-collector.url", String.format("%s:%s", host, publicPort)); + if (catalog != null && catalog.hasMicrometerOtlp()) { + + containerConfigs.put("quarkus.micrometer.export.otlp.url", + String.format("http://%s:%s/v1/metrics", host, + publicPort)); + } + // No break, fall through + case ContainerConstants.OTEL_GRPC_EXPORTER_PORT: + containerConfigs.put("otel-collector.url", String.format("%s:%s", host, publicPort)); + break; } - return Map.of(); + + // The OTLP port is probably one of the ports we already compared against, but at compile-time we don't know which one, + // so instead of doing this check as a fallthrough on the switch, do a normal if-check + if (catalog != null && catalog.hasOpenTelemetry()) { + final int privateOtlpPort = getPrivateOtlpPort(); + if (privateOtlpPort == privatePort) { + containerConfigs.put("quarkus.otel.exporter.otlp.endpoint", + String.format("http://%s:%s", host, publicPort)); + String otlpProtocol = config.otlpProtocol(); // If we got to this stage, config must be not null + containerConfigs.put("quarkus.otel.exporter.otlp.protocol", otlpProtocol); + } + + } + return containerConfigs; } @Override protected LgtmContainer defaultContainer() { - return new LgtmContainer(); + return new LgtmContainer(isScrapingRequired(TCCL_FN)); // best we can do? } @Override public Map doStart() { String host = container.getHost(); - int otlpPort = container.getOtlpPort(); - - //Set non Quarkus properties for convenience and testing. 
Map containerConfigs = new HashMap<>(); - containerConfigs.put("grafana.endpoint", String.format("http://%s:%s", host, container.getGrafanaPort())); - containerConfigs.put("otel-collector.url", String.format("%s:%s", host, otlpPort)); - // set relevant properties for Quarkus extensions directly - if (catalog != null && catalog.hasOpenTelemetry()) { - containerConfigs.put("quarkus.otel.exporter.otlp.endpoint", String.format("http://%s:%s", host, otlpPort)); - containerConfigs.put("quarkus.otel.exporter.otlp.protocol", container.getOtlpProtocol()); - } - if (catalog != null && catalog.hasMicrometerOtlp()) { - // always use http -- as that's what Micrometer supports - containerConfigs.put("quarkus.micrometer.export.otlp.url", - String.format("http://%s:%s/v1/metrics", host, - container.getMappedPort(ContainerConstants.OTEL_HTTP_EXPORTER_PORT))); + containerConfigs.putAll(config(ContainerConstants.GRAFANA_PORT, host)); + containerConfigs.putAll(config(ContainerConstants.OTEL_HTTP_EXPORTER_PORT, host)); + // Iff GRPC is the OTLP protocol, overwrite the otel-collector.url we just wrote with the correct grpc one, and set up the otlp endpoints + if (ContainerConstants.OTEL_GRPC_PROTOCOL.equals(container.getOtlpProtocol())) { + containerConfigs.putAll(config(ContainerConstants.OTEL_GRPC_EXPORTER_PORT, host)); } return containerConfigs; } diff --git a/extensions/oidc-client/deployment/src/main/java/io/quarkus/oidc/client/deployment/OidcClientBuildStep.java b/extensions/oidc-client/deployment/src/main/java/io/quarkus/oidc/client/deployment/OidcClientBuildStep.java index 2d3b124aab56e..1ac78839e5c53 100644 --- a/extensions/oidc-client/deployment/src/main/java/io/quarkus/oidc/client/deployment/OidcClientBuildStep.java +++ b/extensions/oidc-client/deployment/src/main/java/io/quarkus/oidc/client/deployment/OidcClientBuildStep.java @@ -3,7 +3,6 @@ import static io.quarkus.oidc.client.deployment.OidcClientFilterDeploymentHelper.sanitize; import java.lang.reflect.Modifier; -import java.util.List; import java.util.Objects; import java.util.Optional; import java.util.Set; @@ -13,7 +12,6 @@ import jakarta.enterprise.context.RequestScoped; import jakarta.inject.Singleton; -import org.jboss.jandex.AnnotationInstance; import org.jboss.jandex.DotName; import io.quarkus.arc.BeanDestroyer; @@ -32,7 +30,6 @@ import io.quarkus.deployment.annotations.ExecutionTime; import io.quarkus.deployment.annotations.Record; import io.quarkus.deployment.builditem.ApplicationArchivesBuildItem; -import io.quarkus.deployment.builditem.CombinedIndexBuildItem; import io.quarkus.deployment.builditem.ExtensionSslNativeSupportBuildItem; import io.quarkus.deployment.builditem.RunTimeConfigBuilderBuildItem; import io.quarkus.deployment.builditem.nativeimage.RuntimeInitializedClassBuildItem; @@ -53,15 +50,12 @@ import io.quarkus.oidc.client.runtime.TokenProviderProducer; import io.quarkus.oidc.client.runtime.TokensHelper; import io.quarkus.oidc.client.runtime.TokensProducer; -import io.quarkus.oidc.token.propagation.AccessToken; import io.quarkus.tls.TlsRegistryBuildItem; import io.quarkus.vertx.core.deployment.CoreVertxBuildItem; @BuildSteps(onlyIf = OidcClientBuildStep.IsEnabled.class) public class OidcClientBuildStep { - private static final DotName ACCESS_TOKEN = DotName.createSimple(AccessToken.class.getName()); - @BuildStep ExtensionSslNativeSupportBuildItem enableSslInNative() { return new ExtensionSslNativeSupportBuildItem(Feature.OIDC_CLIENT); @@ -166,26 +160,6 @@ public void createNonDefaultTokensProducers( } } - @BuildStep - 
public List collectAccessTokenInstances(CombinedIndexBuildItem index) { - record ItemBuilder(AnnotationInstance instance) { - - private String toClientName() { - var value = instance.value("exchangeTokenClient"); - return value == null || value.asString().equals("Default") ? "" : value.asString(); - } - - private boolean toExchangeToken() { - return instance.value("exchangeTokenClient") != null; - } - - private AccessTokenInstanceBuildItem build() { - return new AccessTokenInstanceBuildItem(toClientName(), toExchangeToken(), instance.target()); - } - } - return index.getIndex().getAnnotations(ACCESS_TOKEN).stream().map(ItemBuilder::new).map(ItemBuilder::build).toList(); - } - @BuildStep RunTimeConfigBuilderBuildItem useOidcClientDefaultIdConfigBuilder() { return new RunTimeConfigBuilderBuildItem(OidcClientDefaultIdConfigBuilder.class); diff --git a/extensions/oidc-token-propagation-common/deployment/pom.xml b/extensions/oidc-token-propagation-common/deployment/pom.xml new file mode 100644 index 0000000000000..2ed0c5161c2a9 --- /dev/null +++ b/extensions/oidc-token-propagation-common/deployment/pom.xml @@ -0,0 +1,51 @@ + + + + quarkus-oidc-token-propagation-common-parent + io.quarkus + 999-SNAPSHOT + + 4.0.0 + + quarkus-oidc-token-propagation-common-deployment + Quarkus - OpenID Connect Token Propagation - Common - Deployment + + + + io.quarkus + quarkus-core-deployment + + + io.quarkus + quarkus-arc-deployment + + + io.quarkus + quarkus-oidc-token-propagation-common + + + + + + + maven-compiler-plugin + + + default-compile + + + + io.quarkus + quarkus-extension-processor + ${project.version} + + + + + + + + + diff --git a/extensions/oidc-client/deployment/src/main/java/io/quarkus/oidc/client/deployment/AccessTokenInstanceBuildItem.java b/extensions/oidc-token-propagation-common/deployment/src/main/java/io/quarkus/oidc/token/propagation/common/deployment/AccessTokenInstanceBuildItem.java similarity index 85% rename from extensions/oidc-client/deployment/src/main/java/io/quarkus/oidc/client/deployment/AccessTokenInstanceBuildItem.java rename to extensions/oidc-token-propagation-common/deployment/src/main/java/io/quarkus/oidc/token/propagation/common/deployment/AccessTokenInstanceBuildItem.java index 91a19805c755b..204023cf9e5cc 100644 --- a/extensions/oidc-client/deployment/src/main/java/io/quarkus/oidc/client/deployment/AccessTokenInstanceBuildItem.java +++ b/extensions/oidc-token-propagation-common/deployment/src/main/java/io/quarkus/oidc/token/propagation/common/deployment/AccessTokenInstanceBuildItem.java @@ -1,4 +1,4 @@ -package io.quarkus.oidc.client.deployment; +package io.quarkus.oidc.token.propagation.common.deployment; import java.util.Objects; @@ -7,7 +7,7 @@ import io.quarkus.builder.item.MultiBuildItem; /** - * Represents one {@link io.quarkus.oidc.token.propagation.AccessToken} annotation instance. + * Represents one {@link io.quarkus.oidc.token.propagation.common.AccessToken} annotation instance. 
*/ public final class AccessTokenInstanceBuildItem extends MultiBuildItem { diff --git a/extensions/oidc-client/deployment/src/main/java/io/quarkus/oidc/client/deployment/AccessTokenRequestFilterGenerator.java b/extensions/oidc-token-propagation-common/deployment/src/main/java/io/quarkus/oidc/token/propagation/common/deployment/AccessTokenRequestFilterGenerator.java similarity index 98% rename from extensions/oidc-client/deployment/src/main/java/io/quarkus/oidc/client/deployment/AccessTokenRequestFilterGenerator.java rename to extensions/oidc-token-propagation-common/deployment/src/main/java/io/quarkus/oidc/token/propagation/common/deployment/AccessTokenRequestFilterGenerator.java index bd15930cc24b1..20f5050276a8e 100644 --- a/extensions/oidc-client/deployment/src/main/java/io/quarkus/oidc/client/deployment/AccessTokenRequestFilterGenerator.java +++ b/extensions/oidc-token-propagation-common/deployment/src/main/java/io/quarkus/oidc/token/propagation/common/deployment/AccessTokenRequestFilterGenerator.java @@ -1,4 +1,4 @@ -package io.quarkus.oidc.client.deployment; +package io.quarkus.oidc.token.propagation.common.deployment; import java.lang.annotation.RetentionPolicy; import java.lang.reflect.Modifier; diff --git a/extensions/oidc-token-propagation-common/deployment/src/main/java/io/quarkus/oidc/token/propagation/common/deployment/OidcTokenPropagationCommonProcessor.java b/extensions/oidc-token-propagation-common/deployment/src/main/java/io/quarkus/oidc/token/propagation/common/deployment/OidcTokenPropagationCommonProcessor.java new file mode 100644 index 0000000000000..bb097a7befa25 --- /dev/null +++ b/extensions/oidc-token-propagation-common/deployment/src/main/java/io/quarkus/oidc/token/propagation/common/deployment/OidcTokenPropagationCommonProcessor.java @@ -0,0 +1,42 @@ +package io.quarkus.oidc.token.propagation.common.deployment; + +import java.util.List; +import java.util.stream.Stream; + +import org.jboss.jandex.AnnotationInstance; +import org.jboss.jandex.DotName; + +import io.quarkus.deployment.annotations.BuildStep; +import io.quarkus.deployment.builditem.CombinedIndexBuildItem; +import io.quarkus.oidc.token.propagation.common.AccessToken; + +public class OidcTokenPropagationCommonProcessor { + + private static final DotName DEPRECATED_ACCESS_TOKEN = DotName + .createSimple(io.quarkus.oidc.token.propagation.AccessToken.class.getName()); + private static final DotName ACCESS_TOKEN = DotName.createSimple(AccessToken.class.getName()); + + @BuildStep + public List collectAccessTokenInstances(CombinedIndexBuildItem index) { + record ItemBuilder(AnnotationInstance instance) { + + private String toClientName() { + var value = instance.value("exchangeTokenClient"); + return value == null || value.asString().equals("Default") ? 
"" : value.asString(); + } + + private boolean toExchangeToken() { + return instance.value("exchangeTokenClient") != null; + } + + private AccessTokenInstanceBuildItem build() { + return new AccessTokenInstanceBuildItem(toClientName(), toExchangeToken(), instance.target()); + } + } + var accessTokenAnnotations = index.getIndex().getAnnotations(ACCESS_TOKEN); + var accessTokenDeprecatedAnnotations = index.getIndex().getAnnotations(DEPRECATED_ACCESS_TOKEN); + return Stream.concat(accessTokenAnnotations.stream(), accessTokenDeprecatedAnnotations.stream()) + .map(ItemBuilder::new).map(ItemBuilder::build).toList(); + } + +} diff --git a/extensions/oidc-token-propagation-common/pom.xml b/extensions/oidc-token-propagation-common/pom.xml new file mode 100644 index 0000000000000..5699ad5064483 --- /dev/null +++ b/extensions/oidc-token-propagation-common/pom.xml @@ -0,0 +1,21 @@ + + + + quarkus-extensions-parent + io.quarkus + 999-SNAPSHOT + ../pom.xml + + 4.0.0 + + quarkus-oidc-token-propagation-common-parent + Quarkus - OpenID Connect Token Propagation - Common - Parent + + pom + + deployment + runtime + + diff --git a/extensions/oidc-token-propagation-common/runtime/pom.xml b/extensions/oidc-token-propagation-common/runtime/pom.xml new file mode 100644 index 0000000000000..896b66ea0fe32 --- /dev/null +++ b/extensions/oidc-token-propagation-common/runtime/pom.xml @@ -0,0 +1,52 @@ + + + + quarkus-oidc-token-propagation-common-parent + io.quarkus + 999-SNAPSHOT + + 4.0.0 + + quarkus-oidc-token-propagation-common + Quarkus - OpenID Connect Token Propagation - Common + Common Runtime API for OpenID Connect Token Propagation extensions + + + + io.quarkus + quarkus-core + + + io.quarkus + quarkus-arc + + + + + + + io.quarkus + quarkus-extension-maven-plugin + + + maven-compiler-plugin + + + default-compile + + + + io.quarkus + quarkus-extension-processor + ${project.version} + + + + + + + + + diff --git a/extensions/oidc-token-propagation-common/runtime/src/main/java/io/quarkus/oidc/token/propagation/AccessToken.java b/extensions/oidc-token-propagation-common/runtime/src/main/java/io/quarkus/oidc/token/propagation/AccessToken.java new file mode 100644 index 0000000000000..f91c7c74df08d --- /dev/null +++ b/extensions/oidc-token-propagation-common/runtime/src/main/java/io/quarkus/oidc/token/propagation/AccessToken.java @@ -0,0 +1,34 @@ +package io.quarkus.oidc.token.propagation; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * When this annotation is added to a MicroProfile REST Client interface, the {@link AccessTokenRequestFilter} will be added to + * the request pipeline. + * The end result is that the request propagates the Bearer token present in the current active request or the token acquired + * from the Authorization Code Flow, + * as the HTTP {@code Authorization} header's {@code Bearer} scheme value. + * + * @deprecated The @AccessToken annotation has been moved into a new package without any functionality changes. + * Use {@link io.quarkus.oidc.token.propagation.common.AccessToken} instead of this annotation. + * This annotation will be removed in the future. 
+ */ +@Deprecated(forRemoval = true, since = "3.19") +@Target({ ElementType.TYPE }) +@Retention(RetentionPolicy.RUNTIME) +@Documented +public @interface AccessToken { + + /** + * Selects name of the configured OidcClient and activates token exchange for the annotated REST client. + * Please note that the default OidcClient's name is `Default`. You do not have to enable this attribute + * if you use the default OidcClient and already have either 'quarkus.resteasy-client-oidc-token-propagation.exchange-token' + * or 'quarkus.rest-client-oidc-token-propagation.exchange-token' property set to 'true' + */ + String exchangeTokenClient() default ""; + +} diff --git a/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/token/propagation/AccessToken.java b/extensions/oidc-token-propagation-common/runtime/src/main/java/io/quarkus/oidc/token/propagation/common/AccessToken.java similarity index 96% rename from extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/token/propagation/AccessToken.java rename to extensions/oidc-token-propagation-common/runtime/src/main/java/io/quarkus/oidc/token/propagation/common/AccessToken.java index aa2d21b07eafb..3fcd4ee1f0faf 100644 --- a/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/token/propagation/AccessToken.java +++ b/extensions/oidc-token-propagation-common/runtime/src/main/java/io/quarkus/oidc/token/propagation/common/AccessToken.java @@ -1,4 +1,4 @@ -package io.quarkus.oidc.token.propagation; +package io.quarkus.oidc.token.propagation.common; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; diff --git a/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/token/propagation/TokenPropagationConstants.java b/extensions/oidc-token-propagation-common/runtime/src/main/java/io/quarkus/oidc/token/propagation/common/runtime/TokenPropagationConstants.java similarity index 93% rename from extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/token/propagation/TokenPropagationConstants.java rename to extensions/oidc-token-propagation-common/runtime/src/main/java/io/quarkus/oidc/token/propagation/common/runtime/TokenPropagationConstants.java index 210aec8998c48..f01edab7611d3 100644 --- a/extensions/oidc-client/runtime/src/main/java/io/quarkus/oidc/token/propagation/TokenPropagationConstants.java +++ b/extensions/oidc-token-propagation-common/runtime/src/main/java/io/quarkus/oidc/token/propagation/common/runtime/TokenPropagationConstants.java @@ -1,4 +1,4 @@ -package io.quarkus.oidc.token.propagation; +package io.quarkus.oidc.token.propagation.common.runtime; public final class TokenPropagationConstants { diff --git a/extensions/oidc-token-propagation-common/runtime/src/resources/META-INF/quarkus-extension.yaml b/extensions/oidc-token-propagation-common/runtime/src/resources/META-INF/quarkus-extension.yaml new file mode 100644 index 0000000000000..2d145c7edd900 --- /dev/null +++ b/extensions/oidc-token-propagation-common/runtime/src/resources/META-INF/quarkus-extension.yaml @@ -0,0 +1,5 @@ +--- +artifact: ${project.groupId}:${project.artifactId}:${project.version} +name: "OpenID Connect Token Propagation - Common" +metadata: + unlisted: true diff --git a/extensions/oidc-token-propagation-reactive/deployment/pom.xml b/extensions/oidc-token-propagation-reactive/deployment/pom.xml index 0dbe5a126b0d6..f3d404a1449f5 100644 --- a/extensions/oidc-token-propagation-reactive/deployment/pom.xml +++ b/extensions/oidc-token-propagation-reactive/deployment/pom.xml @@ -29,6 +29,10 @@ io.quarkus 
quarkus-oidc-client-deployment + + io.quarkus + quarkus-oidc-token-propagation-common-deployment + io.quarkus diff --git a/extensions/oidc-token-propagation-reactive/deployment/src/main/java/io/quarkus/oidc/token/propagation/reactive/OidcTokenPropagationReactiveAlwaysEnabledProcessor.java b/extensions/oidc-token-propagation-reactive/deployment/src/main/java/io/quarkus/oidc/token/propagation/reactive/deployment/OidcTokenPropagationReactiveAlwaysEnabledProcessor.java similarity index 88% rename from extensions/oidc-token-propagation-reactive/deployment/src/main/java/io/quarkus/oidc/token/propagation/reactive/OidcTokenPropagationReactiveAlwaysEnabledProcessor.java rename to extensions/oidc-token-propagation-reactive/deployment/src/main/java/io/quarkus/oidc/token/propagation/reactive/deployment/OidcTokenPropagationReactiveAlwaysEnabledProcessor.java index e478f9a20cb83..e80740b1bb410 100644 --- a/extensions/oidc-token-propagation-reactive/deployment/src/main/java/io/quarkus/oidc/token/propagation/reactive/OidcTokenPropagationReactiveAlwaysEnabledProcessor.java +++ b/extensions/oidc-token-propagation-reactive/deployment/src/main/java/io/quarkus/oidc/token/propagation/reactive/deployment/OidcTokenPropagationReactiveAlwaysEnabledProcessor.java @@ -1,4 +1,4 @@ -package io.quarkus.oidc.token.propagation.reactive; +package io.quarkus.oidc.token.propagation.reactive.deployment; import io.quarkus.deployment.Feature; import io.quarkus.deployment.annotations.BuildStep; diff --git a/extensions/oidc-token-propagation-reactive/deployment/src/main/java/io/quarkus/oidc/token/propagation/reactive/OidcTokenPropagationReactiveBuildStep.java b/extensions/oidc-token-propagation-reactive/deployment/src/main/java/io/quarkus/oidc/token/propagation/reactive/deployment/OidcTokenPropagationReactiveBuildStep.java similarity index 89% rename from extensions/oidc-token-propagation-reactive/deployment/src/main/java/io/quarkus/oidc/token/propagation/reactive/OidcTokenPropagationReactiveBuildStep.java rename to extensions/oidc-token-propagation-reactive/deployment/src/main/java/io/quarkus/oidc/token/propagation/reactive/deployment/OidcTokenPropagationReactiveBuildStep.java index 4292342e5a7cc..d422a4ee813fe 100644 --- a/extensions/oidc-token-propagation-reactive/deployment/src/main/java/io/quarkus/oidc/token/propagation/reactive/OidcTokenPropagationReactiveBuildStep.java +++ b/extensions/oidc-token-propagation-reactive/deployment/src/main/java/io/quarkus/oidc/token/propagation/reactive/deployment/OidcTokenPropagationReactiveBuildStep.java @@ -1,7 +1,7 @@ -package io.quarkus.oidc.token.propagation.reactive; +package io.quarkus.oidc.token.propagation.reactive.deployment; -import static io.quarkus.oidc.token.propagation.TokenPropagationConstants.JWT_PROPAGATE_TOKEN_CREDENTIAL; -import static io.quarkus.oidc.token.propagation.TokenPropagationConstants.OIDC_PROPAGATE_TOKEN_CREDENTIAL; +import static io.quarkus.oidc.token.propagation.common.runtime.TokenPropagationConstants.JWT_PROPAGATE_TOKEN_CREDENTIAL; +import static io.quarkus.oidc.token.propagation.common.runtime.TokenPropagationConstants.OIDC_PROPAGATE_TOKEN_CREDENTIAL; import java.util.List; import java.util.function.BooleanSupplier; @@ -24,8 +24,9 @@ import io.quarkus.deployment.builditem.AdditionalIndexedClassesBuildItem; import io.quarkus.deployment.builditem.SystemPropertyBuildItem; import io.quarkus.deployment.builditem.nativeimage.ReflectiveClassBuildItem; -import io.quarkus.oidc.client.deployment.AccessTokenInstanceBuildItem; -import 
io.quarkus.oidc.client.deployment.AccessTokenRequestFilterGenerator; +import io.quarkus.oidc.token.propagation.common.deployment.AccessTokenInstanceBuildItem; +import io.quarkus.oidc.token.propagation.common.deployment.AccessTokenRequestFilterGenerator; +import io.quarkus.oidc.token.propagation.reactive.AccessTokenRequestReactiveFilter; import io.quarkus.rest.client.reactive.deployment.DotNames; import io.quarkus.rest.client.reactive.deployment.RegisterProviderAnnotationInstanceBuildItem; import io.quarkus.runtime.configuration.ConfigurationException; diff --git a/extensions/oidc-token-propagation-reactive/deployment/src/main/java/io/quarkus/oidc/token/propagation/reactive/OidcTokenPropagationReactiveBuildTimeConfig.java b/extensions/oidc-token-propagation-reactive/deployment/src/main/java/io/quarkus/oidc/token/propagation/reactive/deployment/OidcTokenPropagationReactiveBuildTimeConfig.java similarity index 94% rename from extensions/oidc-token-propagation-reactive/deployment/src/main/java/io/quarkus/oidc/token/propagation/reactive/OidcTokenPropagationReactiveBuildTimeConfig.java rename to extensions/oidc-token-propagation-reactive/deployment/src/main/java/io/quarkus/oidc/token/propagation/reactive/deployment/OidcTokenPropagationReactiveBuildTimeConfig.java index f87408763d8cf..4a908265ccfe0 100644 --- a/extensions/oidc-token-propagation-reactive/deployment/src/main/java/io/quarkus/oidc/token/propagation/reactive/OidcTokenPropagationReactiveBuildTimeConfig.java +++ b/extensions/oidc-token-propagation-reactive/deployment/src/main/java/io/quarkus/oidc/token/propagation/reactive/deployment/OidcTokenPropagationReactiveBuildTimeConfig.java @@ -1,4 +1,4 @@ -package io.quarkus.oidc.token.propagation.reactive; +package io.quarkus.oidc.token.propagation.reactive.deployment; import io.quarkus.runtime.annotations.ConfigRoot; import io.smallrye.config.ConfigMapping; diff --git a/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/AccessTokenAnnotationTest.java b/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/deployment/test/AccessTokenAnnotationTest.java similarity index 96% rename from extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/AccessTokenAnnotationTest.java rename to extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/deployment/test/AccessTokenAnnotationTest.java index 31e8aee1c6bd8..15e623361f47b 100644 --- a/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/AccessTokenAnnotationTest.java +++ b/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/deployment/test/AccessTokenAnnotationTest.java @@ -1,4 +1,4 @@ -package io.quarkus.oidc.token.propagation.reactive; +package io.quarkus.oidc.token.propagation.reactive.deployment.test; import static org.hamcrest.Matchers.equalTo; @@ -20,7 +20,8 @@ import org.junit.jupiter.api.extension.RegisterExtension; import io.quarkus.arc.Unremovable; -import io.quarkus.oidc.token.propagation.AccessToken; +import io.quarkus.oidc.token.propagation.common.AccessToken; +import io.quarkus.oidc.token.propagation.reactive.AccessTokenRequestReactiveFilter; import io.quarkus.test.QuarkusUnitTest; import io.quarkus.test.common.QuarkusTestResource; import io.quarkus.test.oidc.client.OidcTestClient; @@ -116,7 +117,7 
@@ public interface DefaultClientEnabledExchange { } @RegisterRestClient(baseUri = "http://localhost:8081/protected") - @AccessToken(exchangeTokenClient = "named") + @io.quarkus.oidc.token.propagation.AccessToken(exchangeTokenClient = "named") @Path("/") public interface NamedClientDefaultExchange { @GET diff --git a/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/AccessTokenPropagationService.java b/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/deployment/test/AccessTokenPropagationService.java similarity index 66% rename from extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/AccessTokenPropagationService.java rename to extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/deployment/test/AccessTokenPropagationService.java index 7a6273c9124aa..486170353fdba 100644 --- a/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/AccessTokenPropagationService.java +++ b/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/deployment/test/AccessTokenPropagationService.java @@ -1,10 +1,12 @@ -package io.quarkus.oidc.token.propagation; +package io.quarkus.oidc.token.propagation.reactive.deployment.test; import jakarta.ws.rs.GET; import jakarta.ws.rs.Path; import org.eclipse.microprofile.rest.client.inject.RegisterRestClient; +import io.quarkus.oidc.token.propagation.common.AccessToken; + @RegisterRestClient @AccessToken @Path("/") diff --git a/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/FrontendResource.java b/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/deployment/test/FrontendResource.java similarity index 94% rename from extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/FrontendResource.java rename to extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/deployment/test/FrontendResource.java index 960c894cad4f0..d98d4789149fd 100644 --- a/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/FrontendResource.java +++ b/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/deployment/test/FrontendResource.java @@ -1,4 +1,4 @@ -package io.quarkus.oidc.token.propagation.reactive; +package io.quarkus.oidc.token.propagation.reactive.deployment.test; import jakarta.annotation.security.RolesAllowed; import jakarta.inject.Inject; diff --git a/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/OidcTokenPropagationTest.java b/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/deployment/test/OidcTokenPropagationTest.java similarity index 95% rename from extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/OidcTokenPropagationTest.java rename to extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/deployment/test/OidcTokenPropagationTest.java index ae0489140f12a..1b4884fc7e5a9 100644 --- 
a/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/OidcTokenPropagationTest.java +++ b/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/deployment/test/OidcTokenPropagationTest.java @@ -1,4 +1,4 @@ -package io.quarkus.oidc.token.propagation.reactive; +package io.quarkus.oidc.token.propagation.reactive.deployment.test; import static org.hamcrest.Matchers.equalTo; diff --git a/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/OidcTokenPropagationWithSecurityIdentityAugmentorLazyAuthTest.java b/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/deployment/test/OidcTokenPropagationWithSecurityIdentityAugmentorLazyAuthTest.java similarity index 92% rename from extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/OidcTokenPropagationWithSecurityIdentityAugmentorLazyAuthTest.java rename to extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/deployment/test/OidcTokenPropagationWithSecurityIdentityAugmentorLazyAuthTest.java index f90592f66199f..ac11c6a85e678 100644 --- a/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/OidcTokenPropagationWithSecurityIdentityAugmentorLazyAuthTest.java +++ b/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/deployment/test/OidcTokenPropagationWithSecurityIdentityAugmentorLazyAuthTest.java @@ -1,6 +1,6 @@ -package io.quarkus.oidc.token.propagation.reactive; +package io.quarkus.oidc.token.propagation.reactive.deployment.test; -import static io.quarkus.oidc.token.propagation.reactive.RolesSecurityIdentityAugmentor.SUPPORTED_USER; +import static io.quarkus.oidc.token.propagation.reactive.deployment.test.RolesSecurityIdentityAugmentor.SUPPORTED_USER; import static org.hamcrest.Matchers.equalTo; import java.util.Set; diff --git a/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/OidcTokenPropagationWithSecurityIdentityAugmentorTest.java b/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/deployment/test/OidcTokenPropagationWithSecurityIdentityAugmentorTest.java similarity index 94% rename from extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/OidcTokenPropagationWithSecurityIdentityAugmentorTest.java rename to extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/deployment/test/OidcTokenPropagationWithSecurityIdentityAugmentorTest.java index 61743be804e2e..b67760fce9a89 100644 --- a/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/OidcTokenPropagationWithSecurityIdentityAugmentorTest.java +++ b/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/deployment/test/OidcTokenPropagationWithSecurityIdentityAugmentorTest.java @@ -1,6 +1,6 @@ -package io.quarkus.oidc.token.propagation.reactive; +package io.quarkus.oidc.token.propagation.reactive.deployment.test; -import static 
io.quarkus.oidc.token.propagation.reactive.RolesSecurityIdentityAugmentor.SUPPORTED_USER; +import static io.quarkus.oidc.token.propagation.reactive.deployment.test.RolesSecurityIdentityAugmentor.SUPPORTED_USER; import static org.hamcrest.Matchers.equalTo; import static org.junit.jupiter.api.Assertions.assertEquals; diff --git a/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/ProtectedResource.java b/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/deployment/test/ProtectedResource.java similarity index 86% rename from extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/ProtectedResource.java rename to extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/deployment/test/ProtectedResource.java index ea3ae35bb1371..d34529b13bc82 100644 --- a/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/ProtectedResource.java +++ b/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/deployment/test/ProtectedResource.java @@ -1,4 +1,4 @@ -package io.quarkus.oidc.token.propagation; +package io.quarkus.oidc.token.propagation.reactive.deployment.test; import jakarta.annotation.security.RolesAllowed; import jakarta.inject.Inject; diff --git a/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/RolesResource.java b/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/deployment/test/RolesResource.java similarity index 89% rename from extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/RolesResource.java rename to extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/deployment/test/RolesResource.java index 6ea97cbe2a7c1..da96d11f99981 100644 --- a/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/RolesResource.java +++ b/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/deployment/test/RolesResource.java @@ -1,4 +1,4 @@ -package io.quarkus.oidc.token.propagation.reactive; +package io.quarkus.oidc.token.propagation.reactive.deployment.test; import jakarta.inject.Inject; import jakarta.ws.rs.GET; diff --git a/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/RolesSecurityIdentityAugmentor.java b/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/deployment/test/RolesSecurityIdentityAugmentor.java similarity index 94% rename from extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/RolesSecurityIdentityAugmentor.java rename to extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/deployment/test/RolesSecurityIdentityAugmentor.java index 47632c40fd86b..e8433edb68895 100644 --- a/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/RolesSecurityIdentityAugmentor.java +++ 
b/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/deployment/test/RolesSecurityIdentityAugmentor.java @@ -1,4 +1,4 @@ -package io.quarkus.oidc.token.propagation.reactive; +package io.quarkus.oidc.token.propagation.reactive.deployment.test; import jakarta.enterprise.context.ApplicationScoped; import jakarta.inject.Inject; diff --git a/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/RolesService.java b/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/deployment/test/RolesService.java similarity index 69% rename from extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/RolesService.java rename to extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/deployment/test/RolesService.java index eed59904a431d..04f2660af3dec 100644 --- a/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/RolesService.java +++ b/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/deployment/test/RolesService.java @@ -1,11 +1,11 @@ -package io.quarkus.oidc.token.propagation.reactive; +package io.quarkus.oidc.token.propagation.reactive.deployment.test; import jakarta.ws.rs.GET; import jakarta.ws.rs.Path; import org.eclipse.microprofile.rest.client.inject.RegisterRestClient; -import io.quarkus.oidc.token.propagation.AccessToken; +import io.quarkus.oidc.token.propagation.common.AccessToken; import io.smallrye.mutiny.Uni; @RegisterRestClient(configKey = "roles") diff --git a/extensions/oidc-token-propagation-reactive/deployment/src/test/resources/application.properties b/extensions/oidc-token-propagation-reactive/deployment/src/test/resources/application.properties index 1b4d313ac7508..9ce2171438b8f 100644 --- a/extensions/oidc-token-propagation-reactive/deployment/src/test/resources/application.properties +++ b/extensions/oidc-token-propagation-reactive/deployment/src/test/resources/application.properties @@ -15,4 +15,4 @@ quarkus.oidc-client.token-path=${keycloak.url}/realms/quarkus/jwt-bearer-token quarkus.rest-client-oidc-token-propagation.exchange-token=true -io.quarkus.oidc.token.propagation.reactive.AccessTokenPropagationService/mp-rest/uri=http://localhost:8081/protected +io.quarkus.oidc.token.propagation.reactive.deployment.test.AccessTokenPropagationService/mp-rest/uri=http://localhost:8081/protected diff --git a/extensions/oidc-token-propagation-reactive/runtime/pom.xml b/extensions/oidc-token-propagation-reactive/runtime/pom.xml index e8b6b0cab747c..c5f9ee82f62f1 100644 --- a/extensions/oidc-token-propagation-reactive/runtime/pom.xml +++ b/extensions/oidc-token-propagation-reactive/runtime/pom.xml @@ -25,6 +25,10 @@ io.quarkus quarkus-oidc-client + + io.quarkus + quarkus-oidc-token-propagation-common + diff --git a/extensions/oidc-token-propagation-reactive/runtime/src/main/java/io/quarkus/oidc/token/propagation/reactive/AccessTokenRequestReactiveFilter.java b/extensions/oidc-token-propagation-reactive/runtime/src/main/java/io/quarkus/oidc/token/propagation/reactive/AccessTokenRequestReactiveFilter.java index 9f3b7c601061e..567371b2dc362 100644 --- a/extensions/oidc-token-propagation-reactive/runtime/src/main/java/io/quarkus/oidc/token/propagation/reactive/AccessTokenRequestReactiveFilter.java +++ 
b/extensions/oidc-token-propagation-reactive/runtime/src/main/java/io/quarkus/oidc/token/propagation/reactive/AccessTokenRequestReactiveFilter.java @@ -1,7 +1,7 @@ package io.quarkus.oidc.token.propagation.reactive; -import static io.quarkus.oidc.token.propagation.TokenPropagationConstants.JWT_PROPAGATE_TOKEN_CREDENTIAL; -import static io.quarkus.oidc.token.propagation.TokenPropagationConstants.OIDC_PROPAGATE_TOKEN_CREDENTIAL; +import static io.quarkus.oidc.token.propagation.common.runtime.TokenPropagationConstants.JWT_PROPAGATE_TOKEN_CREDENTIAL; +import static io.quarkus.oidc.token.propagation.common.runtime.TokenPropagationConstants.OIDC_PROPAGATE_TOKEN_CREDENTIAL; import java.util.Collections; import java.util.function.Consumer; diff --git a/extensions/oidc-token-propagation-reactive/runtime/src/main/java/io/quarkus/oidc/token/propagation/reactive/OidcTokenPropagationReactiveConfig.java b/extensions/oidc-token-propagation-reactive/runtime/src/main/java/io/quarkus/oidc/token/propagation/reactive/runtime/OidcTokenPropagationReactiveConfig.java similarity index 93% rename from extensions/oidc-token-propagation-reactive/runtime/src/main/java/io/quarkus/oidc/token/propagation/reactive/OidcTokenPropagationReactiveConfig.java rename to extensions/oidc-token-propagation-reactive/runtime/src/main/java/io/quarkus/oidc/token/propagation/reactive/runtime/OidcTokenPropagationReactiveConfig.java index e982a38df9e6a..c993594137d0e 100644 --- a/extensions/oidc-token-propagation-reactive/runtime/src/main/java/io/quarkus/oidc/token/propagation/reactive/OidcTokenPropagationReactiveConfig.java +++ b/extensions/oidc-token-propagation-reactive/runtime/src/main/java/io/quarkus/oidc/token/propagation/reactive/runtime/OidcTokenPropagationReactiveConfig.java @@ -1,4 +1,4 @@ -package io.quarkus.oidc.token.propagation.reactive; +package io.quarkus.oidc.token.propagation.reactive.runtime; import java.util.Optional; diff --git a/extensions/oidc-token-propagation/deployment/pom.xml b/extensions/oidc-token-propagation/deployment/pom.xml index 562d4ad647127..b0493801c74d3 100644 --- a/extensions/oidc-token-propagation/deployment/pom.xml +++ b/extensions/oidc-token-propagation/deployment/pom.xml @@ -33,6 +33,10 @@ io.quarkus quarkus-smallrye-jwt-build-deployment + + io.quarkus + quarkus-oidc-token-propagation-common-deployment + io.quarkus diff --git a/extensions/oidc-token-propagation/deployment/src/main/java/io/quarkus/oidc/token/propagation/deployment/OidcTokenPropagationBuildStep.java b/extensions/oidc-token-propagation/deployment/src/main/java/io/quarkus/oidc/token/propagation/deployment/OidcTokenPropagationBuildStep.java index 22013ab0bd273..ff5c19cdfc208 100644 --- a/extensions/oidc-token-propagation/deployment/src/main/java/io/quarkus/oidc/token/propagation/deployment/OidcTokenPropagationBuildStep.java +++ b/extensions/oidc-token-propagation/deployment/src/main/java/io/quarkus/oidc/token/propagation/deployment/OidcTokenPropagationBuildStep.java @@ -1,7 +1,7 @@ package io.quarkus.oidc.token.propagation.deployment; -import static io.quarkus.oidc.token.propagation.TokenPropagationConstants.JWT_PROPAGATE_TOKEN_CREDENTIAL; -import static io.quarkus.oidc.token.propagation.TokenPropagationConstants.OIDC_PROPAGATE_TOKEN_CREDENTIAL; +import static io.quarkus.oidc.token.propagation.common.runtime.TokenPropagationConstants.JWT_PROPAGATE_TOKEN_CREDENTIAL; +import static io.quarkus.oidc.token.propagation.common.runtime.TokenPropagationConstants.OIDC_PROPAGATE_TOKEN_CREDENTIAL; import java.util.List; import 
java.util.function.BooleanSupplier; @@ -18,11 +18,11 @@ import io.quarkus.deployment.annotations.BuildSteps; import io.quarkus.deployment.builditem.SystemPropertyBuildItem; import io.quarkus.deployment.builditem.nativeimage.ReflectiveClassBuildItem; -import io.quarkus.oidc.client.deployment.AccessTokenInstanceBuildItem; -import io.quarkus.oidc.client.deployment.AccessTokenRequestFilterGenerator; import io.quarkus.oidc.token.propagation.AccessTokenRequestFilter; import io.quarkus.oidc.token.propagation.JsonWebToken; import io.quarkus.oidc.token.propagation.JsonWebTokenRequestFilter; +import io.quarkus.oidc.token.propagation.common.deployment.AccessTokenInstanceBuildItem; +import io.quarkus.oidc.token.propagation.common.deployment.AccessTokenRequestFilterGenerator; import io.quarkus.oidc.token.propagation.runtime.OidcTokenPropagationBuildTimeConfig; import io.quarkus.oidc.token.propagation.runtime.OidcTokenPropagationConfig; import io.quarkus.restclient.deployment.RestClientAnnotationProviderBuildItem; diff --git a/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/AccessTokenAnnotationTest.java b/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/AccessTokenAnnotationTest.java similarity index 96% rename from extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/AccessTokenAnnotationTest.java rename to extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/AccessTokenAnnotationTest.java index b38da82918eac..c10e0e2b482e9 100644 --- a/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/AccessTokenAnnotationTest.java +++ b/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/AccessTokenAnnotationTest.java @@ -1,4 +1,4 @@ -package io.quarkus.oidc.token.propagation; +package io.quarkus.oidc.token.propagation.deployment.test; import static org.hamcrest.Matchers.equalTo; @@ -18,6 +18,8 @@ import org.junit.jupiter.api.extension.RegisterExtension; import io.quarkus.arc.Unremovable; +import io.quarkus.oidc.token.propagation.AccessTokenRequestFilter; +import io.quarkus.oidc.token.propagation.common.AccessToken; import io.quarkus.test.QuarkusUnitTest; import io.quarkus.test.common.QuarkusTestResource; import io.quarkus.test.oidc.client.OidcTestClient; @@ -105,7 +107,7 @@ public interface DefaultClientDefaultExchange { } @RegisterRestClient(baseUri = "http://localhost:8081/protected") - @AccessToken(exchangeTokenClient = "Default") + @io.quarkus.oidc.token.propagation.AccessToken(exchangeTokenClient = "Default") @Path("/") public interface DefaultClientEnabledExchange { @GET diff --git a/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/AccessTokenPropagationService.java b/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/AccessTokenPropagationService.java similarity index 68% rename from extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/AccessTokenPropagationService.java rename to extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/AccessTokenPropagationService.java index ee7bb89991a66..047d1e75e2b64 100644 --- 
a/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/AccessTokenPropagationService.java +++ b/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/AccessTokenPropagationService.java @@ -1,11 +1,11 @@ -package io.quarkus.oidc.token.propagation.reactive; +package io.quarkus.oidc.token.propagation.deployment.test; import jakarta.ws.rs.GET; import jakarta.ws.rs.Path; import org.eclipse.microprofile.rest.client.inject.RegisterRestClient; -import io.quarkus.oidc.token.propagation.AccessToken; +import io.quarkus.oidc.token.propagation.common.AccessToken; @RegisterRestClient @AccessToken diff --git a/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/CustomAccessTokenRequestFilter.java b/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/CustomAccessTokenRequestFilter.java similarity index 66% rename from extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/CustomAccessTokenRequestFilter.java rename to extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/CustomAccessTokenRequestFilter.java index bf9d5d11f9808..4787689888cb5 100644 --- a/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/CustomAccessTokenRequestFilter.java +++ b/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/CustomAccessTokenRequestFilter.java @@ -1,4 +1,6 @@ -package io.quarkus.oidc.token.propagation; +package io.quarkus.oidc.token.propagation.deployment.test; + +import io.quarkus.oidc.token.propagation.AccessTokenRequestFilter; public class CustomAccessTokenRequestFilter extends AccessTokenRequestFilter { diff --git a/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/FrontendResource.java b/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/FrontendResource.java similarity index 95% rename from extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/FrontendResource.java rename to extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/FrontendResource.java index 5f0a7cd283a52..9a479c0ae6b77 100644 --- a/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/FrontendResource.java +++ b/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/FrontendResource.java @@ -1,4 +1,4 @@ -package io.quarkus.oidc.token.propagation; +package io.quarkus.oidc.token.propagation.deployment.test; import jakarta.annotation.security.RolesAllowed; import jakarta.inject.Inject; diff --git a/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/OidcTokenPropagationTest.java b/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/OidcTokenPropagationTest.java similarity index 96% rename from extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/OidcTokenPropagationTest.java rename to extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/OidcTokenPropagationTest.java index 
2ad093b9bec6d..ef5083d9d2a36 100644 --- a/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/OidcTokenPropagationTest.java +++ b/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/OidcTokenPropagationTest.java @@ -1,4 +1,4 @@ -package io.quarkus.oidc.token.propagation; +package io.quarkus.oidc.token.propagation.deployment.test; import static org.hamcrest.Matchers.equalTo; diff --git a/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/OidcTokenPropagationWithSecurityIdentityAugmentorLazyAuthTest.java b/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/OidcTokenPropagationWithSecurityIdentityAugmentorLazyAuthTest.java similarity index 92% rename from extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/OidcTokenPropagationWithSecurityIdentityAugmentorLazyAuthTest.java rename to extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/OidcTokenPropagationWithSecurityIdentityAugmentorLazyAuthTest.java index 7a892d26497ec..e875ac0a1f005 100644 --- a/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/OidcTokenPropagationWithSecurityIdentityAugmentorLazyAuthTest.java +++ b/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/OidcTokenPropagationWithSecurityIdentityAugmentorLazyAuthTest.java @@ -1,6 +1,6 @@ -package io.quarkus.oidc.token.propagation; +package io.quarkus.oidc.token.propagation.deployment.test; -import static io.quarkus.oidc.token.propagation.RolesSecurityIdentityAugmentor.SUPPORTED_USER; +import static io.quarkus.oidc.token.propagation.deployment.test.RolesSecurityIdentityAugmentor.SUPPORTED_USER; import static org.hamcrest.Matchers.equalTo; import java.util.Set; diff --git a/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/OidcTokenPropagationWithSecurityIdentityAugmentorTest.java b/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/OidcTokenPropagationWithSecurityIdentityAugmentorTest.java similarity index 92% rename from extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/OidcTokenPropagationWithSecurityIdentityAugmentorTest.java rename to extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/OidcTokenPropagationWithSecurityIdentityAugmentorTest.java index c2165254c3935..8d8dd56c32b76 100644 --- a/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/OidcTokenPropagationWithSecurityIdentityAugmentorTest.java +++ b/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/OidcTokenPropagationWithSecurityIdentityAugmentorTest.java @@ -1,6 +1,6 @@ -package io.quarkus.oidc.token.propagation; +package io.quarkus.oidc.token.propagation.deployment.test; -import static io.quarkus.oidc.token.propagation.RolesSecurityIdentityAugmentor.SUPPORTED_USER; +import static io.quarkus.oidc.token.propagation.deployment.test.RolesSecurityIdentityAugmentor.SUPPORTED_USER; import static org.hamcrest.Matchers.equalTo; import java.util.Set; diff --git 
a/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/ProtectedResource.java b/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/ProtectedResource.java similarity index 88% rename from extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/ProtectedResource.java rename to extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/ProtectedResource.java index 436c3adb99f02..7e042e8f0fdc2 100644 --- a/extensions/oidc-token-propagation-reactive/deployment/src/test/java/io/quarkus/oidc/token/propagation/reactive/ProtectedResource.java +++ b/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/ProtectedResource.java @@ -1,4 +1,4 @@ -package io.quarkus.oidc.token.propagation.reactive; +package io.quarkus.oidc.token.propagation.deployment.test; import jakarta.annotation.security.RolesAllowed; import jakarta.inject.Inject; diff --git a/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/RolesResource.java b/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/RolesResource.java similarity index 90% rename from extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/RolesResource.java rename to extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/RolesResource.java index 2fa2ddd4404fd..a9accb8568631 100644 --- a/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/RolesResource.java +++ b/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/RolesResource.java @@ -1,4 +1,4 @@ -package io.quarkus.oidc.token.propagation; +package io.quarkus.oidc.token.propagation.deployment.test; import jakarta.inject.Inject; import jakarta.ws.rs.GET; diff --git a/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/RolesSecurityIdentityAugmentor.java b/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/RolesSecurityIdentityAugmentor.java similarity index 95% rename from extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/RolesSecurityIdentityAugmentor.java rename to extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/RolesSecurityIdentityAugmentor.java index 03b97c253fe52..acf221e57f9b7 100644 --- a/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/RolesSecurityIdentityAugmentor.java +++ b/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/RolesSecurityIdentityAugmentor.java @@ -1,4 +1,4 @@ -package io.quarkus.oidc.token.propagation; +package io.quarkus.oidc.token.propagation.deployment.test; import jakarta.enterprise.context.ApplicationScoped; import jakarta.inject.Inject; diff --git a/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/RolesService.java b/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/RolesService.java similarity index 68% rename from 
extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/RolesService.java rename to extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/RolesService.java index 5a2047e9dcff5..56e3e1c6cfbd2 100644 --- a/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/RolesService.java +++ b/extensions/oidc-token-propagation/deployment/src/test/java/io/quarkus/oidc/token/propagation/deployment/test/RolesService.java @@ -1,10 +1,12 @@ -package io.quarkus.oidc.token.propagation; +package io.quarkus.oidc.token.propagation.deployment.test; import jakarta.ws.rs.GET; import jakarta.ws.rs.Path; import org.eclipse.microprofile.rest.client.inject.RegisterRestClient; +import io.quarkus.oidc.token.propagation.common.AccessToken; + @RegisterRestClient(configKey = "roles") @AccessToken @Path("/") diff --git a/extensions/oidc-token-propagation/deployment/src/test/resources/application.properties b/extensions/oidc-token-propagation/deployment/src/test/resources/application.properties index 4de773e28284b..5bf7b7b2134d4 100644 --- a/extensions/oidc-token-propagation/deployment/src/test/resources/application.properties +++ b/extensions/oidc-token-propagation/deployment/src/test/resources/application.properties @@ -13,4 +13,4 @@ quarkus.oidc-client.token-path=${keycloak.url}/realms/quarkus/jwt-bearer-token quarkus.resteasy-client-oidc-token-propagation.exchange-token=true -io.quarkus.oidc.token.propagation.AccessTokenPropagationService/mp-rest/uri=http://localhost:8081/protected +io.quarkus.oidc.token.propagation.deployment.test.AccessTokenPropagationService/mp-rest/uri=http://localhost:8081/protected diff --git a/extensions/oidc-token-propagation/runtime/pom.xml b/extensions/oidc-token-propagation/runtime/pom.xml index 5dc9a094fc416..8f11c18f7a5bb 100644 --- a/extensions/oidc-token-propagation/runtime/pom.xml +++ b/extensions/oidc-token-propagation/runtime/pom.xml @@ -33,6 +33,10 @@ io.quarkus quarkus-smallrye-jwt-build + + io.quarkus + quarkus-oidc-token-propagation-common + diff --git a/extensions/oidc-token-propagation/runtime/src/main/java/io/quarkus/oidc/token/propagation/AccessTokenRequestFilter.java b/extensions/oidc-token-propagation/runtime/src/main/java/io/quarkus/oidc/token/propagation/AccessTokenRequestFilter.java index a1fb7787a1e91..527613a70b156 100644 --- a/extensions/oidc-token-propagation/runtime/src/main/java/io/quarkus/oidc/token/propagation/AccessTokenRequestFilter.java +++ b/extensions/oidc-token-propagation/runtime/src/main/java/io/quarkus/oidc/token/propagation/AccessTokenRequestFilter.java @@ -1,7 +1,7 @@ package io.quarkus.oidc.token.propagation; -import static io.quarkus.oidc.token.propagation.TokenPropagationConstants.JWT_PROPAGATE_TOKEN_CREDENTIAL; -import static io.quarkus.oidc.token.propagation.TokenPropagationConstants.OIDC_PROPAGATE_TOKEN_CREDENTIAL; +import static io.quarkus.oidc.token.propagation.common.runtime.TokenPropagationConstants.JWT_PROPAGATE_TOKEN_CREDENTIAL; +import static io.quarkus.oidc.token.propagation.common.runtime.TokenPropagationConstants.OIDC_PROPAGATE_TOKEN_CREDENTIAL; import java.io.IOException; import java.util.Collections; diff --git a/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/OidcBuildStep.java b/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/OidcBuildStep.java index b7e026e78f7e6..2b39d3eaabe70 100644 --- 
a/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/OidcBuildStep.java +++ b/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/OidcBuildStep.java @@ -97,7 +97,7 @@ import io.quarkus.vertx.http.deployment.FilterBuildItem; import io.quarkus.vertx.http.deployment.HttpAuthMechanismAnnotationBuildItem; import io.quarkus.vertx.http.deployment.SecurityInformationBuildItem; -import io.quarkus.vertx.http.runtime.HttpBuildTimeConfig; +import io.quarkus.vertx.http.runtime.VertxHttpBuildTimeConfig; import io.smallrye.jwt.auth.cdi.ClaimValueProducer; import io.smallrye.jwt.auth.cdi.CommonJwtProducer; import io.smallrye.jwt.auth.cdi.JsonValueProducer; @@ -345,11 +345,11 @@ SyntheticBeanBuildItem setup(OidcConfig config, OidcRecorder recorder, SecurityC @BuildStep @Record(ExecutionTime.STATIC_INIT) public void registerTenantResolverInterceptor(Capabilities capabilities, OidcRecorder recorder, - HttpBuildTimeConfig buildTimeConfig, + VertxHttpBuildTimeConfig httpBuildTimeConfig, CombinedIndexBuildItem combinedIndexBuildItem, BuildProducer bindingProducer, BuildProducer systemPropertyProducer) { - if (!buildTimeConfig.auth.proactive + if (!httpBuildTimeConfig.auth().proactive() && (capabilities.isPresent(Capability.RESTEASY_REACTIVE) || capabilities.isPresent(Capability.RESTEASY))) { boolean foundTenantResolver = combinedIndexBuildItem .getIndex() diff --git a/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/AbstractDevUIProcessor.java b/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/AbstractDevUIProcessor.java index 383dd938ddfc3..1bb05aef6823c 100644 --- a/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/AbstractDevUIProcessor.java +++ b/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/AbstractDevUIProcessor.java @@ -20,7 +20,7 @@ import io.quarkus.runtime.RuntimeValue; import io.quarkus.vertx.http.deployment.NonApplicationRootPathBuildItem; import io.quarkus.vertx.http.deployment.RouteBuildItem; -import io.quarkus.vertx.http.runtime.HttpConfiguration; +import io.quarkus.vertx.http.runtime.VertxHttpConfig; import io.smallrye.config.ConfigValue; public abstract class AbstractDevUIProcessor { @@ -46,7 +46,7 @@ protected static CardPageBuildItem createProviderWebComponent(OidcDevUiRecorder Map keycloakUsers, List keycloakRealms, boolean alwaysLogoutUserInDevUiOnReload, - HttpConfiguration httpConfiguration, boolean discoverMetadata, String authServerUrl) { + VertxHttpConfig httpConfig, boolean discoverMetadata, String authServerUrl) { final CardPageBuildItem cardPage = new CardPageBuildItem(); // prepare provider component @@ -85,7 +85,7 @@ protected static CardPageBuildItem createProviderWebComponent(OidcDevUiRecorder graphqlIsAvailable, swaggerUiPath, graphqlUiPath, alwaysLogoutUserInDevUiOnReload, discoverMetadata, authServerUrl); - recorder.createJsonRPCService(beanContainer.getValue(), runtimeProperties, httpConfiguration); + recorder.createJsonRPCService(beanContainer.getValue(), runtimeProperties, httpConfig); return cardPage; } diff --git a/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/OidcDevUIProcessor.java b/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/OidcDevUIProcessor.java index 687ef5e9b9859..6d095b1749438 100644 --- a/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/OidcDevUIProcessor.java +++ 
b/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/OidcDevUIProcessor.java @@ -27,7 +27,7 @@ import io.quarkus.runtime.configuration.ConfigUtils; import io.quarkus.vertx.core.deployment.CoreVertxBuildItem; import io.quarkus.vertx.http.deployment.NonApplicationRootPathBuildItem; -import io.quarkus.vertx.http.runtime.HttpConfiguration; +import io.quarkus.vertx.http.runtime.VertxHttpConfig; public class OidcDevUIProcessor extends AbstractDevUIProcessor { @@ -49,7 +49,7 @@ public class OidcDevUIProcessor extends AbstractDevUIProcessor { @Consume(CoreVertxBuildItem.class) // metadata discovery requires Vertx instance @Consume(RuntimeConfigSetupCompleteBuildItem.class) void prepareOidcDevConsole(Capabilities capabilities, - HttpConfiguration httpConfiguration, + VertxHttpConfig httpConfig, BeanContainerBuildItem beanContainer, NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem, BuildProducer cardPageProducer, @@ -93,7 +93,7 @@ void prepareOidcDevConsole(Capabilities capabilities, null, null, true, - httpConfiguration, discoverMetadata, authServerUrl); + httpConfig, discoverMetadata, authServerUrl); cardPageProducer.produce(cardPage); } } diff --git a/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/keycloak/KeycloakDevUIProcessor.java b/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/keycloak/KeycloakDevUIProcessor.java index 71072fadc006d..4b3170d7f71d1 100644 --- a/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/keycloak/KeycloakDevUIProcessor.java +++ b/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/keycloak/KeycloakDevUIProcessor.java @@ -27,7 +27,7 @@ import io.quarkus.oidc.runtime.devui.OidcDevUiRecorder; import io.quarkus.vertx.http.deployment.NonApplicationRootPathBuildItem; import io.quarkus.vertx.http.deployment.RouteBuildItem; -import io.quarkus.vertx.http.runtime.HttpConfiguration; +import io.quarkus.vertx.http.runtime.VertxHttpConfig; public class KeycloakDevUIProcessor extends AbstractDevUIProcessor { @@ -38,7 +38,7 @@ public class KeycloakDevUIProcessor extends AbstractDevUIProcessor { @Consume(RuntimeConfigSetupCompleteBuildItem.class) void produceProviderComponent(Optional configProps, BuildProducer keycloakAdminPageProducer, - HttpConfiguration httpConfiguration, + VertxHttpConfig httpConfig, OidcDevUiRecorder recorder, NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem, BeanContainerBuildItem beanContainer, @@ -72,7 +72,7 @@ void produceProviderComponent(Optional confi users, keycloakRealms, configProps.get().isContainerRestarted(), - httpConfiguration, false, null); + httpConfig, false, null); // use same card page so that both pages appear on the same card var keycloakAdminPageItem = new KeycloakAdminPageBuildItem(cardPageBuildItem); keycloakAdminPageProducer.produce(keycloakAdminPageItem); diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/OidcSession.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/OidcSession.java index 9f50af65aed4c..e68ca2a82c577 100644 --- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/OidcSession.java +++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/OidcSession.java @@ -16,19 +16,6 @@ public interface OidcSession { */ String getTenantId(); - /** - * Return an {@linkplain:Instant} indicating how long will it take for the current session to expire. 
- * - * @deprecated This method shouldn't be used as it provides an instant corresponding to 1970-01-01T0:0:0Z plus the duration - * of the validity of the token, which is impractical. Please use either {@link #expiresAt()} or - * {@link #validFor()} depending on your requirements. This method will be removed in a later version of - * Quarkus. - * - * @return Instant - */ - @Deprecated(forRemoval = true, since = "2.12.0") - Instant expiresIn(); - /** * Return an {@linkplain Instant} representing the current session's expiration time. * diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/OidcTenantConfig.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/OidcTenantConfig.java index d32ed85645deb..e24ab66182092 100644 --- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/OidcTenantConfig.java +++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/OidcTenantConfig.java @@ -1420,6 +1420,11 @@ public boolean failOnMissingStateParam() { return failOnMissingStateParam; } + @Override + public boolean failOnUnresolvedKid() { + return failOnUnresolvedKid; + } + @Override public Optional userInfoRequired() { return userInfoRequired; @@ -1684,6 +1689,22 @@ public enum ResponseMode { */ public boolean failOnMissingStateParam = false; + /** + * Fail with the HTTP 401 error if the ID token signature can not be verified during the re-authentication only due to + * an unresolved token key identifier (`kid`). + *

    + * This property might need to be disabled when authentication from multiple browser tabs is allowed, with one of the tabs keeping + * an expired ID token whose `kid` + * can no longer be resolved because the verification key set was refreshed after another tab initiated an authorization code flow. In + * such cases, instead of failing with the HTTP 401 error, + * redirecting the user to re-authenticate with the HTTP 302 status may provide a better user experience. + *

    + * Note that the HTTP 401 error is always returned if the ID token signature can not be verified due to an unresolved + * `kid` during the initial ID token verification + * following the authorization code flow completion, before a session cookie is created. + */ + public boolean failOnUnresolvedKid = true; + /** * If this property is set to `true`, an OIDC UserInfo endpoint is called. *

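For orientation, a minimal application.properties sketch of how an application could relax the new check documented above. The kebab-case property name is assumed from the usual Quarkus config mapping of `failOnUnresolvedKid` (mirroring `fail-on-missing-state-param`); it is not spelled out in this patch.

    # Sketch only: property name assumed from the standard Quarkus config mapping convention.
    # When set to false, an expired ID token whose `kid` can no longer be resolved after a
    # verification key set refresh (for example, another tab completed a new code flow)
    # results in a 302 redirect to re-authenticate instead of an HTTP 401 response.
    quarkus.oidc.authentication.fail-on-unresolved-kid=false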
    @@ -2042,6 +2063,7 @@ private void addConfigMappingValues(io.quarkus.oidc.runtime.OidcTenantConfig.Aut cookieSameSite = CookieSameSite.valueOf(mapping.cookieSameSite().toString()); allowMultipleCodeFlows = mapping.allowMultipleCodeFlows(); failOnMissingStateParam = mapping.failOnMissingStateParam(); + failOnUnresolvedKid = mapping.failOnUnresolvedKid(); userInfoRequired = mapping.userInfoRequired(); sessionAgeExtension = mapping.sessionAgeExtension(); stateCookieAge = mapping.stateCookieAge(); @@ -2247,7 +2269,7 @@ public static Token fromAudience(String... audience) { * For this option be effective the `authentication.session-age-extension` property should also be set to a nonzero * value since the refresh token is currently kept in the user session. * - * This option is valid only when the application is of type {@link ApplicationType#WEB_APP}}. + * This option is valid only when the application is of type {@link ApplicationType#WEB_APP}. * * This property is enabled if `quarkus.oidc.token.refresh-token-time-skew` is configured, * you do not need to enable this property manually in this case. @@ -2270,7 +2292,7 @@ public static Token fromAudience(String... audience) { /** * Custom HTTP header that contains a bearer token. - * This option is valid only when the application is of type {@link ApplicationType#SERVICE}}. + * This option is valid only when the application is of type {@link ApplicationType#SERVICE}. */ public Optional header = Optional.empty(); diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/CodeAuthenticationMechanism.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/CodeAuthenticationMechanism.java index 32491900bcb36..755ca07d481ad 100644 --- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/CodeAuthenticationMechanism.java +++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/CodeAuthenticationMechanism.java @@ -23,6 +23,7 @@ import org.jose4j.jwt.consumer.ErrorCodes; import org.jose4j.jwt.consumer.InvalidJwtException; import org.jose4j.lang.JoseException; +import org.jose4j.lang.UnresolvableKeyException; import io.netty.handler.codec.http.HttpResponseStatus; import io.quarkus.logging.Log; @@ -375,14 +376,31 @@ public Uni apply(Throwable t) { .hasErrorCode(ErrorCodes.EXPIRED); if (!expired) { - String error = logAuthenticationError(context, t); + + Throwable failure = null; + + boolean unresolvedKey = t.getCause() instanceof InvalidJwtException + && (t.getCause().getCause() instanceof UnresolvableKeyException); + if (unresolvedKey + && !configContext.oidcConfig().authentication().failOnUnresolvedKid() + && OidcUtils.isJwtTokenExpired(currentIdToken)) { + // It can happen in multi-tab applications where a user login causes a JWK set refresh + // due to the key rotation, discarding old keys, and the old tab still keeps the session + // whose signature can only be verified with the now discarded key. + LOG.debugf( + "Session can not be verified due to an unresolved key exception, reauthentication is required"); + // Redirect the user to the OIDC provider to re-authenticate + failure = new AuthenticationFailedException(); + } else { + // Failures such as the signature verification failures require 401 status + String error = logAuthenticationError(context, t); + failure = t.getCause() instanceof AuthenticationCompletionException + ? 
t.getCause() + : new AuthenticationCompletionException(error, t.getCause()); + } + return removeSessionCookie(context, configContext.oidcConfig()) - .replaceWith(Uni.createFrom() - .failure(t - .getCause() instanceof AuthenticationCompletionException - ? t.getCause() - : new AuthenticationCompletionException( - error, t.getCause()))); + .replaceWith(Uni.createFrom().failure(failure)); } // Token has expired, try to refresh if (isRpInitiatedLogout(context, configContext)) { diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcSessionImpl.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcSessionImpl.java index 5972902066f09..81fcfe90aaa38 100644 --- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcSessionImpl.java +++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcSessionImpl.java @@ -46,12 +46,6 @@ public Uni apply(OidcTenantConfig oidcConfig) { } - @Override - public Instant expiresIn() { - final long nowSecs = System.currentTimeMillis() / 1000; - return Instant.ofEpochSecond(idToken.getExpirationTime() - nowSecs); - } - @Override public Instant expiresAt() { return Instant.ofEpochSecond(idToken.getExpirationTime()); diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcTenantConfig.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcTenantConfig.java index c3b589941c6d3..bb7f06035d7e7 100644 --- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcTenantConfig.java +++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcTenantConfig.java @@ -756,6 +756,19 @@ enum ResponseMode { @WithDefault("false") boolean failOnMissingStateParam(); + /** + * Fail with the HTTP 401 error if the ID token signature can not be verified during the re-authentication only due to + * an unresolved token key identifier (`kid`). + *

    + * This property might need to be disabled when authentication from multiple browser tabs is allowed, with one of the tabs keeping + * an expired ID token whose `kid` + * can no longer be resolved because the verification key set was refreshed after another tab initiated an authorization code flow. In + * such cases, instead of failing with the HTTP 401 error, + * redirecting the user to re-authenticate with the HTTP 302 status may provide a better user experience. + */ + @WithDefault("true") + boolean failOnUnresolvedKid(); + /** * If this property is set to `true`, an OIDC UserInfo endpoint is called. *

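For completeness, a hedged Java sketch of the programmatic equivalent, using the `AuthenticationConfigBuilder#failOnUnresolvedKid(boolean)` method added further down in this patch. The surrounding fluent calls (`OidcTenantConfig.builder()`, `.authentication()`, `.end()`, `.build()`) and the tenant id are assumptions about the existing builder API rather than part of this change.

    import io.quarkus.oidc.OidcTenantConfig;

    // Sketch only: builds a tenant config that prefers a 302 re-authentication redirect
    // over an HTTP 401 when an expired ID token's `kid` can no longer be resolved.
    OidcTenantConfig tenantConfig = OidcTenantConfig.builder()
            .tenantId("multi-tab-tenant")      // hypothetical tenant id
            .authentication()                  // assumed entry point to AuthenticationConfigBuilder
            .failOnUnresolvedKid(false)        // method introduced by this patch
            .end()                             // assumed return to the parent builder
            .build();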
    @@ -1007,7 +1020,7 @@ interface Token { * For this option be effective the `authentication.session-age-extension` property should also be set to a nonzero * value since the refresh token is currently kept in the user session. * - * This option is valid only when the application is of type {@link ApplicationType#WEB_APP}}. + * This option is valid only when the application is of type {@link ApplicationType#WEB_APP}. * * This property is enabled if `quarkus.oidc.token.refresh-token-time-skew` is configured, * you do not need to enable this property manually in this case. @@ -1032,7 +1045,7 @@ interface Token { /** * Custom HTTP header that contains a bearer token. - * This option is valid only when the application is of type {@link ApplicationType#SERVICE}}. + * This option is valid only when the application is of type {@link ApplicationType#SERVICE}. */ Optional header(); diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcUtils.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcUtils.java index 878f0295a76bf..554cd51a390d9 100644 --- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcUtils.java +++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcUtils.java @@ -126,6 +126,14 @@ private OidcUtils() { } + public static JsonObject decodeJwtContent(String jwt) { + return OidcCommonUtils.decodeJwtContent(jwt); + } + + public static String getJwtContentPart(String jwt) { + return OidcCommonUtils.getJwtContentPart(jwt); + } + public static String getSessionCookie(RoutingContext context, OidcTenantConfig oidcTenantConfig) { final Map cookies = context.request().cookieMap(); return getSessionCookie(context.data(), cookies, oidcTenantConfig); diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/builders/AuthenticationConfigBuilder.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/builders/AuthenticationConfigBuilder.java index 09c38218e2505..e037256ee7921 100644 --- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/builders/AuthenticationConfigBuilder.java +++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/builders/AuthenticationConfigBuilder.java @@ -26,7 +26,8 @@ private record AuthenticationImpl(Optional responseMode, Optional< Optional addOpenidScope, Map extraParams, Optional> forwardParams, boolean cookieForceSecure, Optional cookieSuffix, String cookiePath, Optional cookiePathHeader, Optional cookieDomain, CookieSameSite cookieSameSite, boolean allowMultipleCodeFlows, - boolean failOnMissingStateParam, Optional userInfoRequired, Duration sessionAgeExtension, + boolean failOnMissingStateParam, boolean failOnUnresolvedKid, Optional userInfoRequired, + Duration sessionAgeExtension, Duration stateCookieAge, boolean javaScriptAutoRedirect, Optional idTokenRequired, Optional internalIdTokenLifespan, Optional pkceRequired, Optional pkceSecret, Optional stateSecret) implements Authentication { @@ -55,6 +56,7 @@ private record AuthenticationImpl(Optional responseMode, Optional< private CookieSameSite cookieSameSite; private boolean allowMultipleCodeFlows; private boolean failOnMissingStateParam; + private boolean failOnUnresolvedKid; private Optional userInfoRequired; private Duration sessionAgeExtension; private Duration stateCookieAge; @@ -98,6 +100,7 @@ public AuthenticationConfigBuilder(OidcTenantConfigBuilder builder) { this.cookieSameSite = authentication.cookieSameSite(); this.allowMultipleCodeFlows = 
authentication.allowMultipleCodeFlows(); this.failOnMissingStateParam = authentication.failOnMissingStateParam(); + this.failOnUnresolvedKid = authentication.failOnUnresolvedKid(); this.userInfoRequired = authentication.userInfoRequired(); this.sessionAgeExtension = authentication.sessionAgeExtension(); this.stateCookieAge = authentication.stateCookieAge(); @@ -405,6 +408,24 @@ public AuthenticationConfigBuilder failOnMissingStateParam(boolean failOnMissing return this; } + /** + * Sets {@link Authentication#failOnUnresolvedKid()} to true. + * + * @return this builder + */ + public AuthenticationConfigBuilder failOnUnresolvedKid() { + return failOnUnresolvedKid(true); + } + + /** + * @param failOnUnresolvedKid {@link Authentication#failOnUnresolvedKid()} + * @return this builder + */ + public AuthenticationConfigBuilder failOnUnresolvedKid(boolean failOnUnresolvedKid) { + this.failOnUnresolvedKid = failOnUnresolvedKid; + return this; + } + /** * Sets {@link Authentication#userInfoRequired()} to true. * @@ -554,6 +575,7 @@ public Authentication build() { sessionExpiredPath, verifyAccessToken, forceRedirectHttpsScheme, optionalScopes, scopeSeparator, nonceRequired, addOpenidScope, Map.copyOf(extraParams), optionalForwardParams, cookieForceSecure, cookieSuffix, cookiePath, cookiePathHeader, cookieDomain, cookieSameSite, allowMultipleCodeFlows, failOnMissingStateParam, + failOnUnresolvedKid, userInfoRequired, sessionAgeExtension, stateCookieAge, javaScriptAutoRedirect, idTokenRequired, internalIdTokenLifespan, pkceRequired, pkceSecret, stateSecret); } diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/devui/OidcDevJsonRpcService.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/devui/OidcDevJsonRpcService.java index 742677989272b..41ec4d28187d3 100644 --- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/devui/OidcDevJsonRpcService.java +++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/devui/OidcDevJsonRpcService.java @@ -6,7 +6,7 @@ import org.eclipse.microprofile.config.ConfigProvider; -import io.quarkus.vertx.http.runtime.HttpConfiguration; +import io.quarkus.vertx.http.runtime.VertxHttpConfig; import io.smallrye.common.annotation.NonBlocking; import io.smallrye.mutiny.Multi; import io.smallrye.mutiny.Uni; @@ -14,7 +14,7 @@ public class OidcDevJsonRpcService { private OidcDevUiRpcSvcPropertiesBean props; - private HttpConfiguration httpConfiguration; + private VertxHttpConfig httpConfig; @Inject OidcDevLoginObserver oidcDevTokensObserver; @@ -25,7 +25,7 @@ public class OidcDevJsonRpcService { @NonBlocking public OidcDevUiRuntimePropertiesDTO getProperties() { return new OidcDevUiRuntimePropertiesDTO(props.getAuthorizationUrl(), props.getTokenUrl(), props.getLogoutUrl(), - ConfigProvider.getConfig(), httpConfiguration.port, + ConfigProvider.getConfig(), httpConfig.port(), props.getOidcProviderName(), props.getOidcApplicationType(), props.getOidcGrantType(), props.isIntrospectionIsAvailable(), props.getKeycloakAdminUrl(), props.getKeycloakRealms(), props.isSwaggerIsAvailable(), props.isGraphqlIsAvailable(), props.getSwaggerUiPath(), @@ -61,8 +61,8 @@ public Multi streamOidcLoginEvent() { return oidcDevTokensObserver.streamOidcLoginEvent(); } - void hydrate(OidcDevUiRpcSvcPropertiesBean properties, HttpConfiguration httpConfiguration) { + void hydrate(OidcDevUiRpcSvcPropertiesBean properties, VertxHttpConfig httpConfig) { this.props = properties; - this.httpConfiguration = httpConfiguration; + this.httpConfig = 
httpConfig; } } diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/devui/OidcDevUiRecorder.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/devui/OidcDevUiRecorder.java index 7cefaff7013f0..2543b047978e4 100644 --- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/devui/OidcDevUiRecorder.java +++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/devui/OidcDevUiRecorder.java @@ -12,7 +12,7 @@ import io.quarkus.runtime.RuntimeValue; import io.quarkus.runtime.annotations.Recorder; import io.quarkus.vertx.core.runtime.VertxCoreRecorder; -import io.quarkus.vertx.http.runtime.HttpConfiguration; +import io.quarkus.vertx.http.runtime.VertxHttpConfig; import io.vertx.core.Handler; import io.vertx.core.http.HttpHeaders; import io.vertx.core.json.JsonObject; @@ -33,9 +33,9 @@ public OidcDevUiRecorder(RuntimeValue oidcConfigRuntimeValue) { } public void createJsonRPCService(BeanContainer beanContainer, - RuntimeValue oidcDevUiRpcSvcPropertiesBean, HttpConfiguration httpConfiguration) { + RuntimeValue oidcDevUiRpcSvcPropertiesBean, VertxHttpConfig httpConfig) { OidcDevJsonRpcService jsonRpcService = beanContainer.beanInstance(OidcDevJsonRpcService.class); - jsonRpcService.hydrate(oidcDevUiRpcSvcPropertiesBean.getValue(), httpConfiguration); + jsonRpcService.hydrate(oidcDevUiRpcSvcPropertiesBean.getValue(), httpConfig); } public RuntimeValue getRpcServiceProperties(String authorizationUrl, String tokenUrl, diff --git a/extensions/oidc/runtime/src/test/java/io/quarkus/oidc/runtime/OidcTenantConfigImpl.java b/extensions/oidc/runtime/src/test/java/io/quarkus/oidc/runtime/OidcTenantConfigImpl.java index 901016e517153..fbdeaeca98292 100644 --- a/extensions/oidc/runtime/src/test/java/io/quarkus/oidc/runtime/OidcTenantConfigImpl.java +++ b/extensions/oidc/runtime/src/test/java/io/quarkus/oidc/runtime/OidcTenantConfigImpl.java @@ -142,6 +142,7 @@ enum ConfigMappingMethods { AUTHENTICATION_COOKIE_SAME_SITE, AUTHENTICATION_ALLOW_MULTIPLE_CODE_FLOWS, AUTHENTICATION_FAIL_ON_MISSING_STATE_PARAM, + AUTHENTICATION_FAIL_ON_UNRESOLVED_KID, AUTHENTICATION_USER_INFO_REQUIRED, AUTHENTICATION_SESSION_AGE_EXTENSION, AUTHENTICATION_STATE_COOKIE_AGE, @@ -706,6 +707,12 @@ public boolean failOnMissingStateParam() { return false; } + @Override + public boolean failOnUnresolvedKid() { + invocationsRecorder.put(ConfigMappingMethods.AUTHENTICATION_FAIL_ON_UNRESOLVED_KID, true); + return false; + } + @Override public Optional userInfoRequired() { invocationsRecorder.put(ConfigMappingMethods.AUTHENTICATION_USER_INFO_REQUIRED, true); diff --git a/extensions/opentelemetry/deployment/src/main/java/io/quarkus/opentelemetry/deployment/OpenTelemetryProcessor.java b/extensions/opentelemetry/deployment/src/main/java/io/quarkus/opentelemetry/deployment/OpenTelemetryProcessor.java index 4b1846f90a857..a5d0124fa9172 100644 --- a/extensions/opentelemetry/deployment/src/main/java/io/quarkus/opentelemetry/deployment/OpenTelemetryProcessor.java +++ b/extensions/opentelemetry/deployment/src/main/java/io/quarkus/opentelemetry/deployment/OpenTelemetryProcessor.java @@ -36,11 +36,11 @@ import io.opentelemetry.sdk.autoconfigure.spi.traces.ConfigurableSamplerProvider; import io.opentelemetry.sdk.autoconfigure.spi.traces.ConfigurableSpanExporterProvider; import io.quarkus.agroal.spi.JdbcDataSourceBuildItem; -import io.quarkus.agroal.spi.OpenTelemetryInitBuildItem; import io.quarkus.arc.deployment.AdditionalBeanBuildItem; import io.quarkus.arc.deployment.AnnotationsTransformerBuildItem; 
import io.quarkus.arc.deployment.BeanContainerBuildItem; import io.quarkus.arc.deployment.InterceptorBindingRegistrarBuildItem; +import io.quarkus.arc.deployment.OpenTelemetrySdkBuildItem; import io.quarkus.arc.deployment.SyntheticBeanBuildItem; import io.quarkus.arc.deployment.ValidationPhaseBuildItem.ValidationErrorBuildItem; import io.quarkus.arc.processor.InterceptorBindingRegistrar; @@ -52,7 +52,6 @@ import io.quarkus.deployment.annotations.BuildStep; import io.quarkus.deployment.annotations.BuildSteps; import io.quarkus.deployment.annotations.ExecutionTime; -import io.quarkus.deployment.annotations.Produce; import io.quarkus.deployment.annotations.Record; import io.quarkus.deployment.builditem.LaunchModeBuildItem; import io.quarkus.deployment.builditem.RemovedResourceBuildItem; @@ -107,8 +106,11 @@ AdditionalBeanBuildItem ensureProducerIsRetained() { @BuildStep @Record(ExecutionTime.RUNTIME_INIT) - SyntheticBeanBuildItem openTelemetryBean(OpenTelemetryRecorder recorder, OTelRuntimeConfig oTelRuntimeConfig) { - return SyntheticBeanBuildItem.configure(OpenTelemetry.class) + void openTelemetryBean(OpenTelemetryRecorder recorder, + OTelRuntimeConfig oTelRuntimeConfig, + BuildProducer syntheticProducer, + BuildProducer openTelemetrySdkBuildItemBuildProducer) { + syntheticProducer.produce(SyntheticBeanBuildItem.configure(OpenTelemetry.class) .defaultBean() .setRuntimeInit() .unremovable() @@ -122,7 +124,10 @@ SyntheticBeanBuildItem openTelemetryBean(OpenTelemetryRecorder recorder, OTelRun null)) .createWith(recorder.opentelemetryBean(oTelRuntimeConfig)) .destroyer(OpenTelemetryDestroyer.class) - .done(); + .done()); + + openTelemetrySdkBuildItemBuildProducer.produce( + new OpenTelemetrySdkBuildItem(recorder.isOtelSdkEnabled(oTelRuntimeConfig))); } @BuildStep @@ -258,7 +263,6 @@ void transformWithSpan(BuildProducer annotation @BuildStep @Record(ExecutionTime.RUNTIME_INIT) - @Produce(OpenTelemetryInitBuildItem.class) void createOpenTelemetry( OpenTelemetryRecorder recorder, CoreVertxBuildItem vertx, diff --git a/extensions/opentelemetry/deployment/src/main/java/io/quarkus/opentelemetry/deployment/logging/LogHandlerProcessor.java b/extensions/opentelemetry/deployment/src/main/java/io/quarkus/opentelemetry/deployment/logging/LogHandlerProcessor.java index b552448cf0327..06c66ebc73b1b 100644 --- a/extensions/opentelemetry/deployment/src/main/java/io/quarkus/opentelemetry/deployment/logging/LogHandlerProcessor.java +++ b/extensions/opentelemetry/deployment/src/main/java/io/quarkus/opentelemetry/deployment/logging/LogHandlerProcessor.java @@ -8,8 +8,8 @@ import io.opentelemetry.sdk.autoconfigure.spi.logs.ConfigurableLogRecordExporterProvider; import io.opentelemetry.sdk.logs.LogRecordProcessor; import io.opentelemetry.sdk.logs.export.LogRecordExporter; -import io.quarkus.agroal.spi.OpenTelemetryInitBuildItem; import io.quarkus.arc.deployment.BeanContainerBuildItem; +import io.quarkus.arc.deployment.OpenTelemetrySdkBuildItem; import io.quarkus.arc.deployment.UnremovableBeanBuildItem; import io.quarkus.deployment.annotations.BuildProducer; import io.quarkus.deployment.annotations.BuildStep; @@ -45,7 +45,7 @@ void nativeSupport(BuildProducer servicesProducer) { @BuildStep @Record(ExecutionTime.RUNTIME_INIT) - @Consume(OpenTelemetryInitBuildItem.class) + @Consume(OpenTelemetrySdkBuildItem.class) LogHandlerBuildItem build(OpenTelemetryLogRecorder recorder, OTelRuntimeConfig config, BeanContainerBuildItem beanContainerBuildItem) { diff --git 
a/extensions/opentelemetry/deployment/src/main/java/io/quarkus/opentelemetry/deployment/metric/MetricProcessor.java b/extensions/opentelemetry/deployment/src/main/java/io/quarkus/opentelemetry/deployment/metric/MetricProcessor.java index fcc57f9187165..9d698acc0b959 100644 --- a/extensions/opentelemetry/deployment/src/main/java/io/quarkus/opentelemetry/deployment/metric/MetricProcessor.java +++ b/extensions/opentelemetry/deployment/src/main/java/io/quarkus/opentelemetry/deployment/metric/MetricProcessor.java @@ -36,7 +36,12 @@ public class MetricProcessor { private static final DotName METRIC_PROCESSOR = DotName.createSimple(MetricProcessor.class.getName()); @BuildStep - void addNativeMonitoring(BuildProducer nativeMonitoring) { + void startJvmMetrics(BuildProducer nativeMonitoring, + BuildProducer additionalBeans) { + additionalBeans.produce(AdditionalBeanBuildItem.builder() + .setUnremovable() + .addBeanClass(JvmMetricsService.class) + .build()); nativeMonitoring.produce(new NativeMonitoringBuildItem(NativeConfig.MonitoringOption.JFR)); } @@ -48,7 +53,6 @@ UnremovableBeanBuildItem ensureProducersAreRetained( additionalBeans.produce(AdditionalBeanBuildItem.builder() .setUnremovable() .addBeanClass(MetricsProducer.class) - .addBeanClass(JvmMetricsService.class) .build()); IndexView index = indexBuildItem.getIndex(); diff --git a/extensions/opentelemetry/deployment/src/test/java/io/quarkus/opentelemetry/deployment/instrumentation/GrpcOpenTelemetryTest.java b/extensions/opentelemetry/deployment/src/test/java/io/quarkus/opentelemetry/deployment/instrumentation/GrpcOpenTelemetryTest.java index d96b72c5e5897..80ebeb232f4a4 100644 --- a/extensions/opentelemetry/deployment/src/test/java/io/quarkus/opentelemetry/deployment/instrumentation/GrpcOpenTelemetryTest.java +++ b/extensions/opentelemetry/deployment/src/test/java/io/quarkus/opentelemetry/deployment/instrumentation/GrpcOpenTelemetryTest.java @@ -4,6 +4,8 @@ import static io.opentelemetry.api.trace.SpanKind.CLIENT; import static io.opentelemetry.api.trace.SpanKind.INTERNAL; import static io.opentelemetry.api.trace.SpanKind.SERVER; +import static io.opentelemetry.semconv.NetworkAttributes.NETWORK_PEER_ADDRESS; +import static io.opentelemetry.semconv.NetworkAttributes.NETWORK_PEER_PORT; import static io.opentelemetry.semconv.ServerAttributes.SERVER_ADDRESS; import static io.opentelemetry.semconv.ServerAttributes.SERVER_PORT; import static io.opentelemetry.semconv.incubating.RpcIncubatingAttributes.RPC_GRPC_STATUS_CODE; @@ -132,6 +134,8 @@ void grpc() { assertEquals(Status.Code.OK.value(), server.getAttributes().get(RPC_GRPC_STATUS_CODE)); assertNotNull(server.getAttributes().get(SERVER_PORT)); assertNotNull(server.getAttributes().get(SERVER_ADDRESS)); + assertNotNull(server.getAttributes().get(NETWORK_PEER_PORT)); + assertNotNull(server.getAttributes().get(NETWORK_PEER_ADDRESS)); final SpanData internal = getSpanByKindAndParentId(spans, INTERNAL, server.getSpanId()); assertEquals("span.internal", internal.getName()); diff --git a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/OpenTelemetryRecorder.java b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/OpenTelemetryRecorder.java index 852258c4a4f51..8b950ed3087ee 100644 --- a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/OpenTelemetryRecorder.java +++ b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/OpenTelemetryRecorder.java @@ -19,6 +19,7 @@ import 
io.opentelemetry.sdk.autoconfigure.AutoConfiguredOpenTelemetrySdk; import io.quarkus.arc.SyntheticCreationalContext; import io.quarkus.opentelemetry.runtime.config.runtime.OTelRuntimeConfig; +import io.quarkus.runtime.RuntimeValue; import io.quarkus.runtime.annotations.Recorder; import io.quarkus.runtime.annotations.RuntimeInit; import io.quarkus.runtime.annotations.StaticInit; @@ -38,6 +39,11 @@ public void resetGlobalOpenTelemetryForDevMode() { GlobalEventLoggerProvider.resetForTest(); } + @RuntimeInit + public RuntimeValue isOtelSdkEnabled(OTelRuntimeConfig oTelRuntimeConfig) { + return new RuntimeValue<>(!oTelRuntimeConfig.sdkDisabled()); + } + @RuntimeInit public void eagerlyCreateContextStorage() { ContextStorage.get(); @@ -156,9 +162,9 @@ public String convert(final String value) throws IllegalArgumentException, NullP } try { - return duration.toMillis() + "ms"; + return String.valueOf(duration.toMillis()).concat("ms"); } catch (Exception ignored) { - return duration.toSeconds() + "s"; + return String.valueOf(duration.toSeconds()).concat("s"); } } } diff --git a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/logs/OpenTelemetryLogHandler.java b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/logs/OpenTelemetryLogHandler.java index 382ea4af501e8..aa05b39d1cf70 100644 --- a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/logs/OpenTelemetryLogHandler.java +++ b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/logs/OpenTelemetryLogHandler.java @@ -1,18 +1,20 @@ package io.quarkus.opentelemetry.runtime.logs; -import static io.opentelemetry.semconv.ExceptionAttributes.*; +import static io.opentelemetry.semconv.ExceptionAttributes.EXCEPTION_MESSAGE; +import static io.opentelemetry.semconv.ExceptionAttributes.EXCEPTION_STACKTRACE; +import static io.opentelemetry.semconv.ExceptionAttributes.EXCEPTION_TYPE; import static io.opentelemetry.semconv.incubating.CodeIncubatingAttributes.CODE_FUNCTION; import static io.opentelemetry.semconv.incubating.CodeIncubatingAttributes.CODE_LINENO; import static io.opentelemetry.semconv.incubating.CodeIncubatingAttributes.CODE_NAMESPACE; import static io.opentelemetry.semconv.incubating.LogIncubatingAttributes.LOG_FILE_PATH; -import static io.opentelemetry.semconv.incubating.ThreadIncubatingAttributes.*; +import static io.opentelemetry.semconv.incubating.ThreadIncubatingAttributes.THREAD_ID; +import static io.opentelemetry.semconv.incubating.ThreadIncubatingAttributes.THREAD_NAME; import static io.quarkus.opentelemetry.runtime.config.build.OTelBuildConfig.INSTRUMENTATION_NAME; import java.io.PrintWriter; import java.io.StringWriter; import java.time.Instant; import java.util.Map; -import java.util.Optional; import java.util.logging.Formatter; import java.util.logging.Level; @@ -30,10 +32,19 @@ public class OpenTelemetryLogHandler extends ExtHandler { + private static final AttributeKey NAMESPACE_ATTRIBUTE_KEY = AttributeKey.stringKey("log.logger.namespace"); + private final OpenTelemetry openTelemetry; + private final boolean logFileEnabled; + private final String logFilePath; public OpenTelemetryLogHandler(final OpenTelemetry openTelemetry) { this.openTelemetry = openTelemetry; + + final Config config = ConfigProvider.getConfig(); + this.logFileEnabled = config.getOptionalValue("quarkus.log.file.enable", Boolean.class).orElse(false); + this.logFilePath = this.logFileEnabled ? 
config.getOptionalValue("quarkus.log.file.path", String.class).orElse(null) + : null; } @Override @@ -71,8 +82,7 @@ protected void doPublish(ExtLogRecord record) { attributes.put(CODE_LINENO, record.getSourceLineNumber()); attributes.put(THREAD_NAME, record.getThreadName()); attributes.put(THREAD_ID, record.getLongThreadID()); - attributes.put(AttributeKey.stringKey("log.logger.namespace"), - record.getLoggerClassName()); + attributes.put(NAMESPACE_ATTRIBUTE_KEY, record.getLoggerClassName()); final Map mdcCopy = record.getMdcCopy(); if (mdcCopy != null) { @@ -102,13 +112,9 @@ protected void doPublish(ExtLogRecord record) { } // required by spec - final Config config = ConfigProvider.getConfig(); - config.getOptionalValue("quarkus.log.file.enable", Boolean.class).ifPresent(enable -> { - Optional filePath = config.getOptionalValue("quarkus.log.file.path", String.class); - if (enable.equals(Boolean.TRUE) && filePath.isPresent()) { - attributes.put(LOG_FILE_PATH, filePath.get()); - } - }); + if (logFileEnabled && logFilePath != null) { + attributes.put(LOG_FILE_PATH, logFilePath); + } logRecordBuilder.setAllAttributes(attributes.build()); logRecordBuilder.emit(); diff --git a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/tracing/intrumentation/grpc/GrpcRequest.java b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/tracing/intrumentation/grpc/GrpcRequest.java index c1fcdc9a55f6c..e2a6c0cad24df 100644 --- a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/tracing/intrumentation/grpc/GrpcRequest.java +++ b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/tracing/intrumentation/grpc/GrpcRequest.java @@ -1,5 +1,7 @@ package io.quarkus.opentelemetry.runtime.tracing.intrumentation.grpc; +import static io.grpc.Grpc.TRANSPORT_ATTR_REMOTE_ADDR; + import java.net.SocketAddress; import io.grpc.Attributes; @@ -13,7 +15,12 @@ public static GrpcRequest server( final Metadata metadata, final Attributes attributes, final String authority) { - return new GrpcRequest(methodDescriptor, metadata, attributes, null, authority); + + return new GrpcRequest(methodDescriptor, + metadata, + attributes, + attributes == null ? 
null : attributes.get(TRANSPORT_ATTR_REMOTE_ADDR), + authority); } public static GrpcRequest client(final MethodDescriptor methodDescriptor, String authority) { diff --git a/extensions/panache/mongodb-panache-common/runtime/src/main/java/io/quarkus/mongodb/panache/common/runtime/BeanUtils.java b/extensions/panache/mongodb-panache-common/runtime/src/main/java/io/quarkus/mongodb/panache/common/runtime/BeanUtils.java index 7443ef8801767..0a87f4ddc86b3 100644 --- a/extensions/panache/mongodb-panache-common/runtime/src/main/java/io/quarkus/mongodb/panache/common/runtime/BeanUtils.java +++ b/extensions/panache/mongodb-panache-common/runtime/src/main/java/io/quarkus/mongodb/panache/common/runtime/BeanUtils.java @@ -62,15 +62,15 @@ public static T clientFromArc(MongoEntity entity, public static String getDatabaseName(MongoEntity mongoEntity, String clientBeanName) { MongoClients mongoClients = Arc.container().instance(MongoClients.class).get(); MongoClientConfig matchingMongoClientConfig = mongoClients.getMatchingMongoClientConfig(clientBeanName); - if (matchingMongoClientConfig.database.isPresent()) { - return matchingMongoClientConfig.database.get(); + if (matchingMongoClientConfig.database().isPresent()) { + return matchingMongoClientConfig.database().get(); } if (!clientBeanName.equals(MongoClientBeanUtil.DEFAULT_MONGOCLIENT_NAME)) { MongoClientConfig defaultMongoClientConfig = mongoClients .getMatchingMongoClientConfig(MongoClientBeanUtil.DEFAULT_MONGOCLIENT_NAME); - if (defaultMongoClientConfig.database.isPresent()) { - return defaultMongoClientConfig.database.get(); + if (defaultMongoClientConfig.database().isPresent()) { + return defaultMongoClientConfig.database().get(); } } diff --git a/extensions/picocli/deployment/pom.xml b/extensions/picocli/deployment/pom.xml index bd3e6a193c9a0..96983bfd2116b 100644 --- a/extensions/picocli/deployment/pom.xml +++ b/extensions/picocli/deployment/pom.xml @@ -54,9 +54,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/picocli/deployment/src/main/java/io/quarkus/picocli/deployment/PicocliDeploymentConfiguration.java b/extensions/picocli/deployment/src/main/java/io/quarkus/picocli/deployment/PicocliDeploymentConfiguration.java deleted file mode 100644 index b68fa90a57e44..0000000000000 --- a/extensions/picocli/deployment/src/main/java/io/quarkus/picocli/deployment/PicocliDeploymentConfiguration.java +++ /dev/null @@ -1,29 +0,0 @@ -package io.quarkus.picocli.deployment; - -import io.quarkus.runtime.annotations.ConfigItem; -import io.quarkus.runtime.annotations.ConfigRoot; - -@ConfigRoot(name = "picocli") -class PicocliDeploymentConfiguration { - - /** - *

- * Set this to false to use the `picocli-codegen` annotation processor instead of build steps.
- *
- * CAUTION: this will have serious build-time performance impact since
- * this is run on every restart in dev mode, use with care!
- *
- * This property is intended to be used only in cases where an incompatible change in the
- * picocli library causes problems in the build steps used to support GraalVM Native images.
- *
- * In such cases this property allows users to make the trade-off between fast build cycles
- * with the older version of picocli, and temporarily accept slower build cycles with
- * the latest version of picocli until the updated extension is available.
    - */ - @ConfigItem(name = "native-image.processing.enable", defaultValue = "true") - boolean nativeImageProcessingEnabled; -} diff --git a/extensions/picocli/runtime/pom.xml b/extensions/picocli/runtime/pom.xml index ed85efc9f4963..54d4bb1cea4bf 100644 --- a/extensions/picocli/runtime/pom.xml +++ b/extensions/picocli/runtime/pom.xml @@ -64,9 +64,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/picocli/runtime/src/main/java/io/quarkus/picocli/runtime/DefaultPicocliCommandLineFactory.java b/extensions/picocli/runtime/src/main/java/io/quarkus/picocli/runtime/DefaultPicocliCommandLineFactory.java index f87f7b27c6664..2bc13913bd070 100644 --- a/extensions/picocli/runtime/src/main/java/io/quarkus/picocli/runtime/DefaultPicocliCommandLineFactory.java +++ b/extensions/picocli/runtime/src/main/java/io/quarkus/picocli/runtime/DefaultPicocliCommandLineFactory.java @@ -32,8 +32,9 @@ private Class classForName(String name) { @Override public CommandLine create() { - String topCommandName = picocliConfiguration.topCommand.orElse(null); - if (topCommandName != null) { + if (picocliConfiguration.topCommand().isPresent()) { + String topCommandName = picocliConfiguration.topCommand().get(); + Instance namedTopCommand = topCommand.select(NamedLiteral.of(topCommandName)); if (namedTopCommand.isResolvable()) { return new CommandLine(namedTopCommand.get(), picocliFactory); diff --git a/extensions/picocli/runtime/src/main/java/io/quarkus/picocli/runtime/PicocliConfiguration.java b/extensions/picocli/runtime/src/main/java/io/quarkus/picocli/runtime/PicocliConfiguration.java index e1236af774024..5161f2dd07128 100644 --- a/extensions/picocli/runtime/src/main/java/io/quarkus/picocli/runtime/PicocliConfiguration.java +++ b/extensions/picocli/runtime/src/main/java/io/quarkus/picocli/runtime/PicocliConfiguration.java @@ -2,17 +2,17 @@ import java.util.Optional; -import io.quarkus.runtime.annotations.ConfigItem; import io.quarkus.runtime.annotations.ConfigPhase; import io.quarkus.runtime.annotations.ConfigRoot; +import io.smallrye.config.ConfigMapping; @ConfigRoot(phase = ConfigPhase.RUN_TIME) -public class PicocliConfiguration { +@ConfigMapping(prefix = "quarkus.picocli") +public interface PicocliConfiguration { /** * Name of bean annotated with {@link io.quarkus.picocli.runtime.annotations.TopCommand} * or FQCN of class which will be used as entry point for Picocli CommandLine instance. * This class needs to be annotated with {@link picocli.CommandLine.Command}. 
*/ - @ConfigItem - public Optional topCommand; + Optional topCommand(); } diff --git a/extensions/pom.xml b/extensions/pom.xml index 7f86c1174b30f..9c1c39c78d681 100644 --- a/extensions/pom.xml +++ b/extensions/pom.xml @@ -51,6 +51,7 @@ smallrye-fault-tolerance micrometer micrometer-registry-prometheus + micrometer-opentelemetry opentelemetry info observability-devservices @@ -145,6 +146,7 @@ oidc-client-reactive-filter oidc-client-registration oidc-client-graphql + oidc-token-propagation-common oidc-token-propagation oidc-token-propagation-reactive oidc-db-token-state-manager diff --git a/extensions/qute/deployment/pom.xml b/extensions/qute/deployment/pom.xml index 893c79f081a83..bb8d8d92415bc 100644 --- a/extensions/qute/deployment/pom.xml +++ b/extensions/qute/deployment/pom.xml @@ -80,9 +80,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/qute/deployment/src/main/java/io/quarkus/qute/deployment/MessageBundleProcessor.java b/extensions/qute/deployment/src/main/java/io/quarkus/qute/deployment/MessageBundleProcessor.java index 4336adb550da2..84f9fa9c85ef4 100644 --- a/extensions/qute/deployment/src/main/java/io/quarkus/qute/deployment/MessageBundleProcessor.java +++ b/extensions/qute/deployment/src/main/java/io/quarkus/qute/deployment/MessageBundleProcessor.java @@ -548,7 +548,7 @@ public String apply(String id) { TemplateAnalysis templateAnalysis = exprEntry.getKey(); String path = templateAnalysis.path; - for (String suffix : config.suffixes) { + for (String suffix : config.suffixes()) { if (path.endsWith(suffix)) { path = path.substring(0, path.length() - (suffix.length() + 1)); break; diff --git a/extensions/qute/deployment/src/main/java/io/quarkus/qute/deployment/QuteDevModeProcessor.java b/extensions/qute/deployment/src/main/java/io/quarkus/qute/deployment/QuteDevModeProcessor.java index 4baf3b0756616..40cce54b35177 100644 --- a/extensions/qute/deployment/src/main/java/io/quarkus/qute/deployment/QuteDevModeProcessor.java +++ b/extensions/qute/deployment/src/main/java/io/quarkus/qute/deployment/QuteDevModeProcessor.java @@ -1,15 +1,26 @@ package io.quarkus.qute.deployment; +import java.lang.reflect.Modifier; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.function.Predicate; +import org.jboss.jandex.DotName; + +import io.quarkus.arc.deployment.GeneratedBeanBuildItem; +import io.quarkus.arc.deployment.GeneratedBeanGizmoAdaptor; import io.quarkus.arc.deployment.ValidationPhaseBuildItem.ValidationErrorBuildItem; import io.quarkus.deployment.IsDevelopment; import io.quarkus.deployment.annotations.BuildProducer; import io.quarkus.deployment.annotations.BuildStep; import io.quarkus.deployment.annotations.BuildSteps; +import io.quarkus.deployment.builditem.ApplicationIndexBuildItem; import io.quarkus.dev.console.DevConsoleManager; +import io.quarkus.gizmo.ClassCreator; +import io.quarkus.gizmo.FieldCreator; +import io.quarkus.gizmo.MethodCreator; +import io.quarkus.qute.TemplateGlobal; import io.quarkus.qute.runtime.devmode.QuteErrorPageSetup; @BuildSteps(onlyIf = IsDevelopment.class) @@ -28,4 +39,33 @@ void collectGeneratedContents(List templatePaths, DevConsoleManager.setGlobal(QuteErrorPageSetup.GENERATED_CONTENTS, contents); } + // This build step is only used to for a QuarkusDevModeTest that contains the QuteDummyTemplateGlobalMarker interface + @BuildStep + void generateTestTemplateGlobal(ApplicationIndexBuildItem applicationIndex, + BuildProducer generatedBeanClasses) { + if 
(applicationIndex.getIndex().getClassByName( + DotName.createSimple("io.quarkus.qute.deployment.devmode.QuteDummyTemplateGlobalMarker")) != null) { + // If the marker interface is present then we generate a dummy class annotated with @TemplateGlobal + GeneratedBeanGizmoAdaptor gizmoAdaptor = new GeneratedBeanGizmoAdaptor(generatedBeanClasses, + new Predicate() { + @Override + public boolean test(String t) { + return false; + } + }); + try (ClassCreator classCreator = ClassCreator.builder().className("org.acme.qute.test.QuteDummyGlobals") + .classOutput(gizmoAdaptor).build()) { + classCreator.addAnnotation(TemplateGlobal.class); + + FieldCreator quteDummyFoo = classCreator.getFieldCreator("quteDummyFoo", String.class); + quteDummyFoo.setModifiers(Modifier.STATIC); + + MethodCreator staticInitializer = classCreator.getMethodCreator("", void.class); + staticInitializer.setModifiers(Modifier.STATIC); + staticInitializer.writeStaticField(quteDummyFoo.getFieldDescriptor(), staticInitializer.load("bar")); + staticInitializer.returnVoid(); + } + } + } + } diff --git a/extensions/qute/deployment/src/main/java/io/quarkus/qute/deployment/QuteProcessor.java b/extensions/qute/deployment/src/main/java/io/quarkus/qute/deployment/QuteProcessor.java index ae41dc3ee4c79..8b533481e5002 100644 --- a/extensions/qute/deployment/src/main/java/io/quarkus/qute/deployment/QuteProcessor.java +++ b/extensions/qute/deployment/src/main/java/io/quarkus/qute/deployment/QuteProcessor.java @@ -24,6 +24,7 @@ import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; +import java.util.LinkedHashMap; import java.util.List; import java.util.ListIterator; import java.util.Map; @@ -801,7 +802,7 @@ public void beforeParsing(ParserHelper parserHelper) { } private String templatePathWithoutSuffix(String path, QuteConfig config) { - for (String suffix : config.suffixes) { + for (String suffix : config.suffixes()) { if (path.endsWith(suffix)) { // Remove the suffix path = path.substring(0, path.length() - (suffix.length() + 1)); @@ -910,7 +911,7 @@ void validateCheckedFragments(List validatio @BuildStep(onlyIf = IsTest.class) SyntheticBeanBuildItem registerRenderedResults(QuteConfig config) { - if (config.testMode.recordRenderedResults) { + if (config.testMode().recordRenderedResults()) { return SyntheticBeanBuildItem.configure(RenderedResults.class) .unremovable() .scope(Singleton.class) @@ -1130,7 +1131,7 @@ private CheckedTemplateBuildItem findCheckedTemplate(QuteConfig config, Template List checkedTemplates) { // Try to find the checked template String path = analysis.path; - for (String suffix : config.suffixes) { + for (String suffix : config.suffixes()) { if (path.endsWith(suffix)) { path = path.substring(0, path.length() - (suffix.length() + 1)); break; @@ -1637,19 +1638,19 @@ private static boolean isInvalidCheckedTemplateExpression(QuteConfig config, Che if (!expression.hasNamespace() && expression.getParts().size() == 1 && ITERATION_METADATA_KEYS.contains(expression.getParts().get(0).getName())) { String prefixInfo; - if (config.iterationMetadataPrefix + if (config.iterationMetadataPrefix() .equals(LoopSectionHelper.Factory.ITERATION_METADATA_PREFIX_ALIAS_UNDERSCORE)) { prefixInfo = String.format( "based on the iteration alias, i.e. 
the correct key should be something like {it_%1$s} or {element_%1$s}", expression.getParts().get(0).getName()); - } else if (config.iterationMetadataPrefix + } else if (config.iterationMetadataPrefix() .equals(LoopSectionHelper.Factory.ITERATION_METADATA_PREFIX_ALIAS_QM)) { prefixInfo = String.format( "based on the iteration alias, i.e. the correct key should be something like {it?%1$s} or {element?%1$s}", expression.getParts().get(0).getName()); } else { - prefixInfo = ": " + config.iterationMetadataPrefix + ", i.e. the correct key should be: " - + config.iterationMetadataPrefix + expression.getParts().get(0).getName(); + prefixInfo = ": " + config.iterationMetadataPrefix() + ", i.e. the correct key should be: " + + config.iterationMetadataPrefix() + expression.getParts().get(0).getName(); } incorrectExpressions.produce(new IncorrectExpressionBuildItem(expression.toOriginalString(), "An invalid iteration metadata key is probably used\n\t- The configured iteration metadata prefix is " @@ -2094,23 +2095,46 @@ public Function apply(ClassInfo clazz) { } if (!templateGlobals.isEmpty()) { - TemplateGlobalGenerator globalGenerator = new TemplateGlobalGenerator(classOutput, GLOBAL_NAMESPACE, -1000, index); - - Map> classToTargets = new HashMap<>(); + Set generatedGlobals = new HashSet<>(); + // The classes for non-application globals are only generated during the first run because they can't be reloaded + // by the class loader during hot reload + // However, we need to make sure the priorities used by non-application globals do not conflict + // with priorities of application globals that are regenerated during each hot reload + // Therefore, the initial priority is increased by the number of all globals ever found + // For example, if there are three globals [A, B, C] (A and C are non-application classes) + // The intial priority during the first hot reload will be "-1000 + 3 = 997" + // If a global D is added afterwards, the initial priority during the subsequent hot reload will be "-1000 + 4 = 996" + // If the global D is removed, the initial priority will still remain "-1000 + 4 = 996" + // This way we can be sure that the priorities assigned to A and C will never conflict with priorities of B and D or any other application global class + int initialPriority = -1000 + existingValueResolvers.allGlobals.size(); + + TemplateGlobalGenerator globalGenerator = new TemplateGlobalGenerator(classOutput, GLOBAL_NAMESPACE, + initialPriority, index); + + Map> classToTargets = new LinkedHashMap<>(); Map> classToGlobals = templateGlobals.stream() - .collect(Collectors.groupingBy(TemplateGlobalBuildItem::getDeclaringClass)); + .sorted(Comparator.comparing(g -> g.getDeclaringClass())) + .collect(Collectors.groupingBy(TemplateGlobalBuildItem::getDeclaringClass, LinkedHashMap::new, + Collectors.toList())); for (Entry> entry : classToGlobals.entrySet()) { classToTargets.put(entry.getKey(), entry.getValue().stream().collect( Collectors.toMap(TemplateGlobalBuildItem::getName, TemplateGlobalBuildItem::getTarget))); } for (Entry> e : classToTargets.entrySet()) { - globalGenerator.generate(index.getClassByName(e.getKey()), e.getValue()); + String generatedClass = existingValueResolvers.getGeneratedGlobalClass(e.getKey()); + if (generatedClass != null) { + generatedGlobals.add(generatedClass); + } else { + generatedClass = globalGenerator.generate(index.getClassByName(e.getKey()), e.getValue()); + } + existingValueResolvers.addGlobal(e.getKey(), generatedClass, applicationClassPredicate); } + 
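The comment above derives the initial priority for non-application template globals from the number of globals ever recorded (note the arithmetic: -1000 + 3 is -997, and -1000 + 4 is -996). A minimal self-contained sketch of that bookkeeping, with simplified names since the real state lives on ExistingValueResolvers inside the build step:

import java.util.HashSet;
import java.util.Set;

// Simplified sketch of the priority bookkeeping described in the comment above.
// Three globals ever seen -> -1000 + 3 = -997; a fourth -> -1000 + 4 = -996;
// removing a global later never lowers the count, so previously assigned
// priorities for non-application globals can never collide with new ones.
class TemplateGlobalPriorities {

    static final int BASE_PRIORITY = -1000;

    // every generated global class ever seen, application or not
    private final Set<String> allGlobals = new HashSet<>();

    void record(String generatedClassName) {
        allGlobals.add(generatedClassName);
    }

    int initialPriority() {
        return BASE_PRIORITY + allGlobals.size();
    }
}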
generatedGlobals.addAll(globalGenerator.getGeneratedTypes()); - for (String generatedType : globalGenerator.getGeneratedTypes()) { - globalProviders.produce(new TemplateGlobalProviderBuildItem(generatedType)); - reflectiveClass.produce(ReflectiveClassBuildItem.builder(generatedType).build()); + for (String globalType : generatedGlobals) { + globalProviders.produce(new TemplateGlobalProviderBuildItem(globalType)); + reflectiveClass.produce(ReflectiveClassBuildItem.builder(globalType).build()); } } } @@ -2123,11 +2147,20 @@ static class ExistingValueResolvers { final Map identifiersToGeneratedClass = new HashMap<>(); + // class declaring globals -> generated type; non-application globals only + final Map globals = new HashMap<>(); + + final Set allGlobals = new HashSet<>(); + boolean contains(MethodInfo extensionMethod) { return identifiersToGeneratedClass .containsKey(toKey(extensionMethod)); } + String getGeneratedGlobalClass(DotName declaringClassName) { + return globals.get(declaringClassName.toString()); + } + String getGeneratedClass(MethodInfo extensionMethod) { return identifiersToGeneratedClass.get(toKey(extensionMethod)); } @@ -2138,6 +2171,12 @@ void add(MethodInfo extensionMethod, String className, Predicate applic } } + void addGlobal(DotName declaringClassName, String generatedClassName, Predicate applicationClassPredicate) { + if (allGlobals.add(generatedClassName.toString()) && !applicationClassPredicate.test(declaringClassName)) { + globals.put(declaringClassName.toString(), generatedClassName); + } + } + private String toKey(MethodInfo extensionMethod) { return extensionMethod.declaringClass().toString() + "#" + extensionMethod.toString(); } @@ -2202,7 +2241,7 @@ private void scanPathTree(PathTree pathTree, TemplateRootsBuildItem templateRoot // remove templateRoot + / final String relativePath = visit.getRelativePath(); String templatePath = relativePath.substring(templateRoot.length() + 1); - if (config.templatePathExclude.matcher(templatePath).matches()) { + if (config.templatePathExclude().matcher(templatePath).matches()) { LOGGER.debugf("Template file excluded: %s", visit.getPath()); return; } @@ -2222,7 +2261,7 @@ TemplateFilePathsBuildItem collectTemplateFilePaths(QuteConfig config, List> excludes) { diff --git a/extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/devmode/ExistingValueResolversDevModeTest.java b/extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/devmode/ExistingValueResolversDevModeTest.java index d8069523c9d12..942e8f8372c43 100644 --- a/extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/devmode/ExistingValueResolversDevModeTest.java +++ b/extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/devmode/ExistingValueResolversDevModeTest.java @@ -20,7 +20,7 @@ public class ExistingValueResolversDevModeTest { .addClass(TestRoute.class) .addAsResource(new StringAsset( "{#let a = 3}{#let b = a.minus(2)}b={b}{/}{/}"), - "templates/let.html")); + "templates/test.html")); @Test public void testExistingValueResolvers() { @@ -29,7 +29,7 @@ public void testExistingValueResolvers() { .statusCode(200) .body(Matchers.equalTo("b=1")); - config.modifyResourceFile("templates/let.html", t -> t.concat("::MODIFIED")); + config.modifyResourceFile("templates/test.html", t -> t.concat("::MODIFIED")); given().get("test") .then() diff --git a/extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/devmode/QuteDummyTemplateGlobalMarker.java 
b/extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/devmode/QuteDummyTemplateGlobalMarker.java new file mode 100644 index 0000000000000..c2beb9f787d9d --- /dev/null +++ b/extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/devmode/QuteDummyTemplateGlobalMarker.java @@ -0,0 +1,8 @@ +package io.quarkus.qute.deployment.devmode; + +/** + * Marker interface for {@link TemplateGlobalDevModeTest}. + */ +public interface QuteDummyTemplateGlobalMarker { + +} diff --git a/extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/devmode/TemplateGlobalDevModeTest.java b/extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/devmode/TemplateGlobalDevModeTest.java new file mode 100644 index 0000000000000..d3d57177743ea --- /dev/null +++ b/extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/devmode/TemplateGlobalDevModeTest.java @@ -0,0 +1,53 @@ +package io.quarkus.qute.deployment.devmode; + +import static io.restassured.RestAssured.given; + +import org.hamcrest.Matchers; +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.RegisterExtension; + +import io.quarkus.qute.TemplateGlobal; +import io.quarkus.test.QuarkusDevModeTest; + +/** + * Test that template globals added in the dev mode are generated correctly after live reload. + *

    + * The {@link QuteDummyTemplateGlobalMarker} is used to identify an application archive where a dummy built-in class with + * template globals is added. + */ +public class TemplateGlobalDevModeTest { + + @RegisterExtension + static final QuarkusDevModeTest config = new QuarkusDevModeTest() + .withApplicationRoot(root -> root + .addClasses(TestRoute.class, MyGlobals.class, QuteDummyTemplateGlobalMarker.class) + .addAsResource(new StringAsset( + "{foo}:{quteDummyFoo}:{testFoo ?: 'NA'}"), + "templates/test.html")); + + @Test + public void testTemplateGlobals() { + given().get("test") + .then() + .statusCode(200) + .body(Matchers.equalTo("24:bar:NA")); + + // Add application globals - the priority sequence should be automatically + // increased before it's used for TestGlobals + config.addSourceFile(TestGlobals.class); + + given().get("test") + .then() + .statusCode(200) + .body(Matchers.equalTo("24:bar:baz")); + } + + @TemplateGlobal + public static class MyGlobals { + + public static int foo = 24; + + } + +} diff --git a/extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/devmode/TestGlobals.java b/extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/devmode/TestGlobals.java new file mode 100644 index 0000000000000..8a432c9533123 --- /dev/null +++ b/extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/devmode/TestGlobals.java @@ -0,0 +1,11 @@ +package io.quarkus.qute.deployment.devmode; + +import io.quarkus.qute.TemplateGlobal; + +@TemplateGlobal +public class TestGlobals { + + public static String testFoo() { + return "baz"; + } +} diff --git a/extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/devmode/TestRoute.java b/extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/devmode/TestRoute.java index 15fbd2054ded6..06107c21c5441 100644 --- a/extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/devmode/TestRoute.java +++ b/extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/devmode/TestRoute.java @@ -9,11 +9,11 @@ public class TestRoute { @Inject - Template let; + Template test; @Route(path = "test") public void test(RoutingContext ctx) { - ctx.end(let.render()); + ctx.end(test.render()); } } diff --git a/extensions/qute/runtime/pom.xml b/extensions/qute/runtime/pom.xml index 83a329d2a9ca5..fb04f6aa86ff8 100644 --- a/extensions/qute/runtime/pom.xml +++ b/extensions/qute/runtime/pom.xml @@ -57,9 +57,6 @@ ${project.version} - - -AlegacyConfigRoot=true - diff --git a/extensions/qute/runtime/src/main/java/io/quarkus/qute/runtime/ContentTypes.java b/extensions/qute/runtime/src/main/java/io/quarkus/qute/runtime/ContentTypes.java index cb62d34d281f0..5cb933b3f585c 100644 --- a/extensions/qute/runtime/src/main/java/io/quarkus/qute/runtime/ContentTypes.java +++ b/extensions/qute/runtime/src/main/java/io/quarkus/qute/runtime/ContentTypes.java @@ -31,7 +31,7 @@ public String getContentType(String templatePath) { int dotIdx = fileName.lastIndexOf('.'); if (dotIdx != -1) { String suffix = fileName.substring(dotIdx + 1, fileName.length()); - String additionalContentType = config.contentTypes.get(suffix); + String additionalContentType = config.contentTypes().get(suffix); if (additionalContentType != null) { return additionalContentType; } diff --git a/extensions/qute/runtime/src/main/java/io/quarkus/qute/runtime/EngineProducer.java b/extensions/qute/runtime/src/main/java/io/quarkus/qute/runtime/EngineProducer.java index aeba46a8cd3d8..4192873666210 100644 --- 
a/extensions/qute/runtime/src/main/java/io/quarkus/qute/runtime/EngineProducer.java +++ b/extensions/qute/runtime/src/main/java/io/quarkus/qute/runtime/EngineProducer.java @@ -93,13 +93,13 @@ public EngineProducer(QuteContext context, QuteConfig config, QuteRuntimeConfig @All List> sectionHelperFactories, @All List valueResolvers, @All List namespaceResolvers, @All List parserHooks) { this.contentTypes = contentTypes; - this.suffixes = config.suffixes; + this.suffixes = config.suffixes(); this.templateRoots = context.getTemplateRoots(); this.templateContents = Map.copyOf(context.getTemplateContents()); this.tags = context.getTags(); - this.templatePathExclude = config.templatePathExclude; + this.templatePathExclude = config.templatePathExclude(); this.defaultLocale = locales.defaultLocale().orElse(Locale.getDefault()); - this.defaultCharset = config.defaultCharset; + this.defaultCharset = config.defaultCharset(); this.container = Arc.container(); LOGGER.debugf("Initializing Qute [templates: %s, tags: %s, resolvers: %s", context.getTemplatePaths(), tags, @@ -127,13 +127,13 @@ public EngineProducer(QuteContext context, QuteConfig config, QuteRuntimeConfig } // Enable/disable strict rendering - if (runtimeConfig.strictRendering) { + if (runtimeConfig.strictRendering()) { builder.strictRendering(true); } else { builder.strictRendering(false); // If needed, use a specific result mapper for the selected strategy - if (runtimeConfig.propertyNotFoundStrategy.isPresent()) { - switch (runtimeConfig.propertyNotFoundStrategy.get()) { + if (runtimeConfig.propertyNotFoundStrategy().isPresent()) { + switch (runtimeConfig.propertyNotFoundStrategy().get()) { case THROW_EXCEPTION: builder.addResultMapper(new PropertyNotFoundThrowException()); break; @@ -156,7 +156,7 @@ public EngineProducer(QuteContext context, QuteConfig config, QuteRuntimeConfig } // Escape some characters for HTML/XML templates - builder.addResultMapper(new HtmlEscaper(List.copyOf(config.escapeContentTypes))); + builder.addResultMapper(new HtmlEscaper(List.copyOf(config.escapeContentTypes()))); // Escape some characters for JSON templates builder.addResultMapper(new JsonEscaper()); @@ -165,10 +165,10 @@ public EngineProducer(QuteContext context, QuteConfig config, QuteRuntimeConfig builder.addValueResolver(new ReflectionValueResolver()); // Remove standalone lines if desired - builder.removeStandaloneLines(runtimeConfig.removeStandaloneLines); + builder.removeStandaloneLines(runtimeConfig.removeStandaloneLines()); // Iteration metadata prefix - builder.iterationMetadataPrefix(config.iterationMetadataPrefix); + builder.iterationMetadataPrefix(config.iterationMetadataPrefix()); // Default section helpers builder.addDefaultSectionHelpers(); @@ -257,8 +257,8 @@ public void run() { } }); - builder.timeout(runtimeConfig.timeout); - builder.useAsyncTimeout(runtimeConfig.useAsyncTimeout); + builder.timeout(runtimeConfig.timeout()); + builder.useAsyncTimeout(runtimeConfig.useAsyncTimeout()); engine = builder.build(); @@ -267,7 +267,7 @@ public void run() { for (String path : context.getTemplatePaths()) { Template template = engine.getTemplate(path); if (template != null) { - for (String suffix : config.suffixes) { + for (String suffix : config.suffixes()) { if (path.endsWith(suffix)) { String pathNoSuffix = path.substring(0, path.length() - (suffix.length() + 1)); List
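The Picocli and Qute changes in this section share one migration: class-based config roots with public @ConfigItem fields become @ConfigMapping interfaces, so call sites move from field access (config.suffixes, runtimeConfig.timeout) to accessor methods (config.suffixes(), runtimeConfig.timeout()). A minimal sketch of the target style, using a hypothetical prefix and property name rather than any real Quarkus config:

import java.util.Optional;

import io.quarkus.runtime.annotations.ConfigPhase;
import io.quarkus.runtime.annotations.ConfigRoot;
import io.smallrye.config.ConfigMapping;

// Hypothetical example of the interface-based style used throughout this patch;
// "quarkus.example" and greeting() are illustrative names, not part of the change set.
@ConfigRoot(phase = ConfigPhase.RUN_TIME)
@ConfigMapping(prefix = "quarkus.example")
public interface ExampleConfig {

    /**
     * An optional greeting, bound to quarkus.example.greeting.
     */
    Optional<String> greeting();
}

Injecting beans then call exampleConfig.greeting().orElse("hello") instead of reading a public field, which is the same shape as the EngineProducer and DefaultPicocliCommandLineFactory updates above.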