diff --git a/.github/workflows/ci-actions-incremental.yml b/.github/workflows/ci-actions-incremental.yml index 643508e23fc04..c8444fdfa9c07 100644 --- a/.github/workflows/ci-actions-incremental.yml +++ b/.github/workflows/ci-actions-incremental.yml @@ -154,7 +154,7 @@ jobs: run: | echo -n ${{ github.event.number }} > pull-request-number - name: Upload pull request number - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 with: name: pull-request-number-${{ github.event.number }} path: pull-request-number @@ -305,7 +305,7 @@ jobs: run: echo "${{ github.sha }}" > main-last-build-sha.txt - name: Upload build SHA artifact if: github.event_name == 'schedule' && github.ref_name == 'main' && github.repository == 'quarkusio/quarkus' - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 with: name: main-last-build-sha path: main-last-build-sha.txt @@ -452,7 +452,7 @@ jobs: tar -czf m2-content.tgz -C ~ .m2/repository/io/quarkus fi - name: Upload .m2 content pushed to subsequent jobs - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 with: name: m2-content path: m2-content.tgz @@ -467,7 +467,7 @@ jobs: 'target/gradle-build-scan-url.txt' \ LICENSE - name: Upload build reports - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 if: always() with: name: "build-reports-${{ github.run_attempt }}-Initial JDK 17 Build" @@ -663,7 +663,7 @@ jobs: if: failure() run: find . 
-name '*-reports' -type d -o -name '*.log' | tar -czf test-reports.tgz -T - - name: Upload failure Archive (if maven failed) - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 if: failure() with: name: test-reports-${{matrix.java.name}} @@ -678,7 +678,7 @@ jobs: 'target/gradle-build-scan-url.txt' \ LICENSE - name: Upload build reports - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 if: always() with: name: "build-reports-${{ github.run_attempt }}-${{ matrix.java.name }}" @@ -686,7 +686,7 @@ jobs: build-reports.zip retention-days: 7 - name: Upload test-produced debug dumps - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 # We need this as soon as there's a matching file # -- even in case of success, as some flaky tests won't fail the build if: always() @@ -696,7 +696,7 @@ jobs: if-no-files-found: ignore # If we're not currently debugging any test, it's fine. retention-days: 28 # We don't get notified for flaky tests, so let's give maintainers time to get back to it - name: Upload build.log (if build failed) - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 if: ${{ failure() || cancelled() }} with: name: "build-logs-${{ matrix.java.name }}" @@ -797,7 +797,7 @@ jobs: if: failure() run: find . 
-name '*-reports' -type d -o -name '*.log' | tar -czf test-reports.tgz -T - - name: Upload failure Archive (if maven failed) - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 if: failure() with: name: test-reports-maven-java${{matrix.java.name}} @@ -819,7 +819,7 @@ jobs: 'target/gradle-build-scan-url.txt' \ LICENSE - name: Upload build reports - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 if: always() with: name: "build-reports-${{ github.run_attempt }}-Maven Tests - JDK ${{matrix.java.name}}" @@ -916,7 +916,7 @@ jobs: 'target/gradle-build-scan-url.txt' \ LICENSE - name: Upload build reports - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 if: always() with: name: "build-reports-${{ github.run_attempt }}-Gradle Tests - JDK ${{matrix.java.name}}" @@ -1010,7 +1010,7 @@ jobs: if: failure() run: find . -name '*-reports' -type d -o -name '*.log' | tar -czf test-reports.tgz -T - - name: Upload failure Archive (if maven failed) - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 if: failure() with: name: test-reports-devtools-java${{matrix.java.name}} @@ -1025,7 +1025,7 @@ jobs: 'target/gradle-build-scan-url.txt' \ LICENSE - name: Upload build reports - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 if: always() with: name: "build-reports-${{ github.run_attempt }}-Devtools Tests - JDK ${{matrix.java.name}}" @@ -1119,7 +1119,7 @@ jobs: if: failure() run: find . 
-name '*-reports' -type d -o -name '*.log' | tar -czf test-reports.tgz -T - - name: Upload failure Archive (if maven failed) - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 if: failure() with: name: test-reports-kubernetes-java${{matrix.java.name}} @@ -1134,7 +1134,7 @@ jobs: 'target/gradle-build-scan-url.txt' \ LICENSE - name: Upload build reports - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 if: always() with: name: "build-reports-${{ github.run_attempt }}-Kubernetes Tests - JDK ${{matrix.java.name}}" @@ -1233,7 +1233,7 @@ jobs: 'quarkus-quickstarts/target/build-report.json' \ 'quarkus-quickstarts/LICENSE' \ - name: Upload build reports - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 if: always() with: name: "build-reports-${{ github.run_attempt }}-Quickstarts Compilation - JDK ${{matrix.java.name}}" @@ -1332,7 +1332,7 @@ jobs: 'quarkus-platform/target/build-report.json' \ 'quarkus-platform/LICENSE.txt' \ - name: Upload build reports - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 if: always() with: name: "build-reports-${{ github.run_attempt }}-Platform Tests - JDK ${{matrix.java.name}}" @@ -1413,7 +1413,7 @@ jobs: 'integration-tests/**/target/gradle-build-scan-url.txt' \ LICENSE - name: Upload build reports - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 if: always() with: name: "build-reports-${{ github.run_attempt }}-Virtual Thread Support Tests Native - ${{matrix.category}}" @@ -1487,7 
+1487,7 @@ jobs: if: failure() run: find . -name '*-reports' -type d -o -name '*.log' | tar -czf test-reports.tgz -T - - name: Upload failure Archive (if maven failed) - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 if: failure() with: name: test-reports-tcks @@ -1503,7 +1503,7 @@ jobs: 'target/gradle-build-scan-url.txt' \ LICENSE - name: Upload build reports - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 if: always() with: name: "build-reports-${{ github.run_attempt }}-MicroProfile TCKs Tests" @@ -1621,7 +1621,7 @@ jobs: if: failure() run: find . -type d -name '*-reports' -o -wholename '*/build/reports/tests/functionalTest' -o -name '*.log' | tar -czf test-reports.tgz -T - - name: Upload failure Archive (if maven failed) - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 if: failure() with: name: test-reports-native-${{matrix.category}} @@ -1637,7 +1637,7 @@ jobs: 'target/gradle-build-scan-url.txt' \ LICENSE - name: Upload build reports - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 if: always() with: name: "build-reports-${{ github.run_attempt }}-Native Tests - ${{matrix.category}}" @@ -1648,7 +1648,7 @@ jobs: shell: bash run: find . 
-name '*runner*.json' | tar czvf build-stats.tgz -T - - name: Upload build JSON stats - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 with: name: build-stats-${{matrix.category}} path: 'build-stats.tgz' diff --git a/.github/workflows/develocity-publish-build-scans.yml b/.github/workflows/develocity-publish-build-scans.yml index 29c8d1e4ea029..55537ceefd06c 100644 --- a/.github/workflows/develocity-publish-build-scans.yml +++ b/.github/workflows/develocity-publish-build-scans.yml @@ -32,7 +32,7 @@ jobs: develocity-url: 'https://ge.quarkus.io' develocity-access-key: ${{ secrets.GRADLE_ENTERPRISE_ACCESS_KEY }} - name: Upload JSON file - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 with: name: build-metadata.json path: ${{ steps.setup.outputs.build-metadata-file-path }} diff --git a/.github/workflows/doc-build.yml b/.github/workflows/doc-build.yml index cedf2ba6c6a8a..7ed4ba4c952f9 100644 --- a/.github/workflows/doc-build.yml +++ b/.github/workflows/doc-build.yml @@ -103,7 +103,7 @@ jobs: run: echo ${{ github.event.number }} > pr-id.txt - name: Persist documentation - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 with: name: documentation path: | @@ -120,7 +120,7 @@ jobs: 'target/build-report.json' \ LICENSE - name: Upload build reports - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 if: always() with: name: "build-reports-${{ github.run_attempt }}-Documentation Build" diff --git a/.github/workflows/jdk-early-access-build.yml b/.github/workflows/jdk-early-access-build.yml index 110c2ce0dd5c5..c86fa9e603bfe 100644 
--- a/.github/workflows/jdk-early-access-build.yml +++ b/.github/workflows/jdk-early-access-build.yml @@ -127,7 +127,7 @@ jobs: shell: bash run: find . -name '*-reports' -type d -o -name '*.log' | tar -czf test-reports.tgz -T - - name: Upload failure Archive (if maven failed) - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 if: failure() with: name: test-reports-linux-jvm${{ matrix.version }} diff --git a/.github/workflows/native-it-selected-graalvm.yml b/.github/workflows/native-it-selected-graalvm.yml index 1d04a381ac9ec..e3af2aeb4e714 100644 --- a/.github/workflows/native-it-selected-graalvm.yml +++ b/.github/workflows/native-it-selected-graalvm.yml @@ -166,7 +166,7 @@ jobs: - name: Tar .m2/repository/io/quarkus run: tar -czf m2-io-quarkus.tgz -C ~ .m2/repository/io/quarkus - name: Upload .m2/repository/io/quarkus - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 with: name: m2-io-quarkus path: m2-io-quarkus.tgz @@ -181,7 +181,7 @@ jobs: 'target/gradle-build-scan-url.txt' \ LICENSE - name: Upload build reports - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 if: always() with: name: "build-reports-Initial JDK 17 Build" @@ -294,7 +294,7 @@ jobs: 'integration-tests/virtual-threads/target/gradle-build-scan-url.txt' \ LICENSE - name: Upload build reports - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 if: always() with: name: "build-reports-Virtual Thread Support Tests Native - ${{matrix.category}}" @@ -382,7 +382,7 @@ jobs: if: failure() run: find . 
-type d -name '*-reports' -o -wholename '*/build/reports/tests/functionalTest' -o -name '*.log' | tar -czf test-reports.tgz -T - - name: Upload failure Archive (if maven failed) - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 if: failure() with: name: test-reports-native-${{matrix.category}} @@ -398,7 +398,7 @@ jobs: 'target/gradle-build-scan-url.txt' \ LICENSE - name: Upload build reports - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 if: always() with: name: "build-reports-Native Tests - ${{matrix.category}}" diff --git a/.github/workflows/owasp-check.yml b/.github/workflows/owasp-check.yml index 453236d8d2097..9cd98240d9af5 100644 --- a/.github/workflows/owasp-check.yml +++ b/.github/workflows/owasp-check.yml @@ -55,7 +55,7 @@ jobs: - name: Perform OWASP Dependency Check Report run: ./mvnw -Dowasp-report - - uses: actions/upload-artifact@v7 + - uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 with: name: dependency-check-report path: target/dependency-check-report.html diff --git a/.github/workflows/podman-build.yml b/.github/workflows/podman-build.yml index 469b7234dc6f4..10faae2d22e7d 100644 --- a/.github/workflows/podman-build.yml +++ b/.github/workflows/podman-build.yml @@ -178,14 +178,14 @@ jobs: shell: bash run: find . 
-name '*-reports' -type d -o -name '*.log' | tar -czf test-reports.tgz -T - - name: Upload failure Archive (if maven failed) - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 if: failure() with: name: test-reports-jvm${{matrix.java.name}} path: 'test-reports.tgz' retention-days: 5 - name: Upload build reports (if build failed) - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 if: ${{ failure() || cancelled() }} with: name: "build-reports-JVM Tests - JDK ${{matrix.java.name}}" @@ -195,7 +195,7 @@ jobs: LICENSE.txt retention-days: 2 - name: Upload gc.log - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 with: name: "GC log - JDK ${{matrix.java.name}}" path: | @@ -203,7 +203,7 @@ jobs: !**/build/tmp/** retention-days: 5 - name: Upload build.log (if build failed) - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a # v7.0.1 if: ${{ failure() || cancelled() }} with: name: "build-logs-JVM Tests - JDK ${{matrix.java.name}}" diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 6cdcf80674e60..d52fd1d065451 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -13,6 +13,8 @@ fixes, documentation, examples... 
But first, read this page (including the small - [Reporting an issue](#reporting-an-issue) - [Checking an issue is fixed in main](#checking-an-issue-is-fixed-in-main) * [Using snapshots](#using-snapshots) + + [Using the GitHub Action (CI)](#using-the-github-action-ci) + + [Installing snapshots locally](#installing-snapshots-locally) * [Building main](#building-main) * [Updating the version](#updating-the-version) - [Before you contribute](#before-you-contribute) @@ -102,7 +104,7 @@ what you would expect to see. Don't forget to indicate your Quarkus, Java, Maven Sometimes a bug has been fixed in the `main` branch of Quarkus and you want to confirm it is fixed for your own application. There are two simple options for testing the `main` branch: -* either use the snapshots we publish daily on +* either use the snapshots we publish daily on * or build Quarkus locally The following is a quick summary aimed at allowing you to quickly test `main`. If you are interested in learning more details, refer to @@ -110,52 +112,41 @@ the [Build section](#build) and the [Usage section](#usage). ### Using snapshots -Snapshots are published daily with version `999-SNAPSHOT`, so you will have to wait for a snapshot containing the commits you are interested in. +Snapshots are published daily with version `999-SNAPSHOT` as GitHub Releases in the +[quarkusio/quarkus-ecosystem-ci](https://github.com/quarkusio/quarkus-ecosystem-ci/releases) repository. +Each release contains a `maven-repo.tar.gz` asset with pre-built Maven artifacts. +You will have to wait for a snapshot containing the commits you are interested in. -Then just add as a Maven repository **and** a plugin -repository in your `settings xml` (which should be placed in the `.m2` directory within your home directory): +You can browse the available snapshots at . +Releases follow the naming convention `Quarkus 999-SNAPSHOT (main) (YYYY-MM-DD)`. 
-```xml +#### Using the GitHub Action (CI) + +If you are setting up CI, you can use the +[install-quarkus-snapshots-action](https://github.com/quarkusio/install-quarkus-snapshots-action/) +to automatically download and install snapshots into your local Maven repository. + +#### Installing snapshots locally + +> [!NOTE] +> This script requires the [GitHub CLI](https://cli.github.com/) (`gh`) to be installed and properly configured. + +To install snapshots locally, run the following command to download and extract the latest snapshot +into your local Maven repository: + +```sh +curl -sL https://raw.githubusercontent.com/quarkusio/quarkus-ecosystem-ci/main/setup-quarkus | bash +``` - - - - quarkus-snapshots - - - quarkus-snapshots-repository - https://central.sonatype.com/repository/maven-snapshots/ - - false - - - true - - - - - - quarkus-snapshots-plugin-repository - https://central.sonatype.com/repository/maven-snapshots/ - - false - - - true - - - - - - - quarkus-snapshots - - -``` - -You can check the last publication date here: . +You can also specify a branch (defaults to `main`): + +```sh +curl -sL https://raw.githubusercontent.com/quarkusio/quarkus-ecosystem-ci/main/setup-quarkus | bash -s -- 3.21 +``` + +> [!TIP] +> Use `./mvnw -nsu` (or `--no-snapshot-updates`) when building your project to prevent Maven from +> trying to fetch snapshots from remote repositories, ensuring it uses the locally installed artifacts. ### Building main @@ -850,12 +841,9 @@ repositories { } ``` -**Note** Use the following definition in `repositories` section when using daily snapshot builds instead of local builds: -```gradle - maven { - url 'https://central.sonatype.com/repository/maven-snapshots/' - } -``` +**Note** When using daily snapshot builds instead of local builds, download and install the snapshots into your local Maven +repository first (see [Using snapshots](#using-snapshots)), then add `mavenLocal()` to the first position in the +`repositories` section. 
### MicroProfile TCK's diff --git a/adr/0009-extension-structure.adoc b/adr/0009-extension-structure.adoc index d13e0889aa4e3..cb1e1aa4e7eba 100644 --- a/adr/0009-extension-structure.adoc +++ b/adr/0009-extension-structure.adoc @@ -200,7 +200,10 @@ Thus, automated tooling can easily use this information to help generate a `modu Build items in this module should be documented with their intended use (produced or consumed). ==== `runtime-api` module -* `io...api`: Public runtime API. Can be used independently of the full extension. Consumers should not expect the full extension to be available at runtime. To make sure the extension is available, the consumer should use the `runtime` module (which would pull in the `runtime-api` module transitively). +* `io..`: Public runtime API. Can be used independently of the full extension. Consumers should not expect the full extension to be available at runtime. To make sure the extension is available, the consumer should use the `runtime` module (which would pull in the `runtime-api` module transitively). This is the preferred package when there is no conflict with the `runtime` module's root package. +* `io...api`: Alternative package for public runtime API. Should be used when the extension also uses the root package in the `runtime` module, to avoid split packages. + +NOTE: Once an extension has a `runtime-api` module, it should not expose public APIs in the `runtime` module. ==== `runtime-dev` module * `io...dev`: Dev-mode-only runtime classes, e.g., for Dev UI contribution. Not included in production builds. @@ -287,4 +290,8 @@ Note that this ADR is orthogonal to the existing platform BOMs and version align == Notes -This ADR is forward-looking and prescriptive for new extensions or extensions undergoing significant refactoring. It does not require retrofitting all existing extensions immediately. Tooling, documentation, and examples will progressively support the adoption of this structure. 
The goal is consistency, clarity, and better long-term modularity within the Quarkus ecosystem. \ No newline at end of file +This ADR is forward-looking and prescriptive for new extensions or extensions undergoing significant refactoring. It does not require retrofitting all existing extensions immediately. Tooling, documentation, and examples will progressively support the adoption of this structure. The goal is consistency, clarity, and better long-term modularity within the Quarkus ecosystem. + +== Updates + +* 2026-04-23: The `runtime-api` module package rule was updated to allow either `io..` (matching the `runtime` module root package) or `io...api`. The `.api` variant should be used when the root package is already used by the `runtime` module, to avoid split packages. \ No newline at end of file diff --git a/bom/application/pom.xml b/bom/application/pom.xml index 2f7c3323f139c..3e0180f3e367e 100644 --- a/bom/application/pom.xml +++ b/bom/application/pom.xml @@ -32,7 +32,7 @@ 2.26.1-alpha 1.40.0-alpha 5.4.0 - 1.16.3 + 1.16.5 2.2.2 0.22.0 25.0 @@ -216,7 +216,7 @@ 0.16.0 1.0.11 - 0.28.0.RELEASE + 0.30.3.RELEASE diff --git a/core/processor/src/main/java/io/quarkus/annotation/processor/documentation/config/formatter/JavadocToAsciidocTransformer.java b/core/processor/src/main/java/io/quarkus/annotation/processor/documentation/config/formatter/JavadocToAsciidocTransformer.java index 5ef1f9f09d4be..9e14a20058f24 100644 --- a/core/processor/src/main/java/io/quarkus/annotation/processor/documentation/config/formatter/JavadocToAsciidocTransformer.java +++ b/core/processor/src/main/java/io/quarkus/annotation/processor/documentation/config/formatter/JavadocToAsciidocTransformer.java @@ -62,8 +62,8 @@ public final class JavadocToAsciidocTransformer { private static final String SUB_SCRIPT_ASCIDOC_STYLE = "~"; private static final String SUPER_SCRIPT_ASCIDOC_STYLE = "^"; private static final String SMALL_ASCIDOC_STYLE = "[.small]"; - private static final String 
ORDERED_LIST_ITEM_ASCIDOC_STYLE = " . "; - private static final String UNORDERED_LIST_ITEM_ASCIDOC_STYLE = " - "; + private static final String ORDERED_LIST_MARKER = "."; + private static final String UNORDERED_LIST_MARKER = "*"; private static final String UNDERLINE_ASCIDOC_STYLE = "[.underline]"; private static final String LINE_THROUGH_ASCIDOC_STYLE = "[.line-through]"; private static final String HARD_LINE_BREAK_ASCIDOC_STYLE = " +\n"; @@ -189,17 +189,32 @@ private static void htmlToAsciidoc(StringBuilder sb, Node node, boolean inlineMa break; case ORDERED_LIST_NODE: case UN_ORDERED_LIST_NODE: - newLine(sb); + if (context.listDepth > 0) { + // Nested list: we're already on a new line from the parent list item, + // no need for an extra newline + } else { + newLine(sb); + } + context.listDepth++; htmlToAsciidoc(sb, childNode, inlineMacroMode, context); - newLine(sb); - newLine(sb); + context.listDepth--; + if (context.listDepth == 0) { + newLine(sb); + newLine(sb); + } break; case LIST_ITEM_NODE: - final String marker = childNode.parentNode().nodeName().equals(ORDERED_LIST_NODE) - ? ORDERED_LIST_ITEM_ASCIDOC_STYLE - : UNORDERED_LIST_ITEM_ASCIDOC_STYLE; + final String listMarker = childNode.parentNode().nodeName().equals(ORDERED_LIST_NODE) + ? 
ORDERED_LIST_MARKER + : UNORDERED_LIST_MARKER; newLine(sb); - sb.append(marker); + sb.append(' '); + for (int i = 0; i < context.listDepth; i++) { + sb.append(listMarker); + } + sb.append(' '); + // Track that we're in a list item to strip leading whitespace from the first text node + context.firstTextInListItem = true; htmlToAsciidoc(sb, childNode, inlineMacroMode, context); break; case LINK_NODE: @@ -273,6 +288,16 @@ private static void htmlToAsciidoc(StringBuilder sb, Node node, boolean inlineMa break; } + // Trim leading whitespace for the first text node inside a list item + // (HTML formatting often introduces extra spaces/newlines) + if (context.firstTextInListItem) { + text = text.stripLeading(); + context.firstTextInListItem = false; + if (text.isEmpty()) { + break; + } + } + // Indenting the first line of a paragraph by one or more spaces makes the block literal // Please see https://docs.asciidoctor.org/asciidoc/latest/verbatim/literal-blocks/ for more info // This prevents literal blocks f.e. after
@@ -553,5 +578,7 @@ private static class Context { boolean inTable; boolean firstTableRow; + int listDepth; + boolean firstTextInListItem; } } diff --git a/core/processor/src/test/java/io/quarkus/annotation/processor/documentation/config/formatter/JavadocToAsciidocTransformerConfigItemTest.java b/core/processor/src/test/java/io/quarkus/annotation/processor/documentation/config/formatter/JavadocToAsciidocTransformerConfigItemTest.java index 9d12409dc7fd7..77b8149971994 100644 --- a/core/processor/src/test/java/io/quarkus/annotation/processor/documentation/config/formatter/JavadocToAsciidocTransformerConfigItemTest.java +++ b/core/processor/src/test/java/io/quarkus/annotation/processor/documentation/config/formatter/JavadocToAsciidocTransformerConfigItemTest.java @@ -121,7 +121,7 @@ public void parseJavaDocWithLiTagsInsideUlTag() { "
  • 2
  • \n" + "" + ""; - String expectedOutput = "List:\n\n - 1\n - 2"; + String expectedOutput = "List:\n\n * 1\n * 2"; ParsedJavadoc parsed = JavadocUtil.parseConfigItemJavadoc(javaDoc); String description = JavadocToAsciidocTransformer.toAsciidoc(parsed.description(), parsed.format()); @@ -143,6 +143,84 @@ public void parseJavaDocWithLiTagsInsideOlTag() { assertEquals(expectedOutput, description); } + @Test + public void parseJavaDocWithNestedUnorderedList() { + String javaDoc = "List:" + + "
      \n" + + "
    • First sentence. Second sentence.
    • \n" + + "
    • \n" + + "And some nested bullet list:\n" + + "
        \n" + + "
      • Element 1 with one sentence. And another sentence.
      • \n" + + "
      • Element 2 with sentence 2. And yet another sentence.
      • \n" + + "
      \n" + + "
    • \n" + + "
    "; + String expectedOutput = "List:\n\n * First sentence. Second sentence.\n * And some nested bullet list:\n ** Element 1 with one sentence. And another sentence.\n ** Element 2 with sentence 2. And yet another sentence."; + ParsedJavadoc parsed = JavadocUtil.parseConfigItemJavadoc(javaDoc); + String description = JavadocToAsciidocTransformer.toAsciidoc(parsed.description(), parsed.format()); + + assertEquals(expectedOutput, description); + } + + @Test + public void parseJavaDocWithNestedOrderedList() { + String javaDoc = "List:" + + "
      \n" + + "
    1. Item 1
    2. \n" + + "
    3. \n" + + "Item 2 with nested list:\n" + + "
        \n" + + "
      1. Sub 1
      2. \n" + + "
      3. Sub 2
      4. \n" + + "
      \n" + + "
    4. \n" + + "
    "; + String expectedOutput = "List:\n\n . Item 1\n . Item 2 with nested list:\n .. Sub 1\n .. Sub 2"; + ParsedJavadoc parsed = JavadocUtil.parseConfigItemJavadoc(javaDoc); + String description = JavadocToAsciidocTransformer.toAsciidoc(parsed.description(), parsed.format()); + + assertEquals(expectedOutput, description); + } + + @Test + public void parseJavaDocWithDeeplyNestedLists() { + String javaDoc = "List:" + + "
      \n" + + "
    • Level 1\n" + + "
        \n" + + "
      • Level 2\n" + + "
          \n" + + "
        • Level 3
        • \n" + + "
        \n" + + "
      • \n" + + "
      \n" + + "
    • \n" + + "
    "; + String expectedOutput = "List:\n\n * Level 1\n ** Level 2\n *** Level 3"; + ParsedJavadoc parsed = JavadocUtil.parseConfigItemJavadoc(javaDoc); + String description = JavadocToAsciidocTransformer.toAsciidoc(parsed.description(), parsed.format()); + + assertEquals(expectedOutput, description); + } + + @Test + public void parseJavaDocWithMixedNestedLists() { + String javaDoc = "List:" + + "
      \n" + + "
    • Unordered item\n" + + "
        \n" + + "
      1. Ordered sub-item
      2. \n" + + "
      \n" + + "
    • \n" + + "
    "; + String expectedOutput = "List:\n\n * Unordered item\n .. Ordered sub-item"; + ParsedJavadoc parsed = JavadocUtil.parseConfigItemJavadoc(javaDoc); + String description = JavadocToAsciidocTransformer.toAsciidoc(parsed.description(), parsed.format()); + + assertEquals(expectedOutput, description); + } + @Test public void parseJavaDocWithLinkInlineSnippet() { String javaDoc = "{@link firstlink} {@link #secondlink} \n {@linkplain #third.link}"; diff --git a/devtools/cli/pom.xml b/devtools/cli/pom.xml index edc54c60d0ae5..6a368d7fa5062 100644 --- a/devtools/cli/pom.xml +++ b/devtools/cli/pom.xml @@ -12,6 +12,7 @@ uber-jar + classes quarkus-cli diff --git a/devtools/gradle/settings.gradle.kts b/devtools/gradle/settings.gradle.kts index fb010631aba99..65a3fa7969fba 100644 --- a/devtools/gradle/settings.gradle.kts +++ b/devtools/gradle/settings.gradle.kts @@ -1,5 +1,5 @@ plugins { - id("com.gradle.develocity") version "4.3.2" + id("com.gradle.develocity") version "4.4.1" } develocity { diff --git a/docs/src/main/asciidoc/dev-mcp.adoc b/docs/src/main/asciidoc/dev-mcp.adoc index 2fd84c6d883ad..2baa41e7db89f 100644 --- a/docs/src/main/asciidoc/dev-mcp.adoc +++ b/docs/src/main/asciidoc/dev-mcp.adoc @@ -37,7 +37,7 @@ A single JSON‑RPC service can be used for both Dev UI and Dev MCP; methods wit === Extension skills -Extensions can ship a `quarkus-skill.md` file to provide AI coding agents with extension-specific coding guidelines, testing patterns, and common pitfalls. During the Quarkus build, all skill files are automatically discovered, composed with extension metadata (name, description, guide URL), and aggregated into a single `io.quarkus:quarkus-extension-skills` JAR following the https://agentskills.io/specification[Agent Skills specification]. This JAR is published to Maven Central with each Quarkus release and is compatible with https://www.skillsjars.com[SkillsJars]. 
+Extensions can ship a `quarkus-skill.md` file to provide AI coding agents with extension-specific coding guidelines, testing patterns, and common pitfalls. During the Quarkus build, all skill files are automatically discovered, composed with extension metadata (name, description, guide URL, categories), and aggregated into a single `io.quarkus:quarkus-extension-skills` JAR following the https://agentskills.io/specification[Agent Skills specification]. This JAR is published to Maven Central with each Quarkus release and is compatible with https://www.skillsjars.com[SkillsJars]. ==== Adding a skill file to your extension @@ -50,7 +50,7 @@ my-extension/deployment/src/main/resources/META-INF/quarkus-skill.md That's it. No pom.xml changes are needed. The Quarkus build automatically discovers all `quarkus-skill.md` files across all extensions and aggregates them into the `quarkus-extension-skills` artifact. -If your extension's runtime module has a `description` field in `META-INF/quarkus-extension.yaml`, it will be included in the composed skill's YAML frontmatter to help AI agents discover and understand your extension. While optional, providing a description is recommended: +If your extension's runtime module has a `description` field in `META-INF/quarkus-extension.yaml`, it will be included in the composed skill's YAML frontmatter to help AI agents discover and understand your extension. The `categories` field under `metadata` is also included, allowing AI agents to group skills by category. While optional, providing a description and categories is recommended: [source,yaml] ---- @@ -59,6 +59,8 @@ description: "A concise description of what this extension does" artifact: ${project.groupId}:${project.artifactId}:${project.version} metadata: guide: "https://quarkus.io/guides/my-extension" + categories: + - "web" ---- The file should contain concise, actionable Markdown content. 
Focus on what an AI agent needs to know to use your extension correctly: @@ -92,7 +94,7 @@ Example for a hypothetical extension: The skill file content is **not** shipped as-is. During the Quarkus build, the `aggregate-skills` goal scans the source tree for all `quarkus-skill.md` files and composes each one with extension metadata to produce a `SKILL.md` file at `META-INF/skills/{extension-name}/SKILL.md` inside the aggregated JAR. The composed document follows the https://agentskills.io/specification[Agent Skills specification] and includes: -1. **YAML frontmatter** — the skill name, extension description from `quarkus-extension.yaml`, license, and guide URL as structured metadata for agent discovery. +1. **YAML frontmatter** — the skill name, extension description from `quarkus-extension.yaml`, license, guide URL, and categories as structured metadata for agent discovery. 2. **Skill body** — the patterns, testing guidelines, and pitfalls authored by the extension developer. 3. **Dev MCP tools** — the skill is automatically enriched with an "Available Dev MCP Tools" section listing each MCP tool the extension enables by default, including tool names, descriptions, and parameters. Tools are discovered from runtime methods annotated with both `@JsonRpcDescription` and `@DevMCPEnableByDefault`, and from deployment processor classes annotated with `@DevMcpBuildTimeTool`. @@ -106,6 +108,7 @@ description: "A brief description of the extension from quarkus-extension.yaml" license: Apache-2.0 metadata: guide: https://quarkus.io/guides/my-extension + categories: "web, data" --- ### Data Access diff --git a/docs/src/main/asciidoc/opentelemetry-tracing.adoc b/docs/src/main/asciidoc/opentelemetry-tracing.adoc index 183270d57e906..ff9a72f1fc9c9 100644 --- a/docs/src/main/asciidoc/opentelemetry-tracing.adoc +++ b/docs/src/main/asciidoc/opentelemetry-tracing.adoc @@ -108,55 +108,82 @@ endpoint will be traced without any required code changes. 
=== Create the configuration - :opentelemetry-config: include::{includes}/opentelemetry-config.adoc[] -== Run the application +== See data -First we need to start a system to visualise the OpenTelemetry data. -We have 2 options: +Before starting the app, please set up the system to visualize the OpenTelemetry data. +We have several options: -* Start an all-in-one Grafana OTel LGTM system for traces and metrics. -* Jaeger system just for traces. +* Start an all-in-one Grafana OTel LGTM Dev Service for traces, logs and metrics +* Jaeger system just for traces +* Logging exporter === Grafana OTel LGTM option +A Dev Service that will receive your app's telemetry. Only the dependency is needed. * Take a look at: xref:observability-devservices-lgtm.adoc[Getting Started with Grafana-OTel-LGTM]. -This features a Quarkus Dev service including a Grafana for visualizing data, Loki to store logs, Tempo to store traces and Prometheus to store metrics. Also provides an OTel collector to receive the data. +This features a Quarkus Dev Service including a Grafana for visualizing data, Loki to store logs, Tempo to store traces and Prometheus to store metrics. Also provides an OTel collector to receive the data. + +=== Jaeger v2 to see traces option + +Jaeger V2 is a tool to visualize spans, and it's based on OpenTelemetry collector. There is no need to install a separate collector. +More details are available in https://medium.com/jaegertracing/towards-jaeger-v2-moar-opentelemetry-2f8239bee48e[this blog post]. + +Start the OpenTelemetry Collector and Jaeger system via the following Docker command: + +[source,shell] +---- +docker run -it \ + -p 16686:16686 \ + -p 4317:4317 \ + -p 4318:4318 \ +jaegertracing/jaeger:latest +---- + +Where: +|=== +|Port | Purpose + +|16686 +|Jaeger UI + +|4317 +|OpenTelemetry collector. OTLP protobuf gRPC receiver. -=== Jaeger to see traces option +|4318 +|OpenTelemetry collector. OTLP protobuf HTTP receiver. 
-Configure and start the https://opentelemetry.io/docs/collector/[OpenTelemetry Collector] to receive, process and export telemetry data to https://www.jaegertracing.io/[Jaeger] that will display the captured traces. +|=== + +Other ports are available in the https://www.jaegertracing.io/docs/2.17/architecture/apis/#write-apis[Jaeger documentation]. + +=== Logging exporter +You can output all traces to the console by setting the exporter to `logging` in the `application.properties` file: + +[source,properties] +---- +quarkus.otel.traces.exporter=logging +---- [NOTE] ==== -Jaeger-all-in-one includes the Jaeger agent, an OTel collector, and the query service/UI. -You do not need to install a separated collector. You can directly send the trace data to Jaeger (after enabling OTLP receivers there, see e.g. this -https://medium.com/jaegertracing/introducing-native-support-for-opentelemetry-in-jaeger-eb661be8183c[blog entry] for details). +Usually there is no need to set the `quarkus.otel.traces.exporter` property. The default value is `cdi` and is managed by Quarkus. ==== -Start the OpenTelemetry Collector and Jaeger system via the following `docker-compose.yml` file that you can launch via `docker-compose up -d`: +This dependency must be added to the project: -[source,yaml,subs="attributes"] +[source,xml] ---- -version: "2" -services: - - # Jaeger - jaeger-all-in-one: - image: jaegertracing/all-in-one:latest - ports: - - "16686:16686" # Jaeger UI - - "14268:14268" # Receive legacy OpenTracing traces, optional - - "4317:4317" # OTLP gRPC receiver - - "4318:4318" # OTLP HTTP receiver - - "14250:14250" # Receive from external otel-collector, optional - environment: - - COLLECTOR_OTLP_ENABLED=true + + io.opentelemetry + opentelemetry-exporter-logging + ---- -You should remove the optional ports you don't need them. 
+ +== Run the application === Start the application diff --git a/docs/src/main/asciidoc/writing-extensions.adoc b/docs/src/main/asciidoc/writing-extensions.adoc index 6c73c2031147e..130f385671883 100644 --- a/docs/src/main/asciidoc/writing-extensions.adoc +++ b/docs/src/main/asciidoc/writing-extensions.adoc @@ -1832,7 +1832,7 @@ void registerMetrics(AgroalMetricsRecorder recorder, // IFF metrics are enabled globally and for the data source // (they are enabled for each data source by default if they are also enabled globally) if (dataSourcesBuildTimeConfig.metricsEnabled && - aggregatedDataSourceBuildTimeConfig.getJdbcConfig().enableMetrics.orElse(true)) { + aggregatedDataSourceBuildTimeConfig.getJdbcConfig().metrics().enabled().orElse(true)) { datasourceMetrics.produce(new MetricsFactoryConsumerBuildItem( recorder.registerDataSourceMetrics(aggregatedDataSourceBuildTimeConfig.getName()))); } diff --git a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSourceJdbcBuildTimeConfig.java b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSourceJdbcBuildTimeConfig.java index a354b50c6c2ae..cab6dda5c8d62 100644 --- a/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSourceJdbcBuildTimeConfig.java +++ b/extensions/agroal/runtime/src/main/java/io/quarkus/agroal/runtime/DataSourceJdbcBuildTimeConfig.java @@ -31,15 +31,6 @@ public interface DataSourceJdbcBuildTimeConfig { @WithDefault("enabled") TransactionIntegration transactions(); - /** - * Enable datasource metrics collection. If unspecified, collecting metrics will be enabled by default if - * a metrics extension is active. - *

    - * Please use quarkus-micrometer and the quarkus.datasource.metrics.enabled property - */ - @Deprecated(forRemoval = true) - Optional enableMetrics(); - /** * Enable OpenTelemetry JDBC instrumentation. */ diff --git a/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateOrmProcessor.java b/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateOrmProcessor.java index 5d819a0cc2ac1..3cd749b9dd607 100644 --- a/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateOrmProcessor.java +++ b/extensions/hibernate-orm/deployment/src/main/java/io/quarkus/hibernate/orm/deployment/HibernateOrmProcessor.java @@ -426,7 +426,7 @@ public void configurationDescriptorBuilding( true, isHibernateValidatorPresent(capabilities), jsonMapper, xmlMapper)); } - if (impliedPU.shouldGenerateImpliedBlockingPersistenceUnit()) { + if (persistenceXmlDescriptors.isEmpty() && impliedPU.shouldGenerateImpliedBlockingPersistenceUnit()) { handleHibernateORMWithNoPersistenceXml(hibernateOrmConfig, index, persistenceXmlDescriptors, jdbcDataSources, reactiveDataSources, applicationArchivesBuildItem, launchMode.getLaunchMode(), additionalJpaModelBuildItems, @@ -476,7 +476,31 @@ public void contributePersistenceXmlToJpaModel( @BuildStep public void contributeQuarkusConfigToJpaModel( BuildProducer jpaModelPuContributions, - HibernateOrmConfig hibernateOrmConfig) { + HibernateOrmConfig hibernateOrmConfig, List persistenceXmlDescriptors) { + if (!persistenceXmlDescriptors.isEmpty()) { + if (hibernateOrmConfig.isAnyNonPersistenceXmlPropertySet()) { + throw new ConfigurationException( + "A legacy persistence.xml file is present in the classpath, but Hibernate ORM is also configured through the Quarkus config file.\n" + + "Legacy persistence.xml files and Quarkus configuration cannot be used at the same time.\n" + + "To ignore persistence.xml files, set the configuration property" + + " 
'quarkus.hibernate-orm.persistence-xml.ignore' to 'true'.\n" + + "To use persistence.xml files, remove all '" + HIBERNATE_ORM_CONFIG_PREFIX + + "*' properties from the Quarkus config file."); + } else { + // It's theoretically possible to use the Quarkus Hibernate ORM extension + // without setting any build-time configuration property, + // so the condition above might not catch all attempts to use persistence.xml and Quarkus-configured PUs + // at the same time. + // At that point, the only thing we can do is log something, + // so that hopefully people in that situation will notice that their Quarkus configuration is being ignored. + LOG.infof( + "A legacy persistence.xml file is present in the classpath. This file will be used to configure JPA/Hibernate ORM persistence units," + + " and any configuration of the Hibernate ORM extension will be ignored." + + " To ignore persistence.xml files instead, set the configuration property" + + " 'quarkus.hibernate-orm.persistence-xml.ignore' to 'true'."); + return; + } + } for (Entry entry : hibernateOrmConfig.persistenceUnits() .entrySet()) { String name = entry.getKey(); @@ -977,31 +1001,6 @@ private void handleHibernateORMWithNoPersistenceXml( BuildProducer reflectiveMethods, BuildProducer unremovableBeans, List dbKindMetadataBuildItems) { - if (!descriptors.isEmpty()) { - if (hibernateOrmConfig.isAnyNonPersistenceXmlPropertySet()) { - throw new ConfigurationException( - "A legacy persistence.xml file is present in the classpath, but Hibernate ORM is also configured through the Quarkus config file.\n" - + "Legacy persistence.xml files and Quarkus configuration cannot be used at the same time.\n" - + "To ignore persistence.xml files, set the configuration property" - + " 'quarkus.hibernate-orm.persistence-xml.ignore' to 'true'.\n" - + "To use persistence.xml files, remove all '" + HIBERNATE_ORM_CONFIG_PREFIX - + "*' properties from the Quarkus config file."); - } else { - // It's theoretically possible to use the 
Quarkus Hibernate ORM extension - // without setting any build-time configuration property, - // so the condition above might not catch all attempts to use persistence.xml and Quarkus-configured PUs - // at the same time. - // At that point, the only thing we can do is log something, - // so that hopefully people in that situation will notice that their Quarkus configuration is being ignored. - LOG.infof( - "A legacy persistence.xml file is present in the classpath. This file will be used to configure JPA/Hibernate ORM persistence units," - + " and any configuration of the Hibernate ORM extension will be ignored." - + " To ignore persistence.xml files instead, set the configuration property" - + " 'quarkus.hibernate-orm.persistence-xml.ignore' to 'true'."); - return; - } - } - if (!hibernateOrmConfig.blocking()) { LOG.infof( "Hibernate ORM was disabled explicitly by quarkus.hibernate-orm.blocking=false"); diff --git a/extensions/panache/hibernate-orm-panache/deployment/src/test/java/io/quarkus/hibernate/orm/panache/deployment/test/JpaOperationsSortTest.java b/extensions/panache/hibernate-orm-panache/deployment/src/test/java/io/quarkus/hibernate/orm/panache/deployment/test/JpaOperationsSortTest.java index ff9e1f9f751ba..22b7ed24af08e 100644 --- a/extensions/panache/hibernate-orm-panache/deployment/src/test/java/io/quarkus/hibernate/orm/panache/deployment/test/JpaOperationsSortTest.java +++ b/extensions/panache/hibernate-orm-panache/deployment/src/test/java/io/quarkus/hibernate/orm/panache/deployment/test/JpaOperationsSortTest.java @@ -75,4 +75,64 @@ public void testSortByDisabledEscaping() { Sort sort1 = Sort.by("foo.`bar`").disableEscaping(); assertEquals(" ORDER BY foo.`bar`", PanacheJpaUtil.toOrderBy(sort1)); } + + @Test + public void testCaseInsensitiveSorting() { + Sort sort = Sort.ascendingIgnoreCase("name"); + assertEquals(" ORDER BY LOWER(`name`)", PanacheJpaUtil.toOrderBy(sort)); + } + + @Test + public void testCaseInsensitiveSortingDescending() { + Sort 
sort = Sort.descendingIgnoreCase("name"); + assertEquals(" ORDER BY LOWER(`name`) DESC", PanacheJpaUtil.toOrderBy(sort)); + } + + @Test + public void testCaseInsensitiveSortingWithNullPrecedence() { + Sort sort = Sort.ascendingIgnoreCase("name").nullsFirst(); + assertEquals(" ORDER BY LOWER(`name`) NULLS FIRST", PanacheJpaUtil.toOrderBy(sort)); + } + + @Test + public void testMixedCaseSensitiveAndInsensitive() { + Sort sort = Sort.by("category").andIgnoreCase("name", Sort.Direction.Descending); + assertEquals(" ORDER BY `category` , LOWER(`name`) DESC", PanacheJpaUtil.toOrderBy(sort)); + } + + @Test + public void testCaseInsensitiveEmbeddedColumn() { + Sort sort = Sort.ascendingIgnoreCase("author.name"); + assertEquals(" ORDER BY LOWER(`author`.`name`)", PanacheJpaUtil.toOrderBy(sort)); + } + + @Test + public void testCaseInsensitiveDisabledEscaping() { + Sort sort = Sort.ascendingIgnoreCase("name").disableEscaping(); + assertEquals(" ORDER BY LOWER(name)", PanacheJpaUtil.toOrderBy(sort)); + } + + @Test + public void testIgnoreCaseFluentAPI() { + Sort sort = Sort.by("name", "author").ignoreCase(); + assertEquals(" ORDER BY LOWER(`name`) , LOWER(`author`)", PanacheJpaUtil.toOrderBy(sort)); + } + + @Test + public void testCaseInsensitiveMultipleColumns() { + Sort sort = Sort.ascendingIgnoreCase("name", "author"); + assertEquals(" ORDER BY LOWER(`name`) , LOWER(`author`)", PanacheJpaUtil.toOrderBy(sort)); + } + + @Test + public void testNullsFirstConvenience() { + Sort sort = Sort.by("foo").nullsFirst(); + assertEquals(" ORDER BY `foo` NULLS FIRST", PanacheJpaUtil.toOrderBy(sort)); + } + + @Test + public void testNullsLastConvenience() { + Sort sort = Sort.by("foo").nullsLast(); + assertEquals(" ORDER BY `foo` NULLS LAST", PanacheJpaUtil.toOrderBy(sort)); + } } diff --git a/extensions/panache/hibernate-panache-next/runtime/pom.xml b/extensions/panache/hibernate-panache-next/runtime/pom.xml index 5b89380ed7e2c..3a75cb2fddef6 100644 --- 
a/extensions/panache/hibernate-panache-next/runtime/pom.xml +++ b/extensions/panache/hibernate-panache-next/runtime/pom.xml @@ -33,11 +33,10 @@ io.quarkus quarkus-panache-common - + jakarta.data jakarta.data-api - true jakarta.json.bind diff --git a/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/PanacheRepositoryQueries.java b/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/PanacheRepositoryQueries.java index 73bf0fb0fcd23..e02a958e87954 100644 --- a/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/PanacheRepositoryQueries.java +++ b/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/PanacheRepositoryQueries.java @@ -2,10 +2,9 @@ import java.util.Map; +import jakarta.data.Order; import jakarta.persistence.LockModeType; -import io.quarkus.panache.common.Sort; - public interface PanacheRepositoryQueries, Count, Confirmation, Id> { // Queries @@ -44,15 +43,15 @@ public interface PanacheRepositoryQueries order, Object... params); /** * Find entities using a query, with named parameters. @@ -71,15 +70,15 @@ public interface PanacheRepositoryQueries params); + Query find(String query, Order order, Map params); /** * Find all entities of this type. @@ -94,13 +93,13 @@ public interface PanacheRepositoryQueries order); /** * Find entities matching a query, with optional indexed parameters. @@ -118,18 +117,18 @@ public interface PanacheRepositoryQueriesfind(query, sort, params).list(). + * This method is a shortcut for find(query, order, params).list(). * * @param query a {@link io.quarkus.hibernate.panache query string} - * @param sort the sort strategy to use + * @param order the sort strategy to use * @param params optional sequence of indexed parameters * @return a {@link List} containing all results, without paging * @see #list(String, Object...) 
- * @see #list(String, Sort, Map) - * @see #find(String, Sort, Object...) - * @see #stream(String, Sort, Object...) + * @see #list(String, Order, Map) + * @see #find(String, Order, Object...) + * @see #stream(String, Order, Object...) */ - EntityList list(String query, Sort sort, Object... params); + EntityList list(String query, Order order, Object... params); /** * Find entities matching a query, with named parameters. @@ -147,18 +146,18 @@ public interface PanacheRepositoryQueriesfind(query, sort, params).list(). + * This method is a shortcut for find(query, order, params).list(). * * @param query a {@link io.quarkus.hibernate.panache query string} - * @param sort the sort strategy to use + * @param order the sort strategy to use * @param params {@link Map} of indexed parameters * @return a {@link List} containing all results, without paging * @see #list(String, Map) - * @see #list(String, Sort, Object...) - * @see #find(String, Sort, Map) - * @see #stream(String, Sort, Map) + * @see #list(String, Order, Object...) + * @see #find(String, Order, Map) + * @see #stream(String, Order, Map) */ - EntityList list(String query, Sort sort, Map params); + EntityList list(String query, Order order, Map params); /** * Find all entities of this type. @@ -173,15 +172,15 @@ public interface PanacheRepositoryQueriesfindAll(sort).list(). + * This method is a shortcut for findAll(order).list(). * - * @param sort the sort order to use + * @param order the sort order to use * @return a {@link List} containing all results, without paging * @see #listAll() - * @see #findAll(Sort) - * @see #streamAll(Sort) + * @see #findAll(Order) + * @see #streamAll(Order) */ - EntityList listAll(Sort sort); + EntityList listAll(Order order); /** * Counts the number of this type of entity in the database. 
diff --git a/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/blocking/PanacheRepositoryBlockingQueries.java b/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/blocking/PanacheRepositoryBlockingQueries.java index d413b0d940601..01912b1d7a00d 100644 --- a/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/blocking/PanacheRepositoryBlockingQueries.java +++ b/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/blocking/PanacheRepositoryBlockingQueries.java @@ -5,10 +5,10 @@ import java.util.Optional; import java.util.stream.Stream; +import jakarta.data.Order; import jakarta.persistence.LockModeType; import io.quarkus.hibernate.panache.PanacheRepositoryQueries; -import io.quarkus.panache.common.Sort; public interface PanacheRepositoryBlockingQueries extends PanacheRepositoryQueries, PanacheBlockingQuery, Long, Boolean, Id> { @@ -49,20 +49,20 @@ public interface PanacheRepositoryBlockingQueries /** * Find entities matching a query and the given sort options, with optional indexed parameters. - * This method is a shortcut for find(query, sort, params).stream(). + * This method is a shortcut for find(query, order, params).stream(). * It requires a transaction to work. * Without a transaction, the underlying cursor can be closed before the end of the stream. * * @param query a {@link io.quarkus.hibernate.panache query string} - * @param sort the sort strategy to use + * @param order the sort strategy to use * @param params optional sequence of indexed parameters * @return a {@link Stream} containing all results, without paging * @see #stream(String, Object...) - * @see #stream(String, Sort, Map) - * @see #find(String, Sort, Object...) - * @see #list(String, Sort, Object...) + * @see #stream(String, Order, Map) + * @see #find(String, Order, Object...) + * @see #list(String, Order, Object...) 
*/ - Stream stream(String query, Sort sort, Object... params); + Stream stream(String query, Order order, Object... params); /** * Find entities matching a query, with named parameters. @@ -82,20 +82,20 @@ public interface PanacheRepositoryBlockingQueries /** * Find entities matching a query and the given sort options, with named parameters. - * This method is a shortcut for find(query, sort, params).stream(). + * This method is a shortcut for find(query, order, params).stream(). * It requires a transaction to work. * Without a transaction, the underlying cursor can be closed before the end of the stream. * * @param query a {@link io.quarkus.hibernate.panache query string} - * @param sort the sort strategy to use + * @param order the sort strategy to use * @param params {@link Map} of indexed parameters * @return a {@link Stream} containing all results, without paging * @see #stream(String, Map) - * @see #stream(String, Sort, Object...) - * @see #find(String, Sort, Map) - * @see #list(String, Sort, Map) + * @see #stream(String, Order, Object...) + * @see #find(String, Order, Map) + * @see #list(String, Order, Map) */ - Stream stream(String query, Sort sort, Map params); + Stream stream(String query, Order order, Map params); /** * Find all entities of this type. @@ -104,11 +104,11 @@ public interface PanacheRepositoryBlockingQueries * Without a transaction, the underlying cursor can be closed before the end of the stream. * * @return a {@link Stream} containing all results, without paging - * @see #streamAll(Sort) + * @see #streamAll(Order) * @see #findAll() * @see #listAll() */ - Stream streamAll(Sort sort); + Stream streamAll(Order order); /** * Find all entities of this type, in the given order. 
@@ -118,8 +118,8 @@ public interface PanacheRepositoryBlockingQueries * * @return a {@link Stream} containing all results, without paging * @see #streamAll() - * @see #findAll(Sort) - * @see #listAll(Sort) + * @see #findAll(Order) + * @see #listAll(Order) */ Stream streamAll(); } diff --git a/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/managed/blocking/PanacheManagedBlockingRepositoryQueries.java b/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/managed/blocking/PanacheManagedBlockingRepositoryQueries.java index 0bc6e2ae498dc..de70d45652ff8 100644 --- a/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/managed/blocking/PanacheManagedBlockingRepositoryQueries.java +++ b/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/managed/blocking/PanacheManagedBlockingRepositoryQueries.java @@ -5,6 +5,7 @@ import java.util.Optional; import java.util.stream.Stream; +import jakarta.data.Order; import jakarta.persistence.LockModeType; import io.quarkus.hibernate.orm.panache.common.runtime.AbstractJpaOperations; @@ -12,7 +13,6 @@ import io.quarkus.hibernate.panache.blocking.PanacheRepositoryBlockingQueries; import io.quarkus.hibernate.panache.runtime.spi.PanacheBlockingOperations; import io.quarkus.hibernate.panache.runtime.spi.PanacheOperations; -import io.quarkus.panache.common.Sort; public interface PanacheManagedBlockingRepositoryQueries extends PanacheRepositoryBlockingQueries { private Class getEntityClass() { @@ -49,8 +49,8 @@ default PanacheBlockingQuery find(String query, Object... params) { } @Override - default PanacheBlockingQuery find(String query, Sort sort, Object... params) { - return (PanacheBlockingQuery) operations().find(getEntityClass(), query, sort, params); + default PanacheBlockingQuery find(String query, Order order, Object... 
params) { + return (PanacheBlockingQuery) operations().find(getEntityClass(), query, order, params); } @Override @@ -59,8 +59,8 @@ default PanacheBlockingQuery find(String query, Map para } @Override - default PanacheBlockingQuery find(String query, Sort sort, Map params) { - return (PanacheBlockingQuery) operations().find(getEntityClass(), query, sort, params); + default PanacheBlockingQuery find(String query, Order order, Map params) { + return (PanacheBlockingQuery) operations().find(getEntityClass(), query, order, params); } @Override @@ -69,8 +69,8 @@ default PanacheBlockingQuery findAll() { } @Override - default PanacheBlockingQuery findAll(Sort sort) { - return (PanacheBlockingQuery) operations().findAll(getEntityClass(), sort); + default PanacheBlockingQuery findAll(Order order) { + return (PanacheBlockingQuery) operations().findAll(getEntityClass(), order); } @Override @@ -79,8 +79,8 @@ default List list(String query, Object... params) { } @Override - default List list(String query, Sort sort, Object... params) { - return (List) operations().list(getEntityClass(), query, sort, params); + default List list(String query, Order order, Object... params) { + return (List) operations().list(getEntityClass(), query, order, params); } @Override @@ -89,8 +89,8 @@ default List list(String query, Map params) { } @Override - default List list(String query, Sort sort, Map params) { - return (List) operations().list(getEntityClass(), query, sort, params); + default List list(String query, Order order, Map params) { + return (List) operations().list(getEntityClass(), query, order, params); } @Override @@ -99,8 +99,8 @@ default List listAll() { } @Override - default List listAll(Sort sort) { - return (List) operations().listAll(getEntityClass(), sort); + default List listAll(Order order) { + return (List) operations().listAll(getEntityClass(), order); } @Override @@ -109,8 +109,8 @@ default Stream stream(String query, Object... 
params) { } @Override - default Stream stream(String query, Sort sort, Object... params) { - return (Stream) operations().stream(getEntityClass(), query, sort, params); + default Stream stream(String query, Order order, Object... params) { + return (Stream) operations().stream(getEntityClass(), query, order, params); } @Override @@ -119,13 +119,13 @@ default Stream stream(String query, Map params) { } @Override - default Stream stream(String query, Sort sort, Map params) { - return (Stream) operations().stream(getEntityClass(), query, sort, params); + default Stream stream(String query, Order order, Map params) { + return (Stream) operations().stream(getEntityClass(), query, order, params); } @Override - default Stream streamAll(Sort sort) { - return (Stream) operations().streamAll(getEntityClass(), sort); + default Stream streamAll(Order order) { + return (Stream) operations().streamAll(getEntityClass(), order); } @Override diff --git a/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/managed/reactive/PanacheManagedReactiveRepositoryQueries.java b/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/managed/reactive/PanacheManagedReactiveRepositoryQueries.java index d8047e2738ace..9d57bd5ec9ac7 100644 --- a/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/managed/reactive/PanacheManagedReactiveRepositoryQueries.java +++ b/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/managed/reactive/PanacheManagedReactiveRepositoryQueries.java @@ -3,6 +3,7 @@ import java.util.List; import java.util.Map; +import jakarta.data.Order; import jakarta.persistence.LockModeType; import io.quarkus.hibernate.orm.panache.common.runtime.AbstractJpaOperations; @@ -10,7 +11,6 @@ import io.quarkus.hibernate.panache.reactive.PanacheRepositoryReactiveQueries; import io.quarkus.hibernate.panache.runtime.spi.PanacheOperations; 
import io.quarkus.hibernate.panache.runtime.spi.PanacheReactiveOperations; -import io.quarkus.panache.common.Sort; import io.smallrye.mutiny.Uni; public interface PanacheManagedReactiveRepositoryQueries extends PanacheRepositoryReactiveQueries { @@ -38,8 +38,8 @@ default PanacheReactiveQuery find(String query, Object... params) { } @Override - default PanacheReactiveQuery find(String query, Sort sort, Object... params) { - return (PanacheReactiveQuery) operations().find(getEntityClass(), query, sort, params); + default PanacheReactiveQuery find(String query, Order order, Object... params) { + return (PanacheReactiveQuery) operations().find(getEntityClass(), query, order, params); } @Override @@ -48,8 +48,8 @@ default PanacheReactiveQuery find(String query, Map para } @Override - default PanacheReactiveQuery find(String query, Sort sort, Map params) { - return (PanacheReactiveQuery) operations().find(getEntityClass(), query, sort, params); + default PanacheReactiveQuery find(String query, Order order, Map params) { + return (PanacheReactiveQuery) operations().find(getEntityClass(), query, order, params); } @Override @@ -58,8 +58,8 @@ default PanacheReactiveQuery findAll() { } @Override - default PanacheReactiveQuery findAll(Sort sort) { - return (PanacheReactiveQuery) operations().findAll(getEntityClass(), sort); + default PanacheReactiveQuery findAll(Order order) { + return (PanacheReactiveQuery) operations().findAll(getEntityClass(), order); } @Override @@ -68,8 +68,8 @@ default Uni> list(String query, Object... params) { } @Override - default Uni> list(String query, Sort sort, Object... params) { - return (Uni) operations().list(getEntityClass(), query, sort, params); + default Uni> list(String query, Order order, Object... 
params) { + return (Uni) operations().list(getEntityClass(), query, order, params); } @Override @@ -78,8 +78,8 @@ default Uni> list(String query, Map params) { } @Override - default Uni> list(String query, Sort sort, Map params) { - return (Uni) operations().list(getEntityClass(), query, sort, params); + default Uni> list(String query, Order order, Map params) { + return (Uni) operations().list(getEntityClass(), query, order, params); } @Override @@ -88,8 +88,8 @@ default Uni> listAll() { } @Override - default Uni> listAll(Sort sort) { - return (Uni) operations().listAll(getEntityClass(), sort); + default Uni> listAll(Order order) { + return (Uni) operations().listAll(getEntityClass(), order); } @Override diff --git a/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/runtime/hr/ManagedReactiveOperations.java b/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/runtime/hr/ManagedReactiveOperations.java index 7acdc36532740..af23153479e33 100644 --- a/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/runtime/hr/ManagedReactiveOperations.java +++ b/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/runtime/hr/ManagedReactiveOperations.java @@ -4,6 +4,7 @@ import java.util.Map; import java.util.stream.Stream; +import jakarta.data.Order; import jakarta.persistence.LockModeType; import org.hibernate.Session; @@ -12,7 +13,7 @@ import io.quarkus.hibernate.panache.reactive.PanacheReactiveQuery; import io.quarkus.hibernate.panache.runtime.spi.PanacheReactiveOperations; -import io.quarkus.panache.common.Sort; +import io.quarkus.panache.hibernate.common.runtime.PanacheJpaUtil; import io.smallrye.mutiny.Uni; public class ManagedReactiveOperations implements PanacheReactiveOperations { @@ -120,8 +121,8 @@ public PanacheReactiveQuery find(Class entityClass, String query, Object.. 
} @Override - public PanacheReactiveQuery find(Class entityClass, String query, Sort sort, Object... params) { - return DELEGATE.find(entityClass, query, sort, params); + public PanacheReactiveQuery find(Class entityClass, String query, Order order, Object... params) { + return DELEGATE.find(entityClass, query, PanacheJpaUtil.toSort(order), params); } @Override @@ -130,8 +131,8 @@ public PanacheReactiveQuery find(Class entityClass, String query, Map find(Class entityClass, String query, Sort sort, Map params) { - return DELEGATE.find(entityClass, query, sort, params); + public PanacheReactiveQuery find(Class entityClass, String query, Order order, Map params) { + return DELEGATE.find(entityClass, query, PanacheJpaUtil.toSort(order), params); } @Override @@ -140,8 +141,8 @@ public PanacheReactiveQuery findAll(Class entityClass) { } @Override - public PanacheReactiveQuery findAll(Class entityClass, Sort sort) { - return DELEGATE.findAll(entityClass, sort); + public PanacheReactiveQuery findAll(Class entityClass, Order order) { + return DELEGATE.findAll(entityClass, PanacheJpaUtil.toSort(order)); } @Override @@ -150,8 +151,8 @@ public Uni> list(Class entityClass, String query, Object... params) { } @Override - public Uni> list(Class entityClass, String query, Sort sort, Object... params) { - return DELEGATE.list(entityClass, query, sort, params); + public Uni> list(Class entityClass, String query, Order order, Object... 
params) { + return DELEGATE.list(entityClass, query, PanacheJpaUtil.toSort(order), params); } @Override @@ -160,8 +161,8 @@ public Uni> list(Class entityClass, String query, Map } @Override - public Uni> list(Class entityClass, String query, Sort sort, Map params) { - return DELEGATE.list(entityClass, query, sort, params); + public Uni> list(Class entityClass, String query, Order order, Map params) { + return DELEGATE.list(entityClass, query, PanacheJpaUtil.toSort(order), params); } @Override @@ -170,8 +171,8 @@ public Uni> listAll(Class entityClass) { } @Override - public Uni> listAll(Class entityClass, Sort sort) { - return DELEGATE.listAll(entityClass, sort); + public Uni> listAll(Class entityClass, Order order) { + return DELEGATE.listAll(entityClass, PanacheJpaUtil.toSort(order)); } @Override diff --git a/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/runtime/hr/StatelessReactiveOperations.java b/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/runtime/hr/StatelessReactiveOperations.java index 9a762abe4ec75..77567220c6b69 100644 --- a/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/runtime/hr/StatelessReactiveOperations.java +++ b/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/runtime/hr/StatelessReactiveOperations.java @@ -4,13 +4,14 @@ import java.util.Map; import java.util.stream.Stream; +import jakarta.data.Order; import jakarta.persistence.LockModeType; import org.hibernate.reactive.mutiny.Mutiny; import io.quarkus.hibernate.panache.reactive.PanacheReactiveQuery; import io.quarkus.hibernate.panache.runtime.spi.PanacheReactiveOperations; -import io.quarkus.panache.common.Sort; +import io.quarkus.panache.hibernate.common.runtime.PanacheJpaUtil; import io.smallrye.mutiny.Uni; public class StatelessReactiveOperations implements PanacheReactiveOperations { @@ -119,8 +120,8 @@ public 
PanacheReactiveQuery find(Class entityClass, String query, Object.. } @Override - public PanacheReactiveQuery find(Class entityClass, String query, Sort sort, Object... params) { - return DELEGATE.find(entityClass, query, sort, params); + public PanacheReactiveQuery find(Class entityClass, String query, Order order, Object... params) { + return DELEGATE.find(entityClass, query, PanacheJpaUtil.toSort(order), params); } @Override @@ -129,8 +130,8 @@ public PanacheReactiveQuery find(Class entityClass, String query, Map find(Class entityClass, String query, Sort sort, Map params) { - return DELEGATE.find(entityClass, query, sort, params); + public PanacheReactiveQuery find(Class entityClass, String query, Order order, Map params) { + return DELEGATE.find(entityClass, query, PanacheJpaUtil.toSort(order), params); } @Override @@ -139,8 +140,8 @@ public PanacheReactiveQuery findAll(Class entityClass) { } @Override - public PanacheReactiveQuery findAll(Class entityClass, Sort sort) { - return DELEGATE.findAll(entityClass, sort); + public PanacheReactiveQuery findAll(Class entityClass, Order order) { + return DELEGATE.findAll(entityClass, PanacheJpaUtil.toSort(order)); } @Override @@ -149,8 +150,8 @@ public Uni> list(Class entityClass, String query, Object... params) { } @Override - public Uni> list(Class entityClass, String query, Sort sort, Object... params) { - return DELEGATE.list(entityClass, query, sort, params); + public Uni> list(Class entityClass, String query, Order order, Object... 
params) { + return DELEGATE.list(entityClass, query, PanacheJpaUtil.toSort(order), params); } @Override @@ -159,8 +160,8 @@ public Uni> list(Class entityClass, String query, Map } @Override - public Uni> list(Class entityClass, String query, Sort sort, Map params) { - return DELEGATE.list(entityClass, query, sort, params); + public Uni> list(Class entityClass, String query, Order order, Map params) { + return DELEGATE.list(entityClass, query, PanacheJpaUtil.toSort(order), params); } @Override @@ -169,8 +170,8 @@ public Uni> listAll(Class entityClass) { } @Override - public Uni> listAll(Class entityClass, Sort sort) { - return DELEGATE.listAll(entityClass, sort); + public Uni> listAll(Class entityClass, Order order) { + return DELEGATE.listAll(entityClass, PanacheJpaUtil.toSort(order)); } @Override diff --git a/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/runtime/orm/ManagedBlockingOperations.java b/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/runtime/orm/ManagedBlockingOperations.java index a6c310aab6fd3..4d077fe9a995e 100644 --- a/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/runtime/orm/ManagedBlockingOperations.java +++ b/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/runtime/orm/ManagedBlockingOperations.java @@ -5,6 +5,7 @@ import java.util.Optional; import java.util.stream.Stream; +import jakarta.data.Order; import jakarta.persistence.LockModeType; import org.hibernate.Session; @@ -12,7 +13,7 @@ import io.quarkus.hibernate.panache.blocking.PanacheBlockingQuery; import io.quarkus.hibernate.panache.runtime.spi.PanacheBlockingOperations; -import io.quarkus.panache.common.Sort; +import io.quarkus.panache.hibernate.common.runtime.PanacheJpaUtil; public class ManagedBlockingOperations implements PanacheBlockingOperations { @@ -127,8 +128,8 @@ public PanacheBlockingQuery 
find(Class entityClass, String query, Object.. } @Override - public PanacheBlockingQuery find(Class entityClass, String query, Sort sort, Object... params) { - return DELEGATE.find(entityClass, query, sort, params); + public PanacheBlockingQuery find(Class entityClass, String query, Order order, Object... params) { + return DELEGATE.find(entityClass, query, PanacheJpaUtil.toSort(order), params); } @Override @@ -137,8 +138,8 @@ public PanacheBlockingQuery find(Class entityClass, String query, Map find(Class entityClass, String query, Sort sort, Map params) { - return DELEGATE.find(entityClass, query, sort, params); + public PanacheBlockingQuery find(Class entityClass, String query, Order order, Map params) { + return DELEGATE.find(entityClass, query, PanacheJpaUtil.toSort(order), params); } @Override @@ -147,8 +148,8 @@ public PanacheBlockingQuery findAll(Class entityClass) { } @Override - public PanacheBlockingQuery findAll(Class entityClass, Sort sort) { - return DELEGATE.findAll(entityClass, sort); + public PanacheBlockingQuery findAll(Class entityClass, Order order) { + return DELEGATE.findAll(entityClass, PanacheJpaUtil.toSort(order)); } @Override @@ -157,8 +158,8 @@ public List list(Class entityClass, String query, Object... params) { } @Override - public List list(Class entityClass, String query, Sort sort, Object... params) { - return DELEGATE.list(entityClass, query, sort, params); + public List list(Class entityClass, String query, Order order, Object... 
params) { + return DELEGATE.list(entityClass, query, PanacheJpaUtil.toSort(order), params); } @Override @@ -167,8 +168,8 @@ public List list(Class entityClass, String query, Map para } @Override - public List list(Class entityClass, String query, Sort sort, Map params) { - return DELEGATE.list(entityClass, query, sort, params); + public List list(Class entityClass, String query, Order order, Map params) { + return DELEGATE.list(entityClass, query, PanacheJpaUtil.toSort(order), params); } @Override @@ -177,8 +178,8 @@ public List listAll(Class entityClass) { } @Override - public List listAll(Class entityClass, Sort sort) { - return DELEGATE.listAll(entityClass, sort); + public List listAll(Class entityClass, Order order) { + return DELEGATE.listAll(entityClass, PanacheJpaUtil.toSort(order)); } @Override @@ -242,8 +243,8 @@ public Stream stream(Class entityClass, String query, Object... params) { } @Override - public Stream stream(Class entityClass, String query, Sort sort, Object... params) { - return DELEGATE.stream(entityClass, query, sort, params); + public Stream stream(Class entityClass, String query, Order order, Object... 
params) { + return DELEGATE.stream(entityClass, query, PanacheJpaUtil.toSort(order), params); } @Override @@ -252,13 +253,13 @@ public Stream stream(Class entityClass, String query, Map } @Override - public Stream stream(Class entityClass, String query, Sort sort, Map params) { - return DELEGATE.stream(entityClass, query, sort, params); + public Stream stream(Class entityClass, String query, Order order, Map params) { + return DELEGATE.stream(entityClass, query, PanacheJpaUtil.toSort(order), params); } @Override - public Stream streamAll(Class entityClass, Sort sort) { - return DELEGATE.streamAll(entityClass, sort); + public Stream streamAll(Class entityClass, Order order) { + return DELEGATE.streamAll(entityClass, PanacheJpaUtil.toSort(order)); } @Override diff --git a/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/runtime/orm/StatelessBlockingOperations.java b/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/runtime/orm/StatelessBlockingOperations.java index 9e73c75a0cc3d..0260dbbbbdd77 100644 --- a/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/runtime/orm/StatelessBlockingOperations.java +++ b/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/runtime/orm/StatelessBlockingOperations.java @@ -5,6 +5,7 @@ import java.util.Optional; import java.util.stream.Stream; +import jakarta.data.Order; import jakarta.persistence.LockModeType; import org.hibernate.Session; @@ -12,7 +13,7 @@ import io.quarkus.hibernate.panache.blocking.PanacheBlockingQuery; import io.quarkus.hibernate.panache.runtime.spi.PanacheBlockingOperations; -import io.quarkus.panache.common.Sort; +import io.quarkus.panache.hibernate.common.runtime.PanacheJpaUtil; public class StatelessBlockingOperations implements PanacheBlockingOperations { @@ -126,8 +127,8 @@ public PanacheBlockingQuery find(Class entityClass, String query, 
Object.. } @Override - public PanacheBlockingQuery find(Class entityClass, String query, Sort sort, Object... params) { - return DELEGATE.find(entityClass, query, sort, params); + public PanacheBlockingQuery find(Class entityClass, String query, Order order, Object... params) { + return DELEGATE.find(entityClass, query, PanacheJpaUtil.toSort(order), params); } @Override @@ -136,8 +137,8 @@ public PanacheBlockingQuery find(Class entityClass, String query, Map find(Class entityClass, String query, Sort sort, Map params) { - return DELEGATE.find(entityClass, query, sort, params); + public PanacheBlockingQuery find(Class entityClass, String query, Order order, Map params) { + return DELEGATE.find(entityClass, query, PanacheJpaUtil.toSort(order), params); } @Override @@ -146,8 +147,8 @@ public PanacheBlockingQuery findAll(Class entityClass) { } @Override - public PanacheBlockingQuery findAll(Class entityClass, Sort sort) { - return DELEGATE.findAll(entityClass, sort); + public PanacheBlockingQuery findAll(Class entityClass, Order order) { + return DELEGATE.findAll(entityClass, PanacheJpaUtil.toSort(order)); } @Override @@ -156,8 +157,8 @@ public List list(Class entityClass, String query, Object... params) { } @Override - public List list(Class entityClass, String query, Sort sort, Object... params) { - return DELEGATE.list(entityClass, query, sort, params); + public List list(Class entityClass, String query, Order order, Object... 
params) { + return DELEGATE.list(entityClass, query, PanacheJpaUtil.toSort(order), params); } @Override @@ -166,8 +167,8 @@ public List list(Class entityClass, String query, Map para } @Override - public List list(Class entityClass, String query, Sort sort, Map params) { - return DELEGATE.list(entityClass, query, sort, params); + public List list(Class entityClass, String query, Order order, Map params) { + return DELEGATE.list(entityClass, query, PanacheJpaUtil.toSort(order), params); } @Override @@ -176,8 +177,8 @@ public List listAll(Class entityClass) { } @Override - public List listAll(Class entityClass, Sort sort) { - return DELEGATE.listAll(entityClass, sort); + public List listAll(Class entityClass, Order order) { + return DELEGATE.listAll(entityClass, PanacheJpaUtil.toSort(order)); } @Override @@ -241,8 +242,8 @@ public Stream stream(Class entityClass, String query, Object... params) { } @Override - public Stream stream(Class entityClass, String query, Sort sort, Object... params) { - return DELEGATE.stream(entityClass, query, sort, params); + public Stream stream(Class entityClass, String query, Order order, Object... 
params) { + return DELEGATE.stream(entityClass, query, PanacheJpaUtil.toSort(order), params); } @Override @@ -251,13 +252,13 @@ public Stream stream(Class entityClass, String query, Map } @Override - public Stream stream(Class entityClass, String query, Sort sort, Map params) { - return DELEGATE.stream(entityClass, query, sort, params); + public Stream stream(Class entityClass, String query, Order order, Map params) { + return DELEGATE.stream(entityClass, query, PanacheJpaUtil.toSort(order), params); } @Override - public Stream streamAll(Class entityClass, Sort sort) { - return DELEGATE.streamAll(entityClass, sort); + public Stream streamAll(Class entityClass, Order order) { + return DELEGATE.streamAll(entityClass, PanacheJpaUtil.toSort(order)); } @Override diff --git a/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/runtime/spi/PanacheBlockingOperations.java b/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/runtime/spi/PanacheBlockingOperations.java index f186ed9fc44f3..207fcd90470dd 100644 --- a/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/runtime/spi/PanacheBlockingOperations.java +++ b/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/runtime/spi/PanacheBlockingOperations.java @@ -5,13 +5,13 @@ import java.util.Optional; import java.util.stream.Stream; +import jakarta.data.Order; import jakarta.persistence.LockModeType; import org.hibernate.Session; import org.hibernate.StatelessSession; import io.quarkus.hibernate.panache.blocking.PanacheBlockingQuery; -import io.quarkus.panache.common.Sort; public interface PanacheBlockingOperations extends PanacheOperations, PanacheBlockingQuery, Long, Void, Boolean> { @@ -26,13 +26,13 @@ public interface PanacheBlockingOperations extends Stream stream(Class entityClass, String query, Object... 
params); - Stream stream(Class entityClass, String query, Sort sort, Object... params); + Stream stream(Class entityClass, String query, Order order, Object... params); Stream stream(Class entityClass, String query, Map params); - Stream stream(Class entityClass, String query, Sort sort, Map params); + Stream stream(Class entityClass, String query, Order order, Map params); - Stream streamAll(Class entityClass, Sort sort); + Stream streamAll(Class entityClass, Order order); Stream streamAll(Class entityClass); } diff --git a/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/runtime/spi/PanacheOperations.java b/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/runtime/spi/PanacheOperations.java index 0f8e8d0632030..d365011d23656 100644 --- a/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/runtime/spi/PanacheOperations.java +++ b/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/runtime/spi/PanacheOperations.java @@ -3,13 +3,13 @@ import java.util.Map; import java.util.stream.Stream; +import jakarta.data.Order; import jakarta.persistence.LockModeType; import io.quarkus.hibernate.panache.runtime.hr.ManagedReactiveOperations; import io.quarkus.hibernate.panache.runtime.hr.StatelessReactiveOperations; import io.quarkus.hibernate.panache.runtime.orm.ManagedBlockingOperations; import io.quarkus.hibernate.panache.runtime.orm.StatelessBlockingOperations; -import io.quarkus.panache.common.Sort; public interface PanacheOperations { @@ -67,27 +67,27 @@ static PanacheReactiveOperations getReactiveStateless() { Query find(Class entityClass, String query, Object... params); - Query find(Class entityClass, String query, Sort sort, Object... params); + Query find(Class entityClass, String query, Order order, Object... 
params); Query find(Class entityClass, String query, Map params); - Query find(Class entityClass, String query, Sort sort, Map params); + Query find(Class entityClass, String query, Order order, Map params); Query findAll(Class entityClass); - Query findAll(Class entityClass, Sort sort); + Query findAll(Class entityClass, Order order); Many list(Class entityClass, String query, Object... params); - Many list(Class entityClass, String query, Sort sort, Object... params); + Many list(Class entityClass, String query, Order order, Object... params); Many list(Class entityClass, String query, Map params); - Many list(Class entityClass, String query, Sort sort, Map params); + Many list(Class entityClass, String query, Order order, Map params); Many listAll(Class entityClass); - Many listAll(Class entityClass, Sort sort); + Many listAll(Class entityClass, Order order); Count count(Class entityClass); diff --git a/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/stateless/blocking/PanacheStatelessBlockingRepositoryQueries.java b/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/stateless/blocking/PanacheStatelessBlockingRepositoryQueries.java index 28c418dd99468..b9c2b3cac4e58 100644 --- a/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/stateless/blocking/PanacheStatelessBlockingRepositoryQueries.java +++ b/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/stateless/blocking/PanacheStatelessBlockingRepositoryQueries.java @@ -5,6 +5,7 @@ import java.util.Optional; import java.util.stream.Stream; +import jakarta.data.Order; import jakarta.persistence.LockModeType; import io.quarkus.hibernate.orm.panache.common.runtime.AbstractJpaOperations; @@ -12,7 +13,6 @@ import io.quarkus.hibernate.panache.blocking.PanacheRepositoryBlockingQueries; import 
io.quarkus.hibernate.panache.runtime.spi.PanacheBlockingOperations; import io.quarkus.hibernate.panache.runtime.spi.PanacheOperations; -import io.quarkus.panache.common.Sort; public interface PanacheStatelessBlockingRepositoryQueries extends PanacheRepositoryBlockingQueries { private Class getEntityClass() { @@ -49,8 +49,8 @@ default PanacheBlockingQuery find(String query, Object... params) { } @Override - default PanacheBlockingQuery find(String query, Sort sort, Object... params) { - return (PanacheBlockingQuery) operations().find(getEntityClass(), query, sort, params); + default PanacheBlockingQuery find(String query, Order order, Object... params) { + return (PanacheBlockingQuery) operations().find(getEntityClass(), query, order, params); } @Override @@ -59,8 +59,8 @@ default PanacheBlockingQuery find(String query, Map para } @Override - default PanacheBlockingQuery find(String query, Sort sort, Map params) { - return (PanacheBlockingQuery) operations().find(getEntityClass(), query, sort, params); + default PanacheBlockingQuery find(String query, Order order, Map params) { + return (PanacheBlockingQuery) operations().find(getEntityClass(), query, order, params); } @Override @@ -69,8 +69,8 @@ default PanacheBlockingQuery findAll() { } @Override - default PanacheBlockingQuery findAll(Sort sort) { - return (PanacheBlockingQuery) operations().findAll(getEntityClass(), sort); + default PanacheBlockingQuery findAll(Order order) { + return (PanacheBlockingQuery) operations().findAll(getEntityClass(), order); } @Override @@ -79,8 +79,8 @@ default List list(String query, Object... params) { } @Override - default List list(String query, Sort sort, Object... params) { - return (List) operations().list(getEntityClass(), query, sort, params); + default List list(String query, Order order, Object... 
params) { + return (List) operations().list(getEntityClass(), query, order, params); } @Override @@ -89,8 +89,8 @@ default List list(String query, Map params) { } @Override - default List list(String query, Sort sort, Map params) { - return (List) operations().list(getEntityClass(), query, sort, params); + default List list(String query, Order order, Map params) { + return (List) operations().list(getEntityClass(), query, order, params); } @Override @@ -99,8 +99,8 @@ default List listAll() { } @Override - default List listAll(Sort sort) { - return (List) operations().listAll(getEntityClass(), sort); + default List listAll(Order order) { + return (List) operations().listAll(getEntityClass(), order); } @Override @@ -109,8 +109,8 @@ default Stream stream(String query, Object... params) { } @Override - default Stream stream(String query, Sort sort, Object... params) { - return (Stream) operations().stream(getEntityClass(), query, sort, params); + default Stream stream(String query, Order order, Object... 
params) { + return (Stream) operations().stream(getEntityClass(), query, order, params); } @Override @@ -119,13 +119,13 @@ default Stream stream(String query, Map params) { } @Override - default Stream stream(String query, Sort sort, Map params) { - return (Stream) operations().stream(getEntityClass(), query, sort, params); + default Stream stream(String query, Order order, Map params) { + return (Stream) operations().stream(getEntityClass(), query, order, params); } @Override - default Stream streamAll(Sort sort) { - return (Stream) operations().streamAll(getEntityClass(), sort); + default Stream streamAll(Order order) { + return (Stream) operations().streamAll(getEntityClass(), order); } @Override diff --git a/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/stateless/reactive/PanacheStatelessReactiveRepositoryQueries.java b/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/stateless/reactive/PanacheStatelessReactiveRepositoryQueries.java index 553171765578a..26eaec996c615 100644 --- a/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/stateless/reactive/PanacheStatelessReactiveRepositoryQueries.java +++ b/extensions/panache/hibernate-panache-next/runtime/src/main/java/io/quarkus/hibernate/panache/stateless/reactive/PanacheStatelessReactiveRepositoryQueries.java @@ -3,6 +3,7 @@ import java.util.List; import java.util.Map; +import jakarta.data.Order; import jakarta.persistence.LockModeType; import io.quarkus.hibernate.orm.panache.common.runtime.AbstractJpaOperations; @@ -10,7 +11,6 @@ import io.quarkus.hibernate.panache.reactive.PanacheRepositoryReactiveQueries; import io.quarkus.hibernate.panache.runtime.spi.PanacheOperations; import io.quarkus.hibernate.panache.runtime.spi.PanacheReactiveOperations; -import io.quarkus.panache.common.Sort; import io.smallrye.mutiny.Uni; public interface PanacheStatelessReactiveRepositoryQueries extends 
PanacheRepositoryReactiveQueries { @@ -38,8 +38,8 @@ default PanacheReactiveQuery find(String query, Object... params) { } @Override - default PanacheReactiveQuery find(String query, Sort sort, Object... params) { - return (PanacheReactiveQuery) operations().find(getEntityClass(), query, sort, params); + default PanacheReactiveQuery find(String query, Order order, Object... params) { + return (PanacheReactiveQuery) operations().find(getEntityClass(), query, order, params); } @Override @@ -48,8 +48,8 @@ default PanacheReactiveQuery find(String query, Map para } @Override - default PanacheReactiveQuery find(String query, Sort sort, Map params) { - return (PanacheReactiveQuery) operations().find(getEntityClass(), query, sort, params); + default PanacheReactiveQuery find(String query, Order order, Map params) { + return (PanacheReactiveQuery) operations().find(getEntityClass(), query, order, params); } @Override @@ -58,8 +58,8 @@ default PanacheReactiveQuery findAll() { } @Override - default PanacheReactiveQuery findAll(Sort sort) { - return (PanacheReactiveQuery) operations().findAll(getEntityClass(), sort); + default PanacheReactiveQuery findAll(Order order) { + return (PanacheReactiveQuery) operations().findAll(getEntityClass(), order); } @Override @@ -68,8 +68,8 @@ default Uni> list(String query, Object... params) { } @Override - default Uni> list(String query, Sort sort, Object... params) { - return (Uni) operations().list(getEntityClass(), query, sort, params); + default Uni> list(String query, Order order, Object... 
params) { + return (Uni) operations().list(getEntityClass(), query, order, params); } @Override @@ -78,8 +78,8 @@ default Uni> list(String query, Map params) { } @Override - default Uni> list(String query, Sort sort, Map params) { - return (Uni) operations().list(getEntityClass(), query, sort, params); + default Uni> list(String query, Order order, Map params) { + return (Uni) operations().list(getEntityClass(), query, order, params); } @Override @@ -88,8 +88,8 @@ default Uni> listAll() { } @Override - default Uni> listAll(Sort sort) { - return (Uni) operations().listAll(getEntityClass(), sort); + default Uni> listAll(Order order) { + return (Uni) operations().listAll(getEntityClass(), order); } @Override diff --git a/extensions/panache/hibernate-panache-next/runtime/src/test/java/io/quarkus/hibernate/panache/JakartaDataSortConversionTest.java b/extensions/panache/hibernate-panache-next/runtime/src/test/java/io/quarkus/hibernate/panache/JakartaDataSortConversionTest.java new file mode 100644 index 0000000000000..5f42b5b6862f6 --- /dev/null +++ b/extensions/panache/hibernate-panache-next/runtime/src/test/java/io/quarkus/hibernate/panache/JakartaDataSortConversionTest.java @@ -0,0 +1,48 @@ +package io.quarkus.hibernate.panache; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import jakarta.data.Order; + +import org.junit.jupiter.api.Test; + +import io.quarkus.panache.common.Sort; +import io.quarkus.panache.hibernate.common.runtime.PanacheJpaUtil; + +public class JakartaDataSortConversionTest { + + @Test + public void testJakartaDataIgnoreCasePreserved() { + jakarta.data.Order order = Order.by(jakarta.data.Sort.ascIgnoreCase("name")); + Sort panacheSort = PanacheJpaUtil.toSort(order); + + assertEquals(1, panacheSort.getColumns().size()); + assertTrue(panacheSort.getColumns().get(0).isIgnoreCase()); + assertEquals(" ORDER BY 
LOWER(`name`)", PanacheJpaUtil.toOrderBy(panacheSort)); + } + + @Test + public void testJakartaDataMixedIgnoreCase() { + jakarta.data.Order order = Order.by( + jakarta.data.Sort.asc("id"), + jakarta.data.Sort.descIgnoreCase("name")); + Sort panacheSort = PanacheJpaUtil.toSort(order); + + assertEquals(2, panacheSort.getColumns().size()); + assertFalse(panacheSort.getColumns().get(0).isIgnoreCase()); + assertTrue(panacheSort.getColumns().get(1).isIgnoreCase()); + assertEquals(" ORDER BY `id` , LOWER(`name`) DESC", PanacheJpaUtil.toOrderBy(panacheSort)); + } + + @Test + public void testJakartaDataCaseSensitiveNotAffected() { + jakarta.data.Order order = Order.by(jakarta.data.Sort.asc("name")); + Sort panacheSort = PanacheJpaUtil.toSort(order); + + assertEquals(1, panacheSort.getColumns().size()); + assertFalse(panacheSort.getColumns().get(0).isIgnoreCase()); + assertEquals(" ORDER BY `name`", PanacheJpaUtil.toOrderBy(panacheSort)); + } +} diff --git a/extensions/panache/panache-common/runtime/src/main/java/io/quarkus/panache/common/Sort.java b/extensions/panache/panache-common/runtime/src/main/java/io/quarkus/panache/common/Sort.java index a503da211661c..59c3782be28bd 100644 --- a/extensions/panache/panache-common/runtime/src/main/java/io/quarkus/panache/common/Sort.java +++ b/extensions/panache/panache-common/runtime/src/main/java/io/quarkus/panache/common/Sort.java @@ -54,24 +54,46 @@ public enum NullPrecedence { NULLS_LAST; } + /** + * Represents the case instructions + */ + public enum Case { + /** + * Sort while ignoring case + */ + IGNORE, + /** + * Sort by respecting case (default) + */ + RESPECT; + } + public static class Column { private String name; + // never null private Direction direction; + // can be null for unspecified private NullPrecedence nullPrecedence; + // never null + private Case caseInstruction; public Column(String name) { this(name, Direction.Ascending); } public Column(String name, Direction direction) { - this.name = name; - 
this.direction = direction; + this(name, direction, null); } public Column(String name, Direction direction, NullPrecedence nullPrecedence) { + this(name, direction, nullPrecedence, Case.RESPECT); + } + + public Column(String name, Direction direction, NullPrecedence nullPrecedence, Case caseInstruction) { this.name = name; this.direction = direction; this.nullPrecedence = nullPrecedence; + this.caseInstruction = caseInstruction; } public String getName() { @@ -97,6 +119,18 @@ public NullPrecedence getNullPrecedence() { public void setNullPrecedence(NullPrecedence nullPrecedence) { this.nullPrecedence = nullPrecedence; } + + public boolean isIgnoreCase() { + return caseInstruction == Case.IGNORE; + } + + public void setCase(Case caseInstruction) { + this.caseInstruction = caseInstruction; + } + + public void setIgnoreCase() { + this.caseInstruction = Case.IGNORE; + } } private List columns = new ArrayList<>(); @@ -206,6 +240,42 @@ public static Sort descending(String... columns) { return sort; } + /** + * Sort by the given columns, in ascending order, case-insensitive. + * + * @param columns the columns to sort on, in ascending order, case-insensitive. + * @return a new Sort instance which sorts on the given columns in ascending order, case-insensitive. + * @see #descendingIgnoreCase(String...) + * @see #ignoreCase() + */ + public static Sort ascendingIgnoreCase(String... columns) { + Sort sort = new Sort(); + for (String column : columns) { + Column col = new Column(column, Direction.Ascending); + col.setIgnoreCase(); + sort.columns.add(col); + } + return sort; + } + + /** + * Sort by the given columns, in descending order, case-insensitive. + * + * @param columns the columns to sort on, in descending order, case-insensitive. + * @return a new Sort instance which sorts on the given columns in descending order, case-insensitive. + * @see #ascendingIgnoreCase(String...) + * @see #ignoreCase() + */ + public static Sort descendingIgnoreCase(String... 
columns) { + Sort sort = new Sort(); + for (String column : columns) { + Column col = new Column(column, Direction.Descending); + col.setIgnoreCase(); + sort.columns.add(col); + } + return sort; + } + /** * Sets the order to descending for all current sort columns. * @@ -243,6 +313,42 @@ public Sort direction(Direction direction) { return this; } + /** + * Sets null precedence to NULLS FIRST for all current sort columns. + * + * @return this instance, modified. + * @see #nullsLast() + * @see #nullPrecedence(NullPrecedence) + */ + public Sort nullsFirst() { + return nullPrecedence(NullPrecedence.NULLS_FIRST); + } + + /** + * Sets null precedence to NULLS LAST for all current sort columns. + * + * @return this instance, modified. + * @see #nullsFirst() + * @see #nullPrecedence(NullPrecedence) + */ + public Sort nullsLast() { + return nullPrecedence(NullPrecedence.NULLS_LAST); + } + + /** + * Sets null precedence for all current sort columns. + * + * @return this instance, modified. + * @see #nullsFirst() + * @see #nullsLast() + */ + public Sort nullPrecedence(NullPrecedence nullPrecedence) { + for (Column column : columns) { + column.setNullPrecedence(nullPrecedence); + } + return this; + } + /** * Adds a sort column, in ascending order. * @@ -294,6 +400,70 @@ public Sort and(String name, Direction direction, NullPrecedence nullPrecedence) return this; } + /** + * Adds a sort column, in the given order, case-insensitive. + * + * @param name the new column to sort on, in the given order, case-insensitive. + * @param direction the direction to sort on. + * @return this instance, modified. + * @see #andIgnoreCase(String) + * @see #ignoreCase() + */ + public Sort andIgnoreCase(String name, Direction direction) { + Column col = new Column(name, direction); + col.setIgnoreCase(); + columns.add(col); + return this; + } + + /** + * Adds a sort column, in ascending order, case-insensitive. + * + * @param name the new column to sort on, in ascending order, case-insensitive. 
+ * @return this instance, modified. + * @see #andIgnoreCase(String, Direction) + * @see #ignoreCase() + */ + public Sort andIgnoreCase(String name) { + return andIgnoreCase(name, Direction.Ascending); + } + + /** + * Sets case-insensitive sorting for all current sort columns. + * + * @return this instance, modified. + * @see #ignoreCase() + * @see #respectCase() + */ + public Sort setCase(Case caseInstruction) { + for (Column column : columns) { + column.setCase(caseInstruction); + } + return this; + } + + /** + * Sets case-sensitive sorting for all current sort columns. + * + * @return this instance, modified. + * @see #ignoreCase() + * @see #setCase(Case) + */ + public Sort respectCase() { + return setCase(Case.RESPECT); + } + + /** + * Sets case-insensitive sorting for all current sort columns. + * + * @return this instance, modified. + * @see #respectCase() + * @see #setCase(Case) + */ + public Sort ignoreCase() { + return setCase(Case.IGNORE); + } + /** * Disables escaping of column names with a backticks during HQL Order By clause generation * diff --git a/extensions/panache/panache-hibernate-common/runtime/pom.xml b/extensions/panache/panache-hibernate-common/runtime/pom.xml index d2adbad237ab9..f72df16cb482a 100644 --- a/extensions/panache/panache-hibernate-common/runtime/pom.xml +++ b/extensions/panache/panache-hibernate-common/runtime/pom.xml @@ -36,6 +36,11 @@ jakarta.persistence jakarta.persistence-api + + jakarta.data + jakarta.data-api + true + org.junit.jupiter junit-jupiter-api diff --git a/extensions/panache/panache-hibernate-common/runtime/src/main/java/io/quarkus/panache/hibernate/common/runtime/PanacheJpaUtil.java b/extensions/panache/panache-hibernate-common/runtime/src/main/java/io/quarkus/panache/hibernate/common/runtime/PanacheJpaUtil.java index b255ed3c0c077..f5f3074f77985 100644 --- a/extensions/panache/panache-hibernate-common/runtime/src/main/java/io/quarkus/panache/hibernate/common/runtime/PanacheJpaUtil.java +++ 
b/extensions/panache/panache-hibernate-common/runtime/src/main/java/io/quarkus/panache/hibernate/common/runtime/PanacheJpaUtil.java @@ -170,11 +170,22 @@ public static String toOrderBy(Sort sort) { Sort.Column column = sort.getColumns().get(i); if (i > 0) sb.append(" , "); + + // Get the column name (escaped or not) + String columnRef; if (sort.isEscapingEnabled()) { - sb.append(escapeColumnName(column.getName())); + columnRef = escapeColumnName(column.getName()).toString(); + } else { + columnRef = column.getName(); + } + + // Wrap in LOWER() if case-insensitive + if (column.isIgnoreCase()) { + sb.append("LOWER(").append(columnRef).append(")"); } else { - sb.append(column.getName()); + sb.append(columnRef); } + if (column.getDirection() != Sort.Direction.Ascending) { sb.append(" DESC"); } @@ -191,6 +202,43 @@ public static String toOrderBy(Sort sort) { return sb.toString(); } + /** + * Convert Jakarta Data Order to Panache Sort. + * + * @param order the Jakarta Data order, may be null + * @return the Panache Sort, or null if order is null + */ + public static Sort toSort(jakarta.data.Order order) { + if (order == null) { + return null; + } + + if (order.sorts().isEmpty()) { + return Sort.empty(); + } + + Sort result = null; + for (jakarta.data.Sort jdSort : order.sorts()) { + String property = jdSort.property(); + Sort.Direction direction = jdSort.isAscending() + ? 
Sort.Direction.Ascending + : Sort.Direction.Descending; + + if (result == null) { + result = Sort.by(property, direction); + } else { + result = result.and(property, direction); + } + + // Preserve ignoreCase flag from Jakarta Data + if (jdSort.ignoreCase()) { + result.getColumns().get(result.getColumns().size() - 1).setIgnoreCase(); + } + } + + return result; + } + private static StringBuilder escapeColumnName(String columnName) { StringBuilder sb = new StringBuilder(); String[] path = columnName.split("\\."); diff --git a/extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/SpringDataJPAProcessor.java b/extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/SpringDataJPAProcessor.java index f8d503dc20bd1..ef0439876105b 100644 --- a/extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/SpringDataJPAProcessor.java +++ b/extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/SpringDataJPAProcessor.java @@ -34,17 +34,18 @@ import io.quarkus.arc.deployment.AdditionalBeanBuildItem; import io.quarkus.arc.deployment.GeneratedBeanBuildItem; -import io.quarkus.arc.deployment.GeneratedBeanGizmoAdaptor; +import io.quarkus.arc.deployment.GeneratedBeanGizmo2Adaptor; import io.quarkus.deployment.Feature; -import io.quarkus.deployment.GeneratedClassGizmoAdaptor; +import io.quarkus.deployment.GeneratedClassGizmo2Adaptor; import io.quarkus.deployment.annotations.BuildProducer; import io.quarkus.deployment.annotations.BuildStep; import io.quarkus.deployment.builditem.AdditionalIndexedClassesBuildItem; import io.quarkus.deployment.builditem.CombinedIndexBuildItem; import io.quarkus.deployment.builditem.FeatureBuildItem; import io.quarkus.deployment.builditem.GeneratedClassBuildItem; +import io.quarkus.deployment.builditem.GeneratedResourceBuildItem; import io.quarkus.deployment.builditem.nativeimage.ReflectiveClassBuildItem; -import 
io.quarkus.gizmo.ClassOutput; +import io.quarkus.gizmo2.ClassOutput; import io.quarkus.hibernate.orm.deployment.IgnorableNonIndexedClasses; import io.quarkus.hibernate.orm.deployment.JpaModelPersistenceUnitMappingBuildItem; import io.quarkus.hibernate.orm.deployment.spi.SqlLoadScriptDefaultBuildItem; @@ -122,6 +123,7 @@ void build(CombinedIndexBuildItem index, Optional jpaModelPersistenceUnitMapping, BuildProducer generatedClasses, BuildProducer generatedBeans, + BuildProducer generatedResources, BuildProducer additionalBeans, BuildProducer reflectiveClasses, BuildProducer entityToPersistenceUnit) { @@ -137,7 +139,8 @@ void build(CombinedIndexBuildItem index, addInterfacesExtendingIntermediateRepositories(indexView, interfacesExtendingRepository); removeNoRepositoryBeanClasses(interfacesExtendingRepository); - Set entities = implementCrudRepositories(generatedBeans, generatedClasses, additionalBeans, reflectiveClasses, + Set entities = implementCrudRepositories(generatedBeans, generatedClasses, generatedResources, additionalBeans, + reflectiveClasses, interfacesExtendingRepository, indexView); determineEntityPersistenceUnits(jpaModelPersistenceUnitMapping, entities, "Spring Data JPA") .forEach((e, pu) -> entityToPersistenceUnit.produce(new EntityToPersistenceUnitBuildItem(e, pu))); @@ -243,12 +246,13 @@ private Collection getAllNoRepositoryBeanInterfaces(IndexView index) { // generate a concrete class that will be used by Arc to resolve injection points private Set implementCrudRepositories(BuildProducer generatedBeans, BuildProducer generatedClasses, + BuildProducer generatedResources, BuildProducer additionalBeans, BuildProducer reflectiveClasses, Set crudRepositoriesToImplement, IndexView index) { - ClassOutput beansClassOutput = new GeneratedBeanGizmoAdaptor(generatedBeans); - ClassOutput otherClassOutput = new GeneratedClassGizmoAdaptor(generatedClasses, true); + ClassOutput beansClassOutput = new GeneratedBeanGizmo2Adaptor(generatedBeans); + ClassOutput 
otherClassOutput = new GeneratedClassGizmo2Adaptor(generatedClasses, generatedResources, true); SpringDataRepositoryCreator repositoryCreator = new SpringDataRepositoryCreator(beansClassOutput, otherClassOutput, index, (n) -> { diff --git a/extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/generate/AbstractMethodsAdder.java b/extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/generate/AbstractMethodsAdder.java index 3cc29a09a54a0..c5eadb4af688d 100644 --- a/extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/generate/AbstractMethodsAdder.java +++ b/extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/generate/AbstractMethodsAdder.java @@ -1,6 +1,7 @@ package io.quarkus.spring.data.deployment.generate; -import java.util.Collection; +import java.lang.constant.ClassDesc; +import java.lang.constant.ConstantDescs; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; @@ -23,14 +24,11 @@ import org.springframework.data.domain.SliceImpl; import org.springframework.data.jpa.repository.Modifying; -import io.quarkus.gizmo.BytecodeCreator; -import io.quarkus.gizmo.CatchBlockCreator; -import io.quarkus.gizmo.FieldDescriptor; -import io.quarkus.gizmo.FunctionCreator; -import io.quarkus.gizmo.MethodCreator; -import io.quarkus.gizmo.MethodDescriptor; -import io.quarkus.gizmo.ResultHandle; -import io.quarkus.gizmo.TryBlock; +import io.quarkus.gizmo2.Const; +import io.quarkus.gizmo2.Expr; +import io.quarkus.gizmo2.creator.BlockCreator; +import io.quarkus.gizmo2.desc.ClassMethodDesc; +import io.quarkus.gizmo2.desc.MethodDesc; import io.quarkus.hibernate.orm.panache.PanacheQuery; import io.quarkus.panache.common.Page; import io.quarkus.runtime.util.HashUtil; @@ -40,218 +38,217 @@ public abstract class AbstractMethodsAdder { - protected void handleLongReturnValue(BytecodeCreator methodCreator, ResultHandle resultHandle, 
DotName returnType) { + protected void handleLongReturnValue(BlockCreator bc, Expr resultHandle, DotName returnType) { if (DotNames.LONG.equals(returnType)) { // handle object Long return type - resultHandle = methodCreator.invokeStaticMethod( - MethodDescriptor.ofMethod(Long.class, "valueOf", Long.class, long.class), + resultHandle = bc.invokeStatic( + MethodDesc.of(Long.class, "valueOf", Long.class, long.class), resultHandle); } - methodCreator.returnValue(resultHandle); + bc.return_(resultHandle); } - protected void handleIntegerReturnValue(BytecodeCreator methodCreator, ResultHandle resultHandle, DotName returnType) { + protected void handleIntegerReturnValue(BlockCreator bc, Expr resultHandle, DotName returnType) { if (DotNames.INTEGER.equals(returnType)) { // handle object Integer return type - resultHandle = methodCreator.invokeStaticMethod( - MethodDescriptor.ofMethod(Integer.class, "valueOf", Integer.class, int.class), + resultHandle = bc.invokeStatic( + MethodDesc.of(Integer.class, "valueOf", Integer.class, int.class), resultHandle); } - methodCreator.returnValue(resultHandle); + bc.return_(resultHandle); } - protected void handleBooleanReturnValue(BytecodeCreator methodCreator, ResultHandle resultHandle, DotName returnType) { - if (DotNames.BOOLEAN.equals(returnType)) { // handle object Long return type - resultHandle = methodCreator.invokeStaticMethod( - MethodDescriptor.ofMethod(Boolean.class, "valueOf", Boolean.class, boolean.class), + protected void handleBooleanReturnValue(BlockCreator bc, Expr resultHandle, DotName returnType) { + if (DotNames.BOOLEAN.equals(returnType)) { // handle object Boolean return type + resultHandle = bc.invokeStatic( + MethodDesc.of(Boolean.class, "valueOf", Boolean.class, boolean.class), resultHandle); } - methodCreator.returnValue(resultHandle); + bc.return_(resultHandle); } - protected void generateFindQueryResultHandling(MethodCreator methodCreator, ResultHandle panacheQuery, - Integer pageableParameterIndex, ClassInfo 
repositoryClassInfo, ClassInfo entityClassInfo, + protected void generateFindQueryResultHandling(BlockCreator bc, Expr panacheQueryExpr, + Integer pageableParameterIndex, Expr[] methodParams, + ClassInfo repositoryClassInfo, ClassInfo entityClassInfo, DotName returnType, Integer limit, String methodName, DotName customResultType, String originalResultType) { - ResultHandle page = null; + // Store panacheQuery in a LocalVar so it can be used across nested blocks (try_, ifElse, etc.) + Expr panacheQuery = bc.localVar("panacheQuery", panacheQueryExpr); + + Expr page = null; if (limit != null) { // create a custom page object that will limit the results by the limit size - page = methodCreator.newInstance(MethodDescriptor.ofConstructor(Page.class, int.class), methodCreator.load(limit)); + page = bc.new_(ClassDesc.of(Page.class.getName()), Const.of(limit)); } else if (pageableParameterIndex != null) { - page = methodCreator.invokeStaticMethod( - MethodDescriptor.ofMethod(TypesConverter.class, "toPanachePage", Page.class, Pageable.class), - methodCreator.getMethodParam(pageableParameterIndex)); + page = bc.invokeStatic( + MethodDesc.of(TypesConverter.class, "toPanachePage", Page.class, Pageable.class), + methodParams[pageableParameterIndex]); } if (page != null) { - panacheQuery = methodCreator.invokeInterfaceMethod( - MethodDescriptor.ofMethod(PanacheQuery.class, "page", PanacheQuery.class, Page.class), - panacheQuery, page); + panacheQuery = bc.localVar("pagedPanacheQuery", + bc.invokeInterface( + MethodDesc.of(PanacheQuery.class, "page", PanacheQuery.class, Page.class), + panacheQuery, page)); } - if (returnType.equals(entityClassInfo.name())) { - // implement by issuing PanacheQuery.singleResult - // if there is one result return - // if there are no results (known due to NoResultException) return null - // if there are multiple results just let the relevant exception be thrown + // Need a final copy for use in lambdas + final Expr finalPanacheQuery = panacheQuery; - 
// when limit is specified we don't want to fail when there are multiple results, we just want to return the first one + if (returnType.equals(entityClassInfo.name())) { String panacheQueryMethodToUse = (limit != null) ? "firstResult" : "singleResult"; - TryBlock tryBlock = methodCreator.tryBlock(); - ResultHandle singleResult = tryBlock.invokeInterfaceMethod( - MethodDescriptor.ofMethod(PanacheQuery.class, panacheQueryMethodToUse, Object.class), - panacheQuery); + bc.try_(tc -> { + tc.body(tb -> { + Expr singleResult = tb.invokeInterface( + MethodDesc.of(PanacheQuery.class, panacheQueryMethodToUse, Object.class), + finalPanacheQuery); - ResultHandle casted = tryBlock.checkCast(singleResult, entityClassInfo.name().toString()); - tryBlock.returnValue(casted); - - CatchBlockCreator catchBlock = tryBlock.addCatch(NoResultException.class); - catchBlock.returnValue(catchBlock.loadNull()); + Expr casted = tb.cast(singleResult, ClassDesc.of(entityClassInfo.name().toString())); + tb.return_(casted); + }); + tc.catch_(NoResultException.class, "e", (cb, e) -> { + cb.return_(Const.ofNull(ClassDesc.of(entityClassInfo.name().toString()))); + }); + }); } else if (DotNames.OPTIONAL.equals(returnType)) { - // implement by issuing PanacheQuery.singleResult - // if there is one result return an Optional containing it - // if there are no results (known due to NoResultException) return empty Optional - // if there are multiple results just let the relevant exception be thrown - - // when limit is specified we don't want to fail when there are multiple results, we just want to return the first one String panacheQueryMethodToUse = (limit != null) ? 
"firstResult" : "singleResult"; - TryBlock tryBlock = methodCreator.tryBlock(); - ResultHandle singleResult = tryBlock.invokeInterfaceMethod( - MethodDescriptor.ofMethod(PanacheQuery.class, panacheQueryMethodToUse, Object.class), - panacheQuery); - - if (customResultType == null) { - ResultHandle casted = tryBlock.checkCast(singleResult, entityClassInfo.name().toString()); - ResultHandle optional = tryBlock.invokeStaticMethod( - MethodDescriptor.ofMethod(Optional.class, "ofNullable", Optional.class, Object.class), - casted); - tryBlock.returnValue(optional); - } else { - ResultHandle customResult = tryBlock.invokeStaticMethod( - MethodDescriptor.ofMethod(customResultType.toString(), "convert_" + methodName, - customResultType.toString(), - originalResultType), - singleResult); - ResultHandle optional = tryBlock.invokeStaticMethod( - MethodDescriptor.ofMethod(Optional.class, "ofNullable", Optional.class, Object.class), - customResult); - tryBlock.returnValue(optional); - } - CatchBlockCreator catchBlock = tryBlock.addCatch(NoResultException.class); - ResultHandle emptyOptional = catchBlock.invokeStaticMethod( - MethodDescriptor.ofMethod(Optional.class, "empty", Optional.class)); - catchBlock.returnValue(emptyOptional); + bc.try_(tc -> { + tc.body(tb -> { + Expr singleResult = tb.invokeInterface( + MethodDesc.of(PanacheQuery.class, panacheQueryMethodToUse, Object.class), + finalPanacheQuery); + + if (customResultType == null) { + Expr casted = tb.cast(singleResult, ClassDesc.of(entityClassInfo.name().toString())); + Expr optional = tb.invokeStatic( + MethodDesc.of(Optional.class, "ofNullable", Optional.class, Object.class), + casted); + tb.return_(optional); + } else { + Expr customResult = tb.invokeStatic( + ClassMethodDesc.of(ClassDesc.of(customResultType.toString()), "convert_" + methodName, + GenerationUtil.toMethodTypeDesc(customResultType.toString(), originalResultType)), + singleResult); + Expr optional = tb.invokeStatic( + MethodDesc.of(Optional.class, 
"ofNullable", Optional.class, Object.class), + customResult); + tb.return_(optional); + } + }); + tc.catch_(NoResultException.class, "e", (cb, e) -> { + Expr emptyOptional = cb.invokeStatic( + MethodDesc.of(Optional.class, "empty", Optional.class)); + cb.return_(emptyOptional); + }); + }); } else if (DotNames.LIST.equals(returnType) || DotNames.COLLECTION.equals(returnType) || DotNames.SET.equals(returnType) || DotNames.ITERATOR.equals(returnType) || DotNames.SPRING_DATA_PAGE.equals(returnType) || DotNames.SPRING_DATA_SLICE.equals(returnType)) { - ResultHandle list; + Expr list; if (customResultType == null) { - list = methodCreator.invokeInterfaceMethod( - MethodDescriptor.ofMethod(PanacheQuery.class, "list", List.class), - panacheQuery); + list = bc.invokeInterface( + MethodDesc.of(PanacheQuery.class, "list", List.class), + finalPanacheQuery); } else { - - ResultHandle stream = methodCreator.invokeInterfaceMethod( - MethodDescriptor.ofMethod(PanacheQuery.class, "stream", Stream.class), - panacheQuery); - - // Function to convert `originResultType` (Object[] or entity class) - // to the custom type (using the generated static convert method) - FunctionCreator customResultMappingFunction = methodCreator.createFunction(Function.class); - BytecodeCreator funcBytecode = customResultMappingFunction.getBytecode(); - ResultHandle obj = funcBytecode.invokeStaticMethod( - MethodDescriptor.ofMethod(customResultType.toString(), "convert_" + methodName, - customResultType.toString(), - originalResultType), - funcBytecode.getMethodParam(0)); - funcBytecode.returnValue(obj); - - stream = methodCreator.invokeInterfaceMethod( - MethodDescriptor.ofMethod(Stream.class, "map", Stream.class, Function.class), - stream, customResultMappingFunction.getInstance()); + Expr stream = bc.invokeInterface( + MethodDesc.of(PanacheQuery.class, "stream", Stream.class), + finalPanacheQuery); + + // Function to convert originResultType to the custom type + Expr mappingFunction = 
bc.lambda(Function.class, lc -> { + var param = lc.parameter("p", 0); + lc.body(lb -> { + Expr obj = lb.invokeStatic( + ClassMethodDesc.of(ClassDesc.of(customResultType.toString()), "convert_" + methodName, + GenerationUtil.toMethodTypeDesc(customResultType.toString(), originalResultType)), + param); + lb.return_(obj); + }); + }); + + stream = bc.invokeInterface( + MethodDesc.of(Stream.class, "map", Stream.class, Function.class), + stream, mappingFunction); // Re-collect the stream into a list - ResultHandle collector = methodCreator.invokeStaticMethod( - MethodDescriptor.ofMethod(Collectors.class, "toList", Collector.class)); - list = methodCreator.invokeInterfaceMethod( - MethodDescriptor.ofMethod(Stream.class, "collect", Object.class, Collector.class), + Expr collector = bc.invokeStatic( + MethodDesc.of(Collectors.class, "toList", Collector.class)); + Expr collected = bc.invokeInterface( + MethodDesc.of(Stream.class, "collect", Object.class, Collector.class), stream, collector); + list = bc.cast(collected, ConstantDescs.CD_List); } if (DotNames.ITERATOR.equals(returnType)) { - ResultHandle iterator = methodCreator.invokeInterfaceMethod( - MethodDescriptor.ofMethod(Iterable.class, "iterator", Iterator.class), + Expr iterator = bc.invokeInterface( + MethodDesc.of(Iterable.class, "iterator", Iterator.class), list); - methodCreator.returnValue(iterator); + bc.return_(iterator); } else if (DotNames.SET.equals(returnType)) { - ResultHandle set = methodCreator.newInstance( - MethodDescriptor.ofConstructor(LinkedHashSet.class, Collection.class), list); - methodCreator.returnValue(set); + Expr listAsCollection = bc.cast(list, ConstantDescs.CD_Collection); + Expr set = bc.new_(ClassDesc.of(LinkedHashSet.class.getName()), listAsCollection); + bc.return_(set); } else if (DotNames.SPRING_DATA_PAGE.equals(returnType)) { - ResultHandle pageResult; if (pageableParameterIndex != null) { - ResultHandle count = methodCreator.invokeInterfaceMethod( - 
MethodDescriptor.ofMethod(PanacheQuery.class, "count", long.class), - panacheQuery); - pageResult = methodCreator.newInstance( - MethodDescriptor.ofConstructor(PageImpl.class, List.class, Pageable.class, long.class), - list, methodCreator.getMethodParam(pageableParameterIndex), count); + Expr count = bc.invokeInterface( + MethodDesc.of(PanacheQuery.class, "count", long.class), + finalPanacheQuery); + Expr pageResult = bc.new_(ClassDesc.of(PageImpl.class.getName()), + list, methodParams[pageableParameterIndex], count); + bc.return_(pageResult); } else { - pageResult = methodCreator.newInstance(MethodDescriptor.ofConstructor(PageImpl.class, List.class), list); + Expr pageResult = bc.new_(ClassDesc.of(PageImpl.class.getName()), list); + bc.return_(pageResult); } - - methodCreator.returnValue(pageResult); } else if (DotNames.SPRING_DATA_SLICE.equals(returnType)) { - ResultHandle sliceResult; if (pageableParameterIndex != null) { - ResultHandle hasNextPage = methodCreator.invokeInterfaceMethod( - MethodDescriptor.ofMethod(PanacheQuery.class, "hasNextPage", boolean.class), - panacheQuery); - sliceResult = methodCreator.newInstance( - MethodDescriptor.ofConstructor(SliceImpl.class, List.class, Pageable.class, boolean.class), - list, methodCreator.getMethodParam(pageableParameterIndex), hasNextPage); + Expr hasNextPage = bc.invokeInterface( + MethodDesc.of(PanacheQuery.class, "hasNextPage", boolean.class), + finalPanacheQuery); + Expr sliceResult = bc.new_(ClassDesc.of(SliceImpl.class.getName()), + list, methodParams[pageableParameterIndex], hasNextPage); + bc.return_(sliceResult); } else { - sliceResult = methodCreator.newInstance(MethodDescriptor.ofConstructor(SliceImpl.class, List.class), list); + Expr sliceResult = bc.new_(ClassDesc.of(SliceImpl.class.getName()), list); + bc.return_(sliceResult); } - - methodCreator.returnValue(sliceResult); + } else { + bc.return_(list); } - methodCreator.returnValue(list); } else if (DotNames.STREAM.equals(returnType)) { - 
ResultHandle stream = methodCreator.invokeInterfaceMethod( - MethodDescriptor.ofMethod(PanacheQuery.class, "stream", Stream.class), - panacheQuery); - methodCreator.returnValue(stream); + Expr stream = bc.invokeInterface( + MethodDesc.of(PanacheQuery.class, "stream", Stream.class), + finalPanacheQuery); + bc.return_(stream); } else if (isHibernateSupportedReturnType(returnType)) { - ResultHandle singleResult = methodCreator.invokeInterfaceMethod( - MethodDescriptor.ofMethod(PanacheQuery.class, "singleResult", Object.class), - panacheQuery); - methodCreator.returnValue(singleResult); + Expr singleResult = bc.invokeInterface( + MethodDesc.of(PanacheQuery.class, "singleResult", Object.class), + finalPanacheQuery); + bc.return_(singleResult); } else if (customResultType != null) { - // when limit is specified we don't want to fail when there are multiple results, we just want to return the first one String panacheQueryMethodToUse = (limit != null) ? "firstResult" : "singleResult"; - TryBlock tryBlock = methodCreator.tryBlock(); - ResultHandle singleResult = tryBlock.invokeInterfaceMethod( - MethodDescriptor.ofMethod(PanacheQuery.class, panacheQueryMethodToUse, Object.class), - panacheQuery); - - ResultHandle customResult = tryBlock.invokeStaticMethod( - MethodDescriptor.ofMethod(customResultType.toString(), "convert_" + methodName, - customResultType.toString(), - originalResultType), - singleResult); - - tryBlock.returnValue(customResult); - - CatchBlockCreator catchBlock = tryBlock.addCatch(NoResultException.class); - catchBlock.returnValue(catchBlock.loadNull()); - - tryBlock.returnValue(customResult); + bc.try_(tc -> { + tc.body(tb -> { + Expr singleResult = tb.invokeInterface( + MethodDesc.of(PanacheQuery.class, panacheQueryMethodToUse, Object.class), + finalPanacheQuery); + + Expr customResult = tb.invokeStatic( + ClassMethodDesc.of(ClassDesc.of(customResultType.toString()), "convert_" + methodName, + GenerationUtil.toMethodTypeDesc(customResultType.toString(), 
originalResultType)), + singleResult); + + tb.return_(customResult); + }); + tc.catch_(NoResultException.class, "e", (cb, e) -> { + cb.return_(Const.ofNull(ConstantDescs.CD_Object)); + }); + }); } else { throw new IllegalArgumentException( "Return type of method " + methodName + " of Repository " + repositoryClassInfo @@ -263,14 +260,14 @@ protected void generateFindQueryResultHandling(MethodCreator methodCreator, Resu * Flush the underlying persistence context before executing the modifying query if enabled by {@link Modifying} * annotation. */ - protected void handleFlushAutomatically(AnnotationInstance modifyingAnnotation, MethodCreator methodCreator, - FieldDescriptor entityClassFieldDescriptor) { + protected void handleFlushAutomatically(AnnotationInstance modifyingAnnotation, BlockCreator bc, + Expr entityClassExpr) { final AnnotationValue flushAutomatically = modifyingAnnotation != null ? modifyingAnnotation.value("flushAutomatically") : null; if (flushAutomatically != null && flushAutomatically.asBoolean()) { - methodCreator.invokeStaticMethod( - MethodDescriptor.ofMethod(RepositorySupport.class, "flush", void.class, Class.class), - methodCreator.readInstanceField(entityClassFieldDescriptor, methodCreator.getThis())); + bc.invokeStatic( + MethodDesc.of(RepositorySupport.class, "flush", void.class, Class.class), + entityClassExpr); } } @@ -278,14 +275,14 @@ protected void handleFlushAutomatically(AnnotationInstance modifyingAnnotation, * Clear the underlying persistence context after executing the modifying query if enabled by {@link Modifying} * annotation. */ - protected void handleClearAutomatically(AnnotationInstance modifyingAnnotation, MethodCreator methodCreator, - FieldDescriptor entityClassFieldDescriptor) { + protected void handleClearAutomatically(AnnotationInstance modifyingAnnotation, BlockCreator bc, + Expr entityClassExpr) { final AnnotationValue clearAutomatically = modifyingAnnotation != null ? 
modifyingAnnotation.value("clearAutomatically") : null; if (clearAutomatically != null && clearAutomatically.asBoolean()) { - methodCreator.invokeStaticMethod( - MethodDescriptor.ofMethod(RepositorySupport.class, "clear", void.class, Class.class), - methodCreator.readInstanceField(entityClassFieldDescriptor, methodCreator.getThis())); + bc.invokeStatic( + MethodDesc.of(RepositorySupport.class, "clear", void.class, Class.class), + entityClassExpr); } } diff --git a/extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/generate/CustomQueryMethodsAdder.java b/extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/generate/CustomQueryMethodsAdder.java index 14eae12c421b7..3bd15c9daacfe 100644 --- a/extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/generate/CustomQueryMethodsAdder.java +++ b/extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/generate/CustomQueryMethodsAdder.java @@ -1,9 +1,10 @@ package io.quarkus.spring.data.deployment.generate; -import static io.quarkus.gizmo.FieldDescriptor.of; import static io.quarkus.spring.data.deployment.generate.GenerationUtil.getNamedQueryForMethod; import static java.util.function.Predicate.not; +import java.lang.constant.ClassDesc; +import java.lang.constant.MethodTypeDesc; import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.Collections; @@ -29,12 +30,16 @@ import org.jboss.jandex.Type; import io.quarkus.deployment.bean.JavaBeanUtil; -import io.quarkus.gizmo.ClassCreator; -import io.quarkus.gizmo.ClassOutput; -import io.quarkus.gizmo.FieldDescriptor; -import io.quarkus.gizmo.MethodCreator; -import io.quarkus.gizmo.MethodDescriptor; -import io.quarkus.gizmo.ResultHandle; +import io.quarkus.gizmo2.ClassOutput; +import io.quarkus.gizmo2.Const; +import io.quarkus.gizmo2.Expr; +import io.quarkus.gizmo2.Gizmo; +import io.quarkus.gizmo2.LocalVar; +import 
io.quarkus.gizmo2.ParamVar; +import io.quarkus.gizmo2.creator.BlockCreator; +import io.quarkus.gizmo2.creator.ClassCreator; +import io.quarkus.gizmo2.desc.FieldDesc; +import io.quarkus.gizmo2.desc.MethodDesc; import io.quarkus.hibernate.orm.panache.PanacheQuery; import io.quarkus.hibernate.orm.panache.common.runtime.AbstractManagedJpaOperations; import io.quarkus.hibernate.orm.panache.runtime.AdditionalJpaOperations; @@ -58,19 +63,20 @@ public class CustomQueryMethodsAdder extends AbstractMethodsAdder { private final IndexView index; private final ClassOutput nonBeansClassOutput; private final Consumer customClassCreatedCallback; - private final FieldDescriptor operationsField; + private final FieldDesc operationsField; - public CustomQueryMethodsAdder(IndexView index, ClassOutput classOutput, Consumer customClassCreatedCallback, + public CustomQueryMethodsAdder(IndexView index, ClassOutput classOutput, + Consumer customClassCreatedCallback, TypeBundle typeBundle) { this.index = index; this.nonBeansClassOutput = classOutput; this.customClassCreatedCallback = customClassCreatedCallback; String operationsName = typeBundle.operations().dotName().toString(); - operationsField = of(operationsName, "INSTANCE", operationsName); + operationsField = FieldDesc.of(ClassDesc.of(operationsName), "INSTANCE", ClassDesc.of(operationsName)); } - public void add(ClassCreator classCreator, FieldDescriptor entityClassFieldDescriptor, ClassInfo repositoryClassInfo, - ClassInfo entityClassInfo, String idTypeStr) { + public void add(ClassCreator classCreator, FieldDesc entityClassFieldDescriptor, ClassInfo repositoryClassInfo, + ClassInfo entityClassInfo, String idTypeStr, Set existingMethods) { // Remember custom return types: {resultType:{methodName:[fieldNames]}} Map>> customResultTypes = new HashMap<>(3); @@ -146,7 +152,7 @@ public void add(ClassCreator classCreator, FieldDescriptor entityClassFieldDescr // if no or only some parameters are annotated with @Param, add the compiled 
names (if present) if (namedParameterToIndex.size() < methodParameterTypes.size()) { for (int index = 0; index < methodParameterTypes.size(); index++) { - if (namedParameterToIndex.values().contains(index)) { + if (namedParameterToIndex.containsValue(index)) { continue; } String parameterName = method.parameterName(index); @@ -182,185 +188,215 @@ public void add(ClassCreator classCreator, FieldDescriptor entityClassFieldDescr DotName methodReturnTypeDotName = method.returnType().name(); - try (MethodCreator methodCreator = classCreator.getMethodCreator(method.name(), methodReturnTypeDotName.toString(), - methodParameterTypesStr)) { + // Need effectively final copies for use in lambdas + final Integer finalPageableParameterIndex = pageableParameterIndex; + final Integer finalSortParameterIndex = sortParameterIndex; + final String finalQueryString = queryString; + final Map finalNamedParameterToIndex = namedParameterToIndex; + + MethodTypeDesc mtd = GenerationUtil.toMethodTypeDesc(methodReturnTypeDotName.toString(), + methodParameterTypesStr); + + classCreator.method(method.name(), mc -> { + mc.setType(mtd); + + // Add @Transactional for modifying queries before calling body() if (isModifying) { - methodCreator.addAnnotation(Transactional.class); - AnnotationInstance modifyingAnnotation = method.annotation(DotNames.SPRING_DATA_MODIFYING); - handleFlushAutomatically(modifyingAnnotation, methodCreator, entityClassFieldDescriptor); - - if (queryString.toLowerCase().startsWith("delete")) { - if (!DotNames.PRIMITIVE_LONG.equals(methodReturnTypeDotName) - && !DotNames.LONG.equals(methodReturnTypeDotName) - && !DotNames.VOID.equals(methodReturnTypeDotName)) { - throw new IllegalArgumentException( - method.name() + " of Repository " + repositoryClassInfo - + " is meant to be a delete query and can therefore only have a void or long return type"); - } + mc.addAnnotation(Transactional.class); + } - // we need to strip 'delete' or else JpaOperations.delete will generate the 
wrong query - String deleteQueryString = queryString.substring("delete".length()); - ResultHandle deleteCount; - if (!namedParameterToIndex.isEmpty()) { - ResultHandle parameters = generateParametersObject(namedParameterToIndex, methodCreator); - - // call JpaOperations.delete - deleteCount = methodCreator.invokeVirtualMethod( - MethodDescriptor.ofMethod(AbstractManagedJpaOperations.class, "delete", long.class, - Class.class, String.class, Parameters.class), - methodCreator.readStaticField(operationsField), - methodCreator.readInstanceField(entityClassFieldDescriptor, methodCreator.getThis()), - methodCreator.load(deleteQueryString), parameters); - } else { - ResultHandle paramsArray = generateParamsArray(queryParameterIndexes, methodCreator); - - // call JpaOperations.delete - deleteCount = methodCreator.invokeVirtualMethod( - MethodDescriptor.ofMethod(AbstractManagedJpaOperations.class, "delete", long.class, - Class.class, String.class, Object[].class), - methodCreator.readStaticField(operationsField), - methodCreator.readInstanceField(entityClassFieldDescriptor, methodCreator.getThis()), - methodCreator.load(deleteQueryString), paramsArray); - } - handleClearAutomatically(modifyingAnnotation, methodCreator, entityClassFieldDescriptor); + // Declare parameters + ParamVar[] params = new ParamVar[methodParameterTypes.size()]; + for (int i = 0; i < methodParameterTypes.size(); i++) { + params[i] = mc.parameter("p" + i); + } - if (DotNames.VOID.equals(methodReturnTypeDotName)) { - methodCreator.returnValue(null); - } - handleLongReturnValue(methodCreator, deleteCount, methodReturnTypeDotName); + mc.body(bc -> { + // Store static field and instance field in LocalVars so they can be reused + LocalVar ops = bc.localVar("ops", bc.getStaticField(operationsField)); + LocalVar entityClass = bc.localVar("entityClass", + bc.get(mc.this_().field(entityClassFieldDescriptor))); + + if (isModifying) { + AnnotationInstance modifyingAnnotation = 
method.annotation(DotNames.SPRING_DATA_MODIFYING); + handleFlushAutomatically(modifyingAnnotation, bc, entityClass); + + if (finalQueryString.toLowerCase().startsWith("delete")) { + if (!DotNames.PRIMITIVE_LONG.equals(methodReturnTypeDotName) + && !DotNames.LONG.equals(methodReturnTypeDotName) + && !DotNames.VOID.equals(methodReturnTypeDotName)) { + throw new IllegalArgumentException( + method.name() + " of Repository " + repositoryClassInfo + + " is meant to be a delete query and can therefore only have a void or long return type"); + } - } else if (queryString.toLowerCase().startsWith("update")) { - if (!DotNames.PRIMITIVE_INTEGER.equals(methodReturnTypeDotName) - && !DotNames.INTEGER.equals(methodReturnTypeDotName) - && !DotNames.VOID.equals(methodReturnTypeDotName)) { - throw new IllegalArgumentException( - method.name() + " of Repository " + repositoryClassInfo - + " is meant to be an update query and can therefore only have a void or integer return type"); - } + // we need to strip 'delete' or else JpaOperations.delete will generate the wrong query + String deleteQueryString = finalQueryString.substring("delete".length()); + Expr deleteCount; + if (!finalNamedParameterToIndex.isEmpty()) { + Expr parameters = generateParametersObject(finalNamedParameterToIndex, bc, params); + + // call JpaOperations.delete + deleteCount = bc.invokeVirtual( + MethodDesc.of(AbstractManagedJpaOperations.class, "delete", long.class, + Class.class, String.class, Parameters.class), + ops, entityClass, + Const.of(deleteQueryString), parameters); + } else { + Expr paramsArray = generateParamsArray(queryParameterIndexes, bc, params); + + // call JpaOperations.delete + deleteCount = bc.invokeVirtual( + MethodDesc.of(AbstractManagedJpaOperations.class, "delete", long.class, + Class.class, String.class, Object[].class), + ops, entityClass, + Const.of(deleteQueryString), paramsArray); + } + handleClearAutomatically(modifyingAnnotation, bc, entityClass); - ResultHandle updateCount; - if 
(!namedParameterToIndex.isEmpty()) { - ResultHandle parameters = generateParametersObject(namedParameterToIndex, methodCreator); - ResultHandle parametersMap = methodCreator.invokeVirtualMethod( - MethodDescriptor.ofMethod(Parameters.class, "map", Map.class), - parameters); + if (DotNames.VOID.equals(methodReturnTypeDotName)) { + bc.return_(); + } else { + handleLongReturnValue(bc, deleteCount, methodReturnTypeDotName); + } - // call JpaOperations.executeUpdate - updateCount = methodCreator.invokeVirtualMethod( - MethodDescriptor.ofMethod(AbstractManagedJpaOperations.class, "executeUpdate", int.class, - String.class, Map.class), - methodCreator.readStaticField(operationsField), - methodCreator.load(queryString), - parametersMap); - } else { - ResultHandle paramsArray = generateParamsArray(queryParameterIndexes, methodCreator); - - // call JpaOperations.executeUpdate - updateCount = methodCreator.invokeVirtualMethod( - MethodDescriptor.ofMethod(AbstractManagedJpaOperations.class, "executeUpdate", - int.class, String.class, Object[].class), - methodCreator.readStaticField(operationsField), - methodCreator.load(queryString), - paramsArray); - } - handleClearAutomatically(modifyingAnnotation, methodCreator, entityClassFieldDescriptor); + } else if (finalQueryString.toLowerCase().startsWith("update")) { + if (!DotNames.PRIMITIVE_INTEGER.equals(methodReturnTypeDotName) + && !DotNames.INTEGER.equals(methodReturnTypeDotName) + && !DotNames.VOID.equals(methodReturnTypeDotName)) { + throw new IllegalArgumentException( + method.name() + " of Repository " + repositoryClassInfo + + " is meant to be an update query and can therefore only have a void or integer return type"); + } - if (DotNames.VOID.equals(methodReturnTypeDotName)) { - methodCreator.returnValue(null); - } - handleIntegerReturnValue(methodCreator, updateCount, methodReturnTypeDotName); + Expr updateCount; + if (!finalNamedParameterToIndex.isEmpty()) { + Expr parameters = 
generateParametersObject(finalNamedParameterToIndex, bc, params); + Expr parametersMap = bc.invokeVirtual( + MethodDesc.of(Parameters.class, "map", Map.class), + parameters); + + // call JpaOperations.executeUpdate + updateCount = bc.invokeVirtual( + MethodDesc.of(AbstractManagedJpaOperations.class, "executeUpdate", int.class, + String.class, Map.class), + ops, + Const.of(finalQueryString), + parametersMap); + } else { + Expr paramsArray = generateParamsArray(queryParameterIndexes, bc, params); + + // call JpaOperations.executeUpdate + updateCount = bc.invokeVirtual( + MethodDesc.of(AbstractManagedJpaOperations.class, "executeUpdate", + int.class, String.class, Object[].class), + ops, + Const.of(finalQueryString), + paramsArray); + } + handleClearAutomatically(modifyingAnnotation, bc, entityClass); + if (DotNames.VOID.equals(methodReturnTypeDotName)) { + bc.return_(); + } else { + handleIntegerReturnValue(bc, updateCount, methodReturnTypeDotName); + } + + } else { + throw new IllegalArgumentException( + method.name() + " of Repository " + repositoryClassInfo + + " has been annotated with @Modifying but the @Query does not appear to be " + + "a delete or update query"); + } } else { - throw new IllegalArgumentException( - method.name() + " of Repository " + repositoryClassInfo - + " has been annotated with @Modifying but the @Query does not appear to be " + - "a delete or update query"); - } - } else { - // by default just hope that adding select count(*) will do - String countQueryString = "SELECT COUNT(*) " + queryString; - if (queryInstance != null && queryInstance.value(QUERY_COUNT_FIELD) != null) { // if a countQuery is specified, use it - countQueryString = queryInstance.value(QUERY_COUNT_FIELD).asString().trim(); - } else { - // otherwise try and derive the select query from the method name and use that to construct the count query - MethodNameParser methodNameParser = new MethodNameParser(repositoryClassInfo, index); - try { - MethodNameParser.Result 
parseResult = methodNameParser.parse(method); - if (MethodNameParser.QueryType.SELECT == parseResult.getQueryType()) { - countQueryString = "SELECT COUNT (*) " + parseResult.getQuery(); + // by default just hope that adding select count(*) will do + String countQueryString = "SELECT COUNT(*) " + finalQueryString; + if (queryInstance != null && queryInstance.value(QUERY_COUNT_FIELD) != null) { // if a countQuery is specified, use it + countQueryString = queryInstance.value(QUERY_COUNT_FIELD).asString().trim(); + } else { + // otherwise try and derive the select query from the method name and use that to construct the count query + MethodNameParser methodNameParser = new MethodNameParser(repositoryClassInfo, index); + try { + MethodNameParser.Result parseResult = methodNameParser.parse(method); + if (MethodNameParser.QueryType.SELECT == parseResult.getQueryType()) { + countQueryString = "SELECT COUNT (*) " + parseResult.getQuery(); + } + } catch (Exception ignored) { + // we just ignore the exception if the method does not match one of the supported styles } - } catch (Exception ignored) { - // we just ignore the exception if the method does not match one of the supported styles } - } - // Find the type of data used in the result - // e.g. method.returnType() is a List that may contain non-entity elements - Type resultType = verifyQueryResultType(method.returnType(), index); - DotName customResultTypeName = resultType.name(); - - if (customResultTypeName.equals(entityClassInfo.name()) - || customResultTypeName.toString().equals(idTypeStr) - || isHibernateSupportedReturnType(customResultTypeName) - || getFieldTypeNames(entityClassInfo, entityFieldTypeNames).contains(customResultTypeName)) { - // no special handling needed - customResultTypeName = null; - } else { - // The result is using a custom type. 
- List fieldNames = getFieldNames(queryString); - - // If the custom type is an interface, we need to generate the implementation - ClassInfo resultClassInfo = index.getClassByName(customResultTypeName); - if (Modifier.isInterface(resultClassInfo.flags())) { - // Find the implementation name, and use that for subsequent query result generation - customResultTypeName = customResultTypeNames.computeIfAbsent(customResultTypeName, - (k) -> createSimpleInterfaceImpl(k, entityClassInfo.name())); - - // Remember the parameters for this usage of the custom type, we'll deal with it later - customResultTypes.computeIfAbsent(customResultTypeName, - k -> new HashMap<>()).put(methodName, fieldNames); + // Find the type of data used in the result + // e.g. method.returnType() is a List that may contain non-entity elements + Type resultType = verifyQueryResultType(method.returnType(), index); + DotName customResultTypeName = resultType.name(); + + if (customResultTypeName.equals(entityClassInfo.name()) + || customResultTypeName.toString().equals(idTypeStr) + || isHibernateSupportedReturnType(customResultTypeName) + || getFieldTypeNames(entityClassInfo, entityFieldTypeNames).contains(customResultTypeName)) { + // no special handling needed + customResultTypeName = null; } else { - throw new IllegalArgumentException( - "Query annotations may only use interfaces to map results to non-entity types. " - + "Offending query string is \"" + queryString + "\" on method " + methodName - + " of Repository " + repositoryName); + // The result is using a custom type. 
+ List fieldNames = getFieldNames(finalQueryString); + + // If the custom type is an interface, we need to generate the implementation + ClassInfo resultClassInfo = index.getClassByName(customResultTypeName); + if (Modifier.isInterface(resultClassInfo.flags())) { + // Find the implementation name, and use that for subsequent query result generation + customResultTypeName = customResultTypeNames.computeIfAbsent(customResultTypeName, + (k) -> createSimpleInterfaceImpl(k, entityClassInfo.name())); + + // Remember the parameters for this usage of the custom type, we'll deal with it later + customResultTypes.computeIfAbsent(customResultTypeName, + k -> new HashMap<>()).put(methodName, fieldNames); + } else { + throw new IllegalArgumentException( + "Query annotations may only use interfaces to map results to non-entity types. " + + "Offending query string is \"" + finalQueryString + "\" on method " + + methodName + + " of Repository " + repositoryName); + } } - } - ResultHandle panacheQuery; - if (!namedParameterToIndex.isEmpty()) { - ResultHandle parameters = generateParametersObject(namedParameterToIndex, methodCreator); + Expr panacheQuery; + if (!finalNamedParameterToIndex.isEmpty()) { + Expr parameters = generateParametersObject(finalNamedParameterToIndex, bc, params); + + // call JpaOperations.find() + panacheQuery = bc.invokeStatic( + MethodDesc.of(AdditionalJpaOperations.class, "find", + PanacheQuery.class, AbstractManagedJpaOperations.class, Class.class, String.class, + String.class, io.quarkus.panache.common.Sort.class, Parameters.class), + ops, entityClass, + Const.of(finalQueryString), Const.of(countQueryString), + generateSort(finalSortParameterIndex, finalPageableParameterIndex, bc, params), + parameters); - // call JpaOperations.find() - panacheQuery = methodCreator.invokeStaticMethod( - MethodDescriptor.ofMethod(AdditionalJpaOperations.class, "find", - PanacheQuery.class, AbstractManagedJpaOperations.class, Class.class, String.class, - String.class, 
io.quarkus.panache.common.Sort.class, Parameters.class), - methodCreator.readStaticField(operationsField), - methodCreator.readInstanceField(entityClassFieldDescriptor, methodCreator.getThis()), - methodCreator.load(queryString), methodCreator.load(countQueryString), - generateSort(sortParameterIndex, pageableParameterIndex, methodCreator), parameters); + } else { + Expr paramsArray = generateParamsArray(queryParameterIndexes, bc, params); + + // call JpaOperations.find() + panacheQuery = bc.invokeStatic( + MethodDesc.of(AdditionalJpaOperations.class, "find", + PanacheQuery.class, AbstractManagedJpaOperations.class, Class.class, String.class, + String.class, io.quarkus.panache.common.Sort.class, Object[].class), + ops, entityClass, + Const.of(finalQueryString), Const.of(countQueryString), + generateSort(finalSortParameterIndex, finalPageableParameterIndex, bc, params), + paramsArray); + } - } else { - ResultHandle paramsArray = generateParamsArray(queryParameterIndexes, methodCreator); - - // call JpaOperations.find() - panacheQuery = methodCreator.invokeStaticMethod( - MethodDescriptor.ofMethod(AdditionalJpaOperations.class, "find", - PanacheQuery.class, AbstractManagedJpaOperations.class, Class.class, String.class, - String.class, io.quarkus.panache.common.Sort.class, Object[].class), - methodCreator.readStaticField(operationsField), - methodCreator.readInstanceField(entityClassFieldDescriptor, methodCreator.getThis()), - methodCreator.load(queryString), methodCreator.load(countQueryString), - generateSort(sortParameterIndex, pageableParameterIndex, methodCreator), paramsArray); + generateFindQueryResultHandling(bc, panacheQuery, finalPageableParameterIndex, params, + repositoryClassInfo, entityClassInfo, methodReturnTypeDotName, null, method.name(), + customResultTypeName, + Object[].class.getName()); } - - generateFindQueryResultHandling(methodCreator, panacheQuery, pageableParameterIndex, repositoryClassInfo, - entityClassInfo, methodReturnTypeDotName, null, 
method.name(), customResultTypeName, - Object[].class.getName()); - } - - } + }); + }); + existingMethods.add(GenerationUtil.methodKey(method.name(), methodReturnTypeDotName.toString(), + methodParameterTypesStr)); } for (Map.Entry mapping : customResultTypeNames.entrySet().stream() @@ -396,42 +432,41 @@ private void verifyQueryAnnotation(AnnotationInstance queryInstance, String meth } } - private ResultHandle generateParamsArray(List queryParameterIndexes, MethodCreator methodCreator) { - ResultHandle paramsArray = methodCreator.newArray(Object.class, queryParameterIndexes.size()); + private Expr generateParamsArray(List queryParameterIndexes, BlockCreator bc, ParamVar[] params) { + LocalVar paramsArray = bc.localVar("paramsArray", bc.newEmptyArray(Object.class, queryParameterIndexes.size())); for (int i = 0; i < queryParameterIndexes.size(); i++) { - methodCreator.writeArrayValue(paramsArray, methodCreator.load(i), - methodCreator.getMethodParam(queryParameterIndexes.get(i))); + bc.set(paramsArray.elem(i), params[queryParameterIndexes.get(i)]); } return paramsArray; } - private ResultHandle generateParametersObject(Map namedParameterToIndex, MethodCreator methodCreator) { - ResultHandle parameters = methodCreator.newInstance(MethodDescriptor.ofConstructor(Parameters.class)); + private Expr generateParametersObject(Map namedParameterToIndex, BlockCreator bc, ParamVar[] params) { + LocalVar parameters = bc.localVar("parameters", bc.new_(ClassDesc.of(Parameters.class.getName()))); for (Map.Entry entry : namedParameterToIndex.entrySet().stream() .sorted(Map.Entry.comparingByKey()).toList()) { - methodCreator.invokeVirtualMethod( - MethodDescriptor.ofMethod(Parameters.class, "and", Parameters.class, + bc.invokeVirtual( + MethodDesc.of(Parameters.class, "and", Parameters.class, String.class, Object.class), - parameters, methodCreator.load(entry.getKey()), methodCreator.getMethodParam(entry.getValue())); + parameters, Const.of(entry.getKey()), params[entry.getValue()]); 
} return parameters; } // ensure that Sort is correctly handled whether it's specified from the method name or a method param - private ResultHandle generateSort(Integer sortParameterIndex, Integer pageableParameterIndex, MethodCreator methodCreator) { - ResultHandle sort = methodCreator.loadNull(); + private Expr generateSort(Integer sortParameterIndex, Integer pageableParameterIndex, BlockCreator bc, ParamVar[] params) { + Expr sort = Const.ofNull(ClassDesc.of(io.quarkus.panache.common.Sort.class.getName())); if (sortParameterIndex != null) { - sort = methodCreator.invokeStaticMethod( - MethodDescriptor.ofMethod(TypesConverter.class, "toPanacheSort", + sort = bc.invokeStatic( + MethodDesc.of(TypesConverter.class, "toPanacheSort", io.quarkus.panache.common.Sort.class, org.springframework.data.domain.Sort.class), - methodCreator.getMethodParam(sortParameterIndex)); + params[sortParameterIndex]); } else if (pageableParameterIndex != null) { - sort = methodCreator.invokeStaticMethod( - MethodDescriptor.ofMethod(TypesConverter.class, "pageToPanacheSort", + sort = bc.invokeStatic( + MethodDesc.of(TypesConverter.class, "pageToPanacheSort", io.quarkus.panache.common.Sort.class, org.springframework.data.domain.Pageable.class), - methodCreator.getMethodParam(pageableParameterIndex)); + params[pageableParameterIndex]); } return sort; } @@ -466,11 +501,14 @@ private void generateCustomResultTypes(DotName interfaceName, DotName implName, ClassInfo interfaceInfo = index.getClassByName(interfaceName); - try (ClassCreator implClassCreator = ClassCreator.builder().classOutput(nonBeansClassOutput) - .interfaces(interfaceName.toString()).className(implName.toString()) - .build()) { + Gizmo gizmo = Gizmo.create(nonBeansClassOutput); + gizmo.class_(implName.toString(), implClassCreator -> { + implClassCreator.implements_(ClassDesc.of(interfaceName.toString())); + + // Add default constructor + implClassCreator.defaultConstructor(); - Map fields = new HashMap<>(3); + Map fields = new 
HashMap<>(3); for (MethodInfo method : interfaceInfo.methods()) { String getterName = method.name(); @@ -483,14 +521,19 @@ private void generateCustomResultTypes(DotName interfaceName, DotName implName, } DotName fieldTypeName = getPrimitiveTypeName(returnType.name()); - FieldDescriptor field = implClassCreator.getFieldCreator(propertyName, fieldTypeName.toString()) - .getFieldDescriptor(); + FieldDesc field = implClassCreator.field(propertyName, ifc -> { + ifc.setType(GenerationUtil.toClassDesc(fieldTypeName.toString())); + }); // create getter (based on the interface) - try (MethodCreator getter = implClassCreator.getMethodCreator(getterName, returnType.name().toString())) { - getter.setModifiers(Modifier.PUBLIC); - getter.returnValue(getter.readInstanceField(field, getter.getThis())); - } + MethodTypeDesc getterMtd = GenerationUtil.toMethodTypeDesc(returnType.name().toString()); + implClassCreator.method(getterName, mc -> { + mc.setType(getterMtd); + mc.public_(); + mc.body(bc -> { + bc.return_(bc.get(mc.this_().field(field))); + }); + }); fields.put(propertyName.toLowerCase(), field); } @@ -498,44 +541,47 @@ private void generateCustomResultTypes(DotName interfaceName, DotName implName, // Add static methods to convert from Object[] to this type for (Map.Entry> queryMethod : queryMethods.entrySet().stream() .sorted(Map.Entry.comparingByKey()).toList()) { - try (MethodCreator convert = implClassCreator.getMethodCreator("convert_" + queryMethod.getKey(), - implName.toString(), Object[].class.getName())) { - convert.setModifiers(Modifier.STATIC | Modifier.PUBLIC); - - ResultHandle newObject = convert.newInstance(MethodDescriptor.ofConstructor(implName.toString())); - - // Use field names in the query-declared order - List queryNames = queryMethod.getValue(); - - // Object[] is the only parameter: values are in column/declared order - ResultHandle array = convert.getMethodParam(0); - - for (int i = 0; i < queryNames.size(); i++) { - FieldDescriptor f = 
fields.get(queryNames.get(i)); - if (f == null) { - throw new IllegalArgumentException("@Query annotation for " + queryMethod.getKey() - + " does not use fields from " + interfaceName); - } else { - convert.writeInstanceField(f, newObject, - castReturnValue(convert, convert.readArrayValue(array, i), f.getType())); + MethodTypeDesc convertMtd = GenerationUtil.toMethodTypeDesc(implName.toString(), Object[].class.getName()); + implClassCreator.staticMethod("convert_" + queryMethod.getKey(), smc -> { + smc.setType(convertMtd); + smc.public_(); + ParamVar arrayParam = smc.parameter("input"); + + smc.body(bc -> { + LocalVar newObject = bc.localVar("newObject", + bc.new_(ClassDesc.of(implName.toString()))); + + // Use field names in the query-declared order + List queryNames = queryMethod.getValue(); + + for (int i = 0; i < queryNames.size(); i++) { + FieldDesc f = fields.get(queryNames.get(i)); + if (f == null) { + throw new IllegalArgumentException("@Query annotation for " + queryMethod.getKey() + + " does not use fields from " + interfaceName); + } else { + bc.set(newObject.field(f), + castReturnValue(bc, arrayParam.elem(i), f.type())); + } } - } - convert.returnValue(newObject); - } + bc.return_(newObject); + }); + }); } - } + }); } - private ResultHandle castReturnValue(MethodCreator methodCreator, ResultHandle resultHandle, String type) { - switch (type) { + private Expr castReturnValue(BlockCreator bc, Expr resultHandle, ClassDesc type) { + String typeDesc = type.descriptorString(); + switch (typeDesc) { case "I": - resultHandle = methodCreator.invokeStaticMethod( - MethodDescriptor.ofMethod(Integer.class, "valueOf", Integer.class, int.class), + resultHandle = bc.invokeStatic( + MethodDesc.of(Integer.class, "valueOf", Integer.class, int.class), resultHandle); break; case "J": - resultHandle = methodCreator.invokeStaticMethod( - MethodDescriptor.ofMethod(Long.class, "valueOf", Long.class, long.class), + resultHandle = bc.invokeStatic( + MethodDesc.of(Long.class, 
"valueOf", Long.class, long.class), resultHandle); break; } diff --git a/extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/generate/DerivedMethodsAdder.java b/extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/generate/DerivedMethodsAdder.java index 8009cbbf075ad..14f48313430a8 100644 --- a/extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/generate/DerivedMethodsAdder.java +++ b/extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/generate/DerivedMethodsAdder.java @@ -1,8 +1,9 @@ package io.quarkus.spring.data.deployment.generate; -import static io.quarkus.gizmo.FieldDescriptor.of; import static io.quarkus.spring.data.deployment.generate.GenerationUtil.getNamedQueryForMethod; +import java.lang.constant.ClassDesc; +import java.lang.constant.MethodTypeDesc; import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.HashMap; @@ -26,12 +27,16 @@ import io.quarkus.deployment.bean.JavaBeanUtil; import io.quarkus.deployment.util.JandexUtil; -import io.quarkus.gizmo.ClassCreator; -import io.quarkus.gizmo.ClassOutput; -import io.quarkus.gizmo.FieldDescriptor; -import io.quarkus.gizmo.MethodCreator; -import io.quarkus.gizmo.MethodDescriptor; -import io.quarkus.gizmo.ResultHandle; +import io.quarkus.gizmo2.ClassOutput; +import io.quarkus.gizmo2.Const; +import io.quarkus.gizmo2.Expr; +import io.quarkus.gizmo2.Gizmo; +import io.quarkus.gizmo2.LocalVar; +import io.quarkus.gizmo2.ParamVar; +import io.quarkus.gizmo2.creator.ClassCreator; +import io.quarkus.gizmo2.desc.ClassMethodDesc; +import io.quarkus.gizmo2.desc.FieldDesc; +import io.quarkus.gizmo2.desc.MethodDesc; import io.quarkus.hibernate.orm.panache.common.runtime.AbstractManagedJpaOperations; import io.quarkus.hibernate.orm.panache.runtime.AdditionalJpaOperations; import io.quarkus.panache.common.deployment.TypeBundle; @@ -44,7 +49,7 @@ public class 
DerivedMethodsAdder extends AbstractMethodsAdder { private final IndexView index; private final String operationsName; - private final FieldDescriptor operationsField; + private final FieldDesc operationsField; private final ClassOutput nonBeansClassOutput; private final Consumer projectionClassCreatedCallback; @@ -52,13 +57,14 @@ public DerivedMethodsAdder(IndexView index, TypeBundle typeBundle, ClassOutput n Consumer projectionClassCreatedCallback) { this.index = index; operationsName = typeBundle.operations().dotName().toString(); - operationsField = of(operationsName, "INSTANCE", operationsName); + operationsField = FieldDesc.of(ClassDesc.of(operationsName), "INSTANCE", ClassDesc.of(operationsName)); this.nonBeansClassOutput = nonBeansClassOutput; this.projectionClassCreatedCallback = projectionClassCreatedCallback; } - public void add(ClassCreator classCreator, FieldDescriptor entityClassFieldDescriptor, - String generatedClassName, ClassInfo repositoryClassInfo, ClassInfo entityClassInfo) { + public void add(ClassCreator classCreator, FieldDesc entityClassFieldDescriptor, + String generatedClassName, ClassInfo repositoryClassInfo, ClassInfo entityClassInfo, + Set existingMethods) { MethodNameParser methodNameParser = new MethodNameParser(entityClassInfo, index); LinkedHashSet repoMethods = new LinkedHashSet<>(repositoryClassInfo.methods()); @@ -80,7 +86,8 @@ public void add(ClassCreator classCreator, FieldDescriptor entityClassFieldDescr continue; } - if (classCreator.getExistingMethods().contains(GenerationUtil.toMethodDescriptor(generatedClassName, method))) { + String methodKey = GenerationUtil.methodKey(generatedClassName, method); + if (existingMethods.contains(methodKey)) { continue; } @@ -125,162 +132,188 @@ public void add(ClassCreator classCreator, FieldDescriptor entityClassFieldDescr + " does not match the number of parameter needed (inferred from the method name)"); } - try (MethodCreator methodCreator = 
classCreator.getMethodCreator(method.name(), returnType.name().toString(), - parameterTypesStr)) { - ResultHandle paramsArray = methodCreator.newArray(Object.class, parseResult.getParamCount()); - for (int i = 0; i < queryParameterIndexes.size(); i++) { - methodCreator.writeArrayValue(paramsArray, methodCreator.load(i), - methodCreator.getMethodParam(queryParameterIndexes.get(i))); + // Need effectively final copies for use in lambdas + final Integer finalPageableParameterIndex = pageableParameterIndex; + final Integer finalSortParameterIndex = sortParameterIndex; + + MethodTypeDesc mtd = GenerationUtil.toMethodTypeDesc(returnType.name().toString(), parameterTypesStr); + classCreator.method(method.name(), mc -> { + mc.setType(mtd); + + // Add @Transactional for delete queries before calling body() + if (parseResult.getQueryType() == MethodNameParser.QueryType.DELETE) { + mc.addAnnotation(Transactional.class); } - if (parseResult.getQueryType() == MethodNameParser.QueryType.SELECT) { - if (parseResult.getSort() != null && sortParameterIndex != null) { - throw new IllegalArgumentException( - method.name() + " of Repository " + repositoryClassInfo + " contains both a " - + DotNames.SPRING_DATA_SORT + " parameter and a sort operation"); - } + // Declare parameters + ParamVar[] params = new ParamVar[parameters.size()]; + for (int i = 0; i < parameters.size(); i++) { + params[i] = mc.parameter("p" + i); + } - // ensure that Sort is correctly handled whether it's specified in the method name or via a Sort method param - String finalQuery = parseResult.getQuery(); - ResultHandle sort = methodCreator.loadNull(); - if (sortParameterIndex != null) { - sort = methodCreator.invokeStaticMethod( - MethodDescriptor.ofMethod(TypesConverter.class, "toPanacheSort", - io.quarkus.panache.common.Sort.class, - org.springframework.data.domain.Sort.class), - methodCreator.getMethodParam(sortParameterIndex)); - } else if (parseResult.getSort() != null) { - finalQuery += 
PanacheJpaUtil.toOrderBy(parseResult.getSort()); - } else if (pageableParameterIndex != null) { - ResultHandle pageable = methodCreator.getMethodParam(pageableParameterIndex); - ResultHandle pageableSort = methodCreator.invokeInterfaceMethod( - MethodDescriptor.ofMethod(Pageable.class, "getSort", Sort.class), - pageable); - sort = methodCreator.invokeStaticMethod( - MethodDescriptor.ofMethod(TypesConverter.class, "toPanacheSort", - io.quarkus.panache.common.Sort.class, - org.springframework.data.domain.Sort.class), - pageableSort); + mc.body(bc -> { + // Store static field and instance field in LocalVars so they can be reused + LocalVar ops = bc.localVar("ops", bc.getStaticField(operationsField)); + LocalVar entityClass = bc.localVar("entityClass", + bc.get(mc.this_().field(entityClassFieldDescriptor))); + + // Build params array for query parameters + LocalVar paramsArray = bc.localVar("paramsArray", + bc.newEmptyArray(Object.class, parseResult.getParamCount())); + for (int i = 0; i < queryParameterIndexes.size(); i++) { + bc.set(paramsArray.elem(i), params[queryParameterIndexes.get(i)]); } - // call JpaOperations.find() - ResultHandle panacheQuery = methodCreator.invokeVirtualMethod( - MethodDescriptor.ofMethod(AbstractManagedJpaOperations.class, "find", Object.class, - Class.class, String.class, io.quarkus.panache.common.Sort.class, Object[].class), - methodCreator.readStaticField(operationsField), - methodCreator.readInstanceField(entityClassFieldDescriptor, methodCreator.getThis()), - methodCreator.load(finalQuery), sort, paramsArray); - - Type resultType = extractResultType(repositoryClassInfo, method); - - DotName customResultTypeName = resultType.name(); - - if (customResultTypeName.equals(entityClassInfo.name()) - || isHibernateSupportedReturnType(customResultTypeName)) { - // no special handling needed - customResultTypeName = null; - } else { - // If the custom type is an interface, we need to generate the implementation - ClassInfo resultClassInfo = 
index.getClassByName(customResultTypeName); - if (Modifier.isInterface(resultClassInfo.flags())) { - // Find the implementation name, and use that for subsequent query result generation - customResultTypeName = customResultTypeImplNames.computeIfAbsent(customResultTypeName, - k -> createSimpleInterfaceImpl(k, entityClassInfo.name())); - - // Remember the parameters for this usage of the custom type, we'll deal with it later - customResultTypes.computeIfAbsent(customResultTypeName, - k -> new ArrayList<>()).add(method.name()); + if (parseResult.getQueryType() == MethodNameParser.QueryType.SELECT) { + if (parseResult.getSort() != null && finalSortParameterIndex != null) { + throw new IllegalArgumentException( + method.name() + " of Repository " + repositoryClassInfo + " contains both a " + + DotNames.SPRING_DATA_SORT + " parameter and a sort operation"); + } + + // ensure that Sort is correctly handled whether it's specified in the method name or via a Sort method param + String finalQuery = parseResult.getQuery(); + Expr sort = Const.ofNull(ClassDesc.of(io.quarkus.panache.common.Sort.class.getName())); + if (finalSortParameterIndex != null) { + sort = bc.invokeStatic( + MethodDesc.of(TypesConverter.class, "toPanacheSort", + io.quarkus.panache.common.Sort.class, + org.springframework.data.domain.Sort.class), + params[finalSortParameterIndex]); + } else if (parseResult.getSort() != null) { + finalQuery += PanacheJpaUtil.toOrderBy(parseResult.getSort()); + } else if (finalPageableParameterIndex != null) { + Expr pageable = params[finalPageableParameterIndex]; + Expr pageableSort = bc.invokeInterface( + MethodDesc.of(Pageable.class, "getSort", Sort.class), + pageable); + sort = bc.invokeStatic( + MethodDesc.of(TypesConverter.class, "toPanacheSort", + io.quarkus.panache.common.Sort.class, + org.springframework.data.domain.Sort.class), + pageableSort); + } + + // call JpaOperations.find() + Expr panacheQuery = bc.invokeVirtual( + 
MethodDesc.of(AbstractManagedJpaOperations.class, "find", Object.class, + Class.class, String.class, io.quarkus.panache.common.Sort.class, Object[].class), + ops, entityClass, + Const.of(finalQuery), sort, paramsArray); + + Type resultType = extractResultType(repositoryClassInfo, method); + + DotName customResultTypeName = resultType.name(); + + if (customResultTypeName.equals(entityClassInfo.name()) + || isHibernateSupportedReturnType(customResultTypeName)) { + // no special handling needed + customResultTypeName = null; } else { + // If the custom type is an interface, we need to generate the implementation + ClassInfo resultClassInfo = index.getClassByName(customResultTypeName); + if (Modifier.isInterface(resultClassInfo.flags())) { + // Find the implementation name, and use that for subsequent query result generation + customResultTypeName = customResultTypeImplNames.computeIfAbsent(customResultTypeName, + k -> createSimpleInterfaceImpl(k, entityClassInfo.name())); + + // Remember the parameters for this usage of the custom type, we'll deal with it later + customResultTypes.computeIfAbsent(customResultTypeName, + k -> new ArrayList<>()).add(method.name()); + } else { + throw new IllegalArgumentException( + method.name() + " of Repository " + repositoryClassInfo + + " can only use interfaces to map results to non-entity types."); + } + } + + generateFindQueryResultHandling(bc, panacheQuery, finalPageableParameterIndex, params, + repositoryClassInfo, entityClassInfo, returnType.name(), parseResult.getTopCount(), + method.name(), customResultTypeName, + entityClassInfo.name().toString()); + + } else if (parseResult.getQueryType() == MethodNameParser.QueryType.COUNT) { + if (!DotNames.PRIMITIVE_LONG.equals(returnType.name()) + && !DotNames.LONG.equals(returnType.name())) { throw new IllegalArgumentException( method.name() + " of Repository " + repositoryClassInfo - + " can only use interfaces to map results to non-entity types."); + + " is meant to be a count query 
and can therefore only have a long return type"); + } + if ((finalSortParameterIndex != null) || finalPageableParameterIndex != null) { + throw new IllegalArgumentException( + method.name() + " of Repository " + repositoryClassInfo + + " is meant to be a count query and therefore doesn't " + + "support Pageable and Sort method parameters"); } - } - generateFindQueryResultHandling(methodCreator, panacheQuery, pageableParameterIndex, repositoryClassInfo, - entityClassInfo, returnType.name(), parseResult.getTopCount(), method.name(), customResultTypeName, - entityClassInfo.name().toString()); + // call JpaOperations.count() + Expr count = bc.invokeVirtual( + MethodDesc.of(AbstractManagedJpaOperations.class, "count", long.class, + Class.class, String.class, Object[].class), + ops, entityClass, + Const.of(parseResult.getQuery()), paramsArray); - } else if (parseResult.getQueryType() == MethodNameParser.QueryType.COUNT) { - if (!DotNames.PRIMITIVE_LONG.equals(returnType.name()) && !DotNames.LONG.equals(returnType.name())) { - throw new IllegalArgumentException( - method.name() + " of Repository " + repositoryClassInfo - + " is meant to be a count query and can therefore only have a long return type"); - } - if ((sortParameterIndex != null) || pageableParameterIndex != null) { - throw new IllegalArgumentException( - method.name() + " of Repository " + repositoryClassInfo - + " is meant to be a count query and therefore doesn't " + - "support Pageable and Sort method parameters"); - } + handleLongReturnValue(bc, count, returnType.name()); - // call JpaOperations.count() - ResultHandle count = methodCreator.invokeVirtualMethod( - MethodDescriptor.ofMethod(AbstractManagedJpaOperations.class, "count", long.class, - Class.class, String.class, Object[].class), - methodCreator.readStaticField(operationsField), - methodCreator.readInstanceField(entityClassFieldDescriptor, methodCreator.getThis()), - methodCreator.load(parseResult.getQuery()), paramsArray); - - 
handleLongReturnValue(methodCreator, count, returnType.name()); - - } else if (parseResult.getQueryType() == MethodNameParser.QueryType.EXISTS) { - if (!DotNames.PRIMITIVE_BOOLEAN.equals(returnType.name()) && !DotNames.BOOLEAN.equals(returnType.name())) { - throw new IllegalArgumentException( - method.name() + " of Repository " + repositoryClassInfo - + " is meant to be an exists query and can therefore only have a boolean return type"); - } - if ((sortParameterIndex != null) || pageableParameterIndex != null) { - throw new IllegalArgumentException( - method.name() + " of Repository " + repositoryClassInfo - + " is meant to be a count query and therefore doesn't " + - "support Pageable and Sort method parameters"); - } + } else if (parseResult.getQueryType() == MethodNameParser.QueryType.EXISTS) { + if (!DotNames.PRIMITIVE_BOOLEAN.equals(returnType.name()) + && !DotNames.BOOLEAN.equals(returnType.name())) { + throw new IllegalArgumentException( + method.name() + " of Repository " + repositoryClassInfo + + " is meant to be an exists query and can therefore only have a boolean return type"); + } + if ((finalSortParameterIndex != null) || finalPageableParameterIndex != null) { + throw new IllegalArgumentException( + method.name() + " of Repository " + repositoryClassInfo + + " is meant to be a count query and therefore doesn't " + + "support Pageable and Sort method parameters"); + } - // call JpaOperations.exists() - ResultHandle exists = methodCreator.invokeVirtualMethod( - MethodDescriptor.ofMethod(AbstractManagedJpaOperations.class, "exists", boolean.class, - Class.class, String.class, Object[].class), - methodCreator.readStaticField(operationsField), - methodCreator.readInstanceField(entityClassFieldDescriptor, methodCreator.getThis()), - methodCreator.load(parseResult.getQuery()), paramsArray); - - handleBooleanReturnValue(methodCreator, exists, returnType.name()); - - } else if (parseResult.getQueryType() == MethodNameParser.QueryType.DELETE) { - if 
(!DotNames.PRIMITIVE_LONG.equals(returnType.name()) && !DotNames.LONG.equals(returnType.name()) - && !DotNames.VOID.equals(returnType.name())) { - throw new IllegalArgumentException( - method.name() + " of Repository " + repositoryClassInfo - + " is meant to be a delete query and can therefore only have a void or long return type"); - } - if ((sortParameterIndex != null) || pageableParameterIndex != null) { - throw new IllegalArgumentException( - method.name() + " of Repository " + repositoryClassInfo - + " is meant to be a delete query and therefore doesn't " + - "support Pageable and Sort method parameters"); - } - methodCreator.addAnnotation(Transactional.class); + // call JpaOperations.exists() + Expr exists = bc.invokeVirtual( + MethodDesc.of(AbstractManagedJpaOperations.class, "exists", boolean.class, + Class.class, String.class, Object[].class), + ops, entityClass, + Const.of(parseResult.getQuery()), paramsArray); - AnnotationInstance modifyingAnnotation = method.annotation(DotNames.SPRING_DATA_MODIFYING); - handleFlushAutomatically(modifyingAnnotation, methodCreator, entityClassFieldDescriptor); + handleBooleanReturnValue(bc, exists, returnType.name()); - // call JpaOperations.delete() - ResultHandle delete = methodCreator.invokeStaticMethod( - MethodDescriptor.ofMethod(AdditionalJpaOperations.class, "deleteWithCascade", - long.class, AbstractManagedJpaOperations.class, Class.class, String.class, Object[].class), - methodCreator.readStaticField(operationsField), - methodCreator.readInstanceField(entityClassFieldDescriptor, methodCreator.getThis()), - methodCreator.load(parseResult.getQuery()), paramsArray); + } else if (parseResult.getQueryType() == MethodNameParser.QueryType.DELETE) { + if (!DotNames.PRIMITIVE_LONG.equals(returnType.name()) && !DotNames.LONG.equals(returnType.name()) + && !DotNames.VOID.equals(returnType.name())) { + throw new IllegalArgumentException( + method.name() + " of Repository " + repositoryClassInfo + + " is meant to be a delete 
query and can therefore only have a void or long return type"); + } + if ((finalSortParameterIndex != null) || finalPageableParameterIndex != null) { + throw new IllegalArgumentException( + method.name() + " of Repository " + repositoryClassInfo + + " is meant to be a delete query and therefore doesn't " + + "support Pageable and Sort method parameters"); + } - handleClearAutomatically(modifyingAnnotation, methodCreator, entityClassFieldDescriptor); + AnnotationInstance modifyingAnnotation = method.annotation(DotNames.SPRING_DATA_MODIFYING); + handleFlushAutomatically(modifyingAnnotation, bc, entityClass); - if (DotNames.VOID.equals(returnType.name())) { - methodCreator.returnValue(null); + // call JpaOperations.delete() + Expr delete = bc.invokeStatic( + MethodDesc.of(AdditionalJpaOperations.class, "deleteWithCascade", + long.class, AbstractManagedJpaOperations.class, Class.class, String.class, + Object[].class), + ops, entityClass, + Const.of(parseResult.getQuery()), paramsArray); + + handleClearAutomatically(modifyingAnnotation, bc, entityClass); + + if (DotNames.VOID.equals(returnType.name())) { + bc.return_(); + } else { + handleLongReturnValue(bc, delete, returnType.name()); + } } - handleLongReturnValue(methodCreator, delete, returnType.name()); - } - } + }); + }); + existingMethods.add(methodKey); } for (Map.Entry mapping : customResultTypeImplNames.entrySet().stream() .sorted(Map.Entry.comparingByKey()).toList()) { @@ -334,11 +367,14 @@ private void generateCustomResultTypes(DotName interfaceName, DotName implName, ClassInfo interfaceInfo = index.getClassByName(interfaceName); - try (ClassCreator implClassCreator = ClassCreator.builder().classOutput(nonBeansClassOutput) - .interfaces(interfaceName.toString()).className(implName.toString()) - .build()) { + Gizmo gizmo = Gizmo.create(nonBeansClassOutput); + gizmo.class_(implName.toString(), implClassCreator -> { + implClassCreator.implements_(ClassDesc.of(interfaceName.toString())); - Map fields = new 
HashMap<>(3); + // Add default constructor + implClassCreator.defaultConstructor(); + + Map fields = new HashMap<>(3); for (MethodInfo method : interfaceInfo.methods()) { String getterName = method.name(); @@ -351,43 +387,55 @@ private void generateCustomResultTypes(DotName interfaceName, DotName implName, } DotName fieldTypeName = returnType.name(); - FieldDescriptor field = implClassCreator.getFieldCreator(propertyName, fieldTypeName.toString()) - .getFieldDescriptor(); + FieldDesc field = implClassCreator.field(propertyName, ifc -> { + ifc.setType(GenerationUtil.toClassDesc(fieldTypeName.toString())); + }); // create getter (based on the interface) - try (MethodCreator getter = implClassCreator.getMethodCreator(getterName, returnType.name().toString())) { - getter.setModifiers(Modifier.PUBLIC); - getter.returnValue(getter.readInstanceField(field, getter.getThis())); - } + MethodTypeDesc getterMtd = GenerationUtil.toMethodTypeDesc(returnType.name().toString()); + implClassCreator.method(getterName, mc -> { + mc.setType(getterMtd); + mc.public_(); + mc.body(bc -> { + bc.return_(bc.get(mc.this_().field(field))); + }); + }); fields.put(getterName, field); } - // Add static methods to convert from Object[] to this type + // Add static methods to convert from entity to this type for (String queryMethod : queryMethods) { - try (MethodCreator convert = implClassCreator.getMethodCreator("convert_" + queryMethod, - implName.toString(), entityClassInfo.name().toString())) { - convert.setModifiers(Modifier.STATIC | Modifier.PUBLIC); - - ResultHandle newObject = convert.newInstance(MethodDescriptor.ofConstructor(implName.toString())); - - ResultHandle entity = convert.getMethodParam(0); - final List availableMethods = availableMethods(entityClassInfo, index); - for (Map.Entry field : fields.entrySet()) { - if (!getterExists(availableMethods, field.getKey())) { - throw new IllegalArgumentException(field.getKey() + " method does not exists in " - + 
entityClassInfo.name().toString() + " class."); + MethodTypeDesc convertMtd = GenerationUtil.toMethodTypeDesc(implName.toString(), + entityClassInfo.name().toString()); + implClassCreator.staticMethod("convert_" + queryMethod, smc -> { + smc.setType(convertMtd); + smc.public_(); + ParamVar entityParam = smc.parameter("entity"); + + smc.body(bc -> { + LocalVar newObject = bc.localVar("newObject", + bc.new_(ClassDesc.of(implName.toString()))); + + final List availableMethods = availableMethods(entityClassInfo, index); + for (Map.Entry field : fields.entrySet()) { + if (!getterExists(availableMethods, field.getKey())) { + throw new IllegalArgumentException(field.getKey() + " method does not exists in " + + entityClassInfo.name().toString() + " class."); + } + + FieldDesc f = field.getValue(); + Expr getterResult = bc.invokeVirtual( + ClassMethodDesc.of(ClassDesc.of(entityClassInfo.name().toString()), field.getKey(), + MethodTypeDesc.of(f.type())), + entityParam); + bc.set(newObject.field(f), getterResult); } - - FieldDescriptor f = field.getValue(); - convert.writeInstanceField(f, newObject, convert.invokeVirtualMethod( - MethodDescriptor.ofMethod(entityClassInfo.name().toString(), field.getKey(), f.getType()), - entity)); - } - convert.returnValue(newObject); - } + bc.return_(newObject); + }); + }); } - } + }); } private static List availableMethods(ClassInfo entityClassInfo, IndexView index) { diff --git a/extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/generate/FragmentMethodsAdder.java b/extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/generate/FragmentMethodsAdder.java index 54bb1ce362ee2..12f95c12ff703 100644 --- a/extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/generate/FragmentMethodsAdder.java +++ b/extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/generate/FragmentMethodsAdder.java @@ -1,7 +1,11 @@ package 
io.quarkus.spring.data.deployment.generate; +import java.lang.constant.ClassDesc; +import java.lang.constant.MethodTypeDesc; +import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.function.Consumer; import org.jboss.jandex.ClassInfo; @@ -9,11 +13,12 @@ import org.jboss.jandex.IndexView; import org.jboss.jandex.MethodInfo; -import io.quarkus.gizmo.ClassCreator; -import io.quarkus.gizmo.FieldDescriptor; -import io.quarkus.gizmo.MethodCreator; -import io.quarkus.gizmo.MethodDescriptor; -import io.quarkus.gizmo.ResultHandle; +import io.quarkus.gizmo2.Expr; +import io.quarkus.gizmo2.ParamVar; +import io.quarkus.gizmo2.creator.ClassCreator; +import io.quarkus.gizmo2.desc.ClassMethodDesc; +import io.quarkus.gizmo2.desc.FieldDesc; +import io.quarkus.gizmo2.desc.MethodDesc; public class FragmentMethodsAdder { @@ -26,7 +31,8 @@ public FragmentMethodsAdder(Consumer fragmentImplClassResolvedCallback, } public void add(ClassCreator classCreator, String generatedClassName, - List customInterfaceNamesToImplement, Map customImplNameToHandle) { + List customInterfaceNamesToImplement, Map customImplNameToHandle, + Set existingMethods) { for (DotName customInterfaceToImplement : customInterfaceNamesToImplement) { String customImplementationClassName = FragmentMethodsUtil .getImplementationDotName(customInterfaceToImplement, index).toString(); @@ -41,38 +47,53 @@ public void add(ClassCreator classCreator, String generatedClassName, for (MethodInfo methodToImplement : customInterfaceToImplementClassInfo.methods()) { // methods defined on the interface are implemented by forwarding them to the bean that implements them - Object[] methodParameterTypes = new Object[methodToImplement.parametersCount()]; + String[] methodParameterTypes = new String[methodToImplement.parametersCount()]; for (int i = 0; i < methodToImplement.parametersCount(); i++) { methodParameterTypes[i] = methodToImplement.parameterType(i).name().toString(); } 
String methodReturnType = methodToImplement.returnType().name().toString(); + String methodKey = GenerationUtil.methodKey(methodToImplement.name(), methodReturnType, + methodParameterTypes); - MethodDescriptor methodDescriptor = MethodDescriptor.ofMethod(generatedClassName, methodToImplement.name(), - methodReturnType, methodParameterTypes); + if (!existingMethods.contains(methodKey)) { + // Build the MethodTypeDesc + MethodTypeDesc mtd = GenerationUtil.toMethodTypeDesc(methodReturnType, methodParameterTypes); - if (!classCreator.getExistingMethods().contains(methodDescriptor)) { - try (MethodCreator methodCreator = classCreator.getMethodCreator(methodDescriptor)) { - // obtain the bean from Arc - ResultHandle bean = methodCreator.readInstanceField( - customImplNameToHandle.get(customImplementationClassName), methodCreator.getThis()); - - ResultHandle[] methodParameterHandles = new ResultHandle[methodToImplement.parametersCount()]; + classCreator.method(methodToImplement.name(), mc -> { + mc.setType(mtd); + // Declare parameters + ParamVar[] params = new ParamVar[methodToImplement.parametersCount()]; for (int i = 0; i < methodToImplement.parametersCount(); i++) { - methodParameterHandles[i] = methodCreator.getMethodParam(i); + params[i] = mc.parameter("p" + i); } - // delegate call to bean - ResultHandle result = methodCreator.invokeVirtualMethod( - MethodDescriptor.ofMethod(customImplementationClassName, methodToImplement.name(), - methodReturnType, methodParameterTypes), - bean, methodParameterHandles); - if (void.class.getName().equals(methodReturnType)) { - methodCreator.returnValue(null); - } else { - methodCreator.returnValue(result); - } - } + mc.body(bc -> { + // obtain the bean from the field + Expr bean = bc.get(mc.this_().field( + customImplNameToHandle.get(customImplementationClassName))); + + // Build args list + List args = new ArrayList<>(); + for (ParamVar param : params) { + args.add(param); + } + + // Build the target MethodDesc for invokeVirtual 
+ MethodDesc targetMethod = ClassMethodDesc.of( + ClassDesc.of(customImplementationClassName), + methodToImplement.name(), mtd); + + // delegate call to bean + Expr result = bc.invokeVirtual(targetMethod, bean, args); + if (void.class.getName().equals(methodReturnType)) { + bc.return_(); + } else { + bc.return_(result); + } + }); + }); + existingMethods.add(methodKey); } } } diff --git a/extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/generate/GenerationUtil.java b/extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/generate/GenerationUtil.java index f5dda05c46e10..6376ae2553bd7 100644 --- a/extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/generate/GenerationUtil.java +++ b/extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/generate/GenerationUtil.java @@ -3,6 +3,9 @@ import static io.quarkus.spring.data.deployment.DotNames.JPA_NAMED_QUERIES; import static io.quarkus.spring.data.deployment.DotNames.JPA_NAMED_QUERY; +import java.lang.constant.ClassDesc; +import java.lang.constant.ConstantDescs; +import java.lang.constant.MethodTypeDesc; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedHashSet; @@ -17,7 +20,8 @@ import org.jboss.jandex.MethodInfo; import org.jboss.jandex.Type; -import io.quarkus.gizmo.MethodDescriptor; +import io.quarkus.gizmo2.desc.ClassMethodDesc; +import io.quarkus.gizmo2.desc.MethodDesc; import io.quarkus.spring.data.deployment.DotNames; public final class GenerationUtil { @@ -57,17 +61,80 @@ static Set interfaceMethods(Collection interfaces, IndexVie return result; } - // Used in case where we can't simply use MethodDescriptor.of(MethodInfo) - // because that used the class of the method - static MethodDescriptor toMethodDescriptor(String generatedClassName, MethodInfo methodInfo) { + /** + * Build a method key string for tracking existing methods in a Set. 
+ */ + static String methodKey(String name, String returnType, String... paramTypes) { + return name + "(" + String.join(",", paramTypes) + ")" + returnType; + } + + /** + * Build a method key string from a MethodInfo for a given generated class. + */ + static String methodKey(String generatedClassName, MethodInfo methodInfo) { final List parameterTypesStr = new ArrayList<>(); for (Type parameter : methodInfo.parameterTypes()) { parameterTypesStr.add(parameter.name().toString()); } - return MethodDescriptor.ofMethod(generatedClassName, methodInfo.name(), methodInfo.returnType().name().toString(), + return methodKey(methodInfo.name(), methodInfo.returnType().name().toString(), parameterTypesStr.toArray(new String[0])); } + /** + * Create a MethodTypeDesc from the return type and parameter types. + */ + static MethodTypeDesc toMethodTypeDesc(String returnType, String... paramTypes) { + ClassDesc retDesc = toClassDesc(returnType); + ClassDesc[] paramDescs = new ClassDesc[paramTypes.length]; + for (int i = 0; i < paramTypes.length; i++) { + paramDescs[i] = toClassDesc(paramTypes[i]); + } + return MethodTypeDesc.of(retDesc, paramDescs); + } + + /** + * Convert a dot-separated class name or primitive type name to a ClassDesc. + * Also handles JVM array type descriptors like {@code [Ljava.lang.Object;} and + * dot-name array forms like {@code java.lang.Object[]}. + */ + static ClassDesc toClassDesc(String typeName) { + // Handle JVM internal array descriptors (e.g. "[Ljava.lang.Object;", "[[I") + if (typeName.startsWith("[")) { + return ClassDesc.ofDescriptor(typeName.replace('.', '/')); + } + // Handle dot-name array forms (e.g. 
"java.lang.Object[]") + if (typeName.endsWith("[]")) { + String componentType = typeName.substring(0, typeName.length() - 2); + return toClassDesc(componentType).arrayType(); + } + return switch (typeName) { + case "void" -> ConstantDescs.CD_void; + case "boolean" -> ConstantDescs.CD_boolean; + case "byte" -> ConstantDescs.CD_byte; + case "short" -> ConstantDescs.CD_short; + case "int" -> ConstantDescs.CD_int; + case "long" -> ConstantDescs.CD_long; + case "float" -> ConstantDescs.CD_float; + case "double" -> ConstantDescs.CD_double; + case "char" -> ConstantDescs.CD_char; + default -> ClassDesc.of(typeName); + }; + } + + /** + * Build a MethodDesc for a method in a generated class from a MethodInfo. + * Used in case where we can't simply use the declaring class from MethodInfo. + */ + static MethodDesc toMethodDesc(String generatedClassName, MethodInfo methodInfo) { + final List parameterTypesStr = new ArrayList<>(); + for (Type parameter : methodInfo.parameterTypes()) { + parameterTypesStr.add(parameter.name().toString()); + } + String returnType = methodInfo.returnType().name().toString(); + MethodTypeDesc mtd = toMethodTypeDesc(returnType, parameterTypesStr.toArray(new String[0])); + return ClassMethodDesc.of(ClassDesc.of(generatedClassName), methodInfo.name(), mtd); + } + static AnnotationInstance getNamedQueryForMethod(MethodInfo methodInfo, ClassInfo entityClassInfo) { // try @NamedQuery AnnotationInstance namedQueryAnnotation = getNamedQueryAnnotationForMethod(methodInfo, entityClassInfo); diff --git a/extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/generate/SpringDataRepositoryCreator.java b/extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/generate/SpringDataRepositoryCreator.java index ebf95e67283d3..2bcabd84d2c16 100644 --- a/extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/generate/SpringDataRepositoryCreator.java +++ 
b/extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/generate/SpringDataRepositoryCreator.java @@ -1,6 +1,6 @@ package io.quarkus.spring.data.deployment.generate; -import java.lang.reflect.Modifier; +import java.lang.constant.ClassDesc; import java.util.AbstractMap; import java.util.ArrayList; import java.util.HashMap; @@ -21,12 +21,11 @@ import org.jboss.jandex.Type; import io.quarkus.deployment.util.JandexUtil; -import io.quarkus.gizmo.ClassCreator; -import io.quarkus.gizmo.ClassOutput; -import io.quarkus.gizmo.FieldCreator; -import io.quarkus.gizmo.FieldDescriptor; -import io.quarkus.gizmo.MethodCreator; -import io.quarkus.gizmo.MethodDescriptor; +import io.quarkus.gizmo2.ClassOutput; +import io.quarkus.gizmo2.Const; +import io.quarkus.gizmo2.Gizmo; +import io.quarkus.gizmo2.desc.ConstructorDesc; +import io.quarkus.gizmo2.desc.FieldDesc; import io.quarkus.panache.common.deployment.TypeBundle; import io.quarkus.runtime.util.HashUtil; import io.quarkus.spring.data.deployment.DotNames; @@ -40,7 +39,8 @@ public class SpringDataRepositoryCreator { private final DerivedMethodsAdder derivedMethodsAdder; private final CustomQueryMethodsAdder customQueryMethodsAdder; - public SpringDataRepositoryCreator(ClassOutput classOutput, ClassOutput otherClassOutput, IndexView index, + public SpringDataRepositoryCreator(ClassOutput classOutput, + ClassOutput otherClassOutput, IndexView index, Consumer fragmentImplClassResolvedCallback, Consumer customClassCreatedCallback, TypeBundle typeBundle) { this.classOutput = classOutput; @@ -81,44 +81,55 @@ public Result implementCrudRepository(ClassInfo repositoryToImplement, IndexView } } - Map fragmentImplNameToFieldDescriptor = new HashMap<>(); + Map fragmentImplNameToFieldDescriptor = new HashMap<>(); String repositoryToImplementStr = repositoryToImplement.name().toString(); String generatedClassName = repositoryToImplementStr + "_" + HashUtil.sha1(repositoryToImplementStr) + "Impl"; - try 
(ClassCreator classCreator = ClassCreator.builder().classOutput(classOutput) - .className(generatedClassName) - .interfaces(repositoryToImplementStr) - .build()) { + + // Track existing methods across all adders + Set existingMethods = new HashSet<>(); + + Gizmo gizmo = Gizmo.create(classOutput) + .withDebugInfo(false) + .withParameters(false); + gizmo.class_(generatedClassName, classCreator -> { + classCreator.implements_(ClassDesc.of(repositoryToImplementStr)); classCreator.addAnnotation(ApplicationScoped.class); - FieldCreator entityClassFieldCreator = classCreator.getFieldCreator("entityClass", Class.class.getName()) - .setModifiers(Modifier.PRIVATE | Modifier.FINAL); + // Create the entityClass field + FieldDesc entityClassFieldDesc = classCreator.field("entityClass", ifc -> { + ifc.setType(Class.class); + ifc.private_(); + ifc.final_(); + }); // create an instance field of type Class for each one of the implementations of the custom interfaces createCustomImplFields(classCreator, fragmentNamesToImplement, index, fragmentImplNameToFieldDescriptor); // initialize all class fields in the constructor - try (MethodCreator ctor = classCreator.getMethodCreator("", "V")) { - ctor.invokeSpecialMethod(MethodDescriptor.ofMethod(Object.class, "", void.class), ctor.getThis()); - // initialize the entityClass field - ctor.writeInstanceField(entityClassFieldCreator.getFieldDescriptor(), ctor.getThis(), - ctor.loadClassFromTCCL(entityTypeStr)); - ctor.returnValue(null); - } + classCreator.constructor(ctor -> { + ctor.body(bc -> { + bc.invokeSpecial(ConstructorDesc.of(Object.class), ctor.this_()); + // initialize the entityClass field + bc.set(ctor.this_().field(entityClassFieldDesc), + Const.of(ClassDesc.of(entityTypeStr))); + bc.return_(); + }); + }); // for every method we add we need to make sure that we only haven't added it before // we first add custom methods (as per Spring Data implementation) thus ensuring that user provided methods // always override stock methods 
from the Spring Data repository interfaces fragmentMethodsAdder.add(classCreator, generatedClassName, fragmentNamesToImplement, - fragmentImplNameToFieldDescriptor); - - stockMethodsAdder.add(classCreator, entityClassFieldCreator.getFieldDescriptor(), generatedClassName, - repositoryToImplement, entityDotName, idTypeStr); - derivedMethodsAdder.add(classCreator, entityClassFieldCreator.getFieldDescriptor(), generatedClassName, - repositoryToImplement, entityClassInfo); - customQueryMethodsAdder.add(classCreator, entityClassFieldCreator.getFieldDescriptor(), - repositoryToImplement, entityClassInfo, idTypeStr); - } + fragmentImplNameToFieldDescriptor, existingMethods); + + stockMethodsAdder.add(classCreator, entityClassFieldDesc, generatedClassName, + repositoryToImplement, entityDotName, idTypeStr, existingMethods); + derivedMethodsAdder.add(classCreator, entityClassFieldDesc, generatedClassName, + repositoryToImplement, entityClassInfo, existingMethods); + customQueryMethodsAdder.add(classCreator, entityClassFieldDesc, + repositoryToImplement, entityClassInfo, idTypeStr, existingMethods); + }); return new Result(entityDotName, idTypeDotName, generatedClassName); } @@ -164,8 +175,9 @@ private Map.Entry extractIdAndEntityTypes(ClassInfo repository return new AbstractMap.SimpleEntry<>(idDotName, entityDotName); } - private void createCustomImplFields(ClassCreator repositoryImpl, List customInterfaceNamesToImplement, - IndexView index, Map customImplNameToFieldDescriptor) { + private void createCustomImplFields(io.quarkus.gizmo2.creator.ClassCreator repositoryImpl, + List customInterfaceNamesToImplement, + IndexView index, Map customImplNameToFieldDescriptor) { Set customImplClassNames = new HashSet<>(customInterfaceNamesToImplement.size()); // go through the interfaces and collect the implementing classes in a Set @@ -178,13 +190,14 @@ private void createCustomImplFields(ClassCreator repositoryImpl, List c // do the actual field creation and book-keeping of them in 
the customImplNameToFieldDescriptor Map int i = 0; for (String customImplClassName : customImplClassNames) { - FieldCreator customClassField = repositoryImpl - .getFieldCreator("customImplClass" + (i + 1), customImplClassName) - .setModifiers(Modifier.PROTECTED); // done to prevent warning during the build - customClassField.addAnnotation(Inject.class); - - customImplNameToFieldDescriptor.put(customImplClassName, - customClassField.getFieldDescriptor()); + final int fieldIndex = i; + FieldDesc customClassField = repositoryImpl.field("customImplClass" + (fieldIndex + 1), ifc -> { + ifc.setType(ClassDesc.of(customImplClassName)); + ifc.protected_(); // done to prevent warning during the build + ifc.addAnnotation(Inject.class); + }); + + customImplNameToFieldDescriptor.put(customImplClassName, customClassField); i++; } } diff --git a/extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/generate/StockMethodsAdder.java b/extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/generate/StockMethodsAdder.java index d09dc30e4e810..1cfb5874b19d7 100644 --- a/extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/generate/StockMethodsAdder.java +++ b/extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/generate/StockMethodsAdder.java @@ -1,9 +1,8 @@ package io.quarkus.spring.data.deployment.generate; -import static io.quarkus.gizmo.FieldDescriptor.of; -import static io.quarkus.gizmo.MethodDescriptor.ofMethod; - -import java.lang.reflect.Modifier; +import java.lang.constant.ClassDesc; +import java.lang.constant.ConstantDescs; +import java.lang.constant.MethodTypeDesc; import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; @@ -38,14 +37,15 @@ import org.springframework.data.domain.Sort; import io.quarkus.deployment.bean.JavaBeanUtil; -import io.quarkus.gizmo.AssignableResultHandle; -import io.quarkus.gizmo.BranchResult; 
-import io.quarkus.gizmo.BytecodeCreator; -import io.quarkus.gizmo.ClassCreator; -import io.quarkus.gizmo.FieldDescriptor; -import io.quarkus.gizmo.MethodCreator; -import io.quarkus.gizmo.MethodDescriptor; -import io.quarkus.gizmo.ResultHandle; +import io.quarkus.gizmo2.Const; +import io.quarkus.gizmo2.Expr; +import io.quarkus.gizmo2.LocalVar; +import io.quarkus.gizmo2.ParamVar; +import io.quarkus.gizmo2.creator.BlockCreator; +import io.quarkus.gizmo2.creator.ClassCreator; +import io.quarkus.gizmo2.desc.ClassMethodDesc; +import io.quarkus.gizmo2.desc.FieldDesc; +import io.quarkus.gizmo2.desc.MethodDesc; import io.quarkus.hibernate.orm.panache.PanacheQuery; import io.quarkus.hibernate.orm.panache.common.runtime.AbstractManagedJpaOperations; import io.quarkus.hibernate.orm.panache.runtime.AdditionalJpaOperations; @@ -60,16 +60,17 @@ public class StockMethodsAdder { private static Set ALL_SPRING_DATA_REPOSITORY_METHODS = null; private final IndexView index; - private final FieldDescriptor operationsField; + private final FieldDesc operationsField; public StockMethodsAdder(IndexView index, TypeBundle typeBundle) { this.index = index; String operationsName = typeBundle.operations().dotName().toString(); - operationsField = of(operationsName, "INSTANCE", operationsName); + operationsField = FieldDesc.of(ClassDesc.of(operationsName), "INSTANCE", ClassDesc.of(operationsName)); } - public void add(ClassCreator classCreator, FieldDescriptor entityClassFieldDescriptor, - String generatedClassName, ClassInfo repositoryToImplement, DotName entityDotName, String idTypeStr) { + public void add(ClassCreator classCreator, FieldDesc entityClassFieldDescriptor, + String generatedClassName, ClassInfo repositoryToImplement, DotName entityDotName, String idTypeStr, + Set existingMethods) { Set methodsOfExtendedSpringDataRepositories = methodsOfExtendedSpringDataRepositories( repositoryToImplement); @@ -77,148 +78,163 @@ public void add(ClassCreator classCreator, FieldDescriptor 
entityClassFieldDescr Set allMethodsToBeImplemented = new LinkedHashSet<>(methodsOfExtendedSpringDataRepositories); allMethodsToBeImplemented.addAll(stockMethodsAddedToInterface); - Map allMethodsToBeImplementedToResult = new LinkedHashMap<>(); + Map allMethodsToBeImplementedToResult = new LinkedHashMap<>(); for (MethodInfo methodInfo : allMethodsToBeImplemented) { - allMethodsToBeImplementedToResult.put(GenerationUtil.toMethodDescriptor(generatedClassName, methodInfo), false); + allMethodsToBeImplementedToResult.put(GenerationUtil.methodKey(generatedClassName, methodInfo), false); } String entityTypeStr = entityDotName.toString(); - // for all Spring Data repository methods we know how to implement, check if the generated class actually needs the method - // and if so generate the implementation while also keeping the proper records - generateSave(classCreator, generatedClassName, entityDotName, entityTypeStr, - allMethodsToBeImplementedToResult, entityClassFieldDescriptor); + allMethodsToBeImplementedToResult, entityClassFieldDescriptor, existingMethods); generateSaveAndFlush(classCreator, generatedClassName, entityDotName, entityTypeStr, - allMethodsToBeImplementedToResult, entityClassFieldDescriptor); + allMethodsToBeImplementedToResult, entityClassFieldDescriptor, existingMethods); generateSaveAll(classCreator, entityClassFieldDescriptor, generatedClassName, entityDotName, entityTypeStr, - allMethodsToBeImplementedToResult); - generateFlush(classCreator, generatedClassName, allMethodsToBeImplementedToResult); + allMethodsToBeImplementedToResult, existingMethods); + generateFlush(classCreator, generatedClassName, allMethodsToBeImplementedToResult, existingMethods); generateFindById(classCreator, entityClassFieldDescriptor, generatedClassName, entityTypeStr, idTypeStr, - allMethodsToBeImplementedToResult); + allMethodsToBeImplementedToResult, existingMethods); generateExistsById(classCreator, entityClassFieldDescriptor, generatedClassName, entityTypeStr, 
idTypeStr, - allMethodsToBeImplementedToResult); + allMethodsToBeImplementedToResult, existingMethods); generateGetOne(classCreator, entityClassFieldDescriptor, generatedClassName, entityTypeStr, idTypeStr, - allMethodsToBeImplementedToResult); + allMethodsToBeImplementedToResult, existingMethods); generateGetReferenceById(classCreator, entityClassFieldDescriptor, generatedClassName, entityTypeStr, idTypeStr, - allMethodsToBeImplementedToResult); + allMethodsToBeImplementedToResult, existingMethods); generateGetById(classCreator, entityClassFieldDescriptor, generatedClassName, entityTypeStr, idTypeStr, - allMethodsToBeImplementedToResult); + allMethodsToBeImplementedToResult, existingMethods); generateFindAll(classCreator, entityClassFieldDescriptor, generatedClassName, entityTypeStr, - allMethodsToBeImplementedToResult); + allMethodsToBeImplementedToResult, existingMethods); generateFindAllWithSort(classCreator, entityClassFieldDescriptor, generatedClassName, entityTypeStr, - allMethodsToBeImplementedToResult); + allMethodsToBeImplementedToResult, existingMethods); generateFindAllWithPageable(classCreator, entityClassFieldDescriptor, generatedClassName, entityTypeStr, - allMethodsToBeImplementedToResult); + allMethodsToBeImplementedToResult, existingMethods); generateFindAllById(classCreator, entityClassFieldDescriptor, generatedClassName, entityDotName, entityTypeStr, - idTypeStr, allMethodsToBeImplementedToResult); - generateCount(classCreator, entityClassFieldDescriptor, generatedClassName, allMethodsToBeImplementedToResult); + idTypeStr, allMethodsToBeImplementedToResult, existingMethods); + generateCount(classCreator, entityClassFieldDescriptor, generatedClassName, allMethodsToBeImplementedToResult, + existingMethods); generateDeleteById(classCreator, entityClassFieldDescriptor, generatedClassName, entityTypeStr, idTypeStr, - allMethodsToBeImplementedToResult); - generateDelete(classCreator, generatedClassName, entityTypeStr, 
allMethodsToBeImplementedToResult); - generateDeleteAllWithIterable(classCreator, generatedClassName, entityTypeStr, allMethodsToBeImplementedToResult); - generateDeleteAll(classCreator, entityClassFieldDescriptor, generatedClassName, allMethodsToBeImplementedToResult); + allMethodsToBeImplementedToResult, existingMethods); + generateDelete(classCreator, generatedClassName, entityTypeStr, allMethodsToBeImplementedToResult, existingMethods); + generateDeleteAllWithIterable(classCreator, generatedClassName, entityTypeStr, allMethodsToBeImplementedToResult, + existingMethods); + generateDeleteAll(classCreator, entityClassFieldDescriptor, generatedClassName, allMethodsToBeImplementedToResult, + existingMethods); generateDeleteAllInBatch(classCreator, entityClassFieldDescriptor, generatedClassName, - allMethodsToBeImplementedToResult); + allMethodsToBeImplementedToResult, existingMethods); generateDeleteAllInBatchWithIterable(classCreator, generatedClassName, entityTypeStr, - allMethodsToBeImplementedToResult); + allMethodsToBeImplementedToResult, existingMethods); generateDeleteInBatchWithIterable(classCreator, generatedClassName, entityTypeStr, - allMethodsToBeImplementedToResult); + allMethodsToBeImplementedToResult, existingMethods); generateDeleteAllByIdInBatchWithIterable(classCreator, generatedClassName, entityTypeStr, - allMethodsToBeImplementedToResult); + allMethodsToBeImplementedToResult, existingMethods); - handleUnimplementedMethods(classCreator, allMethodsToBeImplementedToResult); + handleUnimplementedMethods(classCreator, generatedClassName, allMethodsToBeImplementedToResult, existingMethods); } private void generateSave(ClassCreator classCreator, String generatedClassName, DotName entityDotName, String entityTypeStr, - Map allMethodsToBeImplementedToResult, - FieldDescriptor entityClassFieldDescriptor) { - - MethodDescriptor saveDescriptor = MethodDescriptor.ofMethod(generatedClassName, "save", entityTypeStr, - entityTypeStr); - MethodDescriptor 
bridgeSaveDescriptor = MethodDescriptor.ofMethod(generatedClassName, "save", Object.class, - Object.class); - - if (allMethodsToBeImplementedToResult.containsKey(saveDescriptor) - || allMethodsToBeImplementedToResult.containsKey(bridgeSaveDescriptor)) { - - if (!classCreator.getExistingMethods().contains(saveDescriptor)) { - try (MethodCreator save = classCreator.getMethodCreator(saveDescriptor)) { - save.addAnnotation(Transactional.class); - - ResultHandle entity = save.getMethodParam(0); - - // if an entity is Persistable, then all we need to do is call isNew to determine if it's new or not - if (isPersistable(entityDotName)) { - ResultHandle isNew = save.invokeVirtualMethod( - ofMethod(entityDotName.toString(), "isNew", boolean.class.toString()), - entity); - BranchResult isNewBranch = save.ifTrue(isNew); - generatePersistAndReturn(entity, isNewBranch.trueBranch()); - generateMergeAndReturn(entity, isNewBranch.falseBranch(), entityClassFieldDescriptor); - } else { - AnnotationTarget idAnnotationTarget = getIdAnnotationTarget(entityDotName, index); - ResultHandle idValue = generateObtainValue(save, entityDotName, entity, idAnnotationTarget); - Type idType = getTypeOfTarget(idAnnotationTarget); - Optional versionValueTarget = getVersionAnnotationTarget(entityDotName, index); - - // the following code generated bytecode that: - // if there is a field annotated with @Version, calls 'persist' if the field is null, 'merge' otherwise - // if there is no field annotated with @Version, then if the value of the field annotated with '@Id' - // is "falsy", 'persist' is called, otherwise 'merge' is called - - if (versionValueTarget.isPresent()) { - Type versionType = getTypeOfTarget(versionValueTarget.get()); - if (versionType instanceof PrimitiveType) { - throw new IllegalArgumentException( - "The '@Version' annotation cannot be used on primitive types. 
Offending entity is '" - + entityDotName + "'."); + Map allMethodsToBeImplementedToResult, + FieldDesc entityClassFieldDescriptor, Set existingMethods) { + + String saveKey = GenerationUtil.methodKey("save", entityTypeStr, entityTypeStr); + String bridgeSaveKey = GenerationUtil.methodKey("save", Object.class.getName(), Object.class.getName()); + + if (allMethodsToBeImplementedToResult.containsKey(saveKey) + || allMethodsToBeImplementedToResult.containsKey(bridgeSaveKey)) { + + if (!existingMethods.contains(saveKey)) { + MethodTypeDesc saveMtd = GenerationUtil.toMethodTypeDesc(entityTypeStr, entityTypeStr); + classCreator.method("save", mc -> { + mc.setType(saveMtd); + mc.addAnnotation(Transactional.class); + ParamVar entityParam = mc.parameter("entity"); + + mc.body(bc -> { + // Read the static operations field and entity class field once and store in LocalVars + // so they can be used across ifElse branches + LocalVar opsVar = bc.localVar("ops", bc.getStaticField(operationsField)); + LocalVar entityClassVar = bc.localVar("entityClass", + bc.get(mc.this_().field(entityClassFieldDescriptor))); + if (isPersistable(entityDotName)) { + Expr isNew = bc.invokeVirtual( + ClassMethodDesc.of(ClassDesc.of(entityDotName.toString()), "isNew", + MethodTypeDesc.of(ConstantDescs.CD_boolean)), + entityParam); + bc.ifElse(isNew, + tb -> generatePersistAndReturn(entityParam, tb, opsVar), + fb -> generateMergeAndReturn(entityParam, fb, opsVar, + entityClassVar)); + } else { + AnnotationTarget idAnnotationTarget = getIdAnnotationTarget(entityDotName, index); + Expr idValue = generateObtainValue(bc, entityDotName, entityParam, idAnnotationTarget); + Type idType = getTypeOfTarget(idAnnotationTarget); + Optional versionValueTarget = getVersionAnnotationTarget(entityDotName, index); + + if (versionValueTarget.isPresent()) { + Type versionType = getTypeOfTarget(versionValueTarget.get()); + if (versionType instanceof PrimitiveType) { + throw new IllegalArgumentException( + "The '@Version' 
annotation cannot be used on primitive types. Offending entity is '" + + entityDotName + "'."); + } + Expr versionValue = generateObtainValue(bc, entityDotName, entityParam, + versionValueTarget.get()); + bc.ifElse(bc.isNull(versionValue), + tb -> generatePersistAndReturn(entityParam, tb, opsVar), + fb -> generateMergeAndReturn(entityParam, fb, opsVar, + entityClassVar)); + // if version is present, we've handled both branches, so return here + return; } - ResultHandle versionValue = generateObtainValue(save, entityDotName, entity, - versionValueTarget.get()); - BranchResult versionValueIsNullBranch = save.ifNull(versionValue); - generatePersistAndReturn(entity, versionValueIsNullBranch.trueBranch()); - generateMergeAndReturn(entity, versionValueIsNullBranch.falseBranch(), entityClassFieldDescriptor); - } - BytecodeCreator idValueUnset; - BytecodeCreator idValueSet; - if (idType instanceof PrimitiveType) { - if (!idType.name().equals(DotNames.PRIMITIVE_LONG) - && !idType.name().equals(DotNames.PRIMITIVE_INTEGER)) { - throw new IllegalArgumentException("Id type of '" + entityDotName + "' is invalid."); - } - if (idType.name().equals(DotNames.PRIMITIVE_LONG)) { - ResultHandle longObject = save.invokeStaticMethod( - MethodDescriptor.ofMethod(Long.class, "valueOf", Long.class, long.class), idValue); - idValue = save.invokeVirtualMethod(MethodDescriptor.ofMethod(Long.class, "intValue", int.class), - longObject); + if (idType instanceof PrimitiveType) { + if (!idType.name().equals(DotNames.PRIMITIVE_LONG) + && !idType.name().equals(DotNames.PRIMITIVE_INTEGER)) { + throw new IllegalArgumentException( + "Id type of '" + entityDotName + "' is invalid."); + } + Expr idValueForComparison = idValue; + if (idType.name().equals(DotNames.PRIMITIVE_LONG)) { + Expr longObject = bc.invokeStatic( + MethodDesc.of(Long.class, "valueOf", Long.class, long.class), idValue); + idValueForComparison = bc.invokeVirtual( + MethodDesc.of(Long.class, "intValue", int.class), longObject); + } + // 
ifNonZero equivalent: if id != 0 => idValueSet, if id == 0 => idValueUnset + bc.ifElse(bc.ne(idValueForComparison, 0), + idValueSetBlock -> generateMergeAndReturn(entityParam, idValueSetBlock, + opsVar, entityClassVar), + idValueUnsetBlock -> generatePersistAndReturn(entityParam, + idValueUnsetBlock, opsVar)); + } else { + bc.ifElse(bc.isNull(idValue), + idValueUnsetBlock -> generatePersistAndReturn(entityParam, + idValueUnsetBlock, opsVar), + idValueSetBlock -> generateMergeAndReturn(entityParam, idValueSetBlock, + opsVar, entityClassVar)); } - BranchResult idValueNonZeroBranch = save.ifNonZero(idValue); - idValueSet = idValueNonZeroBranch.trueBranch(); - idValueUnset = idValueNonZeroBranch.falseBranch(); - } else { - BranchResult idValueNullBranch = save.ifNull(idValue); - idValueSet = idValueNullBranch.falseBranch(); - idValueUnset = idValueNullBranch.trueBranch(); } - generatePersistAndReturn(entity, idValueUnset); - generateMergeAndReturn(entity, idValueSet, entityClassFieldDescriptor); - } - } - try (MethodCreator bridgeSave = classCreator.getMethodCreator(bridgeSaveDescriptor)) { - MethodDescriptor save = MethodDescriptor.ofMethod(generatedClassName, "save", entityTypeStr, - entityTypeStr); - ResultHandle methodParam = bridgeSave.getMethodParam(0); - ResultHandle castedMethodParam = bridgeSave.checkCast(methodParam, entityTypeStr); - ResultHandle result = bridgeSave.invokeVirtualMethod(save, bridgeSave.getThis(), castedMethodParam); - bridgeSave.returnValue(result); - } + }); + }); + existingMethods.add(saveKey); + + // Bridge method + MethodTypeDesc bridgeMtd = GenerationUtil.toMethodTypeDesc(Object.class.getName(), Object.class.getName()); + MethodDesc saveDesc = ClassMethodDesc.of(ClassDesc.of(generatedClassName), "save", saveMtd); + classCreator.method("save", bmc -> { + bmc.setType(bridgeMtd); + ParamVar methodParam = bmc.parameter("entity"); + bmc.body(bbc -> { + Expr castedParam = bbc.cast(methodParam, ClassDesc.of(entityTypeStr)); + Expr result = 
bbc.invokeVirtual(saveDesc, bmc.this_(), castedParam); + bbc.return_(result); + }); + }); + existingMethods.add(bridgeSaveKey); } - allMethodsToBeImplementedToResult.put(saveDescriptor, true); - allMethodsToBeImplementedToResult.put(bridgeSaveDescriptor, true); + allMethodsToBeImplementedToResult.put(saveKey, true); + allMethodsToBeImplementedToResult.put(bridgeSaveKey, true); } } @@ -240,49 +256,46 @@ private boolean isPersistable(DotName entityDotName) { return isPersistable(superDotName); } - private void generatePersistAndReturn(ResultHandle entity, BytecodeCreator bytecodeCreator) { - bytecodeCreator.invokeVirtualMethod( - MethodDescriptor.ofMethod(AbstractManagedJpaOperations.class, "persist", void.class, Object.class), - bytecodeCreator.readStaticField(operationsField), + private void generatePersistAndReturn(Expr entity, BlockCreator bc, Expr opsExpr) { + bc.invokeVirtual( + MethodDesc.of(AbstractManagedJpaOperations.class, "persist", void.class, Object.class), + opsExpr, entity); - bytecodeCreator.returnValue(entity); + bc.return_(entity); } - private void generateMergeAndReturn(ResultHandle entity, BytecodeCreator bytecodeCreator, - FieldDescriptor entityClassFieldDescriptor) { - ResultHandle entityClass = bytecodeCreator.readInstanceField(entityClassFieldDescriptor, bytecodeCreator.getThis()); - ResultHandle session = bytecodeCreator.invokeVirtualMethod( - ofMethod(AbstractManagedJpaOperations.class, "getSession", Session.class, Class.class), - bytecodeCreator.readStaticField(operationsField), - entityClass); - entity = bytecodeCreator.invokeInterfaceMethod( - MethodDescriptor.ofMethod(Session.class, "merge", Object.class, Object.class), + private void generateMergeAndReturn(Expr entity, BlockCreator bc, Expr opsExpr, + Expr entityClassExpr) { + Expr session = bc.invokeVirtual( + MethodDesc.of(AbstractManagedJpaOperations.class, "getSession", Session.class, Class.class), + opsExpr, + entityClassExpr); + Expr merged = bc.invokeInterface( + 
MethodDesc.of(Session.class, "merge", Object.class, Object.class), session, entity); - bytecodeCreator.returnValue(entity); + bc.return_(merged); } - /** - * Given an annotation target, generate the bytecode that is needed to obtain its value - * either by reading the field or by calling the method. - * Meant to be called for annotations alike {@code @Id} or {@code @Version} - */ - private ResultHandle generateObtainValue(MethodCreator methodCreator, DotName entityDotName, ResultHandle entity, + private Expr generateObtainValue(BlockCreator bc, DotName entityDotName, Expr entity, AnnotationTarget annotationTarget) { if (annotationTarget instanceof FieldInfo) { FieldInfo fieldInfo = annotationTarget.asField(); - if (Modifier.isPublic(fieldInfo.flags())) { - return methodCreator.readInstanceField(of(fieldInfo), entity); + if (java.lang.reflect.Modifier.isPublic(fieldInfo.flags())) { + FieldDesc fd = FieldDesc.of(ClassDesc.of(fieldInfo.declaringClass().name().toString()), + fieldInfo.name(), ClassDesc.of(fieldInfo.type().name().toString())); + return bc.get(entity.field(fd)); } String getterMethodName = JavaBeanUtil.getGetterName(fieldInfo.name(), fieldInfo.type().name()); - return methodCreator.invokeVirtualMethod( - MethodDescriptor.ofMethod(entityDotName.toString(), getterMethodName, fieldInfo.type().name().toString()), + return bc.invokeVirtual( + ClassMethodDesc.of(ClassDesc.of(entityDotName.toString()), getterMethodName, + MethodTypeDesc.of(GenerationUtil.toClassDesc(fieldInfo.type().name().toString()))), entity); } MethodInfo methodInfo = annotationTarget.asMethod(); - return methodCreator.invokeVirtualMethod( - MethodDescriptor.ofMethod(entityDotName.toString(), methodInfo.name(), - methodInfo.returnType().name().toString()), + return bc.invokeVirtual( + ClassMethodDesc.of(ClassDesc.of(entityDotName.toString()), methodInfo.name(), + MethodTypeDesc.of(GenerationUtil.toClassDesc(methodInfo.returnType().name().toString()))), entity); } @@ -295,728 +308,834 @@ 
private Type getTypeOfTarget(AnnotationTarget idAnnotationTarget) { private void generateSaveAndFlush(ClassCreator classCreator, String generatedClassName, DotName entityDotName, String entityTypeStr, - Map allMethodsToBeImplementedToResult, FieldDescriptor entityClassFieldDescriptor) { + Map allMethodsToBeImplementedToResult, FieldDesc entityClassFieldDescriptor, + Set existingMethods) { - MethodDescriptor saveAndFlushDescriptor = MethodDescriptor.ofMethod(generatedClassName, "saveAndFlush", entityTypeStr, - entityTypeStr); - MethodDescriptor bridgeSaveAndFlushDescriptor = MethodDescriptor.ofMethod(generatedClassName, "saveAndFlush", - Object.class, - Object.class); + String saveAndFlushKey = GenerationUtil.methodKey("saveAndFlush", entityTypeStr, entityTypeStr); + String bridgeSaveAndFlushKey = GenerationUtil.methodKey("saveAndFlush", Object.class.getName(), + Object.class.getName()); - if (allMethodsToBeImplementedToResult.containsKey(saveAndFlushDescriptor) - || allMethodsToBeImplementedToResult.containsKey(bridgeSaveAndFlushDescriptor)) { + if (allMethodsToBeImplementedToResult.containsKey(saveAndFlushKey) + || allMethodsToBeImplementedToResult.containsKey(bridgeSaveAndFlushKey)) { - if (!classCreator.getExistingMethods().contains(saveAndFlushDescriptor)) { - MethodDescriptor save = MethodDescriptor.ofMethod(generatedClassName, "save", entityTypeStr, - entityTypeStr); - - // we need to force the generation of findById since this method depends on it - allMethodsToBeImplementedToResult.put(save, false); + if (!existingMethods.contains(saveAndFlushKey)) { + // Force generation of save since this method depends on it + String saveKey = GenerationUtil.methodKey("save", entityTypeStr, entityTypeStr); + allMethodsToBeImplementedToResult.put(saveKey, false); generateSave(classCreator, generatedClassName, entityDotName, entityTypeStr, - allMethodsToBeImplementedToResult, entityClassFieldDescriptor); - - try (MethodCreator saveAndFlush = 
classCreator.getMethodCreator(saveAndFlushDescriptor)) { - saveAndFlush.addAnnotation(Transactional.class); - - ResultHandle entity = saveAndFlush.getMethodParam(0); - entity = saveAndFlush.invokeVirtualMethod(save, saveAndFlush.getThis(), entity); - saveAndFlush.invokeVirtualMethod( - MethodDescriptor.ofMethod(AbstractManagedJpaOperations.class, "flush", void.class), - saveAndFlush.readStaticField(operationsField)); - saveAndFlush.returnValue(entity); - } - try (MethodCreator bridgeSave = classCreator.getMethodCreator(bridgeSaveAndFlushDescriptor)) { - MethodDescriptor saveAndFlush = MethodDescriptor.ofMethod(generatedClassName, "saveAndFlush", entityTypeStr, - entityTypeStr); - ResultHandle methodParam = bridgeSave.getMethodParam(0); - ResultHandle castedMethodParam = bridgeSave.checkCast(methodParam, entityTypeStr); - ResultHandle result = bridgeSave.invokeVirtualMethod(saveAndFlush, bridgeSave.getThis(), castedMethodParam); - bridgeSave.returnValue(result); - } + allMethodsToBeImplementedToResult, entityClassFieldDescriptor, existingMethods); + + MethodTypeDesc saveMtd = GenerationUtil.toMethodTypeDesc(entityTypeStr, entityTypeStr); + MethodDesc saveDesc = ClassMethodDesc.of(ClassDesc.of(generatedClassName), "save", saveMtd); + + classCreator.method("saveAndFlush", mc -> { + mc.setType(saveMtd); + mc.addAnnotation(Transactional.class); + ParamVar entityParam = mc.parameter("entity"); + + mc.body(bc -> { + LocalVar ops = bc.localVar("ops", bc.getStaticField(operationsField)); + Expr savedEntity = bc.invokeVirtual(saveDesc, mc.this_(), entityParam); + bc.invokeVirtual( + MethodDesc.of(AbstractManagedJpaOperations.class, "flush", void.class), + ops); + bc.return_(savedEntity); + }); + }); + existingMethods.add(saveAndFlushKey); + + // Bridge method + MethodTypeDesc bridgeMtd = GenerationUtil.toMethodTypeDesc(Object.class.getName(), Object.class.getName()); + MethodDesc saveAndFlushDesc = ClassMethodDesc.of(ClassDesc.of(generatedClassName), "saveAndFlush", 
saveMtd); + classCreator.method("saveAndFlush", bmc -> { + bmc.setType(bridgeMtd); + ParamVar methodParam = bmc.parameter("entity"); + bmc.body(bbc -> { + Expr castedParam = bbc.cast(methodParam, ClassDesc.of(entityTypeStr)); + Expr result = bbc.invokeVirtual(saveAndFlushDesc, bmc.this_(), castedParam); + bbc.return_(result); + }); + }); + existingMethods.add(bridgeSaveAndFlushKey); } - allMethodsToBeImplementedToResult.put(saveAndFlushDescriptor, true); - allMethodsToBeImplementedToResult.put(bridgeSaveAndFlushDescriptor, true); + allMethodsToBeImplementedToResult.put(saveAndFlushKey, true); + allMethodsToBeImplementedToResult.put(bridgeSaveAndFlushKey, true); } } - private void generateSaveAll(ClassCreator classCreator, FieldDescriptor entityClassFieldDescriptor, + private void generateSaveAll(ClassCreator classCreator, FieldDesc entityClassFieldDescriptor, String generatedClassName, DotName entityDotName, String entityTypeStr, - Map allMethodsToBeImplementedToResult) { - - MethodDescriptor saveAllDescriptor = MethodDescriptor.ofMethod(generatedClassName, "saveAll", List.class, - Iterable.class); - MethodDescriptor bridgeSaveAllDescriptor = MethodDescriptor.ofMethod(generatedClassName, "saveAll", Iterable.class, - Iterable.class); - - if (allMethodsToBeImplementedToResult.containsKey(saveAllDescriptor) - || allMethodsToBeImplementedToResult.containsKey(bridgeSaveAllDescriptor)) { - - if (!classCreator.getExistingMethods().contains(saveAllDescriptor)) { - MethodDescriptor save = MethodDescriptor.ofMethod(generatedClassName, "save", entityTypeStr, - entityTypeStr); - - try (MethodCreator saveAll = classCreator.getMethodCreator(saveAllDescriptor)) { - saveAll.setSignature(String.format("(Ljava/lang/Iterable;)Ljava/util/List;", - entityTypeStr.replace('.', '/'))); - saveAll.addAnnotation(Transactional.class); - - ResultHandle iterable = saveAll.getMethodParam(0); - ResultHandle resultList = saveAll.newInstance(MethodDescriptor.ofConstructor(ArrayList.class)); - - 
ResultHandle iterator = saveAll.invokeInterfaceMethod( - ofMethod(Iterable.class, "iterator", Iterator.class), - iterable); - BytecodeCreator loop = saveAll.createScope(); - ResultHandle hasNextValue = loop.invokeInterfaceMethod( - ofMethod(Iterator.class, "hasNext", boolean.class), - iterator); - - BranchResult hasNextBranch = loop.ifNonZero(hasNextValue); - BytecodeCreator hasNext = hasNextBranch.trueBranch(); - BytecodeCreator doesNotHaveNext = hasNextBranch.falseBranch(); - ResultHandle next = hasNext.invokeInterfaceMethod( - ofMethod(Iterator.class, "next", Object.class), - iterator); - ResultHandle saveResult = hasNext.invokeVirtualMethod(save, hasNext.getThis(), next); - hasNext.invokeInterfaceMethod( - MethodDescriptor.ofMethod(List.class, "add", boolean.class, Object.class), - resultList, saveResult); - hasNext.continueScope(loop); - - doesNotHaveNext.breakScope(loop); - - saveAll.returnValue(resultList); - } - try (MethodCreator bridgeSaveAll = classCreator.getMethodCreator(bridgeSaveAllDescriptor)) { - MethodDescriptor saveAll = MethodDescriptor.ofMethod(generatedClassName, "saveAll", - List.class.getName(), Iterable.class); - ResultHandle result = bridgeSaveAll.invokeVirtualMethod(saveAll, bridgeSaveAll.getThis(), - bridgeSaveAll.getMethodParam(0)); - bridgeSaveAll.returnValue(result); - } + Map allMethodsToBeImplementedToResult, Set existingMethods) { + + String saveAllKey = GenerationUtil.methodKey("saveAll", List.class.getName(), Iterable.class.getName()); + String bridgeSaveAllKey = GenerationUtil.methodKey("saveAll", Iterable.class.getName(), Iterable.class.getName()); + + if (allMethodsToBeImplementedToResult.containsKey(saveAllKey) + || allMethodsToBeImplementedToResult.containsKey(bridgeSaveAllKey)) { + + if (!existingMethods.contains(saveAllKey)) { + MethodTypeDesc saveMtd = GenerationUtil.toMethodTypeDesc(entityTypeStr, entityTypeStr); + MethodDesc saveDesc = ClassMethodDesc.of(ClassDesc.of(generatedClassName), "save", saveMtd); + + 
MethodTypeDesc saveAllMtd = GenerationUtil.toMethodTypeDesc(List.class.getName(), Iterable.class.getName()); + classCreator.method("saveAll", mc -> { + mc.setType(saveAllMtd); + mc.addAnnotation(Transactional.class); + ParamVar iterableParam = mc.parameter("entities"); + + mc.body(bc -> { + LocalVar resultList = bc.localVar("resultList", + bc.new_(ClassDesc.of(ArrayList.class.getName()))); + LocalVar iteratorVar = bc.localVar("iterator", + bc.invokeInterface( + MethodDesc.of(Iterable.class, "iterator", Iterator.class), + iterableParam)); + + bc.while_( + cond -> { + Expr hasNext = cond.invokeInterface( + MethodDesc.of(Iterator.class, "hasNext", boolean.class), + iteratorVar); + cond.yield(hasNext); + }, + body -> { + Expr next = body.invokeInterface( + MethodDesc.of(Iterator.class, "next", Object.class), + iteratorVar); + Expr saveResult = body.invokeVirtual(saveDesc, mc.this_(), next); + body.invokeInterface( + MethodDesc.of(List.class, "add", boolean.class, Object.class), + resultList, saveResult); + }); + + bc.return_(resultList); + }); + }); + existingMethods.add(saveAllKey); + + // Bridge method + MethodTypeDesc bridgeMtd = GenerationUtil.toMethodTypeDesc(Iterable.class.getName(), Iterable.class.getName()); + MethodDesc saveAllDesc = ClassMethodDesc.of(ClassDesc.of(generatedClassName), "saveAll", saveAllMtd); + classCreator.method("saveAll", bmc -> { + bmc.setType(bridgeMtd); + ParamVar methodParam = bmc.parameter("entities"); + bmc.body(bbc -> { + Expr result = bbc.invokeVirtual(saveAllDesc, bmc.this_(), methodParam); + bbc.return_(result); + }); + }); + existingMethods.add(bridgeSaveAllKey); } - allMethodsToBeImplementedToResult.put(saveAllDescriptor, true); - allMethodsToBeImplementedToResult.put(bridgeSaveAllDescriptor, true); + allMethodsToBeImplementedToResult.put(saveAllKey, true); + allMethodsToBeImplementedToResult.put(bridgeSaveAllKey, true); } } private void generateFlush(ClassCreator classCreator, String generatedClassName, - Map 
allMethodsToBeImplementedToResult) { - - MethodDescriptor flushDescriptor = MethodDescriptor.ofMethod(generatedClassName, "flush", void.class.getName()); - - if (allMethodsToBeImplementedToResult.containsKey(flushDescriptor)) { - - if (!classCreator.getExistingMethods().contains(flushDescriptor)) { - try (MethodCreator flush = classCreator.getMethodCreator(flushDescriptor)) { - flush.addAnnotation(Transactional.class); - flush.invokeVirtualMethod( - MethodDescriptor.ofMethod(AbstractManagedJpaOperations.class, "flush", void.class), - flush.readStaticField(operationsField)); - flush.returnValue(null); - } + Map allMethodsToBeImplementedToResult, Set existingMethods) { + + String flushKey = GenerationUtil.methodKey("flush", void.class.getName()); + + if (allMethodsToBeImplementedToResult.containsKey(flushKey)) { + if (!existingMethods.contains(flushKey)) { + classCreator.method("flush", mc -> { + mc.returning(void.class); + mc.addAnnotation(Transactional.class); + mc.body(bc -> { + LocalVar ops = bc.localVar("ops", bc.getStaticField(operationsField)); + bc.invokeVirtual( + MethodDesc.of(AbstractManagedJpaOperations.class, "flush", void.class), + ops); + bc.return_(); + }); + }); + existingMethods.add(flushKey); } - - allMethodsToBeImplementedToResult.put(flushDescriptor, true); + allMethodsToBeImplementedToResult.put(flushKey, true); } } - private void generateFindById(ClassCreator classCreator, FieldDescriptor entityClassFieldDescriptor, + private void generateFindById(ClassCreator classCreator, FieldDesc entityClassFieldDescriptor, String generatedClassName, String entityTypeStr, String idTypeStr, - Map allMethodsToBeImplementedToResult) { - - MethodDescriptor findByIdDescriptor = MethodDescriptor.ofMethod(generatedClassName, "findById", - Optional.class.getName(), idTypeStr); - MethodDescriptor bridgeFindByIdDescriptor = MethodDescriptor.ofMethod(generatedClassName, "findById", - Optional.class.getName(), Object.class); - - if 
(allMethodsToBeImplementedToResult.containsKey(findByIdDescriptor) - || allMethodsToBeImplementedToResult.containsKey(bridgeFindByIdDescriptor)) { - - if (!classCreator.getExistingMethods().contains(findByIdDescriptor)) { - try (MethodCreator findById = classCreator.getMethodCreator(findByIdDescriptor)) { - findById.setSignature(String.format("(L%s;)Ljava/util/Optional;", - idTypeStr.replace('.', '/'), entityTypeStr.replace('.', '/'))); - ResultHandle id = findById.getMethodParam(0); - ResultHandle entity = findById.invokeVirtualMethod( - MethodDescriptor.ofMethod(AbstractManagedJpaOperations.class, "findById", Object.class, Class.class, - Object.class), - findById.readStaticField(operationsField), - findById.readInstanceField(entityClassFieldDescriptor, findById.getThis()), id); - ResultHandle optional = findById.invokeStaticMethod( - MethodDescriptor.ofMethod(Optional.class, "ofNullable", Optional.class, Object.class), - entity); - findById.returnValue(optional); - } - try (MethodCreator bridgeFindById = classCreator.getMethodCreator(bridgeFindByIdDescriptor)) { - MethodDescriptor findById = MethodDescriptor.ofMethod(generatedClassName, "findById", - Optional.class.getName(), - idTypeStr); - ResultHandle methodParam = bridgeFindById.getMethodParam(0); - ResultHandle castedMethodParam = bridgeFindById.checkCast(methodParam, idTypeStr); - ResultHandle result = bridgeFindById.invokeVirtualMethod(findById, bridgeFindById.getThis(), - castedMethodParam); - bridgeFindById.returnValue(result); - } + Map allMethodsToBeImplementedToResult, Set existingMethods) { + + String findByIdKey = GenerationUtil.methodKey("findById", Optional.class.getName(), idTypeStr); + String bridgeFindByIdKey = GenerationUtil.methodKey("findById", Optional.class.getName(), Object.class.getName()); + + if (allMethodsToBeImplementedToResult.containsKey(findByIdKey) + || allMethodsToBeImplementedToResult.containsKey(bridgeFindByIdKey)) { + + if (!existingMethods.contains(findByIdKey)) { + 
MethodTypeDesc findByIdMtd = GenerationUtil.toMethodTypeDesc(Optional.class.getName(), idTypeStr); + classCreator.method("findById", mc -> { + mc.setType(findByIdMtd); + ParamVar idParam = mc.parameter("id"); + mc.body(bc -> { + LocalVar ops = bc.localVar("ops", bc.getStaticField(operationsField)); + LocalVar entityClass = bc.localVar("entityClass", + bc.get(mc.this_().field(entityClassFieldDescriptor))); + Expr entity = bc.invokeVirtual( + MethodDesc.of(AbstractManagedJpaOperations.class, "findById", Object.class, Class.class, + Object.class), + ops, entityClass, idParam); + Expr optional = bc.invokeStatic( + MethodDesc.of(Optional.class, "ofNullable", Optional.class, Object.class), + entity); + bc.return_(optional); + }); + }); + existingMethods.add(findByIdKey); + + // Bridge method + MethodTypeDesc bridgeMtd = GenerationUtil.toMethodTypeDesc(Optional.class.getName(), Object.class.getName()); + MethodDesc findByIdDesc = ClassMethodDesc.of(ClassDesc.of(generatedClassName), "findById", findByIdMtd); + classCreator.method("findById", bmc -> { + bmc.setType(bridgeMtd); + ParamVar methodParam = bmc.parameter("id"); + bmc.body(bbc -> { + Expr castedParam = bbc.cast(methodParam, ClassDesc.of(idTypeStr)); + Expr result = bbc.invokeVirtual(findByIdDesc, bmc.this_(), castedParam); + bbc.return_(result); + }); + }); + existingMethods.add(bridgeFindByIdKey); } - allMethodsToBeImplementedToResult.put(findByIdDescriptor, true); - allMethodsToBeImplementedToResult.put(bridgeFindByIdDescriptor, true); + allMethodsToBeImplementedToResult.put(findByIdKey, true); + allMethodsToBeImplementedToResult.put(bridgeFindByIdKey, true); } } - private void generateExistsById(ClassCreator classCreator, FieldDescriptor entityClassFieldDescriptor, + private void generateExistsById(ClassCreator classCreator, FieldDesc entityClassFieldDescriptor, String generatedClassName, String entityTypeStr, String idTypeStr, - Map allMethodsToBeImplementedToResult) { + Map allMethodsToBeImplementedToResult, 
Set existingMethods) { - MethodDescriptor existsByIdDescriptor = MethodDescriptor.ofMethod(generatedClassName, "existsById", - boolean.class, idTypeStr); - MethodDescriptor bridgeExistsByIdDescriptor = MethodDescriptor.ofMethod(generatedClassName, "existsById", boolean.class, - Object.class); + String existsByIdKey = GenerationUtil.methodKey("existsById", boolean.class.getName(), idTypeStr); + String bridgeExistsByIdKey = GenerationUtil.methodKey("existsById", boolean.class.getName(), Object.class.getName()); - if (allMethodsToBeImplementedToResult.containsKey(existsByIdDescriptor) - || allMethodsToBeImplementedToResult.containsKey(bridgeExistsByIdDescriptor)) { + if (allMethodsToBeImplementedToResult.containsKey(existsByIdKey) + || allMethodsToBeImplementedToResult.containsKey(bridgeExistsByIdKey)) { - if (!classCreator.getExistingMethods().contains(existsByIdDescriptor)) { - MethodDescriptor findById = MethodDescriptor.ofMethod(generatedClassName, "findById", - Optional.class.getName(), - idTypeStr); - - // we need to force the generation of findById since this method depends on it - allMethodsToBeImplementedToResult.put(findById, false); + if (!existingMethods.contains(existsByIdKey)) { + // Force generation of findById + String findByIdKey = GenerationUtil.methodKey("findById", Optional.class.getName(), idTypeStr); + allMethodsToBeImplementedToResult.put(findByIdKey, false); generateFindById(classCreator, entityClassFieldDescriptor, generatedClassName, entityTypeStr, idTypeStr, - allMethodsToBeImplementedToResult); - - try (MethodCreator existsById = classCreator.getMethodCreator(existsByIdDescriptor)) { - - ResultHandle methodParam = existsById.getMethodParam(0); - ResultHandle optional = existsById.invokeVirtualMethod(findById, existsById.getThis(), - methodParam); - ResultHandle isPresent = existsById.invokeVirtualMethod( - ofMethod(Optional.class, "isPresent", boolean.class), - optional); - existsById.returnValue(isPresent); - } - try (MethodCreator 
bridgeExistsById = classCreator.getMethodCreator(bridgeExistsByIdDescriptor)) { - MethodDescriptor existsById = MethodDescriptor.ofMethod(generatedClassName, "existsById", - boolean.class.getName(), - idTypeStr); - ResultHandle methodParam = bridgeExistsById.getMethodParam(0); - ResultHandle castedMethodParam = bridgeExistsById.checkCast(methodParam, idTypeStr); - ResultHandle result = bridgeExistsById.invokeVirtualMethod(existsById, bridgeExistsById.getThis(), - castedMethodParam); - bridgeExistsById.returnValue(result); - } + allMethodsToBeImplementedToResult, existingMethods); + + MethodTypeDesc findByIdMtd = GenerationUtil.toMethodTypeDesc(Optional.class.getName(), idTypeStr); + MethodDesc findByIdDesc = ClassMethodDesc.of(ClassDesc.of(generatedClassName), "findById", findByIdMtd); + + MethodTypeDesc existsByIdMtd = GenerationUtil.toMethodTypeDesc(boolean.class.getName(), idTypeStr); + classCreator.method("existsById", mc -> { + mc.setType(existsByIdMtd); + ParamVar idParam = mc.parameter("id"); + mc.body(bc -> { + Expr optional = bc.invokeVirtual(findByIdDesc, mc.this_(), idParam); + Expr isPresent = bc.invokeVirtual( + MethodDesc.of(Optional.class, "isPresent", boolean.class), + optional); + bc.return_(isPresent); + }); + }); + existingMethods.add(existsByIdKey); + + // Bridge method + MethodTypeDesc bridgeMtd = GenerationUtil.toMethodTypeDesc(boolean.class.getName(), Object.class.getName()); + MethodDesc existsByIdDesc = ClassMethodDesc.of(ClassDesc.of(generatedClassName), "existsById", existsByIdMtd); + classCreator.method("existsById", bmc -> { + bmc.setType(bridgeMtd); + ParamVar methodParam = bmc.parameter("id"); + bmc.body(bbc -> { + Expr castedParam = bbc.cast(methodParam, ClassDesc.of(idTypeStr)); + Expr result = bbc.invokeVirtual(existsByIdDesc, bmc.this_(), castedParam); + bbc.return_(result); + }); + }); + existingMethods.add(bridgeExistsByIdKey); } - allMethodsToBeImplementedToResult.put(existsByIdDescriptor, true); - 
allMethodsToBeImplementedToResult.put(bridgeExistsByIdDescriptor, true); + allMethodsToBeImplementedToResult.put(existsByIdKey, true); + allMethodsToBeImplementedToResult.put(bridgeExistsByIdKey, true); } } - private void generateGetOne(ClassCreator classCreator, FieldDescriptor entityClassFieldDescriptor, + private void generateGetOne(ClassCreator classCreator, FieldDesc entityClassFieldDescriptor, String generatedClassName, String entityTypeStr, String idTypeStr, - Map allMethodsToBeImplementedToResult) { + Map allMethodsToBeImplementedToResult, Set existingMethods) { generateSpecificFindEntityReference(classCreator, entityClassFieldDescriptor, generatedClassName, entityTypeStr, - idTypeStr, "getOne", allMethodsToBeImplementedToResult); - + idTypeStr, "getOne", allMethodsToBeImplementedToResult, existingMethods); } - private void generateGetById(ClassCreator classCreator, FieldDescriptor entityClassFieldDescriptor, + private void generateGetById(ClassCreator classCreator, FieldDesc entityClassFieldDescriptor, String generatedClassName, String entityTypeStr, String idTypeStr, - Map allMethodsToBeImplementedToResult) { - + Map allMethodsToBeImplementedToResult, Set existingMethods) { generateSpecificFindEntityReference(classCreator, entityClassFieldDescriptor, generatedClassName, entityTypeStr, - idTypeStr, "getById", allMethodsToBeImplementedToResult); + idTypeStr, "getById", allMethodsToBeImplementedToResult, existingMethods); } - private void generateGetReferenceById(ClassCreator classCreator, FieldDescriptor entityClassFieldDescriptor, + private void generateGetReferenceById(ClassCreator classCreator, FieldDesc entityClassFieldDescriptor, String generatedClassName, String entityTypeStr, String idTypeStr, - Map allMethodsToBeImplementedToResult) { - + Map allMethodsToBeImplementedToResult, Set existingMethods) { generateSpecificFindEntityReference(classCreator, entityClassFieldDescriptor, generatedClassName, entityTypeStr, - idTypeStr, "getReferenceById", 
allMethodsToBeImplementedToResult); + idTypeStr, "getReferenceById", allMethodsToBeImplementedToResult, existingMethods); } - private void generateSpecificFindEntityReference(ClassCreator classCreator, FieldDescriptor entityClassFieldDescriptor, + private void generateSpecificFindEntityReference(ClassCreator classCreator, FieldDesc entityClassFieldDescriptor, String generatedClassName, String entityTypeStr, String idTypeStr, String actualMethodName, - Map allMethodsToBeImplementedToResult) { - MethodDescriptor getReferenceByIdDescriptor = MethodDescriptor.ofMethod(generatedClassName, actualMethodName, - entityTypeStr, idTypeStr); - MethodDescriptor bridgegetReferenceByIdDescriptor = MethodDescriptor.ofMethod(generatedClassName, actualMethodName, - Object.class, Object.class); - - if (allMethodsToBeImplementedToResult.containsKey(getReferenceByIdDescriptor) - || allMethodsToBeImplementedToResult.containsKey(bridgegetReferenceByIdDescriptor)) { - - if (!classCreator.getExistingMethods().contains(getReferenceByIdDescriptor)) { - try (MethodCreator findById = classCreator.getMethodCreator(getReferenceByIdDescriptor)) { - - ResultHandle entity = findById.invokeStaticMethod(ofMethod(RepositorySupport.class, actualMethodName, - Object.class, AbstractManagedJpaOperations.class, Class.class, Object.class), - findById.readStaticField(operationsField), - findById.readInstanceField(entityClassFieldDescriptor, findById.getThis()), - findById.getMethodParam(0)); - - findById.returnValue(entity); - } - try (MethodCreator bridgeGetOne = classCreator.getMethodCreator(bridgegetReferenceByIdDescriptor)) { - MethodDescriptor getReferenceById = MethodDescriptor.ofMethod(generatedClassName, actualMethodName, - entityTypeStr, idTypeStr); - ResultHandle methodParam = bridgeGetOne.getMethodParam(0); - ResultHandle castedMethodParam = bridgeGetOne.checkCast(methodParam, idTypeStr); - ResultHandle result = bridgeGetOne.invokeVirtualMethod(getReferenceById, bridgeGetOne.getThis(), - 
castedMethodParam); - bridgeGetOne.returnValue(result); - } + Map allMethodsToBeImplementedToResult, Set existingMethods) { + + String methodKey = GenerationUtil.methodKey(actualMethodName, entityTypeStr, idTypeStr); + String bridgeKey = GenerationUtil.methodKey(actualMethodName, Object.class.getName(), Object.class.getName()); + + if (allMethodsToBeImplementedToResult.containsKey(methodKey) + || allMethodsToBeImplementedToResult.containsKey(bridgeKey)) { + + if (!existingMethods.contains(methodKey)) { + MethodTypeDesc mtd = GenerationUtil.toMethodTypeDesc(entityTypeStr, idTypeStr); + classCreator.method(actualMethodName, mc -> { + mc.setType(mtd); + ParamVar idParam = mc.parameter("id"); + mc.body(bc -> { + LocalVar ops = bc.localVar("ops", bc.getStaticField(operationsField)); + LocalVar entityClass = bc.localVar("entityClass", + bc.get(mc.this_().field(entityClassFieldDescriptor))); + Expr entity = bc.invokeStatic( + MethodDesc.of(RepositorySupport.class, actualMethodName, + Object.class, AbstractManagedJpaOperations.class, Class.class, Object.class), + ops, entityClass, idParam); + bc.return_(entity); + }); + }); + existingMethods.add(methodKey); + + // Bridge method + MethodTypeDesc bridgeMtd = GenerationUtil.toMethodTypeDesc(Object.class.getName(), Object.class.getName()); + MethodDesc refDesc = ClassMethodDesc.of(ClassDesc.of(generatedClassName), actualMethodName, mtd); + classCreator.method(actualMethodName, bmc -> { + bmc.setType(bridgeMtd); + ParamVar methodParam = bmc.parameter("id"); + bmc.body(bbc -> { + Expr castedParam = bbc.cast(methodParam, ClassDesc.of(idTypeStr)); + Expr result = bbc.invokeVirtual(refDesc, bmc.this_(), castedParam); + bbc.return_(result); + }); + }); + existingMethods.add(bridgeKey); } - allMethodsToBeImplementedToResult.put(getReferenceByIdDescriptor, true); - allMethodsToBeImplementedToResult.put(bridgegetReferenceByIdDescriptor, true); + allMethodsToBeImplementedToResult.put(methodKey, true); + 
allMethodsToBeImplementedToResult.put(bridgeKey, true); } } - private void generateFindAll(ClassCreator classCreator, FieldDescriptor entityClassFieldDescriptor, - String generatedClassName, String entityTypeStr, Map allMethodsToBeImplementedToResult) { - - MethodDescriptor findAllDescriptor = MethodDescriptor.ofMethod(generatedClassName, "findAll", List.class); - MethodDescriptor bridgeFindAllDescriptor = MethodDescriptor.ofMethod(generatedClassName, "findAll", Iterable.class); - - if (allMethodsToBeImplementedToResult.containsKey(findAllDescriptor) - || allMethodsToBeImplementedToResult.containsKey(bridgeFindAllDescriptor)) { - - if (!classCreator.getExistingMethods().contains(findAllDescriptor)) { - try (MethodCreator findAll = classCreator.getMethodCreator(findAllDescriptor)) { - findAll.setSignature(String.format("()Ljava/util/List;", - entityTypeStr.replace('.', '/'))); - ResultHandle panacheQuery = findAll.invokeVirtualMethod( - ofMethod(AbstractManagedJpaOperations.class, "findAll", Object.class, Class.class), - findAll.readStaticField(operationsField), - findAll.readInstanceField(entityClassFieldDescriptor, findAll.getThis())); - ResultHandle list = findAll.invokeInterfaceMethod( - ofMethod(PanacheQuery.class, "list", List.class), - panacheQuery); - findAll.returnValue(list); - } - try (MethodCreator bridgeFindAll = classCreator.getMethodCreator(bridgeFindAllDescriptor)) { - MethodDescriptor findAll = MethodDescriptor.ofMethod(generatedClassName, "findAll", List.class.getName()); - ResultHandle result = bridgeFindAll.invokeVirtualMethod(findAll, bridgeFindAll.getThis()); - bridgeFindAll.returnValue(result); - } + private void generateFindAll(ClassCreator classCreator, FieldDesc entityClassFieldDescriptor, + String generatedClassName, String entityTypeStr, Map allMethodsToBeImplementedToResult, + Set existingMethods) { + + String findAllKey = GenerationUtil.methodKey("findAll", List.class.getName()); + String bridgeFindAllKey = 
GenerationUtil.methodKey("findAll", Iterable.class.getName()); + + if (allMethodsToBeImplementedToResult.containsKey(findAllKey) + || allMethodsToBeImplementedToResult.containsKey(bridgeFindAllKey)) { + + if (!existingMethods.contains(findAllKey)) { + classCreator.method("findAll", mc -> { + mc.returning(List.class); + mc.body(bc -> { + LocalVar ops = bc.localVar("ops", bc.getStaticField(operationsField)); + LocalVar entityClass = bc.localVar("entityClass", + bc.get(mc.this_().field(entityClassFieldDescriptor))); + Expr panacheQuery = bc.invokeVirtual( + MethodDesc.of(AbstractManagedJpaOperations.class, "findAll", Object.class, Class.class), + ops, entityClass); + Expr list = bc.invokeInterface( + MethodDesc.of(PanacheQuery.class, "list", List.class), + panacheQuery); + bc.return_(list); + }); + }); + existingMethods.add(findAllKey); + + // Bridge method + MethodDesc findAllDesc = ClassMethodDesc.of(ClassDesc.of(generatedClassName), "findAll", + MethodTypeDesc.of(ConstantDescs.CD_List)); + classCreator.method("findAll", bmc -> { + bmc.returning(Iterable.class); + bmc.body(bbc -> { + Expr result = bbc.invokeVirtual(findAllDesc, bmc.this_()); + bbc.return_(result); + }); + }); + existingMethods.add(bridgeFindAllKey); } - allMethodsToBeImplementedToResult.put(findAllDescriptor, true); - allMethodsToBeImplementedToResult.put(bridgeFindAllDescriptor, true); + allMethodsToBeImplementedToResult.put(findAllKey, true); + allMethodsToBeImplementedToResult.put(bridgeFindAllKey, true); } } - private void generateFindAllWithSort(ClassCreator classCreator, FieldDescriptor entityClassFieldDescriptor, - String generatedClassName, String entityTypeStr, Map allMethodsToBeImplementedToResult) { - - MethodDescriptor findAllDescriptor = MethodDescriptor.ofMethod(generatedClassName, "findAll", List.class, Sort.class); - MethodDescriptor bridgeFindAllDescriptor = MethodDescriptor.ofMethod(generatedClassName, "findAll", Iterable.class, - Sort.class); - - if 
(allMethodsToBeImplementedToResult.containsKey(findAllDescriptor) - || allMethodsToBeImplementedToResult.containsKey(bridgeFindAllDescriptor)) { - - if (!classCreator.getExistingMethods().contains(findAllDescriptor)) { - try (MethodCreator findAll = classCreator.getMethodCreator(findAllDescriptor)) { - findAll.setSignature(String.format("(Lorg/springframework/data/domain/Sort;)Ljava/util/List;", - entityTypeStr.replace('.', '/'))); - - ResultHandle sort = findAll.invokeStaticMethod( - MethodDescriptor.ofMethod(TypesConverter.class, "toPanacheSort", - io.quarkus.panache.common.Sort.class, - org.springframework.data.domain.Sort.class), - findAll.getMethodParam(0)); - - ResultHandle panacheQuery = findAll.invokeVirtualMethod( - ofMethod(AbstractManagedJpaOperations.class, "findAll", Object.class, Class.class, - io.quarkus.panache.common.Sort.class), - findAll.readStaticField(operationsField), - findAll.readInstanceField(entityClassFieldDescriptor, findAll.getThis()), sort); - ResultHandle list = findAll.invokeInterfaceMethod( - ofMethod(PanacheQuery.class, "list", List.class), - panacheQuery); - findAll.returnValue(list); - } - try (MethodCreator bridgeFindAll = classCreator.getMethodCreator(bridgeFindAllDescriptor)) { - MethodDescriptor findAll = MethodDescriptor.ofMethod(generatedClassName, "findAll", List.class.getName(), - Sort.class); - ResultHandle result = bridgeFindAll.invokeVirtualMethod(findAll, bridgeFindAll.getThis(), - bridgeFindAll.getMethodParam(0)); - bridgeFindAll.returnValue(result); - } + private void generateFindAllWithSort(ClassCreator classCreator, FieldDesc entityClassFieldDescriptor, + String generatedClassName, String entityTypeStr, Map allMethodsToBeImplementedToResult, + Set existingMethods) { + + String findAllKey = GenerationUtil.methodKey("findAll", List.class.getName(), Sort.class.getName()); + String bridgeFindAllKey = GenerationUtil.methodKey("findAll", Iterable.class.getName(), Sort.class.getName()); + + if 
(allMethodsToBeImplementedToResult.containsKey(findAllKey) + || allMethodsToBeImplementedToResult.containsKey(bridgeFindAllKey)) { + + if (!existingMethods.contains(findAllKey)) { + MethodTypeDesc findAllMtd = GenerationUtil.toMethodTypeDesc(List.class.getName(), Sort.class.getName()); + classCreator.method("findAll", mc -> { + mc.setType(findAllMtd); + ParamVar sortParam = mc.parameter("sort"); + mc.body(bc -> { + LocalVar ops = bc.localVar("ops", bc.getStaticField(operationsField)); + LocalVar entityClass = bc.localVar("entityClass", + bc.get(mc.this_().field(entityClassFieldDescriptor))); + Expr sort = bc.invokeStatic( + MethodDesc.of(TypesConverter.class, "toPanacheSort", + io.quarkus.panache.common.Sort.class, Sort.class), + sortParam); + + Expr panacheQuery = bc.invokeVirtual( + MethodDesc.of(AbstractManagedJpaOperations.class, "findAll", Object.class, Class.class, + io.quarkus.panache.common.Sort.class), + ops, entityClass, sort); + Expr list = bc.invokeInterface( + MethodDesc.of(PanacheQuery.class, "list", List.class), + panacheQuery); + bc.return_(list); + }); + }); + existingMethods.add(findAllKey); + + // Bridge method + MethodDesc findAllDesc = ClassMethodDesc.of(ClassDesc.of(generatedClassName), "findAll", findAllMtd); + MethodTypeDesc bridgeMtd = GenerationUtil.toMethodTypeDesc(Iterable.class.getName(), Sort.class.getName()); + classCreator.method("findAll", bmc -> { + bmc.setType(bridgeMtd); + ParamVar sortParam = bmc.parameter("sort"); + bmc.body(bbc -> { + Expr result = bbc.invokeVirtual(findAllDesc, bmc.this_(), sortParam); + bbc.return_(result); + }); + }); + existingMethods.add(bridgeFindAllKey); } - allMethodsToBeImplementedToResult.put(findAllDescriptor, true); - allMethodsToBeImplementedToResult.put(bridgeFindAllDescriptor, true); + allMethodsToBeImplementedToResult.put(findAllKey, true); + allMethodsToBeImplementedToResult.put(bridgeFindAllKey, true); } } - private void generateFindAllWithPageable(ClassCreator classCreator, FieldDescriptor 
entityClassFieldDescriptor, - String generatedClassName, String entityTypeStr, Map allMethodsToBeImplementedToResult) { - - MethodDescriptor findAllDescriptor = MethodDescriptor.ofMethod(generatedClassName, "findAll", Page.class, - Pageable.class); - - if (allMethodsToBeImplementedToResult.containsKey(findAllDescriptor)) { - - if (!classCreator.getExistingMethods().contains(findAllDescriptor)) { - try (MethodCreator findAll = classCreator.getMethodCreator(findAllDescriptor)) { - findAll.setSignature(String.format( - "(Lorg/springframework/data/domain/Pageable;)Lorg/springframework/data/domain/Page;", - entityTypeStr.replace('.', '/'))); - - ResultHandle pageable = findAll.getMethodParam(0); - ResultHandle pageableSort = findAll.invokeInterfaceMethod( - MethodDescriptor.ofMethod(Pageable.class, "getSort", Sort.class), - pageable); - - ResultHandle panachePage = findAll.invokeStaticMethod( - MethodDescriptor.ofMethod(TypesConverter.class, "toPanachePage", - io.quarkus.panache.common.Page.class, Pageable.class), - pageable); - ResultHandle panacheSort = findAll.invokeStaticMethod( - MethodDescriptor.ofMethod(TypesConverter.class, "toPanacheSort", - io.quarkus.panache.common.Sort.class, - org.springframework.data.domain.Sort.class), - pageableSort); - - // depending on whether there was a io.quarkus.panache.common.Sort returned, we need to execute a different findAll method - BranchResult sortNullBranch = findAll.ifNull(panacheSort); - BytecodeCreator sortNullTrue = sortNullBranch.trueBranch(); - BytecodeCreator sortNullFalse = sortNullBranch.falseBranch(); - AssignableResultHandle panacheQueryVar = findAll.createVariable(PanacheQuery.class); - - ResultHandle panacheQueryWithoutSort = sortNullTrue.invokeVirtualMethod( - ofMethod(AbstractManagedJpaOperations.class, "findAll", Object.class, Class.class), - sortNullTrue.readStaticField(operationsField), - sortNullTrue.readInstanceField(entityClassFieldDescriptor, sortNullTrue.getThis())); - 
sortNullTrue.assign(panacheQueryVar, panacheQueryWithoutSort); - sortNullTrue.breakScope(); - - ResultHandle panacheQueryWithSort = sortNullFalse.invokeVirtualMethod( - ofMethod(AbstractManagedJpaOperations.class, "findAll", Object.class, Class.class, - io.quarkus.panache.common.Sort.class), - sortNullFalse.readStaticField(operationsField), - sortNullFalse.readInstanceField(entityClassFieldDescriptor, sortNullFalse.getThis()), panacheSort); - sortNullFalse.assign(panacheQueryVar, panacheQueryWithSort); - sortNullFalse.breakScope(); - - ResultHandle panacheQuery = findAll.invokeInterfaceMethod( - MethodDescriptor.ofMethod(PanacheQuery.class, "page", PanacheQuery.class, - io.quarkus.panache.common.Page.class), - panacheQueryVar, panachePage); - ResultHandle list = findAll.invokeInterfaceMethod( - MethodDescriptor.ofMethod(PanacheQuery.class, "list", List.class), - panacheQuery); - ResultHandle count = findAll.invokeInterfaceMethod( - MethodDescriptor.ofMethod(PanacheQuery.class, "count", long.class), - panacheQuery); - ResultHandle pageResult = findAll.newInstance( - MethodDescriptor.ofConstructor(PageImpl.class, List.class, Pageable.class, long.class), - list, findAll.getMethodParam(0), count); - - findAll.returnValue(pageResult); - } + private void generateFindAllWithPageable(ClassCreator classCreator, FieldDesc entityClassFieldDescriptor, + String generatedClassName, String entityTypeStr, Map allMethodsToBeImplementedToResult, + Set existingMethods) { + + String findAllKey = GenerationUtil.methodKey("findAll", Page.class.getName(), Pageable.class.getName()); + + if (allMethodsToBeImplementedToResult.containsKey(findAllKey)) { + if (!existingMethods.contains(findAllKey)) { + MethodTypeDesc findAllMtd = GenerationUtil.toMethodTypeDesc(Page.class.getName(), Pageable.class.getName()); + classCreator.method("findAll", mc -> { + mc.setType(findAllMtd); + ParamVar pageableParam = mc.parameter("pageable"); + mc.body(bc -> { + LocalVar ops = bc.localVar("ops", 
bc.getStaticField(operationsField)); + LocalVar entityClass = bc.localVar("entityClass", + bc.get(mc.this_().field(entityClassFieldDescriptor))); + LocalVar pageableSort = bc.localVar("pageableSort", + bc.invokeInterface( + MethodDesc.of(Pageable.class, "getSort", Sort.class), + pageableParam)); + LocalVar panachePage = bc.localVar("panachePage", + bc.invokeStatic( + MethodDesc.of(TypesConverter.class, "toPanachePage", + io.quarkus.panache.common.Page.class, Pageable.class), + pageableParam)); + LocalVar panacheSort = bc.localVar("panacheSort", + bc.invokeStatic( + MethodDesc.of(TypesConverter.class, "toPanacheSort", + io.quarkus.panache.common.Sort.class, Sort.class), + pageableSort)); + + // Build panacheQuery based on whether sort is null + LocalVar panacheQueryVar = bc.localVar("panacheQuery", + ConstantDescs.CD_Object, + Const.ofNull(ConstantDescs.CD_Object)); + + bc.ifElse(bc.isNull(panacheSort), + sortNullTrue -> { + Expr pqWithoutSort = sortNullTrue.invokeVirtual( + MethodDesc.of(AbstractManagedJpaOperations.class, "findAll", Object.class, + Class.class), + ops, entityClass); + sortNullTrue.set(panacheQueryVar, pqWithoutSort); + }, + sortNullFalse -> { + Expr pqWithSort = sortNullFalse.invokeVirtual( + MethodDesc.of(AbstractManagedJpaOperations.class, "findAll", Object.class, + Class.class, io.quarkus.panache.common.Sort.class), + ops, entityClass, panacheSort); + sortNullFalse.set(panacheQueryVar, pqWithSort); + }); + + LocalVar panacheQuery = bc.localVar("pagedQuery", + bc.invokeInterface( + MethodDesc.of(PanacheQuery.class, "page", PanacheQuery.class, + io.quarkus.panache.common.Page.class), + panacheQueryVar, panachePage)); + Expr list = bc.invokeInterface( + MethodDesc.of(PanacheQuery.class, "list", List.class), + panacheQuery); + Expr count = bc.invokeInterface( + MethodDesc.of(PanacheQuery.class, "count", long.class), + panacheQuery); + Expr pageResult = bc.new_(ClassDesc.of(PageImpl.class.getName()), + list, pageableParam, count); + + 
bc.return_(pageResult); + }); + }); + existingMethods.add(findAllKey); } - - allMethodsToBeImplementedToResult.put(findAllDescriptor, true); + allMethodsToBeImplementedToResult.put(findAllKey, true); } } - private void generateFindAllById(ClassCreator classCreator, FieldDescriptor entityClassFieldDescriptor, + private void generateFindAllById(ClassCreator classCreator, FieldDesc entityClassFieldDescriptor, String generatedClassName, DotName entityDotName, String entityTypeStr, String idTypeStr, - Map allMethodsToBeImplementedToResult) { - - MethodDescriptor findAllByIdDescriptor = MethodDescriptor.ofMethod(generatedClassName, "findAllById", List.class, - Iterable.class); - MethodDescriptor bridgeFindAllByIdDescriptor = MethodDescriptor.ofMethod(generatedClassName, "findAllById", - Iterable.class, Iterable.class); - - if (allMethodsToBeImplementedToResult.containsKey(findAllByIdDescriptor) - || allMethodsToBeImplementedToResult.containsKey(bridgeFindAllByIdDescriptor)) { - - if (!classCreator.getExistingMethods().contains(findAllByIdDescriptor)) { - try (MethodCreator findAllById = classCreator.getMethodCreator(findAllByIdDescriptor)) { - findAllById.setSignature(String.format("(Ljava/lang/Iterable;)Ljava/util/List;", - idTypeStr.replace('.', '/'), entityTypeStr.replace('.', '/'))); - - ResultHandle entityClass = findAllById.readInstanceField(entityClassFieldDescriptor, - findAllById.getThis()); - - ResultHandle list = findAllById.invokeStaticMethod( - MethodDescriptor.ofMethod(RepositorySupport.class, "findByIds", - List.class, AbstractManagedJpaOperations.class, Class.class, Iterable.class), - findAllById.readStaticField(operationsField), - entityClass, - findAllById.getMethodParam(0)); - - findAllById.returnValue(list); - } - try (MethodCreator bridgeFindAllById = classCreator.getMethodCreator(bridgeFindAllByIdDescriptor)) { - MethodDescriptor findAllById = MethodDescriptor.ofMethod(generatedClassName, "findAllById", - List.class.getName(), Iterable.class); - 
ResultHandle result = bridgeFindAllById.invokeVirtualMethod(findAllById, bridgeFindAllById.getThis(), - bridgeFindAllById.getMethodParam(0)); - bridgeFindAllById.returnValue(result); - } + Map allMethodsToBeImplementedToResult, Set existingMethods) { + + String findAllByIdKey = GenerationUtil.methodKey("findAllById", List.class.getName(), Iterable.class.getName()); + String bridgeFindAllByIdKey = GenerationUtil.methodKey("findAllById", Iterable.class.getName(), + Iterable.class.getName()); + + if (allMethodsToBeImplementedToResult.containsKey(findAllByIdKey) + || allMethodsToBeImplementedToResult.containsKey(bridgeFindAllByIdKey)) { + + if (!existingMethods.contains(findAllByIdKey)) { + MethodTypeDesc findAllByIdMtd = GenerationUtil.toMethodTypeDesc(List.class.getName(), + Iterable.class.getName()); + classCreator.method("findAllById", mc -> { + mc.setType(findAllByIdMtd); + ParamVar idsParam = mc.parameter("ids"); + mc.body(bc -> { + LocalVar ops = bc.localVar("ops", bc.getStaticField(operationsField)); + LocalVar entityClass = bc.localVar("entityClass", + bc.get(mc.this_().field(entityClassFieldDescriptor))); + Expr list = bc.invokeStatic( + MethodDesc.of(RepositorySupport.class, "findByIds", + List.class, AbstractManagedJpaOperations.class, Class.class, Iterable.class), + ops, entityClass, + idsParam); + bc.return_(list); + }); + }); + existingMethods.add(findAllByIdKey); + + // Bridge method + MethodDesc findAllByIdDesc = ClassMethodDesc.of(ClassDesc.of(generatedClassName), "findAllById", + findAllByIdMtd); + MethodTypeDesc bridgeMtd = GenerationUtil.toMethodTypeDesc(Iterable.class.getName(), + Iterable.class.getName()); + classCreator.method("findAllById", bmc -> { + bmc.setType(bridgeMtd); + ParamVar methodParam = bmc.parameter("ids"); + bmc.body(bbc -> { + Expr result = bbc.invokeVirtual(findAllByIdDesc, bmc.this_(), methodParam); + bbc.return_(result); + }); + }); + existingMethods.add(bridgeFindAllByIdKey); } - 
allMethodsToBeImplementedToResult.put(findAllByIdDescriptor, true); - allMethodsToBeImplementedToResult.put(bridgeFindAllByIdDescriptor, true); + allMethodsToBeImplementedToResult.put(findAllByIdKey, true); + allMethodsToBeImplementedToResult.put(bridgeFindAllByIdKey, true); } } - private FieldInfo getIdField(AnnotationTarget idAnnotationTarget) { - if (idAnnotationTarget instanceof FieldInfo) { - return idAnnotationTarget.asField(); - } - - MethodInfo methodInfo = idAnnotationTarget.asMethod(); - String propertyName = JavaBeanUtil.getPropertyNameFromGetter(methodInfo.name()); - ClassInfo entityClass = methodInfo.declaringClass(); - FieldInfo field = entityClass.field(propertyName); - if (field == null) { - throw new IllegalArgumentException("Entity " + entityClass + " does not appear to have a field backing method" - + methodInfo.name() + " which is annotated with @Id"); - } - return field; - } - - private void generateCount(ClassCreator classCreator, FieldDescriptor entityClassFieldDescriptor, String generatedClassName, - Map allMethodsToBeImplementedToResult) { - - MethodDescriptor countDescriptor = MethodDescriptor.ofMethod(generatedClassName, "count", long.class); - - if (allMethodsToBeImplementedToResult.containsKey(countDescriptor)) { - if (!classCreator.getExistingMethods().contains(countDescriptor)) { - try (MethodCreator count = classCreator.getMethodCreator(countDescriptor)) { - ResultHandle result = count.invokeVirtualMethod( - ofMethod(AbstractManagedJpaOperations.class, "count", long.class, Class.class), - count.readStaticField(operationsField), - count.readInstanceField(entityClassFieldDescriptor, count.getThis())); - count.returnValue(result); - } + private void generateCount(ClassCreator classCreator, FieldDesc entityClassFieldDescriptor, String generatedClassName, + Map allMethodsToBeImplementedToResult, Set existingMethods) { + + String countKey = GenerationUtil.methodKey("count", long.class.getName()); + + if 
(allMethodsToBeImplementedToResult.containsKey(countKey)) { + if (!existingMethods.contains(countKey)) { + classCreator.method("count", mc -> { + mc.returning(long.class); + mc.body(bc -> { + LocalVar ops = bc.localVar("ops", bc.getStaticField(operationsField)); + LocalVar entityClass = bc.localVar("entityClass", + bc.get(mc.this_().field(entityClassFieldDescriptor))); + Expr result = bc.invokeVirtual( + MethodDesc.of(AbstractManagedJpaOperations.class, "count", long.class, Class.class), + ops, entityClass); + bc.return_(result); + }); + }); + existingMethods.add(countKey); } - allMethodsToBeImplementedToResult.put(countDescriptor, true); + allMethodsToBeImplementedToResult.put(countKey, true); } } - private void generateDeleteById(ClassCreator classCreator, FieldDescriptor entityClassFieldDescriptor, + private void generateDeleteById(ClassCreator classCreator, FieldDesc entityClassFieldDescriptor, String generatedClassName, String entityTypeStr, String idTypeStr, - Map allMethodsToBeImplementedToResult) { - - MethodDescriptor deleteByIdDescriptor = MethodDescriptor.ofMethod(generatedClassName, "deleteById", - void.class.getName(), idTypeStr); - MethodDescriptor bridgeDeleteByIdDescriptor = MethodDescriptor.ofMethod(generatedClassName, "deleteById", - void.class, Object.class); - - if (allMethodsToBeImplementedToResult.containsKey(deleteByIdDescriptor) - || allMethodsToBeImplementedToResult.containsKey(bridgeDeleteByIdDescriptor)) { - - if (!classCreator.getExistingMethods().contains(deleteByIdDescriptor)) { - try (MethodCreator deleteById = classCreator.getMethodCreator(deleteByIdDescriptor)) { - deleteById.addAnnotation(Transactional.class); - ResultHandle id = deleteById.getMethodParam(0); - ResultHandle entityClass = deleteById.readInstanceField(entityClassFieldDescriptor, - deleteById.getThis()); - - ResultHandle deleted = deleteById.invokeVirtualMethod( - MethodDescriptor.ofMethod(AbstractManagedJpaOperations.class, "deleteById", boolean.class, - Class.class, 
Object.class), - deleteById.readStaticField(operationsField), entityClass, id); - - BranchResult deletedBranch = deleteById.ifNonZero(deleted); - BytecodeCreator deletedFalse = deletedBranch.falseBranch(); - - ResultHandle idToString = deletedFalse.invokeVirtualMethod( - ofMethod(Object.class, "toString", String.class), - id); - ResultHandle formatArgsArray = deletedFalse.newArray(Object.class, 1); - deletedFalse.writeArrayValue(formatArgsArray, deletedFalse.load(0), idToString); - - ResultHandle messageFormat = deletedFalse.load("No entity " + entityTypeStr + " with id %s exists"); - ResultHandle message = deletedFalse.invokeStaticMethod( - MethodDescriptor.ofMethod(String.class, "format", String.class, String.class, Object[].class), - messageFormat, formatArgsArray); - - ResultHandle exception = deletedFalse.newInstance( - MethodDescriptor.ofConstructor(IllegalArgumentException.class, String.class), - message); - deletedFalse.throwException(exception); - deletedFalse.breakScope(); - - deleteById.returnValue(null); - } - try (MethodCreator bridgeDeleteById = classCreator.getMethodCreator(bridgeDeleteByIdDescriptor)) { - MethodDescriptor deleteById = MethodDescriptor.ofMethod(generatedClassName, "deleteById", - void.class, idTypeStr); - ResultHandle methodParam = bridgeDeleteById.getMethodParam(0); - ResultHandle castedMethodParam = bridgeDeleteById.checkCast(methodParam, idTypeStr); - ResultHandle result = bridgeDeleteById.invokeVirtualMethod(deleteById, bridgeDeleteById.getThis(), - castedMethodParam); - bridgeDeleteById.returnValue(result); - } + Map allMethodsToBeImplementedToResult, Set existingMethods) { + + String deleteByIdKey = GenerationUtil.methodKey("deleteById", void.class.getName(), idTypeStr); + String bridgeDeleteByIdKey = GenerationUtil.methodKey("deleteById", void.class.getName(), Object.class.getName()); + + if (allMethodsToBeImplementedToResult.containsKey(deleteByIdKey) + || allMethodsToBeImplementedToResult.containsKey(bridgeDeleteByIdKey)) { 
+ + if (!existingMethods.contains(deleteByIdKey)) { + MethodTypeDesc deleteByIdMtd = GenerationUtil.toMethodTypeDesc(void.class.getName(), idTypeStr); + classCreator.method("deleteById", mc -> { + mc.setType(deleteByIdMtd); + mc.addAnnotation(Transactional.class); + ParamVar idParam = mc.parameter("id"); + mc.body(bc -> { + LocalVar ops = bc.localVar("ops", bc.getStaticField(operationsField)); + LocalVar entityClass = bc.localVar("entityClass", + bc.get(mc.this_().field(entityClassFieldDescriptor))); + Expr deleted = bc.invokeVirtual( + MethodDesc.of(AbstractManagedJpaOperations.class, "deleteById", boolean.class, + Class.class, Object.class), + ops, entityClass, idParam); + + bc.ifNot(deleted, deletedFalse -> { + Expr idToString = deletedFalse.invokeVirtual( + MethodDesc.of(Object.class, "toString", String.class), + idParam); + Expr formatArgsArray = deletedFalse.newArray(Object.class, idToString); + + Expr messageFormat = Const.of("No entity " + entityTypeStr + " with id %s exists"); + Expr message = deletedFalse.invokeStatic( + MethodDesc.of(String.class, "format", String.class, String.class, Object[].class), + messageFormat, formatArgsArray); + + deletedFalse.throw_(IllegalArgumentException.class, message); + }); + + bc.return_(); + }); + }); + existingMethods.add(deleteByIdKey); + + // Bridge method + MethodTypeDesc bridgeMtd = GenerationUtil.toMethodTypeDesc(void.class.getName(), Object.class.getName()); + MethodDesc deleteByIdDesc = ClassMethodDesc.of(ClassDesc.of(generatedClassName), "deleteById", deleteByIdMtd); + classCreator.method("deleteById", bmc -> { + bmc.setType(bridgeMtd); + ParamVar methodParam = bmc.parameter("id"); + bmc.body(bbc -> { + Expr castedParam = bbc.cast(methodParam, ClassDesc.of(idTypeStr)); + bbc.invokeVirtual(deleteByIdDesc, bmc.this_(), castedParam); + bbc.return_(); + }); + }); + existingMethods.add(bridgeDeleteByIdKey); } - allMethodsToBeImplementedToResult.put(deleteByIdDescriptor, true); - 
allMethodsToBeImplementedToResult.put(bridgeDeleteByIdDescriptor, true); + allMethodsToBeImplementedToResult.put(deleteByIdKey, true); + allMethodsToBeImplementedToResult.put(bridgeDeleteByIdKey, true); } } private void generateDelete(ClassCreator classCreator, String generatedClassName, String entityTypeStr, - Map allMethodsToBeImplementedToResult) { - - MethodDescriptor deleteDescriptor = MethodDescriptor.ofMethod(generatedClassName, "delete", - void.class.toString(), entityTypeStr); - MethodDescriptor bridgeDeleteDescriptor = MethodDescriptor.ofMethod(generatedClassName, "delete", void.class, - Object.class); - - if (allMethodsToBeImplementedToResult.containsKey(deleteDescriptor) - || allMethodsToBeImplementedToResult.containsKey(bridgeDeleteDescriptor)) { - - if (!classCreator.getExistingMethods().contains(deleteDescriptor)) { - try (MethodCreator delete = classCreator.getMethodCreator(deleteDescriptor)) { - delete.addAnnotation(Transactional.class); - ResultHandle entity = delete.getMethodParam(0); - delete.invokeVirtualMethod( - MethodDescriptor.ofMethod(AbstractManagedJpaOperations.class, "delete", void.class, Object.class), - delete.readStaticField(operationsField), entity); - delete.returnValue(null); - } - try (MethodCreator bridgeDelete = classCreator.getMethodCreator(bridgeDeleteDescriptor)) { - MethodDescriptor delete = MethodDescriptor.ofMethod(generatedClassName, "delete", void.class.toString(), - entityTypeStr); - ResultHandle methodParam = bridgeDelete.getMethodParam(0); - ResultHandle castedMethodParam = bridgeDelete.checkCast(methodParam, entityTypeStr); - ResultHandle result = bridgeDelete.invokeVirtualMethod(delete, bridgeDelete.getThis(), - castedMethodParam); - bridgeDelete.returnValue(result); - } + Map allMethodsToBeImplementedToResult, Set existingMethods) { + + String deleteKey = GenerationUtil.methodKey("delete", void.class.getName(), entityTypeStr); + String bridgeDeleteKey = GenerationUtil.methodKey("delete", void.class.getName(), 
Object.class.getName()); + + if (allMethodsToBeImplementedToResult.containsKey(deleteKey) + || allMethodsToBeImplementedToResult.containsKey(bridgeDeleteKey)) { + + if (!existingMethods.contains(deleteKey)) { + MethodTypeDesc deleteMtd = GenerationUtil.toMethodTypeDesc(void.class.getName(), entityTypeStr); + classCreator.method("delete", mc -> { + mc.setType(deleteMtd); + mc.addAnnotation(Transactional.class); + ParamVar entityParam = mc.parameter("entity"); + mc.body(bc -> { + LocalVar ops = bc.localVar("ops", bc.getStaticField(operationsField)); + bc.invokeVirtual( + MethodDesc.of(AbstractManagedJpaOperations.class, "delete", void.class, Object.class), + ops, entityParam); + bc.return_(); + }); + }); + existingMethods.add(deleteKey); + + // Bridge method + MethodTypeDesc bridgeMtd = GenerationUtil.toMethodTypeDesc(void.class.getName(), Object.class.getName()); + MethodDesc deleteDesc = ClassMethodDesc.of(ClassDesc.of(generatedClassName), "delete", deleteMtd); + classCreator.method("delete", bmc -> { + bmc.setType(bridgeMtd); + ParamVar methodParam = bmc.parameter("entity"); + bmc.body(bbc -> { + Expr castedParam = bbc.cast(methodParam, ClassDesc.of(entityTypeStr)); + bbc.invokeVirtual(deleteDesc, bmc.this_(), castedParam); + bbc.return_(); + }); + }); + existingMethods.add(bridgeDeleteKey); } } - allMethodsToBeImplementedToResult.put(deleteDescriptor, true); - allMethodsToBeImplementedToResult.put(bridgeDeleteDescriptor, true); + allMethodsToBeImplementedToResult.put(deleteKey, true); + allMethodsToBeImplementedToResult.put(bridgeDeleteKey, true); } private void generateDeleteInBatchWithIterable(ClassCreator classCreator, String generatedClassName, String entityTypeStr, - Map allMethodsToBeImplementedToResult) { + Map allMethodsToBeImplementedToResult, Set existingMethods) { generateSpecificDeleteAllWithIterable(classCreator, generatedClassName, entityTypeStr, "deleteInBatch", - allMethodsToBeImplementedToResult); - + allMethodsToBeImplementedToResult, 
existingMethods); } private void generateDeleteAllInBatchWithIterable(ClassCreator classCreator, String generatedClassName, - String entityTypeStr, - Map allMethodsToBeImplementedToResult) { + String entityTypeStr, Map allMethodsToBeImplementedToResult, Set existingMethods) { generateSpecificDeleteAllWithIterable(classCreator, generatedClassName, entityTypeStr, "deleteAllInBatch", - allMethodsToBeImplementedToResult); - + allMethodsToBeImplementedToResult, existingMethods); } private void generateDeleteAllByIdInBatchWithIterable(ClassCreator classCreator, String generatedClassName, - String entityTypeStr, - Map allMethodsToBeImplementedToResult) { + String entityTypeStr, Map allMethodsToBeImplementedToResult, Set existingMethods) { generateSpecificDeleteAllWithIterable(classCreator, generatedClassName, entityTypeStr, "deleteAllByIdInBatch", - allMethodsToBeImplementedToResult); - + allMethodsToBeImplementedToResult, existingMethods); } private void generateDeleteAllWithIterable(ClassCreator classCreator, String generatedClassName, String entityTypeStr, - Map allMethodsToBeImplementedToResult) { + Map allMethodsToBeImplementedToResult, Set existingMethods) { generateSpecificDeleteAllWithIterable(classCreator, generatedClassName, entityTypeStr, "deleteAll", - allMethodsToBeImplementedToResult); - + allMethodsToBeImplementedToResult, existingMethods); } private void generateSpecificDeleteAllWithIterable(ClassCreator classCreator, String generatedClassName, String entityTypeStr, String actualMethodName, - Map allMethodsToBeImplementedToResult) { - - MethodDescriptor deleteAllWithIterableDescriptor = MethodDescriptor.ofMethod(generatedClassName, actualMethodName, - void.class, Iterable.class); - - if (allMethodsToBeImplementedToResult.containsKey(deleteAllWithIterableDescriptor)) { - if (!classCreator.getExistingMethods().contains(deleteAllWithIterableDescriptor)) { - try (MethodCreator deleteAll = classCreator.getMethodCreator(deleteAllWithIterableDescriptor)) { - 
deleteAll.setSignature(String.format("(Ljava/lang/Iterable<+L%s;>;)V", - entityTypeStr.replace('.', '/'))); - deleteAll.addAnnotation(Transactional.class); - ResultHandle entities = deleteAll.getMethodParam(0); - deleteAll.invokeStaticMethod( - MethodDescriptor.ofMethod(RepositorySupport.class, "deleteAll", - void.class, AbstractManagedJpaOperations.class, Iterable.class), - deleteAll.readStaticField(operationsField), - entities); - deleteAll.returnValue(null); - } + Map allMethodsToBeImplementedToResult, Set existingMethods) { + + String key = GenerationUtil.methodKey(actualMethodName, void.class.getName(), Iterable.class.getName()); + + if (allMethodsToBeImplementedToResult.containsKey(key)) { + if (!existingMethods.contains(key)) { + MethodTypeDesc mtd = GenerationUtil.toMethodTypeDesc(void.class.getName(), Iterable.class.getName()); + classCreator.method(actualMethodName, mc -> { + mc.setType(mtd); + mc.addAnnotation(Transactional.class); + ParamVar entitiesParam = mc.parameter("entities"); + mc.body(bc -> { + LocalVar ops = bc.localVar("ops", bc.getStaticField(operationsField)); + bc.invokeStatic( + MethodDesc.of(RepositorySupport.class, "deleteAll", + void.class, AbstractManagedJpaOperations.class, Iterable.class), + ops, + entitiesParam); + bc.return_(); + }); + }); + existingMethods.add(key); } - allMethodsToBeImplementedToResult.put(deleteAllWithIterableDescriptor, true); + allMethodsToBeImplementedToResult.put(key, true); } } - private void generateDeleteAll(ClassCreator classCreator, FieldDescriptor entityClassFieldDescriptor, - String generatedClassName, Map allMethodsToBeImplementedToResult) { - - MethodDescriptor deleteAllDescriptor = MethodDescriptor.ofMethod(generatedClassName, "deleteAll", void.class); - - if (allMethodsToBeImplementedToResult.containsKey(deleteAllDescriptor)) { - if (!classCreator.getExistingMethods().contains(deleteAllDescriptor)) { - try (MethodCreator deleteAll = classCreator.getMethodCreator(deleteAllDescriptor)) { - 
deleteAll.addAnnotation(Transactional.class); - deleteAll.invokeStaticMethod( - MethodDescriptor.ofMethod(AdditionalJpaOperations.class, "deleteAllWithCascade", long.class, - AbstractManagedJpaOperations.class, Class.class.getName()), - deleteAll.readStaticField(operationsField), - deleteAll.readInstanceField(entityClassFieldDescriptor, deleteAll.getThis())); - deleteAll.returnValue(null); - } + private void generateDeleteAll(ClassCreator classCreator, FieldDesc entityClassFieldDescriptor, + String generatedClassName, Map allMethodsToBeImplementedToResult, + Set existingMethods) { + + String deleteAllKey = GenerationUtil.methodKey("deleteAll", void.class.getName()); + + if (allMethodsToBeImplementedToResult.containsKey(deleteAllKey)) { + if (!existingMethods.contains(deleteAllKey)) { + classCreator.method("deleteAll", mc -> { + mc.returning(void.class); + mc.addAnnotation(Transactional.class); + mc.body(bc -> { + LocalVar ops = bc.localVar("ops", bc.getStaticField(operationsField)); + LocalVar entityClass = bc.localVar("entityClass", + bc.get(mc.this_().field(entityClassFieldDescriptor))); + bc.invokeStatic( + MethodDesc.of(AdditionalJpaOperations.class, "deleteAllWithCascade", long.class, + AbstractManagedJpaOperations.class, Class.class), + ops, entityClass); + bc.return_(); + }); + }); + existingMethods.add(deleteAllKey); } - allMethodsToBeImplementedToResult.put(deleteAllDescriptor, true); + allMethodsToBeImplementedToResult.put(deleteAllKey, true); } } - private void generateDeleteAllInBatch(ClassCreator classCreator, FieldDescriptor entityClassFieldDescriptor, - String generatedClassName, Map allMethodsToBeImplementedToResult) { - - MethodDescriptor deleteAllInBatchDescriptor = MethodDescriptor.ofMethod(generatedClassName, "deleteAllInBatch", - void.class); - - if (allMethodsToBeImplementedToResult.containsKey(deleteAllInBatchDescriptor)) { - if (!classCreator.getExistingMethods().contains(deleteAllInBatchDescriptor)) { - try (MethodCreator deleteAll = 
classCreator.getMethodCreator(deleteAllInBatchDescriptor)) { - deleteAll.addAnnotation(Transactional.class); - ResultHandle result = deleteAll.invokeVirtualMethod( - MethodDescriptor.ofMethod(AbstractManagedJpaOperations.class, "deleteAll", long.class, Class.class), - deleteAll.readStaticField(operationsField), - deleteAll.readInstanceField(entityClassFieldDescriptor, deleteAll.getThis())); - deleteAll.returnValue(result); - } + private void generateDeleteAllInBatch(ClassCreator classCreator, FieldDesc entityClassFieldDescriptor, + String generatedClassName, Map allMethodsToBeImplementedToResult, + Set existingMethods) { + + String deleteAllInBatchKey = GenerationUtil.methodKey("deleteAllInBatch", void.class.getName()); + + if (allMethodsToBeImplementedToResult.containsKey(deleteAllInBatchKey)) { + if (!existingMethods.contains(deleteAllInBatchKey)) { + classCreator.method("deleteAllInBatch", mc -> { + mc.returning(void.class); + mc.addAnnotation(Transactional.class); + mc.body(bc -> { + LocalVar ops = bc.localVar("ops", bc.getStaticField(operationsField)); + LocalVar entityClass = bc.localVar("entityClass", + bc.get(mc.this_().field(entityClassFieldDescriptor))); + bc.invokeVirtual( + MethodDesc.of(AbstractManagedJpaOperations.class, "deleteAll", long.class, Class.class), + ops, entityClass); + bc.return_(); + }); + }); + existingMethods.add(deleteAllInBatchKey); } - allMethodsToBeImplementedToResult.put(deleteAllInBatchDescriptor, true); + allMethodsToBeImplementedToResult.put(deleteAllInBatchKey, true); } } - private void handleUnimplementedMethods(ClassCreator classCreator, - Map allMethodsToBeImplementedToResult) { - for (Map.Entry entry : allMethodsToBeImplementedToResult.entrySet()) { + private void handleUnimplementedMethods(ClassCreator classCreator, String generatedClassName, + Map allMethodsToBeImplementedToResult, Set existingMethods) { + for (Map.Entry entry : allMethodsToBeImplementedToResult.entrySet()) { if (entry.getValue()) { // ignore implemented 
methods continue; } - try (MethodCreator methodCreator = classCreator.getMethodCreator(entry.getKey())) { - ResultHandle res = methodCreator.newInstance( - MethodDescriptor.ofConstructor(FunctionalityNotImplemented.class, String.class, String.class), - methodCreator.load(classCreator.getClassName().replace('/', '.')), - methodCreator.load(entry.getKey().getName())); - methodCreator.throwException(res); + // Parse the method key to get name and types + String key = entry.getKey(); + int parenOpen = key.indexOf('('); + int parenClose = key.indexOf(')'); + String methodName = key.substring(0, parenOpen); + String paramsPart = key.substring(parenOpen + 1, parenClose); + String returnType = key.substring(parenClose + 1); + + String[] paramTypes = paramsPart.isEmpty() ? new String[0] : paramsPart.split(","); + + if (existingMethods.contains(key)) { + continue; } + + MethodTypeDesc mtd = GenerationUtil.toMethodTypeDesc(returnType, paramTypes); + classCreator.method(methodName, mc -> { + mc.setType(mtd); + for (int i = 0; i < paramTypes.length; i++) { + mc.parameter("p" + i); + } + mc.body(bc -> { + Expr res = bc.new_(ClassDesc.of(FunctionalityNotImplemented.class.getName()), + Const.of(generatedClassName.replace('/', '.')), + Const.of(methodName)); + bc.throw_(res); + }); + }); + existingMethods.add(key); } } @@ -1024,9 +1143,6 @@ private Set methodsOfExtendedSpringDataRepositories(ClassInfo reposi return GenerationUtil.interfaceMethods(GenerationUtil.extendedSpringDataRepos(repositoryToImplement, index), index); } - // Spring Data allows users to add any of the methods of CrudRepository, PagingAndSortingRepository, JpaRepository - // to their interface declaration without having to make their repository extend any of those - // this is done so users have the ability to add only what they need private Set stockMethodsAddedToInterface(ClassInfo repositoryToImplement) { Set result = new LinkedHashSet<>(); @@ -1053,8 +1169,6 @@ private Set allSpringDataRepositoryMethods() { 
return ALL_SPRING_DATA_REPOSITORY_METHODS; } - // Used to determine if a method with captured generic types can be considered the same as a target method - // This is rather naive but works in the constraints of Spring Data private boolean canMethodsBeConsideredSame(MethodInfo candidate, MethodInfo target) { if (!candidate.name().equals(target.name())) { return false; diff --git a/independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/utils/SkillComposer.java b/independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/utils/SkillComposer.java index 16e24613f5b45..e88a5695b5150 100644 --- a/independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/utils/SkillComposer.java +++ b/independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/utils/SkillComposer.java @@ -85,9 +85,15 @@ public static String compose(ObjectNode extMeta, String rawContent, String skill } final String guide = getNestedTextValue(extMeta, "metadata", "guide"); - if (guide != null && !guide.isBlank()) { + final String categories = getNestedArrayAsString(extMeta, "metadata", "categories"); + if ((guide != null && !guide.isBlank()) || (categories != null && !categories.isBlank())) { sb.append("metadata:\n"); - sb.append(" guide: \"").append(escapeYamlString(guide)).append("\"\n"); + if (guide != null && !guide.isBlank()) { + sb.append(" guide: \"").append(escapeYamlString(guide)).append("\"\n"); + } + if (categories != null && !categories.isBlank()) { + sb.append(" categories: \"").append(escapeYamlString(categories)).append("\"\n"); + } } sb.append("---\n\n"); @@ -237,6 +243,29 @@ private static String getNestedTextValue(ObjectNode node, String parent, String return getTextValue((ObjectNode) parentNode, field); } + /** + * Extracts a nested array (e.g. {@code metadata.categories}) and joins its text + * elements with {@code ", "}, returning {@code null} if the path is missing or empty. 
+ */ + private static String getNestedArrayAsString(ObjectNode node, String parent, String field) { + var parentNode = node.get(parent); + if (parentNode == null || parentNode.isNull() || !parentNode.isObject()) { + return null; + } + var arrayNode = parentNode.get(field); + if (arrayNode == null || arrayNode.isNull() || !arrayNode.isArray() || arrayNode.isEmpty()) { + return null; + } + StringJoiner joiner = new StringJoiner(", "); + for (var element : arrayNode) { + if (element.isTextual()) { + joiner.add(element.asText()); + } + } + String result = joiner.toString(); + return result.isEmpty() ? null : result; + } + private static String escapeMarkdownTable(String text) { return text.replace("|", "\\|").replace("\n", " "); } diff --git a/independent-projects/tools/devtools-common/src/test/java/io/quarkus/devtools/utils/SkillComposerTest.java b/independent-projects/tools/devtools-common/src/test/java/io/quarkus/devtools/utils/SkillComposerTest.java index 86be9bc5f6317..a3d09e5a0f9bc 100644 --- a/independent-projects/tools/devtools-common/src/test/java/io/quarkus/devtools/utils/SkillComposerTest.java +++ b/independent-projects/tools/devtools-common/src/test/java/io/quarkus/devtools/utils/SkillComposerTest.java @@ -90,14 +90,59 @@ public void composeWithBlankDescriptionOmitsField() throws IOException { } @Test - public void composeWithoutGuideOmitsMetadataBlock() throws IOException { + public void composeWithoutGuideOrCategoriesOmitsMetadataBlock() throws IOException { String yaml = "name: \"No Guide\"\ndescription: \"Something\"\n"; ObjectNode meta = parseYaml(yaml); String result = SkillComposer.compose(meta, "body", "quarkus-noguide"); - assertTrue(!result.contains("metadata:")); - assertTrue(!result.contains("guide:")); + assertFalse(result.contains("metadata:")); + assertFalse(result.contains("guide:")); + assertFalse(result.contains("categories:")); + } + + @Test + public void composeWithCategoriesIncludesThem() throws IOException { + String yaml = "name: 
\"REST\"\n" + + "description: \"Build RESTful APIs\"\n" + + "metadata:\n" + + " guide: https://quarkus.io/guides/rest\n" + + " categories:\n" + + " - \"web\"\n" + + " - \"reactive\"\n"; + ObjectNode meta = parseYaml(yaml); + + String result = SkillComposer.compose(meta, "body", "quarkus-rest"); + + assertTrue(result.contains("metadata:\n")); + assertTrue(result.contains(" guide: \"https://quarkus.io/guides/rest\"\n")); + assertTrue(result.contains(" categories: \"web, reactive\"\n")); + } + + @Test + public void composeWithCategoriesOnlyNoGuide() throws IOException { + String yaml = "name: \"Ext\"\n" + + "description: \"desc\"\n" + + "metadata:\n" + + " categories:\n" + + " - \"data\"\n"; + ObjectNode meta = parseYaml(yaml); + + String result = SkillComposer.compose(meta, "body", "quarkus-ext"); + + assertTrue(result.contains("metadata:\n")); + assertFalse(result.contains("guide:")); + assertTrue(result.contains(" categories: \"data\"\n")); + } + + @Test + public void composeWithoutCategoriesOmitsThem() throws IOException { + String yaml = "name: \"No Cat\"\ndescription: \"Something\"\n"; + ObjectNode meta = parseYaml(yaml); + + String result = SkillComposer.compose(meta, "body", "quarkus-nocat"); + + assertFalse(result.contains("categories:")); } @Test diff --git a/integration-tests/hibernate-orm-panache/src/main/java/io/quarkus/it/panache/defaultpu/TestEndpoint.java b/integration-tests/hibernate-orm-panache/src/main/java/io/quarkus/it/panache/defaultpu/TestEndpoint.java index 663e5f7aa3303..4ea5b62594866 100644 --- a/integration-tests/hibernate-orm-panache/src/main/java/io/quarkus/it/panache/defaultpu/TestEndpoint.java +++ b/integration-tests/hibernate-orm-panache/src/main/java/io/quarkus/it/panache/defaultpu/TestEndpoint.java @@ -2093,6 +2093,50 @@ public String testSortByEmbedded() { return "OK"; } + @GET + @Path("testCaseInsensitiveSorting") + @Transactional + public String testCaseInsensitiveSorting() { + Person.deleteAll(); + + Person apple = new Person(); + 
apple.name = "apple"; + apple.persist(); + + Person BANANA = new Person(); + BANANA.name = "BANANA"; + BANANA.persist(); + + Person cherry = new Person(); + cherry.name = "cherry"; + cherry.persist(); + + // Test case-insensitive ascending sort + List persons = Person.findAll(Sort.ascendingIgnoreCase("name")).list(); + assertEquals(3, persons.size()); + assertEquals("apple", persons.get(0).name); + assertEquals("BANANA", persons.get(1).name); + assertEquals("cherry", persons.get(2).name); + + // Test case-insensitive descending sort + persons = Person.findAll(Sort.descendingIgnoreCase("name")).list(); + assertEquals(3, persons.size()); + assertEquals("cherry", persons.get(0).name); + assertEquals("BANANA", persons.get(1).name); + assertEquals("apple", persons.get(2).name); + + // Test fluent API + persons = Person.findAll(Sort.by("name").ignoreCase()).list(); + assertEquals(3, persons.size()); + assertEquals("apple", persons.get(0).name); + assertEquals("BANANA", persons.get(1).name); + assertEquals("cherry", persons.get(2).name); + + Person.deleteAll(); + + return "OK"; + } + @GET @Path("testEnhancement27184DeleteDetached") // NOT @Transactional diff --git a/integration-tests/hibernate-orm-panache/src/test/java/io/quarkus/it/panache/defaultpu/PanacheFunctionalityTest.java b/integration-tests/hibernate-orm-panache/src/test/java/io/quarkus/it/panache/defaultpu/PanacheFunctionalityTest.java index 26e668f0e460e..60efc49e4701b 100644 --- a/integration-tests/hibernate-orm-panache/src/test/java/io/quarkus/it/panache/defaultpu/PanacheFunctionalityTest.java +++ b/integration-tests/hibernate-orm-panache/src/test/java/io/quarkus/it/panache/defaultpu/PanacheFunctionalityTest.java @@ -98,6 +98,11 @@ public void testSortByEmbedded() { RestAssured.when().get("/test/testSortByEmbedded").then().body(is("OK")); } + @Test + public void testCaseInsensitiveSorting() { + RestAssured.when().get("/test/testCaseInsensitiveSorting").then().body(is("OK")); + } + @Test public void 
testJaxbAnnotationTransfer() { RestAssured.when() diff --git a/integration-tests/hibernate-reactive-panache/src/main/java/io/quarkus/it/panache/reactive/TestEndpoint.java b/integration-tests/hibernate-reactive-panache/src/main/java/io/quarkus/it/panache/reactive/TestEndpoint.java index 8f73f9b583012..ce66974f0e4ec 100644 --- a/integration-tests/hibernate-reactive-panache/src/main/java/io/quarkus/it/panache/reactive/TestEndpoint.java +++ b/integration-tests/hibernate-reactive-panache/src/main/java/io/quarkus/it/panache/reactive/TestEndpoint.java @@ -2376,6 +2376,54 @@ public Uni testSortByNullPrecedence() { }).map(v -> "OK"); } + @GET + @Path("testCaseInsensitiveSorting") + @WithTransaction + public Uni testCaseInsensitiveSorting() { + return Person.deleteAll() + .flatMap(v -> { + Person apple = new Person(); + apple.name = "apple"; + apple.uniqueName = "1"; + + Person BANANA = new Person(); + BANANA.name = "BANANA"; + BANANA.uniqueName = "2"; + + Person cherry = new Person(); + cherry.name = "cherry"; + cherry.uniqueName = "3"; + + return Person.persist(apple, BANANA, cherry); + }).flatMap(p -> { + // Test case-insensitive ascending sort + return Person.findAll(Sort.ascendingIgnoreCase("name")).list(); + }).flatMap(list -> { + assertEquals(3, list.size()); + assertEquals("apple", ((Person) list.get(0)).name); + assertEquals("BANANA", ((Person) list.get(1)).name); + assertEquals("cherry", ((Person) list.get(2)).name); + + // Test case-insensitive descending sort + return Person.findAll(Sort.descendingIgnoreCase("name")).list(); + }).flatMap(list -> { + assertEquals(3, list.size()); + assertEquals("cherry", ((Person) list.get(0)).name); + assertEquals("BANANA", ((Person) list.get(1)).name); + assertEquals("apple", ((Person) list.get(2)).name); + + // Test fluent API + return Person.findAll(Sort.by("name").ignoreCase()).list(); + }).flatMap(list -> { + assertEquals(3, list.size()); + assertEquals("apple", ((Person) list.get(0)).name); + assertEquals("BANANA", 
((Person) list.get(1)).name); + assertEquals("cherry", ((Person) list.get(2)).name); + + return Person.deleteAll(); + }).map(v -> "OK"); + } + @GET @Path("26308") @WithTransaction diff --git a/integration-tests/hibernate-reactive-panache/src/test/java/io/quarkus/it/panache/reactive/PanacheFunctionalityTest.java b/integration-tests/hibernate-reactive-panache/src/test/java/io/quarkus/it/panache/reactive/PanacheFunctionalityTest.java index e70e846efe522..09a544d2acd61 100644 --- a/integration-tests/hibernate-reactive-panache/src/test/java/io/quarkus/it/panache/reactive/PanacheFunctionalityTest.java +++ b/integration-tests/hibernate-reactive-panache/src/test/java/io/quarkus/it/panache/reactive/PanacheFunctionalityTest.java @@ -163,6 +163,11 @@ public void testSortByNullPrecedence() { RestAssured.when().get("/test/testSortByNullPrecedence").then().body(is("OK")); } + @Test + public void testCaseInsensitiveSorting() { + RestAssured.when().get("/test/testCaseInsensitiveSorting").then().body(is("OK")); + } + @DisabledOnIntegrationTest @RunOnVertxContext @Test diff --git a/pom.xml b/pom.xml index ad9b1f99c973b..7e226079337b2 100644 --- a/pom.xml +++ b/pom.xml @@ -94,7 +94,7 @@ 0.27.0 - 0.9.2 + 0.9.3 7.8.0