diff --git a/.copyrightconfig b/.copyrightconfig new file mode 100644 index 00000000..09e2ab15 --- /dev/null +++ b/.copyrightconfig @@ -0,0 +1,14 @@ +# COPYRIGHT VALIDATION CONFIG +# --------------------------------- +# Required start year (keep fixed; end year auto-updates in check output) +startyear: 2019 + +# Optional exclusions list (comma-separated). Leave commented if none. +# Rules: +# - Relative paths (no leading ./) +# - Simple * wildcard only (no recursive **) +# - Use sparingly (third_party, generated, binary assets) +# - Dotfiles already skipped automatically +# Enable by removing the leading '# ' from the next line and editing values. +# filesexcluded: third_party/*, docs/generated/*.md, assets/*.png, scripts/temp_*.py, vendor/lib.js +filesexcluded: .github/*, README.md, CONTRIBUTING.md, Jenkinsfile, gradle/*, docker-compose.yml, *.gradle, gradle.properties, gradlew, gradlew.bat, **/test/resources/**, docs/**, test-app/docker-compose.yml, docker/**, *.txt diff --git a/.env b/.env new file mode 100644 index 00000000..207f58cd --- /dev/null +++ b/.env @@ -0,0 +1,2 @@ +MARKLOGIC_IMAGE=ml-docker-db-dev-tierpoint.bed-artifactory.bedford.progress.com/marklogic/marklogic-server-ubi:latest-12 +MARKLOGIC_LOGS_VOLUME=./docker/marklogic/logs diff --git a/.github/workflows/jekyll-gh-pages.yml b/.github/workflows/jekyll-gh-pages.yml index aabbd66f..bf25bd07 100644 --- a/.github/workflows/jekyll-gh-pages.yml +++ b/.github/workflows/jekyll-gh-pages.yml @@ -2,9 +2,8 @@ name: Deploy Jekyll with GitHub Pages dependencies preinstalled on: - # Runs on pushes targeting the default branch push: - branches: ["develop"] + branches: ["main"] # Allows you to run this workflow manually from the Actions tab workflow_dispatch: @@ -21,21 +20,20 @@ concurrency: cancel-in-progress: true jobs: - # Build job build: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Pages - uses: actions/configure-pages@v3 + uses: actions/configure-pages@v5 - name: Build with Jekyll uses: actions/jekyll-build-pages@v1 with: source: ./docs/ destination: ./_site - name: Upload artifact - uses: actions/upload-pages-artifact@v1 + uses: actions/upload-pages-artifact@v3 # Deployment job deploy: @@ -47,4 +45,4 @@ jobs: steps: - name: Deploy to GitHub Pages id: deployment - uses: actions/deploy-pages@v1 + uses: actions/deploy-pages@v4 diff --git a/.github/workflows/pr-workflow.yaml b/.github/workflows/pr-workflow.yaml index f2a31ab9..9165227c 100644 --- a/.github/workflows/pr-workflow.yaml +++ b/.github/workflows/pr-workflow.yaml @@ -1,4 +1,4 @@ -name: šŸ·ļø JIRA ID Validator +name: PR Workflow on: # Using pull_request_target instead of pull_request to handle PRs from forks @@ -14,3 +14,10 @@ jobs: with: # Pass the PR title from the event context pr-title: ${{ github.event.pull_request.title }} + copyright-validation: + name: Ā© Validate Copyright Headers + uses: marklogic/pr-workflows/.github/workflows/copyright-check.yml@main + permissions: + contents: read + pull-requests: write + issues: write diff --git a/.gitignore b/.gitignore index 5a0f859d..0b54d47d 100644 --- a/.gitignore +++ b/.gitignore @@ -4,7 +4,8 @@ build out gradle-local.properties -docker +docker/confluent-marklogic-components/marklogic-kafka-marklogic-connector* +docker/marklogic bin .vscode diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 80ffc306..e7a87696 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,17 +1,18 @@ This guide describes how to develop and contribute pull requests to this 
connector. The focus is currently on how to
-develop and test the connector, either via a Docker cluster install of Confluent Platform or of the regular Kafka
-distribution.
+develop and test the connector. There are two methods available - automated and manual - and each runs against its own
+Docker stack. The automated tests use a stack that provides a MarkLogic instance, while the manual tests use a separate
+stack that runs Confluent Platform alongside a MarkLogic instance so the connector can be tested via the Confluent
+Control Center web application.

### Requirements:
* MarkLogic Server 11+
-* Java, either version 11 or 17, is required to use the Gradle tools.
-Additionally, SonarQube requires the use of Java 17.
+* Java version 17

See [the Confluent compatibility matrix](https://docs.confluent.io/platform/current/installation/versions-interoperability.html#java)
for more information. After installing your desired version of Java, ensure that the `JAVA_HOME` environment variable
points to your Java installation.

-# Configuring Local Automated and Manual Testing
+# Configuring Local Automated Testing

The test suite for the MarkLogic Kafka connector, found at `src/test`, requires that the test application first be
deployed to a MarkLogic instance. The recommendation is for this application to be deployed via Docker and
@@ -20,110 +21,99 @@ deployed to a MarkLogic instance. The recommendation is for this application to
Note that you do not need to install [Gradle](https://gradle.org/) - the "gradlew" program used below will install the
appropriate version of Gradle if you do not have it installed already.

-## Virtual Server Preparation
-The project includes a docker-compose file that includes MarkLogic, SonarQube with a Postgres server, and Confluent
-Platform servers.
-
-### Confluent Platform
-[Confluent Platform](https://docs.confluent.io/platform/current/overview.html) provides an easy mechanism for running
-Kafka locally via a single Docker cluster. A primary benefit of testing with Confluent Platform is to test configuring
-the MarkLogic Kafka connector via the
-[Confluent Control Center](https://docs.confluent.io/platform/current/control-center/index.html) web application.
-The Confluent Platform servers in this docker-compose file are based on the Confluent files and instructions at
-[Install a Confluent Platform cluster in Docker using a Confluent docker-compose file](https://docs.confluent.io/platform/current/platform-quickstart.html).
-
-## Docker Cluster Preparation
-To setup the docker cluster, use the docker-compose file in the "test-app" directory to build the Docker cluster with
-the command:
+## Docker Cluster Preparation for Automated Testing
+The automated tests require a MarkLogic server, which is provided by the docker-compose file in the repository root.
+To prepare for running the automated tests, start that stack with the following command:
```
-docker-compose -f docker-compose.yml up -d --build
-```
-When the setup is complete, you should be able to run
-```
-docker-compose -f docker-compose.yml ps
-```
-and see results similar to the following.
-```
-NAME IMAGE COMMAND SERVICE CREATED STATUS PORTS
-broker confluentinc/cp-kafka:7.6.1 "/etc/confluent/dock…" broker 14 minutes ago Up 14 minutes 0.0.0.0:9092->9092/tcp, 0.0.0.0:9101->9101/tcp
-connect cnfldemos/cp-server-connect-datagen:0.6.4-7.6.0 "/etc/confluent/dock…" connect 14 minutes ago Up 14 minutes 0.0.0.0:8083->8083/tcp, 9092/tcp
-control-center confluentinc/cp-enterprise-control-center:7.6.1 "/etc/confluent/dock…" control-center 14 minutes ago Up 14 minutes 0.0.0.0:9021->9021/tcp
-ksql-datagen confluentinc/ksqldb-examples:7.6.1 "bash -c 'echo Waiti…" ksql-datagen 14 minutes ago Up 14 minutes
-ksqldb-cli confluentinc/cp-ksqldb-cli:7.6.1 "/bin/sh" ksqldb-cli 14 minutes ago Up 14 minutes
-ksqldb-server confluentinc/cp-ksqldb-server:7.6.1 "/etc/confluent/dock…" ksqldb-server 14 minutes ago Up 14 minutes 0.0.0.0:8088->8088/tcp
-marklogic marklogicdb/marklogic-db:11.2.0-centos-1.1.2 "/tini -- /usr/local…" marklogic 14 minutes ago Up 14 minutes 25/tcp, 7997-7999/tcp, 0.0.0.0:8000-8002->8000-8002/tcp, 0.0.0.0:8010-8013->8010-8013/tcp, 8003-8009/tcp, 0.0.0.0:8018-8019->8018-8019/tcp
-marklogic-kafka-confluent-postgres-1 postgres:15-alpine "docker-entrypoint.s…" postgres 14 minutes ago Up 14 minutes 5432/tcp
-marklogic-kafka-confluent-sonarqube-1 sonarqube:10.3.0-community "/opt/sonarqube/dock…" sonarqube 14 minutes ago Up 14 minutes 0.0.0.0:9000->9000/tcp
-rest-proxy confluentinc/cp-kafka-rest:7.6.1 "/etc/confluent/dock…" rest-proxy 14 minutes ago Up 14 minutes 0.0.0.0:8082->8082/tcp
-schema-registry confluentinc/cp-schema-registry:7.6.1 "/etc/confluent/dock…" schema-registry 14 minutes ago Up 14 minutes 0.0.0.0:8081->8081/tcp
+docker-compose up -d --build
```
-You can now visit several web applications:
+You can now visit this web application:
* http://localhost:8000 to access the MarkLogic server.
-* http://localhost:9000 to use the SonarQube server as described in the "Running Sonar Code Analysis"
-section below.
-* http://localhost:9021 to access
-[Confluent's Control Center GUI](https://docs.confluent.io/platform/current/control-center/index.html) application.
-Within Control Center, click on "controlcenter.cluster" to access the configuration for the Kafka cluster.

## MarkLogic Preparation
To prepare the MarkLogic server for automated testing as well as testing with the Confluent Platform, the Data Hub based
-application must be deployed. From the "test-app" directory, follow these steps:
+application must be deployed. From the root directory, follow these steps:
1. Run `./gradlew hubInit`
-2. Edit gradle-local.properties and set `mlUsername` and `mlPassword`
3. Run `./gradlew -i mlDeploy`
+Note: If you change the version of Data Hub Framework used by this project, you should also delete the following directories:
+* 'test-app/src/main/entity-config'
+* 'test-app/src/main/hub-internal-config'
+
## Automated Testing
Now that your MarkLogic server is configured and the test-app is deployed, you can run the tests from the root
-directory. Note that you must be using Java 11 or Java 17 for this command due to the latest version of Gradle.
+directory. Note that you must be using Java 17 for this command due to the latest version of Gradle.
```
./gradlew test
```
Alternatively, you can import this project into an IDE such as IntelliJ and run each of the tests found under `src/test/java`.
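+
+If you only want to run a subset of the suite while iterating on a change, Gradle's standard `--tests` filter works
+here as well. The class pattern below is only an example; substitute any test class or package found under
+`src/test/java`:
+```
+./gradlew test --tests "com.marklogic.kafka.connect.source.*"
+```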
-## Running Sonar Code Analysis
+## Generating code quality reports with SonarQube

-To configure the SonarQube service, perform the following steps:
+Please see our [internal Wiki page](https://progresssoftware.atlassian.net/wiki/spaces/PM/pages/1763541097/Developer+Experience+SonarQube)
+for information on setting up SonarQube if you have not already done so.

-1. Go to http://localhost:9000 .
-2. Login as admin/admin. SonarQube will ask you to change this password; you can choose whatever you want ("password" works).
-3. Click on "Create a local project".
-4. Enter "marklogic-kafka-connector" for the Project Display Name; use that as the Project Key as well.
-5. Enter "master" as the main branch name.
-6. Click on "Next".
-7. Click on "Use the global setting" and then "Create project".
-8. On the "Analysis Method" page, click on "Locally".
-9. In the "Provide a token" panel, click on "Generate". Copy the token.
-10. Click the "Continue" button.
-11. Update `systemProp.sonar.token=` in `gradle-local.properties` in the root of your
-project.
-To run the SonarQube analysis, run the following Gradle task in the root directory, which will run all the tests with
-code coverage and then generate a quality report with SonarQube:
+# Configuring Local Manual Testing
+This project includes a Docker Compose file that creates a Kafka cluster using Confluent Platform along with a
+MarkLogic server. This allows you to test the MarkLogic Kafka connector via the Confluent Control Center web
+application. The instructions below describe how to get started.

- ./gradlew test sonar
-
-If you do not update `systemProp.sonar.token` in your `gradle.properties` file, you can specify the token via the
-following:
+## Docker Cluster Preparation for Manual Testing
+The docker-compose file in the test-app directory defines the Confluent Platform services along with a MarkLogic server.
+```
+docker-compose --env-file test-app/.env -f test-app/docker-compose.yml up -d --build
+```

- ./gradlew test sonar -Dsonar.token=paste your token here
+When the setup is complete, you should be able to run
+```
+docker-compose --env-file test-app/.env -f test-app/docker-compose.yml ps
+```
+and see results similar to the following.
+``` +NAME IMAGE COMMAND SERVICE CREATED STATUS PORTS +alertmanager confluentinc/cp-enterprise-alertmanager:2.2.0 "alertmanager-start" alertmanager 51 seconds ago Up 50 seconds 0.0.0.0:9093->9093/tcp, [::]:9093->9093/tcp +broker confluentinc/cp-server:8.0.0 "/etc/confluent/dock…" broker 51 seconds ago Up 50 seconds 0.0.0.0:9092->9092/tcp, [::]:9092->9092/tcp, 0.0.0.0:9101->9101/tcp, [::]:9101->9101/tcp +connect cnfldemos/cp-server-connect-datagen:0.6.7-8.0.0 "/etc/confluent/dock…" connect 51 seconds ago Up 49 seconds 0.0.0.0:8083->8083/tcp, [::]:8083->8083/tcp +control-center confluentinc/cp-enterprise-control-center-next-gen:2.2.0 "/etc/confluent/dock…" control-center 51 seconds ago Up 49 seconds 0.0.0.0:9021->9021/tcp, [::]:9021->9021/tcp +flink-jobmanager cnfldemos/flink-kafka:1.19.1-scala_2.12-java17 "/docker-entrypoint.…" flink-jobmanager 51 seconds ago Up 50 seconds 0.0.0.0:9081->9081/tcp, [::]:9081->9081/tcp +flink-sql-client cnfldemos/flink-sql-client-kafka:1.19.1-scala_2.12-java17 "/docker-entrypoint.…" flink-sql-client 51 seconds ago Up 50 seconds 6123/tcp, 8081/tcp +flink-taskmanager cnfldemos/flink-kafka:1.19.1-scala_2.12-java17 "/docker-entrypoint.…" flink-taskmanager 51 seconds ago Up 50 seconds 6123/tcp, 8081/tcp +ksql-datagen confluentinc/ksqldb-examples:8.0.0 "bash -c 'echo Waiti…" ksql-datagen 51 seconds ago Up 49 seconds +ksqldb-cli confluentinc/cp-ksqldb-cli:8.0.0 "/bin/sh" ksqldb-cli 51 seconds ago Up 49 seconds +ksqldb-server confluentinc/cp-ksqldb-server:8.0.0 "/etc/confluent/dock…" ksqldb-server 51 seconds ago Up 49 seconds 0.0.0.0:8088->8088/tcp, [::]:8088->8088/tcp +manual-tests-marklogic-kafka-confluent-marklogic-1 ml-docker-db-dev-tierpoint.bed-artifactory.bedford.progress.com/marklogic/marklogic-server-ubi:latest-12 "/tini -- /usr/local…" marklogic 51 seconds ago Up 50 seconds 0.0.0.0:8000-8002->8000-8002/tcp, [::]:8000-8002->8000-8002/tcp, 0.0.0.0:8010-8013->8010-8013/tcp, [::]:8010-8013->8010-8013/tcp, 0.0.0.0:8018-8019->8018-8019/tcp, [::]:8018-8019->8018-8019/tcp +prometheus confluentinc/cp-enterprise-prometheus:2.2.0 "prometheus-start" prometheus 51 seconds ago Up 50 seconds 0.0.0.0:9090->9090/tcp, [::]:9090->9090/tcp +rest-proxy confluentinc/cp-kafka-rest:8.0.0 "/etc/confluent/dock…" rest-proxy 51 seconds ago Up 49 seconds 0.0.0.0:8082->8082/tcp, [::]:8082->8082/tcp +schema-registry confluentinc/cp-schema-registry:8.0.0 "/etc/confluent/dock…" schema-registry 51 seconds ago Up 50 seconds 0.0.0.0:8081->8081/tcp, [::]:8081->8081/tcp +``` -When that completes, you can find the results at http://localhost:9000/dashboard?id=marklogic-kafka-connector +You can now visit several web applications: +* http://localhost:8000 to access the MarkLogic server. +* http://localhost:9021 to access + [Confluent's Control Center GUI](https://docs.confluent.io/platform/current/control-center/index.html) application. + Within Control Center, click on "controlcenter.cluster" to access the configuration for the Kafka cluster. -Click on that link. If it's the first time you've run the report, you'll see all issues. If you've run the report -before, then SonarQube will show "New Code" by default. That's handy, as you can use that to quickly see any issues -you've introduced on the feature branch you're working on. You can then click on "Overall Code" to see all issues. +### Confluent Platform for Manual Testing +[Confluent Platform](https://docs.confluent.io/platform/current/overview.html) provides an easy mechanism for running +Kafka locally via a single Docker cluster. 
A primary benefit of testing with Confluent Platform is to test configuring
+the MarkLogic Kafka connector via the
+[Confluent Control Center](https://docs.confluent.io/platform/current/control-center/index.html) web application.
+The Confluent Platform servers in this docker-compose file are based on the Confluent files and instructions at
+[Install a Confluent Platform cluster in Docker using a Confluent docker-compose file](https://docs.confluent.io/platform/current/platform-quickstart.html).

-Note that if you only need results on code smells and vulnerabilities, you can repeatedly run "./gradlew sonar"
-without having to re-run the tests.

-For more assistance with Sonar and Gradle, see the
-[Sonar Gradle plugin docs](https://docs.sonarqube.org/latest/analyzing-source-code/scanners/sonarscanner-for-gradle/).
+### MarkLogic Preparation
+Please ensure you've followed the instructions for "MarkLogic Preparation" in the "Configuring Local Automated Testing"
+section above for deploying a Data Hub test application.

Note: If you change the version of Data Hub Framework used by this project, you should also delete the following directories:
* 'test-app/src/main/entity-config'
* 'test-app/src/main/hub-internal-config'

-## Confluent Platform for Manual Testing

### Building and Sharing the Connector with the Docker Container
Using gradle in the root directory, build the connector archive and copy it to a directory shared with the Confluent
@@ -131,7 +121,7 @@ Platform Docker cluster built in the that section, using this gradle command in
```
./gradlew copyConnectorToDockerVolume
```
-**You MUST restart the "connect" server in the Docker "confluent-platform-example" cluster.**
+**You MUST restart the "connect" server in the Docker "manual-tests-marklogic-kafka-confluent" cluster.**

Now, verify the connector has loaded properly.
1. Click on "Connect" in the left sidebar.
@@ -173,7 +163,7 @@ In the Control Center GUI, you can verify the MarkLogic Kafka connector instance
3. Click on the "marklogic-purchases-sink" connector

You can then verify that data is being written to MarkLogic by using MarkLogic's qconsole application to inspect the
-contents of the `data-hub-FINAL` database.
+contents of the `data-hub-FINAL` database. There should be documents with URIs that start with `/purchase/`.

### Load a MarkLogic Kafka source connector instance
You can also load an instance of the MarkLogic Kafka source connector that will read rows from the `demo/purchases`
@@ -214,8 +204,8 @@ contents of the `data-hub-FINAL` database.

## Debugging the MarkLogic Kafka connector

-The main mechanism for debugging an instance of the MarkLogic Kafka connector is by examining logs from the
-connector. You can access those, along with logging from Kafka Connect and all other connectors, by running the
+The main mechanism for debugging an instance of the MarkLogic Kafka connector is by examining logs from the
+connector. You can access those, along with logging from Kafka Connect and all other connectors, by running the
following:

    confluent local services connect log -f

@@ -224,7 +214,7 @@ See [the log command docs](https://docs.confluent.io/confluent-cli/current/comma
for more information. You can also customize Confluent logging by
[adjusting the log4j file for Kafka Connect](https://docs.confluent.io/platform/current/connect/logging.html#viewing-kconnect-logs).
-For example, to prevent some logging from Kafka Connect and from the Java Client DMSDK, add the following to the +For example, to prevent some logging from Kafka Connect and from the Java Client DMSDK, add the following to the `$CONFLUENT_HOME/etc/kafka/connect-log4j.properties` file: log4j.logger.org.apache.kafka=WARN @@ -232,35 +222,35 @@ For example, to prevent some logging from Kafka Connect and from the Java Client # Testing with basic Apache Kafka -The primary reason to test the MarkLogic Kafka connector via a regular Kafka distribution is that the development -cycle is much faster and more reliable - i.e. you can repeatedly redeploy the connector and restart Kafka Connect to +The primary reason to test the MarkLogic Kafka connector via a regular Kafka distribution is that the development +cycle is much faster and more reliable - i.e. you can repeatedly redeploy the connector and restart Kafka Connect to test changes, and Kafka Connect will continue to work fine. This is particularly useful when the changes you're testing do not require testing the GUI provided by Confluent Control Center. -To get started, these instructions assume that you already have an instance of Apache Kafka installed; the -[Kafka Quickstart](https://kafka.apache.org/quickstart) instructions provide an easy way of accomplishing this. Perform +To get started, these instructions assume that you already have an instance of Apache Kafka installed; the +[Kafka Quickstart](https://kafka.apache.org/quickstart) instructions provide an easy way of accomplishing this. Perform step 1 of these instructions before proceeding. Next, configure your Gradle properties to point to your Kafka installation and deploy the connector there: 1. Configure `kafkaHome` in gradle-local.properties - e.g. `kafkaHome=/Users/myusername/kafka_2.13-2.8.1` 2. Configure `kafkaMlUsername` and `kafkaMlPassword` in gradle-local.properties, setting these to a MarkLogic user that - is able to write documents to MarkLogic. These values will be used to populate the + is able to write documents to MarkLogic. These values will be used to populate the `ml.connection.username` and `ml.connection.password` connector properties. 3. Run `./gradlew clean deploy` to build a jar and copy it and the config property files to your Kafka installation [Step 2 in the Kafka Quickstart guide](https://kafka.apache.org/quickstart) provides the instructions for starting the -separate Zookeeper and Kafka server processes. You'll need to run these commands from your Kafka installation -directory. As of August 2022, those commands are (these seem very unlikely to change and thus are included here for +separate Zookeeper and Kafka server processes. You'll need to run these commands from your Kafka installation +directory. As of August 2022, those commands are (these seem very unlikely to change and thus are included here for convenience): bin/zookeeper-server-start.sh config/zookeeper.properties -and +and bin/kafka-server-start.sh config/server.properties -Next, start the Kafka connector in standalone mode (also from the Kafka home directory). To run the sink connector, +Next, start the Kafka connector in standalone mode (also from the Kafka home directory). 
To run the sink connector, use the following command: bin/connect-standalone.sh config/marklogic-connect-standalone.properties config/marklogic-sink.properties @@ -274,7 +264,7 @@ You'll see a fair amount of logging from Kafka itself; near the end of the loggi `RowManagerSourceTask` to ensure that the connector has started up correctly. ## Sink Connector Testing -To test out the sink connector, you can use the following command to enter a CLI that allows you to manually send +To test out the sink connector, you can use the following command to enter a CLI that allows you to manually send messages to the `marklogic` topic that the connector is configured by default to read from: bin/kafka-console-producer.sh --broker-list localhost:9092 --topic marklogic diff --git a/Jenkinsfile b/Jenkinsfile index 0efe6748..b151e1fd 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -1,4 +1,35 @@ @Library('shared-libraries') _ + +def runtests(String marklogicVersion) { + cleanupDocker() + sh label:'mlsetup', script: '''#!/bin/bash + echo "Removing any running MarkLogic server and clean up MarkLogic data directory" + sudo /usr/local/sbin/mladmin remove + docker-compose down -v || true + sudo /usr/local/sbin/mladmin cleandata + cd kafka-connector + MARKLOGIC_LOGS_VOLUME=/tmp MARKLOGIC_IMAGE='''+marklogicVersion+''' docker-compose up -d --build + sleep 60s; + ''' + sh label:'deploy project', script: '''#!/bin/bash + export JAVA_HOME=$JAVA17_HOME_DIR + export GRADLE_USER_HOME=$WORKSPACE/$GRADLE_DIR + export PATH=$GRADLE_USER_HOME:$JAVA_HOME/bin:$PATH + cd kafka-connector + ./gradlew hubInit + ./gradlew mlTestConnections + ./gradlew -i mlDeploy + ''' + sh label:'test', script: '''#!/bin/bash + export JAVA_HOME=$JAVA17_HOME_DIR + export GRADLE_USER_HOME=$WORKSPACE/$GRADLE_DIR + export PATH=$GRADLE_USER_HOME:$JAVA_HOME/bin:$PATH + cd kafka-connector + ./gradlew test || true + ''' + junit '**/build/**/*.xml' +} + pipeline{ agent {label 'devExpLinuxPool'} options { @@ -6,36 +37,38 @@ pipeline{ buildDiscarder logRotator(artifactDaysToKeepStr: '7', artifactNumToKeepStr: '', daysToKeepStr: '30', numToKeepStr: '') } environment{ - JAVA_HOME_DIR="/home/builder/java/jdk-11.0.2" + JAVA17_HOME_DIR="/home/builder/java/jdk-17.0.2" GRADLE_DIR =".gradle" - DMC_USER = credentials('MLBUILD_USER') - DMC_PASSWORD = credentials('MLBUILD_PASSWORD') } stages{ - stage('tests'){ + stage('test-ML12'){ + steps{ + runtests("ml-docker-db-dev-tierpoint.bed-artifactory.bedford.progress.com/marklogic/marklogic-server-ubi:latest-12") + } + post{ + always{ + updateWorkspacePermissions() + sh label:'mlcleanup', script: '''#!/bin/bash + cd kafka-connector + docker-compose down -v || true + ''' + cleanupDocker() + } + } + } + stage('test-ML11'){ steps{ - copyRPM 'Release','11.3.0' - setUpML '$WORKSPACE/xdmp/src/Mark*.rpm' - sh label:'setup', script: '''#!/bin/bash - cd kafka-connector/test-app - echo mlPassword=admin >> gradle-local.properties - ''' - sh label:'deploy project', script: '''#!/bin/bash - export JAVA_HOME=$JAVA_HOME_DIR - export GRADLE_USER_HOME=$WORKSPACE/$GRADLE_DIR - export PATH=$GRADLE_USER_HOME:$JAVA_HOME/bin:$PATH - cd kafka-connector/test-app - ./gradlew hubInit - ./gradlew mlDeploy -PmlPassword=admin - ''' - sh label:'test', script: '''#!/bin/bash - export JAVA_HOME=$JAVA_HOME_DIR - export GRADLE_USER_HOME=$WORKSPACE/$GRADLE_DIR - export PATH=$GRADLE_USER_HOME:$JAVA_HOME/bin:$PATH - cd kafka-connector - ./gradlew test || true - ''' - junit '**/build/**/*.xml' + 
runtests("ml-docker-db-dev-tierpoint.bed-artifactory.bedford.progress.com/marklogic/marklogic-server-ubi:latest-11") + } + post{ + always{ + updateWorkspacePermissions() + sh label:'mlcleanup', script: '''#!/bin/bash + cd kafka-connector + docker-compose down -v || true + ''' + cleanupDocker() + } } } } diff --git a/NOTICE.txt b/NOTICE.txt index 617552b1..a23b2e32 100644 --- a/NOTICE.txt +++ b/NOTICE.txt @@ -1,38 +1,47 @@ -MarkLogicĀ® Connector for Kafka +ProgressĀ® MarkLogicĀ® Connector for Kafka v1.11.0 -Copyright Ā© 2019-2025 MarkLogic Corporation. All Rights Reserved. +Copyright (c) 2019-2025 Progress Software Corporation and/or its subsidiaries or affiliates. All Rights Reserved. To the extent required by the applicable open-source license, a complete machine-readable copy of the source code corresponding to such code is available upon request. This offer is valid to anyone in receipt of this information and shall expire three years following the date of the final distribution of this product version by Progress Software Corporation. To obtain such source code, send an email to Legal-thirdpartyreview@progress.com. Please specify the product and version for which you are requesting source code. -Third Party Notices +----------------------------------------------------------------------- -marklogic-data-hub 6.1.1 (Apache-2.0) -ml-app-deployer 5.0.0 (Apache-2.0) -jackson-dataformat-csv 2.17.2 (Apache-2.0) +The following is a list of the third-party components used by ProgressĀ® MarkLogicĀ® Connector for Kafka v1.11.0 (last updated October 16, 2025): + +Summary + +marklogic-data-hub 6.2.1 (Apache-2.0) +ml-gradle 6.1.0 (Apache-2.0) +jackson-dataformat-csv 2.20.2 (Apache-2.0) + +----------------------------------------------------------------------- Common Licenses Apache License 2.0 (Apache-2.0) -Third-Party Components +----------------------------------------------------------------------- -The following is a list of the third-party components used by the MarkLogicĀ® Connector for Kafka 1.11.0 (last updated August 12, 2025): - -marklogic-data-hub 6.1.1 (Apache-2.0) +marklogic-data-hub 6.2.1 (Apache-2.0) https://repo1.maven.org/maven2/com/marklogic/marklogic-data-hub/ + For the full text of the Apache-2.0 license, see Apache License 2.0 (Apache-2.0) -ml-app-deployer 5.0.0 (Apache-2.0) -https://repo1.maven.org/maven2/com/marklogic/ml-app-deployer/ +ml-gradle 6.1.0 (Apache-2.0) +https://repo1.maven.org/maven2/com/marklogic/ml-gradle/ + For the full text of the Apache-2.0 license, see Apache License 2.0 (Apache-2.0) -jackson-dataformat-csv 2.17.2 (Apache-2.0) +jackson-dataformat-csv 2.20.2 (Apache-2.0) https://repo1.maven.org/maven2/com/fasterxml/jackson/dataformat/jackson-dataformat-csv/ + For the full text of the Apache-2.0 license, see Apache License 2.0 (Apache-2.0) +----------------------------------------------------------------------- + Common Licenses -This section shows the text of common third-party licenses used by MarkLogicĀ® Connector for Kafka 1.11.0 (last updated August 12, 2025): +This section shows the text of common third-party licenses used by ProgressĀ® MarkLogicĀ® Connector for Kafka v1.11.0 (last updated October 16, 2025): Apache License 2.0 (Apache-2.0) https://spdx.org/licenses/Apache-2.0.html @@ -110,3 +119,4 @@ distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
+ diff --git a/build.gradle b/build.gradle index ec4580ca..2fb9743b 100644 --- a/build.gradle +++ b/build.gradle @@ -1,11 +1,23 @@ +buildscript { + repositories { + mavenCentral() + } + dependencies { + // Add jaxen to buildscript classpath to prevent NoClassDefFoundError when shadow plugin interferes with + // ml-data-hub plugin execution. It is not yet known why the shadow plugin causes this problem or why it's + // specific to the jaxen library, but this resolves the problem. + classpath 'jaxen:jaxen:2.0.0' + } +} + plugins { id 'java' id 'net.saliman.properties' version '1.5.2' - id 'com.gradleup.shadow' version '8.3.4' + id 'com.gradleup.shadow' version '9.2.2' // Only used for testing id 'jacoco' - id "org.sonarqube" version "5.1.0.4882" + id "org.sonarqube" version "6.3.1.5724" // Used to generate Avro classes. This will write classes to build/generated-test-avro-java and also add that folder // as a source root. Since this is commented out by default, the generated Avro test class has been added to @@ -15,61 +27,63 @@ plugins { } java { - sourceCompatibility = 1.8 - targetCompatibility = 1.8 + toolchain { + languageVersion = JavaLanguageVersion.of(17) + } + sourceCompatibility = JavaVersion.VERSION_17 + targetCompatibility = JavaVersion.VERSION_17 } repositories { mavenCentral() + + // Needed for marklogic-junit5 snapshot + maven { + url = "https://bed-artifactory.bedford.progress.com:443/artifactory/ml-maven-snapshots/" + } } configurations { documentation assets - configurations.all { + configureEach { resolutionStrategy { - // Force v4.5.0 of commons-collections4 to avoid CVEs in v4.4.0 from transitive dependecies: - // CVE-2025-48924 (https://www.cve.org/CVERecord?id=CVE-2025-48924) and - // CVE-2020-15250 (https://www.cve.org/CVERecord?id=CVE-2020-15250) - force "org.apache.commons:commons-collections4:4.5.0" - - // Force v3.18 of commons-lang3 to avoid CVE-2025-48924 - // (https://www.cve.org/CVERecord?id=CVE-2025-48924), without also - // upgrading ml-app-deployer to 6.0.0, which we are not ready to do yet. - force 'org.apache.commons:commons-lang3:3.18.0' + // Force v3.19 of commons-lang3 to avoid CVE-2025-48924 (https://www.cve.org/CVERecord?id=CVE-2025-48924), which + // is caused by the use of avro-compiler v1.12.0 with older dependencies including commons-lang3 v3.12.0. + force 'org.apache.commons:commons-lang3:3.19.0' + + // Forcing marklogic-data-hub to use the latest version of ml-gradle, which minimizes security vulnerabilities. + force "com.marklogic:ml-gradle:6.1.0" + + resolutionStrategy.eachDependency { DependencyResolveDetails details -> + if (details.requested.group.startsWith("org.eclipse.jetty") && details.requested.version.startsWith("12")) { + details.useVersion "12.1.1" + details.because "Eliminating CVEs on earlier versions. This is a compileOnly dependency of Kafka Connect and has no impact on our connector." + } + + if (details.requested.group.equals("io.netty") && details.requested.version.startsWith("4")) { + details.useVersion "4.2.7.Final" + details.because "Eliminating CVEs on earlier patch versions. io.netty is brought in by marklogic-data-hub. 
" + } + } } } } ext { - kafkaVersion = "3.9.1" + kafkaVersion = "4.1.0" } dependencies { def kafkaConnectRuntime = "org.apache.kafka:connect-runtime:${kafkaVersion}" compileOnly kafkaConnectRuntime - compileOnly "org.slf4j:slf4j-api:1.7.36" - - // Force DHF to use the latest version of ml-app-deployer, which minimizes security vulnerabilities - implementation "com.marklogic:ml-app-deployer:5.0.0" - implementation "com.fasterxml.jackson.dataformat:jackson-dataformat-csv:2.17.2" + implementation "com.marklogic:marklogic-data-hub:6.2.1" - // Note that in general, the version of the DHF jar must match that of the deployed DHF instance. Different versions - // may work together, but that behavior is not guaranteed. - implementation("com.marklogic:marklogic-data-hub:6.1.1") { - exclude module: "marklogic-client-api" - exclude module: "ml-javaclient-util" - exclude module: "ml-app-deployer" + implementation "com.fasterxml.jackson.dataformat:jackson-dataformat-csv:2.20.0" - // No need for mlcp-util, it's only used in 'legacy' DHF 4 jobs - exclude module: "mlcp-util" - // Excluding because it causes Kafka Connect to complain mightily if included - exclude module: "logback-classic" - } - - testImplementation 'com.marklogic:marklogic-junit5:1.5.0' + testImplementation 'com.marklogic:marklogic-junit5:2.0-SNAPSHOT' testImplementation "org.apache.kafka:connect-json:${kafkaVersion}" testImplementation kafkaConnectRuntime @@ -77,14 +91,14 @@ dependencies { testImplementation "org.apache.avro:avro-compiler:1.12.0" // Forcing logback to be used for test logging - testImplementation "ch.qos.logback:logback-classic:1.5.18" - testImplementation "org.slf4j:jcl-over-slf4j:2.0.16" + testImplementation "ch.qos.logback:logback-classic:1.5.19" + testImplementation "org.slf4j:jcl-over-slf4j:2.0.17" // Automatic loading of test framework implementation dependencies is deprecated. // https://docs.gradle.org/current/userguide/upgrading_version_8.html#test_framework_implementation_dependencies // Without this, once using JUnit 5.12 or higher, Gradle will not find any tests and report an error of: // org.junit.platform.commons.JUnitException: TestEngine with ID 'junit-jupiter' failed to discover tests - testRuntimeOnly "org.junit.platform:junit-platform-launcher:1.13.4" + testRuntimeOnly "org.junit.platform:junit-platform-launcher:1.14.0" documentation files('LICENSE.txt') documentation files('NOTICE.txt') @@ -103,23 +117,25 @@ tasks.withType(JavaCompile) { test { useJUnitPlatform() -} - -// Configures jacoco test coverage to be included when "test" is run -test { finalizedBy jacocoTestReport -} -jacocoTestReport { - dependsOn test -} -// Enabling the XML report allows for sonar to grab coverage data from jacoco -jacocoTestReport { - reports { - // This isn't working with Gradle 8. Will replace this soon with the sonar instance in docker-compose. - // xml.enabled true + jacocoTestReport { + dependsOn test + reports { + xml.required = true + } } -} + jacocoTestCoverageVerification { + violationRules { + rule { + limit { + minimum = 0.0 + } + } + } + } + +} shadowJar { // Exclude DHF source files @@ -141,7 +157,8 @@ ext { import org.apache.tools.ant.filters.ReplaceTokens -task connectorArchive_CopyManifestToBuildDirectory(type: Copy, group: confluentArchiveGroup) { +tasks.register("connectorArchive_CopyManifestToBuildDirectory", Copy) { + group = confluentArchiveGroup description = "Copy the project manifest into the root folder" from '.' 
include 'manifest.json' @@ -149,26 +166,31 @@ task connectorArchive_CopyManifestToBuildDirectory(type: Copy, group: confluentA filter(ReplaceTokens, tokens: [CONFLUENT_USER: componentOwner, VERSION: version]) } -task connectorArchive_CopyAssetsToBuildDirectory(type: Copy, group: confluentArchiveGroup) { +tasks.register("connectorArchive_CopyAssetsToBuildDirectory", Copy) { + group = confluentArchiveGroup description = "Copy the project assets into the assets folder" from configurations.assets into "${baseArchiveBuildDir}/${baseArchiveName}/assets" } -task connectorArchive_CopyEtcToBuildDirectory(type: Copy, group: confluentArchiveGroup) { +tasks.register("connectorArchive_CopyEtcToBuildDirectory", Copy) { + group = confluentArchiveGroup description = "Copy the project support files into the etc folder" from 'config' include '*' into "${baseArchiveBuildDir}/${baseArchiveName}/etc" } -task connectorArchive_CopyDocumentationToBuildDirectory(type: Copy, group: confluentArchiveGroup) { +tasks.register("connectorArchive_CopyDocumentationToBuildDirectory", Copy) { + group = confluentArchiveGroup description = "Copy the project documentation into the doc folder" from configurations.documentation into "${baseArchiveBuildDir}/${baseArchiveName}/doc" } -task connectorArchive_CopyDependenciesToBuildDirectory(type: Copy, group: confluentArchiveGroup, dependsOn: jar) { +tasks.register("connectorArchive_CopyDependenciesToBuildDirectory", Copy) { + group = confluentArchiveGroup + dependsOn jar description = "Copy the dependency jars into the lib folder" from jar // Confluent already includes the Jackson dependencies that this connector depends on. If the connector includes any @@ -185,18 +207,21 @@ task connectorArchive_CopyDependenciesToBuildDirectory(type: Copy, group: conflu into "${baseArchiveBuildDir}/${baseArchiveName}/lib" } -task connectorArchive_BuildDirectory(group: confluentArchiveGroup) { +tasks.register("connectorArchive_BuildDirectory") { + group = confluentArchiveGroup description = "Build the directory that will be used to create the Kafka Connector Archive" - dependsOn = [ + dependsOn( connectorArchive_CopyManifestToBuildDirectory, connectorArchive_CopyDependenciesToBuildDirectory, connectorArchive_CopyDocumentationToBuildDirectory, connectorArchive_CopyEtcToBuildDirectory, connectorArchive_CopyAssetsToBuildDirectory - ] + ) } -task connectorArchive(type: Zip, dependsOn: connectorArchive_BuildDirectory, group: confluentArchiveGroup) { +tasks.register("connectorArchive", Zip) { + group = confluentArchiveGroup + dependsOn connectorArchive_BuildDirectory description = 'Build a Connector Hub for the Confluent Connector Hub' from "${baseArchiveBuildDir}" include '**/*' @@ -206,8 +231,10 @@ task connectorArchive(type: Zip, dependsOn: connectorArchive_BuildDirectory, gro // Tasks for using the connector with Confluent Platform on Docker -task copyConnectorToDockerVolume(type: Copy, dependsOn: connectorArchive, group: confluentTestingGroup) { +tasks.register("copyConnectorToDockerVolume", Copy) { + group = confluentTestingGroup + dependsOn connectorArchive description = "Copies the connector's archive directory to the Docker volume shared with the Connect server" from "build/connectorArchive" - into "test-app/docker/confluent-marklogic-components" + into "./docker/confluent-marklogic-components" } diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 00000000..49ced7db --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,19 @@ +--- +name: docker-tests-marklogic-kafka 
+services: + marklogic: + image: "${MARKLOGIC_IMAGE}" + platform: linux/amd64 + environment: + - INSTALL_CONVERTERS=true + - MARKLOGIC_INIT=true + - MARKLOGIC_ADMIN_USERNAME=admin + - MARKLOGIC_ADMIN_PASSWORD=admin + volumes: + - ${MARKLOGIC_LOGS_VOLUME}:/var/opt/MarkLogic/Logs + ports: + - "8000-8002:8000-8002" + - "8010-8013:8010-8013" + - "8018-8019:8018-8019" + cap_drop: + - NET_RAW diff --git a/docker/prometheus/config/alertmanager-generated.yml b/docker/prometheus/config/alertmanager-generated.yml new file mode 100644 index 00000000..4cdaa003 --- /dev/null +++ b/docker/prometheus/config/alertmanager-generated.yml @@ -0,0 +1,8 @@ +global: + resolve_timeout: 1m + smtp_require_tls: false +receivers: +- name: default +route: + receiver: default + routes: [] diff --git a/docker/prometheus/config/prometheus-generated.yml b/docker/prometheus/config/prometheus-generated.yml new file mode 100644 index 00000000..e69de29b diff --git a/docker/prometheus/config/web-config-am.yml b/docker/prometheus/config/web-config-am.yml new file mode 100644 index 00000000..e69de29b diff --git a/docker/prometheus/config/web-config-prom.yml b/docker/prometheus/config/web-config-prom.yml new file mode 100644 index 00000000..e69de29b diff --git a/docs/system-requirements.md b/docs/system-requirements.md index bef8a1e1..791c1472 100644 --- a/docs/system-requirements.md +++ b/docs/system-requirements.md @@ -6,10 +6,7 @@ nav_order: 2 The MarkLogic Kafka connector has the following system requirements: -* Kafka 2.5 or higher, or Confluent Platform 7 or higher +* For version 2.0.0 of the connector and higher, Kafka 4.0 or higher and Java 17 or higher are required. +* For versions of the connector prior to 2.0.0, Kafka 2.5 or higher and Java 8 or higher are required. * For writing data, MarkLogic 9 or higher. * For reading data, MarkLogic 10.0-6 or higher. - -The MarkLogic Kafka connector may work on versions of Kafka prior to 2.5 or Confluent Platform prior to 7, but it has -not been tested on those. - diff --git a/gradle.properties b/gradle.properties index 7c9c3920..4e1ba3d6 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,5 +1,5 @@ group=com.marklogic -version=1.11.0 +version=2.0.0 # For the Confluent Connector Archive componentOwner=marklogic diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 94113f20..d4081da4 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,6 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-8.11-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-8.14.3-bin.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/gradlew b/gradlew index f5feea6d..f3b75f3b 100755 --- a/gradlew +++ b/gradlew @@ -86,8 +86,7 @@ done # shellcheck disable=SC2034 APP_BASE_NAME=${0##*/} # Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036) -APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s -' "$PWD" ) || exit +APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s\n' "$PWD" ) || exit # Use the maximum available, or set MAX_FD != -1 to use that value. 
MAX_FD=maximum diff --git a/settings.gradle b/settings.gradle index 7b86540a..4998acb7 100644 --- a/settings.gradle +++ b/settings.gradle @@ -1,2 +1,2 @@ rootProject.name = 'kafka-connect-marklogic' - +include "test-app" diff --git a/src/main/java/com/marklogic/kafka/connect/source/XmlPlanInvoker.java b/src/main/java/com/marklogic/kafka/connect/source/XmlPlanInvoker.java index db568aa0..8f3a4a73 100644 --- a/src/main/java/com/marklogic/kafka/connect/source/XmlPlanInvoker.java +++ b/src/main/java/com/marklogic/kafka/connect/source/XmlPlanInvoker.java @@ -17,11 +17,7 @@ import org.w3c.dom.NodeList; import javax.xml.XMLConstants; -import javax.xml.transform.OutputKeys; -import javax.xml.transform.Transformer; -import javax.xml.transform.TransformerConfigurationException; -import javax.xml.transform.TransformerException; -import javax.xml.transform.TransformerFactory; +import javax.xml.transform.*; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; import java.io.StringWriter; @@ -31,7 +27,7 @@ class XmlPlanInvoker extends AbstractPlanInvoker implements PlanInvoker { - private static final Logger logger = LoggerFactory.getLogger(XmlPlanInvoker.class); + private static final Logger staticLogger = LoggerFactory.getLogger(XmlPlanInvoker.class); private static final String TABLE_NS_URI = "http://marklogic.com/table"; @@ -83,7 +79,7 @@ private String getKeyFromRow(Node row) { NamedNodeMap attributes = column.getAttributes(); // The 'name' attribute is expected to exist; trust but verify if (attributes != null && attributes.getNamedItem("name") != null && - keyColumn.equals(attributes.getNamedItem("name").getTextContent())) { + keyColumn.equals(attributes.getNamedItem("name").getTextContent())) { return column.getTextContent(); } } @@ -128,7 +124,7 @@ private static TransformerFactory makeNewTransformerFactory() { private static void logTransformerFactoryWarning(String xmlConstant, String errorMessage) { String baseTransformerFactoryWarningMessage = "Unable to set {} on TransformerFactory; cause: {}"; - logger.warn(baseTransformerFactoryWarningMessage, xmlConstant, errorMessage); + staticLogger.warn(baseTransformerFactoryWarningMessage, xmlConstant, errorMessage); } } diff --git a/test-app/.env b/test-app/.env new file mode 100644 index 00000000..70d05d13 --- /dev/null +++ b/test-app/.env @@ -0,0 +1,2 @@ +MARKLOGIC_IMAGE=ml-docker-db-dev-tierpoint.bed-artifactory.bedford.progress.com/marklogic/marklogic-server-ubi:latest-12 +MARKLOGIC_LOGS_VOLUME=../docker/marklogic/logs diff --git a/test-app/.gitignore b/test-app/.gitignore index d73705b8..1d379df7 100644 --- a/test-app/.gitignore +++ b/test-app/.gitignore @@ -1,2 +1,5 @@ src/main/hub-internal-config src/main/entity-config +gradle +gradlew +gradlew.bat diff --git a/test-app/build.gradle b/test-app/build.gradle index 14dcf498..28f824e2 100644 --- a/test-app/build.gradle +++ b/test-app/build.gradle @@ -1,45 +1,45 @@ plugins { id 'net.saliman.properties' version '1.5.2' - id "com.marklogic.ml-data-hub" version "6.0.0" + id "com.marklogic.ml-data-hub" version "6.2.1" } // Tasks for working with Confluent Platform running locally. 
// See "Testing with Confluent Platform" in CONTRIBUTING.md -task loadDatagenPurchasesConnector(type: Exec) { +tasks.register("loadDatagenPurchasesConnector", Exec) { description = "Load an instance of the Datagen connector into Confluent Platform for sending JSON documents to " + "the 'purchases' topic" commandLine "curl", "-s", "-X", "POST", "-H", "Content-Type: application/json", "--data", "@ConfluentConnectorConfigs/datagen-purchases-source.json", "http://localhost:8083/connectors" } -task loadMarkLogicPurchasesSinkConnector(type: Exec) { +tasks.register("loadMarkLogicPurchasesSinkConnector", Exec) { description = "Load an instance of the MarkLogic Kafka connector into Confluent Platform for writing data to " + "MarkLogic from the 'purchases' topic" commandLine "curl", "-s", "-X", "POST", "-H", "Content-Type: application/json", "--data", "@ConfluentConnectorConfigs/marklogic-purchases-sink.json", "http://localhost:8083/connectors" } -task loadMarkLogicPurchasesSourceConnector(type: Exec) { +tasks.register("loadMarkLogicPurchasesSourceConnector", Exec) { description = "Load an instance of the MarkLogic Kafka connector into Confluent Platform for reading rows from " + "the demo/purchases view" commandLine "curl", "-s", "-X", "POST", "-H", "Content-Type: application/json", "--data", "@ConfluentConnectorConfigs/marklogic-purchases-source.json", "http://localhost:8083/connectors" } -task loadMarkLogicAuthorsSourceConnector(type: Exec) { +tasks.register("loadMarkLogicAuthorsSourceConnector", Exec) { description = "Loads a source connector that retrieves authors from the citations.xml file, which is also used for " + "all the automated tests" commandLine "curl", "-s", "-X", "POST", "-H", "Content-Type: application/json", "--data", "@ConfluentConnectorConfigs/marklogic-authors-source.json", "http://localhost:8083/connectors" } -task loadMarkLogicEmployeesSourceConnector(type: Exec) { +tasks.register("loadMarkLogicEmployeesSourceConnector", Exec) { commandLine "curl", "-s", "-X", "POST", "-H", "Content-Type: application/json", "--data", "@ConfluentConnectorConfigs/marklogic-employees-source.json", "http://localhost:8083/connectors" } -task insertAuthors(type: Test) { +tasks.register("insertAuthors", Test) { useJUnitPlatform() systemProperty "AUTHOR_IDS", authorIds description = "Insert a new author into the data-hub-STAGING database via a new citations XML document; " + @@ -47,7 +47,7 @@ task insertAuthors(type: Test) { include "com/marklogic/kafka/connect/source/debug/InsertAuthorsTest.class" } -task loadMarkLogicDHPurchasesSinkConnector(type: Exec) { +tasks.register("loadMarkLogicDHPurchasesSinkConnector", Exec) { description = "Load an instance of the MarkLogic Kafka connector into Confluent Platform for writing data to " + "MarkLogic from the 'purchases' topic" commandLine "curl", "-s", "-X", "POST", "-H", "Content-Type: application/json", diff --git a/test-app/docker-compose.yml b/test-app/docker-compose.yml index 12dd040a..3d2a8873 100644 --- a/test-app/docker-compose.yml +++ b/test-app/docker-compose.yml @@ -1,15 +1,14 @@ --- -version: '3.8' -name: marklogic-kafka-confluent +name: manual-tests-marklogic-kafka-confluent services: -# This compose file is based on: -# This guide - https://docs.confluent.io/platform/current/platform-quickstart.html#step-6-uninstall-and-clean-up -# This compose file - https://raw.githubusercontent.com/confluentinc/cp-all-in-one/7.6.1-post/cp-all-in-one-kraft/docker-compose.yml -# Extended to include a MarkLogic container + # This compose file is based on: + # 
This guide - https://docs.confluent.io/platform/current/platform-quickstart.html#step-6-uninstall-and-clean-up + # This compose file - https://github.com/confluentinc/cp-all-in-one/blob/8.0.0-post/cp-all-in-one/docker-compose.yml + # Extended to include a MarkLogic container broker: - image: confluentinc/cp-kafka:7.6.1 + image: confluentinc/cp-server:8.0.0 hostname: broker container_name: broker ports: @@ -26,22 +25,43 @@ services: KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT://broker:29092,PLAINTEXT_HOST://localhost:9092' KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 + KAFKA_CONFLUENT_LICENSE_TOPIC_REPLICATION_FACTOR: 1 + KAFKA_CONFLUENT_BALANCER_TOPIC_REPLICATION_FACTOR: 1 KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 KAFKA_JMX_PORT: 9101 KAFKA_JMX_HOSTNAME: localhost + KAFKA_CONFLUENT_SCHEMA_REGISTRY_URL: http://schema-registry:8081 + KAFKA_METRIC_REPORTERS: io.confluent.telemetry.reporter.TelemetryReporter + CONFLUENT_METRICS_REPORTER_BOOTSTRAP_SERVERS: broker:29092 + CONFLUENT_METRICS_REPORTER_TOPIC_REPLICAS: 1 KAFKA_PROCESS_ROLES: 'broker,controller' KAFKA_CONTROLLER_QUORUM_VOTERS: '1@broker:29093' KAFKA_LISTENERS: 'PLAINTEXT://broker:29092,CONTROLLER://broker:29093,PLAINTEXT_HOST://0.0.0.0:9092' KAFKA_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT' KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER' KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs' + CONFLUENT_METRICS_ENABLE: 'true' + CONFLUENT_SUPPORT_CUSTOMER_ID: 'anonymous' # Replace CLUSTER_ID with a unique base64 UUID using "bin/kafka-storage.sh random-uuid" # See https://docs.confluent.io/kafka/operations-tools/kafka-tools.html#kafka-storage-sh CLUSTER_ID: 'MkU3OEVBNTcwNTJENDM2Qk' + KAFKA_CONFLUENT_TELEMETRY_EXPORTER_C3PLUSPLUS_TYPE: "http" + KAFKA_CONFLUENT_TELEMETRY_EXPORTER_C3PLUSPLUS_ENABLED: "true" + KAFKA_CONFLUENT_TELEMETRY_EXPORTER_C3PLUSPLUS_METRICS_INCLUDE: 
"io.confluent.kafka.server.request.(?!.*delta).*|io.confluent.kafka.server.server.broker.state|io.confluent.kafka.server.replica.manager.leader.count|io.confluent.kafka.server.request.queue.size|io.confluent.kafka.server.broker.topic.failed.produce.requests.rate.1.min|io.confluent.kafka.server.tier.archiver.total.lag|io.confluent.kafka.server.request.total.time.ms.p99|io.confluent.kafka.server.broker.topic.failed.fetch.requests.rate.1.min|io.confluent.kafka.server.broker.topic.total.fetch.requests.rate.1.min|io.confluent.kafka.server.partition.caught.up.replicas.count|io.confluent.kafka.server.partition.observer.replicas.count|io.confluent.kafka.server.tier.tasks.num.partitions.in.error|io.confluent.kafka.server.broker.topic.bytes.out.rate.1.min|io.confluent.kafka.server.request.total.time.ms.p95|io.confluent.kafka.server.controller.active.controller.count|io.confluent.kafka.server.request.total.time.ms.p999|io.confluent.kafka.server.controller.active.broker.count|io.confluent.kafka.server.request.handler.pool.request.handler.avg.idle.percent.rate.1.min|io.confluent.kafka.server.controller.unclean.leader.elections.rate.1.min|io.confluent.kafka.server.replica.manager.partition.count|io.confluent.kafka.server.controller.unclean.leader.elections.total|io.confluent.kafka.server.partition.replicas.count|io.confluent.kafka.server.broker.topic.total.produce.requests.rate.1.min|io.confluent.kafka.server.controller.offline.partitions.count|io.confluent.kafka.server.socket.server.network.processor.avg.idle.percent|io.confluent.kafka.server.partition.under.replicated|io.confluent.kafka.server.log.log.start.offset|io.confluent.kafka.server.log.tier.size|io.confluent.kafka.server.log.size|io.confluent.kafka.server.tier.fetcher.bytes.fetched.total|io.confluent.kafka.server.request.total.time.ms.p50|io.confluent.kafka.server.tenant.consumer.lag.offsets|io.confluent.kafka.server.log.log.end.offset|io.confluent.kafka.server.broker.topic.bytes.in.rate.1.min|io.confluent.kafka.server.partition.under.min.isr|io.confluent.kafka.server.partition.in.sync.replicas.count|io.confluent.telemetry.http.exporter.batches.dropped|io.confluent.telemetry.http.exporter.items.total|io.confluent.telemetry.http.exporter.items.succeeded|io.confluent.telemetry.http.exporter.send.time.total.millis|io.confluent.kafka.server.controller.leader.election.rate.(?!.*delta).*|io.confluent.telemetry.http.exporter.batches.failed" + KAFKA_CONFLUENT_TELEMETRY_EXPORTER_C3PLUSPLUS_CLIENT_BASE_URL: "http://prometheus:9090/api/v1/otlp" + KAFKA_CONFLUENT_TELEMETRY_EXPORTER_C3PLUSPLUS_CLIENT_COMPRESSION: "gzip" + KAFKA_CONFLUENT_TELEMETRY_EXPORTER_C3PLUSPLUS_API_KEY: "dummy" + KAFKA_CONFLUENT_TELEMETRY_EXPORTER_C3PLUSPLUS_API_SECRET: "dummy" + KAFKA_CONFLUENT_TELEMETRY_EXPORTER_C3PLUSPLUS_BUFFER_PENDING_BATCHES_MAX: "80" + KAFKA_CONFLUENT_TELEMETRY_EXPORTER_C3PLUSPLUS_BUFFER_BATCH_ITEMS_MAX: "4000" + KAFKA_CONFLUENT_TELEMETRY_EXPORTER_C3PLUSPLUS_BUFFER_INFLIGHT_SUBMISSIONS_MAX: "10" + KAFKA_CONFLUENT_TELEMETRY_METRICS_COLLECTOR_INTERVAL_MS: "60000" + KAFKA_CONFLUENT_TELEMETRY_REMOTECONFIG_CONFLUENT_ENABLED: "false" + KAFKA_CONFLUENT_CONSUMER_LAG_EMITTER_ENABLED: "true" schema-registry: - image: confluentinc/cp-schema-registry:7.6.1 + image: confluentinc/cp-schema-registry:8.0.0 hostname: schema-registry container_name: schema-registry depends_on: @@ -56,7 +76,8 @@ services: SCHEMA_REGISTRY_LISTENERS: http://0.0.0.0:8081 connect: - image: cnfldemos/cp-server-connect-datagen:0.6.4-7.6.0 + image: cnfldemos/cp-server-connect-datagen:0.6.7-8.0.0 + 
platform: linux/amd64 hostname: connect container_name: connect depends_on: @@ -80,17 +101,43 @@ services: CONNECT_KEY_CONVERTER: org.apache.kafka.connect.storage.StringConverter CONNECT_VALUE_CONVERTER: io.confluent.connect.avro.AvroConverter CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry:8081 - # CLASSPATH required due to CC-2422 - CLASSPATH: /usr/share/java/monitoring-interceptors/monitoring-interceptors-7.6.1.jar - CONNECT_PRODUCER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringProducerInterceptor" - CONNECT_CONSUMER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringConsumerInterceptor" CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components,/usr/share/confluent-marklogic-components" - CONNECT_LOG4J_LOGGERS: org.apache.zookeeper=ERROR,org.I0Itec.zkclient=ERROR,org.reflections=ERROR + CONNECT_LOG4J_LOGGERS: log4j.rootLogger=DEBUG,org.apache.zookeeper=ERROR,org.I0Itec.zkclient=ERROR,org.reflections=ERROR + volumes: + - ../docker/confluent-marklogic-components:/usr/share/confluent-marklogic-components + + prometheus: + image: confluentinc/cp-enterprise-prometheus:2.2.0 + hostname: cp-enterprise-prometheus + container_name: prometheus volumes: - - ./docker/confluent-marklogic-components:/usr/share/confluent-marklogic-components + - ../docker/prometheus/config:/mnt/config + ports: + - "9090:9090" + cap_drop: + - NET_RAW + environment: + CONFIG_PATH: "/mnt/config" + SHOULD_LOG_TO_FILE: false + + alertmanager: + image: confluentinc/cp-enterprise-alertmanager:2.2.0 + hostname: cp-enterprise-alertmanager + container_name: alertmanager + depends_on: + - prometheus + volumes: + - ../docker/prometheus/config:/mnt/config + ports: + - "9093:9093" + cap_drop: + - NET_RAW + environment: + CONFIG_PATH: "/mnt/config" + SHOULD_LOG_TO_FILE: false control-center: - image: confluentinc/cp-enterprise-control-center:7.6.1 + image: confluentinc/cp-enterprise-control-center-next-gen:2.2.0 hostname: control-center container_name: control-center depends_on: @@ -98,10 +145,14 @@ services: - schema-registry - connect - ksqldb-server + - prometheus + - alertmanager ports: - "9021:9021" cap_drop: - NET_RAW + volumes: + - ../docker/prometheus/config:/mnt/config environment: CONTROL_CENTER_BOOTSTRAP_SERVERS: 'broker:29092' CONTROL_CENTER_CONNECT_CONNECT-DEFAULT_CLUSTER: 'connect:8083' @@ -113,10 +164,15 @@ services: CONTROL_CENTER_INTERNAL_TOPICS_PARTITIONS: 1 CONTROL_CENTER_MONITORING_INTERCEPTOR_TOPIC_PARTITIONS: 1 CONFLUENT_METRICS_TOPIC_REPLICATION: 1 + CONTROL_CENTER_PROMETHEUS_ENABLE: true + CONTROL_CENTER_PROMETHEUS_URL: http://prometheus:9090 + CONTROL_CENTER_PROMETHEUS_RULES_FILE: /mnt/config/trigger_rules-generated.yml + CONTROL_CENTER_ALERTMANAGER_URL: http://alertmanager:9093 + CONTROL_CENTER_ALERTMANAGER_CONFIG_FILE: /mnt/config/alertmanager-generated.yml PORT: 9021 ksqldb-server: - image: confluentinc/cp-ksqldb-server:7.6.1 + image: confluentinc/cp-ksqldb-server:8.0.0 hostname: ksqldb-server container_name: ksqldb-server depends_on: @@ -141,7 +197,7 @@ services: KSQL_KSQL_LOGGING_PROCESSING_STREAM_AUTO_CREATE: 'true' ksqldb-cli: - image: confluentinc/cp-ksqldb-cli:7.6.1 + image: confluentinc/cp-ksqldb-cli:8.0.0 container_name: ksqldb-cli depends_on: - broker @@ -153,7 +209,7 @@ services: - NET_RAW ksql-datagen: - image: confluentinc/ksqldb-examples:7.6.1 + image: confluentinc/ksqldb-examples:8.0.0 hostname: ksql-datagen container_name: ksql-datagen depends_on: @@ -177,7 +233,7 @@ services: 
       STREAMS_SCHEMA_REGISTRY_PORT: 8081
 
   rest-proxy:
-    image: confluentinc/cp-kafka-rest:7.6.1
+    image: confluentinc/cp-kafka-rest:8.0.0
     depends_on:
       - broker
       - schema-registry
@@ -193,55 +249,61 @@ services:
       KAFKA_REST_LISTENERS: "http://0.0.0.0:8082"
       KAFKA_REST_SCHEMA_REGISTRY_URL: 'http://schema-registry:8081'
 
-  marklogic:
-    image: "marklogicdb/marklogic-db:latest-11"
-    platform: linux/amd64
+  flink-sql-client:
+    image: cnfldemos/flink-sql-client-kafka:1.19.1-scala_2.12-java17
+    depends_on:
+      - flink-jobmanager
+    hostname: flink-sql-client
+    container_name: flink-sql-client
+    cap_drop:
+      - NET_RAW
     environment:
-      - INSTALL_CONVERTERS=true
-      - MARKLOGIC_INIT=true
-      - MARKLOGIC_ADMIN_USERNAME=admin
-      - MARKLOGIC_ADMIN_PASSWORD=admin
-    volumes:
-      - ./docker/marklogic/logs:/var/opt/MarkLogic/Logs
+      FLINK_JOBMANAGER_HOST: flink-jobmanager
+
+  flink-jobmanager:
+    image: cnfldemos/flink-kafka:1.19.1-scala_2.12-java17
+    hostname: flink-jobmanager
+    container_name: flink-jobmanager
     ports:
-      - "8000-8002:8000-8002"
-      - "8010-8013:8010-8013"
-      - "8018-8019:8018-8019"
+      - 9081:9081
     cap_drop:
       - NET_RAW
+    command: jobmanager
+    environment:
+      - |
+        FLINK_PROPERTIES=
+        jobmanager.rpc.address: flink-jobmanager
+        rest.bind-port: 9081
 
-  # Copied from https://docs.sonarsource.com/sonarqube/latest/setup-and-upgrade/install-the-server/#example-docker-compose-configuration .
-  sonarqube:
-    image: sonarqube:10.3.0-community
+  flink-taskmanager:
+    image: cnfldemos/flink-kafka:1.19.1-scala_2.12-java17
+    hostname: flink-taskmanager
+    container_name: flink-taskmanager
     depends_on:
-      - postgres
+      - flink-jobmanager
+    command: taskmanager
+    scale: 1
     environment:
-      SONAR_JDBC_URL: jdbc:postgresql://postgres:5432/sonar
-      SONAR_JDBC_USERNAME: sonar
-      SONAR_JDBC_PASSWORD: sonar
-    volumes:
-      - sonarqube_data:/opt/sonarqube/data
-      - sonarqube_extensions:/opt/sonarqube/extensions
-      - sonarqube_logs:/opt/sonarqube/logs
-    ports:
-      - "9000:9000"
+      - |
+        FLINK_PROPERTIES=
+        jobmanager.rpc.address: flink-jobmanager
+        taskmanager.numberOfTaskSlots: 10
     cap_drop:
       - NET_RAW
 
-  postgres:
-    image: postgres:15-alpine
+  marklogic:
+    image: "${MARKLOGIC_IMAGE}"
+    platform: linux/amd64
     environment:
-      POSTGRES_USER: sonar
-      POSTGRES_PASSWORD: sonar
+      - INSTALL_CONVERTERS=true
+      - MARKLOGIC_INIT=true
+      - MARKLOGIC_ADMIN_USERNAME=admin
+      - MARKLOGIC_ADMIN_PASSWORD=admin
     volumes:
-      - postgresql:/var/lib/postgresql
-      - postgresql_data:/var/lib/postgresql/data
+      - ${MARKLOGIC_LOGS_VOLUME}:/var/opt/MarkLogic/Logs
+    ports:
+      - "8000-8002:8000-8002"
+      - "8010-8013:8010-8013"
+      - "8018-8019:8018-8019"
     cap_drop:
       - NET_RAW
-
-volumes:
-  sonarqube_data:
-  sonarqube_extensions:
-  sonarqube_logs:
-  postgresql:
-  postgresql_data:
diff --git a/test-app/gradle/wrapper/gradle-wrapper.jar b/test-app/gradle/wrapper/gradle-wrapper.jar
deleted file mode 100644
index 249e5832..00000000
Binary files a/test-app/gradle/wrapper/gradle-wrapper.jar and /dev/null differ
diff --git a/test-app/gradle/wrapper/gradle-wrapper.properties b/test-app/gradle/wrapper/gradle-wrapper.properties
deleted file mode 100644
index e411586a..00000000
--- a/test-app/gradle/wrapper/gradle-wrapper.properties
+++ /dev/null
@@ -1,5 +0,0 @@
-distributionBase=GRADLE_USER_HOME
-distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-8.4-bin.zip
-zipStoreBase=GRADLE_USER_HOME
-zipStorePath=wrapper/dists
diff --git a/test-app/gradlew b/test-app/gradlew
deleted file mode 100755
index a69d9cb6..00000000
--- a/test-app/gradlew
+++ /dev/null
@@ -1,240 +0,0 @@
-#!/bin/sh
-
-#
-# Copyright Ā© 2015-2021 the original authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-##############################################################################
-#
-#   Gradle start up script for POSIX generated by Gradle.
-#
-#   Important for running:
-#
-#   (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
-#       noncompliant, but you have some other compliant shell such as ksh or
-#       bash, then to run this script, type that shell name before the whole
-#       command line, like:
-#
-#           ksh Gradle
-#
-#       Busybox and similar reduced shells will NOT work, because this script
-#       requires all of these POSIX shell features:
-#         * functions;
-#         * expansions Ā«$varĀ», Ā«${var}Ā», Ā«${var:-default}Ā», Ā«${var+SET}Ā»,
-#           Ā«${var#prefix}Ā», Ā«${var%suffix}Ā», and Ā«$( cmd )Ā»;
-#         * compound commands having a testable exit status, especially Ā«caseĀ»;
-#         * various built-in commands including Ā«commandĀ», Ā«setĀ», and Ā«ulimitĀ».
-#
-#   Important for patching:
-#
-#   (2) This script targets any POSIX shell, so it avoids extensions provided
-#       by Bash, Ksh, etc; in particular arrays are avoided.
-#
-#       The "traditional" practice of packing multiple parameters into a
-#       space-separated string is a well documented source of bugs and security
-#       problems, so this is (mostly) avoided, by progressively accumulating
-#       options in "$@", and eventually passing that to Java.
-#
-#       Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
-#       and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
-#       see the in-line comments for details.
-#
-#       There are tweaks for specific operating systems such as AIX, CygWin,
-#       Darwin, MinGW, and NonStop.
-#
-#   (3) This script is generated from the Groovy template
-#       https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
-#       within the Gradle project.
-#
-#       You can find Gradle at https://github.com/gradle/gradle/.
-#
-##############################################################################
-
-# Attempt to set APP_HOME
-
-# Resolve links: $0 may be a link
-app_path=$0
-
-# Need this for daisy-chained symlinks.
-while
-    APP_HOME=${app_path%"${app_path##*/}"}  # leaves a trailing /; empty if no leading path
-    [ -h "$app_path" ]
-do
-    ls=$( ls -ld "$app_path" )
-    link=${ls#*' -> '}
-    case $link in             #(
-      /*)   app_path=$link ;; #(
-      *)    app_path=$APP_HOME$link ;;
-    esac
-done
-
-APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
-
-APP_NAME="Gradle"
-APP_BASE_NAME=${0##*/}
-
-# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
-DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
-
-# Use the maximum available, or set MAX_FD != -1 to use that value.
-MAX_FD=maximum
-
-warn () {
-    echo "$*"
-} >&2
-
-die () {
-    echo
-    echo "$*"
-    echo
-    exit 1
-} >&2
-
-# OS specific support (must be 'true' or 'false').
-cygwin=false
-msys=false
-darwin=false
-nonstop=false
-case "$( uname )" in                #(
-  CYGWIN* )         cygwin=true  ;; #(
-  Darwin* )         darwin=true  ;; #(
-  MSYS* | MINGW* )  msys=true    ;; #(
-  NONSTOP* )        nonstop=true ;;
-esac
-
-CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
-
-
-# Determine the Java command to use to start the JVM.
-if [ -n "$JAVA_HOME" ] ; then
-    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
-        # IBM's JDK on AIX uses strange locations for the executables
-        JAVACMD=$JAVA_HOME/jre/sh/java
-    else
-        JAVACMD=$JAVA_HOME/bin/java
-    fi
-    if [ ! -x "$JAVACMD" ] ; then
-        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
-
-Please set the JAVA_HOME variable in your environment to match the
-location of your Java installation."
-    fi
-else
-    JAVACMD=java
-    which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
-
-Please set the JAVA_HOME variable in your environment to match the
-location of your Java installation."
-fi
-
-# Increase the maximum file descriptors if we can.
-if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
-    case $MAX_FD in #(
-      max*)
-        MAX_FD=$( ulimit -H -n ) ||
-            warn "Could not query maximum file descriptor limit"
-    esac
-    case $MAX_FD in  #(
-      '' | soft) :;; #(
-      *)
-        ulimit -n "$MAX_FD" ||
-            warn "Could not set maximum file descriptor limit to $MAX_FD"
-    esac
-fi
-
-# Collect all arguments for the java command, stacking in reverse order:
-#   * args from the command line
-#   * the main class name
-#   * -classpath
-#   * -D...appname settings
-#   * --module-path (only if needed)
-#   * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
-
-# For Cygwin or MSYS, switch paths to Windows format before running java
-if "$cygwin" || "$msys" ; then
-    APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
-    CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
-
-    JAVACMD=$( cygpath --unix "$JAVACMD" )
-
-    # Now convert the arguments - kludge to limit ourselves to /bin/sh
-    for arg do
-        if
-            case $arg in                                #(
-              -*)   false ;;                            # don't mess with options #(
-              /?*)  t=${arg#/} t=/${t%%/*}              # looks like a POSIX filepath
-                    [ -e "$t" ] ;;                      #(
-              *)    false ;;
-            esac
-        then
-            arg=$( cygpath --path --ignore --mixed "$arg" )
-        fi
-        # Roll the args list around exactly as many times as the number of
-        # args, so each arg winds up back in the position where it started, but
-        # possibly modified.
-        #
-        # NB: a `for` loop captures its iteration list before it begins, so
-        # changing the positional parameters here affects neither the number of
-        # iterations, nor the values presented in `arg`.
-        shift                   # remove old arg
-        set -- "$@" "$arg"      # push replacement arg
-    done
-fi
-
-# Collect all arguments for the java command;
-#   * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
-#     shell script including quotes and variable substitutions, so put them in
-#     double quotes to make sure that they get re-expanded; and
-#   * put everything else in single quotes, so that it's not re-expanded.
-
-set -- \
-        "-Dorg.gradle.appname=$APP_BASE_NAME" \
-        -classpath "$CLASSPATH" \
-        org.gradle.wrapper.GradleWrapperMain \
-        "$@"
-
-# Stop when "xargs" is not available.
-if ! command -v xargs >/dev/null 2>&1
-then
-    die "xargs is not available"
-fi
-
-# Use "xargs" to parse quoted args.
-#
-# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
-#
-# In Bash we could simply go:
-#
-#   readarray ARGS < <( xargs -n1 <<<"$var" ) &&
-#   set -- "${ARGS[@]}" "$@"
-#
-# but POSIX shell has neither arrays nor command substitution, so instead we
-# post-process each arg (as a line of input to sed) to backslash-escape any
-# character that might be a shell metacharacter, then use eval to reverse
-# that process (while maintaining the separation between arguments), and wrap
-# the whole thing up as a single "set" statement.
-#
-# This will of course break if any of these variables contains a newline or
-# an unmatched quote.
-#
-
-eval "set -- $(
-        printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
-        xargs -n1 |
-        sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
-        tr '\n' ' '
-    )" '"$@"'
-
-exec "$JAVACMD" "$@"
diff --git a/test-app/gradlew.bat b/test-app/gradlew.bat
deleted file mode 100755
index f127cfd4..00000000
--- a/test-app/gradlew.bat
+++ /dev/null
@@ -1,91 +0,0 @@
-@rem
-@rem Copyright 2015 the original author or authors.
-@rem
-@rem Licensed under the Apache License, Version 2.0 (the "License");
-@rem you may not use this file except in compliance with the License.
-@rem You may obtain a copy of the License at
-@rem
-@rem      https://www.apache.org/licenses/LICENSE-2.0
-@rem
-@rem Unless required by applicable law or agreed to in writing, software
-@rem distributed under the License is distributed on an "AS IS" BASIS,
-@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-@rem See the License for the specific language governing permissions and
-@rem limitations under the License.
-@rem
-
-@if "%DEBUG%"=="" @echo off
-@rem ##########################################################################
-@rem
-@rem  Gradle startup script for Windows
-@rem
-@rem ##########################################################################
-
-@rem Set local scope for the variables with windows NT shell
-if "%OS%"=="Windows_NT" setlocal
-
-set DIRNAME=%~dp0
-if "%DIRNAME%"=="" set DIRNAME=.
-set APP_BASE_NAME=%~n0
-set APP_HOME=%DIRNAME%
-
-@rem Resolve any "." and ".." in APP_HOME to make it shorter.
-for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
-
-@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
-set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
-
-@rem Find java.exe
-if defined JAVA_HOME goto findJavaFromJavaHome
-
-set JAVA_EXE=java.exe
-%JAVA_EXE% -version >NUL 2>&1
-if %ERRORLEVEL% equ 0 goto execute
-
-echo.
-echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
-echo.
-echo Please set the JAVA_HOME variable in your environment to match the
-echo location of your Java installation.
-
-goto fail
-
-:findJavaFromJavaHome
-set JAVA_HOME=%JAVA_HOME:"=%
-set JAVA_EXE=%JAVA_HOME%/bin/java.exe
-
-if exist "%JAVA_EXE%" goto execute
-
-echo.
-echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
-echo.
-echo Please set the JAVA_HOME variable in your environment to match the
-echo location of your Java installation.
-
-goto fail
-
-:execute
-@rem Setup the command line
-
-set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
-
-
-@rem Execute Gradle
-"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
-
-:end
-@rem End local scope for the variables with windows NT shell
-if %ERRORLEVEL% equ 0 goto mainEnd
-
-:fail
-rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
-rem the _cmd.exe /c_ return code!
-set EXIT_CODE=%ERRORLEVEL%
-if %EXIT_CODE% equ 0 set EXIT_CODE=1
-if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE%
-exit /b %EXIT_CODE%
-
-:mainEnd
-if "%OS%"=="Windows_NT" endlocal
-
-:omega