diff --git a/.github/workflows/ci-workflow.yml b/.github/workflows/ci-workflow.yml
new file mode 100644
index 0000000..0479537
--- /dev/null
+++ b/.github/workflows/ci-workflow.yml
@@ -0,0 +1,156 @@
+name: CI Workflow
+
+on:
+ push:
+ branches: [ '**' ]
+ workflow_dispatch:
+
+concurrency: ci-${{ github.ref }}
+
+jobs:
+
+ build:
+ name: Build and Test
+ runs-on: ubuntu-latest
+ env:
+ API_SPEC_DEV_BRANCH: main
+ API_SPEC_TAG: 3.1-2.0.2
+ APPID_TENANT: 0f389ea4-778e-4831-9b29-6156c4c1df1e
+ APPID_URL: https://us-east.appid.cloud.ibm.com
+ ELASTIC_CRN: "crn:v1:bluemix:public:databases-for-elasticsearch:us-east:a/52366c9ab214402f9e96917b1b2850e9:c9acb36a-0feb-4b6e-aac1-6651c71d19e4::"
+ ELASTIC_URL: https://c9acb36a-0feb-4b6e-aac1-6651c71d19e4.2adb0220806343e3ae11df79c89b377f.databases.appdomain.cloud:32085
+ HRI_URL: https://localhost:1323/hri
+ IAM_CLOUD_URL: https://iam.cloud.ibm.com
+ JWT_AUDIENCE_ID: 21e7d376-9cdb-4a9d-a11f-9b76c007244d
+ KAFKA_BROKERS: broker-0-twvyj4m0kft5j0mh.kafka.svc01.us-east.eventstreams.cloud.ibm.com:9093,broker-1-twvyj4m0kft5j0mh.kafka.svc01.us-east.eventstreams.cloud.ibm.com:9093,broker-2-twvyj4m0kft5j0mh.kafka.svc01.us-east.eventstreams.cloud.ibm.com:9093,broker-3-twvyj4m0kft5j0mh.kafka.svc01.us-east.eventstreams.cloud.ibm.com:9093,broker-4-twvyj4m0kft5j0mh.kafka.svc01.us-east.eventstreams.cloud.ibm.com:9093,broker-5-twvyj4m0kft5j0mh.kafka.svc01.us-east.eventstreams.cloud.ibm.com:9093
+ KAFKA_INSTANCE: hri-dev1-event-streams
+ TENANT_ID: test
+ COS_URL: https://s3.us-east.cloud-object-storage.appdomain.cloud
+
+ CLOUD_API_KEY: ${{ secrets.CLOUD_API_KEY }}
+ ELASTIC_PASSWORD: ${{ secrets.ELASTIC_PASSWORD }}
+ ELASTIC_USERNAME: ${{ secrets.ELASTIC_USERNAME }}
+ KAFKA_PASSWORD: ${{ secrets.KAFKA_PASSWORD }}
+
+ steps:
+ - name: Set Branch Name
+ uses: nelonoel/branch-name@v1.0.1
+
+ - name: Install Go 1.15
+ uses: actions/setup-go@v2
+ with:
+ go-version: ^1.15
+
+ - name: Check out code into the Go module directory
+ uses: actions/checkout@v2
+
+ - name: Copyright Check
+ run: ./copyrightCheck.sh
+
+ - name: Build HRI Executable and Run Unit Tests
+ run: make
+
+ - name: Run Smoke Tests
+ run: ./run-smoketests.sh
+
+ - name: Install Ruby 2.6.5
+ uses: ruby/setup-ruby@v1
+ with:
+ ruby-version: 2.6.5
+ bundler-cache: false
+
+ - name: Install Ruby Gems, Run Dredd Tests, and Run IVT
+ run: |
+ gem install bundler
+ BUNDLE_GEMFILE="./test/Gemfile" bundle install
+ gem specific_install -l https://github.com/Alvearie/hri-test-helpers.git main
+ gem install dredd_hooks
+ ./run-dreddtests.sh
+ curl -sL https://ibm.biz/idt-installer | bash
+ ibmcloud login --apikey $CLOUD_API_KEY -r us-east || { echo 'IBM Cloud CLI login failed!'; exit 1; }
+ ibmcloud plugin install event-streams
+ ibmcloud es init -i ${KAFKA_INSTANCE}
+ ./run-ivttests.sh
+
+ - name: Upload Test Results
+ if: github.ref == 'refs/heads/main' || github.ref == 'refs/heads/develop' || startsWith(github.ref, 'refs/heads/support-')
+ run: |
+ ./combine_ivt_results.sh
+ ./test/spec/upload_test_reports.rb IVT
+ ./test/spec/upload_test_reports.rb Dredd
+
+ - name: Post Slack Update
+ if: ${{ failure() && ( github.ref == 'refs/heads/main' || github.ref == 'refs/heads/develop' || startsWith(github.ref, 'refs/heads/support-') ) }}
+ id: slack
+ uses: slackapi/slack-github-action@v1.14.0
+ with:
+ payload: "{\"Repo\":\"${{ github.repository }}\",\"Workflow\":\"${{ github.workflow }}\",\"Branch\":\"${{ env.BRANCH_NAME }}\",\"Link\":\"https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}\"}"
+ env:
+ SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
+
+ docker-build:
+ name: Docker Build
+ needs: build
+ if: ${{ github.ref == 'refs/heads/develop' }}
+ runs-on: ubuntu-latest
+ env:
+ IMAGE_NAME: mgmt-api
+ TOOLCHAIN_ID: 44eca025-5bc3-4dc9-8d86-d8c4956d09f0
+
+ steps:
+ - uses: actions/checkout@v2
+
+ - name: Determine Image Name & Tag
+ run: |
+ IMAGE_ID=ghcr.io/${{ github.repository }}/$IMAGE_NAME
+ # Change all uppercase to lowercase
+ IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')
+ # Strip git ref prefix from version
+ VERSION=$(echo "${{ github.ref }}" | sed -e 's,.*/\(.*\),\1,')
+ echo IMAGE_ID=$IMAGE_ID:$VERSION
+ echo "IMAGE_ID=$IMAGE_ID:$VERSION" >> $GITHUB_ENV
+
+ - name: Build image
+ run: docker build . --file docker/Dockerfile --tag $IMAGE_ID --label "runnumber=${GITHUB_RUN_ID}"
+
+ - name: Dockle Linter
+ uses: erzz/dockle-action@v1.1.1
+ with:
+ image: "${{ env.IMAGE_ID }}"
+ report-format: sarif
+ exit-code: 1
+ failure-threshold: 'WARN'
+
+ - name: Vulnerability Scan
+ uses: aquasecurity/trivy-action@0.0.20
+ with:
+ image-ref: "${{ env.IMAGE_ID }}"
+ format: 'table'
+ exit-code: '1'
+ ignore-unfixed: true
+ vuln-type: 'os,library'
+ severity: 'CRITICAL,HIGH,MEDIUM'
+
+ - name: Log in to registry
+ run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
+
+ - name: Push image
+ run: docker push $IMAGE_ID
+
+ - name: Trigger IBM Toolchain
+ if: github.ref == 'refs/heads/develop'
+ env:
+ CLOUD_API_KEY: ${{ secrets.CLOUD_API_KEY }}
+ run: |
+ curl -sL https://ibm.biz/idt-installer | bash
+ ibmcloud login --apikey $CLOUD_API_KEY -r us-east
+ ibmcloud dev tekton-trigger $TOOLCHAIN_ID --trigger-name 'CD Manual Trigger'
+
+ - name: Post Slack Update
+ if: ${{ failure() }}
+ id: slack
+ uses: slackapi/slack-github-action@v1.14.0
+ with:
+ payload: "{\"Repo\":\"${{ github.repository }}\",\"Workflow\":\"${{ github.workflow }}\",\"Branch\":\"${{ env.BRANCH_NAME }}\",\"Link\":\"https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}\"}"
+ env:
+ SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
diff --git a/.github/workflows/code-scans.yml b/.github/workflows/code-scans.yml
new file mode 100644
index 0000000..42b8da9
--- /dev/null
+++ b/.github/workflows/code-scans.yml
@@ -0,0 +1,44 @@
+name: Code Scans
+
+on:
+ push:
+ branches: [ 'main', 'develop', 'support-*' ]
+ pull_request:
+ types: [opened, synchronize, reopened]
+
+concurrency: code-scans-${{ github.ref }}
+
+jobs:
+ code-analysis:
+ name: Static Code Analysis
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ with:
+ # full checkout for SonarCloud analysis
+ fetch-depth: 0
+
+ - name: Setup Go
+ uses: actions/setup-go@v2
+ with:
+ go-version: ^1.15
+
+ - name: Unit Tests and Coverage
+ run: make test
+
+ - name: SonarCloud Scan
+ uses: sonarsource/sonarcloud-github-action@v1.6
+ with:
+ projectBaseDir: ./src
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
+
+ - name: Post Slack Update
+ if: ${{ failure() && github.event_name == 'push' }}
+ id: slack
+ uses: slackapi/slack-github-action@v1.14.0
+ with:
+ payload: "{\"Repo\":\"${{ github.repository }}\",\"Workflow\":\"${{ github.workflow }}\",\"Branch\":\"${{ env.BRANCH_NAME }}\",\"Link\":\"https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}\"}"
+ env:
+ SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml
new file mode 100644
index 0000000..d2ccfa1
--- /dev/null
+++ b/.github/workflows/pull_request.yml
@@ -0,0 +1,47 @@
+name: Pull Request Checks
+
+on:
+ pull_request:
+ types: [opened]
+ pull_request_review:
+ types: [submitted]
+
+concurrency: pr-${{ github.ref }}
+
+jobs:
+ dependency-checks:
+ name: Dependency Checks
+ # run if the PR was opened or a review was submitted containing '/pr_checks'
+ if: (github.event_name == 'pull_request') || (github.event_name == 'pull_request_review' && contains(github.event.review.body, '/pr_checks'))
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+
+ - name: Setup Go
+ uses: actions/setup-go@v2
+ with:
+ go-version: ^1.15
+
+ - name: Check Dependencies
+ # override default -eo pipefail
+ shell: bash --noprofile --norc {0}
+ run: |
+ wget -q -O nancy https://github.com/sonatype-nexus-community/nancy/releases/download/v1.0.22/nancy-v1.0.22-linux-amd64
+ chmod 555 nancy
+ cd src
+ go list -json -m all | ../nancy sleuth
+ found=$?
+ echo "VULNERABILITIES=$found" >> $GITHUB_ENV
+ [[ "$found" == 1 ]] && echo "::error ::Vulnerabilities found in dependencies."
+ exit 0
+
+ - uses: actions/github-script@v4
+ if: ${{ env.VULNERABILITIES != 0 }}
+ with:
+ script: |
+ github.issues.createComment({
+ issue_number: context.issue.number,
+ owner: context.repo.owner,
+ repo: context.repo.repo,
+ body: "## ⚠️ Vulnerabilities found in dependencies.\nSee the 'PR -> Vulnerability Checks' logs for more details: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}"
+ })
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
new file mode 100644
index 0000000..60796af
--- /dev/null
+++ b/.github/workflows/release.yml
@@ -0,0 +1,78 @@
+name: Release
+
+on:
+ push:
+ tags: [ '**' ]
+ workflow_dispatch:
+
+concurrency: release-${{ github.ref }}
+
+env:
+ IMAGE_NAME: mgmt-api
+ WH_COMMONS_CR: us.icr.io/wh-common-rns/hri
+
+jobs:
+ docker-build:
+ name: Docker Build
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+
+ - name: Determine Image Name & Tag
+ run: |
+ IMAGE_ID=ghcr.io/${{ github.repository }}/$IMAGE_NAME
+ # Change all uppercase to lowercase
+ IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')
+ # Strip git ref prefix from version
+ VERSION=$(echo "${{ github.ref }}" | sed -e 's,.*/\(.*\),\1,')
+ # Strip "v" prefix from tag name
+ VERSION=$(echo $VERSION | sed -e 's/^v//')
+ echo "VERSION=$VERSION" >> $GITHUB_ENV
+ echo IMAGE_ID=$IMAGE_ID:$VERSION
+ echo "IMAGE_ID=$IMAGE_ID:$VERSION" >> $GITHUB_ENV
+
+ - name: Build image
+ run: docker build . --file docker/Dockerfile --tag $IMAGE_ID --label "runnumber=${GITHUB_RUN_ID}"
+
+ - name: Dockle Linter
+ uses: erzz/dockle-action@v1.1.1
+ with:
+ image: "${{ env.IMAGE_ID }}"
+ report-format: sarif
+ exit-code: 1
+ failure-threshold: 'WARN'
+
+ - name: Vulnerability Scan
+ uses: aquasecurity/trivy-action@0.0.20
+ with:
+ image-ref: "${{ env.IMAGE_ID }}"
+ format: 'table'
+ exit-code: '1'
+ ignore-unfixed: true
+ vuln-type: 'os,library'
+ severity: 'CRITICAL,HIGH,MEDIUM'
+
+ - name: Log in to registry
+ run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
+
+ - name: Push image
+ run: docker push $IMAGE_ID
+
+ - name: Push image to WH Commons CR
+ env:
+ CLOUD_API_KEY: ${{ secrets.CLOUD_API_KEY }}
+ run: |
+ curl -sL https://ibm.biz/idt-installer | bash
+ ibmcloud login --apikey $CLOUD_API_KEY -r us-south
+ ibmcloud cr login
+ docker tag $IMAGE_ID $WH_COMMONS_CR/$IMAGE_NAME:$VERSION
+ docker push $WH_COMMONS_CR/$IMAGE_NAME:$VERSION
+
+ - name: Post Slack Update
+ if: ${{ failure() }}
+ id: slack
+ uses: slackapi/slack-github-action@v1.14.0
+ with:
+ payload: "{\"Repo\":\"${{ github.repository }}\",\"Workflow\":\"${{ github.workflow }}\",\"Branch\":\"${{ env.BRANCH_NAME }}\",\"Link\":\"https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}\"}"
+ env:
+ SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
diff --git a/.gitignore b/.gitignore
index 7d2fa89..8e42fe5 100644
--- a/.gitignore
+++ b/.gitignore
@@ -11,7 +11,12 @@ src/testCoverage.out
build/
src/github.com
+src/hri
pkg
+.gradle
+.DS_Store
+test/.idea
+test/jwt_assertion_tokens.json
# Output of the go coverage tool, specifically when used with LiteIDE
*.out
@@ -26,3 +31,7 @@ test/jwt_assertion_tokens.json
#VS Code:
*.code-workspace
+
+# Docker test files
+docker/test.env
+
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index be53434..0000000
--- a/.travis.yml
+++ /dev/null
@@ -1,95 +0,0 @@
-language: go
-go: 1.13.x
-
-# This is an example TravisCI build that will build, scan, deploy, and run integration tests.
-# All the required environment variables are listed below, and will need to be filled in to run your own build.
-env:
- global:
- - TENANT_ID=
- # CLOUD_API_KEY
- - secure:
- - TOOLCHAIN_ID=
- # Don't use ELASTICSEARCH_* for variable names or it will interfere with unit tests
- - ELASTIC_URL=
- - ELASTIC_CERT_FILE=
- - ELASTIC_USER=
- - COS_URL=
- - IAM_CLOUD_URL=
- # ELASTIC_PASSWORD
- - secure:
- # FN_WEB_SECURE_KEY used as the API key for IBM Functions web actions
- - secure:
- # HRI_API_KEY currently only for the HRI_API namespace
- - secure:
- # gitApiKey
- - secure:
- - EVENTSTREAMS_BROKERS=
- # SASL_PLAIN_PASSWORD
- - secure:
- - ASOC_CLI_URL=
- - ASOC_APP_ID=
- - ASOC_KEY_ID=
- # ASOC_KEY_SECRET
- - secure:
- - OCTOKIT_API_ENDPOINT="https://github.com/api/v3"
- - REGION=
- - RESOURCE_GROUP=
- - ELASTIC_INSTANCE=
- - ELASTIC_SVC_ACCOUNT=
- - KAFKA_INSTANCE=
- - KAFKA_SVC_ACCOUNT=
-
-stages:
- - name: Run Tests
- if: tag IS blank
- - name: App Scan
- if: branch = release-0.3 AND tag IS blank
- - name: Create Release
- if: tag IS present
-
-jobs:
- include:
- - stage: Run Tests
- before_install:
- - openssl aes-256-cbc -K $encrypted_75283d2a415b_key -iv $encrypted_75283d2a415b_iv -in elastic-cert64.enc -out elastic-cert64 -d
- install:
- - curl -sL https://ibm.biz/idt-installer | bash
- - ibmcloud plugin install doi
- - go get -u github.com/jstemmer/go-junit-report
- before_script:
- - ibmcloud login --apikey $CLOUD_API_KEY -r us-south
- script:
- - ./run-unittests.sh
- - make bins
- - ./run-insights-publish.sh buildRecord
- - ./run-insights-publish.sh unitTest
- - ./run-insights-publish.sh evaluateCI
- - NAMESPACE="$TRAVIS_BRANCH" ./deploy.sh --skipSecureApi
- - echo y | gem update --system
- - BUNDLE_GEMFILE="./test/Gemfile" bundle install
- - ./run-dreddtests.sh
- - ./run-ivttests.sh
- - ./run-fvttests.sh
- - ./run-insights-publish.sh smokeTest
- - ./run-insights-publish.sh dreddTest
- - ./run-insights-publish.sh ivtTest
- - ./run-insights-publish.sh fvtTest
- - ./run-insights-publish.sh evaluateCD
- - stage: App Scan
- script:
- - make test
- - ./appscan.sh
- - stage: Create Release
- script:
- - make bins
- before_deploy:
- - tar cvfz mgmt-api-$TRAVIS_TAG.tgz build mgmt-api-manifest.yml deploy.sh updateApi.sh secureApi.sh run-smoketests.sh
- deploy:
- provider: releases
- api_key:
- secure:
- file:
- - ./mgmt-api-$TRAVIS_TAG.tgz
- skip_cleanup: true
- on:
- tags: true
diff --git a/README.md b/README.md
index 04ea288..7c017ee 100644
--- a/README.md
+++ b/README.md
@@ -3,7 +3,7 @@ The Alvearie Health Record Ingestion service: a common 'Deployment Ready Compone
This repo contains the code for the Management API of the HRI, which is written in Golang using the [Echo](https://echo.labstack.com/) web framework. A separate OpenAPI specification is maintained in [Alvearie/hri-api-spec](https://github.com/Alvearie/hri-api-spec) for external user's reference. Please Note: Any changes to this (RESTful) Management API for the HRI requires changes in both the hri-api-spec repo and this hri-mgmt-api repo.
-This version is compatible with HRI `v3.0`.
+This version is compatible with HRI `v3.1`.
## Communication
* Please [join](https://alvearie.io/contributions/requestSlackAccess) our Slack channel for further questions: [#health-record-ingestion](https://alvearie.slack.com/archives/C01GM43LFJ6)
@@ -45,6 +45,12 @@ cd src; GOOS=linux GOACH=amd64 go build
## CI/CD
This application can be run locally, but almost all the endpoints require Elasticsearch, Kafka, and an OIDC server. GitHub actions builds and runs integration tests using a common Elastic Search and Event Streams instance. You can perform local manual testing using these resources. See [test/README.md](test/README.md) for more details.
+### Static Code Analysis
+In addition, SonarCloud [analysis](https://sonarcloud.io/dashboard?id=Alvearie_hri-mgmt-api) is performed on all pull requests and on long-lived branches: `main`, `develop`, and `support-*`. Several IDEs, including IntelliJ, have SonarLint plugins for dynamic analysis as you code.
+
+### Dependency Vulnerabilities
+Dependencies are also checked for vulnerabilities when a pull request is created. If any are found, a comment is added to the pull request with a link to the GitHub action, where you can view the logs for details. If the vulnerabilities are caused by your changes or are easy to fix, implement the changes on your branch. If not, create a ticket. To run the check again, submit a review for the pull request with `/pr_checks` in the message.
+
### Releases
Releases are created by creating GitHub tags, which trigger a build that packages everything into a Docker image. See [docker/README.md](docker/README.md) for more details.
diff --git a/combine_ivt_results.sh b/combine_ivt_results.sh
new file mode 100755
index 0000000..173d88c
--- /dev/null
+++ b/combine_ivt_results.sh
@@ -0,0 +1,39 @@
+#!/usr/bin/env bash
+
+# (C) Copyright IBM Corp. 2020
+#
+# SPDX-License-Identifier: Apache-2.0
+
+function combineTestResults() {
+ INPUT_DIRECTORY="${1}"
+ RESULT_FILE=$2
+ SUITE_NAME="hri-mgmt-api - $BRANCH_NAME - IVT"
+ failures=0
+ testCount=0
+ errors=0
+ skipped=0
+ time=0.0
+ output=""
+
+ for file_name in ${INPUT_DIRECTORY}/*.xml; do
+ newOutput=""
+ newTime=0.0
+ testCount=$((testCount+$(cat "$file_name" | grep -o 'tests="[^"]*' | sed 's/tests="//g')))
+ failures=$((failures+$(cat "$file_name" | grep -o 'failures="[^"]*' | sed 's/failures="//g')))
+ errors=$((errors+$(cat "$file_name" | grep -o 'errors="[^"]*' | sed 's/errors="//g')))
+ skipped=$((skipped+$(cat "$file_name" | grep -o 'skipped="[^"]*' | sed 's/skipped="//g')))
+ newTime=$(cat "$file_name" | head -2 | tail -1 | grep -o 'time="[^"]*' | sed 's/time="//g')
+ time=$(awk "BEGIN {print $time+$newTime; exit}")
+ newOutput=$(cat "$file_name" | tail -n +3 | sed '$d')
+ output="$output$newOutput"
+ done
+
+ date=`date`
+ header=""
+ footer=""
+ echo -e "$header\n$output\n$footer" > "$RESULT_FILE"
+
+ echo "Finished combining IVT JUnit files"
+}
+
+echo $(combineTestResults 'test/ivt_test_results' 'ivttest.xml')
\ No newline at end of file
diff --git a/config.yml b/config.yml
index 4cde05b..9226278 100644
--- a/config.yml
+++ b/config.yml
@@ -26,9 +26,14 @@ elastic-cert: |
Rp6anX5N6yIrzhVmAgefjQdBztYzdfPhsJBkS/TDnRmk
-----END CERTIFICATE-----
elastic-crn: "crn:v1:bluemix:public:databases-for-elasticsearch:us-east:a/52366c9ab214402f9e96917b1b2850e9:c9acb36a-0feb-4b6e-aac1-6651c71d19e4::"
-kafka-username: token
-kafka-password: a-password
kafka-admin-url: https://twvyj4m0kft5j0mh.svc01.us-east.eventstreams.cloud.ibm.com
+kafka-properties:
+ - "security.protocol:sasl_ssl"
+ - "sasl.mechanism:PLAIN"
+ - "sasl.username:token"
+ - "sasl.password:a-password"
+ - "ssl.endpoint.identification.algorithm:https"
+
kafka-brokers:
- "broker-4-twvyj4m0kft5j0mh.kafka.svc01.us-east.eventstreams.cloud.ibm.com:9093"
- "broker-3-twvyj4m0kft5j0mh.kafka.svc01.us-east.eventstreams.cloud.ibm.com:9093"
diff --git a/copyrightCheck.sh b/copyrightCheck.sh
new file mode 100755
index 0000000..e1e622e
--- /dev/null
+++ b/copyrightCheck.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+
+# (C) Copyright IBM Corp. 2020
+#
+# SPDX-License-Identifier: Apache-2.0
+
+files=$(find . -name "*.go" -or -name "*.rb" -or -name "*.sh")
+
+rtn=0
+for file in $files; do
+ #echo $file
+ if ! head -n 5 $file | grep -qE '(Copyright IBM Corp)|(MockGen)'; then
+ rtn=1
+ echo $file
+ fi
+done
+
+if [ $rtn -ne 0 ]; then
+ echo "Found files without copyright, exiting."
+ exit 1
+fi
\ No newline at end of file
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 0c15ac2..8afe38c 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -2,6 +2,17 @@
#
# SPDX-License-Identifier: Apache-2.0
+FROM registry.access.redhat.com/ubi8/ubi-minimal:8.4 AS builder
+
+RUN microdnf update -y && \
+ microdnf -y install golang-1.15.14 make && \
+ microdnf clean all
+
+COPY ./ /hri-mgmt-api/
+WORKDIR /hri-mgmt-api
+RUN make
+
+
FROM registry.access.redhat.com/ubi8/ubi-minimal:8.4
ENV HOME=/mgmt-api-release/
@@ -11,14 +22,24 @@ RUN microdnf update -y && \
microdnf -y install shadow-utils.x86_64 && \
microdnf clean all
-COPY src/hri ${HOME}
+COPY --from=builder /hri-mgmt-api/src/hri ${HOME}
-WORKDIR mgmt-api-release
+WORKDIR ${HOME}
-# Setup hri user
+# Setup hri user
+# Disable privilege escalation utilities
+# https://github.com/goodwithtech/dockle/blob/master/CHECKPOINT.md#cis-di-0008
+# Remove the temp directory
RUN groupadd -g 1000 hri && \
useradd --shell /bin/bash -u 1000 -g 1000 -m hri && \
- chown -R hri:hri /mgmt-api-release
+ chown -R hri:hri /mgmt-api-release && \
+ chmod u-s /usr/bin/chage && \
+ chmod g-s /usr/bin/chage && \
+ chmod u-s /usr/bin/gpasswd && \
+ chmod g-s /usr/bin/gpasswd && \
+ chmod u-s /usr/bin/newgrp && \
+ chmod g-s /usr/bin/newgrp && \
+ rm -rf /tmp
USER hri
diff --git a/docker/README.md b/docker/README.md
index e4e59d4..919d3bd 100644
--- a/docker/README.md
+++ b/docker/README.md
@@ -1,30 +1,18 @@
# HRI Management API Docker Image
## Building the Image Locally
-Build the code targeting linux:
+From the base repo directory, run:
```shell script
-cd src
-env GOOS=linux GOACH=amd64 go build
-```
-
-Go back to the base project directory and create the image:
-```shell script
-cd ..
docker build ./ -f docker/Dockerfile
```
+
If you have an error that's asking you "Is the docker daemon running?", and it is, then why don't you go catch it? If it's not, start up Docker by simply opening the Docker application on your computer.
-## Testing
-You can test locally by logging into the IBM Cloud container registry and running the docker container.
-If you are making changes to your local hri-mgmt-api source code (excluding files not pushed to your branch) as you test, you will need to rebuild the image every time you want to run the container with those changes.
-If you come across any trouble with this, please see the Troubleshooting section.
+## Testing Locally
+You can test locally by just running the docker container. If you are making changes to your local hri-mgmt-api source code (excluding files not pushed to your branch) as you test, you will need to rebuild the image every time you want to run the container with those changes. If you come across any trouble with this, please see the Troubleshooting section.
### Docker Run
-When running the container, you will need to pass in a [config file](https://github.com/Alvearie/hri-mgmt-api/blob/main/config.yml).
-If you would like to use TLS, you will need a cert and key as well.
-Mount the file or directory of files you need into the container with the volume flag, `-v`.
-At the end of the run command is where you add your arguments/flags.
-You should just need one to specify where your config file is. (You may include more depending on what you're testing.)
+When running the container, you will need to pass in a [config file](https://github.com/Alvearie/hri-mgmt-api/blob/develop/config.yml). If you would like to use TLS, you will need a cert and key as well. Mount the file or directory of files you need into the container with the volume flag, `-v`. At the end of the run command is where you add your arguments/flags. You should just need one to specify where your config file is. (You may include more depending on what you're testing.)
docker run command template with a mounted config file & no TLS:
```shell script
@@ -63,5 +51,5 @@ docker run --rm -it --entrypoint bash
```
You can add your mounted files using the same -v flag from before:
```shell script
-docker run --rm -v ~/mgmt-api/myConfig.yml:/mgmt-api-release/config/myConfig.yml -it --entrypoint bash
+docker run --rm -v ~/hri-mgmt-api/myConfig.yml:/mgmt-api-release/config/myConfig.yml -it --entrypoint bash
```
diff --git a/run-dreddtests.sh b/run-dreddtests.sh
index 2fc9809..7a731c8 100755
--- a/run-dreddtests.sh
+++ b/run-dreddtests.sh
@@ -4,17 +4,16 @@
#
# SPDX-License-Identifier: Apache-2.0
-npm install -g api-spec-converter
-npm install -g dredd@12.2.0
-gem install dredd_hooks
+sudo npm install -g api-spec-converter
+sudo npm install -g dredd@12.2.0
echo 'Clone Alvearie/hri-api-spec Repo'
git clone https://github.com/Alvearie/hri-api-spec.git hri-api-spec
cd hri-api-spec
-echo "if exists, checkout ${TRAVIS_BRANCH}"
-exists=$(git show-ref refs/remotes/origin/${TRAVIS_BRANCH})
+echo "if exists, checkout ${BRANCH_NAME}"
+exists=$(git show-ref refs/remotes/origin/${BRANCH_NAME})
if [[ -n "$exists" ]]; then
- git checkout ${TRAVIS_BRANCH}
+ git checkout ${BRANCH_NAME}
elif [ -n "$API_SPEC_TAG" ]; then
git checkout -b mgmt-api_auto_dredd $API_SPEC_TAG
else
@@ -26,7 +25,7 @@ api-spec-converter -f openapi_3 -t swagger_2 -s yaml management-api/management.y
tac ../hri-api-spec/management.swagger.yml | sed "1,8d" | tac > tmp && mv tmp ../hri-api-spec/management.swagger.yml
#Initialize the Management API
-../src/hri -config-path=../test/spec/test_config/valid_config.yml -tls-enabled=false >/dev/null &
+../src/hri -config-path=../test/spec/test_config/valid_config.yml -tls-enabled=false -kafka-properties=security.protocol:sasl_ssl,sasl.mechanism:PLAIN,sasl.username:token,sasl.password:$KAFKA_PASSWORD,ssl.endpoint.identification.algorithm:https >/dev/null &
sleep 1
dredd -r xunit -o ../dreddtests.xml management.swagger.yml ${HRI_URL/https/http} --sorted --language=ruby --hookfiles=../test/spec/dredd_hooks.rb --hooks-worker-connect-timeout=5000
diff --git a/run-smoketests.sh b/run-smoketests.sh
index 806dc0a..2d135f9 100755
--- a/run-smoketests.sh
+++ b/run-smoketests.sh
@@ -10,7 +10,7 @@ passing=0
failing=0
output=""
-./src/hri -config-path=test/spec/test_config/valid_config.yml >/dev/null &
+./src/hri -config-path=test/spec/test_config/valid_config.yml -kafka-properties=security.protocol:sasl_ssl,sasl.mechanism:PLAIN,sasl.username:token,sasl.password:$KAFKA_PASSWORD,ssl.endpoint.identification.algorithm:https >/dev/null &
sleep 1
HRI_WEB_SERVER_STATUS=$(curl -k --write-out "%{http_code}\n" --silent "$HRI_URL/healthcheck" )
if [ $HRI_WEB_SERVER_STATUS -eq 200 ]; then
diff --git a/run-unittests.sh b/run-unittests.sh
deleted file mode 100755
index 27c9151..0000000
--- a/run-unittests.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/bash
-
-# (C) Copyright IBM Corp. 2020
-#
-# SPDX-License-Identifier: Apache-2.0
-
-make 2>&1 > unittest
-rtn=$?
-cat unittest
-< unittest go-junit-report > unittest.xml
-rm unittest
-exit $rtn
diff --git a/src/batches/handler.go b/src/batches/handler.go
index e529f05..86e1768 100644
--- a/src/batches/handler.go
+++ b/src/batches/handler.go
@@ -109,7 +109,12 @@ func (h *theHandler) Create(c echo.Context) error {
return c.JSON(http.StatusInternalServerError, response.NewErrorDetail(requestId, msg))
}
- kafkaWriter := kafka.NewWriterFromConfig(h.config)
+ kafkaWriter, err := kafka.NewWriterFromConfig(h.config)
+ if err != nil {
+ logger.Errorln(err.Error())
+ return c.JSON(http.StatusInternalServerError, response.NewErrorDetail(requestId, err.Error()))
+ }
+ defer kafkaWriter.Close()
if h.config.AuthDisabled == false { //Auth Enabled
//JWT claims validation
@@ -229,7 +234,12 @@ func (h *theHandler) SendComplete(c echo.Context) error {
return c.JSON(http.StatusInternalServerError, response.NewErrorDetail(requestId, msg))
}
- kafkaWriter := kafka.NewWriterFromConfig(h.config)
+ kafkaWriter, err := kafka.NewWriterFromConfig(h.config)
+ if err != nil {
+ logger.Errorln(err.Error())
+ return c.JSON(http.StatusInternalServerError, response.NewErrorDetail(requestId, err.Error()))
+ }
+ defer kafkaWriter.Close()
getBatchRequest := model.GetByIdBatch{
TenantId: request.TenantId,
@@ -288,7 +298,12 @@ func (h *theHandler) Terminate(c echo.Context) error {
return c.JSON(http.StatusInternalServerError, response.NewErrorDetail(requestId, msg))
}
- kafkaWriter := kafka.NewWriterFromConfig(h.config)
+ kafkaWriter, err := kafka.NewWriterFromConfig(h.config)
+ if err != nil {
+ logger.Errorln(err.Error())
+ return c.JSON(http.StatusInternalServerError, response.NewErrorDetail(requestId, err.Error()))
+ }
+ defer kafkaWriter.Close()
getBatchRequest := model.GetByIdBatch{
TenantId: request.TenantId,
@@ -347,7 +362,12 @@ func (h *theHandler) ProcessingComplete(c echo.Context) error {
return c.JSON(http.StatusInternalServerError, response.NewErrorDetail(requestId, msg))
}
- kafkaWriter := kafka.NewWriterFromConfig(h.config)
+ kafkaWriter, err := kafka.NewWriterFromConfig(h.config)
+ if err != nil {
+ logger.Errorln(err.Error())
+ return c.JSON(http.StatusInternalServerError, response.NewErrorDetail(requestId, err.Error()))
+ }
+ defer kafkaWriter.Close()
getBatchRequest := model.GetByIdBatch{
TenantId: request.TenantId,
@@ -413,7 +433,12 @@ func (h *theHandler) Fail(c echo.Context) error {
return c.JSON(http.StatusInternalServerError, response.NewErrorDetail(requestId, msg))
}
- kafkaWriter := kafka.NewWriterFromConfig(h.config)
+ kafkaWriter, err := kafka.NewWriterFromConfig(h.config)
+ if err != nil {
+ logger.Errorln(err.Error())
+ return c.JSON(http.StatusInternalServerError, response.NewErrorDetail(requestId, err.Error()))
+ }
+ defer kafkaWriter.Close()
var claims = auth.HriClaims{}
var errResp *response.ErrorDetailResponse
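
Note on the pattern above: every handler now treats `kafka.NewWriterFromConfig` as fallible and closes the writer with `defer`. The writer implementation itself is not part of this diff; the sketch below is only an illustration of the constructor shape the handlers and tests imply (a confluent producer built from `KafkaBrokers` and `KafkaProperties`), not the repository's actual `writer.go`.

```go
// Hypothetical sketch only: mirrors the (writer, error) shape used by the
// handlers above and the "error constructing Kafka producer" message asserted
// in the tests. The real writer.go is not shown in this diff.
package kafkasketch

import (
	"fmt"
	"strings"

	"github.com/Alvearie/hri-mgmt-api/common/config"
	"github.com/confluentinc/confluent-kafka-go/kafka"
)

func newProducerFromConfig(cfg config.Config) (*kafka.Producer, error) {
	// Start from the broker list, then layer on every -kafka-properties entry.
	kafkaConfig := &kafka.ConfigMap{"bootstrap.servers": strings.Join(cfg.KafkaBrokers, ",")}
	for key, value := range cfg.KafkaProperties {
		kafkaConfig.SetKey(key, value)
	}

	producer, err := kafka.NewProducer(kafkaConfig)
	if err != nil {
		return nil, fmt.Errorf("error constructing Kafka producer: %w", err)
	}
	return producer, nil
}
```

Returning the error instead of panicking lets each handler map a bad Kafka configuration to a 500 response, which is exactly what the new "500 kafka client failure" test cases below assert.
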
diff --git a/src/batches/handler_actions_test.go b/src/batches/handler_actions_test.go
index cf406e5..dcf9b73 100644
--- a/src/batches/handler_actions_test.go
+++ b/src/batches/handler_actions_test.go
@@ -242,6 +242,21 @@ func Test_theHandler_SendComplete(t *testing.T) {
expectedCode: http.StatusInternalServerError,
expectedBody: fmt.Sprintf(`{"errorEventId":"%s","errorDescription":"error getting Elastic client: cannot create client: cannot parse url: parse \"https:// a bad url.com\": invalid character \" \" in host name"}`, requestId) + "\n",
},
+ {
+ name: "500 kafka client failure",
+ tenantId: test.ValidTenantId,
+ batchId: test.ValidBatchId,
+ handler: theHandler{
+ config: badKafkaConfig,
+ jwtValidator: fakeAuthValidator{
+ claims: auth.HriClaims{},
+ errResp: nil,
+ },
+ },
+ requestBody: `{"expectedRecordCount": 100}`,
+ expectedCode: http.StatusInternalServerError,
+ expectedBody: fmt.Sprintf(`{"errorEventId":"%s","errorDescription":"error constructing Kafka producer: Invalid value for configuration property \"message.max.bytes\""}`, requestId) + "\n",
+ },
{
name: "500 sendCompete failure",
tenantId: test.ValidTenantId,
@@ -492,6 +507,20 @@ func Test_theHandler_Terminate(t *testing.T) {
expectedCode: http.StatusInternalServerError,
expectedBody: fmt.Sprintf(`{"errorEventId":"%s","errorDescription":"error getting Elastic client: cannot create client: cannot parse url: parse \"https:// a bad url.com\": invalid character \" \" in host name"}`, requestId) + "\n",
},
+ {
+ name: "500 kafka client failure",
+ tenantId: test.ValidTenantId,
+ batchId: test.ValidBatchId,
+ handler: theHandler{
+ config: badKafkaConfig,
+ jwtValidator: fakeAuthValidator{
+ claims: auth.HriClaims{},
+ errResp: nil,
+ },
+ },
+ expectedCode: http.StatusInternalServerError,
+ expectedBody: fmt.Sprintf(`{"errorEventId":"%s","errorDescription":"error constructing Kafka producer: Invalid value for configuration property \"message.max.bytes\""}`, requestId) + "\n",
+ },
{
name: "500 terminate failure",
tenantId: test.ValidTenantId,
@@ -776,6 +805,21 @@ func Test_theHandler_ProcessingComplete(t *testing.T) {
expectedCode: http.StatusInternalServerError,
expectedBody: fmt.Sprintf(`{"errorEventId":"%s","errorDescription":"error getting Elastic client: cannot create client: cannot parse url: parse \"https:// a bad url.com\": invalid character \" \" in host name"}`, requestId) + "\n",
},
+ {
+ name: "500 kafka client failure",
+ tenantId: test.ValidTenantId,
+ batchId: test.ValidBatchId,
+ handler: theHandler{
+ config: badKafkaConfig,
+ jwtValidator: fakeAuthValidator{
+ claims: auth.HriClaims{},
+ errResp: nil,
+ },
+ },
+ requestBody: `{"actualRecordCount":100,"invalidRecordCount":10}`,
+ expectedCode: http.StatusInternalServerError,
+ expectedBody: fmt.Sprintf(`{"errorEventId":"%s","errorDescription":"error constructing Kafka producer: Invalid value for configuration property \"message.max.bytes\""}`, requestId) + "\n",
+ },
{
name: "500 processingCompete failure",
tenantId: test.ValidTenantId,
@@ -1012,6 +1056,21 @@ func Test_theHandler_Fail(t *testing.T) {
expectedCode: http.StatusInternalServerError,
expectedBody: fmt.Sprintf(`{"errorEventId":"%s","errorDescription":"error getting Elastic client: cannot create client: cannot parse url: parse \"https:// a bad url.com\": invalid character \" \" in host name"}`, requestId) + "\n",
},
+ {
+ name: "500 kafka client failure",
+ tenantId: test.ValidTenantId,
+ batchId: test.ValidBatchId,
+ handler: theHandler{
+ config: badKafkaConfig,
+ jwtValidator: fakeAuthValidator{
+ claims: auth.HriClaims{},
+ errResp: nil,
+ },
+ },
+ requestBody: `{"actualRecordCount":100,"invalidRecordCount":10,"failureMessage":"a bad batch"}`,
+ expectedCode: http.StatusInternalServerError,
+ expectedBody: fmt.Sprintf(`{"errorEventId":"%s","errorDescription":"error constructing Kafka producer: Invalid value for configuration property \"message.max.bytes\""}`, requestId) + "\n",
+ },
{
name: "500 fail-action failure",
tenantId: test.ValidTenantId,
@@ -1254,7 +1313,16 @@ var badEsConfig = config.Config{
Validation: false,
ElasticUrl: "https:// a bad url.com",
ElasticServiceCrn: "elasticUsername",
- KafkaUsername: "duranDuran",
- KafkaPassword: "Toto",
KafkaBrokers: []string{"broker-1", "broker-2", "broker-DefLeppard", "broker-CoreyHart"},
}
+
+var badKafkaConfig = config.Config{
+ ConfigPath: "/some/fake/path",
+ OidcIssuer: "https://us-south.appid.blorg.forg",
+ JwtAudienceId: "1234-990-g-catnip-e9ec09a5b2f3",
+ Validation: false,
+ ElasticUrl: "https://elastic-JanetJackson.com",
+ ElasticServiceCrn: "elasticUsername",
+ KafkaBrokers: []string{"broker-1", "broker-2", "broker-DefLeppard", "broker-CoreyHart"},
+ KafkaProperties: config.StringMap{"message.max.bytes": "bad_value"},
+}
diff --git a/src/batches/handler_create_get_test.go b/src/batches/handler_create_get_test.go
index 51404b2..b9762da 100644
--- a/src/batches/handler_create_get_test.go
+++ b/src/batches/handler_create_get_test.go
@@ -165,7 +165,7 @@ func Test_theHandler_Create(t *testing.T) {
expectedBody: "{\"batchId\":\"1234-unique-id\"}\n",
},
{
- name: "should hanlde UTF8 chars",
+ name: "should handle UTF8 chars",
handler: theHandler{
config: testConfig,
jwtValidator: fakeAuthValidator{
@@ -339,6 +339,36 @@ func Test_theHandler_Create(t *testing.T) {
requestBody: specialCharInTopicReqBody,
expectedBody: "{\"errorEventId\":\"\",\"errorDescription\":\"invalid request arguments:\\n- topic (json field in request body) must not contain the following characters: \\\"=\\u003c\\u003e[]{}\"}\n",
},
+ {
+ name: "Elastic client error",
+ handler: theHandler{
+ config: badEsConfig,
+ jwtValidator: fakeAuthValidator{
+ claims: auth.HriClaims{},
+ errResp: nil,
+ },
+ create: nil,
+ },
+ expectedCode: http.StatusInternalServerError,
+ tenant: validTenantId,
+ requestBody: validReqBody,
+ expectedBody: "{\"errorEventId\":\"\",\"errorDescription\":\"error getting Elastic client: cannot create client: cannot parse url: parse \\\"https:// a bad url.com\\\": invalid character \\\" \\\" in host name\"}\n",
+ },
+ {
+ name: "Kafka writer error",
+ handler: theHandler{
+ config: badKafkaConfig,
+ jwtValidator: fakeAuthValidator{
+ claims: auth.HriClaims{},
+ errResp: nil,
+ },
+ create: nil,
+ },
+ expectedCode: http.StatusInternalServerError,
+ tenant: validTenantId,
+ requestBody: validReqBody,
+ expectedBody: "{\"errorEventId\":\"\",\"errorDescription\":\"error constructing Kafka producer: Invalid value for configuration property \\\"message.max.bytes\\\"\"}\n",
+ },
}
e := test.GetTestServer()
@@ -496,6 +526,21 @@ func Test_myHandler_GetById(t *testing.T) {
batchId: validBatchId,
expectedBody: "{\"errorEventId\":\"\",\"errorDescription\":\"Unauthorized tenant access. Tenant 'unauthorized_tenant' is not included in the authorized scopes.\"}\n",
},
+ {
+ name: "Elastic client error",
+ handler: theHandler{
+ config: badEsConfig,
+ jwtValidator: fakeAuthValidator{
+ claims: auth.HriClaims{},
+ errResp: nil,
+ },
+ getById: nil,
+ },
+ tenant: validTenantId,
+ batchId: validBatchId,
+ expectedCode: http.StatusInternalServerError,
+ expectedBody: "{\"errorEventId\":\"\",\"errorDescription\":\"error getting Elastic client: cannot create client: cannot parse url: parse \\\"https:// a bad url.com\\\": invalid character \\\" \\\" in host name\"}\n",
+ },
}
e := test.GetTestServer()
@@ -708,6 +753,20 @@ func Test_myHandler_Get(t *testing.T) {
responseCode: http.StatusUnauthorized,
responseBody: "{\"errorEventId\":\"\",\"errorDescription\":\"Unauthorized tenant access. Tenant '" + unauthorizedTenantId + "' is not included in the authorized scopes.\"}\n",
},
+ {
+ name: "Elastic client error",
+ handler: theHandler{
+ config: badEsConfig,
+ jwtValidator: fakeAuthValidator{
+ claims: auth.HriClaims{},
+ errResp: nil,
+ },
+ get: nil,
+ },
+ tenantId: validTenantId,
+ responseCode: http.StatusInternalServerError,
+ responseBody: "{\"errorEventId\":\"\",\"errorDescription\":\"error getting Elastic client: cannot create client: cannot parse url: parse \\\"https:// a bad url.com\\\": invalid character \\\" \\\" in host name\"}\n",
+ },
}
e := test.GetTestServer()
@@ -793,8 +852,6 @@ func createDefaultTestConfig() config.Config {
config.Validation = false
config.ElasticUrl = "https://elastic-JanetJackson.com"
config.ElasticServiceCrn = "elasticUsername"
- config.KafkaUsername = "duranDuran"
- config.KafkaPassword = "Toto"
config.KafkaBrokers = []string{"broker-1", "broker-2", "broker-DefLeppard", "broker-CoreyHart"}
return config
}
diff --git a/src/common/config/config.go b/src/common/config/config.go
index 17ce07c..3add644 100644
--- a/src/common/config/config.go
+++ b/src/common/config/config.go
@@ -9,6 +9,7 @@ import (
"encoding/pem"
"errors"
"flag"
+ "fmt"
"github.com/peterbourgon/ff/v3"
"github.com/peterbourgon/ff/v3/ffyaml"
"net/url"
@@ -27,10 +28,9 @@ type Config struct {
ElasticPassword string
ElasticCert string
ElasticServiceCrn string
- KafkaUsername string
- KafkaPassword string
- KafkaAdminUrl string
+ KafkaAdminUrl string // required for IBM Event Streams to manage topics
KafkaBrokers StringSlice
+ KafkaProperties StringMap // valid properties: https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md
LogLevel string
NewRelicEnabled bool
NewRelicAppName string
@@ -54,6 +54,32 @@ func (ss *StringSlice) String() string {
return ""
}
+// StringMap is a flag.Value that collects each Set(string) into a map
+// using ':' as the key:value separator and allowing for repeated flags.
+type StringMap map[string]string
+
+// Set implements flag.Value and parses each "key:value" entry into the map.
+func (sm *StringMap) Set(s string) error {
+ if *sm == nil {
+ *sm = make(StringMap)
+ }
+
+ entries := strings.Split(s, ",")
+ for i := range entries {
+ tokens := strings.Split(entries[i], ":")
+ if len(tokens) != 2 {
+ return errors.New("invalid StringMap entry '" + entries[i] + "'; it must contain exactly one ':' to separate the key from the value")
+ }
+ (*sm)[tokens[0]] = tokens[1]
+ }
+ return nil
+}
+
+// This method is required for the flag.Value interface, but is not needed for the hri-mgmt-api.
+func (sm *StringMap) String() string {
+ return fmt.Sprint(*sm)
+}
+
// ValidateConfig Perform verification on the finalized config. Return an error if validation failed.
func ValidateConfig(config Config) error {
if len(config.ConfigPath) == 0 {
@@ -89,12 +115,6 @@ func ValidateConfig(config Config) error {
if config.ElasticServiceCrn == "" {
errorBuilder.WriteString("\n\tAn Elasticsearch service CRN was not specified")
}
- if config.KafkaUsername == "" {
- errorBuilder.WriteString("\n\tA Kafka username was not specified")
- }
- if config.KafkaPassword == "" {
- errorBuilder.WriteString("\n\tA Kafka password was not specified")
- }
if config.KafkaAdminUrl == "" {
errorBuilder.WriteString("\n\tThe Kafka administration url was not specified")
}
@@ -143,10 +163,9 @@ func GetConfig(configPath string, commandLineFlags []string) (Config, error) {
fs.StringVar(&config.ElasticPassword, "elastic-password", "", "(Optional) Elasticsearch password")
fs.StringVar(&config.ElasticCert, "elastic-cert", "", "(Optional) Elasticsearch TLS public certificate")
fs.StringVar(&config.ElasticServiceCrn, "elastic-crn", "", "(Optional) Elasticsearch service CRN")
- fs.StringVar(&config.KafkaUsername, "kafka-username", "", "(Optional) Kafka user name")
- fs.StringVar(&config.KafkaPassword, "kafka-password", "", "(Optional) Kafka password")
fs.StringVar(&config.KafkaAdminUrl, "kafka-admin-url", "", "(Optional) Kafka administration url")
fs.Var(&config.KafkaBrokers, "kafka-brokers", "(Optional) A list of Kafka brokers, separated by \",\"")
+ fs.Var(&config.KafkaProperties, "kafka-properties", "(Optional) A list of Kafka properties, entries separated by \",\", key value pairs separated by \":\"")
fs.StringVar(&config.LogLevel, "log-level", "info", "(Optional) Minimum Log Level for logging output. Available levels are: Trace, Debug, Info, Warning, Error, Fatal and Panic.")
fs.BoolVar(&config.NewRelicEnabled, "new-relic-enabled", false, "(Optional) True to enable New Relic monitoring, false otherwise")
fs.StringVar(&config.NewRelicAppName, "new-relic-app-name", "", "(Optional) Application name to aggregate data under in New Relic")
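
The separate `kafka-username`/`kafka-password` flags are gone; all connection settings now travel through the `-kafka-properties` flag backed by the `StringMap` type above. A minimal, runnable sketch of that parsing (the property values here are placeholders):

```go
package main

import (
	"fmt"

	"github.com/Alvearie/hri-mgmt-api/common/config"
)

func main() {
	var props config.StringMap

	// Same effect as passing:
	//   -kafka-properties=security.protocol:sasl_ssl,sasl.mechanism:PLAIN,sasl.username:token
	if err := props.Set("security.protocol:sasl_ssl,sasl.mechanism:PLAIN,sasl.username:token"); err != nil {
		fmt.Println("parse error:", err)
		return
	}

	fmt.Println(props["security.protocol"]) // sasl_ssl
	fmt.Println(props["sasl.username"])     // token

	// An entry without a ':' separator is rejected.
	fmt.Println(props.Set("not-a-pair"))
}
```

The same entries can also be supplied as a YAML list (as in `config.yml` and `goodConfig.yml` in this diff); each entry ends up going through the same `Set` method.
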
diff --git a/src/common/config/config_test.go b/src/common/config/config_test.go
index 59b63d1..7f42983 100644
--- a/src/common/config/config_test.go
+++ b/src/common/config/config_test.go
@@ -55,10 +55,9 @@ func TestValidateConfig(t *testing.T) {
ElasticPassword: "elasticPassword",
ElasticCert: testCert,
ElasticServiceCrn: "elasticServiceCrn",
- KafkaUsername: "kafkaUsername",
- KafkaPassword: "kafkaPassword",
KafkaAdminUrl: "https://ibm.kafka.com",
KafkaBrokers: StringSlice{"broker 1", "broker 2"},
+ KafkaProperties: StringMap{"sasl.mechanism": "PLAIN", "sasl.username": "kafkaUsername", "sasl.password": "kafkaPassword"},
LogLevel: "info",
NewRelicEnabled: true,
NewRelicAppName: "nrAppName",
@@ -82,7 +81,6 @@ func TestValidateConfig(t *testing.T) {
"\n\tAn Elasticsearch base URL was not specified\n\tAn Elasticsearch username was not specified" +
"\n\tAn Elasticsearch password was not specified\n\tAn Elasticsearch certificate was not specified" +
"\n\tAn Elasticsearch service CRN was not specified" +
- "\n\tA Kafka username was not specified\n\tA Kafka password was not specified" +
"\n\tThe Kafka administration url was not specified" + "\n\tNo Kafka brokers were defined" +
"\n\tTLS is enabled but a path to a TLS certificate for the server was not specified" +
"\n\tTLS is enabled but a path to a TLS key for the server was not specified",
@@ -100,8 +98,6 @@ func TestValidateConfig(t *testing.T) {
ElasticPassword: "elasticPassword",
ElasticCert: testCert,
ElasticServiceCrn: "elasticServiceCrn",
- KafkaUsername: "kafkaUsername",
- KafkaPassword: "kafkaPassword",
KafkaAdminUrl: "https://ibm.kafka.com",
KafkaBrokers: StringSlice{"broker 1", "broker 2"},
LogLevel: "info",
@@ -124,8 +120,6 @@ func TestValidateConfig(t *testing.T) {
ElasticPassword: "elasticPassword",
ElasticCert: testCert,
ElasticServiceCrn: "elasticServiceCrn",
- KafkaUsername: "kafkaUsername",
- KafkaPassword: "kafkaPassword",
KafkaAdminUrl: "https://ibm.kafka.com",
KafkaBrokers: StringSlice{"broker 1", "broker 2"},
LogLevel: "info",
@@ -145,8 +139,6 @@ func TestValidateConfig(t *testing.T) {
ElasticPassword: "elasticPassword",
ElasticCert: testCert,
ElasticServiceCrn: "elasticServiceCrn",
- KafkaUsername: "kafkaUsername",
- KafkaPassword: "kafkaPassword",
KafkaAdminUrl: "https://ibm.kafka.com",
KafkaBrokers: StringSlice{"broker 1", "broker 2"},
LogLevel: "info",
@@ -171,8 +163,6 @@ func TestValidateConfig(t *testing.T) {
ElasticPassword: "elasticPassword",
ElasticCert: "Invalid Certificate",
ElasticServiceCrn: "elasticServiceCrn",
- KafkaUsername: "kafkaUsername",
- KafkaPassword: "kafkaPassword",
KafkaAdminUrl: "https://ibm.kafka.com",
KafkaBrokers: StringSlice{"broker 1", "broker 2"},
LogLevel: "info",
@@ -254,10 +244,9 @@ func TestGetConfig(t *testing.T) {
ElasticPassword: "elasticPassword",
ElasticCert: testCert,
ElasticServiceCrn: "elasticCrn",
- KafkaUsername: "kafkaUsername",
- KafkaPassword: "kafkaPassword",
KafkaAdminUrl: "https://ibm.kafka.com",
KafkaBrokers: StringSlice{"broker1", "broker2"},
+ KafkaProperties: StringMap{"sasl.mechanism": "PLAIN", "sasl.username": "kafkaUsername", "sasl.password": "kafkaPassword"},
LogLevel: "info",
NewRelicEnabled: true,
NewRelicAppName: "nrAppName",
diff --git a/src/common/config/goodConfig.yml b/src/common/config/goodConfig.yml
index 55da569..9830dce 100644
--- a/src/common/config/goodConfig.yml
+++ b/src/common/config/goodConfig.yml
@@ -25,12 +25,14 @@ elastic-cert: |
+PQzp/B9cQmOO8dCe049Q3oaUA==
-----END CERTIFICATE-----
elastic-crn: "elasticCrn"
-kafka-username: "kafkaUsername"
-kafka-password: "kafkaPassword"
kafka-admin-url: "https://ibm.kafka.com"
kafka-brokers:
- "broker 1"
- "broker 2"
+kafka-properties:
+ - "sasl.mechanism:PLAIN"
+ - "sasl.username:kafkaUsername"
+ - "sasl.password:kafkaPassword"
new-relic-enabled: true
new-relic-app-name: "nrAppName"
# New relic License Keys have to be 40 characters long
diff --git a/src/common/elastic/client_test.go b/src/common/elastic/client_test.go
index a0fd486..c0c26cb 100644
--- a/src/common/elastic/client_test.go
+++ b/src/common/elastic/client_test.go
@@ -12,7 +12,6 @@ import (
"fmt"
"github.com/Alvearie/hri-mgmt-api/common/config"
"github.com/Alvearie/hri-mgmt-api/common/logwrapper"
- "github.com/Alvearie/hri-mgmt-api/common/response"
"github.com/Alvearie/hri-mgmt-api/common/test"
"github.com/IBM/resource-controller-go-sdk-generator/build/generated"
"github.com/elastic/go-elasticsearch/v7"
@@ -40,22 +39,18 @@ func TestResponseError(t *testing.T) {
responseMessage := "response message"
testCases := []struct {
- name string
- code int
- error error
- expectedResponse map[string]interface{}
+ name string
+ code int
+ error error
}{
{
- name: "Elastic Error Code and Error in Response",
- code: http.StatusNotFound,
- error: fmt.Errorf("error message"),
- expectedResponse: response.Error(http.StatusNotFound, "response message: error message"),
+ name: "Elastic Error Code and Error in Response",
+ code: http.StatusNotFound,
+ error: fmt.Errorf("error message"),
},
{
name: "Elastic Error Code and No Error in Response",
code: http.StatusNotFound,
- expectedResponse: response.Error(http.StatusNotFound,
- fmt.Sprintf(responseMessage+": "+msgUnexpectedErr, http.StatusNotFound)),
},
}
diff --git a/src/common/kafka/confluent_producer_mock.go b/src/common/kafka/confluent_producer_mock.go
new file mode 100644
index 0000000..39896e2
--- /dev/null
+++ b/src/common/kafka/confluent_producer_mock.go
@@ -0,0 +1,89 @@
+// Code generated by MockGen. DO NOT EDIT.
+// Source: src/common/kafka/writer.go
+
+// Package mock_kafka is a generated GoMock package.
+package kafka
+
+import (
+ reflect "reflect"
+
+ kafka "github.com/confluentinc/confluent-kafka-go/kafka"
+ gomock "github.com/golang/mock/gomock"
+)
+
+// MockconfluentProducer is a mock of confluentProducer interface.
+type MockconfluentProducer struct {
+ ctrl *gomock.Controller
+ recorder *MockconfluentProducerMockRecorder
+}
+
+// MockconfluentProducerMockRecorder is the mock recorder for MockconfluentProducer.
+type MockconfluentProducerMockRecorder struct {
+ mock *MockconfluentProducer
+}
+
+// NewMockconfluentProducer creates a new mock instance.
+func NewMockconfluentProducer(ctrl *gomock.Controller) *MockconfluentProducer {
+ mock := &MockconfluentProducer{ctrl: ctrl}
+ mock.recorder = &MockconfluentProducerMockRecorder{mock}
+ return mock
+}
+
+// EXPECT returns an object that allows the caller to indicate expected use.
+func (m *MockconfluentProducer) EXPECT() *MockconfluentProducerMockRecorder {
+ return m.recorder
+}
+
+// Close mocks base method.
+func (m *MockconfluentProducer) Close() {
+ m.ctrl.T.Helper()
+ m.ctrl.Call(m, "Close")
+}
+
+// Close indicates an expected call of Close.
+func (mr *MockconfluentProducerMockRecorder) Close() *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Close", reflect.TypeOf((*MockconfluentProducer)(nil).Close))
+}
+
+// Events mocks base method.
+func (m *MockconfluentProducer) Events() chan kafka.Event {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "Events")
+ ret0, _ := ret[0].(chan kafka.Event)
+ return ret0
+}
+
+// Events indicates an expected call of Events.
+func (mr *MockconfluentProducerMockRecorder) Events() *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Events", reflect.TypeOf((*MockconfluentProducer)(nil).Events))
+}
+
+// Flush mocks base method.
+func (m *MockconfluentProducer) Flush(arg0 int) int {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "Flush", arg0)
+ ret0, _ := ret[0].(int)
+ return ret0
+}
+
+// Flush indicates an expected call of Flush.
+func (mr *MockconfluentProducerMockRecorder) Flush(arg0 interface{}) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Flush", reflect.TypeOf((*MockconfluentProducer)(nil).Flush), arg0)
+}
+
+// Produce mocks base method.
+func (m *MockconfluentProducer) Produce(arg0 *kafka.Message, arg1 chan kafka.Event) error {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "Produce", arg0, arg1)
+ ret0, _ := ret[0].(error)
+ return ret0
+}
+
+// Produce indicates an expected call of Produce.
+func (mr *MockconfluentProducerMockRecorder) Produce(arg0, arg1 interface{}) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Produce", reflect.TypeOf((*MockconfluentProducer)(nil).Produce), arg0, arg1)
+}
diff --git a/src/common/kafka/connector.go b/src/common/kafka/connector.go
deleted file mode 100644
index c028823..0000000
--- a/src/common/kafka/connector.go
+++ /dev/null
@@ -1,74 +0,0 @@
-/**
- * (C) Copyright IBM Corp. 2020
- *
- * SPDX-License-Identifier: Apache-2.0
- */
-package kafka
-
-import (
- "context"
- "crypto/tls"
- "fmt"
- "github.com/Alvearie/hri-mgmt-api/common/config"
- "github.com/Alvearie/hri-mgmt-api/common/logwrapper"
- kg "github.com/segmentio/kafka-go"
- "github.com/segmentio/kafka-go/sasl/plain"
- "github.com/sirupsen/logrus"
- "time"
-)
-
-const (
- kafkaConnectionFailMsg string = "connecting to Kafka failed, error detail: %w"
- connectTimeout = 30 * time.Second
- defaultNetwork string = "tcp"
- defaultTLSVersion = tls.VersionTLS12
-)
-
-type ContextDialer interface {
- DialContext(ctx context.Context, networkType string, address string) (*kg.Conn, error)
-}
-
-func CreateDialerFromConfig(config config.Config) *kg.Dialer {
- user := config.KafkaUsername
- password := config.KafkaPassword
- dialer := &kg.Dialer{
- SASLMechanism: plain.Mechanism{Username: user, Password: password},
- TLS: &tls.Config{
- MaxVersion: defaultTLSVersion,
- },
- }
- return dialer
-}
-
-func connectionFromConfig(config config.Config, dialer ContextDialer) (*kg.Conn, error) {
- prefix := "KafkaConnector"
- var logger = logwrapper.GetMyLogger("", prefix)
- brokers := config.KafkaBrokers
-
- logger.Debugln("Getting Kafka connection...")
- conn, err := getKafkaConn(dialer, brokers, logger)
- if err != nil {
- getConnectionError := fmt.Errorf(kafkaConnectionFailMsg, err)
- return nil, getConnectionError
- }
-
- return conn, err
-}
-
-func getKafkaConn(dialer ContextDialer, brokers []string, logger logrus.FieldLogger) (conn *kg.Conn, err error) {
- ctx, cancel := context.WithTimeout(context.Background(), connectTimeout)
- defer cancel()
-
- for _, brokerAddr := range brokers {
- logger.Debugf("selected broker address: " + brokerAddr + "\n")
- if conn, err = dialer.DialContext(ctx, defaultNetwork, brokerAddr); err == nil {
- return conn, nil
- }
- }
- return
-}
-
-func ConnectionFromConfig(config config.Config) (*kg.Conn, error) {
- dialer := CreateDialerFromConfig(config)
- return connectionFromConfig(config, dialer)
-}
diff --git a/src/common/kafka/connector_test.go b/src/common/kafka/connector_test.go
deleted file mode 100644
index db16868..0000000
--- a/src/common/kafka/connector_test.go
+++ /dev/null
@@ -1,131 +0,0 @@
-/**
- * (C) Copyright IBM Corp. 2020
- *
- * SPDX-License-Identifier: Apache-2.0
- */
-package kafka
-
-import (
- "errors"
- "fmt"
- "github.com/Alvearie/hri-mgmt-api/common/config"
- "github.com/Alvearie/hri-mgmt-api/common/test"
- "github.com/golang/mock/gomock"
- kg "github.com/segmentio/kafka-go"
- "github.com/stretchr/testify/assert"
- "testing"
-)
-
-const (
- testBroker1 string = "broker-0-porcupine.kafka.eventstreams.monkey.ibm.com:9093"
- testBroker2 string = "broker-1-porcupine.kafka.eventstreams.monkey.ibm.com:9093"
- testBroker3 string = "broker-2-porcupine.kafka.eventstreams.monkey.ibm.com:9093"
- testBroker4 string = "broker-3-porcupine.kafka.eventstreams.monkey.ibm.com:9093"
- testBroker5 string = "broker-4-porcupine.kafka.eventstreams.monkey.ibm.com:9093"
- badBroker string = "bad-broker-address.monkey.ibm.com:9093"
-)
-
-func TestConnectorSuccess(t *testing.T) {
- config := config.Config{
- KafkaUsername: "token",
- KafkaPassword: "password",
- KafkaBrokers: config.StringSlice{testBroker1, testBroker2, testBroker3},
- }
-
- controller := gomock.NewController(t)
- defer controller.Finish()
-
- emptyConn := &kg.Conn{} //Upon Connection Success - we return a valid Conn obj.fakeConn := &kg.Conn{}
- mockDialer := test.NewMockContextDialer(controller)
- mockDialer.
- EXPECT().
- DialContext(gomock.Any(), defaultNetwork, testBroker1).
- Return(emptyConn, nil)
-
- rtnConn, err := connectionFromConfig(config, mockDialer)
- if assert.NoError(t, err) {
- assert.Equal(t, emptyConn, rtnConn)
- }
-}
-
-func TestConnector_ShouldIterateOverBrokerAddresses2Failures(t *testing.T) {
- config := config.Config{
- KafkaUsername: "token",
- KafkaPassword: "password",
- KafkaBrokers: config.StringSlice{testBroker1, testBroker2, testBroker3, testBroker4, testBroker5},
- }
-
- controller := gomock.NewController(t)
- defer controller.Finish()
- connError := errors.New("dial tcp 123.45.321.34:9093: i/o timeout ")
-
- emptyConn := &kg.Conn{} //Upon Connection Success - we return a valid Conn obj.
- mockDialer := test.NewMockContextDialer(controller)
-
- firstCall := mockDialer.
- EXPECT().
- DialContext(gomock.Any(), defaultNetwork, testBroker1).
- Return(nil, connError)
-
- secondCall := mockDialer.
- EXPECT().
- DialContext(gomock.Any(), defaultNetwork, testBroker2).
- Return(nil, connError).
- After(firstCall)
-
- mockDialer.
- EXPECT().
- DialContext(gomock.Any(), defaultNetwork, testBroker3).
- Return(emptyConn, nil).
- After(secondCall)
-
- rtnConn, err := connectionFromConfig(config, mockDialer)
- if assert.NoError(t, err) {
- assert.Equal(t, emptyConn, rtnConn)
- }
-}
-
-func TestConnectorConnectionFailure(t *testing.T) {
- config := config.Config{
- KafkaUsername: "token",
- KafkaPassword: "password",
- KafkaBrokers: config.StringSlice{badBroker},
- }
-
- controller := gomock.NewController(t)
- defer controller.Finish()
- connError := errors.New("dial tcp 123.45.321.34:9093: i/o timeout ")
- mockDialer := test.NewMockContextDialer(controller)
-
- mockDialer.
- EXPECT().
- DialContext(gomock.Any(), defaultNetwork, badBroker).
- Return(nil, connError)
-
- _, err := connectionFromConfig(config, mockDialer)
- expectedErr := fmt.Errorf(kafkaConnectionFailMsg, connError)
- assert.Equal(t, expectedErr, err)
-}
-
-func TestConnectionFromConfig(t *testing.T) {
- tests := []struct {
- name string
- config config.Config
- }{
- {
- name: "Good params",
- config: config.Config{
- KafkaUsername: "token",
- KafkaPassword: "password",
- KafkaBrokers: config.StringSlice{testBroker1, testBroker2, testBroker3},
- },
- },
- }
-
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- _, err := ConnectionFromConfig(tt.config)
- assert.Error(t, err)
- })
- }
-}
diff --git a/src/common/kafka/healthchecker.go b/src/common/kafka/healthchecker.go
new file mode 100644
index 0000000..d20a077
--- /dev/null
+++ b/src/common/kafka/healthchecker.go
@@ -0,0 +1,60 @@
+/**
+ * (C) Copyright IBM Corp. 2021
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ */
+package kafka
+
+import (
+ "errors"
+ "fmt"
+ "github.com/Alvearie/hri-mgmt-api/common/config"
+ "github.com/confluentinc/confluent-kafka-go/kafka"
+ "strings"
+)
+
+// HealthChecker Public interface
+type HealthChecker interface {
+ Check() error
+ Close()
+}
+
+// internal type that meets the HealthChecker interface
+type confluentHealthChecker struct {
+ confluentAdminClient
+}
+
+// internal interface for unit testing
+type confluentAdminClient interface {
+ GetMetadata(*string, bool, int) (*kafka.Metadata, error)
+ Close()
+}
+
+func NewHealthChecker(config config.Config) (HealthChecker, error) {
+ kafkaConfig := &kafka.ConfigMap{"bootstrap.servers": strings.Join(config.KafkaBrokers, ",")}
+ for key, value := range config.KafkaProperties {
+ kafkaConfig.SetKey(key, value)
+ }
+
+ client, err := kafka.NewAdminClient(kafkaConfig)
+ if err != nil {
+ return nil, fmt.Errorf("error constructing Kafka admin client: %w", err)
+ }
+
+ return confluentHealthChecker{client}, nil
+}
+
+func (chc confluentHealthChecker) Check() error {
+ metadata, err := chc.GetMetadata(nil, true, 1000)
+ if err != nil {
+ return fmt.Errorf("error getting Kafka topics: %w", err)
+ }
+
+ if metadata == nil || len(metadata.Brokers) == 0 {
+ return errors.New("error getting Kafka topics; returned metadata or list of brokers was empty")
+ }
+
+ // We can't assume that there are any topics, so if `GetMetadata()` succeeds without an error
+ // then we can assume Kafka is up and we can connect
+ return nil
+}
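
A minimal usage sketch for the health checker added above, assuming placeholder broker and property values; only NewHealthChecker, Check, and Close come from this change, and the surrounding main function and logging are illustrative:

package main

import (
	"log"

	"github.com/Alvearie/hri-mgmt-api/common/config"
	"github.com/Alvearie/hri-mgmt-api/common/kafka"
)

func main() {
	// Placeholder config; a real service populates this from flags/env.
	cfg := config.Config{
		KafkaBrokers:    []string{"broker-0.example.com:9093"},
		KafkaProperties: config.StringMap{},
	}

	healthChecker, err := kafka.NewHealthChecker(cfg)
	if err != nil {
		log.Fatalf("could not create Kafka health checker: %v", err)
	}
	defer healthChecker.Close()

	// Check() requests cluster metadata; a nil error means Kafka is reachable.
	if err := healthChecker.Check(); err != nil {
		log.Printf("Kafka health check failed: %v", err)
	}
}
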
diff --git a/src/common/kafka/healthchecker_test.go b/src/common/kafka/healthchecker_test.go
new file mode 100644
index 0000000..5035311
--- /dev/null
+++ b/src/common/kafka/healthchecker_test.go
@@ -0,0 +1,121 @@
+/**
+ * (C) Copyright IBM Corp. 2021
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ */
+package kafka
+
+import (
+ "errors"
+ "fmt"
+ "github.com/Alvearie/hri-mgmt-api/common/config"
+ "github.com/confluentinc/confluent-kafka-go/kafka"
+ "github.com/stretchr/testify/assert"
+ "testing"
+)
+
+func TestNewHealthChecker(t *testing.T) {
+ tests := []struct {
+ name string
+ config config.Config
+ expErr error
+ }{
+ {
+ name: "successful construction",
+ config: config.Config{KafkaBrokers: []string{"broker1", "broker2"}, KafkaProperties: config.StringMap{"message.max.bytes": "10000"}},
+ expErr: nil,
+ },
+ {
+ name: "bad config",
+ config: config.Config{KafkaBrokers: []string{"broker1", "broker2"}, KafkaProperties: config.StringMap{"message.max.bytes": "bad_value"}},
+ expErr: fmt.Errorf("error constructing Kafka admin client: %w",
+ kafka.NewError(-186, "Invalid value for configuration property \"message.max.bytes\"", false)),
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ _, err := NewHealthChecker(tt.config)
+
+ assert.Equal(t, tt.expErr, err)
+ })
+ }
+}
+
+type fakeAdminClient struct {
+ t *testing.T
+ metadata *kafka.Metadata
+ err error
+}
+
+func (fac fakeAdminClient) GetMetadata(topics *string, allTopics bool, timeout int) (*kafka.Metadata, error) {
+ assert.Nil(fac.t, topics)
+ assert.True(fac.t, allTopics)
+ assert.Equal(fac.t, 1000, timeout)
+
+ return fac.metadata, fac.err
+}
+
+func (fac fakeAdminClient) Close() {}
+
+func TestConfluentHealthChecker_Check(t *testing.T) {
+ tests := []struct {
+ name string
+ client fakeAdminClient
+ expErr error
+ }{
+ {
+ name: "successful health check",
+ client: fakeAdminClient{
+ t: t,
+ metadata: &kafka.Metadata{
+ Brokers: []kafka.BrokerMetadata{{1, "broker-host", 9093}},
+ Topics: map[string]kafka.TopicMetadata{"topic": {Topic: "topic"}},
+ OriginatingBroker: kafka.BrokerMetadata{},
+ },
+ err: nil,
+ },
+ expErr: nil,
+ },
+ {
+ name: "error on health check",
+ client: fakeAdminClient{
+ t: t,
+ metadata: nil,
+ err: errors.New("connection timeout"),
+ },
+ expErr: fmt.Errorf("error getting Kafka topics: %w", errors.New("connection timeout")),
+ },
+ {
+ name: "health check response missing metadata",
+ client: fakeAdminClient{
+ t: t,
+ metadata: nil,
+ err: nil,
+ },
+ expErr: errors.New("error getting Kafka topics; returned metadata or list of brokers was empty"),
+ },
+ {
+ name: "health check response missing brokers",
+ client: fakeAdminClient{
+ t: t,
+ metadata: &kafka.Metadata{
+ Brokers: []kafka.BrokerMetadata{},
+ Topics: map[string]kafka.TopicMetadata{"topic": {Topic: "topic"}},
+ OriginatingBroker: kafka.BrokerMetadata{},
+ },
+ err: nil,
+ },
+ expErr: errors.New("error getting Kafka topics; returned metadata or list of brokers was empty"),
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ healthChecker := confluentHealthChecker{tt.client}
+ err := healthChecker.Check()
+
+ assert.Equal(t, tt.expErr, err)
+ })
+ }
+}
diff --git a/src/common/kafka/kafka_healthcheck.go b/src/common/kafka/kafka_healthcheck.go
deleted file mode 100644
index cb8c8c6..0000000
--- a/src/common/kafka/kafka_healthcheck.go
+++ /dev/null
@@ -1,25 +0,0 @@
-/**
- * (C) Copyright IBM Corp. 2020
- *
- * SPDX-License-Identifier: Apache-2.0
- */
-package kafka
-
-import (
- kg "github.com/segmentio/kafka-go"
-)
-
-type PartitionReader interface {
- ReadPartitions(topics ...string) (partitions []kg.Partition, err error)
- Close() error
-}
-
-func CheckConnection(pr PartitionReader) (isAvailable bool, err error) {
- _, err = pr.ReadPartitions()
- if err != nil {
- return false, err
- }
- // We can't assume that there are any topics, so if `pr.ReadPartitions()` succeeds without an error
- // then we can assume Kafka is up and we can connect
- return true, nil
-}
diff --git a/src/common/kafka/kafka_healthcheck_test.go b/src/common/kafka/kafka_healthcheck_test.go
deleted file mode 100644
index 9817b26..0000000
--- a/src/common/kafka/kafka_healthcheck_test.go
+++ /dev/null
@@ -1,80 +0,0 @@
-/**
- * (C) Copyright IBM Corp. 2020
- *
- * SPDX-License-Identifier: Apache-2.0
- */
-package kafka
-
-import (
- "errors"
- "github.com/Alvearie/hri-mgmt-api/common/test"
- kg "github.com/segmentio/kafka-go"
- "github.com/stretchr/testify/assert"
- "reflect"
- "strconv"
- "testing"
-)
-
-func TestReadKafkaPartition(t *testing.T) {
- var twoPartitions = test.GetFakeTwoPartitionSlice()
- zeroPartitions := []kg.Partition{}
-
- testCases := []struct {
- name string
- reader test.FakePartitionReader
- expected bool
- expectedErr error
- }{
- {
- name: "success-multiple-partitions",
- reader: test.FakePartitionReader{
- T: t,
- Partitions: twoPartitions,
- Err: nil,
- },
- expected: true,
- expectedErr: nil,
- },
- {
- name: "success-zero-partitions-slice",
- reader: test.FakePartitionReader{
- T: t,
- Partitions: zeroPartitions,
- Err: nil,
- },
- expected: true,
- expectedErr: nil,
- },
- {
- name: "failure-partition-reader-error",
- reader: test.FakePartitionReader{
- T: t,
- Partitions: nil,
- Err: errors.New("Error contacting Kafka cluster: could not read partitions"),
- },
- expected: false,
- expectedErr: errors.New("Error contacting Kafka cluster: could not read partitions"),
- },
- {
- name: "success-nil-partitions",
- reader: test.FakePartitionReader{
- T: t,
- Partitions: nil,
- Err: nil,
- },
- expected: true,
- expectedErr: nil,
- },
- }
-
- for _, tc := range testCases {
- t.Run(tc.name, func(t *testing.T) {
- actual, err := CheckConnection(tc.reader)
- if !reflect.DeepEqual(err, tc.expectedErr) {
- t.Errorf("Expected err to be %q but it was %q", tc.expectedErr, err)
- }
- assert.Equal(t, tc.expected, actual,
- "Expected Kafka HealthCheck (Conn) = "+strconv.FormatBool(tc.expected)+" for testCase: "+tc.name)
- })
- }
-}
diff --git a/src/common/kafka/sasl_oauth.go b/src/common/kafka/sasl_oauth.go
deleted file mode 100644
index e77ec22..0000000
--- a/src/common/kafka/sasl_oauth.go
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- * (C) Copyright IBM Corp. 2020
- *
- * SPDX-License-Identifier: Apache-2.0
- */
-package kafka
-
-import (
- "context"
- "fmt"
- "github.com/segmentio/kafka-go/sasl"
-)
-
-const (
- gs2Header = "n,,"
- kvsep = "\x01"
-)
-
-// OAuthBearer implements a minimal OAUTHBEARER SASL mechanism for Kafka-go
-// Implementation is based off the following documentation: https://tools.ietf.org/html/rfc7628
-type OAuthBearer struct {
- Token string
-}
-
-func (OAuthBearer) Name() string {
- return "OAUTHBEARER"
-}
-
-func (oath OAuthBearer) Start(_ context.Context) (sasl.StateMachine, []byte, error) {
- return oath, []byte(fmt.Sprintf("%s%sauth=Bearer %s%s%s", gs2Header, kvsep, oath.Token, kvsep, kvsep)), nil
-}
-
-func (oath OAuthBearer) Next(_ context.Context, challenge []byte) (bool, []byte, error) {
- if challenge != nil && len(challenge) > 0 {
- // error scenario
- fmt.Println(string(challenge))
- return false, []byte("\x01"), nil
- }
- return true, nil, nil
-}
diff --git a/src/common/kafka/writer.go b/src/common/kafka/writer.go
index 62eb334..d643a8a 100644
--- a/src/common/kafka/writer.go
+++ b/src/common/kafka/writer.go
@@ -6,56 +6,78 @@
package kafka
import (
- "context"
"encoding/json"
+ "fmt"
"github.com/Alvearie/hri-mgmt-api/common/config"
- kg "github.com/segmentio/kafka-go"
+ "github.com/confluentinc/confluent-kafka-go/kafka"
+ "strings"
)
type Writer interface {
Write(topic string, key string, val map[string]interface{}) error
+ Close()
}
-type KafkaConnect struct {
- Brokers []string
- Dialer *kg.Dialer
+// internal type that meets the Writer interface
+type confluentKafkaWriter struct {
+ confluentProducer
}
-func (kc KafkaConnect) Write(topic string, key string, val map[string]interface{}) error {
- jsonVal, err := json.Marshal(val)
+// internal interface for unit testing
+type confluentProducer interface {
+ Produce(*kafka.Message, chan kafka.Event) error
+ Events() chan kafka.Event
+ Flush(int) int
+ Close()
+}
+
+func NewWriterFromConfig(config config.Config) (Writer, error) {
+ kafkaConfig := &kafka.ConfigMap{"bootstrap.servers": strings.Join(config.KafkaBrokers, ",")}
+ for key, value := range config.KafkaProperties {
+ kafkaConfig.SetKey(key, value)
+ }
+
+ producer, err := kafka.NewProducer(kafkaConfig)
if err != nil {
- return err
+ return nil, fmt.Errorf("error constructing Kafka producer: %w", err)
}
- // NOTE: The Kafka Go Library used for this writer is DEPRECATED. There has been an attempt to update
- // the version in the past (shoutout to Aram), but while going down that rabbit hole, he discovered
- // copious issues. Thus, full implementation of the update was adjourned.
-
- // Configure batch size so that message is sent immediately, instead of waiting for 1-second timeout.
- // We also must set the TLS version to match example provided in Event Streams "Getting Started" guide
- writer := kg.NewWriter(kg.WriterConfig{
- Brokers: kc.Brokers,
- Topic: topic,
- Balancer: kg.Murmur2Balancer{},
- BatchSize: 1,
- Dialer: kc.Dialer,
- })
-
- defer writer.Close()
-
- // Context object is only useful in async mode, to cancel jobs, but we still must provide an instance
- return writer.WriteMessages(
- context.Background(),
- kg.Message{Key: []byte(key), Value: []byte(jsonVal)},
- )
+ return confluentKafkaWriter{
+ producer,
+ }, nil
}
-func NewWriterFromConfig(config config.Config) Writer {
- brokers := config.KafkaBrokers
- dialer := CreateDialerFromConfig(config)
+// This method is not thread-safe. Each thread needs its own confluentKafkaWriter instance.
+func (cfk confluentKafkaWriter) Write(topic string, key string, val map[string]interface{}) error {
+ jsonVal, err := json.Marshal(val)
+ if err != nil {
+ return fmt.Errorf("error marshaling kafka message: %w", err)
+ }
- return KafkaConnect{
- Brokers: brokers,
- Dialer: dialer,
+ err = cfk.Produce(&kafka.Message{
+ TopicPartition: kafka.TopicPartition{Topic: &topic, Partition: kafka.PartitionAny},
+ Key: []byte(key),
+ Value: jsonVal,
+ }, nil) // nil uses the default producer channel
+
+ if err != nil {
+ return fmt.Errorf("kafka producer error: %w", err)
}
+
+ // wait up to 1 second for messages to be written
+ cfk.Flush(1000)
+
+ // The Confluent Kafka library sends the messages in a separate thread and
+ // a channel is used to communicate a result back. This call blocks until
+ // that thread sends the result back over the channel, and lets us verify
+ // there weren't any issues. This pattern was taken from the library's examples:
+ // https://github.com/confluentinc/confluent-kafka-go/blob/master/examples/producer_example/producer_example.go
+ e := <-cfk.Events()
+ m := e.(*kafka.Message)
+
+ if m.TopicPartition.Error != nil {
+ return fmt.Errorf("kafka producer error: %w", m.TopicPartition.Error)
+ }
+
+ return nil
}
diff --git a/src/common/kafka/writer_test.go b/src/common/kafka/writer_test.go
index 4ec33b1..03be515 100644
--- a/src/common/kafka/writer_test.go
+++ b/src/common/kafka/writer_test.go
@@ -6,45 +6,144 @@
package kafka
import (
+ "encoding/json"
+ "errors"
+ "fmt"
"github.com/Alvearie/hri-mgmt-api/common/config"
- "github.com/Alvearie/hri-mgmt-api/common/test"
+ "github.com/confluentinc/confluent-kafka-go/kafka"
+ "github.com/golang/mock/gomock"
"github.com/stretchr/testify/assert"
- "math"
"testing"
)
-const (
- batchId string = "batch987"
- topicBase string = "batchTopic"
-)
-
-func TestCreateNewWriterFromConfig(t *testing.T) {
- cfg, err := config.GetConfig(test.FindConfigPath(t), nil)
- if err != nil {
- t.Error(err)
+func TestNewConfluentWriter(t *testing.T) {
+ tests := []struct {
+ name string
+ config config.Config
+ expErr error
+ }{
+ {
+ name: "successful construction",
+ config: config.Config{KafkaBrokers: []string{"broker1", "broker2"}, KafkaProperties: config.StringMap{"message.max.bytes": "10000"}},
+ expErr: nil,
+ },
+ {
+ name: "bad config",
+ config: config.Config{KafkaBrokers: []string{"broker1", "broker2"}, KafkaProperties: config.StringMap{"message.max.bytes": "bad_value"}},
+ expErr: fmt.Errorf("error constructing Kafka producer: %w",
+ kafka.NewError(-186, "Invalid value for configuration property \"message.max.bytes\"", false)),
+ },
}
- writer := NewWriterFromConfig(cfg)
- if writer == nil {
- t.Fatal("Returned Kafka client was nil")
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ _, err := NewWriterFromConfig(tt.config)
+
+ assert.Equal(t, tt.expErr, err)
+ })
}
}
-func TestWriterFromConfigJsonErrors(t *testing.T) {
- cfg, err := config.GetConfig(test.FindConfigPath(t), nil)
+func TestConfluentKafkaWriter_Write(t *testing.T) {
+ const (
+ topic = "a.topic"
+ key = "a_unique_key"
+ )
+ goodValue := map[string]interface{}{"field1": "value", "field2": 10}
+ var noError error = nil
+
+ tests := []struct {
+ name string
+ topic string
+ key string
+ value map[string]interface{}
+ produceErr *error
+ partitionErr *error
+ expError error
+ }{
+ {
+ name: "successfully send a message",
+ topic: topic,
+ key: key,
+ value: goodValue,
+ produceErr: &noError,
+ partitionErr: &noError,
+ expError: nil,
+ },
+ {
+ name: "produce error",
+ topic: topic,
+ key: key,
+ value: goodValue,
+ produceErr: errPtr(errors.New("a message failure")),
+ partitionErr: nil,
+ expError: fmt.Errorf("kafka producer error: %w", errors.New("a message failure")),
+ },
+ {
+ name: "TopicPartition error",
+ topic: topic,
+ key: key,
+ value: goodValue,
+ produceErr: &noError,
+ partitionErr: errPtr(errors.New("topic does not exist")),
+ expError: fmt.Errorf("kafka producer error: %w", errors.New("topic does not exist")),
+ },
+ }
+
+ controller := gomock.NewController(t)
+ mockProducer := NewMockconfluentProducer(controller)
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ writer := confluentKafkaWriter{mockProducer}
+
+ jsonVal, _ := json.Marshal(tt.value)
+
+ // Only mock the producer methods if there is an expected response
+ if tt.produceErr != nil {
+ // expected message
+ expMessage := &kafka.Message{
+ TopicPartition: kafka.TopicPartition{Topic: &tt.topic, Partition: kafka.PartitionAny},
+ Key: []byte(tt.key),
+ Value: jsonVal,
+ }
- kafkaWriter := NewWriterFromConfig(cfg)
+ deliveryChan := make(chan kafka.Event)
+ defer close(deliveryChan)
- notificationTopic := topicBase + ".notification"
- value := make(chan int)
- invalidBatchJson := map[string]interface{}{"some str": value}
- err = kafkaWriter.Write(notificationTopic, batchId, invalidBatchJson)
- assert.NotNil(t, err)
- assert.Equal(t, "json: unsupported type: chan int", err.Error())
+ mockProducer.EXPECT().
+ Produce(expMessage, nil).
+ DoAndReturn(func(message *kafka.Message, _ chan kafka.Event) interface{} {
+ if tt.partitionErr != nil {
+ message.TopicPartition.Error = *tt.partitionErr
+ // this sends the message from another goroutine;
+ // channel operations block until both sender and receiver are ready
+ go sendMessage(message, deliveryChan)
+ }
+ return *tt.produceErr
+ })
+
+ if tt.partitionErr != nil {
+ mockProducer.EXPECT().Flush(1000)
+
+ mockProducer.EXPECT().
+ Events().
+ Return(deliveryChan)
+ }
+ }
+
+ err := writer.Write(tt.topic, tt.key, tt.value)
+
+ assert.Equal(t, tt.expError, err)
+ })
+ }
+}
+
+func sendMessage(message *kafka.Message, channel chan kafka.Event) {
+ channel <- message
+}
- badValue := math.Inf(1)
- invalidValueJson := map[string]interface{}{"some str": badValue}
- err2 := kafkaWriter.Write(notificationTopic, batchId, invalidValueJson)
- assert.NotNil(t, err2)
- assert.Equal(t, "json: unsupported value: +Inf", err2.Error())
+// needed because you can't take the address of errors.New("")
+func errPtr(err error) *error {
+ return &err
}
diff --git a/src/common/param/extractor_test.go b/src/common/param/extractor_test.go
index 6a41996..32843d1 100644
--- a/src/common/param/extractor_test.go
+++ b/src/common/param/extractor_test.go
@@ -59,7 +59,7 @@ func TestExtractValuesSuccess(t *testing.T) {
t.Fatal(err)
}
- apisKeys, err := ExtractValues(params, BoundCreds, "messagehub", "apikeys")
+ apisKeys, err := ExtractValues(params, "__bx_creds", "messagehub", "apikeys")
assert.NotNil(t, apisKeys)
assert.Nil(t, err)
assert.Equal(t, apisKeys["apikey"], "FAKE_Api_Key")
@@ -71,7 +71,7 @@ func TestExtractValuesMissingUser(t *testing.T) {
t.Fatal(err)
}
- user, err := ExtractValues(params, BoundCreds, "messagehub", "user")
+ user, err := ExtractValues(params, "__bx_creds", "messagehub", "user")
assert.Nil(t, user)
assert.NotNil(t, err)
assert.Equal(t, "error extracting the user section of the JSON", err.Error())
diff --git a/src/common/param/parameters.go b/src/common/param/parameters.go
index 3bd3bd8..0e30a0d 100644
--- a/src/common/param/parameters.go
+++ b/src/common/param/parameters.go
@@ -6,8 +6,6 @@
package param
const (
- BoundCreds string = "__bx_creds"
-
BatchId string = "id"
TenantId string = "tenantId"
StreamId string = "id"
diff --git a/src/common/response/responses.go b/src/common/response/responses.go
index c227564..9f50519 100644
--- a/src/common/response/responses.go
+++ b/src/common/response/responses.go
@@ -5,40 +5,6 @@
*/
package response
-import (
- "fmt"
- "net/http"
- "os"
-)
-
-const EnvOwActivationId string = "__OW_ACTIVATION_ID"
-const missingParamsMsg string = "Missing required parameter(s): %v"
-const invalidParamsMsg string = "Invalid parameter type(s): %v"
-
-func Error(statusCode int, description string) map[string]interface{} {
- activationId := os.Getenv(EnvOwActivationId)
-
- return map[string]interface{}{
- "error": map[string]interface{}{
- "statusCode": statusCode,
- "body": map[string]interface{}{
- "errorEventId": activationId,
- "errorDescription": description,
- },
- },
- }
-}
-
-func MissingParams(params ...string) map[string]interface{} {
- desc := fmt.Sprintf(missingParamsMsg, params)
- return Error(http.StatusBadRequest, desc)
-}
-
-func InvalidParams(params ...string) map[string]interface{} {
- desc := fmt.Sprintf(invalidParamsMsg, params)
- return Error(http.StatusBadRequest, desc)
-}
-
type ErrorDetail struct {
ErrorEventId string `json:"errorEventId"`
ErrorDescription string `json:"errorDescription"`
diff --git a/src/common/response/responses_test.go b/src/common/response/responses_test.go
index 9079d01..189e86a 100644
--- a/src/common/response/responses_test.go
+++ b/src/common/response/responses_test.go
@@ -6,43 +6,14 @@
package response
import (
- "fmt"
"github.com/labstack/echo/v4"
"github.com/stretchr/testify/assert"
"net/http"
"net/http/httptest"
- "os"
"reflect"
"testing"
)
-const activationId string = "activation123"
-
-func TestError(t *testing.T) {
- _ = os.Setenv(EnvOwActivationId, activationId)
- inputDesc := "Caller is not authorized"
- response := Error(http.StatusUnauthorized, inputDesc)
-
- err, ok := response["error"].(map[string]interface{})
- if !ok {
- t.Fatalf("Missing error entry, or it's not an object")
- }
- if err["statusCode"] != http.StatusUnauthorized {
- t.Errorf("Unexpected StatusCode. Expected: [%v], Actual: [%v]", http.StatusUnauthorized, err["statusCode"])
- }
-
- body, ok := err["body"].(map[string]interface{})
- if !ok {
- t.Fatalf("Missing body entry, or it's not an object")
- }
- if body["errorEventId"] != activationId {
- t.Errorf("Unexpected errorEventId. Expected: [%v], Actual: [%v]", activationId, body["errorEventId"])
- }
- if body["errorDescription"] != inputDesc {
- t.Errorf("Unexpected errorDescription. Expected: [%v], Actual: [%v]", inputDesc, body["errorDescription"])
- }
-}
-
func TestGetErrorDetail(t *testing.T) {
requestId := "requestId"
description := "Could not perform elasticsearch health check: elasticsearch client error: client error"
@@ -87,59 +58,3 @@ func TestNewErrorDetailResponse(t *testing.T) {
t.Errorf("expected [%v] but have [%v]", expectedErrorDetail, result)
}
}
-
-func TestMissingParams(t *testing.T) {
- _ = os.Setenv(EnvOwActivationId, activationId)
- p1 := "param1"
- p2 := "param2"
- p3 := "param3"
- expectedMsg := fmt.Sprintf(missingParamsMsg, []string{p1, p2, p3})
- response := MissingParams(p1, p2, p3)
-
- err, ok := response["error"].(map[string]interface{})
- if !ok {
- t.Fatalf("Missing error entry, or it's not an object")
- }
- if err["statusCode"] != http.StatusBadRequest {
- t.Errorf("Unexpected StatusCode. Expected: [%v], Actual: [%v]", http.StatusBadRequest, err["statusCode"])
- }
-
- body, ok := err["body"].(map[string]interface{})
- if !ok {
- t.Fatalf("Missing body entry, or it's not an object")
- }
- if body["errorEventId"] != activationId {
- t.Errorf("Unexpected errorEventId. Expected: [%v], Actual: [%v]", activationId, body["errorEventId"])
- }
- if body["errorDescription"] != expectedMsg {
- t.Errorf("Unexpected errorDescription. Expected: [%v], Actual: [%v]", expectedMsg, body["errorDescription"])
- }
-}
-
-func TestInvalidParams(t *testing.T) {
- _ = os.Setenv(EnvOwActivationId, activationId)
- p1 := "param1"
- p2 := "param2"
- p3 := "param3"
- expectedMsg := fmt.Sprintf(invalidParamsMsg, []string{p1, p2, p3})
- response := InvalidParams(p1, p2, p3)
-
- err, ok := response["error"].(map[string]interface{})
- if !ok {
- t.Fatalf("Missing error entry, or it's not an object")
- }
- if err["statusCode"] != http.StatusBadRequest {
- t.Errorf("Unexpected StatusCode. Expected: [%v], Actual: [%v]", http.StatusBadRequest, err["statusCode"])
- }
-
- body, ok := err["body"].(map[string]interface{})
- if !ok {
- t.Fatalf("Missing body entry, or it's not an object")
- }
- if body["errorEventId"] != activationId {
- t.Errorf("Unexpected errorEventId. Expected: [%v], Actual: [%v]", activationId, body["errorEventId"])
- }
- if body["errorDescription"] != expectedMsg {
- t.Errorf("Unexpected errorDescription. Expected: [%v], Actual: [%v]", expectedMsg, body["errorDescription"])
- }
-}
diff --git a/src/common/test/connector_mock.go b/src/common/test/connector_mock.go
deleted file mode 100644
index d97f031..0000000
--- a/src/common/test/connector_mock.go
+++ /dev/null
@@ -1,50 +0,0 @@
-// Code generated by MockGen. DO NOT EDIT.
-// Source: connector.go
-
-// Package test is a generated GoMock package.
-package test
-
-import (
- context "context"
- gomock "github.com/golang/mock/gomock"
- kafka "github.com/segmentio/kafka-go"
- reflect "reflect"
-)
-
-// MockContextDialer is a mock of ContextDialer interface
-type MockContextDialer struct {
- ctrl *gomock.Controller
- recorder *MockContextDialerMockRecorder
-}
-
-// MockContextDialerMockRecorder is the mock recorder for MockContextDialer
-type MockContextDialerMockRecorder struct {
- mock *MockContextDialer
-}
-
-// NewMockContextDialer creates a new mock instance
-func NewMockContextDialer(ctrl *gomock.Controller) *MockContextDialer {
- mock := &MockContextDialer{ctrl: ctrl}
- mock.recorder = &MockContextDialerMockRecorder{mock}
- return mock
-}
-
-// EXPECT returns an object that allows the caller to indicate expected use
-func (m *MockContextDialer) EXPECT() *MockContextDialerMockRecorder {
- return m.recorder
-}
-
-// DialContext mocks base method
-func (m *MockContextDialer) DialContext(ctx context.Context, networkType, address string) (*kafka.Conn, error) {
- m.ctrl.T.Helper()
- ret := m.ctrl.Call(m, "DialContext", ctx, networkType, address)
- ret0, _ := ret[0].(*kafka.Conn)
- ret1, _ := ret[1].(error)
- return ret0, ret1
-}
-
-// DialContext indicates an expected call of DialContext
-func (mr *MockContextDialerMockRecorder) DialContext(ctx, networkType, address interface{}) *gomock.Call {
- mr.mock.ctrl.T.Helper()
- return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DialContext", reflect.TypeOf((*MockContextDialer)(nil).DialContext), ctx, networkType, address)
-}
diff --git a/src/common/test/fake_partition_reader.go b/src/common/test/fake_partition_reader.go
deleted file mode 100644
index dd704bb..0000000
--- a/src/common/test/fake_partition_reader.go
+++ /dev/null
@@ -1,67 +0,0 @@
-/**
- * (C) Copyright IBM Corp. 2020
- *
- * SPDX-License-Identifier: Apache-2.0
- */
-package test
-
-import (
- kg "github.com/segmentio/kafka-go"
- "testing"
-)
-
-const (
- Broker1 string = "broker-0-porcupine.kafka.eventstreams.monkey.ibm.com:9093"
- Broker2 string = "broker-1-porcupine.kafka.eventstreams.monkey.ibm.com:9093"
- defaultPartitionId int = 5
- partition2Id int = 12
-)
-
-type FakePartitionReader struct {
- T *testing.T
- Partitions []kg.Partition
- Err error
-}
-
-func (f FakePartitionReader) ReadPartitions(topics ...string) (partitions []kg.Partition, err error) {
- if f.Err != nil {
- return []kg.Partition{}, f.Err
- }
- return f.Partitions, nil
-}
-
-func GetFakeTwoPartitionSlice() []kg.Partition {
- var leader = kg.Broker{
- Host: Broker1,
- Port: 1234,
- ID: 66,
- Rack: "",
- }
- var follower = kg.Broker{
- Host: Broker2,
- Port: 9093,
- ID: 25,
- Rack: "",
- }
- var replicas []kg.Broker
- replicas = append(replicas, leader)
- replicas = append(replicas, follower)
- var isr []kg.Broker
- return []kg.Partition{
- {Topic: "topic1",
- Leader: leader,
- Replicas: replicas,
- Isr: isr,
- ID: defaultPartitionId},
- {
- Topic: "topicZ03",
- Leader: leader,
- Replicas: replicas,
- Isr: isr,
- ID: partition2Id},
- }
-}
-
-func (f FakePartitionReader) Close() error {
- return nil
-}
diff --git a/src/common/test/fake_transport.go b/src/common/test/fake_transport.go
index 82fd519..0e408e2 100644
--- a/src/common/test/fake_transport.go
+++ b/src/common/test/fake_transport.go
@@ -82,8 +82,9 @@ func (ft *FakeTransport) RoundTrip(request *http.Request) (*http.Response, error
// optionally validate that the request body is as expected
ft.checkBody(request.Body, call.RequestBody)
- // convert response status code and body to an http.Response
+ // convert response status code and body to a http.Response
httpResp := &http.Response{
+ Header: http.Header{"X-Elastic-Product": {"Elasticsearch"}},
StatusCode: call.ResponseStatusCode,
Body: ioutil.NopCloser(strings.NewReader(call.ResponseBody)),
}
diff --git a/src/common/test/fake_writer.go b/src/common/test/fake_writer.go
index cf08447..653992e 100644
--- a/src/common/test/fake_writer.go
+++ b/src/common/test/fake_writer.go
@@ -48,3 +48,7 @@ func copyValue(value map[string]interface{}) map[string]interface{} {
}
return rtn
}
+
+func (fw FakeWriter) Close() {
+}
diff --git a/src/go.mod b/src/go.mod
index cc57ffa..707f22b 100644
--- a/src/go.mod
+++ b/src/go.mod
@@ -3,31 +3,39 @@ module github.com/Alvearie/hri-mgmt-api
require (
github.com/IBM/event-streams-go-sdk-generator v1.0.0
github.com/IBM/resource-controller-go-sdk-generator v1.0.1
+ github.com/confluentinc/confluent-kafka-go v1.7.0
github.com/coreos/go-oidc v2.2.1+incompatible
github.com/elastic/go-elasticsearch/v7 v7.11.0
- github.com/go-playground/locales v0.13.0
- github.com/go-playground/universal-translator v0.17.0
- github.com/go-playground/validator/v10 v10.4.1
- github.com/golang/mock v1.5.0
- github.com/kr/pretty v0.1.0 // indirect
- github.com/labstack/echo/v4 v4.2.0
- github.com/mattn/go-colorable v0.1.8 // indirect
- github.com/newrelic/go-agent/v3 v3.11.0
+ github.com/go-playground/locales v0.14.0
+ github.com/go-playground/universal-translator v0.18.0
+ github.com/go-playground/validator/v10 v10.9.0
+ github.com/golang/mock v1.6.0
+ github.com/labstack/echo/v4 v4.6.1
+ github.com/mattn/go-colorable v0.1.11 // indirect
+ github.com/newrelic/go-agent/v3 v3.15.0
github.com/newrelic/go-agent/v3/integrations/nrecho-v4 v1.0.1
- github.com/peterbourgon/ff/v3 v3.1.0
- github.com/pquerna/cachecontrol v0.0.0-20180517163645-1555304b9b35 // indirect
- github.com/segmentio/kafka-go v0.3.5
+ github.com/peterbourgon/ff/v3 v3.1.2
+ github.com/pquerna/cachecontrol v0.1.0 // indirect
github.com/sirupsen/logrus v1.8.1
- github.com/stretchr/testify v1.6.1
- golang.org/x/crypto v0.0.0-20210220033148-5ea612d1eb83 // indirect
- golang.org/x/net v0.0.0-20210614182718-04defd469f4e // indirect
- golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d // indirect
- gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 // indirect
- gopkg.in/square/go-jose.v2 v2.4.1 // indirect
+ github.com/stretchr/testify v1.7.0
+ golang.org/x/crypto v0.0.0-20210921155107-089bfa567519 // indirect
+ golang.org/x/net v0.0.0-20211005001312-d4b1ae081e3b // indirect
+ golang.org/x/oauth2 v0.0.0-20211005180243-6b3c2da341f1 // indirect
+ golang.org/x/sys v0.0.0-20211004093028-2c5d950f24ef // indirect
+ golang.org/x/time v0.0.0-20210723032227-1f47c861a9ac // indirect
+ google.golang.org/appengine v1.6.7 // indirect
+ google.golang.org/genproto v0.0.0-20211005153810-c76a74d43a8e // indirect
+ google.golang.org/grpc v1.41.0 // indirect
+ gopkg.in/square/go-jose.v2 v2.6.0 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
- gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect
)
-replace github.com/Sirupsen/logrus v1.8.1 => github.com/sirupsen/logrus v1.8.1
+// The jwt-go substitution is necessary for nrecho-v4 to work, as it uses an old version
+// of Echo that depends on a vulnerable dependency. The golang-jwt v4 library
+// was designed to be substitutable for jwt-go in this way.
+replace (
+ github.com/Sirupsen/logrus v1.8.1 => github.com/sirupsen/logrus v1.8.1
+ github.com/dgrijalva/jwt-go => github.com/golang-jwt/jwt/v4 v4.0.0
+)
go 1.15
diff --git a/src/go.sum b/src/go.sum
index a698789..fe98f47 100644
--- a/src/go.sum
+++ b/src/go.sum
@@ -1,186 +1,509 @@
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
+cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU=
+cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU=
+cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY=
+cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc=
+cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0=
+cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To=
+cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4=
+cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M=
+cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc=
+cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk=
+cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs=
+cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc=
+cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY=
+cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o=
+cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE=
+cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc=
+cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg=
+cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc=
+cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ=
+cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE=
+cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk=
+cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I=
+cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw=
+cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA=
+cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU=
+cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw=
+cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos=
+cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk=
+cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs=
+cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
+dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
-github.com/DataDog/zstd v1.4.0 h1:vhoV+DUHnRZdKW1i5UMjAk2G4JY8wN4ayRfYDNdEhwo=
-github.com/DataDog/zstd v1.4.0/go.mod h1:1jcaCB/ufaK+sKp1NBhlGmpz41jOoPQ35bpF36t7BBo=
+github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/IBM/event-streams-go-sdk-generator v1.0.0 h1:XLm+MsdH6Dod4uXdiRhdbwOI8gEVnQ4YZrSLRQ4wMbY=
github.com/IBM/event-streams-go-sdk-generator v1.0.0/go.mod h1:cfRUnCbmFvjE4QROL3vv+EfTshlDreHck1piqXkOvE4=
github.com/IBM/resource-controller-go-sdk-generator v1.0.1 h1:3tUag6fX+mwSA0z+NylUn9segzFXuFX3l72meodgHiI=
github.com/IBM/resource-controller-go-sdk-generator v1.0.1/go.mod h1:cKrNWsOSwM7dSY5IfWc8kopcGnhuVckN0iB6pqhOqaE=
+github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
github.com/antihax/optional v1.0.0 h1:xK2lYat7ZLaVVcIuj82J8kIro4V6kDe0AUDFboUCwcg=
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
+github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
+github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
+github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
+github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
+github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
+github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
+github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
+github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
+github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
+github.com/confluentinc/confluent-kafka-go v1.7.0 h1:tXh3LWb2Ne0WiU3ng4h5qiGA9XV61rz46w60O+cq8bM=
+github.com/confluentinc/confluent-kafka-go v1.7.0/go.mod h1:u2zNLny2xq+5rWeTQjFHbDzzNuba4P1vo31r9r4uAdg=
github.com/coreos/go-oidc v2.2.1+incompatible h1:mh48q/BqXqgjVHpy2ZY7WnWAbenxRjsz9N1i1YxjHAk=
github.com/coreos/go-oidc v2.2.1+incompatible/go.mod h1:CgnwVTmzoESiwO9qyAFEMiHoZ1nMCKZlZ9V6mm3/LKc=
+github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM=
-github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
-github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21 h1:YEetp8/yCZMuEPMUDHG0CW/brkkEp8mzqk2+ODEitlw=
-github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU=
github.com/elastic/go-elasticsearch/v7 v7.11.0 h1:bv+2GqsVrPdX/ChJqAHAFtWgtGvVJ0icN/WdBGAdNuw=
github.com/elastic/go-elasticsearch/v7 v7.11.0/go.mod h1:OJ4wdbtDNk5g503kvlHLyErCgQwwzmDtaFC4XyOxXA4=
+github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
+github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
+github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
+github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ=
+github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
+github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
+github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
+github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
+github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-playground/assert/v2 v2.0.1 h1:MsBgLAaY856+nPRTKrp3/OZK38U/wa0CcBYNjji3q3A=
github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
-github.com/go-playground/locales v0.13.0 h1:HyWk6mgj5qFqCT5fjGBuRArbVDfE4hi8+e8ceBS/t7Q=
-github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8=
-github.com/go-playground/universal-translator v0.17.0 h1:icxd5fm+REJzpZx7ZfpaD876Lmtgy7VtROAbHHXk8no=
-github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA=
-github.com/go-playground/validator/v10 v10.4.1 h1:pH2c5ADXtd66mxoE0Zm9SUhxE20r7aM3F26W0hOn+GE=
-github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4=
-github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58=
+github.com/go-playground/locales v0.14.0 h1:u50s323jtVGugKlcYeyzC0etD1HifMjqmJqb8WugfUU=
+github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs=
+github.com/go-playground/universal-translator v0.18.0 h1:82dyy6p4OuJq4/CByFNOn/jYrnRPArHwAcmLoJZxyho=
+github.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl+lu/H90nyDXpg0fqeB/AQUGNTVA=
+github.com/go-playground/validator/v10 v10.9.0 h1:NgTtmN58D0m8+UuxtYmGztBJB7VnPgjj221I1QHci2A=
+github.com/go-playground/validator/v10 v10.9.0/go.mod h1:74x4gJWsvQexRdW8Pn3dXSGrTK4nAUsbPlLADvpJkos=
+github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY=
+github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I=
+github.com/golang-jwt/jwt/v4 v4.0.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
+github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
+github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
+github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
-github.com/golang/mock v1.5.0 h1:jlYHihg//f7RRwuPfptm04yp4s7O6Kw8EZiVYIGcH0g=
-github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8=
+github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
+github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y=
+github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
+github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
+github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
+github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4=
+github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc=
+github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
-github.com/golang/protobuf v1.3.3 h1:gyjaxf+svBWX08ZjK86iN9geUJF0H6gp2IRKX6Nf6/I=
github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
-github.com/golang/snappy v0.0.1 h1:Qgr9rKW7uDUkrbSmQeiDsGa8SjGyCOGtuasMWwvp2P4=
-github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
-github.com/google/go-cmp v0.2.0 h1:+dTQ8DZQJz0Mb/HjFlkptS1FeQ4cWSnN941F8aEG4SQ=
+github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
+github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk=
+github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=
+github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=
+github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs=
+github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w=
+github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=
+github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8=
+github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
+github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
+github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
+github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw=
+github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
+github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
+github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
-github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
+github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
+github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
+github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU=
+github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
+github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
+github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
+github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
+github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
+github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
+github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
+github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
+github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
+github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
+github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
+github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
+github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
+github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
+github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
+github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
+github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
+github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
+github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
+github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
+github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0=
+github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
-github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
+github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
+github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/labstack/echo/v4 v4.0.0/go.mod h1:tZv7nai5buKSg5h/8E6zz4LsD/Dqh9/91Mvs7Z5Zyno=
-github.com/labstack/echo/v4 v4.2.0 h1:jkCSsjXmBmapVXF6U4BrSz/cgofWM0CU3Q74wQvXkIc=
-github.com/labstack/echo/v4 v4.2.0/go.mod h1:AA49e0DZ8kk5jTOOCKNuPR6oTnBS0dYiM4FW1e6jwpg=
+github.com/labstack/echo/v4 v4.6.1 h1:OMVsrnNFzYlGSdaiYGHbgWQnr+JM7NG+B9suCPie14M=
+github.com/labstack/echo/v4 v4.6.1/go.mod h1:RnjgMWNDB9g/HucVWhQYNQP9PvbYf6adqftqryo7s9k=
github.com/labstack/gommon v0.2.8/go.mod h1:/tj9csK2iPSBvn+3NLM9e52usepMtrd5ilFYA+wQNJ4=
github.com/labstack/gommon v0.3.0 h1:JEeO0bvc78PKdyHxloTKiF8BD5iGrH8T6MSeGvSgob0=
github.com/labstack/gommon v0.3.0/go.mod h1:MULnywXg0yavhxWKc+lOruYdAhDwPK9wf0OL7NoOu+k=
-github.com/leodido/go-urn v1.2.0 h1:hpXL4XnriNwQ/ABnpepYM/1vCLWNDfUNts8dX3xTG6Y=
-github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII=
+github.com/leodido/go-urn v1.2.1 h1:BqpAaACuzVSgi/VLzGZIobT2z4v53pjosyNd9Yv6n/w=
+github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY=
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
-github.com/mattn/go-colorable v0.1.7/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
-github.com/mattn/go-colorable v0.1.8 h1:c1ghPdyEDarC70ftn0y+A/Ee++9zz8ljHG1b13eJ0s8=
github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
+github.com/mattn/go-colorable v0.1.11 h1:nQ+aFkoE2TMGc0b68U2OKSexC+eq46+XwZzWXHRmPYs=
+github.com/mattn/go-colorable v0.1.11/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4=
github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ=
-github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY=
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
+github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y=
+github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
github.com/newrelic/go-agent/v3 v3.0.0/go.mod h1:H28zDNUC0U/b7kLoY4EFOhuth10Xu/9dchozUiOseQQ=
-github.com/newrelic/go-agent/v3 v3.11.0 h1:14LGRRAh4tyaJcP7IsOSxZyaEfwa+OIIH80PQHUGJxw=
-github.com/newrelic/go-agent/v3 v3.11.0/go.mod h1:1A1dssWBwzB7UemzRU6ZVaGDsI+cEn5/bNxI0wiYlIc=
+github.com/newrelic/go-agent/v3 v3.15.0 h1:XKF81YOkkO5cCEtQmguamOVMVmeWnv7X3+mkRtwwG3U=
+github.com/newrelic/go-agent/v3 v3.15.0/go.mod h1:1A1dssWBwzB7UemzRU6ZVaGDsI+cEn5/bNxI0wiYlIc=
github.com/newrelic/go-agent/v3/integrations/nrecho-v4 v1.0.1 h1:o09114esUDznfZ0vD/r6R4FJK08To9PZWP9zSoIWFqM=
github.com/newrelic/go-agent/v3/integrations/nrecho-v4 v1.0.1/go.mod h1:OaWlBA7HaVlDpvNsJ3JhH1IN45kP1lwIGa3+LJBnYuE=
github.com/pelletier/go-toml v1.6.0/go.mod h1:5N711Q9dKgbdkxHL+MEfF31hpT7l0S0s/t2kKREewys=
-github.com/peterbourgon/ff/v3 v3.1.0 h1:5JAeDK5j/zhKFjyHEZQXwXBoDijERaos10RE+xamOsY=
-github.com/peterbourgon/ff/v3 v3.1.0/go.mod h1:XNJLY8EIl6MjMVjBS4F0+G0LYoAqs0DTa4rmHHukKDE=
-github.com/pierrec/lz4 v2.0.5+incompatible h1:2xWsjqPFWcplujydGg4WmhC/6fZqK42wMM8aXeqhl0I=
-github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
+github.com/peterbourgon/ff/v3 v3.1.2 h1:0GNhbRhO9yHA4CC27ymskOsuRpmX0YQxwxM9UPiP6JM=
+github.com/peterbourgon/ff/v3 v3.1.2/go.mod h1:XNJLY8EIl6MjMVjBS4F0+G0LYoAqs0DTa4rmHHukKDE=
+github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
-github.com/pquerna/cachecontrol v0.0.0-20180517163645-1555304b9b35 h1:J9b7z+QKAmPf4YLrFg6oQUotqHQeUNWwkvo7jZp1GLU=
-github.com/pquerna/cachecontrol v0.0.0-20180517163645-1555304b9b35/go.mod h1:prYjPmNq4d1NPVmpShWobRqXY3q7Vp+80DqgxxUrUIA=
+github.com/pquerna/cachecontrol v0.1.0 h1:yJMy84ti9h/+OEWa752kBTKv4XC30OtVVHYv/8cTqKc=
+github.com/pquerna/cachecontrol v0.1.0/go.mod h1:NrUG3Z7Rdu85UNR3vm7SOsl1nFIeSiQnrHV5K9mBcUI=
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
-github.com/segmentio/kafka-go v0.3.5 h1:2JVT1inno7LxEASWj+HflHh5sWGfM0gkRiLAxkXhGG4=
-github.com/segmentio/kafka-go v0.3.5/go.mod h1:OT5KXBPbaJJTcvokhWR2KFmm0niEx3mnccTwjmLvSi4=
+github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
+github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
+github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
+github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8=
+github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE=
github.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE=
github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
+github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
-github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0=
+github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
+github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
github.com/valyala/fasttemplate v0.0.0-20170224212429-dcecefd839c4/go.mod h1:50wTf68f99/Zt14pr046Tgt3Lp2vLyFZKzbFXTOabXw=
github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8=
github.com/valyala/fasttemplate v1.2.1 h1:TVEnxayobAdVkhQfrfes2IzOB6o+z4roRkPF52WA1u4=
github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ=
-github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c h1:u40Z8hqBAAQyv+vATcGgV0YCnDjqSL7/q/JyPhhJSPk=
-github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I=
-github.com/xdg/stringprep v1.0.0 h1:d9X0esnoa3dFsV0FG35rAT0RIhYFlPq7MiP+DW89La0=
-github.com/xdg/stringprep v1.0.0/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y=
+github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
+github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
+github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
+github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
+go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
+go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=
+go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
+go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
+go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
+go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=
golang.org/x/crypto v0.0.0-20190130090550-b01c7a725664/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
-golang.org/x/crypto v0.0.0-20190506204251-e1dfcc566284/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
+golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
+golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
-golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
-golang.org/x/crypto v0.0.0-20210220033148-5ea612d1eb83 h1:/ZScEX8SfEmUGRHs0gxpqteO5nfNW6axyZbBdw9A12g=
-golang.org/x/crypto v0.0.0-20210220033148-5ea612d1eb83/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I=
+golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
+golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
+golang.org/x/crypto v0.0.0-20210921155107-089bfa567519 h1:7I4JAnoQBe7ZtJcBaYHi5UtiO8tQHbUSXxL+pnGRANg=
+golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
+golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
+golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
+golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek=
+golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY=
+golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
+golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
+golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
+golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
+golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
+golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
+golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
+golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
+golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
+golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
+golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
+golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs=
+golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
+golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
+golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE=
+golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o=
+golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=
+golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY=
+golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
+golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
+golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
+golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
+golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
+golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
+golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
+golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
+golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
+golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
-golang.org/x/net v0.0.0-20210614182718-04defd469f4e h1:XpT3nA5TvE525Ne3hInMh6+GETgn27Zfm9dxsThnX2Q=
-golang.org/x/net v0.0.0-20210614182718-04defd469f4e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
+golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
+golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
+golang.org/x/net v0.0.0-20210913180222-943fd674d43e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
+golang.org/x/net v0.0.0-20211005001312-d4b1ae081e3b h1:SXy8Ld8oKlcogOvUAh0J5Pm5RKzgYBMMxLxt6n5XW50=
+golang.org/x/net v0.0.0-20211005001312-d4b1ae081e3b/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
+golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
-golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d h1:TzXSXBo42m9gQenoE3b9BGiEpg5IG2JkU5FkPIawgtw=
+golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
+golang.org/x/oauth2 v0.0.0-20211005180243-6b3c2da341f1 h1:B333XXssMuKQeBwiNODx4TupZy7bf4sxFZnN2ZOcvUE=
+golang.org/x/oauth2 v0.0.0-20211005180243-6b3c2da341f1/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190129075346-302c3dd5f1cc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200826173525-f9321e4c35a6/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20210423082822-04245dca01da h1:b3NXsE2LusjYGGjL5bxEVZZORm/YEFFrWFjR8eFrw/c=
+golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
+golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210910150752-751e447fb3d0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20211004093028-2c5d950f24ef h1:fPxZ3Umkct3LZ8gK9nbk+DWDJ9fstZa2grBn+lWVKPs=
+golang.org/x/sys v0.0.0-20211004093028-2c5d950f24ef/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
+golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
-golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M=
+golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
-golang.org/x/time v0.0.0-20201208040808-7e3f01d25324 h1:Hir2P/De0WpUhtrKGGjvSb2YxUgyZ7EFOSLIcSSpiwE=
+golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk=
+golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
+golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
+golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
+golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20201208040808-7e3f01d25324/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
+golang.org/x/time v0.0.0-20210723032227-1f47c861a9ac h1:7zkz7BUtwNFFqcowJ+RIgu2MaV/MapERkDIy+mwPyjs=
+golang.org/x/time v0.0.0-20210723032227-1f47c861a9ac/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
+golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
+golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
+golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
+golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
+golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
+golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
+golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
+golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw=
+golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw=
+golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8=
+golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
+golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
+golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
+golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
+golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
+golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
+golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
+golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=
+golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
+google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=
+google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
+google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
+google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
+google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
+google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
+google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
+google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
+google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
+google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
+google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
+google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE=
+google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE=
+google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM=
+google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
-google.golang.org/appengine v1.4.0 h1:/wp5JvzpHIxhs/dumFmF7BXTf3Z+dd4uXta4kVyO508=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
+google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
+google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0=
+google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
+google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
+google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c=
+google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
-google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55 h1:gSJIx1SDwno+2ElGhA4+qG2zF97qiUzTM+rQ0klBOcE=
+google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
+google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
+google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
+google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
+google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
+google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8=
+google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
+google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
+google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
+google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
+google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
+google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
+google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA=
+google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U=
+google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=
+google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA=
+google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20211005153810-c76a74d43a8e h1:Im71rbA1N3CbIag/PumYhQcNR8bLNmuOtRIyOnnLsT8=
+google.golang.org/genproto v0.0.0-20211005153810-c76a74d43a8e/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
+google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
+google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
-google.golang.org/grpc v1.27.0 h1:rRYRFMVgRv6E0D70Skyfsr28tDXIuuPZyWGMPdMcnXg=
+google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
+google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
+google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
+google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60=
+google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk=
+google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
+google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
+google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0=
+google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
+google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34=
+google.golang.org/grpc v1.41.0 h1:f+PlOh7QV4iIJkPrx5NQ7qaNGFQ3OTse67yaDHfju4E=
+google.golang.org/grpc v1.41.0/go.mod h1:U3l9uK9J0sini8mHphKoXyaqDA/8VyGnDee1zzIUK6k=
+google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
+google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
+google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
+google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE=
+google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=
+google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
+google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
+google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
+google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4=
+google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
+google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
+google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
+google.golang.org/protobuf v1.27.1 h1:SnqbnDw1V7RiZcXPx5MEeqPv2s79L9i7BJUlG/+RurQ=
+google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
-gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
-gopkg.in/square/go-jose.v2 v2.4.1 h1:H0TmLt7/KmzlrDOpa1F+zr0Tk90PbJYBfsVUmRLrf9Y=
-gopkg.in/square/go-jose.v2 v2.4.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI=
+gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
+gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
+gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
+gopkg.in/square/go-jose.v2 v2.6.0 h1:NGk74WTnPKBNUhNzQX7PYcTLUjoq7mzKk2OKbvwk2iI=
+gopkg.in/square/go-jose.v2 v2.6.0/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
@@ -188,4 +511,12 @@ gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo=
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
+honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
+honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
+honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
+honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
+honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
+rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
+rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
+rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=
diff --git a/src/healthcheck/get.go b/src/healthcheck/get.go
index 8f52758..61a16ee 100644
--- a/src/healthcheck/get.go
+++ b/src/healthcheck/get.go
@@ -13,18 +13,15 @@ import (
esp "github.com/Alvearie/hri-mgmt-api/common/param/esparam"
"github.com/Alvearie/hri-mgmt-api/common/response"
"github.com/elastic/go-elasticsearch/v7"
- "github.com/sirupsen/logrus"
"net/http"
- "strconv"
)
const statusAllGood string = "green"
const serviceUnavailableMsg string = "HRI Service Temporarily Unavailable | error Detail: %v"
-const kafkaConnFail string = "Kafka status: Kafka Connection/Read Partition failed"
const notReported string = "NotReported"
const noStatusReported = "NONE/" + notReported
-func Get(requestId string, client *elasticsearch.Client, partReader kafka.PartitionReader) (int, *response.ErrorDetail) {
+func Get(requestId string, client *elasticsearch.Client, healthChecker kafka.HealthChecker) (int, *response.ErrorDetail) {
prefix := "healthcheck/get"
var logger = logwrapper.GetMyLogger(requestId, prefix)
logger.Infof("Prepare HealthCheck - ElasticSearch (No Input Params)")
@@ -54,24 +51,24 @@ func Get(requestId string, client *elasticsearch.Client, partReader kafka.Partit
status, unixTimestamp)
}
- //2. Do Kafka Conn healthCheck
- isAvailable, err := kafka.CheckConnection(partReader)
- logger.Infoln("Kafka HealthCheck Result: " + strconv.FormatBool(isAvailable))
+ //2. Do Kafka healthCheck
+ err = healthChecker.Check()
+ logger.Infof("Kafka HealthCheck error: %v", err)
var kaErrMsg = ""
- if err != nil || isAvailable == false {
+ if err != nil {
isErr = true
- kaErrMsg = printKafkaErrDetail(logger)
+ kaErrMsg = err.Error()
logger.Errorln(kaErrMsg)
}
var errMessage string
if isErr {
if len(esErrMsg) > 0 && len(kaErrMsg) > 0 {
- errMessage = esErrMsg + "| " + kafkaConnFail
+ errMessage = fmt.Sprintf(serviceUnavailableMsg, esErrMsg+" | "+kaErrMsg)
} else if len(kaErrMsg) > 0 {
- errMessage = kaErrMsg
+ errMessage = fmt.Sprintf(serviceUnavailableMsg, kaErrMsg)
} else {
- errMessage = esErrMsg
+ errMessage = fmt.Sprintf(serviceUnavailableMsg, esErrMsg)
}
return http.StatusServiceUnavailable, response.NewErrorDetail(requestId, errMessage)
} else { //All Good for BOTH ElasticSearch AND Kafka Healthcheck
@@ -79,12 +76,6 @@ func Get(requestId string, client *elasticsearch.Client, partReader kafka.Partit
}
}
-func printKafkaErrDetail(logger logrus.FieldLogger) string {
- errMessage := fmt.Sprintf(serviceUnavailableMsg, kafkaConnFail)
- logger.Errorln(errMessage)
- return errMessage
-}
-
func getESErrorDetail(decodedResultBody map[string]interface{}, status string) string {
unixTimestamp := getReturnedTimestamp(decodedResultBody)
var clusterId = notReported
@@ -106,6 +97,5 @@ func getReturnedTimestamp(decodedResultBody map[string]interface{}) string {
}
func createESErrMsg(epoch string, clusterId string, status string) string {
- errDetails := "ElasticSearch status: " + status + ", clusterId: " + clusterId + ", unixTimestamp: " + epoch
- return fmt.Sprintf(serviceUnavailableMsg, errDetails)
+ return "ElasticSearch status: " + status + ", clusterId: " + clusterId + ", unixTimestamp: " + epoch
}
diff --git a/src/healthcheck/get_test.go b/src/healthcheck/get_test.go
index 1f213cb..93fbdf4 100644
--- a/src/healthcheck/get_test.go
+++ b/src/healthcheck/get_test.go
@@ -23,21 +23,14 @@ import (
const requestId string = "testRequestId"
func TestHealthcheck(t *testing.T) {
- //Success Case Kafka Partition Reader
- defaultKafkaReader := test.FakePartitionReader{
- T: t,
- Partitions: test.GetFakeTwoPartitionSlice(),
- Err: nil,
- }
-
logwrapper.Initialize("error", os.Stdout)
testCases := []struct {
- name string
- transport *test.FakeTransport
- kafkaReader kafka.PartitionReader
- expectedCode int
- expectedBody *response.ErrorDetail
+ name string
+ transport *test.FakeTransport
+ kafkaHealthChecker kafka.HealthChecker
+ expectedCode int
+ expectedBody *response.ErrorDetail
}{
{
name: "Success-case",
@@ -63,9 +56,9 @@ func TestHealthcheck(t *testing.T) {
}]`)))),
},
),
- kafkaReader: defaultKafkaReader,
- expectedCode: http.StatusOK,
- expectedBody: nil,
+ kafkaHealthChecker: fakeKafkaHealthChecker{},
+ expectedCode: http.StatusOK,
+ expectedBody: nil,
},
{
name: "elastic-search-bad-status",
@@ -91,9 +84,9 @@ func TestHealthcheck(t *testing.T) {
}]`)))),
},
),
- kafkaReader: defaultKafkaReader,
- expectedCode: http.StatusServiceUnavailable,
- expectedBody: response.NewErrorDetail(requestId, "HRI Service Temporarily Unavailable | error Detail: ElasticSearch status: red, clusterId: 8165307e-6130-4581-942d-20fcfc4e795d, unixTimestamp: 1578512886"),
+ kafkaHealthChecker: fakeKafkaHealthChecker{},
+ expectedCode: http.StatusServiceUnavailable,
+ expectedBody: response.NewErrorDetail(requestId, "HRI Service Temporarily Unavailable | error Detail: ElasticSearch status: red, clusterId: 8165307e-6130-4581-942d-20fcfc4e795d, unixTimestamp: 1578512886"),
},
{
name: "invalid-ES-response-missing-status-field",
@@ -108,9 +101,9 @@ func TestHealthcheck(t *testing.T) {
}]`)))),
},
),
- kafkaReader: defaultKafkaReader,
- expectedCode: http.StatusServiceUnavailable,
- expectedBody: response.NewErrorDetail(requestId, "HRI Service Temporarily Unavailable | error Detail: ElasticSearch status: NONE/NotReported, clusterId: 8165307e-6130-4581-942d-20fcfc4e795d, unixTimestamp: 1578512886"),
+ kafkaHealthChecker: fakeKafkaHealthChecker{},
+ expectedCode: http.StatusServiceUnavailable,
+ expectedBody: response.NewErrorDetail(requestId, "HRI Service Temporarily Unavailable | error Detail: ElasticSearch status: NONE/NotReported, clusterId: 8165307e-6130-4581-942d-20fcfc4e795d, unixTimestamp: 1578512886"),
},
{
name: "invalid-ES-response-missing-cluster-or-epoch-field",
@@ -133,9 +126,9 @@ func TestHealthcheck(t *testing.T) {
}]`)))),
},
),
- kafkaReader: defaultKafkaReader,
- expectedCode: http.StatusServiceUnavailable,
- expectedBody: response.NewErrorDetail(requestId, "HRI Service Temporarily Unavailable | error Detail: ElasticSearch status: red, clusterId: NotReported, unixTimestamp: NotReported"),
+ kafkaHealthChecker: fakeKafkaHealthChecker{},
+ expectedCode: http.StatusServiceUnavailable,
+ expectedBody: response.NewErrorDetail(requestId, "HRI Service Temporarily Unavailable | error Detail: ElasticSearch status: red, clusterId: NotReported, unixTimestamp: NotReported"),
},
{
name: "ES-client-error",
@@ -145,13 +138,13 @@ func TestHealthcheck(t *testing.T) {
ResponseErr: errors.New("client error"),
},
),
- kafkaReader: defaultKafkaReader,
- expectedCode: http.StatusServiceUnavailable,
+ kafkaHealthChecker: fakeKafkaHealthChecker{},
+ expectedCode: http.StatusServiceUnavailable,
expectedBody: response.NewErrorDetail(requestId,
"Could not perform elasticsearch health check: [500] elasticsearch client error: client error"),
},
{
- name: "Kafka-connection-returns-err",
+ name: "Kafka-health-check-returns-err",
transport: test.NewFakeTransport(t).AddCall(
"/_cat/health",
test.ElasticCall{
@@ -174,13 +167,11 @@ func TestHealthcheck(t *testing.T) {
}]`)))),
},
),
- kafkaReader: test.FakePartitionReader{
- T: t,
- Partitions: nil,
- Err: errors.New("ResponseError contacting Kafka cluster: could not read partitions"),
+ kafkaHealthChecker: fakeKafkaHealthChecker{
+ err: errors.New("ResponseError contacting Kafka cluster: could not read partitions"),
},
expectedCode: http.StatusServiceUnavailable,
- expectedBody: response.NewErrorDetail(requestId, "HRI Service Temporarily Unavailable | error Detail: Kafka status: Kafka Connection/Read Partition failed"),
+ expectedBody: response.NewErrorDetail(requestId, "HRI Service Temporarily Unavailable | error Detail: ResponseError contacting Kafka cluster: could not read partitions"),
},
{
name: "Kafka-returns-Err-AND-ES-return-bad-status",
@@ -206,13 +197,11 @@ func TestHealthcheck(t *testing.T) {
}]`)))),
},
),
- kafkaReader: test.FakePartitionReader{
- T: t,
- Partitions: nil,
- Err: errors.New("ResponseError contacting Kafka cluster: could not read partitions"),
+ kafkaHealthChecker: fakeKafkaHealthChecker{
+ err: errors.New("ResponseError contacting Kafka cluster: could not read partitions"),
},
expectedCode: http.StatusServiceUnavailable,
- expectedBody: response.NewErrorDetail(requestId, "HRI Service Temporarily Unavailable | error Detail: ElasticSearch status: red, clusterId: 8165307e-6130-4581-942d-20fcfc4e795d, unixTimestamp: 1578512886| Kafka status: Kafka Connection/Read Partition failed"),
+ expectedBody: response.NewErrorDetail(requestId, "HRI Service Temporarily Unavailable | error Detail: ElasticSearch status: red, clusterId: 8165307e-6130-4581-942d-20fcfc4e795d, unixTimestamp: 1578512886 | ResponseError contacting Kafka cluster: could not read partitions"),
},
}
@@ -223,7 +212,7 @@ func TestHealthcheck(t *testing.T) {
}
t.Run(tc.name, func(t *testing.T) {
- actualCode, actualBody := Get(requestId, client, tc.kafkaReader)
+ actualCode, actualBody := Get(requestId, client, tc.kafkaHealthChecker)
if actualCode != tc.expectedCode || !reflect.DeepEqual(tc.expectedBody, actualBody) {
//notify/print error event as test result
t.Errorf("HealthCheck-Get()\n actual: %v,%v\n expected: %v,%v", actualCode, actualBody, tc.expectedCode, tc.expectedBody)
@@ -231,3 +220,15 @@ func TestHealthcheck(t *testing.T) {
})
}
}
+
+type fakeKafkaHealthChecker struct {
+ err error
+}
+
+func (fhc fakeKafkaHealthChecker) Check() error {
+ return fhc.err
+}
+
+func (fhc fakeKafkaHealthChecker) Close() {
+ return
+}
diff --git a/src/healthcheck/handler.go b/src/healthcheck/handler.go
index 41d151e..11eb03a 100644
--- a/src/healthcheck/handler.go
+++ b/src/healthcheck/handler.go
@@ -24,17 +24,13 @@ type Handler interface {
// This struct is designed to make unit testing easier. It has function references for the calls to backend
// logic and other methods that reach out to external services like creating the Kafka partition reader.
type theHandler struct {
- config configPkg.Config
- partitionReaderFromConfig func(config configPkg.Config) (kafka.PartitionReader, error)
- healthcheck func(string, *elasticsearch.Client, kafka.PartitionReader) (int, *response.ErrorDetail)
+ config configPkg.Config
+ healthcheck func(string, *elasticsearch.Client, kafka.HealthChecker) (int, *response.ErrorDetail)
}
func NewHandler(config configPkg.Config) Handler {
return &theHandler{
- config: config,
- partitionReaderFromConfig: func(c configPkg.Config) (kafka.PartitionReader, error) {
- return kafka.ConnectionFromConfig(c)
- },
+ config: config,
healthcheck: Get,
}
}
@@ -52,14 +48,14 @@ func (h *theHandler) Healthcheck(c echo.Context) error {
return c.JSON(http.StatusInternalServerError, response.NewErrorDetail(requestId, err.Error()))
}
- partitionReader, err := h.partitionReaderFromConfig(h.config)
+ healthChecker, err := kafka.NewHealthChecker(h.config)
if err != nil {
logger.Errorln(err.Error())
return c.JSON(http.StatusInternalServerError, response.NewErrorDetail(requestId, err.Error()))
}
- defer partitionReader.Close()
+ defer healthChecker.Close()
- code, errorDetail := h.healthcheck(requestId, esClient, partitionReader)
+ code, errorDetail := h.healthcheck(requestId, esClient, healthChecker)
if errorDetail != nil {
return c.JSON(code, errorDetail)
}
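The handler now constructs the checker directly with `kafka.NewHealthChecker(h.config)`. The constructor itself is not in this diff; the error text asserted in handler_test.go below ("error constructing Kafka admin client: Invalid value for configuration property ...") suggests it wraps a Kafka admin client. A hedged sketch under that assumption — the parameter list and the use of confluent-kafka-go are guesses, not the project's actual code:

```go
// Hypothetical NewHealthChecker: apply the configured Kafka properties to an
// admin client and probe cluster metadata in Check(). Everything here beyond
// the Check()/Close() contract is an assumption.
package kafka

import (
	"fmt"

	confluent "github.com/confluentinc/confluent-kafka-go/kafka"
)

type adminHealthChecker struct {
	admin *confluent.AdminClient
}

func NewHealthChecker(kafkaProperties map[string]string) (HealthChecker, error) {
	conf := &confluent.ConfigMap{}
	for key, value := range kafkaProperties {
		if err := conf.SetKey(key, value); err != nil {
			return nil, err
		}
	}
	admin, err := confluent.NewAdminClient(conf)
	if err != nil {
		// An invalid property value (e.g. a bad "message.max.bytes") fails here.
		return nil, fmt.Errorf("error constructing Kafka admin client: %w", err)
	}
	return adminHealthChecker{admin: admin}, nil
}

func (hc adminHealthChecker) Check() error {
	// Requesting cluster metadata is a cheap way to confirm the brokers are reachable.
	_, err := hc.admin.GetMetadata(nil, true, 10000)
	return err
}

func (hc adminHealthChecker) Close() {
	hc.admin.Close()
}
```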
diff --git a/src/healthcheck/handler_test.go b/src/healthcheck/handler_test.go
index 1460d33..52855fd 100644
--- a/src/healthcheck/handler_test.go
+++ b/src/healthcheck/handler_test.go
@@ -7,7 +7,6 @@
package healthcheck
import (
- "errors"
"github.com/Alvearie/hri-mgmt-api/common/config"
"github.com/Alvearie/hri-mgmt-api/common/kafka"
"github.com/Alvearie/hri-mgmt-api/common/logwrapper"
@@ -54,10 +53,7 @@ func TestHealthcheckHandler(t *testing.T) {
name: "Good healthcheck",
handler: &theHandler{
config: config.Config{},
- partitionReaderFromConfig: func(config config.Config) (kafka.PartitionReader, error) {
- return test.FakePartitionReader{}, nil
- },
- healthcheck: func(requestId string, client *elasticsearch.Client, partReader kafka.PartitionReader) (int, *response.ErrorDetail) {
+ healthcheck: func(requestId string, client *elasticsearch.Client, healthChecker kafka.HealthChecker) (int, *response.ErrorDetail) {
return http.StatusOK, nil
},
},
@@ -68,10 +64,7 @@ func TestHealthcheckHandler(t *testing.T) {
name: "Bad healthcheck",
handler: &theHandler{
config: config.Config{},
- partitionReaderFromConfig: func(config config.Config) (kafka.PartitionReader, error) {
- return test.FakePartitionReader{}, nil
- },
- healthcheck: func(requestId string, client *elasticsearch.Client, partReader kafka.PartitionReader) (int, *response.ErrorDetail) {
+ healthcheck: func(requestId string, client *elasticsearch.Client, healthChecker kafka.HealthChecker) (int, *response.ErrorDetail) {
return http.StatusServiceUnavailable, response.NewErrorDetail(requestId, "Elastic not available")
},
},
@@ -81,13 +74,8 @@ func TestHealthcheckHandler(t *testing.T) {
{
name: "Elastic client error",
handler: &theHandler{
- config: config.Config{ElasticUrl: "https://an.invalid url.com/", ElasticCert: "Invalid Cert"},
- partitionReaderFromConfig: func(config config.Config) (kafka.PartitionReader, error) {
- return test.FakePartitionReader{}, nil
- },
- healthcheck: func(requestId string, client *elasticsearch.Client, partReader kafka.PartitionReader) (int, *response.ErrorDetail) {
- return http.StatusOK, nil
- },
+ config: config.Config{ElasticUrl: "https://an.invalid url.com/", ElasticCert: "Invalid Cert"},
+ healthcheck: nil,
},
expectedCode: http.StatusInternalServerError,
expectedBody: "{\"errorEventId\":\"" + requestId + "\",\"errorDescription\":\"cannot create client: cannot parse url: parse \\\"https://an.invalid url.com\\\": invalid character \\\" \\\" in host name\"}\n",
@@ -95,14 +83,11 @@ func TestHealthcheckHandler(t *testing.T) {
{
name: "Kafka client error",
handler: &theHandler{
- config: config.Config{},
- partitionReaderFromConfig: func(config config.Config) (kafka.PartitionReader, error) {
- return nil, errors.New("something went wrong")
- },
+ config: config.Config{KafkaProperties: config.StringMap{"message.max.bytes": "bad_value"}},
healthcheck: nil,
},
expectedCode: http.StatusInternalServerError,
- expectedBody: "{\"errorEventId\":\"" + requestId + "\",\"errorDescription\":\"something went wrong\"}\n",
+ expectedBody: "{\"errorEventId\":\"" + requestId + "\",\"errorDescription\":\"error constructing Kafka admin client: Invalid value for configuration property \\\"message.max.bytes\\\"\"}\n",
},
}
diff --git a/src/sonar-project.properties b/src/sonar-project.properties
new file mode 100644
index 0000000..a143aac
--- /dev/null
+++ b/src/sonar-project.properties
@@ -0,0 +1,20 @@
+sonar.projectKey=Alvearie_hri-mgmt-api
+sonar.organization=alvearie
+
+# This is the name and version displayed in the SonarCloud UI.
+#sonar.projectName=hri-mgmt-api
+#sonar.projectVersion=1.0
+
+# Path is relative to the sonar-project.properties file. Replace "\" by "/" on Windows.
+sonar.sources=.
+sonar.exclusions=**/test/**,**/*_test.go,**/*_mock.go
+
+sonar.tests=.
+#sonar.test.exclusions=
+sonar.test.inclusions=**/*_test.go
+
+sonar.go.coverage.reportPaths=./testCoverage.out
+#sonar.go.tests.reportPaths=
+
+# Encoding of the source code. Default is default system encoding
+sonar.sourceEncoding=UTF-8
diff --git a/src/streams/get.go b/src/streams/get.go
index c197b14..bbcb7ec 100644
--- a/src/streams/get.go
+++ b/src/streams/get.go
@@ -65,6 +65,8 @@ func GetStreamNames(topics []es.TopicDetail, tenantId string) []map[string]inter
streamId := strings.TrimPrefix(topicName, eventstreams.TopicPrefix+tenantId+".")
streamId = strings.TrimSuffix(streamId, eventstreams.InSuffix)
streamId = strings.TrimSuffix(streamId, eventstreams.NotificationSuffix)
+ streamId = strings.TrimSuffix(streamId, eventstreams.OutSuffix)
+ streamId = strings.TrimSuffix(streamId, eventstreams.InvalidSuffix)
//take unique stream names, we don't want duplicates due to a stream's multiple topics (in/notification)
if _, seen := seenStreamIds[streamId]; !seen {
@@ -78,5 +80,6 @@ func GetStreamNames(topics []es.TopicDetail, tenantId string) []map[string]inter
func validTopicName(topicName string) bool {
return strings.HasPrefix(topicName, eventstreams.TopicPrefix) &&
- (strings.HasSuffix(topicName, eventstreams.InSuffix) || strings.HasSuffix(topicName, eventstreams.NotificationSuffix))
+ (strings.HasSuffix(topicName, eventstreams.InSuffix) || strings.HasSuffix(topicName, eventstreams.NotificationSuffix) ||
+ strings.HasSuffix(topicName, eventstreams.OutSuffix) || strings.HasSuffix(topicName, eventstreams.InvalidSuffix))
}
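`GetStreamNames` now strips the `.out` and `.invalid` topic suffixes as well, so a stream is reported once no matter which of its topics exist, and `validTopicName` accepts all four suffixes. A standalone illustration of the id derivation — the constant names mirror the eventstreams constants used in the patch, but their values here are assumptions:

```go
// Illustration of the stream-id trimming in streams/get.go, with assumed values
// for the eventstreams constants.
package main

import (
	"fmt"
	"strings"
)

const (
	TopicPrefix        = "ingest."       // assumed value
	InSuffix           = ".in"           // assumed value
	NotificationSuffix = ".notification" // assumed value
	OutSuffix          = ".out"          // assumed value
	InvalidSuffix      = ".invalid"      // assumed value
)

func streamIdFromTopic(topicName, tenantId string) string {
	streamId := strings.TrimPrefix(topicName, TopicPrefix+tenantId+".")
	streamId = strings.TrimSuffix(streamId, InSuffix)
	streamId = strings.TrimSuffix(streamId, NotificationSuffix)
	streamId = strings.TrimSuffix(streamId, OutSuffix)
	streamId = strings.TrimSuffix(streamId, InvalidSuffix)
	return streamId
}

func main() {
	// All four topics of the same stream yield the same id ("data.qualifier").
	for _, topic := range []string{
		"ingest.tenant1.data.qualifier.in",
		"ingest.tenant1.data.qualifier.notification",
		"ingest.tenant1.data.qualifier.out",
		"ingest.tenant1.data.qualifier.invalid",
	} {
		fmt.Println(streamIdFromTopic(topic, "tenant1"))
	}
}
```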
diff --git a/src/streams/get_test.go b/src/streams/get_test.go
index 578e27b..807a28c 100644
--- a/src/streams/get_test.go
+++ b/src/streams/get_test.go
@@ -151,12 +151,76 @@ func TestGetStreamNames(t *testing.T) {
topics: []es.TopicDetail{
{Name: eventstreams.TopicPrefix + tenant1WithQualifier + eventstreams.InSuffix},
{Name: eventstreams.TopicPrefix + tenant1WithQualifier + eventstreams.NotificationSuffix},
+ {Name: eventstreams.TopicPrefix + tenant1WithQualifier + eventstreams.OutSuffix},
+ {Name: eventstreams.TopicPrefix + tenant1WithQualifier + eventstreams.InvalidSuffix},
{Name: eventstreams.TopicPrefix + tenant1NoQualifier + eventstreams.InSuffix},
{Name: eventstreams.TopicPrefix + tenant1NoQualifier + eventstreams.NotificationSuffix},
+ {Name: eventstreams.TopicPrefix + tenant1NoQualifier + eventstreams.OutSuffix},
+ {Name: eventstreams.TopicPrefix + tenant1NoQualifier + eventstreams.InvalidSuffix},
{Name: eventstreams.TopicPrefix + tenant2WithQualifier + eventstreams.InSuffix},
{Name: eventstreams.TopicPrefix + tenant2WithQualifier + eventstreams.NotificationSuffix},
+ {Name: eventstreams.TopicPrefix + tenant2WithQualifier + eventstreams.OutSuffix},
+ {Name: eventstreams.TopicPrefix + tenant2WithQualifier + eventstreams.InvalidSuffix},
{Name: eventstreams.TopicPrefix + tenant2NoQualifier + eventstreams.InSuffix},
{Name: eventstreams.TopicPrefix + tenant2NoQualifier + eventstreams.NotificationSuffix},
+ {Name: eventstreams.TopicPrefix + tenant2NoQualifier + eventstreams.OutSuffix},
+ {Name: eventstreams.TopicPrefix + tenant2NoQualifier + eventstreams.InvalidSuffix},
+ },
+ tenantId: tenantId1,
+ expected: []map[string]interface{}{
+ {param.StreamId: streamId},
+ {param.StreamId: streamIdNoQualifier},
+ },
+ },
+ {
+ name: "with-optional-qualifier-in-only",
+ topics: []es.TopicDetail{
+ {Name: eventstreams.TopicPrefix + tenant1WithQualifier + eventstreams.InSuffix},
+ {Name: eventstreams.TopicPrefix + tenant1NoQualifier + eventstreams.InSuffix},
+ {Name: eventstreams.TopicPrefix + tenant2WithQualifier + eventstreams.InSuffix},
+ {Name: eventstreams.TopicPrefix + tenant2NoQualifier + eventstreams.InSuffix},
+ },
+ tenantId: tenantId1,
+ expected: []map[string]interface{}{
+ {param.StreamId: streamId},
+ {param.StreamId: streamIdNoQualifier},
+ },
+ },
+ {
+ name: "with-optional-qualifier-out-only",
+ topics: []es.TopicDetail{
+ {Name: eventstreams.TopicPrefix + tenant1WithQualifier + eventstreams.OutSuffix},
+ {Name: eventstreams.TopicPrefix + tenant1NoQualifier + eventstreams.OutSuffix},
+ {Name: eventstreams.TopicPrefix + tenant2WithQualifier + eventstreams.OutSuffix},
+ {Name: eventstreams.TopicPrefix + tenant2NoQualifier + eventstreams.OutSuffix},
+ },
+ tenantId: tenantId1,
+ expected: []map[string]interface{}{
+ {param.StreamId: streamId},
+ {param.StreamId: streamIdNoQualifier},
+ },
+ },
+ {
+ name: "with-optional-qualifier-invalid-only",
+ topics: []es.TopicDetail{
+ {Name: eventstreams.TopicPrefix + tenant1WithQualifier + eventstreams.InvalidSuffix},
+ {Name: eventstreams.TopicPrefix + tenant1NoQualifier + eventstreams.InvalidSuffix},
+ {Name: eventstreams.TopicPrefix + tenant2WithQualifier + eventstreams.InvalidSuffix},
+ {Name: eventstreams.TopicPrefix + tenant2NoQualifier + eventstreams.InvalidSuffix},
+ },
+ tenantId: tenantId1,
+ expected: []map[string]interface{}{
+ {param.StreamId: streamId},
+ {param.StreamId: streamIdNoQualifier},
+ },
+ },
+ {
+ name: "with-optional-qualifier-notification-only",
+ topics: []es.TopicDetail{
+ {Name: eventstreams.TopicPrefix + tenant1WithQualifier + eventstreams.NotificationSuffix},
+ {Name: eventstreams.TopicPrefix + tenant1NoQualifier + eventstreams.NotificationSuffix},
+ {Name: eventstreams.TopicPrefix + tenant2WithQualifier + eventstreams.NotificationSuffix},
+ {Name: eventstreams.TopicPrefix + tenant2NoQualifier + eventstreams.NotificationSuffix},
},
tenantId: tenantId1,
expected: []map[string]interface{}{
@@ -170,10 +234,16 @@ func TestGetStreamNames(t *testing.T) {
topics: []es.TopicDetail{
{Name: eventstreams.TopicPrefix + tenant1WithQualifier + eventstreams.InSuffix},
{Name: eventstreams.TopicPrefix + tenant1WithQualifier + eventstreams.NotificationSuffix},
+ {Name: eventstreams.TopicPrefix + tenant1WithQualifier + eventstreams.OutSuffix},
+ {Name: eventstreams.TopicPrefix + tenant1WithQualifier + eventstreams.InvalidSuffix},
{Name: eventstreams.TopicPrefix + tenant1NoQualifier + eventstreams.InSuffix},
{Name: eventstreams.TopicPrefix + tenant1NoQualifier + eventstreams.NotificationSuffix},
+ {Name: eventstreams.TopicPrefix + tenant1NoQualifier + eventstreams.OutSuffix},
+ {Name: eventstreams.TopicPrefix + tenant1NoQualifier + eventstreams.InvalidSuffix},
{Name: eventstreams.TopicPrefix + tenant0WithQualifier + eventstreams.InSuffix},
{Name: eventstreams.TopicPrefix + tenant0WithQualifier + eventstreams.NotificationSuffix},
+ {Name: eventstreams.TopicPrefix + tenant0WithQualifier + eventstreams.OutSuffix},
+ {Name: eventstreams.TopicPrefix + tenant0WithQualifier + eventstreams.InvalidSuffix},
},
tenantId: tenantId0,
expected: []map[string]interface{}{{param.StreamId: streamId}},
@@ -183,8 +253,12 @@ func TestGetStreamNames(t *testing.T) {
topics: []es.TopicDetail{
{Name: eventstreams.TopicPrefix + tenant1ExtraPeriod + eventstreams.InSuffix},
{Name: eventstreams.TopicPrefix + tenant1ExtraPeriod + eventstreams.NotificationSuffix},
+ {Name: eventstreams.TopicPrefix + tenant1ExtraPeriod + eventstreams.OutSuffix},
+ {Name: eventstreams.TopicPrefix + tenant1ExtraPeriod + eventstreams.InvalidSuffix},
{Name: eventstreams.TopicPrefix + tenant0WithQualifier + eventstreams.InSuffix},
{Name: eventstreams.TopicPrefix + tenant0WithQualifier + eventstreams.NotificationSuffix},
+ {Name: eventstreams.TopicPrefix + tenant0WithQualifier + eventstreams.OutSuffix},
+ {Name: eventstreams.TopicPrefix + tenant0WithQualifier + eventstreams.InvalidSuffix},
},
tenantId: tenantId1,
expected: []map[string]interface{}{{param.StreamId: streamIdExtraPeriod}},
@@ -194,12 +268,20 @@ func TestGetStreamNames(t *testing.T) {
topics: []es.TopicDetail{
{Name: eventstreams.TopicPrefix + tenant1WithQualifier + eventstreams.InSuffix},
{Name: eventstreams.TopicPrefix + tenant1WithQualifier + eventstreams.NotificationSuffix},
+ {Name: eventstreams.TopicPrefix + tenant1WithQualifier + eventstreams.OutSuffix},
+ {Name: eventstreams.TopicPrefix + tenant1WithQualifier + eventstreams.InvalidSuffix},
{Name: eventstreams.TopicPrefix + tenant1NoQualifier + eventstreams.InSuffix},
{Name: eventstreams.TopicPrefix + tenant1NoQualifier + eventstreams.NotificationSuffix},
+ {Name: eventstreams.TopicPrefix + tenant1NoQualifier + eventstreams.OutSuffix},
+ {Name: eventstreams.TopicPrefix + tenant1NoQualifier + eventstreams.InvalidSuffix},
{Name: eventstreams.TopicPrefix + tenant2WithQualifier + eventstreams.InSuffix},
{Name: eventstreams.TopicPrefix + tenant2WithQualifier + eventstreams.NotificationSuffix},
+ {Name: eventstreams.TopicPrefix + tenant2WithQualifier + eventstreams.OutSuffix},
+ {Name: eventstreams.TopicPrefix + tenant2WithQualifier + eventstreams.InvalidSuffix},
{Name: eventstreams.TopicPrefix + tenant2NoQualifier + eventstreams.InSuffix},
{Name: eventstreams.TopicPrefix + tenant2NoQualifier + eventstreams.NotificationSuffix},
+ {Name: eventstreams.TopicPrefix + tenant2NoQualifier + eventstreams.OutSuffix},
+ {Name: eventstreams.TopicPrefix + tenant2NoQualifier + eventstreams.InvalidSuffix},
},
tenantId: tenantId0,
expected: []map[string]interface{}{},
@@ -209,10 +291,16 @@ func TestGetStreamNames(t *testing.T) {
topics: []es.TopicDetail{
{Name: tenant1WithQualifier + eventstreams.InSuffix},
{Name: tenant1WithQualifier + eventstreams.NotificationSuffix},
+ {Name: tenant1WithQualifier + eventstreams.OutSuffix},
+ {Name: tenant1WithQualifier + eventstreams.InvalidSuffix},
{Name: "bad-prefix" + tenant1WithQualifier + eventstreams.InSuffix},
{Name: "bad-prefix" + tenant1WithQualifier + eventstreams.NotificationSuffix},
+ {Name: "bad-prefix" + tenant1WithQualifier + eventstreams.OutSuffix},
+ {Name: "bad-prefix" + tenant1WithQualifier + eventstreams.InvalidSuffix},
{Name: eventstreams.TopicPrefix + tenant1NoQualifier + eventstreams.InSuffix},
{Name: eventstreams.TopicPrefix + tenant1NoQualifier + eventstreams.NotificationSuffix},
+ {Name: eventstreams.TopicPrefix + tenant1NoQualifier + eventstreams.OutSuffix},
+ {Name: eventstreams.TopicPrefix + tenant1NoQualifier + eventstreams.InvalidSuffix},
},
tenantId: tenantId1,
expected: []map[string]interface{}{{param.StreamId: streamIdNoQualifier}},
@@ -226,6 +314,8 @@ func TestGetStreamNames(t *testing.T) {
{Name: eventstreams.TopicPrefix + tenant1WithQualifier + "bad-suffix"},
{Name: eventstreams.TopicPrefix + tenant1NoQualifier + eventstreams.InSuffix},
{Name: eventstreams.TopicPrefix + tenant1NoQualifier + eventstreams.NotificationSuffix},
+ {Name: eventstreams.TopicPrefix + tenant1NoQualifier + eventstreams.OutSuffix},
+ {Name: eventstreams.TopicPrefix + tenant1NoQualifier + eventstreams.InvalidSuffix},
},
tenantId: tenantId1,
expected: []map[string]interface{}{{param.StreamId: streamIdNoQualifier}},
diff --git a/test/Gemfile b/test/Gemfile
index 7fdc73b..dc756be 100644
--- a/test/Gemfile
+++ b/test/Gemfile
@@ -13,4 +13,5 @@ gem 'rspec-expectations'
gem 'rspec-mocks'
gem 'rspec-support'
gem 'ruby-kafka'
-gem 'specific_install'
\ No newline at end of file
+gem 'specific_install'
+gem 'nokogiri'
\ No newline at end of file
diff --git a/test/Gemfile.lock b/test/Gemfile.lock
index 48e3fe0..b148c0b 100644
--- a/test/Gemfile.lock
+++ b/test/Gemfile.lock
@@ -17,6 +17,9 @@ GEM
mime-types-data (3.2021.0212)
net_http_ssl_fix (0.0.10)
netrc (0.11.0)
+ nokogiri (1.12.4)
+ racc (~> 1.4)
+ racc (1.5.2)
rack (2.2.3)
rainbow (3.0.0)
rake (13.0.3)
@@ -53,6 +56,7 @@ PLATFORMS
DEPENDENCIES
httplog
net_http_ssl_fix
+ nokogiri
rest-client
rspec
rspec-core
diff --git a/test/README.md b/test/README.md
index 2af8e58..34ed9bb 100644
--- a/test/README.md
+++ b/test/README.md
@@ -36,27 +36,25 @@
NOTE: Ensure that your Ruby versions match across terminal default, Gemfile, and Gemfile.lock. If using IntelliJ, Ruby version in your module should match as well.
-8. (Optional) To run tests locally, export these environment variables. All of the values can be found in GitHub actions unless otherwise specified.
+8. (Optional) To run tests locally, export these environment variables. Most of the values can be found in `.github/workflows/ci-workflow.yml`; the remaining values are in your password manager.
- APPID_TENANT
- APPID_URL
- - CLOUD_API_KEY - Password Manager
- - COS_URL
+ - CLOUD_API_KEY
- ELASTIC_CRN
- - ELASTIC_PASSWORD - IBM Cloud -> Elasticsearch service -> Service credentials -> elastic-search-credential -> "password" field
+ - ELASTIC_PASSWORD
- ELASTIC_URL
- ELASTIC_USERNAME
- HRI_URL
- IAM_CLOUD_URL
- JWT_AUDIENCE_ID
- KAFKA_BROKERS
- - KAFKA_PASSWORD - IBM Cloud -> EventStreams service -> Service credentials -> dev-test -> "password" field
- - KAFKA_USERNAME
+ - KAFKA_PASSWORD
- TENANT_ID
- You will also need to set an environment variable called TRAVIS_BRANCH that corresponds to your current working branch.
+ You will also need to set an environment variable called BRANCH_NAME that corresponds to your current working branch.
- Then, install the IBM Cloud CLI and the Event Streams CLI. You can find the RESOURCE_GROUP in GitHub actions and the CLOUD_API_KEY in our password manager:
+ Then, install the IBM Cloud CLI and the Event Streams CLI. You can find the RESOURCE_GROUP in `.github/workflows/ci-workflow.yml` and the CLOUD_API_KEY in your password manager:
```bash
curl -sL https://ibm.biz/idt-installer | bash
bx login --apikey {CLOUD_API_KEY}
@@ -64,13 +62,13 @@
bx plugin install event-streams
bx es init
```
-
- Select the number corresponding to the KAFKA_INSTANCE.
+
+ Select the number corresponding to the KAFKA_INSTANCE in `.github/workflows/ci-workflow.yml`.
The last step before running the tests is to install the `hri-test-helpers` gem locally. Run the following commands:
```bash
gem install specific_install
- gem specific_install -l git@github.com:Alvearie/hri-test-helpers.git -b master
+ gem specific_install -l git@github.com:Alvearie/hri-test-helpers.git -b main
```
 Then, add the following line to the Gemfile, but *do not commit this change to GitHub*:
 ```gem 'hri-test-helpers'```
@@ -80,7 +78,7 @@
```rspec test/spec --tag ~@broken```
# Dredd Tests
-Dredd is used to verify the implemented API meets our published [specification](https://github.com/Alvearie/hri-api-spec/blob/main/management-api/management.yml).
+Dredd is used to verify that the implemented API meets our published [specification](https://github.com/Alvearie/hri-api-spec/blob/develop/management-api/management.yml).
By default, it generates a test for every endpoint, uses the example values for input, and verifies the response matches the 200 response schema. All other responses are skipped. Ruby 'hooks' are used to modify the default behavior and do setup/teardown.
Here are some helpful documentation links:
* https://dredd.org/en/latest/hooks/ruby.html
@@ -103,7 +101,7 @@ gem install dredd_hooks
### Running Dredd Tests
First you need to convert the API spec to Swagger 2.0, so check out the hri-api-spec [repo](https://github.com/Alvearie/hri-api-spec).
-Then use api-spec-converter to convert it. You should make a branch with the same name if changes are needed. The build will checkout the same branch if it exists.
+Then use api-spec-converter to convert it. If spec changes are needed, make a branch with the same name as your current working branch; the build will check out the matching branch if it exists.
```bash
api-spec-converter -f openapi_3 -t swagger_2 -s yaml hri-api-spec/management-api/management.yml > hri-api-spec/management-api/management.swagger.yml
```
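
The README above notes that Ruby 'hooks' adjust Dredd's default behavior; the project's real hooks live in test/spec/dredd_hooks.rb (changed below). A minimal, hypothetical sketch of the hook mechanism, with an invented transaction name:

```ruby
# Minimal sketch of a Dredd hooks file; assumes the dredd_hooks gem is loaded
# by the dredd-hooks-ruby worker. The transaction name below is illustrative.
include DreddHooks::Methods

before_all do |transactions|
  # One-time setup, e.g. fetching access tokens, as the real hooks file does
  puts 'before all'
end

before 'tenants > Create Tenant > 201' do |transaction|
  # Setting 'skip' bypasses the generated test for this transaction
  transaction['skip'] = true
end
```
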
diff --git a/test/env.rb b/test/env.rb
index d63de2d..0bae697 100644
--- a/test/env.rb
+++ b/test/env.rb
@@ -21,6 +21,7 @@
require 'securerandom'
require 'kafka'
require 'base64'
+require 'nokogiri'
require 'hritesthelpers'
diff --git a/test/spec/dredd_hooks.rb b/test/spec/dredd_hooks.rb
index 03146b4..896d344 100644
--- a/test/spec/dredd_hooks.rb
+++ b/test/spec/dredd_hooks.rb
@@ -7,7 +7,7 @@
include DreddHooks::Methods
DREDD_TENANT_ID = 'provider1234'
-TENANT_ID_TENANTS_STREAMS = "#{ENV['TRAVIS_BRANCH'].tr('.-', '')}".downcase
+TENANT_ID_TENANTS_STREAMS = ENV['BRANCH_NAME'].tr('.-', '').downcase
TENANT_ID_BATCHES = ENV['TENANT_ID']
elastic = HRITestHelpers::ElasticHelper.new({url: ENV['ELASTIC_URL'], username: ENV['ELASTIC_USERNAME'], password: ENV['ELASTIC_PASSWORD']})
@@ -16,8 +16,8 @@
puts 'before all'
@iam_token = HRITestHelpers::IAMHelper.new(ENV['IAM_CLOUD_URL']).get_access_token(ENV['CLOUD_API_KEY'])
app_id_helper = HRITestHelpers::AppIDHelper.new(ENV['APPID_URL'], ENV['APPID_TENANT'], @iam_token, ENV['JWT_AUDIENCE_ID'])
- @token_integrator = app_id_helper.get_access_token('hri_integration_tenant_test_data_integrator', 'tenant_test tenant_provider1234 hri_data_integrator', ENV['APPID_HRI_AUDIENCE'])
- @token_internal = app_id_helper.get_access_token('hri_integration_tenant_test_internal', 'tenant_test tenant_provider1234 hri_consumer hri_internal', ENV['APPID_HRI_AUDIENCE'])
+ @token_integrator = app_id_helper.get_access_token('hri_integration_tenant_test_data_integrator', 'tenant_test tenant_provider1234 hri_data_integrator', ENV['JWT_AUDIENCE_ID'])
+ @token_internal = app_id_helper.get_access_token('hri_integration_tenant_test_internal', 'tenant_test tenant_provider1234 hri_consumer hri_internal', ENV['JWT_AUDIENCE_ID'])
@token_invalid_tenant = app_id_helper.get_access_token('hri_integration_tenant_test_invalid', 'tenant_test_invalid')
@content_type = 'application/json; charset=UTF-8'
diff --git a/test/spec/hri_deploy_helper.rb b/test/spec/hri_deploy_helper.rb
index dbab890..aa79531 100644
--- a/test/spec/hri_deploy_helper.rb
+++ b/test/spec/hri_deploy_helper.rb
@@ -5,12 +5,14 @@
class HRIDeployHelper
def deploy_hri(exe_path, config_path, log_path, override_params = nil)
- Open3.popen3("#{exe_path} -config-path=#{config_path} #{override_params} 2> #{log_path}/error.txt > #{log_path}/output.txt &")
+ Open3.popen3("#{exe_path} -config-path=#{config_path} #{override_params} -kafka-properties=security.protocol:sasl_ssl,sasl.mechanism:PLAIN,sasl.username:token,sasl.password:#{ENV['KAFKA_PASSWORD']},ssl.endpoint.identification.algorithm:https 2> #{log_path}/error.txt > #{log_path}/output.txt &")
sleep 1
@error_log = File.read(File.join(File.dirname(__FILE__), 'error.txt'))
@output_log = File.read(File.join(File.dirname(__FILE__), 'output.txt'))
unless @error_log.empty? && !@output_log.include?('"level":"FATAL"')
- raise "A fatal error was encountered when deploying the hri-mgmt-api: #{@output_log}"
+ raise "A fatal error was encountered when deploying the hri-mgmt-api.
+ OUTPUT LOG: #{@output_log}
+ ERROR LOG: #{@error_log}"
end
end
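
For context on the -kafka-properties flag assembled in deploy_hri above: its value is just the connection settings joined as comma-separated key:value pairs, mirroring the kafka-properties lists added to the test configs later in this diff. A hedged sketch of that assembly (the helper method is illustrative, not part of the codebase):

```ruby
# Illustrative only: build the -kafka-properties flag value from a hash of
# connection settings, using the comma-separated key:value format shown above.
def kafka_properties_flag(password)
  props = {
    'security.protocol' => 'sasl_ssl',
    'sasl.mechanism' => 'PLAIN',
    'sasl.username' => 'token',
    'sasl.password' => password,
    'ssl.endpoint.identification.algorithm' => 'https'
  }
  "-kafka-properties=#{props.map { |k, v| "#{k}:#{v}" }.join(',')}"
end

# e.g. kafka_properties_flag(ENV['KAFKA_PASSWORD'])
```
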
diff --git a/test/spec/hri_management_api_deploy_spec.rb b/test/spec/hri_management_api_deploy_spec.rb
index 6671680..441f199 100644
--- a/test/spec/hri_management_api_deploy_spec.rb
+++ b/test/spec/hri_management_api_deploy_spec.rb
@@ -48,10 +48,10 @@
ENV['KAFKA_PASSWORD'] = 'INVALID'
@hri_deploy_helper.deploy_hri(@exe_path, "#{@config_path}/valid_config.yml", @log_path)
response = @request_helper.rest_get("#{@hri_base_url}/healthcheck", {})
- expect(response.code).to eq 500
+ expect(response.code).to eq 503
response_body = JSON.parse(response.body)
expect(response_body['errorEventId']).to_not be_nil
- expect(response_body['errorDescription']).to eql 'connecting to Kafka failed, error detail: [58] SASL Authentication Failed: SASL Authentication failed'
+ expect(response_body['errorDescription']).to eql 'HRI Service Temporarily Unavailable | error Detail: error getting Kafka topics: Local: Broker transport failure'
ensure
ENV['KAFKA_PASSWORD'] = temp
end
@@ -63,10 +63,10 @@
ENV['KAFKA_BROKERS'] = 'INVALID:9093'
@hri_deploy_helper.deploy_hri(@exe_path, "#{@config_path}/valid_config.yml", @log_path)
response = @request_helper.rest_get("#{@hri_base_url}/healthcheck", {})
- expect(response.code).to eq 500
+ expect(response.code).to eq 503
response_body = JSON.parse(response.body)
expect(response_body['errorEventId']).to_not be_nil
- expect(response_body['errorDescription']).to include('no such host')
+ expect(response_body['errorDescription']).to eql 'HRI Service Temporarily Unavailable | error Detail: error getting Kafka topics: Local: Broker transport failure'
ensure
ENV['KAFKA_BROKERS'] = temp
end
diff --git a/test/spec/hri_management_api_no_validation_spec.rb b/test/spec/hri_management_api_no_validation_spec.rb
index 890a753..6c2b575 100644
--- a/test/spec/hri_management_api_no_validation_spec.rb
+++ b/test/spec/hri_management_api_no_validation_spec.rb
@@ -9,8 +9,8 @@
INVALID_ID = 'INVALID'
TENANT_ID = ENV['TENANT_ID']
INTEGRATOR_ID = 'claims'
- TEST_TENANT_ID = "rspec-#{ENV['TRAVIS_BRANCH'].delete('.')}-test-tenant".downcase
- TEST_INTEGRATOR_ID = "rspec-#{ENV['TRAVIS_BRANCH'].delete('.')}-test-integrator".downcase
+ TEST_TENANT_ID = "rspec-#{ENV['BRANCH_NAME'].delete('.')}-test-tenant".downcase
+ TEST_INTEGRATOR_ID = "rspec-#{ENV['BRANCH_NAME'].delete('.')}-test-integrator".downcase
DATA_TYPE = 'rspec-batch'
STATUS = 'started'
BATCH_INPUT_TOPIC = "ingest.#{TENANT_ID}.#{INTEGRATOR_ID}.in"
@@ -43,7 +43,7 @@
@kafka_consumer.subscribe("ingest.#{TENANT_ID}.#{INTEGRATOR_ID}.notification")
#Create Batch
- @batch_prefix = "rspec-#{ENV['TRAVIS_BRANCH'].delete('.')}"
+ @batch_prefix = "rspec-#{ENV['BRANCH_NAME'].delete('.')}"
@batch_name = "#{@batch_prefix}-#{SecureRandom.uuid}"
create_batch = {
name: @batch_name,
@@ -68,11 +68,11 @@
Logger.new(STDOUT).info("New Batch Created With ID: #{@batch_id}")
#Get AppId Access Tokens
- @token_invalid_tenant = @app_id_helper.get_access_token('hri_integration_tenant_test_invalid', 'tenant_test_invalid', ENV['APPID_HRI_AUDIENCE'])
- @token_no_roles = @app_id_helper.get_access_token('hri_integration_tenant_test', 'tenant_test', ENV['APPID_HRI_AUDIENCE'])
- @token_integrator_role_only = @app_id_helper.get_access_token('hri_integration_tenant_test_data_integrator', 'tenant_test hri_data_integrator', ENV['APPID_HRI_AUDIENCE'])
- @token_consumer_role_only = @app_id_helper.get_access_token('hri_integration_tenant_test_data_consumer', 'tenant_test hri_consumer', ENV['APPID_HRI_AUDIENCE'])
- @token_all_roles = @app_id_helper.get_access_token('hri_integration_tenant_test_integrator_consumer', 'tenant_test hri_data_integrator hri_consumer', ENV['APPID_HRI_AUDIENCE'])
+ @token_invalid_tenant = @app_id_helper.get_access_token('hri_integration_tenant_test_invalid', 'tenant_test_invalid')
+ @token_no_roles = @app_id_helper.get_access_token('hri_integration_tenant_test', 'tenant_test')
+ @token_integrator_role_only = @app_id_helper.get_access_token('hri_integration_tenant_test_data_integrator', 'tenant_test hri_data_integrator')
+ @token_consumer_role_only = @app_id_helper.get_access_token('hri_integration_tenant_test_data_consumer', 'tenant_test hri_consumer')
+ @token_all_roles = @app_id_helper.get_access_token('hri_integration_tenant_test_integrator_consumer', 'tenant_test hri_data_integrator hri_consumer')
@token_invalid_audience = @app_id_helper.get_access_token('hri_integration_tenant_test_integrator_consumer', 'tenant_test hri_data_integrator hri_consumer', ENV['APPID_TENANT'])
end
@@ -87,11 +87,15 @@
end
#Delete Batches
- response = @elastic.es_delete_by_query(TENANT_ID, "name:rspec-#{ENV['TRAVIS_BRANCH']}*")
+ response = @elastic.es_delete_by_query(TENANT_ID, "name:#{@batch_prefix}*")
response.nil? ? (raise 'Elastic batch delete did not return a response') : (expect(response.code).to eq 200)
Logger.new(STDOUT).info("Delete test batches by query response #{response.body}")
@kafka_consumer.stop
+
+ #Ensure Event Stream topics were deleted
+ @event_streams_helper.delete_topic("ingest.#{TEST_TENANT_ID}.#{TEST_INTEGRATOR_ID}.in")
+ @event_streams_helper.delete_topic("ingest.#{TEST_TENANT_ID}.#{TEST_INTEGRATOR_ID}.notification")
end
context 'POST /tenants/{tenant_id}' do
@@ -306,15 +310,19 @@
context 'DELETE /tenants/{tenant_id}/streams/{integrator_id}' do
it 'Success' do
- #Delete Stream
- response = @mgmt_api_helper.hri_delete_tenant_stream(TEST_TENANT_ID, TEST_INTEGRATOR_ID)
- expect(response.code).to eq 200
-
- #Verify Stream Deletion
- response = @mgmt_api_helper.hri_get_tenant_streams(TEST_TENANT_ID)
- expect(response.code).to eq 200
- parsed_response = JSON.parse(response.body)
- expect(parsed_response['results']).to eql []
+ #Delete Stream and Verify Deletion
+ Timeout.timeout(20, nil, 'Kafka topics not deleted after 20 seconds') do
+ loop do
+ response = @mgmt_api_helper.hri_delete_tenant_stream(TEST_TENANT_ID, TEST_INTEGRATOR_ID)
+ break if response.code == 200
+
+ response = @mgmt_api_helper.hri_get_tenant_streams(TEST_TENANT_ID)
+ expect(response.code).to eq 200
+ parsed_response = JSON.parse(response.body)
+ break if parsed_response['results'] == []
+ sleep 1
+ end
+ end
end
it 'Invalid Stream' do
@@ -426,6 +434,29 @@
expect(parsed_response['results'][0]['id']).to eql INTEGRATOR_ID
end
+ it 'Success With Invalid Topic Only' do
+ invalid_topic = "ingest.#{TENANT_ID}.#{TEST_INTEGRATOR_ID}.invalid"
+ @event_streams_helper.create_topic(invalid_topic, 1)
+ response = @mgmt_api_helper.hri_get_tenant_streams(TENANT_ID)
+ expect(response.code).to eq 200
+ parsed_response = JSON.parse(response.body)
+ stream_found = false
+ parsed_response['results'].each do |integrator|
+ stream_found = true if integrator['id'] == TEST_INTEGRATOR_ID
+ end
+ raise "Tenant Stream Not Found: #{TEST_INTEGRATOR_ID}" unless stream_found
+
+ Timeout.timeout(15, nil, "Timed out waiting for the '#{invalid_topic}' topic to be created and then deleted") do
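+ # Wait for the newly created topic to propagate, then delete it and poll until it is gone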
+ loop do
+ break if @event_streams_helper.get_topics.include?(invalid_topic)
+ end
+ loop do
+ @event_streams_helper.delete_topic(invalid_topic)
+ break unless @event_streams_helper.get_topics.include?(invalid_topic)
+ end
+ end
+ end
+
it 'Missing Tenant ID' do
response = @mgmt_api_helper.hri_get_tenant_streams(nil)
expect(response.code).to eq 400
@@ -597,7 +628,7 @@
response = @mgmt_api_helper.hri_post_batch(TENANT_ID, @batch_template.to_json, { 'Authorization' => "Bearer #{@token_all_roles}" })
expect(response.code).to eq 500
parsed_response = JSON.parse(response.body)
- expect(parsed_response['errorDescription']).to eql '[3] Unknown Topic Or Partition: the request is for a topic or partition that does not exist on this broker'
+ expect(parsed_response['errorDescription']).to eql 'kafka producer error: Broker: Unknown topic or partition'
#Verify Batch Delete
50.times do
diff --git a/test/spec/hri_management_api_validation_spec.rb b/test/spec/hri_management_api_validation_spec.rb
index 457f8aa..38d4113 100644
--- a/test/spec/hri_management_api_validation_spec.rb
+++ b/test/spec/hri_management_api_validation_spec.rb
@@ -9,8 +9,8 @@
INVALID_ID = 'INVALID'
TENANT_ID = ENV['TENANT_ID']
INTEGRATOR_ID = 'claims'
- TEST_TENANT_ID = "rspec-#{ENV['TRAVIS_BRANCH'].delete('.')}-test-tenant".downcase
- TEST_INTEGRATOR_ID = "rspec-#{ENV['TRAVIS_BRANCH'].delete('.')}-test-integrator".downcase
+ TEST_TENANT_ID = "rspec-#{ENV['BRANCH_NAME'].delete('.')}-test-tenant".downcase
+ TEST_INTEGRATOR_ID = "rspec-#{ENV['BRANCH_NAME'].delete('.')}-test-integrator".downcase
DATA_TYPE = 'rspec-batch'
STATUS = 'started'
BATCH_INPUT_TOPIC = "ingest.#{TENANT_ID}.#{INTEGRATOR_ID}.in"
@@ -36,7 +36,7 @@
@config_path = File.absolute_path(File.join(File.dirname(__FILE__), "test_config"))
@log_path = File.absolute_path(File.join(File.dirname(__FILE__), "/"))
- @hri_deploy_helper.deploy_hri(@exe_path, "#{@config_path}/valid_config.yml", @log_path, '-validation true')
+ @hri_deploy_helper.deploy_hri(@exe_path, "#{@config_path}/valid_config.yml", @log_path, '-validation=true')
response = @request_helper.rest_get("#{@hri_base_url}/healthcheck", {})
unless response.code == 200
raise "Health check failed: #{response.body}"
@@ -48,7 +48,7 @@
@kafka_consumer.subscribe("ingest.#{TENANT_ID}.#{INTEGRATOR_ID}.notification")
#Create Batch
- @batch_prefix = "rspec-#{ENV['TRAVIS_BRANCH'].delete('.')}"
+ @batch_prefix = "rspec-#{ENV['BRANCH_NAME'].delete('.')}"
@batch_name = "#{@batch_prefix}-#{SecureRandom.uuid}"
create_batch = {
name: @batch_name,
@@ -93,11 +93,17 @@
end
#Delete Batches
- response = @elastic.es_delete_by_query(TENANT_ID, "name:rspec-#{ENV['TRAVIS_BRANCH']}*")
+ response = @elastic.es_delete_by_query(TENANT_ID, "name:#{@batch_prefix}*")
response.nil? ? (raise 'Elastic batch delete did not return a response') : (expect(response.code).to eq 200)
Logger.new(STDOUT).info("Delete test batches by query response #{response.body}")
@kafka_consumer.stop
+
+ #Ensure Event Stream topics were deleted
+ @event_streams_helper.delete_topic("ingest.#{TEST_TENANT_ID}.#{TEST_INTEGRATOR_ID}.in")
+ @event_streams_helper.delete_topic("ingest.#{TEST_TENANT_ID}.#{TEST_INTEGRATOR_ID}.notification")
+ @event_streams_helper.delete_topic("ingest.#{TEST_TENANT_ID}.#{TEST_INTEGRATOR_ID}.out")
+ @event_streams_helper.delete_topic("ingest.#{TEST_TENANT_ID}.#{TEST_INTEGRATOR_ID}.invalid")
end
context 'POST /tenants/{tenant_id}/streams/{integrator_id}' do
@@ -142,15 +148,19 @@
context 'DELETE /tenants/{tenant_id}/streams/{integrator_id}' do
it 'Success' do
- #Delete Stream
- response = @mgmt_api_helper.hri_delete_tenant_stream(TEST_TENANT_ID, TEST_INTEGRATOR_ID)
- expect(response.code).to eq 200
-
- #Verify Stream Deletion
- response = @mgmt_api_helper.hri_get_tenant_streams(TEST_TENANT_ID)
- expect(response.code).to eq 200
- parsed_response = JSON.parse(response.body)
- expect(parsed_response['results']).to eql []
+ #Delete Stream and Verify Deletion
+ Timeout.timeout(20, nil, 'Kafka topics not deleted after 20 seconds') do
+ loop do
+ response = @mgmt_api_helper.hri_delete_tenant_stream(TEST_TENANT_ID, TEST_INTEGRATOR_ID)
+ break if response.code == 200
+
+ response = @mgmt_api_helper.hri_get_tenant_streams(TEST_TENANT_ID)
+ expect(response.code).to eq 200
+ parsed_response = JSON.parse(response.body)
+ break if parsed_response['results'] == []
+ sleep 1
+ end
+ end
#Delete Tenant
response = @mgmt_api_helper.hri_delete_tenant(TEST_TENANT_ID)
diff --git a/test/spec/send_slack_message.rb b/test/spec/send_slack_message.rb
deleted file mode 100755
index e49c8da..0000000
--- a/test/spec/send_slack_message.rb
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/usr/bin/env ruby
-
-# (C) Copyright IBM Corp. 2021
-#
-# SPDX-License-Identifier: Apache-2.0
-
-require_relative '../env'
-
-logger = Logger.new(STDOUT)
-
-if %w[main develop].include?(ENV['TRAVIS_BRANCH'])
- logger.info("#{ARGV[0]} tests failed. Sending a message to Slack...")
- HRITestHelpers::SlackHelper.new(ENV['SLACK_WEBHOOK']).send_slack_message(ARGV[0], ENV['TRAVIS_BUILD_DIR'], ENV['TRAVIS_BRANCH'], ENV['TRAVIS_JOB_WEB_URL'])
-else
- logger.info("#{ARGV[0]} tests failed, but a Slack message is only sent for the 'main' or 'develop' branches.")
-end
-
-exit 1
\ No newline at end of file
diff --git a/test/spec/test_config/invalid_cert_config.yml b/test/spec/test_config/invalid_cert_config.yml
index 62e7a83..0d347d4 100644
--- a/test/spec/test_config/invalid_cert_config.yml
+++ b/test/spec/test_config/invalid_cert_config.yml
@@ -25,11 +25,22 @@ elastic-cert: |
i+3XjJ7/peF3xMvTMoy35DcT3E2ZeSVjouZs15O90kI3k2daS2OHJABW0vSj4nLz
+PQzp/B9cQmOO8dCe049Q3oaUA==
-----END CERTIFICATE-----
-kafka-username: "token"
-kafka-password: "kafkaPassword"
kafka-admin-url: "https://twvyj4m0kft5j0mh.svc01.us-east.eventstreams.cloud.ibm.com"
-kafka-brokers: "broker-0-twvyj4m0kft5j0mh.kafka.svc01.us-east.eventstreams.cloud.ibm.com:9093,broker-1-twvyj4m0kft5j0mh.kafka.svc01.us-east.eventstreams.cloud.ibm.com:9093,broker-2-twvyj4m0kft5j0mh.kafka.svc01.us-east.eventstreams.cloud.ibm.com:9093,broker-3-twvyj4m0kft5j0mh.kafka.svc01.us-east.eventstreams.cloud.ibm.com:9093,broker-4-twvyj4m0kft5j0mh.kafka.svc01.us-east.eventstreams.cloud.ibm.com:9093,broker-5-twvyj4m0kft5j0mh.kafka.svc01.us-east.eventstreams.cloud.ibm.com:9093"
+kafka-brokers:
+ - "broker-4-twvyj4m0kft5j0mh.kafka.svc01.us-east.eventstreams.cloud.ibm.com:9093"
+ - "broker-3-twvyj4m0kft5j0mh.kafka.svc01.us-east.eventstreams.cloud.ibm.com:9093"
+ - "broker-2-twvyj4m0kft5j0mh.kafka.svc01.us-east.eventstreams.cloud.ibm.com:9093"
+ - "broker-5-twvyj4m0kft5j0mh.kafka.svc01.us-east.eventstreams.cloud.ibm.com:9093"
+ - "broker-1-twvyj4m0kft5j0mh.kafka.svc01.us-east.eventstreams.cloud.ibm.com:9093"
+ - "broker-0-twvyj4m0kft5j0mh.kafka.svc01.us-east.eventstreams.cloud.ibm.com:9093"
+kafka-properties:
+ - "security.protocol:sasl_ssl"
+ - "sasl.mechanism:PLAIN"
+ - "sasl.username:token"
+ - "sasl.password:kafkaPassword"
+ - "ssl.endpoint.identification.algorithm:https"
new-relic-enabled: false
tls-enabled: true
tls-cert-path: "cert.pem"
tls-key-path: "key.pem"
+log-level: info
diff --git a/test/spec/test_config/valid_config.yml b/test/spec/test_config/valid_config.yml
index 2f955c4..cb61217 100644
--- a/test/spec/test_config/valid_config.yml
+++ b/test/spec/test_config/valid_config.yml
@@ -25,11 +25,22 @@ elastic-cert: |
pZKK8wWNUwgWQ66MNh8Ckq732JZ+so6RAfb4BbNj45I3s9fuZSYlvjkc5/+da3Ck
Rp6anX5N6yIrzhVmAgefjQdBztYzdfPhsJBkS/TDnRmk
-----END CERTIFICATE-----
-kafka-username: "token"
-kafka-password: "kafkaPassword"
kafka-admin-url: "https://twvyj4m0kft5j0mh.svc01.us-east.eventstreams.cloud.ibm.com"
-kafka-brokers: "broker-0-twvyj4m0kft5j0mh.kafka.svc01.us-east.eventstreams.cloud.ibm.com:9093,broker-1-twvyj4m0kft5j0mh.kafka.svc01.us-east.eventstreams.cloud.ibm.com:9093,broker-2-twvyj4m0kft5j0mh.kafka.svc01.us-east.eventstreams.cloud.ibm.com:9093,broker-3-twvyj4m0kft5j0mh.kafka.svc01.us-east.eventstreams.cloud.ibm.com:9093,broker-4-twvyj4m0kft5j0mh.kafka.svc01.us-east.eventstreams.cloud.ibm.com:9093,broker-5-twvyj4m0kft5j0mh.kafka.svc01.us-east.eventstreams.cloud.ibm.com:9093"
+kafka-brokers:
+ - "broker-4-twvyj4m0kft5j0mh.kafka.svc01.us-east.eventstreams.cloud.ibm.com:9093"
+ - "broker-3-twvyj4m0kft5j0mh.kafka.svc01.us-east.eventstreams.cloud.ibm.com:9093"
+ - "broker-2-twvyj4m0kft5j0mh.kafka.svc01.us-east.eventstreams.cloud.ibm.com:9093"
+ - "broker-5-twvyj4m0kft5j0mh.kafka.svc01.us-east.eventstreams.cloud.ibm.com:9093"
+ - "broker-1-twvyj4m0kft5j0mh.kafka.svc01.us-east.eventstreams.cloud.ibm.com:9093"
+ - "broker-0-twvyj4m0kft5j0mh.kafka.svc01.us-east.eventstreams.cloud.ibm.com:9093"
+kafka-properties:
+ - "security.protocol:sasl_ssl"
+ - "sasl.mechanism:PLAIN"
+ - "sasl.username:token"
+ - "sasl.password:kafkaPassword"
+ - "ssl.endpoint.identification.algorithm:https"
new-relic-enabled: false
tls-enabled: true
tls-cert-path: "cert.pem"
tls-key-path: "key.pem"
+log-level: info
diff --git a/test/spec/upload_test_reports.rb b/test/spec/upload_test_reports.rb
index 5f3eeeb..af41a4f 100755
--- a/test/spec/upload_test_reports.rb
+++ b/test/spec/upload_test_reports.rb
@@ -6,28 +6,34 @@
require_relative '../env'
-# This script uploads JUnit test reports to Cloud Object Storage to be used by the UnitTH application to generate HTML
-# test trend reports for the IVT and Dredd tests. More information on unitth can be found here: http://junitth.sourceforge.net/
+# This script uploads JUnit test reports to Cloud Object Storage to be used by the Allure application to generate HTML
+# test trend reports for the IVT and Dredd tests. More information on Allure can be found here: https://github.com/allure-framework/allure2
#
-# The 'ivttest.xml' and 'dreddtests.xml' JUnit reports are uploaded to the 'hri-test-reports' Cloud Object Storage bucket,
-# which is also mounted on the 'unitth' kubernetes pod. This bucket keeps 30 days of reports that will be used to generate a
-# historical HTML report when the UnitTH jar is run on the pod.
+# The 'ivttest.xml' and 'dreddtests.xml' JUnit reports are uploaded to the 'wh-hri-dev1-allure-reports' Cloud Object Storage bucket,
+# which is also mounted on the 'allure' kubernetes pod. This bucket keeps 30 days of reports that will be used to generate a
+# historical HTML report when the allure executable is invoked on the pod.
cos_helper = HRITestHelpers::COSHelper.new(ENV['COS_URL'], ENV['IAM_CLOUD_URL'], ENV['CLOUD_API_KEY'])
logger = Logger.new(STDOUT)
time = Time.now.strftime '%Y%m%d%H%M%S'
-if %w[main develop].include?(ENV['TRAVIS_BRANCH'])
- if ARGV[0] == 'IVT'
- logger.info('Uploading ivttest.xml to COS')
- `sed -i 's#test/ivt_test_results#rspec#g' ivttest.xml`
- cos_helper.upload_object_data('wh-hri-dev1-test-reports', "mgmt-api/#{ENV['TRAVIS_BRANCH']}/ivt/#{time}/ivttest.xml", File.read(File.join(Dir.pwd, 'ivttest.xml')))
- elsif ARGV[0] == 'Dredd'
- logger.info('Uploading dreddtests.xml to COS')
- cos_helper.upload_object_data('wh-hri-dev1-test-reports', "mgmt-api/#{ENV['TRAVIS_BRANCH']}/dredd/#{time}/dreddtests.xml", File.read(File.join(Dir.pwd, 'dreddtests.xml')))
- else
- raise "Invalid argument: #{ARGV[0]}. Valid arguments: 'IVT' or 'Dredd'"
+if ARGV[0] == 'IVT'
+ logger.info("Uploading ivttest-#{time}.xml to COS")
+ File.rename("#{Dir.pwd}/ivttest.xml", "#{Dir.pwd}/hri-mgmt-api-ivttest-#{time}.xml")
+ cos_helper.upload_object_data('wh-hri-dev1-allure-reports', "hri-mgmt-api-ivttest-#{time}.xml", File.read(File.join(Dir.pwd, "hri-mgmt-api-ivttest-#{time}.xml")))
+elsif ARGV[0] == 'Dredd'
+ logger.info("Uploading dreddtests-#{time}.xml to COS")
+ doc = Nokogiri::XML(File.read("#{Dir.pwd}/dreddtests.xml")) { |config| config.noblanks }
+ doc.search('//testsuite').attribute('name').value = "hri-mgmt-api - #{ENV['BRANCH_NAME']} - Dredd"
+
+ #Dredd XUnit output contains skipped tests, so delete these elements from the result xml
+ doc.search('//testsuite/testcase').each do |block|
+ block.remove if block.children.count == 1
end
+
+ File.write("#{Dir.pwd}/dreddtests.xml", doc)
+ File.rename("#{Dir.pwd}/dreddtests.xml", "#{Dir.pwd}/hri-mgmt-api-dreddtests-#{time}.xml")
+ cos_helper.upload_object_data('wh-hri-dev1-allure-reports', "hri-mgmt-api-dreddtests-#{time}.xml", File.read(File.join(Dir.pwd, "hri-mgmt-api-dreddtests-#{time}.xml")))
else
- logger.info("Test reports are only generated for the 'main' or 'develop' branches. Exiting.")
+ raise "Invalid argument: #{ARGV[0]}. Valid arguments: 'IVT' or 'Dredd'"
end
\ No newline at end of file
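
The skipped-test cleanup added to upload_test_reports.rb relies on a skipped Dredd testcase carrying exactly one child element once blank text nodes are stripped. A self-contained sketch of that behavior on a toy JUnit report (the sample XML is invented for illustration):

```ruby
# Illustrative only: mirrors the noblanks + children.count check used above.
require 'nokogiri'

xml = <<~XML
  <testsuite name="dredd">
    <testcase name="passes"/>
    <testcase name="skipped"><skipped/></testcase>
  </testsuite>
XML

doc = Nokogiri::XML(xml) { |config| config.noblanks }
doc.search('//testsuite/testcase').each do |tc|
  # A skipped testcase has a single child element (<skipped/>); passing ones
  # are empty after noblanks, so they are kept.
  tc.remove if tc.children.count == 1
end
puts doc.to_xml # only the passing testcase remains
```
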