From d2e4bc74882729a75a29649e14cd0268a699c1de Mon Sep 17 00:00:00 2001 From: "david.perkins" Date: Thu, 2 Sep 2021 12:50:48 -0400 Subject: [PATCH] Copied v1.2.5 Signed-off-by: david.perkins --- CHANGELOG.md | 13 - CONTRIBUTING.md | 8 +- Makefile | 20 +- README.md | 51 +- appscan.sh | 12 - deploy.sh | 116 +-- docker/Dockerfile | 47 + docker/README.md | 53 ++ docker/appid.sh | 118 +++ docker/elastic.sh | 44 + docker/run.sh | 44 + docker/template.env | 11 + document-store/index-templates/batches.json | 13 +- elastic-cert64 | 19 + mgmt-api-manifest.yml | 28 +- run-dreddtests.sh | 11 +- run-fvttests.sh | 11 - run-insights-publish.sh | 110 --- run-ivttests.sh | 3 + run-smoketests.sh | 18 +- run-unittests.sh | 3 + secureApi.sh | 42 - src/batches/conversion.go | 19 +- src/batches/conversion_test.go | 28 +- src/batches/create.go | 40 +- src/batches/create_test.go | 75 +- src/batches/get.go | 26 +- src/batches/get_by_id.go | 46 +- src/batches/get_by_id_test.go | 181 +++- src/batches/get_test.go | 141 ++- src/batches/update_status.go | 100 +- src/batches/update_status_test.go | 329 +++++-- src/batches_create.go | 11 +- src/batches_get.go | 11 +- src/batches_get_by_id.go | 11 +- src/batches_sendcomplete.go | 11 +- src/batches_terminate.go | 11 +- src/common/actionloopmin/actionloopmin.go | 5 + src/common/auth/constants.go | 16 + src/common/auth/hri_claims.go | 40 + src/common/auth/hri_claims_test.go | 98 ++ src/common/auth/validate.go | 185 ++++ src/common/auth/validate_test.go | 467 +++++++++ src/common/auth/validator_mock.go | 77 ++ src/common/elastic/client.go | 2 +- src/common/elastic/client_test.go | 2 +- src/common/elastic/decoder.go | 2 +- src/common/elastic/decoder_test.go | 2 +- src/common/eventstreams/client_utils.go | 2 +- src/common/eventstreams/service.go | 2 +- src/common/kafka/read_partitions_test.go | 51 + src/common/param/parameters.go | 27 +- src/exec.env | 1 + src/go.mod | 14 +- src/go.sum | 51 +- src/healthcheck/get.go | 2 +- src/tenants/create.go | 2 +- 
src/tenants/delete.go | 2 +- src/tenants/get.go | 2 +- src/tenants/get_by_id.go | 2 +- test/README.md | 31 +- test/env.rb | 9 +- test/spec/app_id_helper.rb | 32 + test/spec/dredd_hooks.rb | 378 ++++++-- test/spec/elastic_helper.rb | 13 +- test/spec/event_streams_helper.rb | 15 + test/spec/helper.rb | 14 +- test/spec/hri_management_api_spec.rb | 992 +++++++++++++++----- updateApi.sh | 15 - 69 files changed, 3461 insertions(+), 927 deletions(-) delete mode 100644 CHANGELOG.md delete mode 100755 appscan.sh create mode 100644 docker/Dockerfile create mode 100644 docker/README.md create mode 100755 docker/appid.sh create mode 100755 docker/elastic.sh create mode 100755 docker/run.sh create mode 100644 docker/template.env create mode 100644 elastic-cert64 delete mode 100755 run-fvttests.sh delete mode 100755 run-insights-publish.sh delete mode 100755 secureApi.sh create mode 100644 src/common/auth/constants.go create mode 100644 src/common/auth/hri_claims.go create mode 100644 src/common/auth/hri_claims_test.go create mode 100644 src/common/auth/validate.go create mode 100644 src/common/auth/validate_test.go create mode 100644 src/common/auth/validator_mock.go create mode 100644 src/common/kafka/read_partitions_test.go create mode 100644 src/exec.env create mode 100644 test/spec/app_id_helper.rb create mode 100644 test/spec/event_streams_helper.rb delete mode 100755 updateApi.sh diff --git a/CHANGELOG.md b/CHANGELOG.md deleted file mode 100644 index 1ed032f..0000000 --- a/CHANGELOG.md +++ /dev/null @@ -1,13 +0,0 @@ -# Changelog - -All notable changes to this project will be documented in this file. 
- -## [Unreleased] - -## [0.3.0] - 2020-11-20 - -### Added -- Initial publish of code into the open - -[unreleased]: https://github.com/Alvearie/hri-mgmt-api/compare/v0.3.0...HEAD -[0.3.0]: https://github.com/Alvearie/hri-mgmt-api/releases/tag/v0.3.0 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 739792d..bd6a884 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,4 +1,10 @@ # Guiding Principles for Contribution First of all, thank you for taking the time to contribute! The HRI Team values your contribution. -Since we have not completely moved our development into the open yet, external contributions are limited. If you would like to make contributions, please create an issue detailing the change. We will work with you to get it merged in. \ No newline at end of file +In general, contributions can be made using the standard fork and pull request process. We use the [Git Flow](https://nvie.com/posts/a-successful-git-branching-model/) branching model, so branch off of and submit the pull request back to the `develop` branch. If updating an older release, submit a pull request against the associated `support-.x` branch. If one does not exist, contact us, and we will create one. + +The GitHub actions may not run successfully in your forked repository without several secrets and external resources used for integration testing. You can ignore this and rely on the actions that will run in our repository when you create the pull request, but you should be able to run local unit tests to test your changes. + +Once the pull request is reviewed and approved and all the integration tests pass, we will merge it and handle releasing the updates. + +If making a significant contribution, please reach out to the development team's Slack channel, [#health-record-ingestion](https://alvearie.slack.com/archives/C01GM43LFJ6), so that we can coordinate the desired changes. 
diff --git a/Makefile b/Makefile index e1e41da..9f892c3 100644 --- a/Makefile +++ b/Makefile @@ -1,3 +1,7 @@ +# (C) Copyright IBM Corp. 2020 +# +# SPDX-License-Identifier: Apache-2.0 + # This creates an IBM Function's zipped actionloop executable for every *.go file in the base src/ directory. # Each of these files should have a 'main' method and use common/actionloopmin to implement the actionloop protocol. # Also '// +build !tests' must be at the beginning of the file, so it is excluded from tests. @@ -7,21 +11,29 @@ BIN:=$(MAINS:src/%.go=$(BUILD)/%-bin.zip) SRCS:=$(find src -name *.go) # Multiple builds on the same input files is causing the Makefile's automatic rebuild only on dependency file modification to not work properly. Added clean to the default build, so the actions are always built. -bins: clean test format $(BIN) +bins: clean test format tidy $(BIN) $(BIN): $(SRCS) src/go.mod src/go.sum $(BUILD) cd src; GOOS=linux GOACH=amd64 go build -o exec $(@:$(BUILD)/%-bin.zip=%.go) - cd src; zip ../$@ -qr exec + cd src; zip ../$@ -qr exec exec.env rm src/exec # '-tags tests' is used to excluded the multiple main declarations from test builds -# GOPRIVATE environment variable needs to be set in order for this project to be able to access the private Resource Controller golang client test: cd src; go test -coverprofile testCoverage.out ./... -v -tags tests +coverage: + cd src; go tool cover -html=testCoverage.out + format: cd src; go fmt ./... 
+tidy: + cd src; go mod tidy + +deps: + cd src; go list -m all + clean: -rm -f $(BUILD)/*.zip 2>/dev/null @@ -29,4 +41,4 @@ $(BUILD): mkdir $(BUILD) # targets that don't produce physical files and get run every time -.PHONY: test clean +.PHONY: test clean tidy deps diff --git a/README.md b/README.md index f06700d..cedaf81 100644 --- a/README.md +++ b/README.md @@ -1,19 +1,17 @@ # HRI Management API -The IBM Watson Health, Health Record Ingestion service is an open source project designed to serve as a “front door”, receiving health data for cloud-based solutions. See our [documentation](https://alvearie.github.io/HRI/) for more details. +The Alvearie Health Record Ingestion service: a common 'Deployment Ready Component' designed to serve as a “front door” for data for cloud-based solutions. See our [documentation](https://alvearie.io/HRI/) for more details. -This repo contains the code for the Management API of the HRI, which uses [IBM Functions](https://cloud.ibm.com/docs/openwhisk?topic=cloud-functions-getting-started) (Serverless built on [OpenWhisk](https://openwhisk.apache.org/)) with [Golang](https://golang.org/doc/). This repo defines an API and maps endpoints to Golang executables packaged into 'actions'. IBM Functions takes care of standing up an API Gateway, executing & scaling the actions, and transmitting data between the gateway and action endpoints. [mgmt-api-manifest.yml](mgmt-api-manifest.yml) defines the actions, API, and the mapping between them. A separate OpenAPI specification is maintained in [Alvearie/hri-api-spec](https://github.com/Alvearie/hri-api-spec) for external user's reference. Please Note: Any changes to this (RESTful) Management API for the HRI requires changes in both the api-spec repo and this mgmt-api repo. - -This is an initial publish of the code, which is being transitioned to open source, but is not yet completed. To use this code, you will need to update the CI/CD for your environment. 
There is a TravisCI build and integration tests, but all the references to the CD Process that deployed to internal IBM Cloud environments were removed. At some future date, a more comprehensive CI/CD pipeline will be published, as part of Watson Health's continuing Open Source initiative. +This repo contains the code for the Management API of the HRI, which uses [IBM Functions](https://cloud.ibm.com/docs/openwhisk?topic=cloud-functions-getting-started) (Serverless built on [OpenWhisk](https://openwhisk.apache.org/)) with [Golang](https://golang.org/doc/). Basically, this repo defines an API and maps endpoints to Golang executables packaged into 'actions'. IBM Functions takes care of standing up an API Gateway, executing & scaling the actions, and transmitting data between them. [mgmt-api-manifest.yml](mgmt-api-manifest.yml) defines the actions, API, and the mapping between them. A separate OpenAPI specification is maintained in [Alvearie/hri-api-spec](https://github.com/Alvearie/hri-api-spec) for external user's reference. Please Note: Any changes to this (RESTful) Management API for the HRI requires changes in both the hri-api-spec repo and this hri-mgmt-api repo. ## Communication -* Please TBD -* Please see [MAINTAINERS.md](MAINTAINERS.md) +* Please [join](https://alvearie.io/contributions/requestSlackAccess) our Slack channel for further questions: [#health-record-ingestion](https://alvearie.slack.com/archives/C01GM43LFJ6) +* Please see recent contributors or [maintainers](MAINTAINERS.md) ## Getting Started ### Prerequisites -* Golang 1.13 - you can use an official [distribution](https://golang.org/dl/) or a package manager like `homebrew` for mac +* Golang 1.15 - you can use an official [distribution](https://golang.org/dl/) or a package manager like `homebrew` for mac * Make - should come pre-installed on MacOS and Linux * [GoMock latest](https://github.com/golang/mock) released version. 
Installation: run `$ GO111MODULE=on go get github.com/golang/mock/mockgen@latest`. See [GoMock docs](https://github.com/golang/mock). @@ -26,7 +24,7 @@ This is an initial publish of the code, which is being transitioned to open sour From the base directory, run `make`. This will download dependencies using Go Modules, run all the unit tests, and package up the code for IBM Functions in the `build` directory. ``` -mgmt-api$ make +hri-mgmt-api$ make rm -f build/*.zip 2>/dev/null cd src; go test ./... -v -tags tests === RUN TestEsDocToBatch @@ -35,7 +33,7 @@ cd src; go test ./... -v -tags tests --- PASS: TestEsDocToBatch/example1 (0.00s) ... PASS -ok ibm.com/watson/health/foundation/hri/healthcheck 2.802s +ok github.com/Alvearie/hri-mgmt-api/healthcheck 2.802s cd src; GOOS=linux GOACH=amd64 go build -o exec batches_create.go cd src; zip ../build/batches_create-bin.zip -qr exec rm src/exec @@ -45,20 +43,27 @@ cd src; zip ../build/healthcheck_get-bin.zip -qr exec rm src/exec ``` ## CI/CD -Since this application must be deployed using IBM Functions in an IBM Cloud account, there isn't a way to launch and test the API & actions locally. So, we have an automated TravisCI build that automatically deploys every branch in its own IBM Functions' namespace in the IBM Cloud and runs integration tests. They all share a common ElasticSearch and Event Streams instance. Once it's deployed, you can perform manual testing with your namespace. You can also use the IBM Functions UI or IBM Cloud CLI to modify the actions or API in your namespace. +Since this application must be deployed using IBM Functions in an IBM Cloud account, there isn't a way to launch and test the API & actions locally. So, we have set up GitHub actions to automatically deploy every branch in its own IBM Function's namespace in our IBM cloud account and run integration tests. They all share common Elastic Search and Event Streams instances. Once it's deployed, you can perform manual testing with your namespace. 
You can also use the IBM Functions UI or IBM Cloud CLI to modify the actions or API in your namespace. When the GitHub branch is deleted, the associated IBM Function's namespace is also automatically deleted. -If you're working off a fork, the secure `.travis.yml` environment variables will not work, because they are repository specific. We will work with you to get your fork deployed and tested. +### Docker image build +Images are published on every `develop` branch build with the tag `-timestamp`. ## Code Overview ### IBM Function Actions - Golang Mains -For each API endpoint, there is a Golang executable packaged into an IBM Function's 'action' to service the requests. There are several `.go` files in the base `src/` directory, one for each action and no others, each of which defines `func main()`. If you're familiar with Golang, you might be asking how there can be multiple files with different definitions of `func main()`. The Makefile takes care of compiling each one into a separate executable, and each file includes a [Build Constraint](https://golang.org/pkg/go/build/#hdr-Build_Constraints) to exclude it from unit tests. This also means these files are not unit tested and thus are kept as small as possible. Each one sets up any required clients and then calls an implementation method in a sub package. They also use `common.actionloopmin.Main()` to implement the OpenWhisk [action loop protocol](https://github.com/apache/openwhisk-runtime-go/blob/master/docs/ACTION.md). +For each API endpoint, there is a Golang executable packaged into an IBM Function's 'action' to service the requests. There are several `.go` files in the base `src/` directory, one for each action and no others, each of which defines `func main()`. If you're familiar with Golang, you might be asking how there can be multiple files with different definitions of `func main()`. 
The Makefile takes care of compiling each one into a separate executable, and each file includes a [Build Constraint](https://golang.org/pkg/go/build/#hdr-Build_Constraints) to exclude it from unit tests. This also means these files are not unit tested and thus are kept as small as possible. Each one sets up any required clients and then calls an implementation method in a sub package. They also use `common.actionloopmin.Main()` to implement the OpenWhisk [action loop protocol](https://github.com/apache/openwhisk-runtime-go/blob/main/docs/ACTION.md). + +The compiled binaries have to be named `exec` and put in a zip file. Additionally, a `exec.env` file has to be included, which contains the name of the docker container to use when running the action. All the zip files are written to the `build` directory when running `make`. ### Packages -There are three packages: -- batches - code for all the `tenants/tenantId/batches` endpoints. In general, each endpoint has an implementation method that each `func main()` above calls. -- common - common code for various clients (i.e. ElasticSearch & Kafka) and input/output helper methods. +- tenants - code for all the `tenants` endpoints. Tenants are mainly indexes in Elastic Search. + +- streams - code for all the `tenants//streams` endpoints. Streams are mainly sets of topics in Kafka (Event Streams). + +- batches - code for all the `tenants//batches` endpoints. + +- common - common code for various clients (i.e. Elastic Search & Kafka) and input/output helper methods. - healthcheck - code for the `healthcheck` endpoint @@ -66,10 +71,20 @@ There are three packages: Each unit test file follows the Golang conventions where it's named `*_test.go` (e.g. `get_by_id_test.go`) and is located in the same directory as the file it's testing (e.g. `get_by_id.go`). 
The team uses 'mock' code in files generated by [the GoMock framework](https://github.com/golang/mock) to support unit testing by mocking some collaborating component (function/struct) upon which the System Under Test (function/struct) depends. A good example test class that makes use of a mock object is `connector_test.go` (in `kafka` package) - it makes use of the generated code in `connector_mock.go`. Note that you may need to create a new go interface in order to use GoMock to generate the mock code file for the behavior you are trying to mock. This is because the framework requires [an interface](https://medium.com/rungo/interfaces-in-go-ab1601159b3a) in order to generate the mock code. [This article](https://medium.com/@duythhuynh/gomock-unit-testing-made-easy-b59a0e947ba7) might be helpful to understand GoMock. #### Test Coverage -The goal is to have 90% code coverage with unit tests. The build automatically prints out test coverage percentages and a coverage file at `src/testCoverage.out`. Additionally, you can view test coverage interactively in your browser by running `go tool cover -html=testCoverage.out` from the `src/` directory. +The goal is to have 90% code coverage with unit tests. The build automatically prints out test coverage percentages and a coverage file at `src/testCoverage.out`. Additionally, you can view test coverage interactively in your browser by running `make coverage`. ### API Definition -The API that this repo implements is defined in [Alvearie/hri-api-spec](https://github.com/Alvearie/hri-api-spec) using OpenAPI 3.0. There are automated Dredd tests to make sure the implemented API meets the spec. For any changes that affect the API itself (which naturally, result in changes also here in the mgmt-api Repo), please use branches with the same name for your corresponding changes in both repositories. +The API that this repo implements is defined in [Alvearie/hri-api-spec](https://github.com/Alvearie/hri-api-spec) using OpenAPI 3.0. 
There are automated Dredd tests to make sure the implemented API meets the spec. If there are changes to the API, make them to the specification repo using a branch with the same name. Then the Dredd tests will run against the modified API specification. + +### Authentication & Authorization +All endpoints (except the health check) require an OAuth 2.0 JWT bearer access token per [RFC8693](https://tools.ietf.org/html/rfc8693) in the `Authorization` header field. The Tenant and Stream endpoints require IAM tokens, but the Batch endpoints require a token with HRI and Tenant scopes for authorization. The Batch token issuer is configurable via a bound parameter, and must be OIDC compliant because the code dynamically uses the OIDC defined well know endpoints to validate tokens. Integration and testing have already been completed with App ID, the standard IBM Cloud solution. + +Batch JWT access token scopes: +- hri_data_integrator - Data Integrators can create, get, and change the status of batches, but only ones that they created. +- hri_consumer - Consumers can list and get Batches +- tenant_ - provides access to this tenant's batches. This scope must use the prefix 'tenant_'. For example, if a data integrator tries to create a batch by making an HTTP POST call to `tenants/24/batches`, the token must contain scope `tenant_24`, where the `24` is the tenantId. + +The scopes claim must contain one or more of the HRI roles ("hri_data_integrator", "hri_consumer") as well as the tenant id of the tenant being accessed. ## Contribution Guide -Since we have not completely moved our development into the open yet, external contributions are limited. If you would like to make contributions, please create an issue detailing the change. We will work with you to get it merged in. +Please read [CONTRIBUTING.md](CONTRIBUTING.md) for details on our code of conduct, and the process for submitting pull requests to us. 
diff --git a/appscan.sh b/appscan.sh deleted file mode 100755 index 9d3c87d..0000000 --- a/appscan.sh +++ /dev/null @@ -1,12 +0,0 @@ -scan_name=$([ ! -z "$TRAVIS_BRANCH" ] && [ ! -z "$TRAVIS_BUILD_NUMBER" ] && echo "TRAVIS-CI-$TRAVIS_BRANCH-$TRAVIS_BUILD_NUMBER" || echo "$USER-$(date +'%Y-%b-%d-%H-%M-%S')") - -wget -O ../SAClientUtil.zip $ASOC_CLI_URL -unzip ../SAClientUtil.zip -d ../ -find .. -type d | sort | grep SAClientUtil | while read folder; do mv $folder ../SAClientUtil; break; done; - -cp -r $GOPATH/pkg/mod dependencies -../SAClientUtil/bin/appscan.sh api_login -P $ASOC_KEY_SECRET -u $ASOC_KEY_ID -persist - -echo $SCAN_NAME -../SAClientUtil/bin/appscan.sh prepare -d $(pwd) -l $(pwd) -n appscan -v -../SAClientUtil/bin/appscan.sh queue_analysis -a $ASOC_APP_ID -f ./appscan.irx -n $scan_name diff --git a/deploy.sh b/deploy.sh index ca5536c..f978671 100755 --- a/deploy.sh +++ b/deploy.sh @@ -1,70 +1,10 @@ #!/usr/bin/env bash -set -eo pipefail - -# check for command-line options -SKIP_SECURE_API=false -POSITIONAL=() -while [[ $# -gt 0 ]] -do - key="$1" - case $key in - -s|--skipSecureApi) - SKIP_SECURE_API=true - shift - ;; - *) - POSITIONAL+=("$1") - shift - ;; - esac -done -set -- "${POSITONAL[@]}" # restore unprocessed positional params - -# prompt for parameters if not already provided as environment variables -if [ -z "$CLOUD_API_KEY" ] -then - read -p "CLOUD_API_KEY not set. Enter IBM Cloud API Key: " CLOUD_API_KEY -fi - -if [ -z "$REGION" ] -then - read -p "REGION not set. Enter target Region (i.e. us-south): " REGION -fi - -if [ -z "$RESOURCE_GROUP" ] -then - read -p "RESOURCE_GROUP not set. Enter target Resource Group (i.e. CDT_Payer_ASB_RG): " RESOURCE_GROUP -fi - -if [ -z "$NAMESPACE" ] -then - read -p "NAMESPACE not set. Enter target namespace: " NAMESPACE -fi - -if [ -z "$FN_WEB_SECURE_KEY" ] -then - read -p "FN_WEB_SECURE_KEY not set. 
Enter IBM Function Web API Key: " FN_WEB_SECURE_KEY -fi -if [ -z "$ELASTIC_INSTANCE" ] -then - read -p "ELASTIC_INSTANCE not set. Enter Elastic instance name (i.e. HRI-DocumentStore): " ELASTIC_INSTANCE -fi - -if [ -z "$ELASTIC_SVC_ACCOUNT" ] -then - read -p "ELASTIC_SVC_ACCOUNT not set. Enter Elastic service account: " ELASTIC_SVC_ACCOUNT -fi +# (C) Copyright IBM Corp. 2020 +# +# SPDX-License-Identifier: Apache-2.0 -if [ -z "$KAFKA_INSTANCE" ] -then - read -p "KAFKA_INSTANCE not set. Enter Kafka instance name (i.e. HRI-Event Streams): " KAFKA_INSTANCE -fi - -if [ -z "$KAFKA_SVC_ACCOUNT" ] -then - read -p "KAFKA_SVC_ACCOUNT not set. Enter Kafka service account: " KAFKA_SVC_ACCOUNT -fi +set -eo pipefail echo "CLOUD_API_KEY: ****" echo "REGION: $REGION" @@ -75,6 +15,8 @@ echo "ELASTIC_INSTANCE: $ELASTIC_INSTANCE" echo "ELASTIC_SVC_ACCOUNT: $ELASTIC_SVC_ACCOUNT" echo "KAFKA_INSTANCE: $KAFKA_INSTANCE" echo "KAFKA_SVC_ACCOUNT: $KAFKA_SVC_ACCOUNT" +echo "OIDC_ISSUER: $OIDC_ISSUER" +echo "JWT_AUDIENCE_ID: $JWT_AUDIENCE_ID" # determine if IBM Cloud CLI is already installed set +e > /dev/null 2>&1 @@ -98,32 +40,44 @@ if test "$res" != "0"; then ibmcloud plugin install cloud-functions fi -ibmcloud login --apikey $CLOUD_API_KEY -r $REGION -ibmcloud target -g $RESOURCE_GROUP +ibmcloud login --apikey "${CLOUD_API_KEY}" -r "${REGION}" +ibmcloud target -g "${RESOURCE_GROUP}" ibmcloud fn property unset --namespace # create namespace if it doesn't already exist -if ! ibmcloud fn property set --namespace ${NAMESPACE}; then +if ! 
ibmcloud fn property set --namespace "${NAMESPACE}"; then echo "creating IBM Functions namespace ${NAMESPACE}" - ibmcloud fn namespace create $NAMESPACE - ibmcloud fn property set --namespace $NAMESPACE + ibmcloud fn namespace create "${NAMESPACE}" + ibmcloud fn property set --namespace "${NAMESPACE}" fi -# deploy HRI mgmt-api to IBM Functions +# deploy hri-mgmt-api to IBM Functions ibmcloud fn deploy --manifest mgmt-api-manifest.yml -# redefine IBM Functions API to properly authenticate with web actions -# (only necessary because of the following openwhisk bug: https://github.com/apache/openwhisk-wskdeploy/issues/1046) -./updateApi.sh +echo "Building OpenWhisk Parameters" +params="$(cat < $paramFile + +echo "Created temp params file" + +# set config parameters, all of them have to be set in the same command +ibmcloud fn package update hri_mgmt_api --param-file $paramFile + +# cleanup temp file +rm $paramFile -# bind Elastic and Kafka service instances to HRI mgmt-api -ibmcloud fn service bind databases-for-elasticsearch hri_mgmt_api --instance "$ELASTIC_INSTANCE" --keyname $ELASTIC_SVC_ACCOUNT -ibmcloud fn service bind messagehub hri_mgmt_api --instance "$KAFKA_INSTANCE" --keyname $KAFKA_SVC_ACCOUNT +# bind Elastic and Kafka service instances to hri-mgmt-api +ibmcloud fn service bind databases-for-elasticsearch hri_mgmt_api --instance "${ELASTIC_INSTANCE}" --keyname "${ELASTIC_SVC_ACCOUNT}" +ibmcloud fn service bind messagehub hri_mgmt_api --instance "${KAFKA_INSTANCE}" --keyname "${KAFKA_SVC_ACCOUNT}" ./run-smoketests.sh diff --git a/docker/Dockerfile b/docker/Dockerfile new file mode 100644 index 0000000..b6845bb --- /dev/null +++ b/docker/Dockerfile @@ -0,0 +1,47 @@ +# (C) Copyright IBM Corp. 
2020 +# +# SPDX-License-Identifier: Apache-2.0 + +# Use IBM's standard base image +FROM registry.access.redhat.com/ubi8/ubi-minimal:8.4 + + +# yum is required by the IBM CLI installer and it tries to install git, curl, docker, kubectl, and helm +# The docker install fails and causes problems. We don't need it, so this adds a dummy file to trick +# the installer into thinking that docker is installed. +# jq is used for JSON parsing of Elastic credentials. +RUN microdnf update -y && \ + microdnf -y install git openssl yum which tar sudo && \ + microdnf clean all && \ + echo "echo 'fake docker install'" > /usr/local/bin/docker && \ + chmod 777 /usr/local/bin/docker && \ + curl -sL --fail https://ibm.biz/idt-installer | bash && \ + yum clean all && \ + curl -sL --fail https://github.com/stedolan/jq/releases/download/jq-1.6/jq-linux64 > /usr/local/bin/jq && \ + chmod 755 /usr/local/bin/jq + +COPY docker/run.sh \ + docker/elastic.sh \ + docker/appid.sh \ + document-store/index-templates/batches.json \ + deploy.sh \ + mgmt-api-manifest.yml \ + run-smoketests.sh \ + /mgmt-api-release/ + +COPY build \ + /mgmt-api-release/build/ + +WORKDIR mgmt-api-release + +# Setup flink user +RUN groupadd -g 1000 hri && \ + useradd --shell /bin/bash -u 1000 -g 1000 -m hri && \ + chown -R hri:hri /mgmt-api-release + +USER hri + +# IBM CLI plugins have to be installed for each user +RUN ibmcloud plugin install cloud-functions + +ENTRYPOINT ["./run.sh"] diff --git a/docker/README.md b/docker/README.md new file mode 100644 index 0000000..7c5e225 --- /dev/null +++ b/docker/README.md @@ -0,0 +1,53 @@ +# Docker Deploy +Deployment of the HRI Management API and configuring the ElasticSearch Document Store is packaged into a docker image. The image contains several scripts, all the compiled Go code, and Elastic index templates. It also runs smoke tests and logs the results. + +There are several environment variables that must be set in the container. 
+ +| Name | Description | +|-----------|---------------------| +| IBM_CLOUD_API_KEY | The API key for IBM Cloud | +| IBM_CLOUD_REGION | Target IBM Cloud Region, e.g. 'ibm:yp:us-south' | +| RESOURCE_GROUP | Target IBM Cloud Resource Group | +| NAMESPACE | Target IBM Function namespace | +| ELASTIC_INSTANCE | Name of Elasticsearch instance | +| ELASTIC_SVC_ACCOUNT | Name of Elasticsearch service ID | +| KAFKA_INSTANCE | Name of Event Streams (Kafka) instance | +| KAFKA_SVC_ACCOUNT | Name of Event Streams (Kafka) service ID | +| TOOLCHAIN_ID | ID of the Toolchain for publishing results to Insights | +| Logical_App_Name | Application Name smoke test results are published under in Insights | +| OIDC_ISSUER | The base URL of the OIDC issuer to use for OAuth authentication (e.g. `https://us-south.appid.cloud.ibm.com/oauth/v4/`) | +| APPID_PREFIX | (Optional) Prefix string to append to the AppId applications and roles created during deployment | +| SET_UP_APPID | (Optional) defaults to true. Set to false if you do not want the App ID set-up described [above](#using-app-id-for-oidc-authentication) enabled. | + +## Implementation Details + +The image entrypoint is `run.sh`, which: + 1. sets some environment variables + 1. logs into the IBM Cloud CLI + 1. calls `elastic.sh` + 1. calls `appid.sh` + 1. calls `deploy.sh` + +`elastic.sh` turns off automatic index creation and sets the default template for batch indexes. These are idempotent actions, so they can be executed multiple times. + +`appid.sh` creates HRI application as well as HRI Consumer and HRI Data Integrator roles in AppId. + +`deploy.sh` deploys the Management API to IBM Functions and runs smoke tests (by calling the health check endpoint). 
+ +## Building +To build the docker image locally, build the code and run the following command from the base project directory: +```shell script +docker build ./ -f docker/Dockerfile +``` + +## Testing +You can test locally by running the docker container with all the required environment variables. A `template.env` file is included that you can copy and set the secure parameters. Be careful not to commit and secure parameters into GitHub. + +```shell script +docker run --env-file docker/test.env --rm +``` + +To investigate issues, you may want to run it interactively with a bash prompt. Just add `-it --entrypoint bash`. +```shell script +docker run --env-file docker/test.env --rm -it --entrypoint bash +``` diff --git a/docker/appid.sh b/docker/appid.sh new file mode 100755 index 0000000..c583cd4 --- /dev/null +++ b/docker/appid.sh @@ -0,0 +1,118 @@ +# (C) Copyright IBM Corp. 2020 +# +# SPDX-License-Identifier: Apache-2.0 + +# Exit on errors +set -e + +# Replace oauth with management in the OIDC_ISSUER url +issuer="${OIDC_ISSUER/oauth/management}" +echo "issuer:$issuer" + +# Get IAM Token +# Note, in this command and many below, the response is gathered and then sent to jq via echo (rather than piping directly) because if you pipe the response +# directly to jq, the -f flag to fail if the curl command fails will not terminate the script properly. +echo +echo "Requesting IAM token" +response=$(curl -X POST -sS 'https://iam.cloud.ibm.com/identity/token' -d "grant_type=urn:ibm:params:oauth:grant-type:apikey&apikey=${CLOUD_API_KEY}") +iamToken=$(echo $response | jq -r '.access_token // "NO_TOKEN"') +if [ $iamToken = "NO_TOKEN" ]; then + echo "the_curl_response: $response" + echo "Error getting IAM Token! Exiting!" + exit 1 +fi + +# Create application +echo +echo "Creating HRI provider application" +# Do not fail script if this call fails. We need to check if it failed because of a CONFLICT, in which case the script will exit 0. 
+hriApplicationName="${APPID_PREFIX}HRI Management API" +response=$(curl -X POST -sS "${issuer}/applications" -H "Authorization: Bearer ${iamToken}" -H 'Content-Type: application/json' -d @- << EOF +{ +"name": "${hriApplicationName}", +"type": "regularwebapp" +} +EOF +) + +# Get the application ID if the call was successful. If it was unsuccessful, check the returned code. +# If it failed because of a CONFLICT, that just means AppId was already configured (likely in a previous +# deployment attempt) and we will exit 0. If any other code, the script fails. +hriApplicationId=$(echo $response | jq -r '.clientId // empty') +if [ -z $hriApplicationId ]; then + code=$(echo $response | jq -r '.code // empty') + if [ $code == 'CONFLICT' ]; then + echo + echo 'App Id already configured! Continuing deployment without continuing App Id initialization.' + + # Get the existing applicationId to export as JWT_AUDIENCE_ID + response=$(curl -X GET -sS "${issuer}/applications" -H "Authorization: Bearer ${iamToken}") + hriApplicationId=$(echo $response | jq -r --arg name "$hriApplicationName" '.applications[] | select(.name == $name) | .clientId') + + echo + echo "hriApplicationId: $hriApplicationId" + if [ -z $hriApplicationId ]; then + echo "Failed to get existing HRI Management API application ID! Unable to set JWT_AUDIENCE_ID!" + echo "the_curl_response: $response" + exit 1 + fi + echo "Setting JWT_AUDIENCE_ID to existing HRI Management API ID: $hriApplicationId" + echo $hriApplicationId > JWT_AUDIENCE_ID + exit 0 + else + echo "Failed to create ${APPID_PREFIX}HRI Management API application! Exit code ${code}." 
+ echo "the_curl_response: $response" + exit 1 + fi +fi +echo $hriApplicationId > JWT_AUDIENCE_ID + +# Assign scopes to application +echo +echo "Assigning hri_consumer and hri_data_integrator scopes to HRI provider application" +curl -X PUT -sS -f "${issuer}/applications/${hriApplicationId}/scopes" -H "Content-Type: application/json" -H "Authorization: Bearer ${iamToken}" -d @- << EOF +{ +"scopes": [ "hri_consumer", "hri_data_integrator"] +} +EOF + +# Create roles +echo +echo "Creating roles for each of the created scopes" +response=$(curl -X POST -sS "${issuer}/roles" -H "Authorization: Bearer ${iamToken}" -H "Content-Type: application/json" -d @- << EOF +{ +"name": "${APPID_PREFIX}HRI Consumer", +"description": "HRI Consumer Role", +"access": [ { + "application_id": "${hriApplicationId}", + "scopes": [ "hri_consumer" ] +} ] +} +EOF +) +consumerRoleId=$(echo $response | jq -r '.id // "REQUEST_FAILED"') +if [ $consumerRoleId = "REQUEST_FAILED" ]; then + echo "Error Creating role: HRI Consumer Role!" + echo "the_curl_response: $response" + exit 1 +fi + +response=$(curl -X POST -sS "${issuer}/roles" -H "Authorization: Bearer ${iamToken}" -H "Content-Type: application/json" -d @- << EOF +{ +"name": "${APPID_PREFIX}HRI Data Integrator", +"description": "HRI Data Integrator Role", +"access": [ { + "application_id": "${hriApplicationId}", + "scopes": [ "hri_data_integrator" ] +} ] +} +EOF +) +dataIntegratorRoleId=$(echo $response | jq -r '.id // "REQUEST_FAILED"') +if [ $dataIntegratorRoleId = "REQUEST_FAILED" ]; then + echo "Error Creating role: HRI Data Integrator Role!" + echo "the_curl_response: $response" + exit 1 +fi + +exit 0 diff --git a/docker/elastic.sh b/docker/elastic.sh new file mode 100755 index 0000000..e754ad3 --- /dev/null +++ b/docker/elastic.sh @@ -0,0 +1,44 @@ +#!/bin/bash + +# (C) Copyright IBM Corp. 
2020 +# +# SPDX-License-Identifier: Apache-2.0 + +echo "Looking up ElasticSearch connection information and credentials" +# Get Elastic connection info +id=$(ibmcloud resource service-instance "${ELASTIC_INSTANCE}" --output json | jq .[0].id) +echo +echo "ES id: $id" + +ibmcloud resource service-key "${ELASTIC_SVC_ACCOUNT}" --output json | jq ".[] | select(.source_crn == $id) | .credentials.connection.https.certificate.certificate_base64" | tr -d '"' | base64 -d > elastic.crt + +export CURL_CA_BUNDLE=elastic.crt + +baseUrl=$(ibmcloud resource service-key "${ELASTIC_SVC_ACCOUNT}" --output json | jq ".[] | select(.source_crn == $id) | .credentials.connection.https.composed[0]" | tr -d '"') +echo +# Remove the credentials from the url when logging +echo "ES baseUrl: ${baseUrl/:\/\/*@/://}" + +# Keep track of any failures +rtn=0 + +# set auto-index creation off +echo +echo "Setting ElasticSearch auto index creation to false" +curl -X PUT -sS -f $baseUrl/_cluster/settings -H 'Content-Type: application/json' -d' +{ +"persistent": { "action.auto_create_index": "false" } +}' || { echo 'Setting ElasticSearch auto index creation failed!' ; rtn=1; } + +# upload batches index template +echo +echo "Setting ElasticSearch Batches index template" +curl -X PUT -sS -f $baseUrl/_index_template/batches -H 'Content-Type: application/json' -d '@batches.json' || +{ + echo -e '\nSetting ElasticSearch Batches index template failed!' ; rtn=1; +} + +echo +echo "ElasticSearch configuration complete" + +exit $rtn diff --git a/docker/run.sh b/docker/run.sh new file mode 100755 index 0000000..1b2b245 --- /dev/null +++ b/docker/run.sh @@ -0,0 +1,44 @@ +#!/bin/bash + +# (C) Copyright IBM Corp. 
2020 +# +# SPDX-License-Identifier: Apache-2.0 + +# map values to expected environment variables +export CLOUD_API_KEY=$IBM_CLOUD_API_KEY +# strip off preceding 'ibm:yp:' if present +export REGION=${IBM_CLOUD_REGION##*:} + +# generate a random 32 character string for IBM Functions Action API key +export FN_WEB_SECURE_KEY=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 32 | head -n 1) + +# login +ibmcloud login --apikey "${CLOUD_API_KEY}" -r "${REGION}" || { echo 'IBM Cloud CLI login failed!'; exit 1; } +ibmcloud target -g "${RESOURCE_GROUP}" + +# Keep track of any failures +rtn=0 + +# Update ElasticSearch +echo "Configuring HRI Document Store (ElasticSearch)..." +./elastic.sh || { echo 'HRI Document Store (ElasticSearch) configuration failed!' ; rtn=1; } + +if $SET_UP_APPID; then + echo "Setting up AppId applications and roles..." + ./appid.sh || { echo 'AppId setup failed!' ; rtn=1; } + + # JWT_AUDIENCE_ID was written to file -- read it, then delete it. + export JWT_AUDIENCE_ID=$(cat JWT_AUDIENCE_ID) + echo "JWT_AUDIENCE_ID set to $JWT_AUDIENCE_ID" + rm JWT_AUDIENCE_ID +fi + +# Deploy Mgmt-API +echo "Deploying HRI Management API..." +./deploy.sh || { echo 'HRI Management API Deployment failed!' 
; rtn=1; } + +# Write the smoke test results to the log +echo "smoketests.xml" +cat smoketests.xml + +exit $rtn diff --git a/docker/template.env b/docker/template.env new file mode 100644 index 0000000..64c0228 --- /dev/null +++ b/docker/template.env @@ -0,0 +1,11 @@ +IBM_CLOUD_API_KEY= +IBM_CLOUD_REGION=ibm:yp:us-south +RESOURCE_GROUP=MY_RESOURCE_GROUP +NAMESPACE=master +ELASTIC_INSTANCE=HRI-DocumentStore +ELASTIC_SVC_ACCOUNT=dev-test +KAFKA_INSTANCE=HRI-EventStreams +KAFKA_SVC_ACCOUNT=dev-test +SET_UP_APPID=true +OIDC_ISSUER= +APPID_PREFIX=Test Prefix diff --git a/document-store/index-templates/batches.json b/document-store/index-templates/batches.json index 92b5b08..47a6e3b 100644 --- a/document-store/index-templates/batches.json +++ b/document-store/index-templates/batches.json @@ -1,14 +1,17 @@ { "index_patterns": ["*-batches"], - "settings": { - "number_of_shards": 1 - }, - "mappings": { - "batch": { + "template": { + "settings": { + "number_of_shards": 1 + }, + "mappings": { "properties": { "name": { "type": "keyword" }, + "integratorId": { + "type": "keyword" + }, "status": { "type": "keyword" }, diff --git a/elastic-cert64 b/elastic-cert64 new file mode 100644 index 0000000..3d59122 --- /dev/null +++ b/elastic-cert64 @@ -0,0 +1,19 @@ +-----BEGIN CERTIFICATE----- +MIIDHTCCAgWgAwIBAgIUL5g6t1HmNOt8OOqUWyuQZeWsrUAwDQYJKoZIhvcNAQEL +BQAwHjEcMBoGA1UEAwwTSUJNIENsb3VkIERhdGFiYXNlczAeFw0xODExMjExMTQ3 +MjdaFw0yODExMTgxMTQ3MjdaMB4xHDAaBgNVBAMME0lCTSBDbG91ZCBEYXRhYmFz +ZXMwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3hxfosYeXdmrQRHlU +vMg91IWSCuGZ16RbHbZ2p62SEUfBJemr4MkTr48YZTfiTCZjckWWGlBlvUYr0cYY +KM3OPtkhk3lmkAQlJcMCPfiF2+mBxAUqrIibCgmQijylRJW4sEFQgSbrXewDywEd +8JUgm+82DwLYy6vnkKJjzcUZk5OKS1Uv7w0pHUtUMtNLaSyKksDdE9d9sj6lWTFJ +W9CyXCp5iffJ97oewIh47nIFYU5D5MptCDYr6wifOWvhiduYD6WMR7wDgLMv/ra9 +Zt4nt7MYglRmKnO77DZI5Y9lT4gw4FiU0k65aWnXrk+1DZ4jqFdWk3VpoXhA0IuX +ZMbPAgMBAAGjUzBRMB0GA1UdDgQWBBQYWTX4dsUL3/LnQAXQLLzh46yDATAfBgNV 
+HSMEGDAWgBQYWTX4dsUL3/LnQAXQLLzh46yDATAPBgNVHRMBAf8EBTADAQH/MA0G +CSqGSIb3DQEBCwUAA4IBAQAPwYrOIQMEreNuHY/68lFJimB6GCinGsJw8YIBBQwA +tdJMOXU8bXGrEHEuZ6Qaho7Q5dV2bB6GVKTMqhSEdrvuyxKniQ90jdkQSk2VHhDe +H+6i04hA9TkKT6ooLwMPc1LYYzqDljEkfKlLIPWCkOAozD3cyc26pV/35nG7WzAF +xw7S3jAyB3WcJDlWlSWGTn58w3EHxzVXvKT6Y9eAdKp4SjUHyVFsL5xtSyjH8zpF +pZKK8wWNUwgWQ66MNh8Ckq732JZ+so6RAfb4BbNj45I3s9fuZSYlvjkc5/+da3Ck +Rp6anX5N6yIrzhVmAgefjQdBztYzdfPhsJBkS/TDnRmk +-----END CERTIFICATE----- \ No newline at end of file diff --git a/mgmt-api-manifest.yml b/mgmt-api-manifest.yml index 16ad803..d5ef9fa 100644 --- a/mgmt-api-manifest.yml +++ b/mgmt-api-manifest.yml @@ -4,83 +4,83 @@ packages: hri_mgmt_api: - version: 1.0 + version: 1.2.5 actions: create_batch: function: build/batches_create-bin.zip - runtime: go:1.11 + runtime: go:1.15 web-export: true annotations: require-whisk-auth: $FN_WEB_SECURE_KEY get_batches: function: build/batches_get-bin.zip - runtime: go:1.11 + runtime: go:1.15 web-export: true annotations: require-whisk-auth: $FN_WEB_SECURE_KEY get_batch_by_id: function: build/batches_get_by_id-bin.zip - runtime: go:1.11 + runtime: go:1.15 web-export: true annotations: require-whisk-auth: $FN_WEB_SECURE_KEY healthcheck: function: build/healthcheck_get-bin.zip - runtime: go:1.11 + runtime: go:1.15 web-export: true annotations: require-whisk-auth: $FN_WEB_SECURE_KEY send_complete: function: build/batches_sendcomplete-bin.zip - runtime: go:1.11 + runtime: go:1.15 web-export: true annotations: require-whisk-auth: $FN_WEB_SECURE_KEY terminate_batch: function: build/batches_terminate-bin.zip - runtime: go:1.11 + runtime: go:1.15 web-export: true annotations: require-whisk-auth: $FN_WEB_SECURE_KEY create_tenant: function: build/tenants_create-bin.zip - runtime: go:1.11 + runtime: go:1.15 web-export: true annotations: require-whisk-auth: $FN_WEB_SECURE_KEY get_tenants: function: build/tenants_get-bin.zip - runtime: go:1.11 + runtime: go:1.15 web-export: true annotations: require-whisk-auth: 
$FN_WEB_SECURE_KEY get_tenant_by_id: function: build/tenants_get_by_id-bin.zip - runtime: go:1.11 + runtime: go:1.15 web-export: true annotations: require-whisk-auth: $FN_WEB_SECURE_KEY delete_tenant: function: build/tenants_delete-bin.zip - runtime: go:1.11 + runtime: go:1.15 web-export: true annotations: require-whisk-auth: $FN_WEB_SECURE_KEY get_streams: function: build/streams_get-bin.zip - runtime: go:1.11 + runtime: go:1.15 web-export: true annotations: require-whisk-auth: $FN_WEB_SECURE_KEY create_stream: function: build/streams_create-bin.zip - runtime: go:1.11 + runtime: go:1.15 web-export: true annotations: require-whisk-auth: $FN_WEB_SECURE_KEY delete_stream: function: build/streams_delete-bin.zip - runtime: go:1.11 + runtime: go:1.15 web-export: true annotations: require-whisk-auth: $FN_WEB_SECURE_KEY diff --git a/run-dreddtests.sh b/run-dreddtests.sh index 41e2311..e8658ea 100755 --- a/run-dreddtests.sh +++ b/run-dreddtests.sh @@ -1,23 +1,26 @@ #!/bin/bash +# (C) Copyright IBM Corp. 
2020 +# +# SPDX-License-Identifier: Apache-2.0 npm install -g api-spec-converter npm install -g dredd@12.2.0 gem install dredd_hooks echo 'Clone Alvearie/hri-api-spec Repo' -git clone https://github.com/Alvearie/hri-api-spec.git api-spec -cd api-spec +git clone https://github.com/Alvearie/hri-api-spec.git hri-api-spec +cd hri-api-spec echo "if exists, checkout ${TRAVIS_BRANCH}" exists=$(git show-ref refs/remotes/origin/${TRAVIS_BRANCH}) if [[ -n "$exists" ]]; then git checkout ${TRAVIS_BRANCH} else - git checkout master + git checkout support-1.x fi # convert API to swagger 2.0 api-spec-converter -f openapi_3 -t swagger_2 -s yaml management-api/management.yml > management.swagger.yml -tac ../api-spec/management.swagger.yml | sed "1,8d" | tac > tmp && mv tmp ../api-spec/management.swagger.yml +tac ../hri-api-spec/management.swagger.yml | sed "1,8d" | tac > tmp && mv tmp ../hri-api-spec/management.swagger.yml # lookup the base API url for the current targeted functions namespace serviceUrl=$(bx fn api list -f | grep 'URL: ' | grep -v batchId -m 1 | sed -rn 's/^.*: (.*)\/hri.*/\1\/hri/p') diff --git a/run-fvttests.sh b/run-fvttests.sh deleted file mode 100755 index 1f597cd..0000000 --- a/run-fvttests.sh +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env bash - -date=`date` -time=0.0 -title="" -header="" -test="" -footer="" - -filename="fvttest.xml" -echo -e "$title\n$header\n$test\n$footer" > "$filename" \ No newline at end of file diff --git a/run-insights-publish.sh b/run-insights-publish.sh deleted file mode 100755 index c61c299..0000000 --- a/run-insights-publish.sh +++ /dev/null @@ -1,110 +0,0 @@ -#!/usr/bin/env bash - -PUBLISH_TYPE=$1 - -export MY_APP_NAME="hri-mgmt-api" - -function combineTestResults() { - echo "===============BEGIN COMBINING UNIT TESTS===============" - - INPUT_DIRECTORY="build/test-results/${1}" - RESULT_FILE=$2 - failures=0 - testCount=0 - errors=0 - skipped=0 - time=0.0 - output="" - - for file_name in ${INPUT_DIRECTORY}/*.xml; do - 
newOutput="" - newTime=0.0 - testCount=$((testCount+$(cat "$file_name" | grep -o 'tests="[^"]*' | sed 's/tests="//g'))) - failures=$((failures+$(cat "$file_name" | grep -o 'failures="[^"]*' | sed 's/failures="//g'))) - errors=$((errors+$(cat "$file_name" | grep -o 'errors="[^"]*' | sed 's/errors="//g'))) - skipped=$((skipped+$(cat "$file_name" | grep -o 'skipped="[^"]*' | sed 's/skipped="//g'))) - newTime=$(cat "$file_name" | head -2 | tail -1 | grep -o 'time="[^"]*' | sed 's/time="//g') - time=$(awk "BEGIN {print $time+$newTime; exit}") - newOutput=$(cat "$file_name" | tail -n +3 | sed '$d') - output="$output$newOutput" - done - - date=`date` - header="" - footer="" - echo -e "$header\n$output\n$footer" > "$RESULT_FILE" - - echo "===============END COMBINING UNIT TESTS===============" -} - -function ifFileExists() { - FILE_PATH=$1 - if [ -f "$FILE_PATH" ] - then - echo "Found test file to publish. Continuing..." - else - echo "Cannot find file path $FILE_PATH" - exit 1 - fi -} - -if [ "$PUBLISH_TYPE" == "buildRecord" ] -then - # Upload a build record for this build, It is assumed that the build was successful - ibmcloud doi publishbuildrecord --logicalappname="$MY_APP_NAME" --buildnumber="$TRAVIS_BUILD_NUMBER" --branch $TRAVIS_BRANCH --repositoryurl https://github.com/Alvearie/hri-mgmt-api --commitid $TRAVIS_COMMIT --status pass - -elif [ "$PUBLISH_TYPE" == "deployRecord" ] -then - # Upload a deployment record; It is assumed that the deployment was successful - ibmcloud doi publishdeployrecord --logicalappname="$MY_APP_NAME" --buildnumber="$TRAVIS_BUILD_NUMBER" --env=dev --status=pass - -elif [ "$PUBLISH_TYPE" == "unitTest" ] -then - # Upload unittest test record for the build - ifFileExists "unittest.xml" - ibmcloud doi publishtestrecord --logicalappname="$MY_APP_NAME" --buildnumber="$TRAVIS_BUILD_NUMBER" --filelocation=unittest.xml --type=unittest - -elif [ "$PUBLISH_TYPE" == "ivtTest" ] -then - # Upload IVT test record for the build - ifFileExists "ivttest.xml" 
- ibmcloud doi publishtestrecord --logicalappname="$MY_APP_NAME" --buildnumber="$TRAVIS_BUILD_NUMBER" --filelocation=ivttest.xml --type=ivt - -elif [ "$PUBLISH_TYPE" == "dreddTest" ] -then - # Upload Dredd test record as IVT for the build - ifFileExists "dreddtests.xml" - ibmcloud doi publishtestrecord --logicalappname="$MY_APP_NAME" --buildnumber="$TRAVIS_BUILD_NUMBER" --filelocation=dreddtests.xml --type=ivt - -elif [ "$PUBLISH_TYPE" == "fvtTest" ] -then - # Upload FVT test record for the build - ifFileExists "fvttest.xml" - ibmcloud doi publishtestrecord --logicalappname="$MY_APP_NAME" --buildnumber="$TRAVIS_BUILD_NUMBER" --filelocation=fvttest.xml --type=fvt - -elif [ "$PUBLISH_TYPE" == "smokeTest" ] -then - # Upload Smoke test record for the build - echo $(combineTestResults 'smokeTest' 'smoketests.xml') - ifFileExists "smoketests.xml" - ibmcloud doi publishtestrecord --logicalappname="$MY_APP_NAME" --buildnumber="$TRAVIS_BUILD_NUMBER" --filelocation=smoketests.xml --type=smoketests - -elif [ "$PUBLISH_TYPE" == "sonarQube" ] -then - # Upload SonarQube test record for the build - REPORT_TASK_PATH="build/sonar/report-task.txt" - ifFileExists "$REPORT_TASK_PATH" - ibmcloud doi publishtestrecord --sqtoken="$SONARQUBE_CREDENTIALS" --logicalappname="$MY_APP_NAME" --buildnumber="$TRAVIS_BUILD_NUMBER" --filelocation=$REPORT_TASK_PATH --type=sonarqube - find . 
-name '*.json' - ifFileExists "SQData_$MY_APP_NAME.json" - -elif [ "$PUBLISH_TYPE" == "evaluateCI" ] -then - # Invoke a DevOps Insights gate to evaluated a policy based on uploaded data - ibmcloud doi evaluategate --logicalappname="$MY_APP_NAME" --buildnumber="$TRAVIS_BUILD_NUMBER" --policy=WFFH-CI - -elif [ "$PUBLISH_TYPE" == "evaluateCD" ] -then - # Invoke a DevOps Insights gate to evaluated a policy based on uploaded data - ibmcloud doi evaluategate --logicalappname="$MY_APP_NAME" --buildnumber="$TRAVIS_BUILD_NUMBER" --policy=WFFH-CD -fi \ No newline at end of file diff --git a/run-ivttests.sh b/run-ivttests.sh index c7f3d70..f2a525f 100755 --- a/run-ivttests.sh +++ b/run-ivttests.sh @@ -1,4 +1,7 @@ #!/usr/bin/env bash +# (C) Copyright IBM Corp. 2020 +# +# SPDX-License-Identifier: Apache-2.0 echo 'Run IVT Tests' rspec test/spec --tag ~@broken --format documentation --format RspecJunitFormatter --out ivttest.xml diff --git a/run-smoketests.sh b/run-smoketests.sh index 0c6716d..20b8dc8 100755 --- a/run-smoketests.sh +++ b/run-smoketests.sh @@ -1,28 +1,26 @@ #!/usr/bin/env bash +# (C) Copyright IBM Corp. 
2020 +# +# SPDX-License-Identifier: Apache-2.0 + set -x -# Requires an HRI_API_KEY if authorization is enabled passing=0 failing=0 output="" -# Acquire Service Endpoint -healthcheckAction=$(ibmcloud fn action list | awk '/healthcheck/{print $1}') -apiHost=$(ibmcloud fn property get --apihost | awk '{print $4}') -healthcheckUrl="https://$apiHost/api/v1/web$healthcheckAction" +# lookup the base API url for the current targeted functions namespace +serviceUrl=$(ibmcloud fn api list -f | grep 'URL: ' | grep 'hri/healthcheck' -m 1 | sed -rn 's/^.*: (.*)\/hri.*/\1\/hri/p') echo 'Run Smoke Tests' -# Don't display FN_WEB_SECURE_KEY -set +x -HRI_API_STATUS=$(curl --write-out "%{http_code}\n" --silent --output /dev/null "$healthcheckUrl" -H "X-Require-Whisk-Auth: $FN_WEB_SECURE_KEY") -set -x +HRI_API_STATUS=$(curl --write-out "%{http_code}\n" --silent --output /dev/null "$serviceUrl/healthcheck" ) if [ $HRI_API_STATUS -eq 200 ]; then passing=$((passing+1)) failure='/>' else failing=$((failing+1)) - HRI_API_ERROR=$(curl "$healthcheckUrl" -H "X-Require-Whisk-Auth: ${FN_WEB_SECURE_KEY}") + HRI_API_ERROR=$(curl "$serviceUrl/healthcheck") failure=">$HRI_API_ERROR" fi output="$output\n&1 > unittest rtn=$? diff --git a/secureApi.sh b/secureApi.sh deleted file mode 100755 index a20b2b9..0000000 --- a/secureApi.sh +++ /dev/null @@ -1,42 +0,0 @@ -#!/bin/bash - -# This script enable API key authentication to an API. It downloads the API swagger definition, -# adds a security definition, and then updates the API with the new configuration. 
See these -# for more detailed information: -# https://github.com/apache/openwhisk/blob/master/docs/apigateway.md -# https://github.com/apache/openwhisk-apigateway/blob/master/doc/v2/management_interface_v2.md - -# Download the api definition -ibmcloud fn api get hri-batches > api.json - -# check for an existing security definition -if [[ -n $(grep securityDefinitions api.json) ]]; then - echo "Found existing security definitions, exiting" - exit 0 -fi - -# Remove the last two lines -lines=$(wc -l < api.json) -head -n $(( lines - 2 )) api.json > secureApi.json - -# Append the security definition and closing '}' -cat >>secureApi.json < 0 { @@ -189,7 +199,7 @@ func Get(params map[string]interface{}, client *elasticsearch.Client) map[string } return response.Success(http.StatusOK, map[string]interface{}{ - "total": body["hits"].(map[string]interface{})["total"].(float64), + "total": body["hits"].(map[string]interface{})["total"].(map[string]interface{})["value"].(float64), "results": hits, }) } diff --git a/src/batches/get_by_id.go b/src/batches/get_by_id.go index 5e2b6d7..b64a5c0 100644 --- a/src/batches/get_by_id.go +++ b/src/batches/get_by_id.go @@ -6,20 +6,29 @@ package batches import ( - // "fmt" + "fmt" + "github.com/Alvearie/hri-mgmt-api/common/auth" "github.com/Alvearie/hri-mgmt-api/common/elastic" "github.com/Alvearie/hri-mgmt-api/common/param" "github.com/Alvearie/hri-mgmt-api/common/path" "github.com/Alvearie/hri-mgmt-api/common/response" - "github.com/elastic/go-elasticsearch/v6" + "github.com/elastic/go-elasticsearch/v7" "log" "net/http" "os" ) -func GetById(params map[string]interface{}, client *elasticsearch.Client) map[string]interface{} { +const msgMissingStatusElem = "Error: Elastic Search Result body does Not have the expected '_source' Element" + +func GetById(params map[string]interface{}, claims auth.HriClaims, client *elasticsearch.Client) map[string]interface{} { logger := log.New(os.Stdout, "batches/GetById: ", log.Llongfile) + if 
!claims.HasScope(auth.HriIntegrator) && !claims.HasScope(auth.HriConsumer) { + errMsg := auth.MsgAccessTokenMissingScopes + logger.Println(errMsg) + return response.Error(http.StatusUnauthorized, errMsg) + } + // validate that required Two input params are present ONLY in the PATH param tenantId, err := path.ExtractParam(params, param.TenantIndex) if err != nil { @@ -38,12 +47,41 @@ func GetById(params map[string]interface{}, client *elasticsearch.Client) map[st index := elastic.IndexFromTenantId(tenantId) logger.Printf("index: %v", index) - res, err := client.Get(index, BatchIdToEsDocId(batchId)) + res, err := client.Get(index, batchId) resultBody, errResp := elastic.DecodeBody(res, err, tenantId, logger) if errResp != nil { return errResp } + errResponse := checkBatchAuthorization(claims, resultBody) + if errResponse != nil { + return errResponse + } + return response.Success(http.StatusOK, EsDocToBatch(resultBody)) } + +func checkBatchAuthorization(claims auth.HriClaims, resultBody map[string]interface{}) map[string]interface{} { + if claims.HasScope(auth.HriConsumer) { //= Always Authorized + return nil // return nil Error for Authorized + } + + if claims.HasScope(auth.HriIntegrator) { + if sourceBody, ok := resultBody["_source"].(map[string]interface{}); ok { + integratorId := sourceBody[param.IntegratorId] + //if claims.Subject from the token does NOT match the previously saved batch.IntegratorId, user NOT Authorized + if claims.Subject != integratorId { + errMsg := fmt.Sprintf(auth.MsgIntegratorSubClaimNoMatch, claims.Subject, integratorId) + return response.Error(http.StatusUnauthorized, errMsg) + } + } else { //_source elem does Not exist - Internal Server Error + return response.Error(http.StatusInternalServerError, msgMissingStatusElem) + } + } else { //No Scope was provided -> Unauthorized - we should never reach here + errMsg := auth.MsgAccessTokenMissingScopes + return response.Error(http.StatusUnauthorized, errMsg) + } + + return nil //Default 
Return: we are Authorized => nil error +} diff --git a/src/batches/get_by_id_test.go b/src/batches/get_by_id_test.go index 8fc16ae..70fda6d 100644 --- a/src/batches/get_by_id_test.go +++ b/src/batches/get_by_id_test.go @@ -7,6 +7,7 @@ package batches import ( "errors" + "github.com/Alvearie/hri-mgmt-api/common/auth" "github.com/Alvearie/hri-mgmt-api/common/elastic" "github.com/Alvearie/hri-mgmt-api/common/path" "github.com/Alvearie/hri-mgmt-api/common/response" @@ -22,9 +23,9 @@ const activationId string = "activationId" func TestGetById(t *testing.T) { _ = os.Setenv(response.EnvOwActivationId, activationId) - docId := "batch7j3" - batchId := EsDocIdToBatchId(docId) + batchId := "batch7j3" tenantId := "tenant12x" + subject := "dataIntegrator1" validPath := "/hri/tenants/" + tenantId + "/batches/" + batchId validPathArg := map[string]interface{}{ path.ParamOwPath: validPath, @@ -33,12 +34,14 @@ func TestGetById(t *testing.T) { testCases := []struct { name string args map[string]interface{} + claims auth.HriClaims transport *test.FakeTransport expected map[string]interface{} }{ { - name: "success-case", - args: validPathArg, + name: "success-case", + args: validPathArg, + claims: auth.HriClaims{Scope: auth.HriConsumer}, transport: test.NewFakeTransport(t).AddCall( "/tenant12x-batches/_doc/batch7j3", test.ElasticCall{ @@ -62,13 +65,14 @@ func TestGetById(t *testing.T) { }`, }, ), - expected: response.Success(http.StatusOK, map[string]interface{}{"id": batchId, "name": "monkeyBatch", "status": "started", "startDate": "2019-12-13", "dataType": "claims", "topic": "ingest-test", "recordCount": float64(1)}), + expected: response.Success(http.StatusOK, map[string]interface{}{"id": "batch7j3", "name": "monkeyBatch", "status": "started", "startDate": "2019-12-13", "dataType": "claims", "topic": "ingest-test", "recordCount": float64(1)}), }, { name: "batch not found", args: map[string]interface{}{ path.ParamOwPath: "/hri/tenants/tenant12x/batches/batch-no-existo", }, + claims: 
auth.HriClaims{Scope: auth.HriConsumer}, transport: test.NewFakeTransport(t).AddCall( "/tenant12x-batches/_doc/batch-no-existo", test.ElasticCall{ @@ -84,9 +88,19 @@ func TestGetById(t *testing.T) { ), expected: response.Error(http.StatusNotFound, "The document for tenantId: tenant12x with document (batch) ID: batch-no-existo was not found"), }, + { + name: "no role set in Claim error", + args: validPathArg, + claims: auth.HriClaims{}, + transport: test.NewFakeTransport(t), + expected: response.Error( + http.StatusUnauthorized, + auth.MsgAccessTokenMissingScopes), + }, { name: "missing open whisk path param", args: map[string]interface{}{}, //Missing Path Param + claims: auth.HriClaims{Scope: auth.HriConsumer}, transport: test.NewFakeTransport(t), expected: response.Error( http.StatusBadRequest, @@ -97,6 +111,7 @@ func TestGetById(t *testing.T) { args: map[string]interface{}{ path.ParamOwPath: "/hri/tenants", }, + claims: auth.HriClaims{Scope: auth.HriConsumer}, transport: test.NewFakeTransport(t), expected: response.Error( http.StatusBadRequest, @@ -107,6 +122,7 @@ func TestGetById(t *testing.T) { args: map[string]interface{}{ path.ParamOwPath: "/hri/tenants/tenant12x/batchId", }, + claims: auth.HriClaims{Scope: auth.HriConsumer}, transport: test.NewFakeTransport(t), expected: response.Error( http.StatusBadRequest, @@ -115,8 +131,9 @@ func TestGetById(t *testing.T) { { name: "bad tenantId", args: map[string]interface{}{ - path.ParamOwPath: "/hri/tenants/bad-tenant/batches/" + batchId, + path.ParamOwPath: "/hri/tenants/bad-tenant/batches/batch7j3", }, + claims: auth.HriClaims{Scope: auth.HriConsumer}, transport: test.NewFakeTransport(t).AddCall( "/bad-tenant-batches/_doc/batch7j3", test.ElasticCall{ @@ -150,8 +167,9 @@ func TestGetById(t *testing.T) { "index_not_found_exception: no such index"), }, { - name: "bad-ES-response-body-EOF", - args: validPathArg, + name: "bad-ES-response-body-EOF", + args: validPathArg, + claims: auth.HriClaims{Scope: auth.HriConsumer}, 
transport: test.NewFakeTransport(t).AddCall( "/tenant12x-batches/_doc/batch7j3", test.ElasticCall{ @@ -164,8 +182,9 @@ func TestGetById(t *testing.T) { "Error parsing the Elastic search response body: EOF"), }, { - name: "body decode error on ES OK Response", - args: validPathArg, + name: "body decode error on ES OK Response", + args: validPathArg, + claims: auth.HriClaims{Scope: auth.HriConsumer}, transport: test.NewFakeTransport(t).AddCall( "/tenant12x-batches/_doc/batch7j3", test.ElasticCall{ @@ -177,8 +196,9 @@ func TestGetById(t *testing.T) { "Error parsing the Elastic search response body: invalid character 'b' looking for beginning of object key string"), }, { - name: "body decode error on ES Response: 400 Bad Request", - args: validPathArg, + name: "body decode error on ES Response: 400 Bad Request", + args: validPathArg, + claims: auth.HriClaims{Scope: auth.HriConsumer}, transport: test.NewFakeTransport(t).AddCall( "/tenant12x-batches/_doc/batch7j3", test.ElasticCall{ @@ -191,8 +211,9 @@ func TestGetById(t *testing.T) { "Error parsing the Elastic search response body: invalid character 'b' looking for beginning of object key string"), }, { - name: "client error", - args: validPathArg, + name: "client error", + args: validPathArg, + claims: auth.HriClaims{Scope: auth.HriConsumer}, transport: test.NewFakeTransport(t).AddCall( "/tenant12x-batches/_doc/batch7j3", test.ElasticCall{ @@ -203,6 +224,68 @@ func TestGetById(t *testing.T) { http.StatusInternalServerError, "Elastic client error: some client error"), }, + { + name: "integrator role integrator id matches sub claim", + args: validPathArg, + claims: auth.HriClaims{Scope: auth.HriIntegrator, Subject: subject}, + transport: test.NewFakeTransport(t).AddCall( + "/tenant12x-batches/_doc/batch7j3", + test.ElasticCall{ + ResponseBody: ` + { + "_index" : "tenant12x-batches", + "_type" : "_doc", + "_id" : "batch7j3", + "_version" : 1, + "_seq_no" : 0, + "_primary_term" : 1, + "found" : true, + "_source" : { + 
"name" : "monkeyBatch", + "topic" : "ingest-test", + "dataType" : "claims", + "integratorId" : "dataIntegrator1", + "status" : "started", + "recordCount" : 1, + "startDate" : "2019-12-13" + } + }`, + }, + ), + expected: response.Success(http.StatusOK, map[string]interface{}{"id": "batch7j3", "integratorId": "dataIntegrator1", "name": "monkeyBatch", "status": "started", "startDate": "2019-12-13", "dataType": "claims", "topic": "ingest-test", "recordCount": float64(1)}), + }, + { + name: "integrator role integrator id Does NOT Match sub claim", + args: validPathArg, + claims: auth.HriClaims{Scope: auth.HriIntegrator, Subject: "no_match_integrator"}, + transport: test.NewFakeTransport(t).AddCall( + "/tenant12x-batches/_doc/batch7j3", + test.ElasticCall{ + ResponseBody: ` + { + "_index" : "tenant12x-batches", + "_type" : "_doc", + "_id" : "batch7j3", + "_version" : 1, + "_seq_no" : 0, + "_primary_term" : 1, + "found" : true, + "_source" : { + "name" : "monkeyBatch", + "topic" : "ingest-test", + "dataType" : "claims", + "integratorId" : "dataIntegrator1", + "status" : "started", + "recordCount" : 1, + "startDate" : "2019-12-13" + } + }`, + }, + ), + expected: response.Error( + http.StatusUnauthorized, + "The token's sub claim (clientId): no_match_integrator does not match the data integratorId: dataIntegrator1"), + }, } for _, tc := range testCases { @@ -213,9 +296,77 @@ func TestGetById(t *testing.T) { } t.Run(tc.name, func(t *testing.T) { - if actual := GetById(tc.args, client); !reflect.DeepEqual(tc.expected, actual) { + if actual := GetById(tc.args, tc.claims, client); !reflect.DeepEqual(tc.expected, actual) { t.Errorf("GetById() = %v, expected %v", actual, tc.expected) } }) } } + +func TestCheckBatchAuthorization(t *testing.T) { + subject := "dataIntegrator1" + + testCases := []struct { + name string + claims auth.HriClaims + resultBody map[string]interface{} + expected map[string]interface{} + }{ + + { + name: "empty_claim_scope_return_error", + claims: 
auth.HriClaims{}, + resultBody: map[string]interface{}{ + "_index": "tenant12x-batches", + "_type": "_doc", + "_id": "batch7j3", + "_version": 1, + "_seq_no": 0, + "_primary_term": 1, + "found": true, + "_source": map[string]interface{}{ + "name": "monkeyBatch", + "topic": "ingest-test", + "dataType": "claims", + "integratorId": subject, + "status": "started", + "recordCount": 1, + "startDate": "2019-12-13", + }, + }, + expected: response.Error(http.StatusUnauthorized, auth.MsgAccessTokenMissingScopes), + }, + { + name: "consumer_role_returns_authorized", + claims: auth.HriClaims{Scope: auth.HriConsumer}, + resultBody: map[string]interface{}{}, + expected: nil, + }, + { + name: "missing_result_source_error", + claims: auth.HriClaims{Scope: auth.HriIntegrator, Subject: subject}, + resultBody: map[string]interface{}{ + "_index": "tenant12x-batches", + "_type": "_doc", + "_id": "batch7j3", + "_version": 1, + "_seq_no": 0, + "_primary_term": 1, + "found": true, + }, + expected: response.Error(http.StatusInternalServerError, msgMissingStatusElem), + }, + } + + for _, tc := range testCases { + + t.Run(tc.name, func(t *testing.T) { + t.Run(tc.name, func(t *testing.T) { + if actual := checkBatchAuthorization(tc.claims, tc.resultBody); !reflect.DeepEqual(tc.expected, actual) { + t.Errorf("GetById() = %v, expected %v", actual, tc.expected) + } + }) + }) + } + +} diff --git a/src/batches/get_test.go b/src/batches/get_test.go index 83d26c0..47defda 100644 --- a/src/batches/get_test.go +++ b/src/batches/get_test.go @@ -7,6 +7,7 @@ package batches import ( "errors" + "github.com/Alvearie/hri-mgmt-api/common/auth" "github.com/Alvearie/hri-mgmt-api/common/elastic" "github.com/Alvearie/hri-mgmt-api/common/path" "github.com/Alvearie/hri-mgmt-api/common/response" @@ -19,16 +20,19 @@ import ( func TestGet(t *testing.T) { activationId := "activationId" + subject := "clientId" _ = os.Setenv(response.EnvOwActivationId, activationId) tests := []struct { name string params 
map[string]interface{} + claims auth.HriClaims ft *test.FakeTransport expected map[string]interface{} }{ {"simple", map[string]interface{}{path.ParamOwPath: "/hri/tenants/1234/batches"}, + auth.HriClaims{Scope: auth.HriConsumer}, test.NewFakeTransport(t).AddCall( "/1234-batches/_search", test.ElasticCall{ @@ -36,13 +40,24 @@ func TestGet(t *testing.T) { ResponseBody: ` { "hits":{ - "total":1, + "total" : { + "value" : 1, + "relation" : "eq" + }, "hits":[ { "_id":"uuid", "_source":{ - "name":"mybatch", - "status":"started" + "dataType" : "rspec-batch", + "invalidThreshold" : -1, + "metadata" : { + "rspec1" : "test1" + }, + "name" : "mybatch", + "startDate" : "2021-02-24T18:08:36Z", + "status" : "started", + "topic" : "ingest.test.claims.in", + "integratorId" : "modified-integrator-id" } } ] @@ -50,10 +65,10 @@ func TestGet(t *testing.T) { }`, }, ), - response.Success(http.StatusOK, map[string]interface{}{"total": float64(1), "results": []interface{}{map[string]interface{}{"id": EsDocIdToBatchId("uuid"), "name": "mybatch", "status": "started"}}}), - }, + response.Success(http.StatusOK, map[string]interface{}{"total": float64(1), "results": []interface{}{map[string]interface{}{"id": "uuid", "dataType": "rspec-batch", "invalidThreshold": float64(-1), "name": "mybatch", "startDate": "2021-02-24T18:08:36Z", "status": "started", "topic": "ingest.test.claims.in", "integratorId": "modified-integrator-id", "metadata": map[string]interface{}{"rspec1": "test1"}}}})}, {"allparams", map[string]interface{}{path.ParamOwPath: "/hri/tenants/1234/batches", "size": "20", "from": "10", "name": "mybatch", "status": "started", "gteDate": "01/01/2019", "lteDate": "01/01/2020"}, + auth.HriClaims{Scope: auth.HriConsumer}, test.NewFakeTransport(t).AddCall( "/1234-batches/_search", test.ElasticCall{ @@ -63,14 +78,24 @@ func TestGet(t *testing.T) { ResponseBody: ` { "hits":{ - "total":1, + "total" : { + "value" : 1, + "relation" : "eq" + }, "hits":[ { "_id":"uuid", "_source":{ - 
"name":"mybatch", - "status":"started", - "startDate":"01/02/2019" + "dataType" : "rspec-batch", + "invalidThreshold" : -1, + "metadata" : { + "rspec1" : "test1" + }, + "name" : "mybatch", + "startDate" : "01/02/2019", + "status" : "started", + "topic" : "ingest.test.claims.in", + "integratorId" : "modified-integrator-id" } } ] @@ -78,30 +103,34 @@ func TestGet(t *testing.T) { }`, }, ), - response.Success(http.StatusOK, map[string]interface{}{"total": float64(1), "results": []interface{}{map[string]interface{}{"id": EsDocIdToBatchId("uuid"), "name": "mybatch", "status": "started", "startDate": "01/02/2019"}}}), - }, + response.Success(http.StatusOK, map[string]interface{}{"total": float64(1), "results": []interface{}{map[string]interface{}{"id": "uuid", "dataType": "rspec-batch", "invalidThreshold": float64(-1), "name": "mybatch", "startDate": "01/02/2019", "status": "started", "topic": "ingest.test.claims.in", "integratorId": "modified-integrator-id", "metadata": map[string]interface{}{"rspec1": "test1"}}}})}, {"missing open whisk path param", map[string]interface{}{}, + auth.HriClaims{Scope: auth.HriConsumer}, test.NewFakeTransport(t), response.Error(http.StatusBadRequest, "Required parameter '__ow_path' is missing"), }, {"bad open whisk path param", map[string]interface{}{path.ParamOwPath: "/hri/tenants"}, + auth.HriClaims{Scope: auth.HriConsumer}, test.NewFakeTransport(t), response.Error(http.StatusBadRequest, "The path is shorter than the requested path parameter; path: [ hri tenants], requested index: 3"), }, {"bad size param", map[string]interface{}{path.ParamOwPath: "/hri/tenants/1234/batches", "size": "a1"}, + auth.HriClaims{Scope: auth.HriConsumer}, test.NewFakeTransport(t), response.Error(http.StatusBadRequest, "Error parsing 'size' parameter: strconv.Atoi: parsing \"a1\": invalid syntax"), }, {"bad from param", map[string]interface{}{path.ParamOwPath: "/hri/tenants/1234/batches", "from": "b2"}, + auth.HriClaims{Scope: auth.HriConsumer}, 
test.NewFakeTransport(t), response.Error(http.StatusBadRequest, "Error parsing 'from' parameter: strconv.Atoi: parsing \"b2\": invalid syntax"), }, {"bad gteDate value", map[string]interface{}{path.ParamOwPath: "/hri/tenants/1234/batches", "gteDate": "2019-aaaef-01"}, + auth.HriClaims{Scope: auth.HriConsumer}, test.NewFakeTransport(t).AddCall( "/1234-batches/_search", test.ElasticCall{ @@ -146,6 +175,7 @@ func TestGet(t *testing.T) { }, { "invalid error Json in Response body", map[string]interface{}{path.ParamOwPath: "/hri/tenants/1234/batches"}, + auth.HriClaims{Scope: auth.HriConsumer}, test.NewFakeTransport(t).AddCall( "/1234-batches/_search", test.ElasticCall{ @@ -182,22 +212,26 @@ func TestGet(t *testing.T) { }, {"bad name param_prohibited character", map[string]interface{}{path.ParamOwPath: "/hri/tenants/1234/batches", "name": "{[]//zzx[]}"}, + auth.HriClaims{Scope: auth.HriConsumer}, test.NewFakeTransport(t), response.Error(http.StatusBadRequest, "query parameters may not contain these characters: \"[]{}"), }, {"bad status param_prohibited character", map[string]interface{}{path.ParamOwPath: "/hri/tenants/1234/batches", "status": "z{[z]//j[]}"}, + auth.HriClaims{Scope: auth.HriConsumer}, test.NewFakeTransport(t), response.Error(http.StatusBadRequest, "query parameters may not contain these characters: \"[]{}"), }, {"bad startDate param_prohibited character", map[string]interface{}{path.ParamOwPath: "/hri/tenants/1234/batches", "gteDate": "01/01/2019", "lteDate": "{}[][xxx]\"}"}, + auth.HriClaims{Scope: auth.HriConsumer}, test.NewFakeTransport(t), response.Error(http.StatusBadRequest, "query parameters may not contain these characters: \"[]{}"), }, {"client error", map[string]interface{}{path.ParamOwPath: "/hri/tenants/1234/batches"}, + auth.HriClaims{Scope: auth.HriConsumer}, test.NewFakeTransport(t).AddCall( "/1234-batches/_search", test.ElasticCall{ @@ -209,6 +243,7 @@ func TestGet(t *testing.T) { }, {"response error", 
map[string]interface{}{path.ParamOwPath: "/hri/tenants/1234/batches"}, + auth.HriClaims{Scope: auth.HriConsumer}, test.NewFakeTransport(t).AddCall( "/1234-batches/_search", test.ElasticCall{ @@ -227,6 +262,7 @@ func TestGet(t *testing.T) { }, {"body decode error on OK", map[string]interface{}{path.ParamOwPath: "/hri/tenants/1234/batches"}, + auth.HriClaims{Scope: auth.HriConsumer}, test.NewFakeTransport(t).AddCall( "/1234-batches/_search", test.ElasticCall{ @@ -238,6 +274,7 @@ func TestGet(t *testing.T) { }, {"body decode error on 400", map[string]interface{}{path.ParamOwPath: "/hri/tenants/1234/batches"}, + auth.HriClaims{Scope: auth.HriConsumer}, test.NewFakeTransport(t).AddCall( "/1234-batches/_search", test.ElasticCall{ @@ -250,6 +287,7 @@ func TestGet(t *testing.T) { }, {"bad tenantId", map[string]interface{}{path.ParamOwPath: "/hri/tenants/1234/batches"}, + auth.HriClaims{Scope: auth.HriConsumer}, test.NewFakeTransport(t).AddCall( "/1234-batches/_search", test.ElasticCall{ @@ -281,6 +319,85 @@ func TestGet(t *testing.T) { ), response.Error(http.StatusNotFound, "index_not_found_exception: no such index"), }, + {"Missing scopes", + map[string]interface{}{path.ParamOwPath: "/hri/tenants/1234/batches"}, + auth.HriClaims{}, + test.NewFakeTransport(t), + response.Error(http.StatusUnauthorized, auth.MsgAccessTokenMissingScopes), + }, + {"Integrator filter", + map[string]interface{}{path.ParamOwPath: "/hri/tenants/1234/batches"}, + auth.HriClaims{Scope: auth.HriIntegrator, Subject: subject}, + test.NewFakeTransport(t).AddCall( + "/1234-batches/_search", + test.ElasticCall{ + RequestQuery: "from=0&size=10&track_total_hits=true", + RequestBody: `{"query":{"bool":{"must":\[{"term":{"integratorId":"clientId"}}\]}}}` + "\n", + ResponseBody: ` + { + "hits":{ + "total" : { + "value" : 1, + "relation" : "eq" + }, + "hits":[ + { + "_id":"uuid", + "_source":{ + "dataType" : "rspec-batch", + "invalidThreshold" : -1, + "metadata" : { + "rspec1" : "test1" + }, + "name" : 
"mybatch", + "startDate" : "2021-02-24T18:08:36Z", + "status" : "started", + "topic" : "ingest.test.claims.in", + "integratorId" : "modified-integrator-id" + } + } + ] + } + }`, + }, + ), + response.Success(http.StatusOK, map[string]interface{}{"total": float64(1), "results": []interface{}{map[string]interface{}{"id": "uuid", "dataType": "rspec-batch", "invalidThreshold": float64(-1), "name": "mybatch", "startDate": "2021-02-24T18:08:36Z", "status": "started", "topic": "ingest.test.claims.in", "integratorId": "modified-integrator-id", "metadata": map[string]interface{}{"rspec1": "test1"}}}})}, + {"Consumer & Integrator no filter", + map[string]interface{}{path.ParamOwPath: "/hri/tenants/1234/batches"}, + auth.HriClaims{Scope: auth.HriIntegrator + " " + auth.HriConsumer, Subject: subject}, + test.NewFakeTransport(t).AddCall( + "/1234-batches/_search", + test.ElasticCall{ + RequestQuery: "from=0&size=10&track_total_hits=true", + ResponseBody: ` + { + "hits":{ + "total" : { + "value" : 1, + "relation" : "eq" + }, + "hits":[ + { + "_id":"uuid", + "_source":{ + "dataType" : "rspec-batch", + "invalidThreshold" : -1, + "metadata" : { + "rspec1" : "test1" + }, + "name" : "mybatch", + "startDate" : "2021-02-24T18:08:36Z", + "status" : "started", + "topic" : "ingest.test.claims.in", + "integratorId" : "modified-integrator-id" + } + } + ] + } + }`, + }, + ), + response.Success(http.StatusOK, map[string]interface{}{"total": float64(1), "results": []interface{}{map[string]interface{}{"id": "uuid", "dataType": "rspec-batch", "invalidThreshold": float64(-1), "name": "mybatch", "startDate": "2021-02-24T18:08:36Z", "status": "started", "topic": "ingest.test.claims.in", "integratorId": "modified-integrator-id", "metadata": map[string]interface{}{"rspec1": "test1"}}}})}, } for _, tt := range tests { @@ -290,7 +407,7 @@ func TestGet(t *testing.T) { t.Error(err) } - if got := Get(tt.params, esClient); !reflect.DeepEqual(got, tt.expected) { + if got := Get(tt.params, tt.claims, 
esClient); !reflect.DeepEqual(got, tt.expected) { t.Errorf("Get() = %v, expected %v", got, tt.expected) } }) diff --git a/src/batches/update_status.go b/src/batches/update_status.go index b343457..060412e 100644 --- a/src/batches/update_status.go +++ b/src/batches/update_status.go @@ -9,12 +9,13 @@ import ( "context" "fmt" "github.com/Alvearie/hri-mgmt-api/batches/status" + "github.com/Alvearie/hri-mgmt-api/common/auth" "github.com/Alvearie/hri-mgmt-api/common/elastic" "github.com/Alvearie/hri-mgmt-api/common/kafka" "github.com/Alvearie/hri-mgmt-api/common/param" "github.com/Alvearie/hri-mgmt-api/common/path" "github.com/Alvearie/hri-mgmt-api/common/response" - "github.com/elastic/go-elasticsearch/v6" + "github.com/elastic/go-elasticsearch/v7" "log" "net/http" "os" @@ -29,9 +30,23 @@ const ( msgUpdateResultNotReturned string = "Update result not returned in Elastic response" ) -func UpdateStatus(params map[string]interface{}, validator param.Validator, targetStatus status.BatchStatus, client *elasticsearch.Client, kafkaWriter kafka.Writer) map[string]interface{} { +func UpdateStatus( + params map[string]interface{}, + validator param.Validator, + claims auth.HriClaims, + targetStatus status.BatchStatus, + client *elasticsearch.Client, + kafkaWriter kafka.Writer) map[string]interface{} { + logger := log.New(os.Stdout, fmt.Sprintf("batches/%s: ", targetStatus), log.Llongfile) + // validate that caller has sufficient permissions + if !claims.HasScope(auth.HriIntegrator) { + msg := fmt.Sprintf(auth.MsgIntegratorRoleRequired, "update") + logger.Println(msg) + return response.Error(http.StatusUnauthorized, msg) + } + // validate that required input params are present tenantId, err := path.ExtractParam(params, param.TenantIndex) if err != nil { @@ -43,8 +58,17 @@ func UpdateStatus(params map[string]interface{}, validator param.Validator, targ logger.Println(err.Error()) return response.Error(http.StatusBadRequest, err.Error()) } - // recordCount is required
for StatusProcessComplete, unused for StatusTerminated - var recordCount int + + errResp := validator.ValidateOptional( + params, + param.Info{param.Metadata, reflect.Map}, + ) + if errResp != nil { + logger.Printf("Bad input optional params: %s", errResp) + return errResp + } + metadata := params[param.Metadata] + index := elastic.IndexFromTenantId(tenantId) // Elastic conditional update query @@ -61,22 +85,50 @@ func UpdateStatus(params map[string]interface{}, validator param.Validator, targ logger.Printf("Bad input params: %s", errResp) return errResp } - recordCount = int(params[param.RecordCount].(float64)) + recordCount := int(params[param.RecordCount].(float64)) + // NOTE: whenever the Elastic document is NOT updated, set the ctx.op = 'none' // flag. Elastic will use this flag in the response so we can check if the update took place. - updateScript = fmt.Sprintf("if (ctx._source.status == '%s') {ctx._source.status = '%s'; ctx._source.recordCount = %d; ctx._source.endDate = '%s';} else {ctx.op = 'none'}", status.Started, targetStatus, recordCount, currentTime.Format(elastic.DateTimeFormat)) + if metadata == nil { + updateScript = fmt.Sprintf("if (ctx._source.status == '%s' && ctx._source.integratorId == '%s') {ctx._source.status = '%s'; ctx._source.recordCount = %d; ctx._source.endDate = '%s';} else {ctx.op = 'none'}", + status.Started, claims.Subject, targetStatus, recordCount, currentTime.Format(elastic.DateTimeFormat)) + } else { + updateScript = fmt.Sprintf("if (ctx._source.status == '%s' && ctx._source.integratorId == '%s') {ctx._source.status = '%s'; ctx._source.recordCount = %d; ctx._source.endDate = '%s'; ctx._source.metadata = params.metadata;} else {ctx.op = 'none'}", + status.Started, claims.Subject, targetStatus, recordCount, currentTime.Format(elastic.DateTimeFormat)) + } + } else if targetStatus == status.Terminated { - updateScript = fmt.Sprintf("if (ctx._source.status == '%s') {ctx._source.status = '%s'; ctx._source.endDate = '%s';} else 
{ctx.op = 'none'}", status.Started, targetStatus, currentTime.Format(elastic.DateTimeFormat)) + + if metadata == nil { + updateScript = fmt.Sprintf("if (ctx._source.status == '%s' && ctx._source.integratorId == '%s') {ctx._source.status = '%s'; ctx._source.endDate = '%s';} else {ctx.op = 'none'}", + status.Started, claims.Subject, targetStatus, currentTime.Format(elastic.DateTimeFormat)) + } else { + updateScript = fmt.Sprintf("if (ctx._source.status == '%s' && ctx._source.integratorId == '%s') {ctx._source.status = '%s'; ctx._source.endDate = '%s'; ctx._source.metadata = params.metadata;} else {ctx.op = 'none'}", + status.Started, claims.Subject, targetStatus, currentTime.Format(elastic.DateTimeFormat)) + } + } else { // this method was somehow invoked with an invalid batch status errMsg := fmt.Sprintf("Cannot update batch to status '%s', only '%s' and '%s' are acceptable", targetStatus, status.Completed, status.Terminated) logger.Println(errMsg) return response.Error(http.StatusUnprocessableEntity, errMsg) } - updateRequest := map[string]interface{}{ - "script": map[string]string{ - "source": updateScript, - }, + + var updateRequest map[string]interface{} + if metadata == nil { + updateRequest = map[string]interface{}{ + "script": map[string]interface{}{ + "source": updateScript, + }, + } + } else { + updateRequest = map[string]interface{}{ + "script": map[string]interface{}{ + "source": updateScript, + "lang": "painless", + "params": map[string]interface{}{"metadata": metadata}, + }, + } } encodedQuery, err := elastic.EncodeQueryBody(updateRequest) @@ -88,7 +140,7 @@ func UpdateStatus(params map[string]interface{}, validator param.Validator, targ updateResponse, updateErr := client.Update( index, - BatchIdToEsDocId(batchId), + batchId, encodedQuery, // request body client.Update.WithContext(context.Background()), client.Update.WithSource("true"), // return updated batch in response @@ -122,13 +174,31 @@ func UpdateStatus(params map[string]interface{}, validator 
param.Validator, targ } return response.Success(http.StatusOK, map[string]interface{}{}) } else if updateResult == elasticResultNoop { - // update resulted in no-op, due to previous batch status - errMsg := fmt.Sprintf("Batch status was not updated to '%s', batch is already in '%s' state", targetStatus, updatedBatch[param.Status].(string)) + statusCode, errMsg := determineCause(targetStatus, claims.Subject, updatedBatch) logger.Println(errMsg) - return response.Error(http.StatusConflict, errMsg) + return response.Error(statusCode, errMsg) } else { errMsg := fmt.Sprintf("An unexpected error occurred updating the batch, Elastic update returned result '%s'", updateResult) logger.Println(errMsg) return response.Error(http.StatusInternalServerError, errMsg) } } + +func determineCause(targetStatus status.BatchStatus, subject string, batch map[string]interface{}) (int, string) { + if subject != batch[param.IntegratorId] { + // update resulted in no-op, due to insufficient permissions + return http.StatusUnauthorized, fmt.Sprintf( + "Batch status was not updated to '%s'. 
Requested by '%s' but owned by '%s'", + targetStatus, + subject, + batch[param.IntegratorId], + ) + } else { + // update resulted in no-op, due to previous batch status + return http.StatusConflict, fmt.Sprintf( + "Batch status was not updated to '%s', batch is already in '%s' state", + targetStatus, + batch[param.Status], + ) + } +} diff --git a/src/batches/update_status_test.go b/src/batches/update_status_test.go index e0fe4cc..1af6c8b 100644 --- a/src/batches/update_status_test.go +++ b/src/batches/update_status_test.go @@ -10,6 +10,7 @@ import ( "errors" "fmt" "github.com/Alvearie/hri-mgmt-api/batches/status" + "github.com/Alvearie/hri-mgmt-api/common/auth" "github.com/Alvearie/hri-mgmt-api/common/elastic" "github.com/Alvearie/hri-mgmt-api/common/param" "github.com/Alvearie/hri-mgmt-api/common/path" @@ -22,72 +23,60 @@ import ( ) const ( - docId string = "docId" + batchId string = "test-batch" batchName string = "batchName" batchTopic string = "test.batch.in" batchDataType string = "batchDataType" batchStartDate string = "ignored" batchRecordCount float64 = float64(1) + integratorId string = "integratorId" // Note that the following chars must be escaped because RequestBody is used as a regex pattern: ., ), ( - scriptProcessComplete string = `{"script":{"source":"if \(ctx\._source\.status == 'started'\) {ctx\._source\.status = 'completed'; ctx\._source\.recordCount = 1; ctx\._source\.endDate = '` + test.DatePattern + `';} else {ctx\.op = 'none'}"}}` + "\n" + scriptSendComplete string = `{"script":{"source":"if \(ctx\._source\.status == 'started' \\u0026\\u0026 ctx._source.integratorId == '` + integratorId + `'\) {ctx\._source\.status = 'completed'; ctx\._source\.recordCount = 1; ctx\._source\.endDate = '` + test.DatePattern + `';} else {ctx\.op = 'none'}"}}` + "\n" + scriptSendCompleteMetadata string = `{"script":{"lang":"painless","params":{"metadata":{"compression":"gzip","userMetaField1":"metadata"}},"source":"if \(ctx._source.status == 'started' 
\\u0026\\u0026 ctx._source.integratorId == '` + integratorId + `'\) {ctx._source.status = 'completed'; ctx._source.recordCount = 1; ctx._source.endDate = '` + test.DatePattern + `'; ctx\._source\.metadata = params\.metadata;} else {ctx.op = 'none'}"}}` + "\n" // Note that the following chars must be escaped because RequestBody is used as a regex pattern: ., ), ( - scriptTerminated string = `{"script":{"source":"if \(ctx\._source\.status == 'started'\) {ctx\._source\.status = 'terminated'; ctx\._source\.endDate = '` + test.DatePattern + `';} else {ctx\.op = 'none'}"}}` + "\n" - transportQueryParams string = "_source=true" + scriptTerminated string = `{"script":{"source":"if \(ctx._source.status == 'started' \\u0026\\u0026 ctx._source.integratorId == '` + integratorId + `'\) {ctx._source.status = 'terminated'; ctx._source.endDate = '` + test.DatePattern + `';} else {ctx.op = 'none'}"}}` + "\n" + scriptTerminatedMetadata string = `{"script":{"lang":"painless","params":{"metadata":{"compression":"gzip","userMetaField1":"metadata"}},"source":"if \(ctx\._source\.status == 'started' \\u0026\\u0026 ctx._source.integratorId == '` + integratorId + `'\) {ctx\._source\.status = 'terminated'; ctx\._source\.endDate = '` + test.DatePattern + `'; ctx\._source\.metadata = params\.metadata;} else {ctx\.op = 'none'}"}}` + "\n" + transportQueryParams string = "_source=true" ) -var batchId = EsDocIdToBatchId(docId) +var batchMetadata = map[string]interface{}{"compression": "gzip", "userMetaField1": "metadata"} func TestUpdateStatus(t *testing.T) { activationId := "activationId" _ = os.Setenv(response.EnvOwActivationId, activationId) + validClaims := auth.HriClaims{Scope: auth.HriIntegrator, Subject: integratorId} + // create some example batches in different states - sendCompleteBatch := map[string]interface{}{ - param.BatchId: batchId, - param.Name: batchName, - param.Topic: batchTopic, - param.DataType: batchDataType, - param.Status: status.Completed.String(), - param.StartDate: 
batchStartDate, - param.RecordCount: batchRecordCount, - } + sendCompleteBatch := createBatch(status.Completed) sendCompleteJSON, err := json.Marshal(sendCompleteBatch) if err != nil { t.Errorf("Unable to create batch JSON string: %s", err.Error()) } - terminatedBatch := map[string]interface{}{ - param.BatchId: batchId, - param.Name: batchName, - param.Topic: batchTopic, - param.DataType: batchDataType, - param.Status: status.Terminated.String(), - param.StartDate: batchStartDate, - param.RecordCount: batchRecordCount, - } + terminatedBatch := createBatch(status.Terminated) terminatedJSON, err := json.Marshal(terminatedBatch) if err != nil { t.Errorf("Unable to create batch JSON string: %s", err.Error()) } - failedBatch := map[string]interface{}{ - param.BatchId: batchId, - param.Name: batchName, - param.Topic: batchTopic, - param.DataType: batchDataType, - param.Status: status.Failed.String(), - param.StartDate: batchStartDate, - param.RecordCount: batchRecordCount, - } + failedBatch := createBatch(status.Failed) failedJSON, err := json.Marshal(failedBatch) if err != nil { t.Errorf("Unable to create batch JSON string: %s", err.Error()) } + startedBatch := createBatch(status.Started) + startedJSON, err := json.Marshal(startedBatch) + if err != nil { + t.Errorf("Unable to create batch JSON string: %s", err.Error()) + } + tests := []struct { name string targetStatus status.BatchStatus params map[string]interface{} + claims auth.HriClaims ft *test.FakeTransport writerError error expectedNotification map[string]interface{} @@ -100,6 +89,7 @@ func TestUpdateStatus(t *testing.T) { path.ParamOwPath: "/hri/tenants/1234", param.RecordCount: batchRecordCount, }, + claims: validClaims, ft: test.NewFakeTransport(t), expectedResponse: response.Error(http.StatusBadRequest, "The path is shorter than the requested path parameter; path: [ hri tenants 1234], requested index: 5"), }, @@ -110,31 +100,62 @@ func TestUpdateStatus(t *testing.T) { path.ParamOwPath: "/hri/tenants", 
param.RecordCount: batchRecordCount, }, + claims: validClaims, ft: test.NewFakeTransport(t), expectedResponse: response.Error(http.StatusBadRequest, "The path is shorter than the requested path parameter; path: [ hri tenants], requested index: 3"), }, { name: "simple sendComplete", targetStatus: status.Completed, + params: map[string]interface{}{ + path.ParamOwPath: "/hri/tenants/1234/batches/test-batch/action/sendComplete", + param.RecordCount: batchRecordCount, + }, + claims: validClaims, + ft: test.NewFakeTransport(t).AddCall( + "/1234-batches/_doc/test-batch/_update", + test.ElasticCall{ + RequestQuery: transportQueryParams, + RequestBody: scriptSendComplete, + ResponseBody: fmt.Sprintf(` + { + "_index": "1234-batches", + "_type": "_doc", + "_id": "test-batch", + "result": "updated", + "get": { + "_source": %s + } + }`, sendCompleteJSON), + }, + ), + expectedNotification: sendCompleteBatch, + expectedResponse: response.Success(http.StatusOK, map[string]interface{}{}), + }, + { + name: "sendComplete with metadata", + targetStatus: status.Completed, params: map[string]interface{}{ path.ParamOwPath: "/hri/tenants/1234/batches/" + batchId + "/action/sendComplete", param.RecordCount: batchRecordCount, + param.Metadata: batchMetadata, }, + claims: validClaims, ft: test.NewFakeTransport(t).AddCall( - "/1234-batches/_doc/"+docId+"/_update", + "/1234-batches/_doc/test-batch/_update", test.ElasticCall{ RequestQuery: transportQueryParams, - RequestBody: scriptProcessComplete, + RequestBody: scriptSendCompleteMetadata, ResponseBody: fmt.Sprintf(` { "_index": "1234-batches", "_type": "_doc", - "_id": "%s", + "_id": "test-batch", "result": "updated", "get": { "_source": %s } - }`, docId, sendCompleteJSON), + }`, sendCompleteJSON), }, ), expectedNotification: sendCompleteBatch, @@ -144,24 +165,25 @@ func TestUpdateStatus(t *testing.T) { name: "sendComplete fails on terminated batch", targetStatus: status.Completed, params: map[string]interface{}{ - path.ParamOwPath: 
"/hri/tenants/1234/batches/" + batchId + "/action/sendComplete", + path.ParamOwPath: "/hri/tenants/1234/batches/test-batch/action/sendComplete", param.RecordCount: batchRecordCount, }, + claims: validClaims, ft: test.NewFakeTransport(t).AddCall( - "/1234-batches/_doc/"+docId+"/_update", + "/1234-batches/_doc/test-batch/_update", test.ElasticCall{ RequestQuery: transportQueryParams, - RequestBody: scriptProcessComplete, + RequestBody: scriptSendComplete, ResponseBody: fmt.Sprintf(` { "_index": "1234-batches", "_type": "_doc", - "_id": "%s", + "_id": "test-batch", "result": "noop", "get": { "_source": %s } - }`, docId, terminatedJSON), + }`, terminatedJSON), }, ), expectedResponse: response.Error(http.StatusConflict, "Batch status was not updated to 'completed', batch is already in 'terminated' state"), @@ -170,33 +192,47 @@ func TestUpdateStatus(t *testing.T) { name: "sendComplete fails on missing record count parameter", targetStatus: status.Completed, params: map[string]interface{}{ - path.ParamOwPath: "/hri/tenants/1234/batches/" + batchId + "/action/sendComplete", + path.ParamOwPath: "/hri/tenants/1234/batches/test-batch/action/sendComplete", }, + claims: validClaims, ft: test.NewFakeTransport(t), expectedResponse: response.Error(http.StatusBadRequest, "Missing required parameter(s): [recordCount]"), }, { - name: "sendComplete fails when batch already in completed state", + name: "sendComplete fails on bad metadata parameter", targetStatus: status.Completed, params: map[string]interface{}{ path.ParamOwPath: "/hri/tenants/1234/batches/" + batchId + "/action/sendComplete", param.RecordCount: batchRecordCount, + param.Metadata: "not json object", + }, + claims: validClaims, + ft: test.NewFakeTransport(t), + expectedResponse: response.Error(http.StatusBadRequest, "Invalid parameter type(s): [metadata must be a map, got string instead.]"), + }, + { + name: "sendComplete fails when batch already in completed state", + targetStatus: status.Completed, + params: 
map[string]interface{}{ + path.ParamOwPath: "/hri/tenants/1234/batches/test-batch/action/sendComplete", + param.RecordCount: batchRecordCount, }, + claims: validClaims, ft: test.NewFakeTransport(t).AddCall( - "/1234-batches/_doc/"+docId+"/_update", + "/1234-batches/_doc/test-batch/_update", test.ElasticCall{ RequestQuery: transportQueryParams, - RequestBody: scriptProcessComplete, + RequestBody: scriptSendComplete, ResponseBody: fmt.Sprintf(` { "_index": "1234-batches", "_type": "_doc", - "_id": "%s", + "_id": "test-batch", "result": "noop", "get": { "_source": %s } - }`, docId, sendCompleteJSON), + }`, sendCompleteJSON), }, ), expectedResponse: response.Error(http.StatusConflict, "Batch status was not updated to 'completed', batch is already in 'completed' state"), @@ -205,23 +241,24 @@ func TestUpdateStatus(t *testing.T) { name: "fail when update result not returned by elastic", targetStatus: status.Completed, params: map[string]interface{}{ - path.ParamOwPath: "/hri/tenants/1234/batches/" + batchId + "/action/sendComplete", + path.ParamOwPath: "/hri/tenants/1234/batches/test-batch/action/sendComplete", param.RecordCount: batchRecordCount, }, + claims: validClaims, ft: test.NewFakeTransport(t).AddCall( - "/1234-batches/_doc/"+docId+"/_update", + "/1234-batches/_doc/test-batch/_update", test.ElasticCall{ RequestQuery: transportQueryParams, - RequestBody: scriptProcessComplete, + RequestBody: scriptSendComplete, ResponseBody: fmt.Sprintf(` { "_index": "1234-batches", "_type": "_doc", - "_id": "%s", + "_id": "test-batch", "get": { "_source": %s } - }`, docId, sendCompleteJSON), + }`, sendCompleteJSON), }, ), expectedResponse: response.Error(http.StatusInternalServerError, "Update result not returned in Elastic response"), @@ -230,21 +267,22 @@ func TestUpdateStatus(t *testing.T) { name: "fail when updated document not returned by elastic", targetStatus: status.Completed, params: map[string]interface{}{ - path.ParamOwPath: "/hri/tenants/1234/batches/" + batchId + 
"/action/sendComplete", + path.ParamOwPath: "/hri/tenants/1234/batches/test-batch/action/sendComplete", param.RecordCount: batchRecordCount, }, + claims: validClaims, ft: test.NewFakeTransport(t).AddCall( - "/1234-batches/_doc/"+docId+"/_update", + "/1234-batches/_doc/test-batch/_update", test.ElasticCall{ RequestQuery: transportQueryParams, - RequestBody: scriptProcessComplete, - ResponseBody: fmt.Sprintf(` + RequestBody: scriptSendComplete, + ResponseBody: ` { "_index": "1234-batches", "_type": "_doc", - "_id": "%s", + "_id": "test-batch", "result": "updated" - }`, docId), + }`, }, ), expectedResponse: response.Error(http.StatusInternalServerError, "Updated document not returned in Elastic response: error extracting the get section of the JSON"), @@ -253,24 +291,25 @@ func TestUpdateStatus(t *testing.T) { name: "fail when elastic result is unrecognized or invalid", targetStatus: status.Completed, params: map[string]interface{}{ - path.ParamOwPath: "/hri/tenants/1234/batches/" + batchId + "/action/sendComplete", + path.ParamOwPath: "/hri/tenants/1234/batches/test-batch/action/sendComplete", param.RecordCount: batchRecordCount, }, + claims: validClaims, ft: test.NewFakeTransport(t).AddCall( - "/1234-batches/_doc/"+docId+"/_update", + "/1234-batches/_doc/test-batch/_update", test.ElasticCall{ RequestQuery: transportQueryParams, - RequestBody: scriptProcessComplete, + RequestBody: scriptSendComplete, ResponseBody: fmt.Sprintf(` { "_index": "1234-batches", "_type": "_doc", - "_id": "%s", + "_id": "test-batch", "result": "MOnkeez-bad-result", "get": { "_source": %s } - }`, docId, sendCompleteJSON), + }`, sendCompleteJSON), }, ), expectedResponse: response.Error(http.StatusInternalServerError, "An unexpected error occurred updating the batch, Elastic update returned result 'MOnkeez-bad-result'"), @@ -279,14 +318,15 @@ func TestUpdateStatus(t *testing.T) { name: "invalid elastic response", targetStatus: status.Completed, params: map[string]interface{}{ - 
path.ParamOwPath: "/hri/tenants/1234/batches/" + batchId + "/action/sendComplete", + path.ParamOwPath: "/hri/tenants/1234/batches/test-batch/action/sendComplete", param.RecordCount: batchRecordCount, }, + claims: validClaims, ft: test.NewFakeTransport(t).AddCall( - "/1234-batches/_doc/"+docId+"/_update", + "/1234-batches/_doc/test-batch/_update", test.ElasticCall{ RequestQuery: transportQueryParams, - RequestBody: scriptProcessComplete, + RequestBody: scriptSendComplete, ResponseBody: `{"_index": "1234-batches",`, }, ), @@ -296,14 +336,15 @@ func TestUpdateStatus(t *testing.T) { name: "fail on nonexistent tenant", targetStatus: status.Completed, params: map[string]interface{}{ - path.ParamOwPath: "/hri/tenants/tenant-that-doesnt-exist/batches/" + batchId + "/action/sendComplete", + path.ParamOwPath: "/hri/tenants/tenant-that-doesnt-exist/batches/test-batch/action/sendComplete", param.RecordCount: batchRecordCount, }, + claims: validClaims, ft: test.NewFakeTransport(t).AddCall( - "/tenant-that-doesnt-exist-batches/_doc/"+docId+"/_update", + "/tenant-that-doesnt-exist-batches/_doc/test-batch/_update", test.ElasticCall{ RequestQuery: transportQueryParams, - RequestBody: scriptProcessComplete, + RequestBody: scriptSendComplete, ResponseStatusCode: http.StatusNotFound, ResponseBody: ` { @@ -325,11 +366,12 @@ func TestUpdateStatus(t *testing.T) { path.ParamOwPath: "/hri/tenants/1234/batches/batch-that-doesnt-exist/action/sendComplete", param.RecordCount: batchRecordCount, }, + claims: validClaims, ft: test.NewFakeTransport(t).AddCall( "/1234-batches/_doc/batch-that-doesnt-exist/_update", test.ElasticCall{ RequestQuery: transportQueryParams, - RequestBody: scriptProcessComplete, + RequestBody: scriptSendComplete, ResponseStatusCode: http.StatusNotFound, ResponseBody: ` { @@ -348,24 +390,25 @@ func TestUpdateStatus(t *testing.T) { name: "fail when unable to send notification", targetStatus: status.Completed, params: map[string]interface{}{ - path.ParamOwPath: 
"/hri/tenants/1234/batches/" + batchId + "/action/sendComplete", + path.ParamOwPath: "/hri/tenants/1234/batches/test-batch/action/sendComplete", param.RecordCount: batchRecordCount, }, + claims: validClaims, ft: test.NewFakeTransport(t).AddCall( - "/1234-batches/_doc/"+docId+"/_update", + "/1234-batches/_doc/test-batch/_update", test.ElasticCall{ RequestQuery: transportQueryParams, - RequestBody: scriptProcessComplete, + RequestBody: scriptSendComplete, ResponseBody: fmt.Sprintf(` { "_index": "1234-batches", "_type": "_doc", - "_id": "%s", + "_id": "test-batch", "result": "updated", "get": { "_source": %s } - }`, docId, sendCompleteJSON), + }`, sendCompleteJSON), }, ), expectedNotification: sendCompleteBatch, @@ -375,9 +418,10 @@ func TestUpdateStatus(t *testing.T) { { name: "simple terminate", targetStatus: status.Terminated, - params: map[string]interface{}{path.ParamOwPath: "/hri/tenants/1234/batches/" + batchId + "/action/terminate"}, + params: map[string]interface{}{path.ParamOwPath: "/hri/tenants/1234/batches/test-batch/action/terminate"}, + claims: validClaims, ft: test.NewFakeTransport(t).AddCall( - "/1234-batches/_doc/"+docId+"/_update", + "/1234-batches/_doc/test-batch/_update", test.ElasticCall{ RequestQuery: transportQueryParams, RequestBody: scriptTerminated, @@ -385,12 +429,40 @@ func TestUpdateStatus(t *testing.T) { { "_index": "1234-batches", "_type": "_doc", - "_id": "%s", + "_id": "test-batch", + "result": "updated", + "get": { + "_source": %s + } + }`, terminatedJSON), + }, + ), + expectedNotification: terminatedBatch, + expectedResponse: response.Success(http.StatusOK, map[string]interface{}{}), + }, + { + name: "terminate with metadata", + targetStatus: status.Terminated, + params: map[string]interface{}{ + path.ParamOwPath: "/hri/tenants/1234/batches/test-batch/action/terminate", + param.Metadata: batchMetadata, + }, + claims: validClaims, + ft: test.NewFakeTransport(t).AddCall( + "/1234-batches/_doc/test-batch/_update", + test.ElasticCall{ + 
RequestQuery: transportQueryParams, + RequestBody: scriptTerminatedMetadata, + ResponseBody: fmt.Sprintf(` + { + "_index": "1234-batches", + "_type": "_doc", + "_id": "test-batch", "result": "updated", "get": { "_source": %s } - }`, docId, terminatedJSON), + }`, terminatedJSON), }, ), expectedNotification: terminatedBatch, @@ -399,9 +471,10 @@ func TestUpdateStatus(t *testing.T) { { name: "terminate fails on failed batch", targetStatus: status.Terminated, - params: map[string]interface{}{path.ParamOwPath: "/hri/tenants/1234/batches/" + batchId + "/action/terminate"}, + params: map[string]interface{}{path.ParamOwPath: "/hri/tenants/1234/batches/test-batch/action/terminate"}, + claims: validClaims, ft: test.NewFakeTransport(t).AddCall( - "/1234-batches/_doc/"+docId+"/_update", + "/1234-batches/_doc/test-batch/_update", test.ElasticCall{ RequestQuery: transportQueryParams, RequestBody: scriptTerminated, @@ -409,12 +482,12 @@ func TestUpdateStatus(t *testing.T) { { "_index": "1234-batches", "_type": "_doc", - "_id": "%s", + "_id": "test-batch", "result": "noop", "get": { "_source": %s } - }`, docId, failedJSON), + }`, failedJSON), }, ), expectedResponse: response.Error(http.StatusConflict, "Batch status was not updated to 'terminated', batch is already in 'failed' state"), @@ -422,9 +495,10 @@ func TestUpdateStatus(t *testing.T) { { name: "terminate fails on previously terminated batch", targetStatus: status.Terminated, - params: map[string]interface{}{path.ParamOwPath: "/hri/tenants/1234/batches/" + batchId + "/action/terminate"}, + params: map[string]interface{}{path.ParamOwPath: "/hri/tenants/1234/batches/test-batch/action/terminate"}, + claims: validClaims, ft: test.NewFakeTransport(t).AddCall( - "/1234-batches/_doc/"+docId+"/_update", + "/1234-batches/_doc/test-batch/_update", test.ElasticCall{ RequestQuery: transportQueryParams, RequestBody: scriptTerminated, @@ -432,12 +506,12 @@ func TestUpdateStatus(t *testing.T) { { "_index": "1234-batches", "_type": "_doc", 
- "_id": "%s", + "_id": "test-batch", "result": "noop", "get": { "_source": %s } - }`, docId, terminatedJSON), + }`, terminatedJSON), }, ), expectedResponse: response.Error(http.StatusConflict, "Batch status was not updated to 'terminated', batch is already in 'terminated' state"), @@ -446,12 +520,75 @@ func TestUpdateStatus(t *testing.T) { name: "return error response for Unknown status", targetStatus: status.Unknown, params: map[string]interface{}{ - path.ParamOwPath: "/hri/tenants/1234/batches/" + batchId + "/action/blargBlarg", + path.ParamOwPath: "/hri/tenants/1234/batches/test-batch/action/blargBlarg", param.RecordCount: batchRecordCount, }, + claims: validClaims, ft: test.NewFakeTransport(t), expectedResponse: response.Error(http.StatusUnprocessableEntity, "Cannot update batch to status 'unknown', only 'completed' and 'terminated' are acceptable"), }, + { + name: "invalid role", + targetStatus: status.Completed, + params: map[string]interface{}{ + path.ParamOwPath: "/hri/tenants/1234/batches/test-batch/action/sendComplete", + param.RecordCount: batchRecordCount, + }, + claims: auth.HriClaims{Scope: auth.HriConsumer, Subject: integratorId}, + ft: test.NewFakeTransport(t), + expectedResponse: response.Error(http.StatusUnauthorized, fmt.Sprintf(auth.MsgIntegratorRoleRequired, "update")), + }, + { + name: "complete fails on bad integrator id", + targetStatus: status.Completed, + params: map[string]interface{}{ + path.ParamOwPath: "/hri/tenants/1234/batches/test-batch/action/sendComplete", + param.RecordCount: batchRecordCount, + }, + claims: auth.HriClaims{Scope: auth.HriIntegrator, Subject: "bad integrator"}, + ft: test.NewFakeTransport(t).AddCall( + "/1234-batches/_doc/test-batch/_update", + test.ElasticCall{ + RequestQuery: transportQueryParams, + ResponseBody: fmt.Sprintf(` + { + "_index": "1234-batches", + "_type": "_doc", + "_id": "test-batch", + "result": "noop", + "get": { + "_source": %s + } + }`, startedJSON), + }, + ), + expectedResponse: 
response.Error(http.StatusUnauthorized, fmt.Sprintf("Batch status was not updated to 'completed'. Requested by 'bad integrator' but owned by '%s'", integratorId)), + }, + { + name: "terminate fails on bad integrator id", + targetStatus: status.Terminated, + params: map[string]interface{}{ + path.ParamOwPath: "/hri/tenants/1234/batches/test-batch/action/sendComplete", + }, + claims: auth.HriClaims{Scope: auth.HriIntegrator, Subject: "bad integrator"}, + ft: test.NewFakeTransport(t).AddCall( + "/1234-batches/_doc/test-batch/_update", + test.ElasticCall{ + RequestQuery: transportQueryParams, + ResponseBody: fmt.Sprintf(` + { + "_index": "1234-batches", + "_type": "_doc", + "_id": "test-batch", + "result": "noop", + "get": { + "_source": %s + } + }`, startedJSON), + }, + ), + expectedResponse: response.Error(http.StatusUnauthorized, fmt.Sprintf("Batch status was not updated to 'terminated'. Requested by 'bad integrator' but owned by '%s'", integratorId)), + }, } for _, tt := range tests { @@ -468,9 +605,23 @@ func TestUpdateStatus(t *testing.T) { Error: tt.writerError, } - if result := UpdateStatus(tt.params, param.ParamValidator{}, tt.targetStatus, esClient, writer); !reflect.DeepEqual(result, tt.expectedResponse) { + if result := UpdateStatus(tt.params, param.ParamValidator{}, tt.claims, tt.targetStatus, esClient, writer); !reflect.DeepEqual(result, tt.expectedResponse) { t.Errorf("UpdateStatus() = %v, expected %v", result, tt.expectedResponse) } }) } } + +func createBatch(status status.BatchStatus) map[string]interface{} { + return map[string]interface{}{ + param.BatchId: batchId, + param.Name: batchName, + param.Topic: batchTopic, + param.DataType: batchDataType, + param.Status: status.String(), + param.StartDate: batchStartDate, + param.RecordCount: batchRecordCount, + param.Metadata: batchMetadata, + param.IntegratorId: integratorId, + } +} diff --git a/src/batches_create.go b/src/batches_create.go index 40d0a9c..898c329 100644 --- a/src/batches_create.go +++ 
b/src/batches_create.go @@ -11,10 +11,12 @@ package main import ( "github.com/Alvearie/hri-mgmt-api/batches" "github.com/Alvearie/hri-mgmt-api/common/actionloopmin" + "github.com/Alvearie/hri-mgmt-api/common/auth" "github.com/Alvearie/hri-mgmt-api/common/elastic" "github.com/Alvearie/hri-mgmt-api/common/kafka" "github.com/Alvearie/hri-mgmt-api/common/param" "github.com/Alvearie/hri-mgmt-api/common/response" + "github.com/coreos/go-oidc" "log" "net/http" "os" @@ -30,6 +32,11 @@ func createMain(params map[string]interface{}) map[string]interface{} { start := time.Now() logger.Printf("start createMain, %s \n", start) + claims, errResp := auth.GetValidatedClaims(params, auth.AuthValidator{}, oidc.NewProvider) + if errResp != nil { + return errResp + } + esClient, err := elastic.ClientFromParams(params) if err != nil { return response.Error(http.StatusInternalServerError, err.Error()) @@ -39,7 +46,9 @@ func createMain(params map[string]interface{}) map[string]interface{} { if err != nil { return response.Error(http.StatusInternalServerError, err.Error()) } - resp := batches.Create(params, param.ParamValidator{}, esClient, kafkaWriter) + + resp := batches.Create(params, param.ParamValidator{}, claims, esClient, kafkaWriter) + logger.Printf("processing time createMain, %d milliseconds \n", time.Since(start).Milliseconds()) return resp } diff --git a/src/batches_get.go b/src/batches_get.go index 37f0f6e..0e33c44 100644 --- a/src/batches_get.go +++ b/src/batches_get.go @@ -11,8 +11,10 @@ package main import ( "github.com/Alvearie/hri-mgmt-api/batches" "github.com/Alvearie/hri-mgmt-api/common/actionloopmin" + "github.com/Alvearie/hri-mgmt-api/common/auth" "github.com/Alvearie/hri-mgmt-api/common/elastic" "github.com/Alvearie/hri-mgmt-api/common/response" + "github.com/coreos/go-oidc" "log" "net/http" "os" @@ -28,11 +30,18 @@ func getMain(params map[string]interface{}) map[string]interface{} { start := time.Now() logger.Printf("start getMain, %s \n", start) + claims, errResp 
:= auth.GetValidatedClaims(params, auth.AuthValidator{}, oidc.NewProvider) + if errResp != nil { + return errResp + } + esClient, err := elastic.ClientFromParams(params) if err != nil { return response.Error(http.StatusInternalServerError, err.Error()) } - resp := batches.Get(params, esClient) + + resp := batches.Get(params, claims, esClient) + logger.Printf("processing time getMain, %d milliseconds \n", time.Since(start).Milliseconds()) return resp } diff --git a/src/batches_get_by_id.go b/src/batches_get_by_id.go index 8a51a62..de0049e 100644 --- a/src/batches_get_by_id.go +++ b/src/batches_get_by_id.go @@ -11,8 +11,10 @@ package main import ( "github.com/Alvearie/hri-mgmt-api/batches" "github.com/Alvearie/hri-mgmt-api/common/actionloopmin" + "github.com/Alvearie/hri-mgmt-api/common/auth" "github.com/Alvearie/hri-mgmt-api/common/elastic" "github.com/Alvearie/hri-mgmt-api/common/response" + "github.com/coreos/go-oidc" "log" "net/http" "os" @@ -28,11 +30,18 @@ func getByIdMain(params map[string]interface{}) map[string]interface{} { start := time.Now() logger.Printf("start getByIdMain, %s \n", start) + claims, errResp := auth.GetValidatedClaims(params, auth.AuthValidator{}, oidc.NewProvider) + if errResp != nil { + return errResp + } + esClient, err := elastic.ClientFromParams(params) if err != nil { return response.Error(http.StatusInternalServerError, err.Error()) } - resp := batches.GetById(params, esClient) + + resp := batches.GetById(params, claims, esClient) + logger.Printf("processing time getByIdMain, %d milliseconds \n", time.Since(start).Milliseconds()) return resp } diff --git a/src/batches_sendcomplete.go b/src/batches_sendcomplete.go index 96b07ad..61df411 100644 --- a/src/batches_sendcomplete.go +++ b/src/batches_sendcomplete.go @@ -12,10 +12,12 @@ import ( "github.com/Alvearie/hri-mgmt-api/batches" "github.com/Alvearie/hri-mgmt-api/batches/status" "github.com/Alvearie/hri-mgmt-api/common/actionloopmin" + "github.com/Alvearie/hri-mgmt-api/common/auth" 
"github.com/Alvearie/hri-mgmt-api/common/elastic" "github.com/Alvearie/hri-mgmt-api/common/kafka" "github.com/Alvearie/hri-mgmt-api/common/param" "github.com/Alvearie/hri-mgmt-api/common/response" + "github.com/coreos/go-oidc" "log" "net/http" "os" @@ -31,6 +33,11 @@ func sendCompleteMain(params map[string]interface{}) map[string]interface{} { start := time.Now() logger.Printf("start sendCompleteMain, %s \n", start) + claims, errResp := auth.GetValidatedClaims(params, auth.AuthValidator{}, oidc.NewProvider) + if errResp != nil { + return errResp + } + esClient, err := elastic.ClientFromParams(params) if err != nil { return response.Error(http.StatusInternalServerError, err.Error()) @@ -39,7 +46,9 @@ func sendCompleteMain(params map[string]interface{}) map[string]interface{} { if err != nil { return response.Error(http.StatusInternalServerError, err.Error()) } - resp := batches.UpdateStatus(params, param.ParamValidator{}, status.Completed, esClient, kafkaWriter) + + resp := batches.UpdateStatus(params, param.ParamValidator{}, claims, status.Completed, esClient, kafkaWriter) + logger.Printf("processing time sendCompleteMain, %d milliseconds \n", time.Since(start).Milliseconds()) return resp } diff --git a/src/batches_terminate.go b/src/batches_terminate.go index 0aec676..eccefcf 100644 --- a/src/batches_terminate.go +++ b/src/batches_terminate.go @@ -12,10 +12,12 @@ import ( "github.com/Alvearie/hri-mgmt-api/batches" "github.com/Alvearie/hri-mgmt-api/batches/status" "github.com/Alvearie/hri-mgmt-api/common/actionloopmin" + "github.com/Alvearie/hri-mgmt-api/common/auth" "github.com/Alvearie/hri-mgmt-api/common/elastic" "github.com/Alvearie/hri-mgmt-api/common/kafka" "github.com/Alvearie/hri-mgmt-api/common/param" "github.com/Alvearie/hri-mgmt-api/common/response" + "github.com/coreos/go-oidc" "log" "net/http" "os" @@ -31,6 +33,11 @@ func terminateMain(params map[string]interface{}) map[string]interface{} { start := time.Now() logger.Printf("start terminateMain, %s 
\n", start) + claims, errResp := auth.GetValidatedClaims(params, auth.AuthValidator{}, oidc.NewProvider) + if errResp != nil { + return errResp + } + esClient, err := elastic.ClientFromParams(params) if err != nil { return response.Error(http.StatusInternalServerError, err.Error()) @@ -39,7 +46,9 @@ func terminateMain(params map[string]interface{}) map[string]interface{} { if err != nil { return response.Error(http.StatusInternalServerError, err.Error()) } - resp := batches.UpdateStatus(params, param.ParamValidator{}, status.Terminated, esClient, kafkaWriter) + + resp := batches.UpdateStatus(params, param.ParamValidator{}, claims, status.Terminated, esClient, kafkaWriter) + logger.Printf("processing time terminateMain, %d milliseconds \n", time.Since(start).Milliseconds()) return resp } diff --git a/src/common/actionloopmin/actionloopmin.go b/src/common/actionloopmin/actionloopmin.go index 61018e0..93de2f1 100644 --- a/src/common/actionloopmin/actionloopmin.go +++ b/src/common/actionloopmin/actionloopmin.go @@ -32,6 +32,11 @@ func startLoop(fn func(params map[string]interface{}) map[string]interface{}, ou reader := bufio.NewReader(os.Stdin) + // send ack + // note that it depends on the runtime, + // go 1.13+ requires an ack, past versions does not + fmt.Fprintf(out, `{ "ok": true}%s`, "\n") + // read-eval-print loop for { // read one line diff --git a/src/common/auth/constants.go b/src/common/auth/constants.go new file mode 100644 index 0000000..d392736 --- /dev/null +++ b/src/common/auth/constants.go @@ -0,0 +1,16 @@ +/** + * (C) Copyright IBM Corp. 
2020 + * + * SPDX-License-Identifier: Apache-2.0 + */ +package auth + +const ( + HriIntegrator string = "hri_data_integrator" + HriConsumer string = "hri_consumer" + TenantScopePrefix string = "tenant_" + MsgAccessTokenMissingScopes = "The access token must have one of these scopes: hri_consumer, hri_data_integrator" + MsgIntegratorSubClaimNoMatch = "The token's sub claim (clientId): %s does not match the data integratorId: %s" + MsgIntegratorRoleRequired = "Must have hri_data_integrator role to %s a batch" + MsgSubClaimRequiredInJwt = "JWT access token 'sub' claim must be populated" +) diff --git a/src/common/auth/hri_claims.go b/src/common/auth/hri_claims.go new file mode 100644 index 0000000..1c048fb --- /dev/null +++ b/src/common/auth/hri_claims.go @@ -0,0 +1,40 @@ +/** + * (C) Copyright IBM Corp. 2020 + * + * SPDX-License-Identifier: Apache-2.0 + */ +package auth + +import ( + "strings" +) + +// interface to support testing +type ClaimsHolder interface { + Claims(claims interface{}) error +} + +type HriClaims struct { + // Claim information extracted from a JWT access token + Scope string `json:"scope"` + Subject string `json:"sub"` + Audience []string `json:"aud"` +} + +func (c HriClaims) HasScope(claim string) bool { + // split space-delimited scope string into an array + scopes := strings.Fields(c.Scope) + + for _, val := range scopes { + if val == claim { + // token contains claim for this scope + return true + } + } + + logger := GetLogger() + logger.Printf("None of the authorized scopes [%s] matched the required scope: %s", + c.Scope, claim) + + return false +} diff --git a/src/common/auth/hri_claims_test.go b/src/common/auth/hri_claims_test.go new file mode 100644 index 0000000..0b8b685 --- /dev/null +++ b/src/common/auth/hri_claims_test.go @@ -0,0 +1,98 @@ +/** + * (C) Copyright IBM Corp. 
2020 + * + * SPDX-License-Identifier: Apache-2.0 + */ +package auth + +import ( + "encoding/json" + "fmt" + "reflect" + "testing" +) + +func TestHasScope(t *testing.T) { + scopeToFind := "hri_data_integrator" + + tests := []struct { + name string + scope string + expected bool + }{ + { + name: "Empty String", + expected: false, + }, + { + name: "No Spaces String", + scope: "hri_other" + scopeToFind + "hri_consumer", + expected: false, + }, + { + name: "Comma-Delimited String", + scope: "hri_other, " + scopeToFind + ", hri_consumer", + expected: false, + }, + { + name: "Not In String", + scope: "hri_consumer", + expected: false, + }, + { + name: "Partially In String", + scope: "hri integrator hri_consumer", + expected: false, + }, + { + name: "Single scope", + scope: scopeToFind, + expected: true, + }, + { + name: "In String", + scope: scopeToFind + " hri_consumer", + expected: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + claims := HriClaims{Scope: tt.scope} + actual := claims.HasScope(scopeToFind) + if actual != tt.expected { + t.Fatalf("Unexpected result.\nexpected: %v\nactual :\n%v", tt.expected, actual) + } + }) + } +} + +func TestUnmarshalClaims(t *testing.T) { + testScope := "testScope" + testSubject := "testSubject" + testAudience := "testAudience" + jwtTokenJson := fmt.Sprintf(` + { + "scope": "%s", + "sub": "%s", + "aud": ["%s"] + }`, testScope, testSubject, testAudience) + + claims := HriClaims{} + err := json.Unmarshal([]byte(jwtTokenJson), &claims) + if err != nil { + t.Fatal(err.Error()) + } + + if !reflect.DeepEqual(claims.Scope, testScope) { + t.Fatalf("Unexpected result.\nexpected: %v\nactual : %v", testScope, claims.Scope) + } + + if !reflect.DeepEqual(claims.Subject, testSubject) { + t.Fatalf("Unexpected result.\nexpected: %v\nactual : %v", testSubject, claims.Subject) + } + + if !reflect.DeepEqual(claims.Audience, []string{testAudience}) { + t.Fatalf("Unexpected result.\nexpected: %v\nactual : %v", 
[]string{testAudience}, claims.Audience) + } +} diff --git a/src/common/auth/validate.go b/src/common/auth/validate.go new file mode 100644 index 0000000..773e84c --- /dev/null +++ b/src/common/auth/validate.go @@ -0,0 +1,185 @@ +/** + * (C) Copyright IBM Corp. 2020 + * + * SPDX-License-Identifier: Apache-2.0 + */ +package auth + +import ( + "context" + "fmt" + "github.com/Alvearie/hri-mgmt-api/common/param" + "github.com/Alvearie/hri-mgmt-api/common/path" + "github.com/Alvearie/hri-mgmt-api/common/response" + "github.com/coreos/go-oidc" + "log" + "net/http" + "os" + "reflect" + "strings" +) + +var Logger *log.Logger + +// NOTE: Do to the tightly coupled nature of several classes in the oidc library, the test code coverage is below 90%. +// Interfaces cannot be created for the Provider and IDTokenVerifier, because return types cannot be inferred in Golang. + +// type to support testing +type NewOidcProvider func(context.Context, string) (*oidc.Provider, error) + +// interface to support testing +type Validator interface { + GetSignedToken(params map[string]interface{}, providerNew NewOidcProvider, logger *log.Logger) (ClaimsHolder, map[string]interface{}) + CheckTenant(params map[string]interface{}, claims HriClaims, logger *log.Logger) map[string]interface{} + CheckAudience(params map[string]interface{}, claims HriClaims, logger *log.Logger) map[string]interface{} +} + +// struct that implements the Validator interface +type AuthValidator struct{} + +// Ensures the request has a valid OAuth JWT OIDC compliant access token. +// Eventually this will also check the token scopes against a provided list. +// Returns a non-nil web action http response map on error. 
+func (v AuthValidator) GetSignedToken(params map[string]interface{}, providerNew NewOidcProvider, logger *log.Logger) (ClaimsHolder, map[string]interface{}) { + issuer, rawToken, errResp := v.extractIssuerAndToken(params, logger) + if errResp != nil { + return nil, errResp + } + + ctx := context.Background() + + provider, err := providerNew(ctx, issuer) + if err != nil { + msg := fmt.Sprintf("Failed to create OIDC provider: %s", err.Error()) + logger.Printf(msg) + return nil, response.Error(http.StatusInternalServerError, msg) + } + + // Since this is the protected resource, we don't have a client id and will accept tokens for all clients + verifier := provider.Verifier(&oidc.Config{SkipClientIDCheck: true}) + + token, err := verifier.Verify(ctx, rawToken) + if err != nil { + msg := fmt.Sprintf("Authorization token validation failed: %s", err.Error()) + logger.Printf(msg) + return nil, response.Error(http.StatusUnauthorized, msg) + } + + return token, nil +} + +func (v AuthValidator) extractIssuerAndToken(params map[string]interface{}, logger *log.Logger) (string, string, map[string]interface{}) { + + // validate that required input params are present + validator := param.ParamValidator{} + errResp := validator.Validate( + params, + param.Info{param.OidcIssuer, reflect.String}, + ) + if errResp != nil { + logger.Printf("Missing OIDC issuer parameter: %s", errResp) + return "", "", errResp + } + + // extract the Authorization Bearer token + headers, err := param.ExtractValues(params, param.OpenWhiskHeaders) + if err != nil { + msg := fmt.Sprintf("Unable to extract OpenWhisk headers: %s", err.Error()) + logger.Printf(msg) + return "", "", response.Error(http.StatusInternalServerError, msg) + } + + auth, ok := headers["authorization"].(string) + if !ok { + auth, ok = headers["Authorization"].(string) + if !ok { + return "", "", response.Error(http.StatusUnauthorized, "Missing Authorization header") + } + } + rawToken := strings.ReplaceAll(strings.ReplaceAll(auth, 
"Bearer ", ""), "bearer ", "") + + return params[param.OidcIssuer].(string), rawToken, nil +} + +func (v AuthValidator) CheckTenant(params map[string]interface{}, claims HriClaims, logger *log.Logger) map[string]interface{} { + // extract tenantId from URL path + tenantId, err := path.ExtractParam(params, param.TenantIndex) + if err != nil { + logger.Println(err.Error()) + return response.Error(http.StatusBadRequest, err.Error()) + } + + // The tenant scope token must have "tenant_" as a prefix + if !claims.HasScope(TenantScopePrefix + tenantId) { + // The authorized scopes do not include tenant data + msg := fmt.Sprintf("Unauthorized tenant access. Tenant '%s' is not included in the authorized scopes: %v.", tenantId, claims.Scope) + logger.Println(msg) + return response.Error(http.StatusUnauthorized, msg) + } + + // Tenant data included in authorized scopes + return nil +} + +func (v AuthValidator) CheckAudience(params map[string]interface{}, claims HriClaims, logger *log.Logger) map[string]interface{} { + // validate that required input params are present + validator := param.ParamValidator{} + errResp := validator.Validate( + params, + param.Info{param.JwtAudienceId, reflect.String}, + ) + if errResp != nil { + logger.Printf("Missing %s parameter: %s", param.JwtAudienceId, errResp) + return errResp + } + + for _, jwtAudience := range claims.Audience { + if jwtAudience == params[param.JwtAudienceId] { + // JWT Audience matches JWT Audience Id + return nil + } + } + + msg := fmt.Sprintf( + "Unauthorized tenant access. 
The JWT Audience '%s' does not match the JWT Audience Id '%s'.", + claims.Audience, params[param.JwtAudienceId]) + logger.Println(msg) + return response.Error(http.StatusUnauthorized, msg) +} + +func GetValidatedClaims(params map[string]interface{}, validator Validator, providerNew NewOidcProvider) (HriClaims, map[string]interface{}) { + logger := GetLogger() + claims := HriClaims{} + + // verify that request has a signed OAuth JWT OIDC-compliant access token + token, errResp := validator.GetSignedToken(params, providerNew, logger) + if errResp != nil { + return claims, errResp + } + + // extract HRI-related claims from JWT access token + if err := token.Claims(&claims); err != nil { + logger.Println(err.Error()) + return claims, response.Error(http.StatusUnauthorized, err.Error()) + } + + // verify that necessary tenant claim exists to access this endpoint's data + if errResp := validator.CheckTenant(params, claims, logger); errResp != nil { + return claims, errResp + } + + // verify that the JWT Audience matches the JWT Audience Id + if errResp := validator.CheckAudience(params, claims, logger); errResp != nil { + return claims, errResp + } + + return claims, nil +} + +func GetLogger() *log.Logger { + if Logger == nil { + Logger = log.New(os.Stdout, "auth/validate: ", log.Llongfile) + } + + return Logger +} diff --git a/src/common/auth/validate_test.go b/src/common/auth/validate_test.go new file mode 100644 index 0000000..bd70b00 --- /dev/null +++ b/src/common/auth/validate_test.go @@ -0,0 +1,467 @@ +/** + * (C) Copyright IBM Corp. 
2020 + * + * SPDX-License-Identifier: Apache-2.0 + */ +package auth + +import ( + "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "github.com/Alvearie/hri-mgmt-api/common/param" + "github.com/Alvearie/hri-mgmt-api/common/path" + "github.com/Alvearie/hri-mgmt-api/common/response" + "github.com/coreos/go-oidc" + "github.com/golang/mock/gomock" + "log" + "net/http" + "os" + "reflect" + "testing" +) + +// this is for manual testing with a specific OIDC provider (AppID) +func /*Test*/ OidcLib(t *testing.T) { + const iss = "https://us-south.appid.cloud.ibm.com/oauth/v4/" + username := os.Getenv("APPID_USERNAME") + password := os.Getenv("APPID_PASSWORD") + + // First get a token from AppID + request, err := http.NewRequest("POST", iss+"/token", bytes.NewBuffer([]byte("grant_type=client_credentials"))) + if err != nil { + t.Errorf("Error creating new http request: %v", err) + } + request.SetBasicAuth(username, password) + request.Header.Set("Content-Type", "application/x-www-form-urlencoded") + + client := http.Client{} + resp, err := client.Do(request) + if err != nil { + t.Errorf("Error executing AppID token POST: %v", err) + } + + defer resp.Body.Close() + + if resp.StatusCode != 200 { + t.Errorf("Non 200 from AppID token POST: %v", resp) + } + + var body map[string]interface{} + if err := json.NewDecoder(resp.Body).Decode(&body); err != nil { + t.Errorf("Error decoding AppID token response: %v", err) + } + + logger := log.New(os.Stdout, "auth/validate: ", log.Llongfile) + validator := AuthValidator{} + + // now validate the token + _, errResp := validator.GetSignedToken( + map[string]interface{}{ + "issuer": iss, + param.OpenWhiskHeaders: map[string]interface{}{"authorization": "Bearer " + body["access_token"].(string)}}, + oidc.NewProvider, + logger) + + if errResp != nil { + t.Fatalf("Error: %v", errResp) + } +} + +func testProvider(ctx context.Context, issuer string) (*oidc.Provider, error) { + return &oidc.Provider{}, nil +} + +func TestGetSignedToken(t 
*testing.T) { + tests := []struct { + name string + params map[string]interface{} + expErr map[string]interface{} + newProvider NewOidcProvider + }{ + {"Bad Token", + map[string]interface{}{ + "issuer": "https://issuer", + param.OpenWhiskHeaders: map[string]interface{}{"authorization": "Bearer AEFFASEFLIJPIOJ"}, + }, + response.Error(http.StatusUnauthorized, "Authorization token validation failed: oidc: malformed jwt: square/go-jose: compact JWS format must have three parts"), + testProvider, + }, + {"Missing Params", + map[string]interface{}{ + "issuer": "https://issuer", + param.OpenWhiskHeaders: map[string]interface{}{}, + }, + response.Error(http.StatusUnauthorized, "Missing Authorization header"), + testProvider, + }, + {"NewProvider Error", + map[string]interface{}{ + "issuer": "https://issuer", + param.OpenWhiskHeaders: map[string]interface{}{"authorization": "Bearer AEFFASEFLIJPIOJ"}, + }, + response.Error(http.StatusInternalServerError, "Failed to create OIDC provider: new provider error"), + func(ctx context.Context, issuer string) (*oidc.Provider, error) { + return nil, errors.New("new provider error") + }, + }, + } + + logger := log.New(os.Stdout, "auth/validate: ", log.Llongfile) + validator := AuthValidator{} + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + _, err := validator.GetSignedToken(tt.params, tt.newProvider, logger) + expected := fmt.Sprint(tt.expErr) + if err == nil || expected != fmt.Sprint(err) { + t.Fatalf("Expected error with bad token, but expected:\n%v\ngot:\n%v", expected, err) + } + }) + } +} + +func TestExtractIssuerAndToken(t *testing.T) { + const issuer = "https://myissuer" + const token = "LIJIJUGBFFDRYCXWRETYUJBCXCVBNKUYTRF" + + tests := []struct { + name string + params map[string]interface{} + expErr map[string]interface{} + }{ + {"Happy Path", + map[string]interface{}{param.OidcIssuer: issuer, param.OpenWhiskHeaders: map[string]interface{}{"authorization": "Bearer " + token}}, + nil, + }, + {"Missing 
Issuer", + map[string]interface{}{param.OpenWhiskHeaders: map[string]interface{}{"authorization": "Bearer " + token}}, + response.MissingParams(param.OidcIssuer), + }, + {"Missing OpenWhisk Headers", + map[string]interface{}{param.OidcIssuer: issuer}, + response.Error(http.StatusInternalServerError, "Unable to extract OpenWhisk headers: error extracting the __ow_headers section of the JSON"), + }, + {"Missing Authorization", + map[string]interface{}{param.OidcIssuer: issuer, param.OpenWhiskHeaders: map[string]interface{}{}}, + response.Error(http.StatusUnauthorized, "Missing Authorization header"), + }, + } + + logger := log.New(os.Stdout, "auth/validate: ", log.Llongfile) + validator := AuthValidator{} + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actIssuer, actToken, err := validator.extractIssuerAndToken(tt.params, logger) + if (err != nil || tt.expErr != nil) && fmt.Sprint(tt.expErr) != fmt.Sprint(err) { + t.Fatalf("Expected error does not match actual\nexpected:\n%v\nactual:\n%v", tt.expErr, err) + } else if tt.expErr == nil && (issuer != actIssuer || token != actToken) { + t.Fatalf("Expected does not match actual\nexpected:\n%s, %s\nactual:\n%s, %s", issuer, token, actIssuer, actToken) + } + }) + } +} + +func TestCheckTenant(t *testing.T) { + authorizedTenant := "123" + + tests := []struct { + name string + params map[string]interface{} + claims HriClaims + statusCode int + }{ + { + name: "Invalid Url Path", + params: map[string]interface{}{path.ParamOwPath: "/hri/bad_path"}, + claims: HriClaims{Scope: "tenant_" + authorizedTenant}, + statusCode: http.StatusBadRequest, + }, + { + name: "Unauthorized Tenant", + params: map[string]interface{}{path.ParamOwPath: fmt.Sprintf("/hri/tenants/%s/batches", authorizedTenant)}, + claims: HriClaims{Scope: "unauthorizedTenant"}, + statusCode: http.StatusUnauthorized, + }, + { + name: "Happy Path", + params: map[string]interface{}{path.ParamOwPath: fmt.Sprintf("/hri/tenants/%s/batches", 
authorizedTenant)}, + claims: HriClaims{Scope: "tenant_" + authorizedTenant}, + }, + } + + logger := log.New(os.Stdout, "auth/validate: ", log.Llongfile) + validator := AuthValidator{} + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + resp := validator.CheckTenant(tt.params, tt.claims, logger) + if resp != nil { + actualStatusCode := resp["error"].(map[string]interface{})["statusCode"] + if actualStatusCode != tt.statusCode { + // error response doesn't match expected response + t.Fatalf("Unexpected err response.\nexpected: %v\nactual : %v", tt.statusCode, actualStatusCode) + } + } else if resp == nil && tt.statusCode != 0 { + // expected error response, but got none + t.Fatalf("Expected err response with status code: %v, but got no error response.", tt.statusCode) + } + }) + } +} + +func TestCheckAudience(t *testing.T) { + jwtAudienceId := "123" + + tests := []struct { + name string + params map[string]interface{} + claims HriClaims + statusCode int + }{ + { + name: "No Audience Param", + params: map[string]interface{}{}, + claims: HriClaims{Audience: []string{jwtAudienceId}}, + statusCode: http.StatusBadRequest, + }, + { + name: "Unauthorized Audience", + params: map[string]interface{}{param.JwtAudienceId: "Will not match Audience"}, + claims: HriClaims{Audience: []string{jwtAudienceId}}, + statusCode: http.StatusUnauthorized, + }, + { + name: "Happy Path", + params: map[string]interface{}{param.JwtAudienceId: jwtAudienceId}, + claims: HriClaims{Audience: []string{jwtAudienceId}}, + }, + } + + logger := log.New(os.Stdout, "auth/validate: ", log.Llongfile) + validator := AuthValidator{} + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + resp := validator.CheckAudience(tt.params, tt.claims, logger) + if resp != nil { + actualStatusCode := resp["error"].(map[string]interface{})["statusCode"] + if actualStatusCode != tt.statusCode { + // error response doesn't match expected response + t.Fatalf("Unexpected err 
response.\nexpected: %v\nactual : %v", tt.statusCode, actualStatusCode) + } + } else if resp == nil && tt.statusCode != 0 { + // expected error response, but got none + t.Fatalf("Expected err response with status code: %v, but got no error response.", tt.statusCode) + } + }) + } +} + +// Define a function type with the same signature as the single method in the ClaimsHolder interface +// Any function with this signature can be cast as ClaimsHolderFunc +type ClaimsHolderFunc func(claims interface{}) error + +// Make ClaimsHolderFunc satisfy the ClaimsHolder interface +// When ClaimsHolderFunc.Claims is called, the args are simply forwarded on to the underlying ClaimsHolderFunc function +func (f ClaimsHolderFunc) Claims(claims interface{}) error { + return f(claims) +} + +func TestGetValidatedClaimsHappyPath(t *testing.T) { + // create a mock validator + controller := gomock.NewController(t) + defer controller.Finish() + mockValidator := NewMockValidator(controller) + + // define inputs/outputs + applicationId := "applicationId" + params := map[string]interface{}{param.JwtAudienceId: applicationId} + hriClaims := HriClaims{Scope: "foo bar", Audience: []string{applicationId}} + claimsHolder := func(c interface{}) error { + *c.(*HriClaims) = hriClaims + return nil + } + + // define expected calls + gomock.InOrder( + mockValidator. + EXPECT(). + GetSignedToken(params, nil, gomock.Any()). + Return(ClaimsHolderFunc(claimsHolder), nil), + mockValidator. + EXPECT(). + CheckTenant(params, hriClaims, gomock.Any()). + Return(nil), + mockValidator. + EXPECT(). + CheckAudience(params, hriClaims, gomock.Any()). 
+ Return(nil), + ) + + claims, err := GetValidatedClaims(params, mockValidator, nil) + + // we expect to get back a valid set of claims and no error + expClaims := hriClaims + var expErr map[string]interface{} + + if !reflect.DeepEqual(err, expErr) { + t.Fatalf("Unexpected err response.\nexpected: %v\nactual : %v", expErr, err) + } + if !reflect.DeepEqual(claims, expClaims) { + t.Fatalf("Unexpected claims.\nexpected: %v\nactual : %v", expClaims, claims) + } +} + +func TestGetValidatedClaimsTokenError(t *testing.T) { + // create a mock validator + controller := gomock.NewController(t) + defer controller.Finish() + mockValidator := NewMockValidator(controller) + + // define inputs/outputs + params := map[string]interface{}{} + badTokenErr := response.Error(999, "bad token") + + // define expected calls + mockValidator. + EXPECT(). + GetSignedToken(params, nil, gomock.Any()). + Return(nil, badTokenErr) + + claims, err := GetValidatedClaims(params, mockValidator, nil) + + // we expect to get back an empty set of claims and a bad token error + expClaims := HriClaims{} + expErr := badTokenErr + + if !reflect.DeepEqual(err, expErr) { + t.Fatalf("Unexpected err response.\nexpected: %v\nactual : %v", expErr, err) + } + if !reflect.DeepEqual(claims, expClaims) { + t.Fatalf("Unexpected claims.\nexpected: %v\nactual : %v", expClaims, claims) + } +} + +func TestGetValidatedClaimsExtractionError(t *testing.T) { + // create a mock validator + controller := gomock.NewController(t) + defer controller.Finish() + mockValidator := NewMockValidator(controller) + + // define inputs/outputs + params := map[string]interface{}{} + badClaimsHolderErr := "bad claims holder" + claimsHolder := func(c interface{}) error { + return errors.New(badClaimsHolderErr) + } + + // define expected calls + mockValidator. + EXPECT(). + GetSignedToken(params, nil, gomock.Any()). 
+ Return(ClaimsHolderFunc(claimsHolder), nil) + + claims, err := GetValidatedClaims(params, mockValidator, nil) + + // we expect to get back an empty set of claims and a bad token error + expClaims := HriClaims{} + expErr := response.Error(http.StatusUnauthorized, badClaimsHolderErr) + + if !reflect.DeepEqual(err, expErr) { + t.Fatalf("Unexpected err response.\nexpected: %v\nactual : %v", expErr, err) + } + if !reflect.DeepEqual(claims, expClaims) { + t.Fatalf("Unexpected claims.\nexpected: %v\nactual : %v", expClaims, claims) + } +} + +func TestGetValidatedClaimsTenantError(t *testing.T) { + // create a mock validator + controller := gomock.NewController(t) + defer controller.Finish() + mockValidator := NewMockValidator(controller) + + // define inputs/outputs + params := map[string]interface{}{} + hriClaims := HriClaims{Scope: "foo bar"} + claimsHolder := func(c interface{}) error { + *c.(*HriClaims) = hriClaims + return nil + } + badTenantErr := response.Error(999, "bad tenant") + + // define expected calls + gomock.InOrder( + mockValidator. + EXPECT(). + GetSignedToken(params, nil, gomock.Any()). + Return(ClaimsHolderFunc(claimsHolder), nil), + mockValidator. + EXPECT(). + CheckTenant(params, hriClaims, gomock.Any()). 
+ Return(badTenantErr), + ) + + claims, err := GetValidatedClaims(params, mockValidator, nil) + + // we expect to get back a valid set of claims and a bad tenant error + expClaims := hriClaims + expErr := badTenantErr + + if !reflect.DeepEqual(err, expErr) { + t.Fatalf("Unexpected err response.\nexpected: %v\nactual : %v", expErr, err) + } + if !reflect.DeepEqual(claims, expClaims) { + t.Fatalf("Unexpected claims.\nexpected: %v\nactual : %v", expClaims, claims) + } +} + +func TestGetValidatedClaimsApplicationError(t *testing.T) { + // create a mock validator + controller := gomock.NewController(t) + defer controller.Finish() + mockValidator := NewMockValidator(controller) + + // define inputs/outputs + applicationId := "applicationId" + params := map[string]interface{}{param.JwtAudienceId: applicationId} + hriClaims := HriClaims{Scope: "foo bar", Audience: []string{applicationId}} + claimsHolder := func(c interface{}) error { + *c.(*HriClaims) = hriClaims + return nil + } + badApplicationErr := response.Error(999, "bad audience") + + // define expected calls + gomock.InOrder( + mockValidator. + EXPECT(). + GetSignedToken(params, nil, gomock.Any()). + Return(ClaimsHolderFunc(claimsHolder), nil), + mockValidator. + EXPECT(). + CheckTenant(params, hriClaims, gomock.Any()). + Return(nil), + mockValidator. + EXPECT(). + CheckAudience(params, hriClaims, gomock.Any()). 
+ Return(badApplicationErr), + ) + + claims, err := GetValidatedClaims(params, mockValidator, nil) + + // we expect to get back a valid set of claims and a bad tenant error + expClaims := hriClaims + expErr := badApplicationErr + + if !reflect.DeepEqual(err, expErr) { + t.Fatalf("Unexpected err response.\nexpected: %v\nactual : %v", expErr, err) + } + if !reflect.DeepEqual(claims, expClaims) { + t.Fatalf("Unexpected claims.\nexpected: %v\nactual : %v", expClaims, claims) + } +} diff --git a/src/common/auth/validator_mock.go b/src/common/auth/validator_mock.go new file mode 100644 index 0000000..7d2fba0 --- /dev/null +++ b/src/common/auth/validator_mock.go @@ -0,0 +1,77 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: validate.go + +// Package auth is a generated GoMock package. +package auth + +import ( + gomock "github.com/golang/mock/gomock" + log "log" + reflect "reflect" +) + +// MockValidator is a mock of Validator interface +type MockValidator struct { + ctrl *gomock.Controller + recorder *MockValidatorMockRecorder +} + +// MockValidatorMockRecorder is the mock recorder for MockValidator +type MockValidatorMockRecorder struct { + mock *MockValidator +} + +// NewMockValidator creates a new mock instance +func NewMockValidator(ctrl *gomock.Controller) *MockValidator { + mock := &MockValidator{ctrl: ctrl} + mock.recorder = &MockValidatorMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use +func (m *MockValidator) EXPECT() *MockValidatorMockRecorder { + return m.recorder +} + +// GetSignedToken mocks base method +func (m *MockValidator) GetSignedToken(params map[string]interface{}, providerNew NewOidcProvider, logger *log.Logger) (ClaimsHolder, map[string]interface{}) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetSignedToken", params, providerNew, logger) + ret0, _ := ret[0].(ClaimsHolder) + ret1, _ := ret[1].(map[string]interface{}) + return ret0, ret1 +} + +// GetSignedToken indicates 
an expected call of GetSignedToken +func (mr *MockValidatorMockRecorder) GetSignedToken(params, providerNew, logger interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetSignedToken", reflect.TypeOf((*MockValidator)(nil).GetSignedToken), params, providerNew, logger) +} + +// CheckTenant mocks base method +func (m *MockValidator) CheckTenant(params map[string]interface{}, claims HriClaims, logger *log.Logger) map[string]interface{} { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CheckTenant", params, claims, logger) + ret0, _ := ret[0].(map[string]interface{}) + return ret0 +} + +// CheckAudience mocks base method +func (m *MockValidator) CheckAudience(params map[string]interface{}, claims HriClaims, logger *log.Logger) map[string]interface{} { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CheckAudience", params, claims, logger) + ret0, _ := ret[0].(map[string]interface{}) + return ret0 +} + +// CheckTenant indicates an expected call of CheckTenant +func (mr *MockValidatorMockRecorder) CheckTenant(params, claims, logger interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CheckTenant", reflect.TypeOf((*MockValidator)(nil).CheckTenant), params, claims, logger) +} + +// CheckAudience indicates an expected call of CheckAudience +func (mr *MockValidatorMockRecorder) CheckAudience(params, claims, logger interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CheckAudience", reflect.TypeOf((*MockValidator)(nil).CheckAudience), params, claims, logger) +} diff --git a/src/common/elastic/client.go b/src/common/elastic/client.go index cc88247..31c04ec 100644 --- a/src/common/elastic/client.go +++ b/src/common/elastic/client.go @@ -16,7 +16,7 @@ import ( "fmt" "github.com/Alvearie/hri-mgmt-api/common/param" service "github.com/IBM/resource-controller-go-sdk-generator/build/generated" - 
"github.com/elastic/go-elasticsearch/v6" + "github.com/elastic/go-elasticsearch/v7" "net/http" "strings" ) diff --git a/src/common/elastic/client_test.go b/src/common/elastic/client_test.go index 09f7c83..f3aa66a 100644 --- a/src/common/elastic/client_test.go +++ b/src/common/elastic/client_test.go @@ -12,7 +12,7 @@ import ( "errors" "github.com/Alvearie/hri-mgmt-api/common/test" "github.com/IBM/resource-controller-go-sdk-generator/build/generated" - "github.com/elastic/go-elasticsearch/v6" + "github.com/elastic/go-elasticsearch/v7" "github.com/golang/mock/gomock" "github.com/stretchr/testify/assert" "io/ioutil" diff --git a/src/common/elastic/decoder.go b/src/common/elastic/decoder.go index 534f59b..a8f3ef9 100644 --- a/src/common/elastic/decoder.go +++ b/src/common/elastic/decoder.go @@ -9,7 +9,7 @@ import ( "encoding/json" "fmt" "github.com/Alvearie/hri-mgmt-api/common/response" - "github.com/elastic/go-elasticsearch/v6/esapi" + "github.com/elastic/go-elasticsearch/v7/esapi" "log" "net/http" ) diff --git a/src/common/elastic/decoder_test.go b/src/common/elastic/decoder_test.go index 7137e75..3382168 100644 --- a/src/common/elastic/decoder_test.go +++ b/src/common/elastic/decoder_test.go @@ -10,7 +10,7 @@ import ( "errors" "fmt" "github.com/Alvearie/hri-mgmt-api/common/response" - "github.com/elastic/go-elasticsearch/v6/esapi" + "github.com/elastic/go-elasticsearch/v7/esapi" "io/ioutil" "log" "net/http" diff --git a/src/common/eventstreams/client_utils.go b/src/common/eventstreams/client_utils.go index 86e41bb..3441626 100644 --- a/src/common/eventstreams/client_utils.go +++ b/src/common/eventstreams/client_utils.go @@ -13,7 +13,7 @@ const ( NotificationSuffix string = ".notification" ) -//See documentation on HRI topic naming conventions: https://ibm.github.io/hri/admin.html#onboarding-new-data-integrators +//See documentation on HRI topic naming conventions: https://alvearie.io/HRI/admin.html#onboarding-new-data-integrators func CreateTopicNames(tenantId 
string, streamId string) (string, string) { baseTopicName := strings.Join([]string{tenantId, streamId}, ".") inTopicName := TopicPrefix + baseTopicName + InSuffix diff --git a/src/common/eventstreams/service.go b/src/common/eventstreams/service.go index cf03dbd..981893f 100644 --- a/src/common/eventstreams/service.go +++ b/src/common/eventstreams/service.go @@ -52,7 +52,7 @@ func CreateService(params map[string]interface{}) (Service, map[string]interface } conf.BasePath = adminUrl - headerMap, err := param.ExtractValues(params, param.OwHeaders) + headerMap, err := param.ExtractValues(params, param.OpenWhiskHeaders) if err != nil { return nil, response.Error(http.StatusInternalServerError, err.Error()) } diff --git a/src/common/kafka/read_partitions_test.go b/src/common/kafka/read_partitions_test.go new file mode 100644 index 0000000..228e9d8 --- /dev/null +++ b/src/common/kafka/read_partitions_test.go @@ -0,0 +1,51 @@ +// +build !tests + +/** + * (C) Copyright IBM Corp. 2020 + * + * SPDX-License-Identifier: Apache-2.0 + */ + +package kafka + +const realCreds string = `{ +}` + +//func TestReadPartitions(t *testing.T) { +// +// d := &kg.Dialer{ +// //SASLMechanism: plain.Mechanism{"token", "REPLACE-ME-WITH-PASSWORD"}, +// TLS: &tls.Config{ +// MaxVersion: tls.VersionTLS12, +// }, +// } +// +// ctx, cancel := context.WithTimeout(context.Background(), 20*time.Second) +// defer cancel() +// +// //conn, err := d.DialLeader (ctx, "tcp", "broker-0-g1xtlwh2v9x9rtcp.kafka.svc01.us-south.eventstreams.cloud.ibm.com:9093", "connect-offsets", 0) +// conn, err := d.DialContext(ctx, "tcp", "broker-2-g1xtlwh2v9x9rtcp.kafka.svc01.us-south.eventstreams.cloud.ibm.com:9093") +// +// if err != nil { +// t.Error(err) +// conn.Close() +// } +// +// defer conn.Close() +// parts, err := conn.ReadPartitions() +// if err != nil { +// t.Error(err) +// } +// +// if len(parts) == 0 { +// t.Errorf("no partitions were returned") +// } else { +// fmt.Printf("partitions Count: %v \n", len(parts)) 
+// part := parts[0] +// fmt.Printf("Topic: %v", part.Topic+"| firstPartition: "+strconv.Itoa(part.ID)+" \n") +// fmt.Printf("partitions: %v \n", parts) +// +// } +// +// conn.Close() +//} diff --git a/src/common/param/parameters.go b/src/common/param/parameters.go index c0a9816..374a4b8 100644 --- a/src/common/param/parameters.go +++ b/src/common/param/parameters.go @@ -11,18 +11,23 @@ const ( BatchIndex = 5 StreamIndex = 5 - BoundCreds string = "__bx_creds" - OwHeaders string = "__ow_headers" + BoundCreds string = "__bx_creds" + OpenWhiskHeaders string = "__ow_headers" + OidcIssuer string = "issuer" + JwtAudienceId string = "jwtAudienceId" + + BatchId string = "id" + DataType string = "dataType" + Metadata string = "metadata" + Name string = "name" + Status string = "status" + StartDate string = "startDate" + Topic string = "topic" + RecordCount string = "recordCount" + IntegratorId string = "integratorId" + + TenantId string = "tenantId" - BatchId string = "id" - DataType string = "dataType" - Metadata string = "metadata" - Name string = "name" - Status string = "status" - StartDate string = "startDate" - Topic string = "topic" - TenantId string = "tenantId" - RecordCount string = "recordCount" StreamId string = "id" NumPartitions string = "numPartitions" RetentionMs string = "retentionMs" diff --git a/src/exec.env b/src/exec.env new file mode 100644 index 0000000..8d3bc48 --- /dev/null +++ b/src/exec.env @@ -0,0 +1 @@ +openwhisk/action-golang-v1.15 \ No newline at end of file diff --git a/src/go.mod b/src/go.mod index bde2e9a..c0719f8 100644 --- a/src/go.mod +++ b/src/go.mod @@ -3,10 +3,16 @@ module github.com/Alvearie/hri-mgmt-api require ( github.com/IBM/event-streams-go-sdk-generator v1.0.0 github.com/IBM/resource-controller-go-sdk-generator v1.0.1 - github.com/elastic/go-elasticsearch/v6 v6.8.2 - github.com/golang/mock v1.4.3 + github.com/coreos/go-oidc v2.2.1+incompatible + github.com/elastic/go-elasticsearch/v7 v7.11.0 + github.com/golang/mock v1.5.0 + 
github.com/pquerna/cachecontrol v0.0.0-20180517163645-1555304b9b35 // indirect github.com/segmentio/kafka-go v0.3.5 - github.com/stretchr/testify v1.4.0 + github.com/stretchr/testify v1.6.1 + golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a // indirect + golang.org/x/net v0.0.0-20210614182718-04defd469f4e //indirect + golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d // indirect + gopkg.in/square/go-jose.v2 v2.4.1 // indirect ) -go 1.13 +go 1.15 diff --git a/src/go.sum b/src/go.sum index 661fc9a..63b29b0 100644 --- a/src/go.sum +++ b/src/go.sum @@ -1,4 +1,5 @@ cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= github.com/DataDog/zstd v1.4.0 h1:vhoV+DUHnRZdKW1i5UMjAk2G4JY8wN4ayRfYDNdEhwo= github.com/DataDog/zstd v1.4.0/go.mod h1:1jcaCB/ufaK+sKp1NBhlGmpz41jOoPQ35bpF36t7BBo= github.com/IBM/event-streams-go-sdk-generator v1.0.0 h1:XLm+MsdH6Dod4uXdiRhdbwOI8gEVnQ4YZrSLRQ4wMbY= @@ -7,56 +8,76 @@ github.com/IBM/resource-controller-go-sdk-generator v1.0.1 h1:3tUag6fX+mwSA0z+Ny github.com/IBM/resource-controller-go-sdk-generator v1.0.1/go.mod h1:cKrNWsOSwM7dSY5IfWc8kopcGnhuVckN0iB6pqhOqaE= github.com/antihax/optional v1.0.0 h1:xK2lYat7ZLaVVcIuj82J8kIro4V6kDe0AUDFboUCwcg= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= +github.com/coreos/go-oidc v2.2.1+incompatible h1:mh48q/BqXqgjVHpy2ZY7WnWAbenxRjsz9N1i1YxjHAk= +github.com/coreos/go-oidc v2.2.1+incompatible/go.mod h1:CgnwVTmzoESiwO9qyAFEMiHoZ1nMCKZlZ9V6mm3/LKc= github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21 h1:YEetp8/yCZMuEPMUDHG0CW/brkkEp8mzqk2+ODEitlw= github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod 
h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU= -github.com/elastic/go-elasticsearch/v6 v6.8.2 h1:rp5DGrd63V5c6nHLjF6QEXUpZSvs0+QM3ld7m9VhV2g= -github.com/elastic/go-elasticsearch/v6 v6.8.2/go.mod h1:UwaDJsD3rWLM5rKNFzv9hgox93HoX8utj1kxD9aFUcI= -github.com/golang/mock v1.4.3 h1:GV+pQPG/EUUbkh47niozDcADz6go/dUwhVzdUQHIVRw= -github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/elastic/go-elasticsearch/v7 v7.11.0 h1:bv+2GqsVrPdX/ChJqAHAFtWgtGvVJ0icN/WdBGAdNuw= +github.com/elastic/go-elasticsearch/v7 v7.11.0/go.mod h1:OJ4wdbtDNk5g503kvlHLyErCgQwwzmDtaFC4XyOxXA4= +github.com/golang/mock v1.5.0 h1:jlYHihg//f7RRwuPfptm04yp4s7O6Kw8EZiVYIGcH0g= +github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= github.com/golang/protobuf v1.2.0 h1:P3YflyNX/ehuJFLhxviNdFxQPkGK5cDcApsge1SqnvM= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/snappy v0.0.1 h1:Qgr9rKW7uDUkrbSmQeiDsGa8SjGyCOGtuasMWwvp2P4= github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/pierrec/lz4 v2.0.5+incompatible h1:2xWsjqPFWcplujydGg4WmhC/6fZqK42wMM8aXeqhl0I= github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/pquerna/cachecontrol v0.0.0-20180517163645-1555304b9b35 h1:J9b7z+QKAmPf4YLrFg6oQUotqHQeUNWwkvo7jZp1GLU= +github.com/pquerna/cachecontrol v0.0.0-20180517163645-1555304b9b35/go.mod h1:prYjPmNq4d1NPVmpShWobRqXY3q7Vp+80DqgxxUrUIA= github.com/segmentio/kafka-go v0.3.5 h1:2JVT1inno7LxEASWj+HflHh5sWGfM0gkRiLAxkXhGG4= github.com/segmentio/kafka-go v0.3.5/go.mod h1:OT5KXBPbaJJTcvokhWR2KFmm0niEx3mnccTwjmLvSi4= 
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk= -github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c h1:u40Z8hqBAAQyv+vATcGgV0YCnDjqSL7/q/JyPhhJSPk= github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I= github.com/xdg/stringprep v1.0.0 h1:d9X0esnoa3dFsV0FG35rAT0RIhYFlPq7MiP+DW89La0= github.com/xdg/stringprep v1.0.0/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190506204251-e1dfcc566284 h1:rlLehGeYg6jfoyz/eDqDU1iRXLKfR42nnNh57ytKEWo= golang.org/x/crypto v0.0.0-20190506204251-e1dfcc566284/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a h1:vclmkQCjlDX5OydZ9wv8rBCcS0QyQY66Mpf/7BZbInM= +golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3 h1:0GoQqolDA55aaLxZyTzK/Y2ePZzZTUrRacwib7cNsYQ= golang.org/x/net 
v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20210614182718-04defd469f4e h1:XpT3nA5TvE525Ne3hInMh6+GETgn27Zfm9dxsThnX2Q= +golang.org/x/net v0.0.0-20210614182718-04defd469f4e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45 h1:SVwTIAaPC2U/AvvLNZ2a7OVsmBpC8L5BlwK1whH3hm0= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d h1:TzXSXBo42m9gQenoE3b9BGiEpg5IG2JkU5FkPIawgtw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58 h1:8gQV6CLnAEikrhgkHFbMAEhagSSnXWGV915qUMm9mrU= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= golang.org/x/text 
v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/tools v0.0.0-20190425150028-36563e24a262 h1:qsl9y/CJx34tuA7QCPNp86JNJe4spst6Ff8MjvPUdPg= -golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/appengine v1.4.0 h1:/wp5JvzpHIxhs/dumFmF7BXTf3Z+dd4uXta4kVyO508= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= -gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= -rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= +gopkg.in/square/go-jose.v2 v2.4.1 h1:H0TmLt7/KmzlrDOpa1F+zr0Tk90PbJYBfsVUmRLrf9Y= +gopkg.in/square/go-jose.v2 v2.4.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod 
h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/src/healthcheck/get.go b/src/healthcheck/get.go index cf2bee1..af83776 100644 --- a/src/healthcheck/get.go +++ b/src/healthcheck/get.go @@ -11,7 +11,7 @@ import ( "github.com/Alvearie/hri-mgmt-api/common/kafka" esp "github.com/Alvearie/hri-mgmt-api/common/param/esparam" "github.com/Alvearie/hri-mgmt-api/common/response" - "github.com/elastic/go-elasticsearch/v6" + "github.com/elastic/go-elasticsearch/v7" "log" "net/http" "os" diff --git a/src/tenants/create.go b/src/tenants/create.go index 45555fc..f9f5442 100644 --- a/src/tenants/create.go +++ b/src/tenants/create.go @@ -10,7 +10,7 @@ import ( "github.com/Alvearie/hri-mgmt-api/common/param" "github.com/Alvearie/hri-mgmt-api/common/path" "github.com/Alvearie/hri-mgmt-api/common/response" - "github.com/elastic/go-elasticsearch/v6" + "github.com/elastic/go-elasticsearch/v7" "log" "net/http" "os" diff --git a/src/tenants/delete.go b/src/tenants/delete.go index ec0e999..2b091e3 100644 --- a/src/tenants/delete.go +++ b/src/tenants/delete.go @@ -11,7 +11,7 @@ import ( "github.com/Alvearie/hri-mgmt-api/common/param" "github.com/Alvearie/hri-mgmt-api/common/path" "github.com/Alvearie/hri-mgmt-api/common/response" - "github.com/elastic/go-elasticsearch/v6" + "github.com/elastic/go-elasticsearch/v7" "log" "net/http" "os" diff --git a/src/tenants/get.go b/src/tenants/get.go index 08a4d09..2c799af 100644 --- a/src/tenants/get.go +++ b/src/tenants/get.go @@ -8,7 +8,7 @@ package tenants import ( "github.com/Alvearie/hri-mgmt-api/common/elastic" "github.com/Alvearie/hri-mgmt-api/common/response" - "github.com/elastic/go-elasticsearch/v6" + "github.com/elastic/go-elasticsearch/v7" "log" "net/http" "os" diff --git a/src/tenants/get_by_id.go b/src/tenants/get_by_id.go index 2f441aa..9fe4577 100644 --- a/src/tenants/get_by_id.go +++ b/src/tenants/get_by_id.go @@ -11,7 +11,7 @@ import ( "github.com/Alvearie/hri-mgmt-api/common/param" 
"github.com/Alvearie/hri-mgmt-api/common/path" "github.com/Alvearie/hri-mgmt-api/common/response" - "github.com/elastic/go-elasticsearch/v6" + "github.com/elastic/go-elasticsearch/v7" "log" "net/http" "os" diff --git a/test/README.md b/test/README.md index 918e9b8..46995fc 100644 --- a/test/README.md +++ b/test/README.md @@ -36,12 +36,27 @@ NOTE: Ensure that your Ruby versions match across terminal default, Gemfile, and Gemfile.lock. If using IntelliJ, Ruby version in your module should match as well. -8. (Optional) To run tests locally, first export the ELASTIC_URL and ELASTIC_AUTH values from .travis.yml to your terminal. Then, from within the top directory of this project, run all tests with: - - ```rspec test/spec --tag ~@broken``` +8. (Optional) To run tests locally + - Export these environment variables. You can get most of the values from GitHub actions. Check IBM cloud service credentials or 1password for secure ones. + * HRI_API_KEY + * ELASTIC_URL + * ELASTIC_USER + * ELASTIC_PASSWORD + * SASL_PLAIN_PASSWORD + * COS_URL + * IAM_CLOUD_URL + * CLOUD_API_KEY + * APPID_URL + * APPID_TENANT + * TRAVIS_BRANCH + - Get an unencrypted copy of `jwt_assertion_tokens.json` + - Login with the IBM Cloud CLI and set the Functions namespace to match the branch being tested: + ```ibmcloud fn property set --namespace ``` + - Run the tests: + ```rspec spec --tag ~@broken``` # Dredd Tests -Dredd is used to verify the implemented API meets our published [specification](https://github.com/Alvearie/hri-api-spec/blob/master/management-api/management.yml). +Dredd is used to verify the implemented API meets our published [specification](https://github.com/Alvearie/hri-api-spec/blob/main/management-api/management.yml). By default it generates a test for every endpoint, uses the example values for input, and verifies the response matches the 200 response schema. All other responses are skipped. Ruby 'hooks' are used to modify the default behavior and do setup/teardown. 
Here are some helpful documentation links: * https://dredd.org/en/latest/hooks/ruby.html @@ -63,14 +78,14 @@ gem install dredd_hooks ``` ### Running Dredd Tests -First you need to convert the API spec to Swagger 2.0, so checkout the api-spec [repo](https://github.com/Alvearie/hri-api-spec). -Then use api-spec-converter to convert it. You should make a branch with the same name if changes are needed. The Travis build will checkout the same branch if it exists. +First you need to convert the API spec to Swagger 2.0, so checkout the hri-api-spec [repo](https://github.com/Alvearie/hri-api-spec). +Then use api-spec-converter to convert it. You should make a branch with the same name if changes are needed. The build will checkout the same branch if it exists. ```bash -api-spec-converter -f openapi_3 -t swagger_2 -s yaml api-spec/management-api/management.yml > api-spec/management-api/management.swagger.yml +api-spec-converter -f openapi_3 -t swagger_2 -s yaml hri-api-spec/management-api/management.yml > hri-api-spec/management-api/management.swagger.yml ``` Then run Dredd. Make sure you replace the base url with the one for your current branch. ```bash -dredd ../api-spec/management-api/management.swagger.yml https://fc40a048.us-south.apigw.appdomain.cloud/hri --sorted --language=ruby --hookfiles=test/spec/*_helper.rb --hookfiles=test/spec/dredd_hooks.rb --hooks-worker-connect-timeout=5000 +dredd ../hri-api-spec/management-api/management.swagger.yml https://fc40a048.us-south.apigw.appdomain.cloud/hri --sorted --language=ruby --hookfiles=test/spec/*_helper.rb --hookfiles=test/spec/dredd_hooks.rb --hooks-worker-connect-timeout=5000 ``` ### Debugging diff --git a/test/env.rb b/test/env.rb index 5a21f4d..206a37d 100644 --- a/test/env.rb +++ b/test/env.rb @@ -1,3 +1,7 @@ +# (C) Copyright IBM Corp. 
2020 +# +# SPDX-License-Identifier: Apache-2.0 + require 'rubygems' require 'rspec' require 'json' @@ -16,9 +20,12 @@ require 'logger' require 'securerandom' require 'kafka' +require 'base64' require_relative './spec/helper' require_relative './spec/elastic_helper' require_relative './spec/hri_helper' require_relative './spec/cos_helper' -require_relative './spec/iam_helper' \ No newline at end of file +require_relative './spec/iam_helper' +require_relative './spec/app_id_helper' +require_relative './spec/event_streams_helper' \ No newline at end of file diff --git a/test/spec/app_id_helper.rb b/test/spec/app_id_helper.rb new file mode 100644 index 0000000..43bbfed --- /dev/null +++ b/test/spec/app_id_helper.rb @@ -0,0 +1,32 @@ +# (C) Copyright IBM Corp. 2020 +# +# SPDX-License-Identifier: Apache-2.0 + +class AppIDHelper + + def initialize + @helper = Helper.new + @app_id_url = ENV['APPID_URL'] + @iam_token = IAMHelper.new.get_access_token + end + + def get_access_token(application_name, scopes, audience_override = nil) + response = @helper.rest_get("#{@app_id_url}/management/v4/#{ENV['APPID_TENANT']}/applications", {'Authorization' => "Bearer #{@iam_token}"}) + raise 'Failed to get AppId credentials' unless response.code == 200 + JSON.parse(response.body)['applications'].each do |application| + if application['name'] == application_name + @credentials = Base64.encode64("#{application['clientId']}:#{application['secret']}").delete("\n") + break + end + end + + if @credentials.nil? + raise "Unable to get AppID Application credentials for #{application_name}" + end + + response = @helper.rest_post("#{@app_id_url}/oauth/v4/#{ENV['APPID_TENANT']}/token", {'grant_type' => 'client_credentials', 'scope' => scopes, 'audience' => (audience_override.nil? ? 
ENV['JWT_AUDIENCE_ID'] : audience_override)}, {'Content-Type' => 'application/x-www-form-urlencoded', 'Accept' => 'application/json', 'Authorization' => "Basic #{@credentials}"}) + raise 'App ID token request failed' unless response.code == 200 + JSON.parse(response.body)['access_token'] + end + +end diff --git a/test/spec/dredd_hooks.rb b/test/spec/dredd_hooks.rb index 8ecc244..6196578 100644 --- a/test/spec/dredd_hooks.rb +++ b/test/spec/dredd_hooks.rb @@ -7,168 +7,285 @@ include DreddHooks::Methods DEFAULT_TENANT_ID = 'provider1234' -TENANT_ID = "#{ENV['TRAVIS_BRANCH'].tr('.-', '')}".downcase +TENANT_ID_TENANTS_STREAMS = "#{ENV['TRAVIS_BRANCH'].tr('.-', '')}".downcase +TENANT_ID_BATCHES = 'test' + elastic = ElasticHelper.new -# uncomment to print out all the transaction names before_all do |transactions| puts 'before all' @iam_token = IAMHelper.new.get_access_token + app_id_helper = AppIDHelper.new + @token_all_roles = app_id_helper.get_access_token('hri_integration_tenant_test_integrator_consumer', 'tenant_test hri_data_integrator hri_consumer') + @token_invalid_tenant = app_id_helper.get_access_token('hri_integration_tenant_test_invalid', 'tenant_test_invalid') + + # uncomment to print out all the transaction names # for transaction in transactions # puts transaction['name'] # end # make sure the tenant doesn't already exist - elastic.delete_index(TENANT_ID) + elastic.delete_index(TENANT_ID_TENANTS_STREAMS) end -before_each do |transaction| - transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID) +# GET /healthcheck + +before '/healthcheck > Perform a health check query of system availability > 200 > application/json' do |transaction| + puts 'before heathcheck 200' + transaction['skip'] = false end +# POST /tenants/{tenant_id} + before 'tenant > /tenants/{tenantId} > Create new tenant > 201 > application/json' do |transaction| puts 'before create tenant 201' transaction['skip'] = false + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, 
TENANT_ID_TENANTS_STREAMS) transaction['request']['headers']['Authorization'] = "Bearer #{@iam_token}" end before 'tenant > /tenants/{tenantId} > Create new tenant > 401 > application/json' do |transaction| puts 'before create tenant 401' transaction['skip'] = false + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_TENANTS_STREAMS) end -before 'tenant > /tenants > Get a list of all tenantIds > 200 > application/json' do |transaction| - puts 'before get tenants 200' - transaction['skip'] = false - transaction['request']['headers']['Authorization'] = "Bearer #{@iam_token}" -end +# POST /tenants/{tenant_id}/streams/{integrator_id} -before 'tenant > /tenants > Get a list of all tenantIds > 401 > application/json' do |transaction| - puts 'before get tenants 401' +before 'stream > /tenants/{tenantId}/streams/{streamId} > Create new Stream for a Tenant > 201 > application/json' do |transaction| + puts 'before create stream 201' transaction['skip'] = false + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_TENANTS_STREAMS) + transaction['request']['headers']['Authorization'] = "Bearer #{@iam_token}" end -before 'tenant > /tenants/{tenantId} > Get information on a specific elastic index > 200 > application/json' do |transaction| - puts 'before get tenant 200' +before 'stream > /tenants/{tenantId}/streams/{streamId} > Create new Stream for a Tenant > 400 > application/json' do |transaction| + puts 'before create stream 400' transaction['skip'] = false + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_TENANTS_STREAMS) transaction['request']['headers']['Authorization'] = "Bearer #{@iam_token}" + transaction['request']['body'] = '{bad json string"' end -before 'tenant > /tenants/{tenantId} > Get information on a specific elastic index > 401 > application/json' do |transaction| - puts 'before get tenant 401' - transaction['skip'] = false -end +# DELETE /tenants/{tenant_id}/streams/{integrator_id} -before 'tenant > /tenants/{tenantId} > Get 
information on a specific elastic index > 404 > application/json' do |transaction| - puts 'before get tenant 404' +before 'stream > /tenants/{tenantId}/streams/{streamId} > Delete a Stream for a Tenant > 200 > application/json' do |transaction| + puts 'before delete stream 200' transaction['skip'] = false - transaction['fullPath'].gsub!(TENANT_ID, 'missingTenant') + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_TENANTS_STREAMS) transaction['request']['headers']['Authorization'] = "Bearer #{@iam_token}" end -before 'batch > /tenants/{tenantId}/batches > Create Batch > 201 > application/json' do |transaction| - puts 'before create batch 201' +before 'stream > /tenants/{tenantId}/streams/{streamId} > Delete a Stream for a Tenant > 404 > application/json' do |transaction| + puts 'before delete stream 404' transaction['skip'] = false + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, 'missingTenant') + transaction['request']['headers']['Authorization'] = "Bearer #{@iam_token}" end -after 'batch > /tenants/{tenantId}/batches > Create Batch > 201 > application/json' do |transaction| - puts 'after create batch 201' - @batch_id = JSON.parse(transaction['real']['body'])['id'] -end +# DELETE /tenants/{tenant_id} -before '/healthcheck > Perform a health check query of system availability > 200 > application/json' do |transaction| - puts 'before heathcheck 200' +before 'tenant > /tenants/{tenantId} > Delete a specific tenant > 200 > application/json' do |transaction| + puts 'before delete tenant 200' + transaction['skip'] = false + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_TENANTS_STREAMS) + transaction['expected']['headers'].delete('Content-Type') + transaction['request']['headers']['Authorization'] = "Bearer #{@iam_token}" end -before 'stream > /tenants/{tenantId}/streams/{streamId} > Create new Stream for a Tenant > 201 > application/json' do |transaction| - puts 'before create stream 201' +before 'tenant > /tenants/{tenantId} > Delete a 
specific tenant > 401 > application/json' do |transaction| + puts 'before delete tenant 401' + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_TENANTS_STREAMS) transaction['skip'] = false - transaction['request']['headers']['Authorization'] = "Bearer #{@iam_token}" end -before 'stream > /tenants/{tenantId}/streams/{streamId} > Create new Stream for a Tenant > 400 > application/json' do |transaction| - puts 'before create stream 400' +before 'tenant > /tenants/{tenantId} > Delete a specific tenant > 404 > application/json' do |transaction| + puts 'before delete tenant 404' transaction['skip'] = false + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, 'invalid') transaction['request']['headers']['Authorization'] = "Bearer #{@iam_token}" - transaction['request']['body'] = '{bad json string"' end +# GET /tenants/{tenant_id}/streams + before 'stream > /tenants/{tenantId}/streams > Get all Streams for Tenant > 200 > application/json' do |transaction| puts 'before get streams 200' transaction['skip'] = false + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_TENANTS_STREAMS) transaction['request']['headers']['Authorization'] = "Bearer #{@iam_token}" end before 'stream > /tenants/{tenantId}/streams > Get all Streams for Tenant > 404 > application/json' do |transaction| puts 'before get streams 404' transaction['skip'] = false + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, 'invalid') transaction['request']['headers']['Authorization'] = "Bearer #{@iam_token}" - transaction['fullPath'].gsub!(TENANT_ID, 'invalid') end -before 'stream > /tenants/{tenantId}/streams/{streamId} > Delete a Stream for a Tenant > 200 > application/json' do |transaction| - puts 'before delete stream 200' +# GET /tenants + +before 'tenant > /tenants > Get a list of all tenantIds > 200 > application/json' do |transaction| + puts 'before get tenants 200' transaction['skip'] = false + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_TENANTS_STREAMS) 
transaction['request']['headers']['Authorization'] = "Bearer #{@iam_token}" end -before 'stream > /tenants/{tenantId}/streams/{streamId} > Delete a Stream for a Tenant > 404 > application/json' do |transaction| - puts 'before delete stream 404' +before 'tenant > /tenants > Get a list of all tenantIds > 401 > application/json' do |transaction| + puts 'before get tenants 401' + transaction['skip'] = false + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_TENANTS_STREAMS) +end + +# GET /tenants/{tenant_id} + +before 'tenant > /tenants/{tenantId} > Get information on a specific elastic index > 200 > application/json' do |transaction| + puts 'before get tenant 200' transaction['skip'] = false + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_TENANTS_STREAMS) transaction['request']['headers']['Authorization'] = "Bearer #{@iam_token}" - transaction['fullPath'].gsub!(TENANT_ID, 'invalid') end -before 'batch > /tenants/{tenantId}/batches > Create Batch > 400 > application/json' do |transaction| - puts 'before create batch 400' +before 'tenant > /tenants/{tenantId} > Get information on a specific elastic index > 401 > application/json' do |transaction| + puts 'before get tenant 401' transaction['skip'] = false - transaction['request']['body'] = '{bad json string"' + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_TENANTS_STREAMS) +end + +before 'tenant > /tenants/{tenantId} > Get information on a specific elastic index > 404 > application/json' do |transaction| + puts 'before get tenant 404' + transaction['skip'] = false + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, 'invalid') + transaction['request']['headers']['Authorization'] = "Bearer #{@iam_token}" end -before 'batch > /tenants/{tenantId}/batches > Create Batch > 404 > application/json' do |transaction| - puts 'before create batch 404' +# GET /tenants/{tenant_id}/batches + +before 'batch > /tenants/{tenantId}/batches > Get Batches for Tenant > 200 > application/json' do 
|transaction| + puts 'before get batches 200' transaction['skip'] = false - transaction['fullPath'].gsub!(TENANT_ID, 'missingTenant') + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_BATCHES) + transaction['request']['headers']['Authorization'] = "Bearer #{@token_all_roles}" end -before 'batch > /tenants/{tenantId}/batches > Get Batches for Tenant > 404 > application/json' do |transaction| - puts 'before get batches 404' +before 'batch > /tenants/{tenantId}/batches > Get Batches for Tenant > 401 > application/json' do |transaction| + puts 'before get batches 401' transaction['skip'] = false - transaction['fullPath'].gsub!(TENANT_ID, 'missingTenant') + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_BATCHES) + transaction['request']['headers']['Authorization'] = "Bearer #{@token_invalid_tenant}" end +# GET /tenants/{tenantId}/batches/{batchId} + before 'batch > /tenants/{tenantId}/batches/{batchId} > Retrieve Metadata for Batch > 200 > application/json' do |transaction| puts 'before get batch 200' + transaction['skip'] = false + if @batch_id.nil? + transaction['fail'] = 'nil batch_id' + else + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_BATCHES) + transaction['fullPath'].gsub!('batch12345', @batch_id) + transaction['request']['headers']['Authorization'] = "Bearer #{@token_all_roles}" + end +end + +before 'batch > /tenants/{tenantId}/batches/{batchId} > Retrieve Metadata for Batch > 401 > application/json' do |transaction| + puts 'before get batch 401' + transaction['skip'] = false if @batch_id.nil? 
transaction['fail'] = 'nil batch_id' else + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_BATCHES) transaction['fullPath'].gsub!('batch12345', @batch_id) + transaction['request']['headers']['Authorization'] = "Bearer #{@token_invalid_tenant}" end end before 'batch > /tenants/{tenantId}/batches/{batchId} > Retrieve Metadata for Batch > 404 > application/json' do |transaction| puts 'before get batch 404' transaction['skip'] = false + if @batch_id.nil? + transaction['fail'] = 'nil batch_id' + else + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_BATCHES) + transaction['fullPath'].gsub!('batch12345', 'INVALID') + transaction['request']['headers']['Authorization'] = "Bearer #{@token_all_roles}" + end +end + +# POST /tenants/{tenant_id}/batches + +before 'batch > /tenants/{tenantId}/batches > Create Batch > 201 > application/json' do |transaction| + puts 'before create batch 201' + transaction['skip'] = false + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_BATCHES) + transaction['request']['headers']['Authorization'] = "Bearer #{@token_all_roles}" +end + +after 'batch > /tenants/{tenantId}/batches > Create Batch > 201 > application/json' do |transaction| + puts 'after create batch 201' + transaction['skip'] = false + parsed_body = JSON.parse transaction['real']['body'] + @batch_id = parsed_body['id'] end +before 'batch > /tenants/{tenantId}/batches > Create Batch > 400 > application/json' do |transaction| + puts 'before create batch 400' + transaction['skip'] = false + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_BATCHES) + transaction['request']['body'] = '{bad json string"' +end + +before 'batch > /tenants/{tenantId}/batches > Create Batch > 401 > application/json' do |transaction| + puts 'before create batch 401' + transaction['skip'] = false + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_BATCHES) + transaction['request']['headers']['Authorization'] = "Bearer #{@token_invalid_tenant}" +end + +# PUT 
/tenants/{tenantId}/batches/{batchId}/action/sendComplete + before 'batch > /tenants/{tenantId}/batches/{batchId}/action/sendComplete > Update Batch status to Send Complete > 200 > application/json' do |transaction| puts 'before sendComplete 200' + transaction['skip'] = false if @batch_id.nil? transaction['fail'] = 'nil batch_id' else + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_BATCHES) transaction['fullPath'].gsub!('batch12345', @batch_id) + transaction['request']['headers']['Authorization'] = "Bearer #{@token_all_roles}" + update_batch_script = { + script: { + source: 'ctx._source.status = params.status', + lang: 'painless', + params: { + status: 'started' + } + } + }.to_json + elastic.es_batch_update(TENANT_ID_BATCHES, @batch_id, update_batch_script) end end before 'batch > /tenants/{tenantId}/batches/{batchId}/action/sendComplete > Update Batch status to Send Complete > 400 > application/json' do |transaction| puts 'before sendComplete 400' transaction['skip'] = false + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_BATCHES) transaction['request']['body'] = '{bad json string"' end +before 'batch > /tenants/{tenantId}/batches/{batchId}/action/sendComplete > Update Batch status to Send Complete > 401 > application/json' do |transaction| + puts 'before sendComplete 401' + transaction['skip'] = false + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_BATCHES) + transaction['request']['headers']['Authorization'] = "Bearer #{@token_invalid_tenant}" +end + before 'batch > /tenants/{tenantId}/batches/{batchId}/action/sendComplete > Update Batch status to Send Complete > 404 > application/json' do |transaction| puts 'before sendComplete 404' transaction['skip'] = false + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_BATCHES) + transaction['request']['headers']['Authorization'] = "Bearer #{@token_all_roles}" end before 'batch > /tenants/{tenantId}/batches/{batchId}/action/sendComplete > Update Batch status to Send 
Complete > 409 > application/json' do |transaction| @@ -177,7 +294,7 @@ if @batch_id.nil? transaction['fail'] = 'nil batch_id' else - elastic.es_batch_update(TENANT_ID, @batch_id[0..-6], ' + elastic.es_batch_update(TENANT_ID_BATCHES, @batch_id, ' { "script" : { "source": "ctx._source.status = params.status", @@ -187,16 +304,88 @@ } } }') + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_BATCHES) transaction['fullPath'].gsub!('batch12345', @batch_id) + transaction['request']['headers']['Authorization'] = "Bearer #{@token_all_roles}" + end +end + +# PUT /tenants/{tenantId}/batches/{batchId}/action/processingComplete + +before 'batch > /tenants/{tenantId}/batches/{batchId}/action/processingComplete > Indicate the Batch has been processed (Internal) > 200 > application/json' do |transaction| + puts 'before processingComplete 200' + transaction['skip'] = false + if @batch_id.nil? + transaction['fail'] = 'nil batch_id' + else + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_BATCHES) + transaction['fullPath'].gsub!("batch12345", @batch_id) + update_batch_script = { + script: { + source: 'ctx._source.status = params.status', + lang: 'painless', + params: { + status: 'sendCompleted' + } + } + }.to_json + elastic.es_batch_update(TENANT_ID_BATCHES, @batch_id, update_batch_script) end end +before 'batch > /tenants/{tenantId}/batches/{batchId}/action/processingComplete > Indicate the Batch has been processed (Internal) > 400 > application/json' do |transaction| + puts 'before processingComplete 400' + transaction['skip'] = false + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_BATCHES) + transaction['request']['body'] = '{bad json string"' +end + +before 'batch > /tenants/{tenantId}/batches/{batchId}/action/processingComplete > Indicate the Batch has been processed (Internal) > 401 > application/json' do |transaction| + puts 'before processingComplete 401' + transaction['skip'] = false + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, 
TENANT_ID_BATCHES) + transaction['request']['headers']['Authorization'] = "Bearer #{@token_invalid_tenant}" +end + +before 'batch > /tenants/{tenantId}/batches/{batchId}/action/processingComplete > Indicate the Batch has been processed (Internal) > 404 > application/json' do |transaction| + puts 'before processingComplete 404' + transaction['skip'] = false + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_BATCHES) +end + +before 'batch > /tenants/{tenantId}/batches/{batchId}/action/processingComplete > Indicate the Batch has been processed (Internal) > 409 > application/json' do |transaction| + puts 'before processingComplete 409' + transaction['skip'] = false + if @batch_id.nil? + transaction['fail'] = 'nil batch_id' + else + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_BATCHES) + transaction['fullPath'].gsub!("batch12345", @batch_id) + update_batch_script = { + script: { + source: 'ctx._source.status = params.status', + lang: 'painless', + params: { + status: 'completed' + } + } + }.to_json + elastic.es_batch_update(TENANT_ID_BATCHES, @batch_id, update_batch_script) + end +end + +# PUT /tenants/{tenantId}/batches/{batchId}/action/terminate + before 'batch > /tenants/{tenantId}/batches/{batchId}/action/terminate > Terminate Batch > 200 > application/json' do |transaction| puts 'before terminate 200' + transaction['skip'] = false if @batch_id.nil? 
transaction['fail'] = 'nil batch_id' else - elastic.es_batch_update(TENANT_ID, @batch_id[0..-6], ' + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_BATCHES) + transaction['fullPath'].gsub!('batch12345', @batch_id) + transaction['request']['headers']['Authorization'] = "Bearer #{@token_all_roles}" + elastic.es_batch_update(TENANT_ID_BATCHES, @batch_id, ' { "script" : { "source": "ctx._source.status = params.status", @@ -206,13 +395,21 @@ } } }') - transaction['fullPath'].gsub!('batch12345', @batch_id) end end +before 'batch > /tenants/{tenantId}/batches/{batchId}/action/terminate > Terminate Batch > 401 > application/json' do |transaction| + puts 'before terminate 401' + transaction['skip'] = false + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_BATCHES) + transaction['request']['headers']['Authorization'] = "Bearer #{@token_invalid_tenant}" +end + before 'batch > /tenants/{tenantId}/batches/{batchId}/action/terminate > Terminate Batch > 404 > application/json' do |transaction| puts 'before terminate 404' transaction['skip'] = false + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_BATCHES) + transaction['request']['headers']['Authorization'] = "Bearer #{@token_all_roles}" end before 'batch > /tenants/{tenantId}/batches/{batchId}/action/terminate > Terminate Batch > 409 > application/json' do |transaction| @@ -221,7 +418,7 @@ if @batch_id.nil? 
transaction['fail'] = 'nil batch_id' else - elastic.es_batch_update(TENANT_ID, @batch_id[0..-6], ' + elastic.es_batch_update(TENANT_ID_BATCHES, @batch_id, ' { "script" : { "source": "ctx._source.status = params.status", @@ -231,31 +428,82 @@ } } }') + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_BATCHES) transaction['fullPath'].gsub!('batch12345', @batch_id) + transaction['request']['headers']['Authorization'] = "Bearer #{@token_all_roles}" end end -before 'tenant > /tenants/{tenantId} > Delete a specific tenant > 200 > application/json' do |transaction| - puts 'before delete tenant 200' +# PUT /tenants/{tenantId}/batches/{batchId}/action/fail + +before 'batch > /tenants/{tenantId}/batches/{batchId}/action/fail > Fail the Batch (Internal) > 200 > application/json' do |transaction| + puts 'before fail 200' transaction['skip'] = false - transaction['expected']['headers'].delete('Content-Type') - transaction['request']['headers']['Authorization'] = "Bearer #{@iam_token}" + if @batch_id.nil? 
+ transaction['fail'] = 'nil batch_id' + else + elastic.es_batch_update(TENANT_ID_BATCHES, @batch_id, ' + { + "script" : { + "source": "ctx._source.status = params.status", + "lang": "painless", + "params" : { + "status" : "completed" + } + } + }') + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_BATCHES) + transaction['fullPath'].gsub!('batch12345', @batch_id) + end end -before 'tenant > /tenants/{tenantId} > Delete a specific tenant > 401 > application/json' do |transaction| - puts 'before delete tenant 401' +before 'batch > /tenants/{tenantId}/batches/{batchId}/action/fail > Fail the Batch (Internal) > 400 > application/json' do |transaction| + puts 'before fail 400' transaction['skip'] = false + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_BATCHES) + transaction['request']['body'] = '{bad json string"' end -before 'tenant > /tenants/{tenantId} > Delete a specific tenant > 404 > application/json' do |transaction| - puts 'before delete tenant 404' +before 'batch > /tenants/{tenantId}/batches/{batchId}/action/fail > Fail the Batch (Internal) > 401 > application/json' do |transaction| + puts 'before fail 401' transaction['skip'] = false - transaction['fullPath'].gsub!(TENANT_ID, 'invalid') - transaction['request']['headers']['Authorization'] = "Bearer #{@iam_token}" + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_BATCHES) + transaction['request']['headers']['Authorization'] = "Bearer #{@token_invalid_tenant}" +end + +before 'batch > /tenants/{tenantId}/batches/{batchId}/action/fail > Fail the Batch (Internal) > 404 > application/json' do |transaction| + puts 'before fail 404' + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_BATCHES) + transaction['skip'] = false +end + +before 'batch > /tenants/{tenantId}/batches/{batchId}/action/fail > Fail the Batch (Internal) > 409 > application/json' do |transaction| + puts 'before fail 409' + transaction['skip'] = false + if @batch_id.nil? 
+ transaction['fail'] = 'nil batch_id' + else + transaction['fullPath'].gsub!(DEFAULT_TENANT_ID, TENANT_ID_BATCHES) + transaction['fullPath'].gsub!("batch12345", @batch_id) + update_batch_script = { + script: { + source: 'ctx._source.status = params.status', + lang: 'painless', + params: { + status: 'failed' + } + } + }.to_json + elastic.es_batch_update(TENANT_ID_BATCHES, @batch_id, update_batch_script) + end end after_all do |transactions| puts 'after_all' + unless @batch_id.nil? + elastic.es_delete_batch(TENANT_ID_BATCHES, @batch_id) + end + # make sure the tenant index is deleted - elastic.delete_index(TENANT_ID) + elastic.delete_index(TENANT_ID_TENANTS_STREAMS) end \ No newline at end of file diff --git a/test/spec/elastic_helper.rb b/test/spec/elastic_helper.rb index f1b5585..cfeba13 100644 --- a/test/spec/elastic_helper.rb +++ b/test/spec/elastic_helper.rb @@ -7,8 +7,8 @@ class ElasticHelper def initialize @helper = Helper.new @elastic_url = ENV['ELASTIC_URL'] - @headers = { 'Content-Type': 'application/json' } - @basic_auth = { user: ENV['ELASTIC_USER'], password: ENV['ELASTIC_PASSWORD'] } + @headers = {'Content-Type': 'application/json'} + @basic_auth = {user: ENV['ELASTIC_USER'], password: ENV['ELASTIC_PASSWORD']} end def es_health_check @@ -27,12 +27,17 @@ def es_delete_batch(index, batch_id) @helper.rest_delete("#{@elastic_url}/#{index}-batches/_doc/#{batch_id}", nil, @headers, @basic_auth) end + def es_delete_by_query(index, query) + @helper.rest_post("#{@elastic_url}/#{index}-batches/_delete_by_query?q=#{query}", nil, @headers, @basic_auth) + end + def es_batch_search(index, query) @helper.rest_get("#{@elastic_url}/#{index}-batches/_search?pretty&q=#{query}", @headers, @basic_auth) end - def es_batch_update(index, batch, script) - @helper.rest_post("#{@elastic_url}/#{index}-batches/_doc/#{batch}/_update?refresh=wait_for", script, @headers, @basic_auth) + def es_batch_update(index, batch_id, script) + # wait_for waits for an index refresh to happen, so 
increase the standard timeout + @helper.rest_post("#{@elastic_url}/#{index}-batches/_doc/#{batch_id}/_update?refresh=wait_for", script, @headers, @basic_auth.merge({timeout: 120})) end def delete_index(index) diff --git a/test/spec/event_streams_helper.rb b/test/spec/event_streams_helper.rb new file mode 100644 index 0000000..443e961 --- /dev/null +++ b/test/spec/event_streams_helper.rb @@ -0,0 +1,15 @@ +# (C) Copyright IBM Corp. 2020 +# +# SPDX-License-Identifier: Apache-2.0 + +class EventStreamsHelper + + def initialize + @helper = Helper.new + end + + def get_topics + @helper.exec_command("bx es topics").split("\n").map { |t| t.strip } + end + +end \ No newline at end of file diff --git a/test/spec/helper.rb b/test/spec/helper.rb index f48c989..2dfcb0a 100644 --- a/test/spec/helper.rb +++ b/test/spec/helper.rb @@ -35,6 +35,14 @@ def rest_delete(url, body, override_headers = {}, overrides = {}) end end + def exec_command(cmd) + shell_escape = cmd.split(" ").join("\\ ") + Open3.popen3("bash -c #{shell_escape}") do |stdin, stdout, stderr, wait_thr| + stdin.close + wait_thr.value.success? ? stdout.read : stderr.read + end + end + private def rest_client_resource_for @@ -45,7 +53,7 @@ def rest_client_resource_for end end - def logger_message(info, response) + def logger_message(info, error) printed_info = if info[:headers].nil? info else @@ -53,14 +61,14 @@ def logger_message(info, response) headers['Authorization'] = headers['Authorization'].split(' ')[0] + ' [REDACTED]' if headers['Authorization'] info.merge(headers: headers) end - Logger.new(STDOUT).info("Received exception hitting endpoint: #{printed_info}. Exception response: #{response}") + Logger.new(STDOUT).info("Received exception hitting endpoint: #{printed_info}. 
Exception: #{error}, response: #{error.response}") end def response_rescue_wrapper yield rescue Exception => e raise e unless defined?(e.response) - logger_message(@hri_api_info, e.response) + logger_message(@hri_api_info, e) e.response end diff --git a/test/spec/hri_management_api_spec.rb b/test/spec/hri_management_api_spec.rb index 07f6289..76568d5 100644 --- a/test/spec/hri_management_api_spec.rb +++ b/test/spec/hri_management_api_spec.rb @@ -7,41 +7,26 @@ describe 'HRI Management API ' do INVALID_ID = 'INVALID' - TENANT_ID = "rspec-#{ENV['TRAVIS_BRANCH'].delete('.')}-tenant".downcase - INTEGRATOR_ID = "rspec-#{ENV['TRAVIS_BRANCH'].delete('.')}-integrator".downcase + TENANT_ID = 'test' + INTEGRATOR_ID = 'claims' TEST_TENANT_ID = "rspec-#{ENV['TRAVIS_BRANCH'].delete('.')}-test-tenant".downcase TEST_INTEGRATOR_ID = "rspec-#{ENV['TRAVIS_BRANCH'].delete('.')}-test-integrator".downcase DATA_TYPE = 'rspec-batch' STATUS = 'started' - KAFKA_BROKERS = ENV['EVENTSTREAMS_BROKERS'] + BATCH_INPUT_TOPIC = "ingest.#{TENANT_ID}.#{INTEGRATOR_ID}.in" + KAFKA_TIMEOUT = 60 before(:all) do @elastic = ElasticHelper.new - @cos_helper = COSHelper.new - - @kafka = Kafka.new(KAFKA_BROKERS, sasl_plain_username: 'token', sasl_plain_password: ENV['SASL_PLAIN_PASSWORD'], ssl_ca_certs_from_system: true) - @kafka_consumer = @kafka.consumer(group_id: 'rspec-test-consumer') - @kafka_consumer.subscribe("ingest.#{TENANT_ID}.#{INTEGRATOR_ID}.notification") - - api_list = `bx fn api list`.scan(/https.*hri/) - @hri_helper = HRIHelper.new(api_list.first) + @app_id_helper = AppIDHelper.new + @hri_helper = HRIHelper.new(`bx fn api list`.scan(/https.*hri/).first) + @event_streams_helper = EventStreamsHelper.new @start_date = DateTime.now - #Create Tenant - response = @hri_helper.hri_post_tenant(TENANT_ID) - expect(response.code).to eq 201 - parsed_response = JSON.parse(response.body) - expect(parsed_response['tenantId']).to eql TENANT_ID - - #Create Stream - stream_info = { - numPartitions: 1, - 
retentionMs: 3600000 - }.to_json - response = @hri_helper.hri_post_tenant_stream(TENANT_ID, INTEGRATOR_ID, stream_info) - expect(response.code).to eq 201 - parsed_response = JSON.parse(response.body) - expect(parsed_response['id']).to eql INTEGRATOR_ID + #Initialize Kafka Consumer + @kafka = Kafka.new(ENV['EVENTSTREAMS_BROKERS'], sasl_plain_username: 'token', sasl_plain_password: ENV['SASL_PLAIN_PASSWORD'], ssl_ca_certs_from_system: true) + @kafka_consumer = @kafka.consumer(group_id: 'rspec-mgmt-api-consumer') + @kafka_consumer.subscribe("ingest.#{TENANT_ID}.#{INTEGRATOR_ID}.notification") #Create Batch @batch_prefix = "rspec-#{ENV['TRAVIS_BRANCH'].delete('.')}" @@ -51,7 +36,7 @@ status: STATUS, recordCount: 1, dataType: DATA_TYPE, - topic: "ingest.#{TENANT_ID}.#{INTEGRATOR_ID}.in", + topic: BATCH_INPUT_TOPIC, startDate: @start_date, metadata: { rspec1: 'test1', @@ -62,29 +47,30 @@ } } }.to_json - response = @elastic.es_create_batch(TENANT_ID, create_batch) - expect(response.code).to eq 201 - parsed_response = JSON.parse(response.body) - @batch_id = parsed_response['_id'] + @batch_id, @new_batch_id = '-', '-' + while @batch_id[-1] == '-' + response = @elastic.es_create_batch(TENANT_ID, create_batch) + expect(response.code).to eq 201 + parsed_response = JSON.parse(response.body) + @batch_id = parsed_response['_id'] + Logger.new(STDOUT).info("New Batch Created With ID: #{@batch_id}") + end + + #Get AppId Access Tokens + @token_invalid_tenant = @app_id_helper.get_access_token('hri_integration_tenant_test_invalid', 'tenant_test_invalid') + @token_no_roles = @app_id_helper.get_access_token('hri_integration_tenant_test', 'tenant_test') + @token_integrator_role_only = @app_id_helper.get_access_token('hri_integration_tenant_test_data_integrator', 'tenant_test hri_data_integrator') + @token_consumer_role_only = @app_id_helper.get_access_token('hri_integration_tenant_test_data_consumer', 'tenant_test hri_consumer') + @token_all_roles = 
@app_id_helper.get_access_token('hri_integration_tenant_test_integrator_consumer', 'tenant_test hri_data_integrator hri_consumer') + @token_invalid_audience = @app_id_helper.get_access_token('hri_integration_tenant_test_integrator_consumer', 'tenant_test hri_data_integrator hri_consumer', ENV['APPID_TENANT']) end after(:all) do #Delete Batches - response = JSON.parse(@elastic.es_batch_search(TENANT_ID, "name:rspec-#{ENV['TRAVIS_BRANCH'].delete('.')}*&size=100")) - unless (response['hits']['total']).zero? - response['hits']['hits'].each do |batch| - @elastic.es_delete_batch(TENANT_ID, batch['_id']) - end - end + response = @elastic.es_delete_by_query(TENANT_ID, "name:rspec-#{ENV['TRAVIS_BRANCH']}*") + response.nil? ? (raise 'Elastic batch delete did not return a response') : (expect(response.code).to eq 200) + Logger.new(STDOUT).info("Delete test batches by query response #{response.body}") @kafka_consumer.stop - - #Delete Stream - response = @hri_helper.hri_delete_tenant_stream(TENANT_ID, INTEGRATOR_ID) - expect(response.code).to eq 200 - - #Delete Tenant - response = @hri_helper.hri_delete_tenant(TENANT_ID) - expect(response.code).to eq 200 end context 'POST /tenants/{tenant_id}' do @@ -139,6 +125,13 @@ expect(response.code).to eq 200 parsed_response = JSON.parse(response.body) expect(parsed_response['results'][0]['id']).to eql TEST_INTEGRATOR_ID + + Timeout.timeout(30, nil, 'Kafka topics not created after 30 seconds') do + loop do + topics = @event_streams_helper.get_topics + break if (topics.include?("ingest.#{TEST_TENANT_ID}.#{TEST_INTEGRATOR_ID}.in") && topics.include?("ingest.#{TEST_TENANT_ID}.#{TEST_INTEGRATOR_ID}.notification")) + end + end end it 'Stream Already Exists' do @@ -273,7 +266,7 @@ response = @hri_helper.hri_delete_tenant(INVALID_ID) expect(response.code).to eq 404 parsed_response = JSON.parse(response.body) - expect(parsed_response['errorDescription']).to eql 'index_not_found_exception: no such index' + 
expect(parsed_response['errorDescription']).to eql 'index_not_found_exception: no such index [INVALID-batches]' end it 'Delete - Unauthorized' do @@ -358,7 +351,7 @@ context 'GET /tenants/{tenant_id}/batches' do it 'Success Without Status' do - response = @hri_helper.hri_get_batches(TENANT_ID, nil) + response = @hri_helper.hri_get_batches(TENANT_ID, 'size=1000', {'Authorization' => "Bearer #{@token_all_roles}"}) expect(response.code).to eq 200 parsed_response = JSON.parse(response.body) expect(parsed_response['total']).to be > 0 @@ -373,7 +366,7 @@ end it 'Success With Status' do - response = @hri_helper.hri_get_batches(TENANT_ID, "status=#{STATUS}") + response = @hri_helper.hri_get_batches(TENANT_ID, "status=#{STATUS}&size=1000", {'Authorization' => "Bearer #{@token_consumer_role_only}"}) expect(response.code).to eq 200 parsed_response = JSON.parse(response.body) expect(parsed_response['total']).to be > 0 @@ -383,7 +376,7 @@ end it 'Success With Name' do - response = @hri_helper.hri_get_batches(TENANT_ID, "name=#{@batch_name}") + response = @hri_helper.hri_get_batches(TENANT_ID, "name=#{@batch_name}&size=1000", {'Authorization' => "Bearer #{@token_consumer_role_only}"}) expect(response.code).to eq 200 parsed_response = JSON.parse(response.body) expect(parsed_response['total']).to be > 0 @@ -394,7 +387,7 @@ it 'Success With Greater Than Date' do greater_than_date = Date.today - 365 - response = @hri_helper.hri_get_batches(TENANT_ID, "gteDate=#{greater_than_date}") + response = @hri_helper.hri_get_batches(TENANT_ID, "gteDate=#{greater_than_date}&size=1000", {'Authorization' => "Bearer #{@token_consumer_role_only}"}) expect(response.code).to eq 200 parsed_response = JSON.parse(response.body) expect(parsed_response['total']).to be > 0 @@ -405,7 +398,7 @@ it 'Success With Less Than Date' do less_than_date = Date.today + 1 - response = @hri_helper.hri_get_batches(TENANT_ID, "lteDate=#{less_than_date}") + response = @hri_helper.hri_get_batches(TENANT_ID, 
"lteDate=#{less_than_date}&size=1000", {'Authorization' => "Bearer #{@token_consumer_role_only}"}) expect(response.code).to eq 200 parsed_response = JSON.parse(response.body) expect(parsed_response['total']).to be > 0 @@ -414,22 +407,15 @@ end end - it 'Tenant ID Not Found' do - response = @hri_helper.hri_get_batches(INVALID_ID, nil, {}) - expect(response.code).to eq 404 - parsed_response = JSON.parse(response.body) - expect(parsed_response['errorDescription']).to eql('index_not_found_exception: no such index') - end - it 'Name Not Found' do - response = @hri_helper.hri_get_batches(TENANT_ID, "name=#{INVALID_ID}") + response = @hri_helper.hri_get_batches(TENANT_ID, "name=#{INVALID_ID}", {'Authorization' => "Bearer #{@token_consumer_role_only}"}) expect(response.code).to eq 200 parsed_response = JSON.parse(response.body) expect(parsed_response['results'].empty?).to be true end it 'Status Not Found' do - response = @hri_helper.hri_get_batches(TENANT_ID, "status=#{INVALID_ID}") + response = @hri_helper.hri_get_batches(TENANT_ID, "status=#{INVALID_ID}", {'Authorization' => "Bearer #{@token_consumer_role_only}"}) expect(response.code).to eq 200 parsed_response = JSON.parse(response.body) expect(parsed_response['results'].empty?).to be true @@ -437,7 +423,7 @@ it 'Greater Than Date With No Results' do greater_than_date = Date.today + 10000 - response = @hri_helper.hri_get_batches(TENANT_ID, "gteDate=#{greater_than_date}") + response = @hri_helper.hri_get_batches(TENANT_ID, "gteDate=#{greater_than_date}", {'Authorization' => "Bearer #{@token_consumer_role_only}"}) expect(response.code).to eq 200 parsed_response = JSON.parse(response.body) expect(parsed_response['results'].empty?).to be true @@ -445,47 +431,145 @@ it 'Less Than Date With No Results' do less_than_date = Date.today - 5000 - response = @hri_helper.hri_get_batches(TENANT_ID, "lteDate=#{less_than_date}") + response = @hri_helper.hri_get_batches(TENANT_ID, "lteDate=#{less_than_date}", {'Authorization' => "Bearer 
#{@token_consumer_role_only}"}) expect(response.code).to eq 200 parsed_response = JSON.parse(response.body) expect(parsed_response['results'].empty?).to be true end it 'Invalid Greater Than Date' do - response = @hri_helper.hri_get_batches(TENANT_ID, "gteDate=#{INVALID_ID}") + response = @hri_helper.hri_get_batches(TENANT_ID, "gteDate=#{INVALID_ID}", {'Authorization' => "Bearer #{@token_all_roles}"}) expect(response.code).to eq 400 parsed_response = JSON.parse(response.body) expect(parsed_response['errorDescription']).to include("failed to parse date field [#{INVALID_ID}]") end it 'Invalid Less Than Date' do - response = @hri_helper.hri_get_batches(TENANT_ID, "lteDate=#{INVALID_ID}") + response = @hri_helper.hri_get_batches(TENANT_ID, "lteDate=#{INVALID_ID}", {'Authorization' => "Bearer #{@token_all_roles}"}) expect(response.code).to eq 400 parsed_response = JSON.parse(response.body) expect(parsed_response['errorDescription']).to include("failed to parse date field [#{INVALID_ID}]") end it 'Query Parameter With Restricted Characters' do - response = @hri_helper.hri_get_batches(TENANT_ID, 'status="[{started}]"') + response = @hri_helper.hri_get_batches(TENANT_ID, 'status="[{started}]"', {'Authorization' => "Bearer #{@token_all_roles}"}) expect(response.code).to eq 400 parsed_response = JSON.parse(response.body) expect(parsed_response['errorDescription']).to eql('query parameters may not contain these characters: "[]{}') end + it 'Integrator ID can not view batches created with a different Integrator ID' do + #Create Batch + @batch_name = "#{@batch_prefix}-#{SecureRandom.uuid}" + @batch_template = { + name: @batch_name, + dataType: DATA_TYPE, + topic: BATCH_INPUT_TOPIC, + metadata: { + rspec1: 'test1', + rspec2: 'test2', + rspec3: { + rspec3A: 'test3A', + rspec3B: 'test3B' + } + } + } + while @new_batch_id[-1] == '-' + response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json, {'Authorization' => "Bearer #{@token_integrator_role_only}"}) + 
expect(response.code).to eq 201 + parsed_response = JSON.parse(response.body) + @new_batch_id = parsed_response['id'] + Logger.new(STDOUT).info("New Batch Created With ID: #{@new_batch_id}") + end + + #Modify Batch Integrator ID + update_batch_script = { + script: { + source: 'ctx._source.integratorId = params.integratorId', + lang: 'painless', + params: { + integratorId: 'modified-integrator-id' + } + } + }.to_json + response = @elastic.es_batch_update(TENANT_ID, @new_batch_id, update_batch_script) + response.nil? ? (raise 'Elastic batch update did not return a response') : (expect(response.code).to eq 200) + parsed_response = JSON.parse(response.body) + expect(parsed_response['result']).to eql('updated') + Logger.new(STDOUT).info('Batch Integrator ID updated to "modified-integrator-id"') + + #Verify Integrator ID Modified + response = @elastic.es_get_batch(TENANT_ID, @new_batch_id) + response.nil? ? (raise 'Elastic get batch did not return a response') : (expect(response.code).to eq 200) + parsed_response = JSON.parse(response.body) + expect(parsed_response['_source']['integratorId']).to eql('modified-integrator-id') + + #Verify Batch Not Visible to Different Integrator ID + response = @hri_helper.hri_get_batches(TENANT_ID, 'size=1000', {'Authorization' => "Bearer #{@token_integrator_role_only}"}) + expect(response.code).to eq 200 + parsed_response = JSON.parse(response.body) + unless parsed_response['results'].empty? + parsed_response['results'].each do |batch| + raise "Batch ID #{@new_batch_id} found with different Integrator ID!" 
if batch['id'] == @new_batch_id + end + end + + #Verify Batch Visible To Consumer Role + @batch_found = false + response = @hri_helper.hri_get_batches(TENANT_ID, 'size=1000', {'Authorization' => "Bearer #{@token_consumer_role_only}"}) + expect(response.code).to eq 200 + parsed_response = JSON.parse(response.body) + parsed_response['results'].each do |batch| + if batch['id'] == @new_batch_id + @batch_found = true + expect(batch['integratorId']).to eql 'modified-integrator-id' + end + end + expect(@batch_found).to be true + end + + it 'Unauthorized - Missing Authorization' do + response = @hri_helper.hri_get_batches(TENANT_ID, nil) + expect(response.code).to eq 401 + parsed_response = JSON.parse(response.body) + expect(parsed_response['errorDescription']).to eql('Missing Authorization header') + end + + it 'Unauthorized - Invalid Tenant ID' do + response = @hri_helper.hri_get_batches(TENANT_ID, nil, {'Authorization' => "Bearer #{@token_invalid_tenant}"}) + expect(response.code).to eq 401 + parsed_response = JSON.parse(response.body) + expect(parsed_response['errorDescription']).to eql("Unauthorized tenant access. Tenant 'test' is not included in the authorized scopes: .") + end + + it 'Unauthorized - No Roles' do + response = @hri_helper.hri_get_batches(TENANT_ID, nil, {'Authorization' => "Bearer #{@token_no_roles}"}) + expect(response.code).to eq 401 + parsed_response = JSON.parse(response.body) + expect(parsed_response['errorDescription']).to eql('The access token must have one of these scopes: hri_consumer, hri_data_integrator') + end + + it 'Unauthorized - Invalid Audience' do + response = @hri_helper.hri_get_batches(TENANT_ID, nil, {'Authorization' => "Bearer #{@token_invalid_audience}"}) + expect(response.code).to eq 401 + parsed_response = JSON.parse(response.body) + expect(parsed_response['errorDescription']).to eql("Unauthorized tenant access. 
Tenant '#{TENANT_ID}' is not included in the authorized scopes: .") + end + end context 'GET /tenants/{tenantId}/batches/{batchId}' do - it 'Success' do - response = @hri_helper.hri_get_batch(TENANT_ID, @batch_id) + it 'Success With Consumer Role Only' do + response = @hri_helper.hri_get_batch(TENANT_ID, @batch_id, {'Authorization' => "Bearer #{@token_consumer_role_only}"}) expect(response.code).to eq 200 parsed_response = JSON.parse(response.body) - expect(parsed_response['id']).to eq "#{@batch_id}batch" + expect(parsed_response['id']).to eq @batch_id expect(parsed_response['name']).to eql @batch_name expect(parsed_response['status']).to eql STATUS expect(parsed_response['startDate']).to eql @start_date.to_s expect(parsed_response['dataType']).to eql DATA_TYPE - expect(parsed_response['topic']).to eql "ingest.#{TENANT_ID}.#{INTEGRATOR_ID}.in" + expect(parsed_response['topic']).to eql BATCH_INPUT_TOPIC expect(parsed_response['recordCount']).to eql 1 expect(parsed_response['metadata']['rspec1']).to eql('test1') expect(parsed_response['metadata']['rspec2']).to eql('test2') @@ -493,21 +577,134 @@ expect(parsed_response['metadata']['rspec3']['rspec3B']).to eql('test3B') end - it 'Tenant ID Not Found' do - response = @hri_helper.hri_get_batch(INVALID_ID, @batch_id) - expect(response.code).to eq 404 + it 'Success With Integrator Role Only' do + #Create Batch + @batch_name = "#{@batch_prefix}-#{SecureRandom.uuid}" + @batch_template = { + name: @batch_name, + dataType: DATA_TYPE, + topic: BATCH_INPUT_TOPIC, + metadata: { + rspec1: 'test1', + rspec2: 'test2', + rspec3: { + rspec3A: 'test3A', + rspec3B: 'test3B' + } + } + } + while @new_batch_id[-1] == '-' + response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json, {'Authorization' => "Bearer #{@token_integrator_role_only}"}) + expect(response.code).to eq 201 + parsed_response = JSON.parse(response.body) + @new_batch_id = parsed_response['id'] + Logger.new(STDOUT).info("New Batch Created With ID: 
#{@new_batch_id}") + end + + #Get Batch + response = @hri_helper.hri_get_batch(TENANT_ID, @new_batch_id, {'Authorization' => "Bearer #{@token_integrator_role_only}"}) + expect(response.code).to eq 200 parsed_response = JSON.parse(response.body) - expect(parsed_response['errorDescription']).to include 'index_not_found_exception: no such index' + expect(parsed_response['id']).to eq @new_batch_id + expect(parsed_response['name']).to eql @batch_name + expect(parsed_response['status']).to eql STATUS + expect(parsed_response['dataType']).to eql DATA_TYPE + expect(parsed_response['topic']).to eql BATCH_INPUT_TOPIC + expect(parsed_response['metadata']['rspec1']).to eql('test1') + expect(parsed_response['metadata']['rspec2']).to eql('test2') + expect(parsed_response['metadata']['rspec3']['rspec3A']).to eql('test3A') + expect(parsed_response['metadata']['rspec3']['rspec3B']).to eql('test3B') end it 'Batch ID Not Found' do - response = @hri_helper.hri_get_batch(TENANT_ID, INVALID_ID) + response = @hri_helper.hri_get_batch(TENANT_ID, INVALID_ID, {'Authorization' => "Bearer #{@token_all_roles}"}) expect(response.code).to eq 404 parsed_response = JSON.parse(response.body) expected_batch_error = "The document for tenantId: #{TENANT_ID} with document (batch) ID: #{INVALID_ID} was not found" expect(parsed_response['errorDescription']).to eql expected_batch_error end + it 'Integrator ID can not view a batch created with a different Integrator ID' do + #Create Batch + @batch_name = "#{@batch_prefix}-#{SecureRandom.uuid}" + @batch_template = { + name: @batch_name, + dataType: DATA_TYPE, + topic: BATCH_INPUT_TOPIC, + metadata: { + rspec1: 'test1', + rspec2: 'test2', + rspec3: { + rspec3A: 'test3A', + rspec3B: 'test3B' + } + } + } + while @new_batch_id[-1] == '-' + response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json, {'Authorization' => "Bearer #{@token_integrator_role_only}"}) + expect(response.code).to eq 201 + parsed_response = JSON.parse(response.body) + 
@new_batch_id = parsed_response['id'] + Logger.new(STDOUT).info("New Batch Created With ID: #{@new_batch_id}") + end + + #Modify Batch Integrator ID + update_batch_script = { + script: { + source: 'ctx._source.integratorId = params.integratorId', + lang: 'painless', + params: { + integratorId: 'modified-integrator-id' + } + } + }.to_json + response = @elastic.es_batch_update(TENANT_ID, @new_batch_id, update_batch_script) + response.nil? ? (raise 'Elastic batch update did not return a response') : (expect(response.code).to eq 200) + parsed_response = JSON.parse(response.body) + expect(parsed_response['result']).to eql('updated') + Logger.new(STDOUT).info('Batch Integrator ID updated to "modified-integrator-id"') + + #Verify Integrator ID Modified + response = @elastic.es_get_batch(TENANT_ID, @new_batch_id) + response.nil? ? (raise 'Elastic get batch did not return a response') : (expect(response.code).to eq 200) + parsed_response = JSON.parse(response.body) + expect(parsed_response['_source']['integratorId']).to eql('modified-integrator-id') + + #Verify Batch Not Visible to Different Integrator ID + response = @hri_helper.hri_get_batch(TENANT_ID, @new_batch_id, {'Authorization' => "Bearer #{@token_integrator_role_only}"}) + expect(response.code).to eq 401 + parsed_response = JSON.parse(response.body) + expect(parsed_response['errorDescription']).to include('does not match the data integratorId') + end + + it 'Unauthorized - Missing Authorization' do + response = @hri_helper.hri_get_batch(TENANT_ID, @batch_id) + expect(response.code).to eq 401 + parsed_response = JSON.parse(response.body) + expect(parsed_response['errorDescription']).to eql('Missing Authorization header') + end + + it 'Unauthorized - Invalid Tenant ID' do + response = @hri_helper.hri_get_batch(TENANT_ID, @batch_id, {'Authorization' => "Bearer #{@token_invalid_tenant}"}) + expect(response.code).to eq 401 + parsed_response = JSON.parse(response.body) + expect(parsed_response['errorDescription']).to 
eql("Unauthorized tenant access. Tenant 'test' is not included in the authorized scopes: .") + end + + it 'Unauthorized - No Roles' do + response = @hri_helper.hri_get_batch(TENANT_ID, @batch_id, {'Authorization' => "Bearer #{@token_no_roles}"}) + expect(response.code).to eq 401 + parsed_response = JSON.parse(response.body) + expect(parsed_response['errorDescription']).to eql('The access token must have one of these scopes: hri_consumer, hri_data_integrator') + end + + it 'Unauthorized - Invalid Audience' do + response = @hri_helper.hri_get_batch(TENANT_ID, @batch_id, {'Authorization' => "Bearer #{@token_invalid_audience}"}) + expect(response.code).to eq 401 + parsed_response = JSON.parse(response.body) + expect(parsed_response['errorDescription']).to eql("Unauthorized tenant access. Tenant '#{TENANT_ID}' is not included in the authorized scopes: .") + end + end context 'POST /tenants/{tenant_id}/batches' do @@ -516,10 +713,8 @@ @batch_name = "#{@batch_prefix}-#{SecureRandom.uuid}" @batch_template = { name: @batch_name, - status: STATUS, - recordCount: 10, dataType: DATA_TYPE, - topic: "ingest.#{TENANT_ID}.#{INTEGRATOR_ID}.in", + topic: BATCH_INPUT_TOPIC, startDate: Date.today, endDate: Date.today + 1, metadata: { @@ -535,20 +730,23 @@ it 'Successful Batch Creation' do #Create Batch - response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json) - expect(response.code).to eq 201 - parsed_response = JSON.parse(response.body) - @new_batch_id = parsed_response['id'][0..-6] + while @new_batch_id[-1] == '-' + response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json, {'Authorization' => "Bearer #{@token_all_roles}"}) + expect(response.code).to eq 201 + parsed_response = JSON.parse(response.body) + @new_batch_id = parsed_response['id'] + Logger.new(STDOUT).info("New Batch Created With ID: #{@new_batch_id}") + end #Verify Batch in Elastic response = @elastic.es_get_batch(TENANT_ID, @new_batch_id) - expect(response.code).to eq 200 + response.nil? 
? (raise 'Elastic get batch did not return a response') : (expect(response.code).to eq 200) parsed_response = JSON.parse(response.body) expect(parsed_response['_index']).to eql("#{TENANT_ID}-batches") expect(parsed_response['_id']).to eql(@new_batch_id) expect(parsed_response['found']).to be true expect(parsed_response['_source']['name']).to eql(@batch_name) - expect(parsed_response['_source']['topic']).to eql("ingest.#{TENANT_ID}.#{INTEGRATOR_ID}.in") + expect(parsed_response['_source']['topic']).to eql(BATCH_INPUT_TOPIC) expect(parsed_response['_source']['dataType']).to eql(DATA_TYPE) expect(parsed_response['_source']['metadata']['rspec1']).to eql('test1') expect(parsed_response['_source']['metadata']['rspec2']).to eql('test2') @@ -557,22 +755,22 @@ expect(DateTime.parse(parsed_response['_source']['startDate']).strftime("%Y-%m-%d")).to eq(Date.today.strftime("%Y-%m-%d")) #Verify Kafka Message - Timeout.timeout(10) do + Timeout.timeout(KAFKA_TIMEOUT) do + Logger.new(STDOUT).info("Waiting for a Kafka message with Batch ID: #{@new_batch_id} and status: #{STATUS}") @kafka_consumer.each_message do |message| parsed_message = JSON.parse(message.value) - if parsed_message['id'] == "#{@new_batch_id}batch" + if parsed_message['id'] == @new_batch_id and parsed_message['status'] == STATUS @message_found = true expect(parsed_message['dataType']).to eql(DATA_TYPE) - expect(parsed_message['id']).to eql("#{@new_batch_id}batch") + expect(parsed_message['id']).to eql(@new_batch_id) expect(parsed_message['name']).to eql(@batch_name) - expect(parsed_message['topic']).to eql("ingest.#{TENANT_ID}.#{INTEGRATOR_ID}.in") + expect(parsed_message['topic']).to eql(BATCH_INPUT_TOPIC) expect(parsed_message['status']).to eql(STATUS) expect(DateTime.parse(parsed_message['startDate']).strftime("%Y-%m-%d")).to eq(Date.today.strftime("%Y-%m-%d")) expect(parsed_message['metadata']['rspec1']).to eql('test1') expect(parsed_message['metadata']['rspec2']).to eql('test2') 
expect(parsed_message['metadata']['rspec3']['rspec3A']).to eql('test3A') expect(parsed_message['metadata']['rspec3']['rspec3B']).to eql('test3B') - expect(parsed_message['metadata']['rspec1']).to eql('test1') break end end @@ -590,7 +788,7 @@ it 'should auto-delete a batch from Elastic if the batch was created with an invalid Kafka topic' do #Gather existing batches existing_batches = [] - response = @hri_helper.hri_get_batches(TENANT_ID, nil) + response = @hri_helper.hri_get_batches(TENANT_ID, nil, {'Authorization' => "Bearer #{@token_all_roles}"}) expect(response.code).to eq 200 parsed_response = JSON.parse(response.body) expect(parsed_response['total']).to be > 0 @@ -601,7 +799,7 @@ #Create Batch with Bad Topic @batch_template[:topic] = 'INVALID-TEST-TOPIC' @batch_template[:dataType] = 'rspec-invalid-batch' - response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json) + response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json, {'Authorization' => "Bearer #{@token_all_roles}"}) expect(response.code).to eq 500 parsed_response = JSON.parse(response.body) expect(parsed_response['errorDescription']).to include('the request is for a topic or partition that does not exist on this broker') @@ -610,7 +808,7 @@ 50.times do new_batches = [] @batch_deleted = false - response = @hri_helper.hri_get_batches(TENANT_ID, nil) + response = @hri_helper.hri_get_batches(TENANT_ID, nil, {'Authorization' => "Bearer #{@token_all_roles}"}) expect(response.code).to eq 200 parsed_response = JSON.parse(response.body) expect(parsed_response['total']).to be > 0 @@ -626,16 +824,9 @@ expect(@batch_deleted).to be true end - it 'Invalid Tenant ID' do - response = @hri_helper.hri_post_batch(INVALID_ID, @batch_template.to_json) - expect(response.code).to eq 404 - parsed_response = JSON.parse(response.body) - expect(parsed_response['errorDescription']).to eql 'index_not_found_exception: no such index' - end - it 'Invalid Name' do @batch_template[:name] = 12345 - 
response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json) + response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json, {'Authorization' => "Bearer #{@token_all_roles}"}) expect(response.code).to eq 400 parsed_response = JSON.parse(response.body) expect(parsed_response['errorDescription']).to include('name must be a string') @@ -643,7 +834,7 @@ it 'Invalid Topic' do @batch_template[:topic] = 12345 - response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json) + response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json, {'Authorization' => "Bearer #{@token_all_roles}"}) expect(response.code).to eq 400 parsed_response = JSON.parse(response.body) expect(parsed_response['errorDescription']).to include('topic must be a string') @@ -651,7 +842,7 @@ it 'Invalid Data Type' do @batch_template[:dataType] = 12345 - response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json) + response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json, {'Authorization' => "Bearer #{@token_all_roles}"}) expect(response.code).to eq 400 parsed_response = JSON.parse(response.body) expect(parsed_response['errorDescription']).to include('dataType must be a string') @@ -659,7 +850,7 @@ it 'Missing Name' do @batch_template.delete(:name) - response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json) + response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json, {'Authorization' => "Bearer #{@token_all_roles}"}) expect(response.code).to eq 400 parsed_response = JSON.parse(response.body) expect(parsed_response['errorDescription']).to eql 'Missing required parameter(s): [name]' @@ -667,7 +858,7 @@ it 'Missing Topic' do @batch_template.delete(:topic) - response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json) + response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json, {'Authorization' => "Bearer #{@token_all_roles}"}) expect(response.code).to eq 400 parsed_response 
= JSON.parse(response.body) expect(parsed_response['errorDescription']).to eql 'Missing required parameter(s): [topic]' @@ -675,52 +866,119 @@ it 'Missing Data Type' do @batch_template.delete(:dataType) - response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json) + response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json, {'Authorization' => "Bearer #{@token_all_roles}"}) expect(response.code).to eq 400 parsed_response = JSON.parse(response.body) expect(parsed_response['errorDescription']).to eql 'Missing required parameter(s): [dataType]' end + it 'Unauthorized - Missing Authorization' do + response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json) + expect(response.code).to eq 401 + parsed_response = JSON.parse(response.body) + expect(parsed_response['errorDescription']).to eql('Missing Authorization header') + end + + it 'Unauthorized - Invalid Tenant ID' do + response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json, {'Authorization' => "Bearer #{@token_invalid_tenant}"}) + expect(response.code).to eq 401 + parsed_response = JSON.parse(response.body) + expect(parsed_response['errorDescription']).to eql("Unauthorized tenant access. 
Tenant 'test' is not included in the authorized scopes: .") + end + + it 'Unauthorized - No Roles' do + response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json, {'Authorization' => "Bearer #{@token_no_roles}"}) + expect(response.code).to eq 401 + parsed_response = JSON.parse(response.body) + expect(parsed_response['errorDescription']).to eql('Must have hri_data_integrator role to create a batch') + end + + it 'Unauthorized - Incorrect Roles' do + response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json, {'Authorization' => "Bearer #{@token_consumer_role_only}"}) + expect(response.code).to eq 401 + parsed_response = JSON.parse(response.body) + expect(parsed_response['errorDescription']).to eql('Must have hri_data_integrator role to create a batch') + end + + it 'Unauthorized - Invalid Audience' do + response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json, {'Authorization' => "Bearer #{@token_invalid_audience}"}) + expect(response.code).to eq 401 + parsed_response = JSON.parse(response.body) + expect(parsed_response['errorDescription']).to eql("Unauthorized tenant access. 
Tenant '#{TENANT_ID}' is not included in the authorized scopes: .") + end + end context 'PUT /tenants/{tenantId}/batches/{batchId}/action/sendComplete' do before(:all) do @record_count = { - recordCount: 1 + recordCount: 1, + metadata: { + rspec1: 'test3', + rspec2: 'test4', + rspec4: { + rspec4A: 'test4A', + rspec4B: 'test4B' + } + } + } + @batch_template = { + name: @batch_name, + dataType: DATA_TYPE, + topic: BATCH_INPUT_TOPIC, + metadata: { + rspec1: 'test1', + rspec2: 'test2', + rspec3: { + rspec3A: 'test3A', + rspec3B: 'test3B' + } + } } + @send_complete_batch_id = '-' end it 'Success' do + #Create Batch + while @send_complete_batch_id[-1] == '-' + response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json, {'Authorization' => "Bearer #{@token_integrator_role_only}"}) + expect(response.code).to eq 201 + parsed_response = JSON.parse(response.body) + @send_complete_batch_id = parsed_response['id'] + Logger.new(STDOUT).info("New Send Complete Batch Created With ID: #{@send_complete_batch_id}") + end + #Set Batch Complete - response = @hri_helper.hri_put_batch(TENANT_ID, @batch_id, 'sendComplete', @record_count) + response = @hri_helper.hri_put_batch(TENANT_ID, @send_complete_batch_id, 'sendComplete', @record_count, {'Authorization' => "Bearer #{@token_integrator_role_only}"}) expect(response.code).to eq 200 #Verify Batch Complete - response = @hri_helper.hri_get_batch(TENANT_ID, @batch_id) + response = @hri_helper.hri_get_batch(TENANT_ID, @send_complete_batch_id, {'Authorization' => "Bearer #{@token_consumer_role_only}"}) expect(response.code).to eq 200 parsed_response = JSON.parse(response.body) expect(parsed_response['status']).to eql 'completed' expect(parsed_response['endDate']).to_not be_nil #Verify Kafka Message - Timeout.timeout(10) do + Timeout.timeout(KAFKA_TIMEOUT) do + Logger.new(STDOUT).info("Waiting for a Kafka message with Batch ID: #{@send_complete_batch_id} and status: completed") @kafka_consumer.each_message do |message| 
parsed_message = JSON.parse(message.value) - if parsed_message['id'] == @batch_id + if parsed_message['id'] == @send_complete_batch_id && parsed_message['status'] == 'completed' @message_found = true expect(parsed_message['dataType']).to eql(DATA_TYPE) - expect(parsed_message['id']).to eql(@batch_id) + expect(parsed_message['id']).to eql(@send_complete_batch_id) expect(parsed_message['name']).to eql(@batch_name) - expect(parsed_message['topic']).to eql("ingest.#{TENANT_ID}.#{INTEGRATOR_ID}.in") + expect(parsed_message['topic']).to eql(BATCH_INPUT_TOPIC) expect(parsed_message['status']).to eql('completed') expect(DateTime.parse(parsed_message['startDate']).strftime("%Y-%m-%d")).to eq(Date.today.strftime("%Y-%m-%d")) expect(DateTime.parse(parsed_message['endDate']).strftime("%Y-%m-%d")).to eq(Date.today.strftime("%Y-%m-%d")) - expect(parsed_message['metadata']['rspec1']).to eql('test1') - expect(parsed_message['metadata']['rspec2']).to eql('test2') - expect(parsed_message['metadata']['rspec3']['rspec3A']).to eql('test3A') - expect(parsed_message['metadata']['rspec3']['rspec3B']).to eql('test3B') - expect(parsed_message['metadata']['rspec1']).to eql('test1') + expect(parsed_message['metadata']['rspec1']).to eql('test3') + expect(parsed_message['metadata']['rspec2']).to eql('test4') + expect(parsed_message['metadata']['rspec4']['rspec4A']).to eql('test4A') + expect(parsed_message['metadata']['rspec4']['rspec4B']).to eql('test4B') + expect(parsed_message['metadata']['rspec3']).to be_nil break end end @@ -728,59 +986,61 @@ end end - it 'Invalid Tenant ID' do - response = @hri_helper.hri_put_batch(INVALID_ID, @batch_id, 'sendComplete', @record_count) - expect(response.code).to eq 404 - parsed_response = JSON.parse(response.body) - expect(parsed_response['errorDescription']).to eql('index_not_found_exception: no such index and [action.auto_create_index] is [false]') - end - it 'Invalid Batch ID' do - response = @hri_helper.hri_put_batch(TENANT_ID, INVALID_ID, 
'sendComplete', @record_count) + response = @hri_helper.hri_put_batch(TENANT_ID, INVALID_ID, 'sendComplete', @record_count, {'Authorization' => "Bearer #{@token_all_roles}"}) expect(response.code).to eq 404 parsed_response = JSON.parse(response.body) expect(parsed_response['errorDescription']).to include('document_missing_exception') end it 'Missing Record Count' do - response = @hri_helper.hri_put_batch(TENANT_ID, INVALID_ID, 'sendComplete') + response = @hri_helper.hri_put_batch(TENANT_ID, @batch_id, 'sendComplete', nil, {'Authorization' => "Bearer #{@token_all_roles}"}) expect(response.code).to eq 400 parsed_response = JSON.parse(response.body) expect(parsed_response['errorDescription']).to eql('Missing required parameter(s): [recordCount]') end it 'Invalid Record Count' do - response = @hri_helper.hri_put_batch(TENANT_ID, INVALID_ID, 'sendComplete', {recordCount: "1"}) + response = @hri_helper.hri_put_batch(TENANT_ID, @batch_id, 'sendComplete', {recordCount: "1"}, {'Authorization' => "Bearer #{@token_all_roles}"}) expect(response.code).to eq 400 parsed_response = JSON.parse(response.body) expect(parsed_response['errorDescription']).to eql('Invalid parameter type(s): [recordCount must be a float64, got string instead.]') end it 'Conflict: Batch with a status other than started' do - #Create Batch with a status of terminated - create_batch = { - name: "#{@batch_prefix}-#{SecureRandom.uuid}", - status: 'terminated', - recordCount: 1, - dataType: DATA_TYPE, - topic: "ingest.#{TENANT_ID}.#{INTEGRATOR_ID}.in", - startDate: @start_date, - metadata: { - rspec1: 'test1', - rspec2: 'test2', - rspec3: { - rspec3A: 'test3A', - rspec3B: 'test3B' + #Create Batch + while @send_complete_batch_id[-1] == '-' + response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json, {'Authorization' => "Bearer #{@token_integrator_role_only}"}) + expect(response.code).to eq 201 + parsed_response = JSON.parse(response.body) + @send_complete_batch_id = parsed_response['id'] + 
Logger.new(STDOUT).info("New Send Complete Batch Created With ID: #{@send_complete_batch_id}") + end + + #Update Batch to Terminated Status + update_batch_script = { + script: { + source: 'ctx._source.status = params.status', + lang: 'painless', + params: { + status: 'terminated' } } }.to_json - response = @elastic.es_create_batch(TENANT_ID, create_batch) - expect(response.code).to eq 201 + response = @elastic.es_batch_update(TENANT_ID, @send_complete_batch_id, update_batch_script) + response.nil? ? (raise 'Elastic batch update did not return a response') : (expect(response.code).to eq 200) parsed_response = JSON.parse(response.body) - @send_complete_batch_id = parsed_response['_id'] + expect(parsed_response['result']).to eql('updated') + Logger.new(STDOUT).info('Batch status updated to "terminated"') + + #Verify Batch Status Updated + response = @elastic.es_get_batch(TENANT_ID, @send_complete_batch_id) + response.nil? ? (raise 'Elastic get batch did not return a response') : (expect(response.code).to eq 200) + parsed_response = JSON.parse(response.body) + expect(parsed_response['_source']['status']).to eql('terminated') #Attempt to complete batch - response = @hri_helper.hri_put_batch(TENANT_ID, @send_complete_batch_id, 'sendComplete', @record_count) + response = @hri_helper.hri_put_batch(TENANT_ID, @send_complete_batch_id, 'sendComplete', @record_count, {'Authorization' => "Bearer #{@token_integrator_role_only}"}) expect(response.code).to eq 409 parsed_response = JSON.parse(response.body) expect(parsed_response['errorDescription']).to eql "Batch status was not updated to 'completed', batch is already in 'terminated' state" @@ -791,53 +1051,155 @@ end it 'Conflict: Batch that already has a completed status' do - #Create Batch with a status of completed - create_batch = { - name: "#{@batch_prefix}-#{SecureRandom.uuid}", - status: 'completed', - recordCount: 1, - dataType: DATA_TYPE, - topic: "ingest.#{TENANT_ID}.#{INTEGRATOR_ID}.in", - startDate: @start_date, - 
metadata: { - rspec1: 'test1', - rspec2: 'test2', - rspec3: { - rspec3A: 'test3A', - rspec3B: 'test3B' + #Create Batch + while @send_complete_batch_id[-1] == '-' + response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json, {'Authorization' => "Bearer #{@token_integrator_role_only}"}) + expect(response.code).to eq 201 + parsed_response = JSON.parse(response.body) + @send_complete_batch_id = parsed_response['id'] + Logger.new(STDOUT).info("New Send Complete Batch Created With ID: #{@send_complete_batch_id}") + end + + #Update Batch to Completed Status + update_batch_script = { + script: { + source: 'ctx._source.status = params.status', + lang: 'painless', + params: { + status: 'completed' } } }.to_json - response = @elastic.es_create_batch(TENANT_ID, create_batch) - expect(response.code).to eq 201 + response = @elastic.es_batch_update(TENANT_ID, @send_complete_batch_id, update_batch_script) + response.nil? ? (raise 'Elastic batch update did not return a response') : (expect(response.code).to eq 200) + parsed_response = JSON.parse(response.body) + expect(parsed_response['result']).to eql('updated') + Logger.new(STDOUT).info('Batch status updated to "completed"') + + #Verify Batch Status Updated + response = @elastic.es_get_batch(TENANT_ID, @send_complete_batch_id) + response.nil? ? 
(raise 'Elastic get batch did not return a response') : (expect(response.code).to eq 200) parsed_response = JSON.parse(response.body) - @send_complete_batch_id = parsed_response['_id'] + expect(parsed_response['_source']['status']).to eql('completed') #Attempt to terminate batch - response = @hri_helper.hri_put_batch(TENANT_ID, @send_complete_batch_id, 'sendComplete', @record_count) + response = @hri_helper.hri_put_batch(TENANT_ID, @send_complete_batch_id, 'sendComplete', @record_count, {'Authorization' => "Bearer #{@token_integrator_role_only}"}) expect(response.code).to eq 409 parsed_response = JSON.parse(response.body) expect(parsed_response['errorDescription']).to eql "Batch status was not updated to 'completed', batch is already in 'completed' state" + end - #Delete batch - response = @elastic.es_delete_batch(TENANT_ID, @send_complete_batch_id) - expect(response.code).to eq 200 + it 'Integrator ID can not update batches created with a different Integrator ID' do + #Create Batch + while @send_complete_batch_id[-1] == '-' + response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json, {'Authorization' => "Bearer #{@token_integrator_role_only}"}) + expect(response.code).to eq 201 + parsed_response = JSON.parse(response.body) + @send_complete_batch_id = parsed_response['id'] + Logger.new(STDOUT).info("New Send Complete Batch Created With ID: #{@send_complete_batch_id}") + end + + #Modify Batch Integrator ID + update_batch_script = { + script: { + source: 'ctx._source.integratorId = params.integratorId', + lang: 'painless', + params: { + integratorId: 'modified-integrator-id' + } + } + }.to_json + response = @elastic.es_batch_update(TENANT_ID, @send_complete_batch_id, update_batch_script) + response.nil? ? 
(raise 'Elastic batch update did not return a response') : (expect(response.code).to eq 200) + parsed_response = JSON.parse(response.body) + expect(parsed_response['result']).to eql('updated') + Logger.new(STDOUT).info('Batch Integrator ID updated to "modified-integrator-id"') + + #Verify Integrator ID Modified + response = @elastic.es_get_batch(TENANT_ID, @send_complete_batch_id) + response.nil? ? (raise 'Elastic get batch did not return a response') : (expect(response.code).to eq 200) + parsed_response = JSON.parse(response.body) + expect(parsed_response['_source']['integratorId']).to eql('modified-integrator-id') + + #Verify Batch Not Updated With Different Integrator ID + response = @hri_helper.hri_put_batch(TENANT_ID, @send_complete_batch_id, 'sendComplete', @record_count, {'Authorization' => "Bearer #{@token_integrator_role_only}"}) + expect(response.code).to eq 401 + parsed_response = JSON.parse(response.body) + expect(parsed_response['errorDescription']).to include("but owned by 'modified-integrator-id") + end + + it 'Unauthorized - Missing Authorization' do + response = @hri_helper.hri_put_batch(TENANT_ID, @batch_id, 'sendComplete', @record_count) + expect(response.code).to eq 401 + parsed_response = JSON.parse(response.body) + expect(parsed_response['errorDescription']).to eql('Missing Authorization header') + end + + it 'Unauthorized - Invalid Tenant ID' do + response = @hri_helper.hri_put_batch(TENANT_ID, @batch_id, 'sendComplete', @record_count, {'Authorization' => "Bearer #{@token_invalid_tenant}"}) + expect(response.code).to eq 401 + parsed_response = JSON.parse(response.body) + expect(parsed_response['errorDescription']).to eql("Unauthorized tenant access. 
Tenant 'test' is not included in the authorized scopes: .") + end + + it 'Unauthorized - No Roles' do + #Create Batch + while @send_complete_batch_id[-1] == '-' + response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json, {'Authorization' => "Bearer #{@token_integrator_role_only}"}) + expect(response.code).to eq 201 + parsed_response = JSON.parse(response.body) + @send_complete_batch_id = parsed_response['id'] + Logger.new(STDOUT).info("New Send Complete Batch Created With ID: #{@send_complete_batch_id}") + end + + response = @hri_helper.hri_put_batch(TENANT_ID, @send_complete_batch_id, 'sendComplete', @record_count, {'Authorization' => "Bearer #{@token_no_roles}"}) + expect(response.code).to eq 401 + parsed_response = JSON.parse(response.body) + expect(parsed_response['errorDescription']).to eql('Must have hri_data_integrator role to update a batch') + end + + it 'Unauthorized - Consumer Role Can Not Update Batch Status' do + #Create Batch + while @send_complete_batch_id[-1] == '-' + response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json, {'Authorization' => "Bearer #{@token_integrator_role_only}"}) + expect(response.code).to eq 201 + parsed_response = JSON.parse(response.body) + @send_complete_batch_id = parsed_response['id'] + Logger.new(STDOUT).info("New Send Complete Batch Created With ID: #{@send_complete_batch_id}") + end + + response = @hri_helper.hri_put_batch(TENANT_ID, @send_complete_batch_id, 'sendComplete', @record_count, {'Authorization' => "Bearer #{@token_consumer_role_only}"}) + expect(response.code).to eq 401 + parsed_response = JSON.parse(response.body) + expect(parsed_response['errorDescription']).to eql('Must have hri_data_integrator role to update a batch') + end + + it 'Unauthorized - Invalid Audience' do + #Create Batch + while @send_complete_batch_id[-1] == '-' + response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json, {'Authorization' => "Bearer #{@token_integrator_role_only}"}) + 
expect(response.code).to eq 201 + parsed_response = JSON.parse(response.body) + @send_complete_batch_id = parsed_response['id'] + Logger.new(STDOUT).info("New Send Complete Batch Created With ID: #{@send_complete_batch_id}") + end + + response = @hri_helper.hri_put_batch(TENANT_ID, @send_complete_batch_id, 'sendComplete', @record_count, {'Authorization' => "Bearer #{@token_invalid_audience}"}) + expect(response.code).to eq 401 + parsed_response = JSON.parse(response.body) + expect(parsed_response['errorDescription']).to eql("Unauthorized tenant access. Tenant '#{TENANT_ID}' is not included in the authorized scopes: .") end end context 'PUT /tenants/{tenantId}/batches/{batchId}/action/terminate' do - it 'Success' do - #Create Batch + before(:all) do @terminate_batch_name = "#{@batch_prefix}-#{SecureRandom.uuid}" - create_batch = { + @batch_template = { name: @terminate_batch_name, - status: STATUS, - recordCount: 1, dataType: DATA_TYPE, - topic: "ingest.#{TENANT_ID}.#{INTEGRATOR_ID}.in", - startDate: @start_date, + topic: BATCH_INPUT_TOPIC, metadata: { rspec1: 'test1', rspec2: 'test2', @@ -846,41 +1208,60 @@ rspec3B: 'test3B' } } - }.to_json - response = @elastic.es_create_batch(TENANT_ID, create_batch) - expect(response.code).to eq 201 - parsed_response = JSON.parse(response.body) - @terminate_batch_id = parsed_response['_id'] + } + @terminate_metadata = { + metadata: { + rspec1: 'test3', + rspec2: 'test4', + rspec4: { + rspec4A: 'test4A', + rspec4B: 'test4B' + } + } + } + @terminate_batch_id = '-' + end + + it 'Success' do + #Create Batch + while @terminate_batch_id[-1] == '-' + response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json, {'Authorization' => "Bearer #{@token_integrator_role_only}"}) + expect(response.code).to eq 201 + parsed_response = JSON.parse(response.body) + @terminate_batch_id = parsed_response['id'] + Logger.new(STDOUT).info("New Terminate Batch Created With ID: #{@terminate_batch_id}") + end #Terminate Batch - response = 
@hri_helper.hri_put_batch(TENANT_ID, @terminate_batch_id, 'terminate') + response = @hri_helper.hri_put_batch(TENANT_ID, @terminate_batch_id, 'terminate', @terminate_metadata, {'Authorization' => "Bearer #{@token_integrator_role_only}"}) expect(response.code).to eq 200 #Verify Batch Terminated - response = @hri_helper.hri_get_batch(TENANT_ID, @terminate_batch_id) + response = @hri_helper.hri_get_batch(TENANT_ID, @terminate_batch_id, {'Authorization' => "Bearer #{@token_consumer_role_only}"}) expect(response.code).to eq 200 parsed_response = JSON.parse(response.body) expect(parsed_response['status']).to eql 'terminated' expect(parsed_response['endDate']).to_not be_nil #Verify Kafka Message - Timeout.timeout(10) do + Timeout.timeout(KAFKA_TIMEOUT) do + Logger.new(STDOUT).info("Waiting for a Kafka message with Batch ID: #{@terminate_batch_id} and status: terminated") @kafka_consumer.each_message do |message| parsed_message = JSON.parse(message.value) - if parsed_message['id'] == @terminate_batch_id + if parsed_message['id'] == @terminate_batch_id && parsed_message['status'] == 'terminated' @message_found = true expect(parsed_message['dataType']).to eql(DATA_TYPE) expect(parsed_message['id']).to eql(@terminate_batch_id) expect(parsed_message['name']).to eql(@terminate_batch_name) - expect(parsed_message['topic']).to eql("ingest.#{TENANT_ID}.#{INTEGRATOR_ID}.in") + expect(parsed_message['topic']).to eql(BATCH_INPUT_TOPIC) expect(parsed_message['status']).to eql('terminated') expect(DateTime.parse(parsed_message['startDate']).strftime("%Y-%m-%d")).to eq(Date.today.strftime("%Y-%m-%d")) expect(DateTime.parse(parsed_message['endDate']).strftime("%Y-%m-%d")).to eq(Date.today.strftime("%Y-%m-%d")) - expect(parsed_message['metadata']['rspec1']).to eql('test1') - expect(parsed_message['metadata']['rspec2']).to eql('test2') - expect(parsed_message['metadata']['rspec3']['rspec3A']).to eql('test3A') - expect(parsed_message['metadata']['rspec3']['rspec3B']).to eql('test3B') - 
expect(parsed_message['metadata']['rspec1']).to eql('test1') + expect(parsed_message['metadata']['rspec1']).to eql('test3') + expect(parsed_message['metadata']['rspec2']).to eql('test4') + expect(parsed_message['metadata']['rspec4']['rspec4A']).to eql('test4A') + expect(parsed_message['metadata']['rspec4']['rspec4B']).to eql('test4B') + expect(parsed_message['metadata']['rspec3']).to be_nil break end end @@ -888,45 +1269,47 @@ end end - it 'Invalid Tenant ID' do - response = @hri_helper.hri_put_batch(INVALID_ID, @batch_id, 'terminate') - expect(response.code).to eq 404 - parsed_response = JSON.parse(response.body) - expect(parsed_response['errorDescription']).to eql('index_not_found_exception: no such index and [action.auto_create_index] is [false]') - end - it 'Invalid Batch ID' do - response = @hri_helper.hri_put_batch(TENANT_ID, INVALID_ID, 'terminate') + response = @hri_helper.hri_put_batch(TENANT_ID, INVALID_ID, 'terminate', nil, {'Authorization' => "Bearer #{@token_all_roles}"}) expect(response.code).to eq 404 parsed_response = JSON.parse(response.body) expect(parsed_response['errorDescription']).to include('document_missing_exception') end it 'Conflict: Batch with a status other than started' do - #Create Batch with a status of completed - create_batch = { - name: "#{@batch_prefix}-#{SecureRandom.uuid}", - status: 'completed', - recordCount: 1, - dataType: DATA_TYPE, - topic: "ingest.#{TENANT_ID}.#{INTEGRATOR_ID}.in", - startDate: @start_date, - metadata: { - rspec1: 'test1', - rspec2: 'test2', - rspec3: { - rspec3A: 'test3A', - rspec3B: 'test3B' + #Create Batch + while @terminate_batch_id[-1] == '-' + response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json, {'Authorization' => "Bearer #{@token_integrator_role_only}"}) + expect(response.code).to eq 201 + parsed_response = JSON.parse(response.body) + @terminate_batch_id = parsed_response['id'] + Logger.new(STDOUT).info("New Terminate Batch Created With ID: #{@terminate_batch_id}") + end + + 
#Update Batch to Completed Status + update_batch_script = { + script: { + source: 'ctx._source.status = params.status', + lang: 'painless', + params: { + status: 'completed' } } }.to_json - response = @elastic.es_create_batch(TENANT_ID, create_batch) - expect(response.code).to eq 201 + response = @elastic.es_batch_update(TENANT_ID, @terminate_batch_id, update_batch_script) + response.nil? ? (raise 'Elastic batch update did not return a response') : (expect(response.code).to eq 200) + parsed_response = JSON.parse(response.body) + expect(parsed_response['result']).to eql('updated') + Logger.new(STDOUT).info('Batch status updated to "completed"') + + #Verify Batch Status Updated + response = @elastic.es_get_batch(TENANT_ID, @terminate_batch_id) + response.nil? ? (raise 'Elastic get batch did not return a response') : (expect(response.code).to eq 200) parsed_response = JSON.parse(response.body) - @terminate_batch_id = parsed_response['_id'] + expect(parsed_response['_source']['status']).to eql('completed') #Attempt to terminate batch - response = @hri_helper.hri_put_batch(TENANT_ID, @terminate_batch_id, 'terminate') + response = @hri_helper.hri_put_batch(TENANT_ID, @terminate_batch_id, 'terminate', nil, {'Authorization' => "Bearer #{@token_integrator_role_only}"}) expect(response.code).to eq 409 parsed_response = JSON.parse(response.body) expect(parsed_response['errorDescription']).to eql "Batch status was not updated to 'terminated', batch is already in 'completed' state" @@ -937,37 +1320,143 @@ end it 'Conflict: Batch that already has a terminated status' do - #Create Batch with a status of terminated - create_batch = { - name: "#{@batch_prefix}-#{SecureRandom.uuid}", - status: 'terminated', - recordCount: 1, - dataType: DATA_TYPE, - topic: "ingest.#{TENANT_ID}.#{INTEGRATOR_ID}.in", - startDate: @start_date, - metadata: { - rspec1: 'test1', - rspec2: 'test2', - rspec3: { - rspec3A: 'test3A', - rspec3B: 'test3B' + #Create Batch + while @terminate_batch_id[-1] == '-' + 
response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json, {'Authorization' => "Bearer #{@token_all_roles}"}) + expect(response.code).to eq 201 + parsed_response = JSON.parse(response.body) + @terminate_batch_id = parsed_response['id'] + Logger.new(STDOUT).info("New Terminate Batch Created With ID: #{@terminate_batch_id}") + end + + #Update Batch to Completed Status + update_batch_script = { + script: { + source: 'ctx._source.status = params.status', + lang: 'painless', + params: { + status: 'terminated' } } }.to_json - response = @elastic.es_create_batch(TENANT_ID, create_batch) - expect(response.code).to eq 201 + response = @elastic.es_batch_update(TENANT_ID, @terminate_batch_id, update_batch_script) + response.nil? ? (raise 'Elastic batch update did not return a response') : (expect(response.code).to eq 200) + parsed_response = JSON.parse(response.body) + expect(parsed_response['result']).to eql('updated') + Logger.new(STDOUT).info('Batch status updated to "terminated"') + + #Verify Batch Status Updated + response = @elastic.es_get_batch(TENANT_ID, @terminate_batch_id) + response.nil? ? 
(raise 'Elastic get batch did not return a response') : (expect(response.code).to eq 200) parsed_response = JSON.parse(response.body) - @terminate_batch_id = parsed_response['_id'] + expect(parsed_response['_source']['status']).to eql('terminated') #Attempt to terminate batch - response = @hri_helper.hri_put_batch(TENANT_ID, @terminate_batch_id, 'terminate') + response = @hri_helper.hri_put_batch(TENANT_ID, @terminate_batch_id, 'terminate', nil, {'Authorization' => "Bearer #{@token_all_roles}"}) expect(response.code).to eq 409 parsed_response = JSON.parse(response.body) expect(parsed_response['errorDescription']).to eql "Batch status was not updated to 'terminated', batch is already in 'terminated' state" + end - #Delete batch - response = @elastic.es_delete_batch(TENANT_ID, @terminate_batch_id) - expect(response.code).to eq 200 + it 'Integrator ID can not update batches created with a different Integrator ID' do + #Create Batch + while @terminate_batch_id[-1] == '-' + response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json, {'Authorization' => "Bearer #{@token_integrator_role_only}"}) + expect(response.code).to eq 201 + parsed_response = JSON.parse(response.body) + @terminate_batch_id = parsed_response['id'] + Logger.new(STDOUT).info("New Terminate Batch Created With ID: #{@terminate_batch_id}") + end + + #Modify Batch Integrator ID + update_batch_script = { + script: { + source: 'ctx._source.integratorId = params.integratorId', + lang: 'painless', + params: { + integratorId: 'modified-integrator-id' + } + } + }.to_json + response = @elastic.es_batch_update(TENANT_ID, @terminate_batch_id, update_batch_script) + response.nil? ? 
(raise 'Elastic batch update did not return a response') : (expect(response.code).to eq 200) + parsed_response = JSON.parse(response.body) + expect(parsed_response['result']).to eql('updated') + Logger.new(STDOUT).info('Batch Integrator ID updated to "modified-integrator-id"') + + #Verify Integrator ID Modified + response = @elastic.es_get_batch(TENANT_ID, @terminate_batch_id) + response.nil? ? (raise 'Elastic get batch did not return a response') : (expect(response.code).to eq 200) + parsed_response = JSON.parse(response.body) + expect(parsed_response['_source']['integratorId']).to eql('modified-integrator-id') + + #Verify Batch Not Updated With Different Integrator ID + response = @hri_helper.hri_put_batch(TENANT_ID, @terminate_batch_id, 'terminate', nil, {'Authorization' => "Bearer #{@token_integrator_role_only}"}) + expect(response.code).to eq 401 + parsed_response = JSON.parse(response.body) + expect(parsed_response['errorDescription']).to include("but owned by 'modified-integrator-id") + end + + it 'Unauthorized - Missing Authorization' do + response = @hri_helper.hri_put_batch(TENANT_ID, @batch_id, 'terminate', nil) + expect(response.code).to eq 401 + parsed_response = JSON.parse(response.body) + expect(parsed_response['errorDescription']).to eql('Missing Authorization header') + end + + it 'Unauthorized - Invalid Tenant ID' do + response = @hri_helper.hri_put_batch(TENANT_ID, @batch_id, 'terminate', nil, {'Authorization' => "Bearer #{@token_invalid_tenant}"}) + expect(response.code).to eq 401 + parsed_response = JSON.parse(response.body) + expect(parsed_response['errorDescription']).to eql("Unauthorized tenant access. 
Tenant 'test' is not included in the authorized scopes: .") + end + + it 'Unauthorized - No Roles' do + #Create Batch + while @terminate_batch_id[-1] == '-' + response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json, {'Authorization' => "Bearer #{@token_integrator_role_only}"}) + expect(response.code).to eq 201 + parsed_response = JSON.parse(response.body) + @terminate_batch_id = parsed_response['id'] + Logger.new(STDOUT).info("New Terminate Batch Created With ID: #{@terminate_batch_id}") + end + + response = @hri_helper.hri_put_batch(TENANT_ID, @terminate_batch_id, 'terminate', nil, {'Authorization' => "Bearer #{@token_no_roles}"}) + expect(response.code).to eq 401 + parsed_response = JSON.parse(response.body) + expect(parsed_response['errorDescription']).to eql('Must have hri_data_integrator role to update a batch') + end + + it 'Unauthorized - Consumer Role Can Not Update Batch Status' do + #Create Batch + while @terminate_batch_id[-1] == '-' + response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json, {'Authorization' => "Bearer #{@token_integrator_role_only}"}) + expect(response.code).to eq 201 + parsed_response = JSON.parse(response.body) + @terminate_batch_id = parsed_response['id'] + Logger.new(STDOUT).info("New Terminate Batch Created With ID: #{@terminate_batch_id}") + end + + response = @hri_helper.hri_put_batch(TENANT_ID, @terminate_batch_id, 'terminate', nil, {'Authorization' => "Bearer #{@token_consumer_role_only}"}) + expect(response.code).to eq 401 + parsed_response = JSON.parse(response.body) + expect(parsed_response['errorDescription']).to eql('Must have hri_data_integrator role to update a batch') + end + + it 'Unauthorized - Invalid Audience' do + #Create Batch + while @terminate_batch_id[-1] == '-' + response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json, {'Authorization' => "Bearer #{@token_integrator_role_only}"}) + expect(response.code).to eq 201 + parsed_response = JSON.parse(response.body) + 
@terminate_batch_id = parsed_response['id'] + Logger.new(STDOUT).info("New Terminate Batch Created With ID: #{@terminate_batch_id}") + end + + response = @hri_helper.hri_put_batch(TENANT_ID, @terminate_batch_id, 'terminate', nil, {'Authorization' => "Bearer #{@token_invalid_audience}"}) + expect(response.code).to eq 401 + parsed_response = JSON.parse(response.body) + expect(parsed_response['errorDescription']).to eql("Unauthorized tenant access. Tenant '#{TENANT_ID}' is not included in the authorized scopes: .") end end @@ -975,7 +1464,8 @@ context 'End to End Test Using COS Object Data' do it 'Create Batch, Produce Kafka Message with COS Object Data, Read Kafka Message, and Send Complete' do - @input_data = @cos_helper.get_object_data('spark-output-2', 'dev_test_of_1/f_drug_clm/schema.json') + @end_to_end_batch_id = '-' + @input_data = COSHelper.new.get_object_data('spark-output-2', 'dev_test_of_2/f_drug_clm/schema.json') #Create Batch @batch_name = "#{@batch_prefix}-#{SecureRandom.uuid}" @@ -984,7 +1474,7 @@ status: STATUS, recordCount: 1, dataType: DATA_TYPE, - topic: "ingest.#{TENANT_ID}.#{INTEGRATOR_ID}.in", + topic: BATCH_INPUT_TOPIC, startDate: Date.today, endDate: Date.today + 1, metadata: { @@ -996,17 +1486,20 @@ } } } - response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json) - expect(response.code).to eq 201 - parsed_response = JSON.parse(response.body) - @end_to_end_batch_id = parsed_response['id'] - Logger.new(STDOUT).info("End to End: Batch Created With ID: #{@end_to_end_batch_id}") + while @end_to_end_batch_id[-1] == '-' + response = @hri_helper.hri_post_batch(TENANT_ID, @batch_template.to_json, {'Authorization' => "Bearer #{@token_all_roles}"}) + expect(response.code).to eq 201 + parsed_response = JSON.parse(response.body) + @end_to_end_batch_id = parsed_response['id'] + Logger.new(STDOUT).info("End to End: Batch Created With ID: #{@end_to_end_batch_id}") + end #Verify Kafka Message - Timeout.timeout(10) do + 
Timeout.timeout(KAFKA_TIMEOUT) do + Logger.new(STDOUT).info("Waiting for a Kafka message with Batch ID: #{@end_to_end_batch_id} and status: started") @kafka_consumer.each_message do |message| parsed_message = JSON.parse(message.value) - if parsed_message['id'] == @end_to_end_batch_id + if parsed_message['id'] == @end_to_end_batch_id && parsed_message['status'] == 'started' @message_found = true expect(parsed_message['id']).to eql(@end_to_end_batch_id) break @@ -1021,7 +1514,8 @@ Logger.new(STDOUT).info('End to End: Kafka message containing COS object data successfully written') #Verify Kafka Message - Timeout.timeout(10) do + Timeout.timeout(KAFKA_TIMEOUT) do + Logger.new(STDOUT).info("Waiting for a Kafka message with Batch ID: #{@end_to_end_batch_id} and status: started") @kafka_consumer.each_message do |message| unless message.headers.empty? if message.headers['batchId'] == @end_to_end_batch_id @@ -1041,18 +1535,19 @@ @record_count = { recordCount: 1 } - response = @hri_helper.hri_put_batch(TENANT_ID, @end_to_end_batch_id, 'sendComplete', @record_count) + response = @hri_helper.hri_put_batch(TENANT_ID, @end_to_end_batch_id, 'sendComplete', @record_count, {'Authorization' => "Bearer #{@token_all_roles}"}) expect(response.code).to eq 200 #Verify Batch Complete - response = @hri_helper.hri_get_batch(TENANT_ID, @end_to_end_batch_id) + response = @hri_helper.hri_get_batch(TENANT_ID, @end_to_end_batch_id, {'Authorization' => "Bearer #{@token_all_roles}"}) expect(response.code).to eq 200 parsed_response = JSON.parse(response.body) expect(parsed_response['status']).to eql 'completed' Logger.new(STDOUT).info("End to End: Status of batch #{@end_to_end_batch_id} updated to 'completed'") #Verify Kafka Message - Timeout.timeout(10) do + Timeout.timeout(KAFKA_TIMEOUT) do + Logger.new(STDOUT).info("Waiting for a Kafka message with Batch ID: #{@end_to_end_batch_id} and status: completed") @kafka_consumer.each_message do |message| parsed_message = JSON.parse(message.value) if 
parsed_message['id'] == @end_to_end_batch_id @@ -1064,9 +1559,8 @@ expect(@message_found).to be true end Logger.new(STDOUT).info("End to End: Kafka message received for batch #{@end_to_end_batch_id} sendComplete") - end end -end \ No newline at end of file +end diff --git a/updateApi.sh b/updateApi.sh deleted file mode 100755 index b259ba5..0000000 --- a/updateApi.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash - -ibmcloud fn api create /hri /tenants/{tenantId}/batches post hri_mgmt_api/create_batch --response-type http -ibmcloud fn api create /hri /tenants/{tenantId}/batches get hri_mgmt_api/get_batches --response-type http -ibmcloud fn api create /hri /tenants/{tenantId}/batches/{batchId} get hri_mgmt_api/get_batch_by_id --response-type http -ibmcloud fn api create /hri /tenants/{tenantId}/batches/{batchId}/action/sendComplete put hri_mgmt_api/send_complete --response-type http -ibmcloud fn api create /hri /tenants/{tenantId}/batches/{batchId}/action/terminate put hri_mgmt_api/terminate_batch --response-type http -ibmcloud fn api create /hri /healthcheck get hri_mgmt_api/healthcheck --response-type http -ibmcloud fn api create /hri /tenants/{tenantId} post hri_mgmt_api/create_tenant --response-type http -ibmcloud fn api create /hri /tenants/{tenantId} delete hri_mgmt_api/delete_tenant --response-type http -ibmcloud fn api create /hri /tenants get hri_mgmt_api/get_tenants --response-type http -ibmcloud fn api create /hri /tenants/{tenantId} get hri_mgmt_api/get_tenant_by_id --response-type http -ibmcloud fn api create /hri /tenants/{tenantId}/streams get hri_mgmt_api/get_streams --response-type http -ibmcloud fn api create /hri /tenants/{tenantId}/streams/{streamId} post hri_mgmt_api/create_stream --response-type http -ibmcloud fn api create /hri /tenants/{tenantId}/streams/{streamId} delete hri_mgmt_api/delete_stream --response-type http \ No newline at end of file