From 040fd68a332e0fac12c3f477a1126aa79cdfa866 Mon Sep 17 00:00:00 2001 From: Dmytro Kondriukov Date: Wed, 16 Dec 2020 12:06:16 +0200 Subject: [PATCH 1/3] Update README.md add example config and run tests --- README.md | 192 +++++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 191 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 9a7f2d9c3..f32d94734 100644 --- a/README.md +++ b/README.md @@ -30,10 +30,133 @@ datasets (configured in [pom.xml](framework/pom.xml)). ``` git clone git@github.com:mapr/drill-test-framework.git -cd drill-test-framework +cd drill-test-framework/conf +cp drillTestConfig.properties.example drillTestConfig.properties +nano drillTestConfig.properties bin/build_framework -Pdownload ``` +## Configuring drillTestConfig.properties for run on singlenode cluster +

+
+
+ +just uncomment property +

+# Drillbits
+########################################
+DRILLBITS=localhost
+########################################
+
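+To confirm that a Drillbit is actually running on that host before pointing the framework at it, you can check its status (a sketch; assumes Drill 1.16.1 installed under /opt/mapr/drill, matching the DRILL_HOME example below):
+```
+# prints the PID of the running Drillbit, or reports that none is running
+/opt/mapr/drill/drill-1.16.1/bin/drillbit.sh status
+```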
+ +just uncomment property +

+# Number of Drillbits
+########################################
+NUMBER_OF_CLUSTER_NODES=1
+########################################
+
+ +set path to drill install dir +

+# Drill home
+########################################
+DRILL_HOME=/opt/mapr/drill/drill-1.16.1
+########################################
+
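+If you are unsure which version is installed on the node, the install directory name shows it (a sketch):
+```
+ls /opt/mapr/drill/
+# e.g. drill-1.16.1
+```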
+ + +just uncomment property +

+# Drill test data dir
+########################################
+DRILL_TEST_DATA_DIR=framework/resources
+########################################
+# Drill actual resultset dir
+########################################
+DRILL_OUTPUT_DIR=drill-output
+########################################
+# Drill report dir
+########################################
+DRILL_REPORTS_DIR=drill-reports
+DRILL_REPORTS_DFS_DIR=/drill/reports
+########################################
+
+ +Set the location of the Hadoop system on which Drill is installed: +

+########################################
+HADOOP_INSTALL_LOC=/opt/mapr
+########################################
+
+ +Just uncomment the property: +

+########################################
+DRILL_TESTDATA=/drill/testdata
+########################################
+
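+The paths `/drill/testdata` and `/drill/reports` above refer to the distributed file system rather than the local disk. A quick check that they exist (a sketch; the framework's data generation may create them for you):
+```
+hadoop fs -ls /drill/testdata /drill/reports
+# create them if they are missing
+hadoop fs -mkdir -p /drill/testdata /drill/reports
+```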
+ + +set zookeeper. For example - if you used singlenode cluster, with ip 192.168.121.91, set: +

+########################################
+ZOOKEEPERS=192.168.121.91:5181/drill/drillbits1
+########################################
+
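+A quick connectivity check against the same quorum (a sketch; uses sqlline, which ships with Drill, and MapR's default ZooKeeper port 5181):
+```
+/opt/mapr/drill/drill-1.16.1/bin/sqlline -u "jdbc:drill:zk=192.168.121.91:5181/drill/drillbits1"
+```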
+ +Just uncomment the properties: +

+# DRILL JDBC driver
+########################################
+# MapR DRILL JDBC driver
+JDBC_DRIVER=com.mapr.drill.jdbc41.Driver
+# Apache DRILL JDBC driver
+#JDBC_DRIVER=org.apache.drill.jdbc.Driver
+########################################
+# JDBC DRIVER CLASSPATH
+########################################
+#JDBC_DRIVER_CP=/root/DrillJDBC41/*
+JDBC_DRIVER_CP=${DRILL_HOME}/jars/jdbc-driver/*
+########################################
+
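+To confirm that the default classpath really contains the driver jar (a sketch):
+```
+ls ${DRILL_HOME}/jars/jdbc-driver/
+```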
+ + +Set the JDBC connection string. For example, if you use a single-node cluster with IP 192.168.121.91, set: +

+########################################
+CONNECTION_STRING=jdbc:drill:zk\=192.168.121.91:5181/zk_root/cluster_id
+########################################
+
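+The `zk_root/cluster_id` part has to match your cluster. On the Drill node, the cluster id (and any overridden ZooKeeper root) can be read from drill-override.conf (a sketch):
+```
+cat ${DRILL_HOME}/conf/drill-override.conf
+```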
+ +Just uncomment the property: +

+# Drill storage plugin server
+########################################
+DRILL_STORAGE_PLUGIN_SERVER=192.168.121.91
+########################################
+
+ +Set the remaining properties, for example: +

+USERNAME=mapr
+PASSWORD=mapr
+export ZOOKEEPER_HOST=192.168.121.91
+export CLUSTER_NAME=Drill-ATS
+export ZK_ROOT=drill-ats-build
+export DRILL_USER=root
+export CLUSH_GROUP=all
+export DRILL_STORAGE_PLUGIN_SERVER
+export DRILL_HOME=${DRILL_HOME}
+export M2_HOME=/usr/share/maven
+export M2=$M2_HOME/bin
+export PATH=.:$M2:$PATH
+export DRILL_VERSION=$(grep 'git.build.version' ${DRILL_HOME}/git.properties | tr '=' '\n' | tail -1)
+export HADOOP_VERSION=2.7.4.0-mapr-700
+export USERNAME
+export PASSWORD
+
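+The `export` lines above imply the file is evaluated as shell, so a quick sanity check is to source it and echo a couple of the derived values (a sketch; run from the repository root):
+```
+bash -c 'source conf/drillTestConfig.properties && echo "${DRILL_VERSION} ${ZOOKEEPERS}"'
+```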
+ If you've already downloaded the datasets previously, you can simply skip the download. ## Execute Tests @@ -62,6 +185,70 @@ Example: Use this option to provide the usage of the command, which includes additional options +## List of used tests + +

+bin/run_tests -s Functional/aggregates -g functional -t 180 -d
+bin/run_tests -s Functional/amplab -g functional -t 180 -d
+bin/run_tests -s Functional/case_expr -g functional -t 180 -d
+bin/run_tests -s Functional/complex -g functional -t 180 -d
+bin/run_tests -s Functional/convert -g functional -t 180 -d
+bin/run_tests -s Functional/crossjoin -g functional -t 180 -d
+bin/run_tests -s Functional/cross-sources -g functional -t 180 -d
+bin/run_tests -s Functional/ctas -g functional -t 180 -d
+bin/run_tests -s Functional/data-shapes -g functional -t 180 -d
+bin/run_tests -s Functional/datetime -g functional -t 180 -d
+bin/run_tests -s Functional/date_trunc -g functional -t 180 -d
+bin/run_tests -s Functional/decimal -g functional -t 180 -d
+bin/run_tests -s Functional/decimal_avro -g functional -t 180 -d
+bin/run_tests -s Functional/decimal_negative -g functional -t 180 -d
+bin/run_tests -s Functional/decimal_parquet -g functional -t 180 -d
+bin/run_tests -s Functional/drill_fragments -g functional -t 180 -d
+bin/run_tests -s Functional/droptable -g functional -t 180 -d
+bin/run_tests -s Functional/explicit_cast -g functional -t 180 -d
+bin/run_tests -s Functional/filter -g functional -t 180 -d
+bin/run_tests -s Functional/flatten_operators -g functional -t 180 -d
+bin/run_tests -s Functional/functions -g functional -t 180 -d
+bin/run_tests -s Functional/group_by_alias -g functional -t 180 -d
+bin/run_tests -s Functional/hbase -g functional -t 180 -d
+bin/run_tests -s Functional/hive -g functional -t 180 -d
+bin/run_tests -s Functional/hive_functions_nonhivedata -g functional -t 180 -d
+bin/run_tests -s Functional/identifiers -g functional -t 180 -d
+bin/run_tests -s Functional/impersonation -g functional -t 180 -d
+bin/run_tests -s Functional/implicit_cast -g functional -t 180 -d
+bin/run_tests -s Functional/int96 -g functional -t 180 -d
+bin/run_tests -s Functional/interpreted_partition_pruning -g functional -t 180 -d
+bin/run_tests -s Functional/joins -g functional -t 180 -d
+bin/run_tests -s Functional/json -g functional -t 180 -d
+bin/run_tests -s Functional/limit0 -g functional -t 180 -d
+bin/run_tests -s Functional/metadata_caching -g functional -t 180 -d
+bin/run_tests -s Functional/min_max_dir -g functional -t 180 -d
+bin/run_tests -s Functional/misc -g functional -t 180 -d
+bin/run_tests -s Functional/morefiles -g functional -t 180 -d
+bin/run_tests -s Functional/no_result_set -g functional -t 180 -d
+bin/run_tests -s Functional/orderby -g functional -t 180 -d
+bin/run_tests -s Functional/p1tests -g functional -t 180 -d
+bin/run_tests -s Functional/parquet_storage -g functional -t 180 -d
+bin/run_tests -s Functional/partition_pruning -g functional -t 180 -d
+bin/run_tests -s Functional/query_parser -g functional -t 180 -d
+bin/run_tests -s Functional/s3minio -g functional -t 180 -d
+bin/run_tests -s Functional/schema_change_empty_batch -g functional -t 180 -d
+bin/run_tests -s Functional/schema-changes -g functional -t 180 -d
+bin/run_tests -s Functional/semijoin -g functional -t 180 -d
+bin/run_tests -s Functional/subqueries -g functional -t 180 -d
+bin/run_tests -s Functional/table_function -g functional -t 180 -d
+bin/run_tests -s Functional/table_stats -g functional -t 180 -d
+bin/run_tests -s Functional/text_storage -g functional -t 180 -d
+bin/run_tests -s Functional/timestamp_functions -g functional -t 180 -d
+bin/run_tests -s Functional/tpcds -g functional -t 180 -d
+bin/run_tests -s Functional/tpch -g functional -t 180 -d
+bin/run_tests -s Functional/udfs -g functional -t 180 -d
+bin/run_tests -s Functional/union -g functional -t 180 -d
+bin/run_tests -s Functional/union_all -g functional -t 180 -d
+bin/run_tests -s Functional/values -g functional -t 180 -d
+bin/run_tests -s Functional/views -g functional -t 180 -d
+bin/run_tests -s Functional/window_functions -g functional -t 180 -d
+
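+One way to run several of these suites back to back (a sketch; run from the framework root, with suite names taken from the list above):
+```
+for suite in aggregates joins tpch window_functions; do
+  bin/run_tests -s Functional/${suite} -g functional -t 180 -d
+done
+```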
## Contributing We encourage contributions from users! You can fix bugs, make enhancements or add new tests. Create a PR here on GitHub for your change. @@ -71,3 +258,6 @@ Refer to [CONTRIBUTING.md](CONTRIBUTING.md) for details on the test framework st ## License Licensed under the Apache License 2.0. Please see [LICENSE.md](LICENSE.md) + + + From 18e8283b9b7a35dcbb3c4bc41ae7fa37ec0fa61f Mon Sep 17 00:00:00 2001 From: Dmytro Kondriukov Date: Wed, 23 Dec 2020 14:33:00 +0200 Subject: [PATCH 2/3] Update README.md --- README.md | 147 ++++++++++++++++++++++++++++++------------------------ 1 file changed, 83 insertions(+), 64 deletions(-) diff --git a/README.md b/README.md index f32d94734..cd6dc2b17 100644 --- a/README.md +++ b/README.md @@ -9,64 +9,75 @@ regression testing and as part of the release criteria. ## Requirements - 1. The test framework requires a distributed file system such as HDFS or MapR-FS to be configured. - Some of the tests can also be run against a local file system. By default, it's configured to - run against MapR-FS. You can change the default behavior by modifying - [conf/core-site.xml](conf/core-site.xml). Refer to [conf/core-site.xml.example](conf/core-site.xml.example) - for settings. - 2. To run all tests, Hive and HBase needs to be installed and running. To exclude Hive and HBase tests, - please refer to the example in the Execute Tests section. - 3. The test framework should be run on a Drill cluster node. Refer to - [Drill documentation](http://drill.apache.org/docs/installing-drill-in-distributed-mode) for details - on how to setup Drill. It can also be run on a client node with additional configuration required. - 4. Cluster information are set in the [conf/drillTestConfig.properties](conf/drillTestConfig.properties) - file. This is the main configuration file for the framework. It needs to be modified with local - cluster info before compiling the framework and running tests. +1. The test framework requires a distributed file system such as HDFS or MapR-FS to be configured. + Some of the tests can also be run against a local file system. By default, it's configured to + run against MapR-FS. You can change the default behavior by modifying + [conf/core-site.xml](conf/core-site.xml). Refer to [conf/core-site.xml.example](conf/core-site.xml.example) + for settings. +2. To run all tests, Hive and HBase needs to be installed and running. To exclude Hive and HBase tests, + please refer to the example in the Execute Tests section. +3. The test framework should be run on a Drill cluster node. Refer to + [Drill documentation](http://drill.apache.org/docs/installing-drill-in-distributed-mode) for details + on how to setup Drill. It can also be run on a client node with additional configuration required. +4. Cluster information are set in the [conf/drillTestConfig.properties](conf/drillTestConfig.properties.example) + file. This is the main configuration file for the framework. It needs to be modified with local + cluster info before compiling the framework and running tests. + ## Build Project To begin using the test framework, you need to build the project and download dependent datasets (configured in [pom.xml](framework/pom.xml)). 
+## Step 1: +While on a Mapr cluster, get code of test framework: ``` git clone git@github.com:mapr/drill-test-framework.git +``` + +## Step 2 +Chose proper branch for tested drill version +for example: if you will test Drill 1.16.1 - switch to the branch `1.16.1-mapr` +``` +git checkout 1.16.1-mapr +``` +Also, you can check existed branches and chose branch for tested drill version + +## Step 3 +you need create and configure `drillTestConfig.properties` +framework got example drillTestConfig.properties file. You can create own, or copy new one from example, +and edit it. +``` cd drill-test-framework/conf cp drillTestConfig.properties.example drillTestConfig.properties nano drillTestConfig.properties -bin/build_framework -Pdownload ``` +Configuring drillTestConfig.properties for run on singlenode cluster -## Configuring drillTestConfig.properties for run on singlenode cluster -

-
-
- -just uncomment property -

-# Drillbits
+### For a single-node cluster, set the property
+
 ########################################
 DRILLBITS=localhost
 ########################################
-
+
-just uncomment property -

-# Number of Drillbits
+### Set the number of cluster nodes
+For a single-node cluster:
+
 ########################################
 NUMBER_OF_CLUSTER_NODES=1
 ########################################
-
+
-set path to drill install dir +### Set the path to the Drill install directory +For example, the default location for Drill 1.16.1 is:

-# Drill home
 ########################################
 DRILL_HOME=/opt/mapr/drill/drill-1.16.1
 ########################################
 
- -just uncomment property +###Just uncomment properties, and leave it as default

 # Drill test data dir
 ########################################
@@ -83,31 +94,30 @@ DRILL_REPORTS_DFS_DIR=/drill/reports
 ########################################
 
-Set the location of the Hadoop system on which Drill is installed: -

+### Set the location of the Hadoop system on which Drill is installed
+
 ########################################
-HADOOP_INSTALL_LOC=/opt/mapr
+HADOOP_INSTALL_LOC=/opt/mapr/hadoop/
 ########################################
-
+
-Just uncomment the property: -

+### Just uncomment the property and leave it at its default
+
 ########################################
 DRILL_TESTDATA=/drill/testdata
 ########################################
-
- +
-set zookeeper. For example - if you used singlenode cluster, with ip 192.168.121.91, set: -

+### Set ZooKeeper
+For example, if you use a single-node cluster with IP `192.168.121.91`, set:
+
 ########################################
 ZOOKEEPERS=192.168.121.91:5181/drill/drillbits1
 ########################################
-
+
-Just uncomment the properties: -

-# DRILL JDBC driver
+### Just uncomment the properties `JDBC_DRIVER` and `JDBC_DRIVER_CP`, and leave them at their defaults
+
 ########################################
 # MapR DRILL JDBC driver
 JDBC_DRIVER=com.mapr.drill.jdbc41.Driver
@@ -119,26 +129,29 @@ JDBC_DRIVER=com.mapr.drill.jdbc41.Driver
 #JDBC_DRIVER_CP=/root/DrillJDBC41/*
 JDBC_DRIVER_CP=${DRILL_HOME}/jars/jdbc-driver/*
 ########################################
-
+
-Set the JDBC connection string. For example, if you use a single-node cluster with IP 192.168.121.91, set: -

+### Set the JDBC connection string
+For example, if you use a single-node cluster with IP `192.168.121.91`, set:
+
 ########################################
 CONNECTION_STRING=jdbc:drill:zk\=192.168.121.91:5181/zk_root/cluster_id
 ########################################
-
+
-Just uncomment the property: -

-# Drill storage plugin server
+### Set the IP of the storage plugin server
+If you use a single-node cluster with IP `192.168.121.91`, set:
+
 ########################################
 DRILL_STORAGE_PLUGIN_SERVER=192.168.121.91
 ########################################
-
+
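+This address is presumably where the framework updates storage plugin definitions. To confirm Drill's web server is reachable there (a sketch; assumes the default HTTP port 8047 and no authentication):
+```
+curl http://192.168.121.91:8047/storage.json
+```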
-Set the remaining properties, for example: -

+Check `mvn -version` for the M2_HOME value and `hadoop version` for the HADOOP_VERSION value (see the sketch after the export block below).
+
+### Set the remaining properties
+
 USERNAME=mapr
 PASSWORD=mapr
 export ZOOKEEPER_HOST=192.168.121.91
@@ -155,17 +168,26 @@ export DRILL_VERSION=$(grep 'git.build.version' ${DRILL_HOME}/git.properties | t
 export HADOOP_VERSION=2.7.4.0-mapr-700
 export USERNAME
 export PASSWORD
-
+
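+A sketch of how to read these values off the node (assumes `mvn` and `hadoop` are on the PATH):
+```
+mvn -version | grep 'Maven home'                       # value for M2_HOME
+hadoop version | head -1                               # basis for HADOOP_VERSION
+grep 'git.build.version' ${DRILL_HOME}/git.properties  # what DRILL_VERSION is derived from
+```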
+ +## step 4 +Build framwork + +``` +cd drill-test-framework +bin/build_framework -Pdownload +``` + If you've already downloaded the datasets previously, you can simply skip the download. -## Execute Tests +# Execute Tests In the root directory of your repository, execute the following command to run tests: `bin/run_tests -s -g -t -x -n -d` -Example: +## Example:
 bin/run_tests -s Functional/aggregates,Functional/joins -g functional -x hbase -t 180 -n 20 -d
   -s suites (required)
      Here, 'Functional/aggregates,Functional/joins' are directories inside
@@ -185,9 +207,9 @@ Example:
      Use this option to provide the usage of the command, which includes additional options
 
-## List of used tests +# List of used tests -

+
 bin/run_tests -s Functional/aggregates -g functional -t 180 -d
 bin/run_tests -s Functional/amplab -g functional -t 180 -d
 bin/run_tests -s Functional/case_expr -g functional -t 180 -d
@@ -248,7 +270,7 @@ bin/run_tests -s Functional/union_all -g functional -t 180 -d
 bin/run_tests -s Functional/values -g functional -t 180 -d
 bin/run_tests -s Functional/views -g functional -t 180 -d
 bin/run_tests -s Functional/window_functions -g functional -t 180 -d
-
+
## Contributing We encourage contributions from users! You can fix bugs, make enhancements or add new tests. Create a PR here on GitHub for your change. @@ -258,6 +280,3 @@ Refer to [CONTRIBUTING.md](CONTRIBUTING.md) for details on the test framework st ## License Licensed under the Apache License 2.0. Please see [LICENSE.md](LICENSE.md) - - - From a42f5ff58fef1b6c8e6e4a3b8da2a09ca4f3077c Mon Sep 17 00:00:00 2001 From: Dmytro Kondriukov Date: Wed, 23 Dec 2020 14:38:42 +0200 Subject: [PATCH 3/3] Update README.md --- README.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index cd6dc2b17..ba6071e36 100644 --- a/README.md +++ b/README.md @@ -36,12 +36,12 @@ git clone git@github.com:mapr/drill-test-framework.git ``` ## Step 2 -Chose proper branch for tested drill version +Choose proper branch for tested drill version for example: if you will test Drill 1.16.1 - switch to the branch `1.16.1-mapr` ``` git checkout 1.16.1-mapr ``` -Also, you can check existed branches and chose branch for tested drill version +Also, you can check existed branches and choose branch for tested drill version ## Step 3 you need create and configure `drillTestConfig.properties` @@ -77,7 +77,7 @@ DRILL_HOME=/opt/mapr/drill/drill-1.16.1 ######################################## -###Just uncomment properties, and leave it as default +### Just uncomment properties, and leave it as default

 # Drill test data dir
 ########################################
@@ -170,8 +170,8 @@ export USERNAME
 export PASSWORD
 
-## step 4 -Build framwork +## Step 4 +Build framework ``` cd drill-test-framework