diff --git a/.appveyor.yml b/.appveyor.yml index f2291efefcb..52395be924a 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -17,7 +17,7 @@ # under the License. # -version: '0.11.1.{build}' +version: '0.12.0.{build}' shallow_clone: true diff --git a/.github/workflows/core.yml b/.github/workflows/core.yml index b197fd16ebd..c283e5178da 100644 --- a/.github/workflows/core.yml +++ b/.github/workflows/core.yml @@ -40,20 +40,20 @@ jobs: strategy: fail-fast: false matrix: - hadoop: [hadoop2, hadoop3] + hadoop: [hadoop3] java: [ 8, 11 ] steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Tune Runner VM uses: ./.github/actions/tune-runner-vm - name: Set up JDK ${{ matrix.java }} - uses: actions/setup-java@v3 + uses: actions/setup-java@v4 with: distribution: 'temurin' java-version: ${{ matrix.java }} - name: Cache local Maven repository - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ~/.m2/repository @@ -69,7 +69,7 @@ jobs: - name: install and test plugins run: ./mvnw package -pl zeppelin-plugins -amd ${MAVEN_ARGS} - name: Setup conda environment with python 3.9 and R - uses: conda-incubator/setup-miniconda@v2 + uses: conda-incubator/setup-miniconda@v3 with: activate-environment: python_3_with_R environment-file: testing/env_python_3.9_with_R.yml @@ -95,19 +95,19 @@ jobs: matrix: java: [ 8, 11 ] env: - INTERPRETERS: 'hbase,jdbc,file,flink-cmd,cassandra,elasticsearch,bigquery,alluxio,livy,groovy,java,neo4j,submarine,sparql,mongodb,influxdb,shell' + INTERPRETERS: 'hbase,jdbc,file,flink-cmd,cassandra,elasticsearch,bigquery,alluxio,livy,groovy,java,neo4j,sparql,mongodb,influxdb,shell' steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Tune Runner VM uses: ./.github/actions/tune-runner-vm - name: Set up JDK ${{ matrix.java }} - uses: actions/setup-java@v3 + uses: actions/setup-java@v4 with: distribution: 'temurin' java-version: ${{ matrix.java }} - name: Cache local Maven repository - uses: 
actions/cache@v3 + uses: actions/cache@v4 with: path: | ~/.m2/repository @@ -120,7 +120,7 @@ jobs: - name: install environment run: ./mvnw install -DskipTests -am -pl ${INTERPRETERS} ${MAVEN_ARGS} - name: Setup conda environment with python 3.9 and R - uses: conda-incubator/setup-miniconda@v2 + uses: conda-incubator/setup-miniconda@v3 with: activate-environment: python_3_with_R_and_tensorflow environment-file: testing/env_python_3_with_R_and_tensorflow.yml @@ -148,16 +148,16 @@ jobs: java: 8 steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Tune Runner VM uses: ./.github/actions/tune-runner-vm - name: Set up JDK ${{ matrix.java }} - uses: actions/setup-java@v3 + uses: actions/setup-java@v4 with: distribution: 'temurin' java-version: ${{ matrix.java }} - name: Cache local Maven repository - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ~/.m2/repository @@ -168,7 +168,7 @@ jobs: restore-keys: | ${{ runner.os }}-zeppelin- - name: Setup conda environment with python ${{ matrix.python }} and R - uses: conda-incubator/setup-miniconda@v2 + uses: conda-incubator/setup-miniconda@v3 with: activate-environment: python_3_with_R environment-file: testing/env_python_${{ matrix.python }}_with_R.yml @@ -183,7 +183,7 @@ jobs: R -e "IRkernel::installspec()" - name: install environment run: | - ./mvnw install -DskipTests -pl python,rlang,zeppelin-jupyter-interpreter -am -Phadoop2 ${MAVEN_ARGS} + ./mvnw install -DskipTests -pl python,rlang,zeppelin-jupyter-interpreter -am -Phadoop3 ${MAVEN_ARGS} - name: run tests with ${{ matrix.python }} run: | ./mvnw test -pl python,rlang,zeppelin-jupyter-interpreter -DfailIfNoTests=false ${MAVEN_ARGS} @@ -200,16 +200,16 @@ jobs: - name: Start mysql run: sudo systemctl start mysql.service - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Tune Runner VM uses: ./.github/actions/tune-runner-vm - name: Set up JDK ${{ matrix.java }} - uses: actions/setup-java@v3 + uses: 
actions/setup-java@v4 with: distribution: 'temurin' java-version: ${{ matrix.java }} - name: Cache local Maven repository - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ~/.m2/repository @@ -221,10 +221,10 @@ jobs: ${{ runner.os }}-zeppelin- - name: install environment run: | - ./mvnw install -DskipTests -Phadoop2 -Pintegration -pl zeppelin-interpreter-integration,zeppelin-web,spark-submit,spark/scala-2.12,spark/scala-2.13,markdown,flink-cmd,flink/flink-scala-2.12,jdbc,shell -am -Pflink-117 ${MAVEN_ARGS} + ./mvnw install -DskipTests -Phadoop3 -Pintegration -pl zeppelin-interpreter-integration,zeppelin-web,spark-submit,spark/scala-2.12,spark/scala-2.13,markdown,flink-cmd,flink/flink-scala-2.12,jdbc,shell -am -Pflink-117 ${MAVEN_ARGS} ./mvnw package -pl zeppelin-plugins -amd -DskipTests ${MAVEN_ARGS} - name: Setup conda environment with python 3.9 and R - uses: conda-incubator/setup-miniconda@v2 + uses: conda-incubator/setup-miniconda@v3 with: activate-environment: python_3_with_R environment-file: testing/env_python_3_with_R.yml @@ -238,7 +238,7 @@ jobs: run: | R -e "IRkernel::installspec()" - name: run tests - run: ./mvnw test -pl zeppelin-interpreter-integration -Phadoop2 -Pintegration -DfailIfNoTests=false -Dtest=ZeppelinClientIntegrationTest,ZeppelinClientWithAuthIntegrationTest,ZSessionIntegrationTest,ShellIntegrationTest,JdbcIntegrationTest + run: ./mvnw test -pl zeppelin-interpreter-integration -Phadoop3 -Pintegration -DfailIfNoTests=false -Dtest=ZeppelinClientIntegrationTest,ZeppelinClientWithAuthIntegrationTest,ZSessionIntegrationTest,ShellIntegrationTest,JdbcIntegrationTest - name: Print zeppelin logs if: always() run: if [ -d "logs" ]; then cat logs/*; fi @@ -257,16 +257,16 @@ jobs: flink: 115 steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Tune Runner VM uses: ./.github/actions/tune-runner-vm - name: Set up JDK 8 - uses: actions/setup-java@v3 + uses: actions/setup-java@v4 with: distribution: 
'temurin' java-version: 8 - name: Cache local Maven repository - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ~/.m2/repository @@ -278,10 +278,10 @@ jobs: ${{ runner.os }}-zeppelin- - name: install environment for flink run: | - ./mvnw install -DskipTests -am -pl flink/flink-scala-2.12,flink-cmd,zeppelin-interpreter-integration -Pflink-${{ matrix.flink }} -Phadoop2 -Pintegration ${MAVEN_ARGS} + ./mvnw install -DskipTests -am -pl flink/flink-scala-2.12,flink-cmd,zeppelin-interpreter-integration -Pflink-${{ matrix.flink }} -Phadoop3 -Pintegration ${MAVEN_ARGS} ./mvnw clean package -pl zeppelin-plugins -amd -DskipTests ${MAVEN_ARGS} - name: Setup conda environment with python ${{ matrix.python }} and R - uses: conda-incubator/setup-miniconda@v2 + uses: conda-incubator/setup-miniconda@v3 with: activate-environment: python_3_with_flink environment-file: testing/env_python_3_with_flink_${{ matrix.flink }}.yml @@ -292,7 +292,7 @@ jobs: auto-activate-base: false use-mamba: true - name: run tests for flink - run: ./mvnw verify -pl flink/flink-scala-2.12,flink-cmd,zeppelin-interpreter-integration -Pflink-${{ matrix.flink }} -am -Phadoop2 -Pintegration -DfailIfNoTests=false -Dtest=org.apache.zeppelin.flink.*Test,FlinkIntegrationTest${{ matrix.flink }} ${MAVEN_ARGS} + run: ./mvnw verify -pl flink/flink-scala-2.12,flink-cmd,zeppelin-interpreter-integration -Pflink-${{ matrix.flink }} -am -Phadoop3 -Pintegration -DfailIfNoTests=false -Dtest=org.apache.zeppelin.flink.*Test,FlinkIntegrationTest${{ matrix.flink }} ${MAVEN_ARGS} - name: Print zeppelin logs if: always() run: if [ -d "logs" ]; then cat logs/*; fi @@ -307,16 +307,16 @@ jobs: java: [ 8, 11 ] steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Tune Runner VM uses: ./.github/actions/tune-runner-vm - name: Set up JDK ${{ matrix.java }} - uses: actions/setup-java@v3 + uses: actions/setup-java@v4 with: distribution: 'temurin' java-version: ${{ matrix.java }} - name: Cache 
local Maven repository - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ~/.m2/repository @@ -328,10 +328,10 @@ jobs: ${{ runner.os }}-zeppelin- - name: install environment run: | - ./mvnw install -DskipTests -pl zeppelin-interpreter-integration,zeppelin-web,spark-submit,spark/scala-2.12,spark/scala-2.13,markdown -am -Phadoop2 -Pintegration ${MAVEN_ARGS} + ./mvnw install -DskipTests -pl zeppelin-interpreter-integration,zeppelin-web,spark-submit,spark/scala-2.12,spark/scala-2.13,markdown -am -Phadoop3 -Pintegration ${MAVEN_ARGS} ./mvnw clean package -pl zeppelin-plugins -amd -DskipTests ${MAVEN_ARGS} - name: Setup conda environment with python 3.9 and R - uses: conda-incubator/setup-miniconda@v2 + uses: conda-incubator/setup-miniconda@v3 with: activate-environment: python_3_with_R environment-file: testing/env_python_3_with_R.yml @@ -362,16 +362,16 @@ jobs: java: 8 steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Tune Runner VM uses: ./.github/actions/tune-runner-vm - name: Set up JDK ${{ matrix.java }} - uses: actions/setup-java@v3 + uses: actions/setup-java@v4 with: distribution: 'temurin' java-version: ${{ matrix.java }} - name: Cache local Maven repository - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ~/.m2/repository @@ -382,9 +382,9 @@ jobs: restore-keys: | ${{ runner.os }}-zeppelin- - name: install environment - run: ./mvnw install -DskipTests -pl spark-submit,spark/scala-2.12,spark/scala-2.13 -am -Phadoop2 ${MAVEN_ARGS} + run: ./mvnw install -DskipTests -pl spark-submit,spark/scala-2.12,spark/scala-2.13 -am -Phadoop3 ${MAVEN_ARGS} - name: Setup conda environment with python ${{ matrix.python }} and R - uses: conda-incubator/setup-miniconda@v2 + uses: conda-incubator/setup-miniconda@v3 with: activate-environment: python_3_with_R environment-file: testing/env_python_${{ matrix.python }}_with_R.yml @@ -400,11 +400,11 @@ jobs: - name: run spark-3.2 tests with scala-2.12 and python-${{ 
matrix.python }} run: | rm -rf spark/interpreter/metastore_db - ./mvnw verify -pl spark-submit,spark/interpreter -am -Dtest=org/apache/zeppelin/spark/* -Pspark-3.2 -Pspark-scala-2.12 -Phadoop2 -Pintegration -DfailIfNoTests=false ${MAVEN_ARGS} + ./mvnw verify -pl spark-submit,spark/interpreter -am -Dtest=org/apache/zeppelin/spark/* -Pspark-3.2 -Pspark-scala-2.12 -Phadoop3 -Pintegration -DfailIfNoTests=false ${MAVEN_ARGS} - name: run spark-3.2 tests with scala-2.13 and python-${{ matrix.python }} run: | rm -rf spark/interpreter/metastore_db - ./mvnw verify -pl spark-submit,spark/interpreter -am -Dtest=org/apache/zeppelin/spark/* -Pspark-3.2 -Pspark-scala-2.13 -Phadoop2 -Pintegration -DfailIfNoTests=false ${MAVEN_ARGS} + ./mvnw verify -pl spark-submit,spark/interpreter -am -Dtest=org/apache/zeppelin/spark/* -Pspark-3.2 -Pspark-scala-2.13 -Phadoop3 -Pintegration -DfailIfNoTests=false ${MAVEN_ARGS} - name: run spark-3.3 tests with scala-2.12 and python-${{ matrix.python }} run: | rm -rf spark/interpreter/metastore_db @@ -423,20 +423,24 @@ jobs: rm -rf spark/interpreter/metastore_db ./mvnw verify -pl spark-submit,spark/interpreter -am -Dtest=org/apache/zeppelin/spark/* -Pspark-3.5 -Pspark-scala-2.13 -Phadoop3 -Pintegration -DfailIfNoTests=false ${MAVEN_ARGS} - livy-0-7-with-spark-3-4-1-under-python3: + # The version combination is based on the facts: + # 1. official Livy 0.8 binary tarball is built against Spark 2.4 + # 2. official Spark 2.4 binary tarball is built against Scala 2.11 + # 3. 
Spark 2.4 support Python 2.7, 3.4 to 3.7 + livy-0-8-with-spark-2-4-under-python37: runs-on: ubuntu-20.04 steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Tune Runner VM uses: ./.github/actions/tune-runner-vm - name: Set up JDK 8 - uses: actions/setup-java@v3 + uses: actions/setup-java@v4 with: distribution: 'temurin' java-version: 8 - name: Cache local Maven repository - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ~/.m2/repository @@ -449,14 +453,14 @@ jobs: - name: install environment run: | ./mvnw install -DskipTests -pl livy -am ${MAVEN_ARGS} - ./testing/downloadSpark.sh "3.4.1" "3" - ./testing/downloadLivy.sh "0.7.1-incubating" - - name: Setup conda environment with python 3.9 and R - uses: conda-incubator/setup-miniconda@v2 + ./testing/downloadSpark.sh "2.4.8" "2.7" + ./testing/downloadLivy.sh "0.8.0-incubating" "2.11" + - name: Setup conda environment with python 3.7 and R + uses: conda-incubator/setup-miniconda@v3 with: - activate-environment: python_3_with_R - environment-file: testing/env_python_3_with_R.yml - python-version: 3.9 + activate-environment: python_37_with_R + environment-file: testing/env_python_3.7_with_R.yml + python-version: 3.7 miniforge-variant: Mambaforge channels: conda-forge,defaults channel-priority: true @@ -466,7 +470,10 @@ jobs: run: | R -e "IRkernel::installspec()" - name: run tests - run: ./mvnw verify -pl livy -am ${MAVEN_ARGS} + run: | + export SPARK_HOME=$PWD/spark-2.4.8-bin-hadoop2.7 + export LIVY_HOME=$PWD/apache-livy-0.8.0-incubating_2.11-bin + ./mvnw verify -pl livy -am ${MAVEN_ARGS} default-build: runs-on: ubuntu-20.04 @@ -476,16 +483,16 @@ jobs: java: [ 8, 11 ] steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Tune Runner VM uses: ./.github/actions/tune-runner-vm - name: Set up JDK ${{ matrix.java }} - uses: actions/setup-java@v3 + uses: actions/setup-java@v4 with: distribution: 'temurin' java-version: ${{ matrix.java }} - 
name: Cache local Maven repository - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ~/.m2/repository diff --git a/.github/workflows/frontend.yml b/.github/workflows/frontend.yml index d7fcbc2fa76..5ad15ab62e0 100644 --- a/.github/workflows/frontend.yml +++ b/.github/workflows/frontend.yml @@ -23,7 +23,7 @@ env: SPARK_PRINT_LAUNCH_COMMAND: "true" SPARK_LOCAL_IP: 127.0.0.1 ZEPPELIN_LOCAL_IP: 127.0.0.1 - INTERPRETERS: '!hbase,!jdbc,!file,!flink,!cassandra,!elasticsearch,!bigquery,!alluxio,!livy,!groovy,!java,!neo4j,!submarine,!sparql,!mongodb' + INTERPRETERS: '!hbase,!jdbc,!file,!flink,!cassandra,!elasticsearch,!bigquery,!alluxio,!livy,!groovy,!java,!neo4j,!sparql,!mongodb' permissions: contents: read # to fetch code (actions/checkout) @@ -33,16 +33,16 @@ jobs: runs-on: ubuntu-20.04 steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Tune Runner VM uses: ./.github/actions/tune-runner-vm - name: Set up JDK 11 - uses: actions/setup-java@v3 + uses: actions/setup-java@v4 with: distribution: 'temurin' java-version: 11 - name: Cache local Maven repository - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ~/.m2/repository @@ -53,9 +53,9 @@ jobs: restore-keys: | ${{ runner.os }}-zeppelin- - name: Install application - run: ./mvnw clean install -DskipTests -am -pl zeppelin-web -Pspark-scala-2.12 -Pspark-3.4 -Phadoop2 -Pweb-dist ${MAVEN_ARGS} + run: ./mvnw clean install -DskipTests -am -pl zeppelin-web -Pspark-scala-2.12 -Pspark-3.4 -Phadoop3 -Pweb-dist ${MAVEN_ARGS} - name: Run headless test - run: xvfb-run --auto-servernum --server-args="-screen 0 1024x768x24" ./mvnw verify -pl zeppelin-web -Pspark-scala-2.12 -Pspark-3.4 -Phadoop2 -Pweb-dist -Pweb-e2e ${MAVEN_ARGS} + run: xvfb-run --auto-servernum --server-args="-screen 0 1024x768x24" ./mvnw verify -pl zeppelin-web -Pspark-scala-2.12 -Pspark-3.4 -Phadoop3 -Pweb-dist -Pweb-e2e ${MAVEN_ARGS} - name: Print zeppelin logs if: always() run: if [ -d "logs" ]; then cat 
logs/*; fi @@ -64,16 +64,16 @@ jobs: runs-on: ubuntu-20.04 steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Tune Runner VM uses: ./.github/actions/tune-runner-vm - name: Set up JDK 11 - uses: actions/setup-java@v3 + uses: actions/setup-java@v4 with: distribution: 'temurin' java-version: 11 - name: Cache local Maven repository - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ~/.m2/repository @@ -93,16 +93,16 @@ jobs: shell: bash -l {0} steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Tune Runner VM uses: ./.github/actions/tune-runner-vm - name: Set up JDK 11 - uses: actions/setup-java@v3 + uses: actions/setup-java@v4 with: distribution: 'temurin' java-version: 11 - name: Cache local Maven repository - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ~/.m2/repository @@ -113,7 +113,7 @@ jobs: restore-keys: | ${{ runner.os }}-zeppelin- - name: Setup conda environment with python 3.9 and R - uses: conda-incubator/setup-miniconda@v2 + uses: conda-incubator/setup-miniconda@v3 with: activate-environment: python_3_with_R environment-file: testing/env_python_3_with_R.yml @@ -128,10 +128,10 @@ jobs: R -e "IRkernel::installspec()" - name: Install Environment run: | - ./mvnw clean install -DskipTests -am -pl zeppelin-integration -Pintegration -Pspark-scala-2.12 -Pspark-3.4 -Phadoop2 -Pweb-dist ${MAVEN_ARGS} + ./mvnw clean install -DskipTests -am -pl zeppelin-integration -Pintegration -Pspark-scala-2.12 -Pspark-3.4 -Phadoop3 -Pweb-dist ${MAVEN_ARGS} - name: run tests run: | - source ./testing/downloadSpark.sh "3.4.1" "3" && echo "SPARK_HOME: ${SPARK_HOME}" && xvfb-run --auto-servernum --server-args="-screen 0 1600x1024x16" ./mvnw verify -DfailIfNoTests=false -pl zeppelin-integration -Pintegration -Pspark-scala-2.12 -Pspark-3.4 -Phadoop2 -Pweb-dist -Pusing-source-tree ${MAVEN_ARGS} + xvfb-run --auto-servernum --server-args="-screen 0 1600x1024x16" ./mvnw verify 
-DfailIfNoTests=false -pl zeppelin-integration -Pintegration -Pspark-scala-2.12 -Pspark-3.4 -Phadoop3 -Pweb-dist -Pusing-source-tree ${MAVEN_ARGS} - name: Print zeppelin logs if: always() run: if [ -d "logs" ]; then cat logs/*; fi diff --git a/.github/workflows/quick.yml b/.github/workflows/quick.yml index 5bc8ac17356..b26f015c6b4 100644 --- a/.github/workflows/quick.yml +++ b/.github/workflows/quick.yml @@ -28,9 +28,9 @@ jobs: runs-on: ubuntu-20.04 steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up JDK 11 - uses: actions/setup-java@v3 + uses: actions/setup-java@v4 with: distribution: 'temurin' java-version: 11 @@ -41,12 +41,12 @@ jobs: strategy: fail-fast: false matrix: - hadoop: [hadoop2, hadoop3] + hadoop: [hadoop3] steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up JDK 11 - uses: actions/setup-java@v3 + uses: actions/setup-java@v4 with: distribution: 'temurin' java-version: 11 diff --git a/.gitignore b/.gitignore index 29bb190ebca..6784673088e 100644 --- a/.gitignore +++ b/.gitignore @@ -10,6 +10,10 @@ /interpreter/* !/interpreter/lib +# metals +.bloop +.metals + # interpreter temp files derby.log spark/metastore_db diff --git a/Dockerfile b/Dockerfile index 5d97d7dcc0f..b77782f44f3 100644 --- a/Dockerfile +++ b/Dockerfile @@ -23,7 +23,7 @@ RUN echo "unsafe-perm=true" > ~/.npmrc && \ echo '{ "allow_root": true }' > ~/.bowerrc && \ ./mvnw -B package -DskipTests -Pbuild-distr -Pspark-3.3 -Pinclude-hadoop -Phadoop3 -Pspark-scala-2.12 -Pweb-angular -Pweb-dist && \ # Example with doesn't compile all interpreters - # ./mvnw -B package -DskipTests -Pbuild-distr -Pspark-3.2 -Pinclude-hadoop -Phadoop3 -Pspark-scala-2.12 -Pweb-angular -Pweb-dist -pl '!groovy,!submarine,!livy,!hbase,!file,!flink' && \ + # ./mvnw -B package -DskipTests -Pbuild-distr -Pspark-3.2 -Pinclude-hadoop -Phadoop3 -Pspark-scala-2.12 -Pweb-angular -Pweb-dist -pl '!groovy,!livy,!hbase,!file,!flink' && \ mv 
/workspace/zeppelin/zeppelin-distribution/target/zeppelin-*/zeppelin-* /opt/zeppelin/ && \ # Removing stuff saves time, because docker creates a temporary layer rm -rf ~/.m2 && \ diff --git a/alluxio/pom.xml b/alluxio/pom.xml index a98e05a3309..4ba57f448a1 100644 --- a/alluxio/pom.xml +++ b/alluxio/pom.xml @@ -23,7 +23,7 @@ zeppelin-interpreter-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../zeppelin-interpreter-parent/pom.xml @@ -68,15 +68,27 @@ alluxio-minicluster ${alluxio.version} test + + + org.apache.hadoop + hadoop-client + + org.apache.hadoop - hadoop-common - 3.2.4 + hadoop-client-api + ${hadoop.version} test + + org.apache.hadoop + hadoop-client-runtime + ${hadoop.version} + test + diff --git a/angular/pom.xml b/angular/pom.xml index 271554a87d2..8c943b026bc 100644 --- a/angular/pom.xml +++ b/angular/pom.xml @@ -23,7 +23,7 @@ zeppelin-interpreter-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../zeppelin-interpreter-parent/pom.xml diff --git a/bigquery/pom.xml b/bigquery/pom.xml index dbf16636376..849e9626d99 100644 --- a/bigquery/pom.xml +++ b/bigquery/pom.xml @@ -23,7 +23,7 @@ zeppelin-interpreter-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../zeppelin-interpreter-parent/pom.xml diff --git a/bin/common.cmd b/bin/common.cmd index 12ca1d7498d..0ddbad8a512 100644 --- a/bin/common.cmd +++ b/bin/common.cmd @@ -86,6 +86,10 @@ if not defined JAVA_HOME ( set ZEPPELIN_RUNNER=%JAVA_HOME%\bin\java ) +if not defined ZEPPELIN_IDENT_STRING ( + set ZEPPELIN_IDENT_STRING=%USERNAME% +) + if not defined ZEPPELIN_INTERPRETER_REMOTE_RUNNER ( set ZEPPELIN_INTERPRETER_REMOTE_RUNNER=bin\interpreter.cmd ) diff --git a/bin/common.sh b/bin/common.sh index 411e915bbfe..56f8aa45ff3 100644 --- a/bin/common.sh +++ b/bin/common.sh @@ -171,6 +171,14 @@ else fi export ZEPPELIN_RUNNER +if [[ -z "$ZEPPELIN_IDENT_STRING" ]]; then + # if for some reason the shell doesn't have $USER defined + # (e.g., ssh'd in to execute a command) 
+ # let's get the effective username and use that + USER=${USER:-$(id -nu)} + export ZEPPELIN_IDENT_STRING="${USER}" +fi + if [[ -z "$ZEPPELIN_INTERPRETER_REMOTE_RUNNER" ]]; then export ZEPPELIN_INTERPRETER_REMOTE_RUNNER="bin/interpreter.sh" fi diff --git a/bin/zeppelin-daemon.sh b/bin/zeppelin-daemon.sh index f3c31ff9168..319f527e935 100755 --- a/bin/zeppelin-daemon.sh +++ b/bin/zeppelin-daemon.sh @@ -50,7 +50,7 @@ HOSTNAME=$(hostname) ZEPPELIN_NAME="Zeppelin" ZEPPELIN_LOGFILE="${ZEPPELIN_LOG_DIR}/zeppelin-${ZEPPELIN_IDENT_STRING}-${HOSTNAME}.log" ZEPPELIN_OUTFILE="${ZEPPELIN_LOG_DIR}/zeppelin-${ZEPPELIN_IDENT_STRING}-${HOSTNAME}.out" -ZEPPELIN_PID="${ZEPPELIN_PID_DIR}/zeppelin-${ZEPPELIN_IDENT_STRING}-${HOSTNAME}.pid" +ZEPPELIN_PID="${ZEPPELIN_PID_DIR}/zeppelin-${ZEPPELIN_IDENT_STRING}.pid" ZEPPELIN_MAIN=org.apache.zeppelin.server.ZeppelinServer JAVA_OPTS+=" -Dzeppelin.log.file=${ZEPPELIN_LOGFILE}" diff --git a/build-tools/pom.xml b/build-tools/pom.xml index bfd6aed153b..7e02b325e47 100644 --- a/build-tools/pom.xml +++ b/build-tools/pom.xml @@ -22,6 +22,6 @@ org.apache.zeppelin zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 - \ No newline at end of file + diff --git a/cassandra/pom.xml b/cassandra/pom.xml index 55cfb4c897e..3668d552843 100644 --- a/cassandra/pom.xml +++ b/cassandra/pom.xml @@ -21,7 +21,7 @@ zeppelin-interpreter-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../zeppelin-interpreter-parent/pom.xml diff --git a/conf/interpreter-list b/conf/interpreter-list index 76b1141313b..b1191cd4f6d 100644 --- a/conf/interpreter-list +++ b/conf/interpreter-list @@ -17,21 +17,20 @@ # # [name] [maven artifact] [description] -alluxio org.apache.zeppelin:zeppelin-alluxio:0.11.1 Alluxio interpreter -angular org.apache.zeppelin:zeppelin-angular:0.11.1 HTML and AngularJS view rendering -bigquery org.apache.zeppelin:zeppelin-bigquery:0.11.1 BigQuery interpreter -cassandra org.apache.zeppelin:zeppelin-cassandra:0.11.1 Cassandra interpreter 
-elasticsearch org.apache.zeppelin:zeppelin-elasticsearch:0.11.1 Elasticsearch interpreter -file org.apache.zeppelin:zeppelin-file:0.11.1 HDFS file interpreter -flink org.apache.zeppelin:zeppelin-flink:0.11.1 Flink interpreter -groovy org.apache.zeppelin:zeppelin-groovy:0.11.1 Groovy interpreter -hbase org.apache.zeppelin:zeppelin-hbase:0.11.1 Hbase interpreter -java org.apache.zeppelin:zeppelin-java:0.11.1 Java interpreter -jdbc org.apache.zeppelin:zeppelin-jdbc:0.11.1 Jdbc interpreter -livy org.apache.zeppelin:zeppelin-livy:0.11.1 Livy interpreter -md org.apache.zeppelin:zeppelin-markdown:0.11.1 Markdown support -neo4j org.apache.zeppelin:zeppelin-neo4j:0.11.1 Neo4j interpreter -python org.apache.zeppelin:zeppelin-python:0.11.1 Python interpreter -shell org.apache.zeppelin:zeppelin-shell:0.11.1 Shell command -sparql org.apache.zeppelin:zeppelin-sparql:0.11.1 Sparql interpreter -submarine org.apache.zeppelin:zeppelin-submarine:0.11.1 Submarine interpreter +alluxio org.apache.zeppelin:zeppelin-alluxio:0.11.2.3.3.6.0-1 Alluxio interpreter +angular org.apache.zeppelin:zeppelin-angular:0.11.2.3.3.6.0-1 HTML and AngularJS view rendering +bigquery org.apache.zeppelin:zeppelin-bigquery:0.11.2.3.3.6.0-1 BigQuery interpreter +cassandra org.apache.zeppelin:zeppelin-cassandra:0.11.2.3.3.6.0-1 Cassandra interpreter +elasticsearch org.apache.zeppelin:zeppelin-elasticsearch:0.11.2.3.3.6.0-1 Elasticsearch interpreter +file org.apache.zeppelin:zeppelin-file:0.11.2.3.3.6.0-1 HDFS file interpreter +flink org.apache.zeppelin:zeppelin-flink:0.11.2.3.3.6.0-1 Flink interpreter +groovy org.apache.zeppelin:zeppelin-groovy:0.11.2.3.3.6.0-1 Groovy interpreter +hbase org.apache.zeppelin:zeppelin-hbase:0.11.2.3.3.6.0-1 Hbase interpreter +java org.apache.zeppelin:zeppelin-java:0.11.2.3.3.6.0-1 Java interpreter +jdbc org.apache.zeppelin:zeppelin-jdbc:0.11.2.3.3.6.0-1 Jdbc interpreter +livy org.apache.zeppelin:zeppelin-livy:0.11.2.3.3.6.0-1 Livy interpreter +md 
org.apache.zeppelin:zeppelin-markdown:0.11.2.3.3.6.0-1 Markdown support +neo4j org.apache.zeppelin:zeppelin-neo4j:0.11.2.3.3.6.0-1 Neo4j interpreter +python org.apache.zeppelin:zeppelin-python:0.11.2.3.3.6.0-1 Python interpreter +shell org.apache.zeppelin:zeppelin-shell:0.11.2.3.3.6.0-1 Shell command +sparql org.apache.zeppelin:zeppelin-sparql:0.11.2.3.3.6.0-1 Sparql interpreter diff --git a/dev/change_zeppelin_version.sh b/dev/change_zeppelin_version.sh index 00ac88a1778..0acebba586c 100755 --- a/dev/change_zeppelin_version.sh +++ b/dev/change_zeppelin_version.sh @@ -61,6 +61,12 @@ sed -i '' 's/"version": "'"${FROM_VERSION}"'",/"version": "'"${TO_VERSION}"'",/g # Change version in Dockerfile sed -i '' 's/Z_VERSION="'"${FROM_VERSION}"'"/Z_VERSION="'"${TO_VERSION}"'"/g' scripts/docker/zeppelin/bin/Dockerfile +sed -i '' 's/version="'"${FROM_VERSION}"'"/version="'"${TO_VERSION}"'"/g' scripts/docker/zeppelin-interpreter/Dockerfile +sed -i '' 's/version="'"${FROM_VERSION}"'"/version="'"${TO_VERSION}"'"/g' scripts/docker/zeppelin-server/Dockerfile + +# Change version in Kubernetes yaml +sed -i '' 's/zeppelin-interpreter:'"${FROM_VERSION}"'/zeppelin-interpreter:'"${TO_VERSION}"'/g' k8s/zeppelin-server.yaml +sed -i '' 's/zeppelin-server:'"${FROM_VERSION}"'/zeppelin-server:'"${TO_VERSION}"'/g' k8s/zeppelin-server.yaml # Change docker image version in configuration sed -i '' sed 's/zeppelin:'"${OLD_VERSION}"'/zeppelin:'"${NEW_VERSION}"'/g' conf/zeppelin-site.xml.template diff --git a/dev/common_release.sh b/dev/common_release.sh index e3de77778a7..14dc3f4c41f 100644 --- a/dev/common_release.sh +++ b/dev/common_release.sh @@ -20,11 +20,18 @@ # common fucntions if [[ -z "${TAR}" ]]; then - TAR="/usr/bin/tar" + TAR="tar" + if [ "$(uname -s)" = "Darwin" ]; then + export COPYFILE_DISABLE=1 + TAR="tar --no-mac-metadata --no-xattrs --no-fflags" + fi fi if [[ -z "${SHASUM}" ]]; then - SHASUM="/usr/bin/shasum" + SHASUM="sha512sum" + if [ "$(uname -s)" = "Darwin" ]; then + SHASUM="shasum -a 512" + fi 
fi if [[ -z "${WORKING_DIR}" ]]; then diff --git a/dev/create_release.sh b/dev/create_release.sh index ff9c6d95dda..b0854f4e11d 100755 --- a/dev/create_release.sh +++ b/dev/create_release.sh @@ -97,9 +97,9 @@ function make_binary_release() { git_clone make_source_package -make_binary_release netinst "-Pweb-angular -pl !hbase,!jdbc,!file,!flink,!cassandra,!elasticsearch,!bigquery,!alluxio,!livy,!groovy,!java,!neo4j,!submarine,!sparql,!mongodb -am" +make_binary_release netinst "-Pweb-angular -pl !hbase,!jdbc,!file,!flink,!cassandra,!elasticsearch,!bigquery,!alluxio,!livy,!groovy,!java,!neo4j,!sparql,!mongodb,!shell -am" -make_binary_release all "-Pweb-angular" +make_binary_release all "-Pweb-angular -pl !shell" # remove non release files and dirs rm -rf "${WORKING_DIR}/zeppelin" diff --git a/docs/Gemfile.lock b/docs/Gemfile.lock index aaccff9451e..9aca3c2f03d 100644 --- a/docs/Gemfile.lock +++ b/docs/Gemfile.lock @@ -12,7 +12,7 @@ GEM github-pages (10) RedCloth (= 4.2.9) jekyll (= 1.3.0) - kramdown (= 1.2.0) + kramdown (= 2.4.0) liquid (= 2.5.4) maruku (= 0.6.1) rdiscount (= 2.1.7) @@ -28,7 +28,7 @@ GEM pygments.rb (~> 0.5.0) redcarpet (~> 2.3.0) safe_yaml (~> 1.0.4) - kramdown (1.2.0) + kramdown (2.4.0) liquid (2.5.4) listen (1.3.1) rb-fsevent (>= 0.9.3) diff --git a/docs/_config.yml b/docs/_config.yml index 0e2ef78a197..0ff14ef6994 100644 --- a/docs/_config.yml +++ b/docs/_config.yml @@ -21,7 +21,7 @@ author : twitter : ASF feedburner : feedname -ZEPPELIN_VERSION : 0.11.1 +ZEPPELIN_VERSION : 0.11.2.3.3.6.0-1 # The production_url is only used when full-domain names are needed # such as sitemap.txt @@ -59,7 +59,7 @@ JB : # - Only the following values are falsy: ["", null, false] # - When setting BASE_PATH it must be a valid url. # This means always setting the protocol (http|https) or prefixing with "/" - BASE_PATH : /docs/0.11.1 + BASE_PATH : /docs/0.11.2.3.3.6.0-1 # By default, the asset_path is automatically defined relative to BASE_PATH plus the enabled theme. 
# ex: [BASE_PATH]/assets/themes/[THEME-NAME] diff --git a/docs/_includes/themes/zeppelin/_navigation.html b/docs/_includes/themes/zeppelin/_navigation.html index d2d1b3edb2e..9305359fdfb 100644 --- a/docs/_includes/themes/zeppelin/_navigation.html +++ b/docs/_includes/themes/zeppelin/_navigation.html @@ -158,7 +158,6 @@
  • Postgresql, HAWQ
  • Shell
  • Sparql
  • -
  • Submarine
  • diff --git a/docs/index.md b/docs/index.md index b0fc32bb55e..75d362ca868 100644 --- a/docs/index.md +++ b/docs/index.md @@ -158,7 +158,6 @@ limitations under the License. * [Shell](./interpreter/shell.html) * [Spark](./interpreter/spark.html) * [Sparql](./interpreter/sparql.html) - * [Submarine](./interpreter/submarine.html) #### External Resources * [Mailing List](https://zeppelin.apache.org/community.html) diff --git a/docs/interpreter/flink.md b/docs/interpreter/flink.md index 06864d24a3f..dcff3801f84 100644 --- a/docs/interpreter/flink.md +++ b/docs/interpreter/flink.md @@ -301,7 +301,7 @@ You can also add and set other Flink properties which are not listed in the tabl zeppelin.flink.hive.version - 2.3.4 + 2.3.7 Hive version that you would like to connect diff --git a/docs/interpreter/jdbc.md b/docs/interpreter/jdbc.md index bc14678b947..c668ee55998 100644 --- a/docs/interpreter/jdbc.md +++ b/docs/interpreter/jdbc.md @@ -162,7 +162,7 @@ The last step is **Dependency Setting**. Since Zeppelin only includes `PostgreSQ -That's it. You can find more JDBC connection setting examples([Mysql](#mysql), [MariaDB](#mariadb), [Redshift](#redshift), [Apache Hive](#apache-hive), [Presto/Trino](#prestotrino), [Impala](#impala), [Apache Kyuubi](#apache-kyuubi), [Apache Phoenix](#apache-phoenix), and [Apache Tajo](#apache-tajo)) in [this section](#examples). +That's it. You can find more JDBC connection setting examples([MySQL](#mysql), [MariaDB](#mariadb), [Redshift](#redshift), [Apache Hive](#apache-hive), [Presto/Trino](#prestotrino), [Impala](#impala), [Apache Kyuubi](#apache-kyuubi), [Apache Phoenix](#apache-phoenix), and [Apache Tajo](#apache-tajo)) in [this section](#examples). ## JDBC Interpreter Datasource Pool Configuration The Jdbc interpreter uses the connection pool technology, and supports users to do some personal configuration of the connection pool. 
For example, we can configure `default.validationQuery='select 1'` and `default.testOnBorrow=true` in the Interpreter configuration to avoid the "Invalid SessionHandle" runtime error caused by Session timeout when connecting to HiveServer2 through JDBC interpreter. @@ -450,11 +450,11 @@ Here are some examples you can refer to. Including the below connectors, you can default.user - mysql_user + pg_user default.password - mysql_password + pg_password @@ -829,7 +829,7 @@ Dependencies ### Apache Kyuubi -Zeppelin connect to `Kyuubi` to run sql via `KyuubiHiveDriver`. There are 2 cases of connecting with Kyuubi: +Zeppelin connect to Kyuubi to run SQL via [Kyuubi JDBC Driver](https://kyuubi.readthedocs.io/en/master/client/jdbc/kyuubi_jdbc.html). There are 2 cases of connecting with Kyuubi: * Connect to Kyuubi without KERBEROS * Connect to Kyuubi with KERBEROS @@ -853,7 +853,7 @@ Properties default.url - jdbc:hive2://kyuubi-server:10009 + jdbc:kyuubi://kyuubi-server:10009 @@ -865,11 +865,7 @@ Dependencies Excludes - org.apache.kyuubi:kyuubi-hive-jdbc-shaded:1.6.1-incubating - - - - org.apache.hive:hive-jdbc:3.1.2 + org.apache.kyuubi:kyuubi-hive-jdbc-shaded:1.9.0 @@ -892,7 +888,7 @@ Properties default.url - jdbc:hive2://kyuubi-server:10009/default;principal={kyuubi_principal} + jdbc:kyuubi://kyuubi-server:10009/default;principal={kyuubi_server_principal} zeppelin.jdbc.auth.type @@ -916,11 +912,15 @@ Dependencies Excludes - org.apache.kyuubi:kyuubi-hive-jdbc-shaded:1.6.1-incubating + org.apache.kyuubi:kyuubi-hive-jdbc-shaded:1.9.0 - org.apache.hive:hive-jdbc:3.1.2 + org.apache.hadoop:hadoop-client-api:3.3.6 + + + + org.apache.hadoop:hadoop-client-runtime:3.3.6 diff --git a/docs/interpreter/submarine.md b/docs/interpreter/submarine.md deleted file mode 100644 index 3031155f6da..00000000000 --- a/docs/interpreter/submarine.md +++ /dev/null @@ -1,407 +0,0 @@ ---- -layout: page -title: "Apache Hadoop Submarine Interpreter for Apache Zeppelin" -description: "Hadoop Submarine is 
the latest machine learning framework subproject in the Hadoop 3.1 release. It allows Hadoop to support Tensorflow, MXNet, Caffe, Spark, etc." -group: interpreter ---- - -{% include JB/setup %} - -# Submarine Interpreter for Apache Zeppelin - -
    - -[Hadoop Submarine ](https://submarine.apache.org/) is the latest machine learning framework subproject in the Hadoop 3.1 release. It allows Hadoop to support Tensorflow, MXNet, Caffe, Spark, etc. A variety of deep learning frameworks provide a full-featured system framework for machine learning algorithm development, distributed model training, model management, and model publishing, combined with hadoop's intrinsic data storage and data processing capabilities to enable data scientists to Good mining and the value of the data. - -A deep learning algorithm project requires data acquisition, data processing, data cleaning, interactive visual programming adjustment parameters, algorithm testing, algorithm publishing, algorithm job scheduling, offline model training, model online services and many other processes and processes. Zeppelin is a web-based notebook that supports interactive data analysis. You can use SQL, Scala, Python, etc. to make data-driven, interactive, collaborative documents. - -You can use the more than 20 interpreters in zeppelin (for example: spark, hive, Cassandra, Elasticsearch, HBase, etc.) to collect data, clean data, feature extraction, etc. in the data in Hadoop before completing the machine learning model training. The data preprocessing process. - -By integrating submarine in zeppelin, we use zeppelin's data discovery, data analysis and data visualization and collaboration capabilities to visualize the results of algorithm development and parameter adjustment during machine learning model training. - -## Architecture - - - -As shown in the figure above, how the Submarine develops and models the machine learning algorithms through Zeppelin is explained from the system architecture. - -After installing and deploying Hadoop 3.1+ and Zeppelin, submarine will create a fully separate Zeppelin Submarine interpreter Docker container for each user in YARN. This container contains the development and runtime environment for Tensorflow. 
Zeppelin Server connects to the Zeppelin Submarine interpreter Docker container in YARN. allows algorithmic engineers to perform algorithm development and data visualization in Tensorflow's stand-alone environment in Zeppelin Notebook. - -After the algorithm is developed, the algorithm engineer can submit the algorithm directly to the YARN in offline transfer training in Zeppelin, real-time demonstration of model training with Submarine's TensorBoard for each algorithm engineer. - -You can not only complete the model training of the algorithm, but you can also use the more than twenty interpreters in Zeppelin. Complete the data preprocessing of the model, For example, you can perform data extraction, filtering, and feature extraction through the Spark interpreter in Zeppelin in the Algorithm Note. - -In the future, you can also use Zeppelin's upcoming Workflow workflow orchestration service. You can complete Spark, Hive data processing and Tensorflow model training in one Note. It is organized into a workflow through visualization, etc., and the scheduling of jobs is performed in the production environment. - -## Overview - - - -As shown in the figure above, from the internal implementation, how Submarine combines Zeppelin's machine learning algorithm development and model training. - -1. The algorithm engineer created a Tensorflow notebook (left image) in Zeppelin by using Submarine interpreter. - - It is important to note that you need to complete the development of the entire algorithm in a Note. - -2. You can use Spark for data preprocessing in some of the paragraphs in Note. - -3. 
Use Python for algorithm development and debugging of Tensorflow in other paragraphs of notebook, Submarine creates a Zeppelin Submarine Interpreter Docker Container for you in YARN, which contains the following features and services: - - + **Shell Command line tool**:Allows you to view the system environment in the Zeppelin Submarine Interpreter Docker Container, Install the extension tools you need or the Python dependencies. - + **Kerberos lib**:Allows you to perform kerberos authentication and access to Hadoop clusters with Kerberos authentication enabled. - + **Tensorflow environment**:Allows you to develop tensorflow algorithm code. - + **Python environment**:Allows you to develop tensorflow code. - + Complete a complete algorithm development with a Note in Zeppelin. If this algorithm contains multiple modules, You can write different algorithm modules in multiple paragraphs in Note. The title of each paragraph is the name of the algorithm module. The content of the paragraph is the code content of this algorithm module. - + **HDFS Client**:Zeppelin Submarine Interpreter will automatically submit the algorithm code you wrote in Note to HDFS. - - **Submarine interpreter Docker Image** It is Submarine that provides you with an image file that supports Tensorflow (CPU and GPU versions). -And installed the algorithm library commonly used by Python. -You can also install other development dependencies you need on top of the base image provided by Submarine. - -4. When you complete the development of the algorithm module, You can do this by creating a new paragraph in Note and typing `%submarine dashboard`. Zeppelin will create a Submarine Dashboard. The machine learning algorithm written in this Note can be submitted to YARN as a JOB by selecting the `JOB RUN` command option in the Control Panel. 
Create a Tensorflow Model Training Docker Container, The container contains the following sections: - - + Tensorflow environment - + HDFS Client Will automatically download the algorithm file Mount from HDFS into the container for distributed model training. Mount the algorithm file to the Work Dir path of the container. - - **Submarine Tensorflow Docker Image** There is Submarine that provides you with an image file that supports Tensorflow (CPU and GPU versions). And installed the algorithm library commonly used by Python. You can also install other development dependencies you need on top of the base image provided by Submarine. - - - - - - - - - - - - - - - - - - - - - - -
    NameClassDescription
    %submarineSubmarineInterpreterProvides interpreter for Apache Submarine dashboard
    %submarine.shSubmarineShellInterpreterProvides interpreter for Apache Submarine shell
    %submarine.pythonPySubmarineInterpreterProvides interpreter for Apache Submarine python
    - -### Submarine shell - -After creating a Note with Submarine Interpreter in Zeppelin, You can add a paragraph to Note if you need it. Using the %submarine.sh identifier, you can use the Shell command to perform various operations on the Submarine Interpreter Docker Container, such as: - -1. View the Pythone version in the Container -2. View the system environment of the Container -3. Install the dependencies you need yourself -4. Kerberos certification with kinit -5. Use Hadoop in Container for HDFS operations, etc. - -### Submarine python - -You can add one or more paragraphs to Note. Write the algorithm module for Tensorflow in Python using the `%submarine.python` identifier. - -### Submarine Dashboard - -After writing the Tensorflow algorithm by using `%submarine.python`, You can add a paragraph to Note. Enter the %submarine dashboard and execute it. Zeppelin will create a Submarine Dashboard. - - - -With Submarine Dashboard you can do all the operational control of Submarine, for example: - -1. **Usage**:Display Submarine's command description to help developers locate problems. - -2. **Refresh**:Zeppelin will erase all your input in the Dashboard. - -3. **Tensorboard**:You will be redirected to the Tensorboard WEB system created by Submarine for each user. With Tensorboard you can view the real-time status of the Tensorflow model training in real time. - -4. **Command** - - + **JOB RUN**:Selecting `JOB RUN` will display the parameter input interface for submitting JOB. - - - - - - - - - - - - - - - - - - - - - - -
    NameDescription
    Checkpoint Path/td> - Submarine sets up a separate Checkpoint path for each user's Note for Tensorflow training. Saved the training data for this Note history, Used to train the output of model data, Tensorboard uses the data in this path for model presentation. Users cannot modify it. For example: `hdfs://cluster1/...` , The environment variable name for Checkpoint Path is `%checkpoint_path%`, You can use `%checkpoint_path%` instead of the input value in Data Path in `PS Launch Cmd` and `Worker Launch Cmd`.
    Input PathThe user specifies the data data directory of the Tensorflow algorithm. Only HDFS-enabled directories are supported. The environment variable name for Data Path is `%input_path%`, You can use `%input_path%` instead of the input value in Data Path in `PS Launch Cmd` and `Worker Launch Cmd`.
    PS Launch CmdTensorflow Parameter services launch command,例如:`python cifar10_main.py --data-dir=%input_path% --job-dir=%checkpoint_path% --num-gpus=0 ...`
    Worker Launch CmdTensorflow Worker services launch command,例如:`python cifar10_main.py --data-dir=%input_path% --job-dir=%checkpoint_path% --num-gpus=1 ...`
    - - + **JOB STOP** - - You can choose to execute the `JOB STOP` command. Stop a Tensorflow model training task that has been submitted and is running - - + **TENSORBOARD START** - - You can choose to execute the `TENSORBOARD START` command to create your TENSORBOARD Docker Container. - - + **TENSORBOARD STOP** - - You can choose to execute the `TENSORBOARD STOP` command to stop and destroy your TENSORBOARD Docker Container. - -5. **Run Command**:Execute the action command of your choice -6. **Clean Chechkpoint**:Checking this option will clear the data in this Note's Checkpoint Path before each `JOB RUN` execution. - -### Configuration - -Zeppelin Submarine interpreter provides the following properties to customize the Submarine interpreter - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    Attribute nameAttribute valueDescription
    DOCKER_CONTAINER_TIME_ZONEEtc/UTCSet the time zone in the container | -
    DOCKER_HADOOP_HDFS_HOME/hadoop-3.1-0Hadoop path in the following 3 images(SUBMARINE_INTERPRETER_DOCKER_IMAGE、tf.parameter.services.docker.image、tf.worker.services.docker.image) | -
    DOCKER_JAVA_HOME/opt/javaJAVA path in the following 3 images(SUBMARINE_INTERPRETER_DOCKER_IMAGE、tf.parameter.services.docker.image、tf.worker.services.docker.image) | -
    HADOOP_YARN_SUBMARINE_JARPath to the Submarine JAR package in the Hadoop-3.1+ release installed on the Zeppelin server | -
    INTERPRETER_LAUNCH_MODElocal/yarnRun the Submarine interpreter instance in local or YARN local mainly for submarine interpreter development and debugging YARN mode for production environment | -
    SUBMARINE_HADOOP_CONF_DIRSet the HADOOP-CONF path to support multiple Hadoop cluster environments
    SUBMARINE_HADOOP_HOMEHadoop-3.1+ above path installed on the Zeppelin server
    SUBMARINE_HADOOP_KEYTABKeytab file path for a hadoop cluster with kerberos authentication turned on
    SUBMARINE_HADOOP_PRINCIPALPRINCIPAL information for the keytab file of the hadoop cluster with kerberos authentication turned on
    SUBMARINE_INTERPRETER_DOCKER_IMAGEAt INTERPRETER_LAUNCH_MODE=yarn, Submarine uses this image to create a Zeppelin Submarine interpreter container to create an algorithm development environment for the user. | -
    docker.container.networkYARN's Docker network name
    machinelearing.distributed.enableWhether to use the model training of the distributed mode JOB RUN submission
    shell.command.timeout.millisecs60000Execute timeout settings for shell commands in the Submarine interpreter container
    submarine.algorithm.hdfs.pathSave machine-based algorithms developed using Submarine interpreter to HDFS as files
    submarine.yarn.queueroot.defaultSubmarine submits model training YARN queue name
    tf.checkpoint.pathTensorflow checkpoint path, Each user will create a user's checkpoint secondary path using the username under this path. Each algorithm submitted by the user will create a checkpoint three-level path using the note id (the user's Tensorboard uses the checkpoint data in this path for visual display)
    tf.parameter.services.cpuNumber of CPU cores applied to Tensorflow parameter services when Submarine submits model distributed training
    tf.parameter.services.docker.imageSubmarine creates a mirror for Tensorflow parameter services when submitting model distributed training
    tf.parameter.services.gpuGPU cores applied to Tensorflow parameter services when Submarine submits model distributed training
    tf.parameter.services.memory2GMemory resources requested by Tensorflow parameter services when Submarine submits model distributed training
    tf.parameter.services.numNumber of Tensorflow parameter services used by Submarine to submit model distributed training
    tf.tensorboard.enabletrueCreate a separate Tensorboard for each user
    tf.worker.services.cpuSubmarine submits model resources for Tensorflow worker services when submitting model training
    tf.worker.services.docker.imageSubmarine creates a mirror for Tensorflow worker services when submitting model distributed training
    tf.worker.services.gpuSubmarine submits GPU resources for Tensorflow worker services when submitting model training
    tf.worker.services.memorySubmarine submits model resources for Tensorflow worker services when submitting model training
    tf.worker.services.numNumber of Tensorflow worker services used by Submarine to submit model distributed training
    yarn.webapp.http.addresshttp://hadoop:8088YARN web ui address
    zeppelin.interpreter.rpc.portRange29914You need to export this port in the SUBMARINE_INTERPRETER_DOCKER_IMAGE configuration image. RPC communication for Zeppelin Server and Submarine interpreter containers
    zeppelin.ipython.grpc.message_size33554432Message size setting for IPython grpc in Submarine interpreter container
    zeppelin.ipython.launch.timeout30000IPython execution timeout setting in Submarine interpreter container
    zeppelin.pythonpythonExecution path of python in Submarine interpreter container
    zeppelin.python.maxResult10000The maximum number of python execution results returned from the Submarine interpreter container
    zeppelin.python.useIPythonfalseIPython is currently not supported and must be false
    zeppelin.submarine.auth.typesimple/kerberosHas Hadoop turned on kerberos authentication?
    - -### Docker images - -The docker images file is stored in the `zeppelin/scripts/docker/submarine` directory. - -1. submarine interpreter cpu version - -2. submarine interpreter gpu version - -3. tensorflow 1.10 & hadoop 3.1.2 cpu version - -4. tensorflow 1.10 & hadoop 3.1.2 gpu version - - -## Change Log - -**0.1.0** _(Zeppelin 0.9.0)_ : - -* Support distributed or standolone tensorflow model training. -* Support submarine interpreter running local. -* Support submarine interpreter running YARN. -* Support Docker on YARN-3.3.0, Plan compatible with lower versions of yarn. - -## Bugs & Contacts - -+ **Submarine interpreter BUG** - If you encounter a bug for this interpreter, please create a sub **JIRA** ticket on [ZEPPELIN-3856](https://issues.apache.org/jira/browse/ZEPPELIN-3856). -+ **Submarine Running problem** - If you encounter a problem for Submarine runtime, please create a **ISSUE** on [apache-hadoop-submarine](https://github.com/apache/submarine/issues). -+ **YARN Submarine BUG** - If you encounter a bug for Yarn Submarine, please create a **JIRA** ticket on [SUBMARINE](https://issues.apache.org/jira/browse/SUBMARINE). - -## Dependency - -1. **YARN** - Submarine currently need to run on Hadoop 3.3+ - - + The hadoop version of the hadoop submarine team git repository is periodically submitted to the code repository of the hadoop. - + The version of the git repository for the hadoop submarine team will be faster than the hadoop version release cycle. - + You can use the hadoop version of the hadoop submarine team git repository. - -2. **Submarine runtime environment** - you can use Submarine-installer https://github.com/apache/submarine, Deploy Docker and network environments. 
- -## More - -**Hadoop Submarine Project**: https://submarine.apache.org/ -**Youtube Submarine Channel**: https://www.youtube.com/channel/UC4JBt8Y8VJ0BW0IM9YpdCyQ \ No newline at end of file diff --git a/docs/quickstart/docker.md b/docs/quickstart/docker.md index 187e9f9c835..17e6229d7bd 100644 --- a/docs/quickstart/docker.md +++ b/docs/quickstart/docker.md @@ -149,7 +149,6 @@ Zeppelin service runs on local server, it auto configure itself to use `DockerIn - Keytab file configured in the interpreter properties - zeppelin.shell.keytab.location - spark.yarn.keytab - - submarine.hadoop.keytab - zeppelin.jdbc.keytab.location - zeppelin.server.kerberos.keytab diff --git a/docs/quickstart/install.md b/docs/quickstart/install.md index 7fb18899b64..aaab1b3e117 100644 --- a/docs/quickstart/install.md +++ b/docs/quickstart/install.md @@ -35,8 +35,8 @@ Apache Zeppelin officially supports and is tested on the following environments: Value - OpenJDK or Oracle JDK - 1.8 (151+)
    (set JAVA_HOME) + Java + JDK 11
    (set JAVA_HOME) OS diff --git a/docs/quickstart/kubernetes.md b/docs/quickstart/kubernetes.md index 1fbcdabd214..23ac5668440 100644 --- a/docs/quickstart/kubernetes.md +++ b/docs/quickstart/kubernetes.md @@ -70,19 +70,19 @@ Next, we will build our `zeppelin-server` image: ```sh cd scripts/docker/zeppelin-server -# Looking at the "./pom.xml" we can see the version is 0.11.1 +# Looking at the "./pom.xml" we can see the version is 0.11.2.3.3.6.0-1 # Let's set the correct version in our Dockerfile: # vi Dockerfile -# ARG version="0.11.1" +# ARG version="0.11.2.3.3.6.0-1" # Once you saved the Dockerfile with the correct version we can build our image: -docker build -t zeppelin-server:0.11.1 -f ./Dockerfile . +docker build -t zeppelin-server:0.11.2.3.3.6.0-1 -f ./Dockerfile . ``` The last image we build is `zeppelin-interpreter`: ```sh cd scripts/docker/zeppelin-interpreter -docker build -t zeppelin-interpreter:0.11.1 -f ./Dockerfile . +docker build -t zeppelin-interpreter:0.11.2.3.3.6.0-1 -f ./Dockerfile . ``` So we should now have the following images: @@ -92,8 +92,8 @@ So we should now have the following images: $ docker images REPOSITORY TAG IMAGE ID CREATED SIZE -zeppelin-interpreter 0.11.1 4f77fe989eed 3 minutes ago 622MB -zeppelin-server 0.11.1 4f77fe989eed 3 minutes ago 622MB +zeppelin-interpreter 0.11.2.3.3.6.0-1 4f77fe989eed 3 minutes ago 622MB +zeppelin-server 0.11.2.3.3.6.0-1 4f77fe989eed 3 minutes ago 622MB zeppelin-distribution latest bd2fb4b321d2 40 minutes ago 1.27GB ``` diff --git a/docs/setup/basics/how_to_build.md b/docs/setup/basics/how_to_build.md index b131ec57d10..8c8cd947f84 100644 --- a/docs/setup/basics/how_to_build.md +++ b/docs/setup/basics/how_to_build.md @@ -123,7 +123,6 @@ Set hadoop major version (default hadoop3). 
Available profiles are ``` --Phadoop2 -Phadoop3 ``` diff --git a/docs/setup/deployment/flink_and_spark_cluster.md b/docs/setup/deployment/flink_and_spark_cluster.md index d395ccab67f..76f9063cf13 100644 --- a/docs/setup/deployment/flink_and_spark_cluster.md +++ b/docs/setup/deployment/flink_and_spark_cluster.md @@ -225,16 +225,16 @@ Building from source is recommended where possible, for simplicity in this tuto To download the Flink Binary use `wget` ```bash -wget "http://mirror.cogentco.com/pub/apache/flink/flink-1.1.3/flink-1.1.3-bin-hadoop24-scala_2.10.tgz" -tar -xzvf flink-1.1.3-bin-hadoop24-scala_2.10.tgz +wget "http://mirror.cogentco.com/pub/apache/flink/flink-1.16.2/flink-1.16.2-bin-scala_2.12.tgz" +tar -xzvf flink-1.16.2-bin-scala_2.12.tgz ``` -This will download Flink 1.1.3, compatible with Hadoop 2.4. You do not have to install Hadoop for this binary to work, but if you are using Hadoop, please change `24` to your appropriate version. +This will download Flink 1.16.2. Start the Flink Cluster. ```bash -flink-1.1.3/bin/start-cluster.sh +flink-1.16.2/bin/start-cluster.sh ``` ###### Building From source @@ -295,12 +295,12 @@ Using binaries is also To download the Spark Binary use `wget` ```bash -wget "http://d3kbcqa49mib13.cloudfront.net/spark-1.6.3-bin-hadoop2.6.tgz" -tar -xzvf spark-1.6.3-bin-hadoop2.6.tgz -mv spark-1.6.3-bin-hadoop2.6 spark +wget "https://dlcdn.apache.org/spark/spark-3.4.1/spark-3.4.1-bin-hadoop3.tgz" +tar -xzvf spark-3.4.1-bin-hadoop3.tgz +mv spark-3.4.1-bin-hadoop3 spark ``` -This will download Spark 1.6.3, compatible with Hadoop 2.6. You do not have to install Hadoop for this binary to work, but if you are using Hadoop, please change `2.6` to your appropriate version. +This will download Spark 3.4.1, compatible with Hadoop 3. You do not have to install Hadoop for this binary to work, but if you are using Hadoop, please change `3` to your appropriate version. 
###### Building From source diff --git a/docs/usage/interpreter/installation.md b/docs/usage/interpreter/installation.md index bc4baf158af..bd888757b1e 100644 --- a/docs/usage/interpreter/installation.md +++ b/docs/usage/interpreter/installation.md @@ -47,31 +47,6 @@ You can get full list of community managed interpreters by running ./bin/install-interpreter.sh --list ``` -#### Install interpreter built with Scala 2.10 -Zeppelin support both Scala 2.10 and 2.11 for several interpreters as below: - - - - - - - - - - - - -
    NameMaven Artifact for Scala 2.10Maven Artifact for Scala 2.11
    sparkorg.apache.zeppelin:zeppelin-spark_2.10:0.11.1org.apache.zeppelin:zeppelin-spark_2.11:0.11.1
    - -#### Install Spark interpreter built with Scala 2.10 - -Spark distribution package has been built with Scala 2.10 until 1.6.2. If you have `SPARK_HOME` set pointing to Spark version earlier than 2.0.0, you need to download Spark interpreter packaged with Scala 2.10. To do so, use follow command: - -```bash -rm -rf ./interpreter/spark -./bin/install-interpreter.sh --name spark --artifact org.apache.zeppelin:zeppelin-spark_2.10:0.11.1 -``` -
    Once you have installed interpreters, you need to restart Zeppelin. And then [create interpreter setting](./overview.html#what-is-zeppelin-interpreter) and [bind it with your notebook](./overview.html#what-is-zeppelin-interpreter-setting). @@ -109,92 +84,87 @@ You can also find the below community managed interpreter list in `conf/interpre alluxio - org.apache.zeppelin:zeppelin-alluxio:0.11.1 + org.apache.zeppelin:zeppelin-alluxio:0.11.2.3.3.6.0-1 Alluxio interpreter angular - org.apache.zeppelin:zeppelin-angular:0.11.1 + org.apache.zeppelin:zeppelin-angular:0.11.2.3.3.6.0-1 HTML and AngularJS view rendering bigquery - org.apache.zeppelin:zeppelin-bigquery:0.11.1 + org.apache.zeppelin:zeppelin-bigquery:0.11.2.3.3.6.0-1 BigQuery interpreter cassandra - org.apache.zeppelin:zeppelin-cassandra:0.11.1 + org.apache.zeppelin:zeppelin-cassandra:0.11.2.3.3.6.0-1 Cassandra interpreter elasticsearch - org.apache.zeppelin:zeppelin-elasticsearch:0.11.1 + org.apache.zeppelin:zeppelin-elasticsearch:0.11.2.3.3.6.0-1 Elasticsearch interpreter file - org.apache.zeppelin:zeppelin-file:0.11.1 + org.apache.zeppelin:zeppelin-file:0.11.2.3.3.6.0-1 HDFS file interpreter flink - org.apache.zeppelin:zeppelin-flink:0.11.1 + org.apache.zeppelin:zeppelin-flink:0.11.2.3.3.6.0-1 Flink interpreter hbase - org.apache.zeppelin:zeppelin-hbase:0.11.1 + org.apache.zeppelin:zeppelin-hbase:0.11.2.3.3.6.0-1 Hbase interpreter groovy - org.apache.zeppelin:zeppelin-groovy:0.11.1 + org.apache.zeppelin:zeppelin-groovy:0.11.2.3.3.6.0-1 Groovy interpreter java - org.apache.zeppelin:zeppelin-java:0.11.1 + org.apache.zeppelin:zeppelin-java:0.11.2.3.3.6.0-1 Java interpreter jdbc - org.apache.zeppelin:zeppelin-jdbc:0.11.1 + org.apache.zeppelin:zeppelin-jdbc:0.11.2.3.3.6.0-1 Jdbc interpreter livy - org.apache.zeppelin:zeppelin-livy:0.11.1 + org.apache.zeppelin:zeppelin-livy:0.11.2.3.3.6.0-1 Livy interpreter md - org.apache.zeppelin:zeppelin-markdown:0.11.1 + org.apache.zeppelin:zeppelin-markdown:0.11.2.3.3.6.0-1 
Markdown support neo4j - org.apache.zeppelin:zeppelin-neo4j:0.11.1 + org.apache.zeppelin:zeppelin-neo4j:0.11.2.3.3.6.0-1 Neo4j interpreter python - org.apache.zeppelin:zeppelin-python:0.11.1 + org.apache.zeppelin:zeppelin-python:0.11.2.3.3.6.0-1 Python interpreter shell - org.apache.zeppelin:zeppelin-shell:0.11.1 + org.apache.zeppelin:zeppelin-shell:0.11.2.3.3.6.0-1 Shell command sparql - org.apache.zeppelin:zeppelin-sparql:0.11.1 + org.apache.zeppelin:zeppelin-sparql:0.11.2.3.3.6.0-1 Sparql interpreter - - submarine - org.apache.zeppelin:zeppelin-submarine:0.11.1 - Submarine interpreter - diff --git a/elasticsearch/pom.xml b/elasticsearch/pom.xml index 6c095dc9695..f15450db001 100644 --- a/elasticsearch/pom.xml +++ b/elasticsearch/pom.xml @@ -23,7 +23,7 @@ zeppelin-interpreter-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../zeppelin-interpreter-parent/pom.xml diff --git a/file/pom.xml b/file/pom.xml index 1932a0a10cf..523d37b4325 100644 --- a/file/pom.xml +++ b/file/pom.xml @@ -23,7 +23,7 @@ zeppelin-interpreter-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../zeppelin-interpreter-parent/pom.xml diff --git a/flink-cmd/pom.xml b/flink-cmd/pom.xml index 98f3059b9d6..cb9b48bbc4e 100644 --- a/flink-cmd/pom.xml +++ b/flink-cmd/pom.xml @@ -23,7 +23,7 @@ zeppelin-interpreter-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../zeppelin-interpreter-parent/pom.xml @@ -44,14 +44,7 @@ org.apache.hadoop - hadoop-common - ${hadoop.version} - provided - - - - org.apache.hadoop - hadoop-yarn-client + hadoop-client-runtime ${hadoop.version} provided diff --git a/flink/flink-scala-2.12/pom.xml b/flink/flink-scala-2.12/pom.xml index be8c596477c..ef2d2830511 100644 --- a/flink/flink-scala-2.12/pom.xml +++ b/flink/flink-scala-2.12/pom.xml @@ -23,7 +23,7 @@ flink-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../pom.xml @@ -38,14 +38,14 @@ 2.12.7 2.12 ${flink.scala.version} - ${hadoop2.7.version} - 2.3.4 - 4.0.0 + 
2.3.7 + 14.0.1 + 10.14.2.0 + 5.3.0 1.15.0 https://archive.apache.org/dist/flink/flink-${flink.version}/flink-${flink.version}-bin-scala_${flink.scala.binary.version}.tgz - @@ -160,10 +160,6 @@ org.apache.hadoop * - - org.eclipse.jetty - * - @@ -289,22 +285,12 @@ ${flink.version} tests test - - - org.apache.hive - hive-exec - - - org.apache.hive - hive-metastore - - - org.apache.hadoop - hadoop-common - ${flink.hadoop.version} + com.google.guava + guava + ${hive.guava.version} provided @@ -314,40 +300,6 @@ - - org.apache.hadoop - hadoop-hdfs - ${flink.hadoop.version} - provided - - - - org.apache.hadoop - hadoop-yarn-common - ${flink.hadoop.version} - provided - - - - org.apache.hadoop - hadoop-yarn-client - ${flink.hadoop.version} - provided - - - - org.apache.hadoop - hadoop-mapreduce-client-core - ${flink.hadoop.version} - provided - - - - org.mockito - mockito-core - test - - org.apache.hive hive-metastore @@ -355,32 +307,156 @@ provided - hadoop-auth - org.apache.hadoop + org.apache.hive + hive-shims + + + javolution + javolution com.google.guava guava - io.netty - netty + com.google.protobuf + protobuf-java - io.netty - netty-all + org.apache.hbase + hbase-client - com.google.protobuf - protobuf-java + commons-lang + commons-lang + + + co.cask.tephra + tephra-api + + + co.cask.tephra + tephra-core + + + co.cask.tephra + tephra-hbase-compat-1.0 + + + commons-cli + commons-cli + + + org.apache.thrift + libfb303 + + + javax.transaction + transaction-api + + + org.apache.orc + orc-core + + + joda-time + joda-time + + + org.apache.logging.log4j + log4j-1.2-api org.apache.logging.log4j log4j-slf4j-impl - jdk.tools - jdk.tools + org.apache.ant + ant + + + com.tdunning + json + + + jline + jline + + + org.eclipse.jetty.aggregate + jetty-all + + + org.eclipse.jetty.orbit + javax.servlet + + + org.apache.logging.log4j + log4j-web + + + io.dropwizard.metrics + metrics-core + + + io.dropwizard.metrics + metrics-jvm + + + io.dropwizard.metrics + metrics-json + + + 
com.github.joshelser + dropwizard-metrics-hadoop-metrics2-reporter + + + + + tomcat + jasper-compiler + + + tomcat + jasper-runtime + + + org.apache.httpcomponents + httpclient + + + org.apache.httpcomponents + httpcore + + + + + commons-codec + commons-codec + + + org.apache.avro + avro + + + net.sf.opencsv + opencsv + + + org.apache.parquet + parquet-hadoop-bundle + + + org.slf4j + slf4j-log4j12 + + + org.apache.derby + derby + + + org.pentaho + pentaho-aggdesigner-algorithm @@ -392,16 +468,16 @@ provided - org.apache.calcite - calcite-core + org.apache.hive + hive-vector-code-gen - org.apache.calcite - calcite-druid + org.apache.hive + hive-llap-tez - org.apache.calcite.avatica - avatica + org.apache.hive + hive-shims commons-codec @@ -412,20 +488,28 @@ commons-httpclient - commons-io - commons-io + org.apache.logging.log4j + log4j-slf4j-impl + + + org.antlr + antlr-runtime - org.apache.logging.log4j - log4j-1.2-api + org.antlr + ST4 - org.apache.logging.log4j - log4j-slf4j-impl + org.apache.ant + ant - org.slf4j - slf4j-api + org.apache.commons + commons-compress + + + org.apache.ivy + ivy org.apache.zookeeper @@ -433,35 +517,63 @@ org.apache.curator - curator-framework + apache-curator org.apache.curator - apache-curator + curator-framework + + + org.codehaus.groovy + groovy-all + + + org.apache.calcite + calcite-core + + + org.apache.calcite + calcite-druid + + + org.apache.calcite.avatica + avatica + + + org.apache.calcite + calcite-avatica com.google.code.gson gson - jline - jline + stax + stax-api com.google.guava guava - io.netty - netty + log4j + log4j - io.netty - netty-all + log4j + apache-log4j-extras - com.google.protobuf - protobuf-java + org.slf4j + slf4j-log4j12 + + + ch.qos.reload4j + reload4j + + + org.slf4j + slf4j-reload4j org.pentaho @@ -471,36 +583,222 @@ - org.apache.hive.hcatalog - hive-webhcat-java-client - ${hive.version} + com.klarna + hiverunner + ${hiverunner.version} test - org.apache.calcite - * + org.apache.hive + hive-serde - 
com.google.guava - guava + org.apache.hive + hive-jdbc - io.netty - netty + org.apache.hive + hive-service + + + org.apache.hive + hive-contrib + + + org.apache.hive + hive-exec + + + org.apache.hive + hive-hcatalog-core + + + org.apache.hive.hcatalog + hive-webhcat-java-client + + + org.apache.tez + tez-common + + + + jdk.tools + jdk.tools + + + hadoop-common + org.apache.hadoop + + + hadoop-auth + org.apache.hadoop + + + hadoop-annotations + org.apache.hadoop + + + hadoop-hdfs + org.apache.hadoop + + + hadoop-mapreduce-client-common + org.apache.hadoop + + + hadoop-mapreduce-client-core + org.apache.hadoop + + + hadoop-yarn-api + org.apache.hadoop + + + hadoop-yarn-client + org.apache.hadoop + + + hadoop-yarn-common + org.apache.hadoop + + + hadoop-yarn-server-common + org.apache.hadoop + + + hadoop-yarn-server-web-proxy + org.apache.hadoop + + + hadoop-shim + org.apache.tez - javax.jms jms + javax.jms + + + org.slf4j + slf4j-log4j12 + + + ch.qos.reload4j + reload4j + + + org.slf4j + slf4j-reload4j org.apache.hive - hive-contrib + hive-service ${hive.version} test + + org.jamon + jamon-runtime + + + org.apache.hive + hive-exec + + + org.apache.hive + hive-metastore + + + com.google.guava + guava + + + + jdk.tools + jdk.tools + + + hadoop-common + org.apache.hadoop + + + hadoop-auth + org.apache.hadoop + + + hadoop-client + org.apache.hadoop + + + hadoop-annotations + org.apache.hadoop + + + hadoop-hdfs + org.apache.hadoop + + + hadoop-mapreduce-client-core + org.apache.hadoop + + + hadoop-yarn-api + org.apache.hadoop + + + hadoop-yarn-common + org.apache.hadoop + + + hadoop-yarn-registry + org.apache.hadoop + + + hadoop-yarn-server-applicationhistoryservice + org.apache.hadoop + + + hadoop-yarn-server-common + org.apache.hadoop + + + hadoop-yarn-server-resourcemanager + org.apache.hadoop + + + hbase-hadoop-compat + org.apache.hbase + + + org.apache.hbase + hbase-client + + + org.apache.hbase + hbase-common + + + org.apache.hbase + hbase-server + + + log4j + log4j 
+ + + org.slf4j + slf4j-log4j12 + + + ch.qos.reload4j + reload4j + + + org.slf4j + slf4j-reload4j + org.pentaho pentaho-aggdesigner-algorithm @@ -513,27 +811,62 @@ hive-hcatalog-core ${hive.version} test - test-jar - jline - jline + org.jamon + jamon-runtime + + + org.apache.hive + hive-exec com.google.guava guava - io.netty - netty + hadoop-common + org.apache.hadoop - io.netty - netty-all + hadoop-archives + org.apache.hadoop - org.apache.logging.log4j - log4j-slf4j-impl + hadoop-annotations + org.apache.hadoop + + + hadoop-hdfs + org.apache.hadoop + + + hadoop-mapreduce-client-core + org.apache.hadoop + + + org.apache.hadoop + hadoop-yarn-server-resourcemanager + + + log4j + log4j + + + log4j + apache-log4j-extras + + + org.slf4j + slf4j-log4j12 + + + ch.qos.reload4j + reload4j + + + org.slf4j + slf4j-reload4j org.pentaho @@ -543,21 +876,91 @@ - net.jodah - concurrentunit - 0.4.6 + org.apache.hive.hcatalog + hive-webhcat-java-client + ${hive.version} test + + + org.apache.hadoop + hadoop-common + + + org.apache.hadoop + hadoop-mapreduce-client-core + + + org.jamon + jamon-runtime + + + jdk.tools + jdk.tools + + + jms + javax.jms + + + log4j + log4j + + + org.slf4j + slf4j-log4j12 + + + ch.qos.reload4j + reload4j + + + org.slf4j + slf4j-reload4j + + + org.pentaho + pentaho-aggdesigner-algorithm + + + - com.klarna - hiverunner - ${hiverunner.version} + org.apache.derby + derby + ${derby.version} + test + + + + org.apache.hive + hive-jdbc + ${hive.version} test - com.google.guava - guava + org.apache.hadoop + hadoop-yarn-server-resourcemanager + + + org.jamon + jamon-runtime + + + log4j + log4j + + + org.slf4j + slf4j-log4j12 + + + ch.qos.reload4j + reload4j + + + org.slf4j + slf4j-reload4j org.pentaho @@ -567,9 +970,27 @@ - org.scalatest - scalatest_${flink.scala.binary.version} - 3.0.8 + org.apache.hadoop + hadoop-client-runtime + provided + + + + org.mockito + mockito-core + test + + + + org.assertj + assertj-core + test + + + + net.jodah + concurrentunit + 
0.4.6 test @@ -578,6 +999,14 @@ slf4j-api + + + com.lmax + disruptor + 3.4.4 + test + + org.junit.jupiter junit-jupiter-engine @@ -904,31 +1333,5 @@ - - - hive2 - - true - - - 2.3.4 - 4.0.0 - - - - hive1 - - 1.2.1 - 3.2.1 - - - - org.apache.hadoop - hadoop-common - 2.7.5 - provided - - - diff --git a/flink/flink-scala-2.12/src/main/java/org/apache/zeppelin/flink/FlinkInterpreter.java b/flink/flink-scala-2.12/src/main/java/org/apache/zeppelin/flink/FlinkInterpreter.java index 439a0e26d47..244d00f49c0 100644 --- a/flink/flink-scala-2.12/src/main/java/org/apache/zeppelin/flink/FlinkInterpreter.java +++ b/flink/flink-scala-2.12/src/main/java/org/apache/zeppelin/flink/FlinkInterpreter.java @@ -22,6 +22,7 @@ import org.apache.flink.configuration.Configuration; import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment; import org.apache.flink.table.api.TableEnvironment; +import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.interpreter.*; import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion; import org.slf4j.Logger; @@ -90,8 +91,8 @@ private FlinkScalaInterpreter loadFlinkScalaInterpreter() throws Exception { Class clazz = Class.forName(innerIntpClassName); return (FlinkScalaInterpreter) - clazz.getConstructor(Properties.class, ClassLoader.class) - .newInstance(getProperties(), flinkScalaClassLoader); + clazz.getConstructor(Properties.class, ClassLoader.class, ZeppelinConfiguration.class) + .newInstance(getProperties(), flinkScalaClassLoader, zConf); } @Override diff --git a/flink/flink-scala-2.12/src/main/resources/interpreter-setting.json b/flink/flink-scala-2.12/src/main/resources/interpreter-setting.json index 9da270aace4..f5968526c7d 100644 --- a/flink/flink-scala-2.12/src/main/resources/interpreter-setting.json +++ b/flink/flink-scala-2.12/src/main/resources/interpreter-setting.json @@ -155,7 +155,7 @@ "zeppelin.flink.hive.version": { "envName": null, "propertyName": null, - "defaultValue": "2.3.4", + 
"defaultValue": "2.3.7", "description": "Hive version that you would like to connect", "type": "string" }, diff --git a/flink/flink-scala-2.12/src/main/scala/org/apache/zeppelin/flink/FlinkScala212Interpreter.scala b/flink/flink-scala-2.12/src/main/scala/org/apache/zeppelin/flink/FlinkScala212Interpreter.scala index b7af4ad3b10..070daf6a2b8 100644 --- a/flink/flink-scala-2.12/src/main/scala/org/apache/zeppelin/flink/FlinkScala212Interpreter.scala +++ b/flink/flink-scala-2.12/src/main/scala/org/apache/zeppelin/flink/FlinkScala212Interpreter.scala @@ -24,13 +24,15 @@ import java.util.Properties import org.apache.zeppelin.interpreter.InterpreterContext import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion +import org.apache.zeppelin.conf.ZeppelinConfiguration import scala.tools.nsc.Settings import scala.tools.nsc.interpreter.{IMain, JPrintWriter} class FlinkScala212Interpreter(override val properties: Properties, - override val flinkScalaClassLoader: ClassLoader) - extends FlinkScalaInterpreter(properties, flinkScalaClassLoader) { + override val flinkScalaClassLoader: ClassLoader, + override val zConf: ZeppelinConfiguration) + extends FlinkScalaInterpreter(properties, flinkScalaClassLoader, zConf) { override def completion(buf: String, cursor: Int, diff --git a/flink/flink-scala-2.12/src/main/scala/org/apache/zeppelin/flink/FlinkScalaInterpreter.scala b/flink/flink-scala-2.12/src/main/scala/org/apache/zeppelin/flink/FlinkScalaInterpreter.scala index c6ce85c9857..c2ceb60360c 100644 --- a/flink/flink-scala-2.12/src/main/scala/org/apache/zeppelin/flink/FlinkScalaInterpreter.scala +++ b/flink/flink-scala-2.12/src/main/scala/org/apache/zeppelin/flink/FlinkScalaInterpreter.scala @@ -44,6 +44,7 @@ import org.apache.flink.table.catalog.hive.HiveCatalog import org.apache.flink.table.functions.{AggregateFunction, ScalarFunction, TableAggregateFunction, TableFunction} import org.apache.flink.table.module.hive.HiveModule import 
org.apache.flink.yarn.cli.FlinkYarnSessionCli +import org.apache.zeppelin.conf.ZeppelinConfiguration import org.apache.zeppelin.dep.DependencyResolver import org.apache.zeppelin.flink.internal.FlinkShell import org.apache.zeppelin.flink.internal.FlinkShell._ @@ -65,7 +66,8 @@ import scala.tools.nsc.interpreter.{Completion, IMain, IR, JPrintWriter, Results * @param properties */ abstract class FlinkScalaInterpreter(val properties: Properties, - val flinkScalaClassLoader: ClassLoader) { + val flinkScalaClassLoader: ClassLoader, + val zConf: ZeppelinConfiguration) { private lazy val LOGGER: Logger = LoggerFactory.getLogger(getClass) @@ -491,7 +493,7 @@ abstract class FlinkScalaInterpreter(val properties: Properties, throw new InterpreterException("HIVE_CONF_DIR is not specified"); } val database = properties.getProperty("zeppelin.flink.hive.database", "default") - val hiveVersion = properties.getProperty("zeppelin.flink.hive.version", "2.3.4") + val hiveVersion = properties.getProperty("zeppelin.flink.hive.version", "2.3.7") val hiveCatalog = new HiveCatalog("hive", database, hiveConfDir.toString, hiveVersion) this.btenv.registerCatalog("hive", hiveCatalog) this.btenv.useCatalog("hive") @@ -798,7 +800,7 @@ abstract class FlinkScalaInterpreter(val properties: Properties, val flinkPackageJars = if (!StringUtils.isBlank(properties.getProperty("flink.execution.packages", ""))) { val packages = properties.getProperty("flink.execution.packages") - val dependencyResolver = new DependencyResolver(System.getProperty("user.home") + "/.m2/repository") + val dependencyResolver = new DependencyResolver(System.getProperty("user.home") + "/.m2/repository", zConf) packages.split(",") .flatMap(e => JavaConversions.asScalaBuffer(dependencyResolver.load(e))) .map(e => e.getAbsolutePath).toSeq diff --git a/flink/flink-scala-2.12/src/test/java/org/apache/zeppelin/flink/FlinkSqlInterpreterTest.java 
b/flink/flink-scala-2.12/src/test/java/org/apache/zeppelin/flink/FlinkSqlInterpreterTest.java index e51042dc14b..29940d46717 100644 --- a/flink/flink-scala-2.12/src/test/java/org/apache/zeppelin/flink/FlinkSqlInterpreterTest.java +++ b/flink/flink-scala-2.12/src/test/java/org/apache/zeppelin/flink/FlinkSqlInterpreterTest.java @@ -90,7 +90,7 @@ protected Properties getFlinkProperties() throws IOException { p.setProperty("zeppelin.flink.enableHive", "true"); p.setProperty("taskmanager.managed.memory.size", "32"); p.setProperty("taskmanager.memory.task.off-heap.size", "80mb"); - p.setProperty("zeppelin.flink.hive.version", "2.3.4"); + p.setProperty("zeppelin.flink.hive.version", "2.3.7"); p.setProperty("zeppelin.pyflink.useIPython", "false"); p.setProperty("local.number-taskmanager", "4"); p.setProperty("zeppelin.python.gatewayserver_address", "127.0.0.1"); diff --git a/flink/flink-shims/pom.xml b/flink/flink-shims/pom.xml index f1d4724436e..9a875b291ad 100644 --- a/flink/flink-shims/pom.xml +++ b/flink/flink-shims/pom.xml @@ -22,7 +22,7 @@ flink-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../pom.xml @@ -66,4 +66,4 @@ - \ No newline at end of file + diff --git a/flink/flink1.15-shims/pom.xml b/flink/flink1.15-shims/pom.xml index 15a063cab62..aafd90b249a 100644 --- a/flink/flink1.15-shims/pom.xml +++ b/flink/flink1.15-shims/pom.xml @@ -21,14 +21,12 @@ flink-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../pom.xml 4.0.0 - org.apache.zeppelin flink1.15-shims - 0.11.1.3.3.6.0-1 jar Zeppelin: Flink1.15 Shims diff --git a/flink/flink1.16-shims/pom.xml b/flink/flink1.16-shims/pom.xml index 30fd5a19fbe..774eb4b3435 100644 --- a/flink/flink1.16-shims/pom.xml +++ b/flink/flink1.16-shims/pom.xml @@ -21,14 +21,12 @@ flink-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../pom.xml 4.0.0 - org.apache.zeppelin flink1.16-shims - 0.11.1.3.3.6.0-1 jar Zeppelin: Flink1.16 Shims diff --git a/flink/flink1.17-shims/pom.xml 
b/flink/flink1.17-shims/pom.xml index fea3a1c63b6..861824b6480 100644 --- a/flink/flink1.17-shims/pom.xml +++ b/flink/flink1.17-shims/pom.xml @@ -21,14 +21,14 @@ flink-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../pom.xml 4.0.0 org.apache.zeppelin flink1.17-shims - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 jar Zeppelin: Flink1.17 Shims diff --git a/flink/pom.xml b/flink/pom.xml index f55f2ede829..765d5b82c18 100644 --- a/flink/pom.xml +++ b/flink/pom.xml @@ -23,7 +23,7 @@ zeppelin-interpreter-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../zeppelin-interpreter-parent/pom.xml diff --git a/groovy/pom.xml b/groovy/pom.xml index b701eef9e19..280efd5cd95 100644 --- a/groovy/pom.xml +++ b/groovy/pom.xml @@ -23,7 +23,7 @@ zeppelin-interpreter-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../zeppelin-interpreter-parent/pom.xml diff --git a/hbase/pom.xml b/hbase/pom.xml index 8310cc30a31..c799bc8a69d 100644 --- a/hbase/pom.xml +++ b/hbase/pom.xml @@ -22,7 +22,7 @@ zeppelin-interpreter-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../zeppelin-interpreter-parent/pom.xml @@ -33,56 +33,15 @@ hbase - 2.4.12 - ${hadoop2.7.version} 1.6.8 - 2.5.0 - 2.12.1 - - - - org.apache.commons - commons-exec - ${commons.exec.version} - org.jruby jruby-complete ${jruby.version} - - org.apache.hadoop - hadoop-yarn-common - ${hbase.hadoop.version} - - - org.apache.hadoop - hadoop-yarn-api - ${hbase.hadoop.version} - - - org.apache.hbase - hbase-client - ${hbase.hbase.version} - - - org.apache.hbase - hbase-annotations - ${hbase.hbase.version} - - - com.google.protobuf - protobuf-java - ${protobuf.version} - - - jline - jline - ${jline.version} - diff --git a/helium-dev/pom.xml b/helium-dev/pom.xml index 24dcfda8a17..242abcb3ab8 100644 --- a/helium-dev/pom.xml +++ b/helium-dev/pom.xml @@ -23,7 +23,7 @@ zeppelin-interpreter-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../zeppelin-interpreter-parent/pom.xml 
diff --git a/influxdb/pom.xml b/influxdb/pom.xml index bde6ad907a3..42bf32a06db 100644 --- a/influxdb/pom.xml +++ b/influxdb/pom.xml @@ -23,7 +23,7 @@ zeppelin-interpreter-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../zeppelin-interpreter-parent/pom.xml diff --git a/java/pom.xml b/java/pom.xml index b446946b9ec..33309a1e3ae 100644 --- a/java/pom.xml +++ b/java/pom.xml @@ -23,7 +23,7 @@ zeppelin-interpreter-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../zeppelin-interpreter-parent/pom.xml diff --git a/jdbc/pom.xml b/jdbc/pom.xml index abf00d88798..e710e179bc6 100644 --- a/jdbc/pom.xml +++ b/jdbc/pom.xml @@ -23,7 +23,7 @@ zeppelin-interpreter-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../zeppelin-interpreter-parent/pom.xml @@ -35,10 +35,15 @@ jdbc 42.7.2 - ${hadoop3.1.version} + ${hadoop3.3.version} 2.2.220 2.0.1 + 3.1.3 + 1.9.1 + + 4.0.0 + 1.0.8 @@ -82,15 +87,20 @@ ${mockrunner.jdbc.version} test - + + org.apache.thrift + libthrift + 0.14.1 + org.apache.hadoop - hadoop-client + hadoop-client-runtime ${hadoop.version} provided + org.apache.hadoop hadoop-common ${hadoop.version} @@ -145,10 +155,23 @@ - org.apache.hive - hive-jdbc - ${hive3.version} - provided + org.apache.hive + hive-exec + ${hive4.version} + provided + + + org.apache.hive + hive-jdbc + ${hive4.version} + provided + + + org.apache.hive + hive-common + ${hive4.version} + provided + org.apache.hadoop @@ -170,20 +193,31 @@ org.apache.hbase hbase-server + + org.apache.hbase + hbase-client + + + org.apache.hbase + hbase-mapreduce + + + org.apache.hadoop + hadoop-yarn-server-resourcemanager + + + hbase-hadoop2-compat + org.apache.hbase + - org.apache.httpcomponents - httpcore - 4.4.1 + org.apache.kyuubi + kyuubi-hive-jdbc-shaded + ${kyuubi.version} provided - - org.apache.httpcomponents - httpclient - 4.5.13 - net.jodah @@ -234,71 +268,10 @@ - - jdbc-hadoop2 - - ${hadoop2.7.version} - - - - org.apache.hadoop - hadoop-common - 
${hadoop-common.version} - - - com.sun.jersey - jersey-core - - - com.sun.jersey - jersey-json - - - com.sun.jersey - jersey-server - - - - javax.servlet - servlet-api - - - org.apache.avro - avro - - - org.apache.jackrabbit - jackrabbit-webdav - - - io.netty - netty - - - commons-httpclient - commons-httpclient - - - org.apache.zookeeper - zookeeper - - - org.eclipse.jgit - org.eclipse.jgit - - - com.jcraft - jsch - - - - - - jdbc-hadoop3 - ${hadoop3.0.version} + ${hadoop3.3.version} @@ -355,6 +328,7 @@ + diff --git a/jdbc/src/main/java/org/apache/zeppelin/jdbc/JDBCInterpreter.java b/jdbc/src/main/java/org/apache/zeppelin/jdbc/JDBCInterpreter.java index f6fed74ad0a..b4cfba25b40 100644 --- a/jdbc/src/main/java/org/apache/zeppelin/jdbc/JDBCInterpreter.java +++ b/jdbc/src/main/java/org/apache/zeppelin/jdbc/JDBCInterpreter.java @@ -35,6 +35,7 @@ import org.apache.zeppelin.interpreter.ZeppelinContext; import org.apache.zeppelin.interpreter.util.SqlSplitter; import org.apache.zeppelin.jdbc.hive.HiveUtils; +import org.apache.zeppelin.jdbc.kyuubi.KyuubiUtils; import org.apache.zeppelin.tabledata.TableDataUtils; import org.apache.zeppelin.util.PropertiesUtil; import org.slf4j.Logger; @@ -763,6 +764,7 @@ private InterpreterResult executeSql(String sql, Statement statement; ResultSet resultSet = null; String paragraphId = context.getParagraphId(); + String noteId = context.getNoteId(); String user = getUser(context); try { @@ -799,7 +801,7 @@ private InterpreterResult executeSql(String sql, // so we need to trim it first in this case. 
sqlToExecute = sqlToExecute.trim(); } - LOGGER.info("Execute sql: " + sqlToExecute); + LOGGER.info("[{}|{}|{}] Execute sql: {}", user, noteId, paragraphId, sqlToExecute); statement = connection.createStatement(); // fetch n+1 rows in order to indicate there's more rows available (for large selects) @@ -824,10 +826,16 @@ private InterpreterResult executeSql(String sql, String jdbcURL = getJDBCConfiguration(user).getProperty().getProperty(URL_KEY); String driver = getJDBCConfiguration(user).getProperty().getProperty(DRIVER_KEY); - if (jdbcURL != null && jdbcURL.startsWith("jdbc:hive2://") - && driver != null && driver.equals("org.apache.hive.jdbc.HiveDriver")) { - HiveUtils.startHiveMonitorThread(statement, context, + if (jdbcURL != null && driver != null) { + if (driver.equals("org.apache.hive.jdbc.HiveDriver") && + jdbcURL.startsWith("jdbc:hive2://")) { + HiveUtils.startHiveMonitorThread(statement, context, Boolean.parseBoolean(getProperty("hive.log.display", "true")), this); + } else if (driver.equals("org.apache.kyuubi.jdbc.KyuubiHiveDriver") && + (jdbcURL.startsWith("jdbc:kyuubi://") || jdbcURL.startsWith("jdbc:hive2://"))) { + KyuubiUtils.startMonitorThread(connection, statement, context, + Boolean.parseBoolean(getProperty("kyuubi.log.display", "true")), this); + } } boolean isResultSetAvailable = statement.execute(sqlToExecute); getJDBCConfiguration(user).setConnectionInDBDriverPoolSuccessful(); diff --git a/jdbc/src/main/java/org/apache/zeppelin/jdbc/SqlCompleter.java b/jdbc/src/main/java/org/apache/zeppelin/jdbc/SqlCompleter.java index d0db613d15b..b41207a5527 100644 --- a/jdbc/src/main/java/org/apache/zeppelin/jdbc/SqlCompleter.java +++ b/jdbc/src/main/java/org/apache/zeppelin/jdbc/SqlCompleter.java @@ -207,8 +207,7 @@ private static void fillColumnNames(String schema, String table, DatabaseMetaDat } } - public static Set getSqlKeywordsCompletions(DatabaseMetaData meta) throws IOException, - SQLException { + public static Set 
getSqlKeywordsCompletions(DatabaseMetaData meta) throws IOException { // Add the default SQL completions String keywords = new BufferedReader(new InputStreamReader( @@ -217,11 +216,11 @@ public static Set getSqlKeywordsCompletions(DatabaseMetaData meta) throw Set completions = new TreeSet<>(); if (null != meta) { - // Add the driver specific SQL completions - String driverSpecificKeywords = - "/" + meta.getDriverName().replace(" ", "-").toLowerCase() + "-sql.keywords"; - logger.info("JDBC DriverName:" + driverSpecificKeywords); try { + // Add the driver specific SQL completions + String driverSpecificKeywords = + "/" + meta.getDriverName().replace(" ", "-").toLowerCase() + "-sql.keywords"; + logger.info("JDBC DriverName: {}", driverSpecificKeywords); if (SqlCompleter.class.getResource(driverSpecificKeywords) != null) { String driverKeywords = new BufferedReader(new InputStreamReader( @@ -229,35 +228,35 @@ public static Set getSqlKeywordsCompletions(DatabaseMetaData meta) throw .readLine(); keywords += "," + driverKeywords.toUpperCase(); } - } catch (Exception e) { + } catch (IOException | SQLException e) { logger.debug("fail to get driver specific SQL completions for " + - driverSpecificKeywords + " : " + e, e); + meta.getClass().getName() + " : " + e, e); } // Add the keywords from the current JDBC connection try { keywords += "," + meta.getSQLKeywords(); - } catch (Exception e) { + } catch (SQLException e) { logger.debug("fail to get SQL key words from database metadata: " + e, e); } try { keywords += "," + meta.getStringFunctions(); - } catch (Exception e) { + } catch (SQLException e) { logger.debug("fail to get string function names from database metadata: " + e, e); } try { keywords += "," + meta.getNumericFunctions(); - } catch (Exception e) { + } catch (SQLException e) { logger.debug("fail to get numeric function names from database metadata: " + e, e); } try { keywords += "," + meta.getSystemFunctions(); - } catch (Exception e) { + } catch (SQLException e) 
{ logger.debug("fail to get system function names from database metadata: " + e, e); } try { keywords += "," + meta.getTimeDateFunctions(); - } catch (Exception e) { + } catch (SQLException e) { logger.debug("fail to get time date function names from database metadata: " + e, e); } diff --git a/jdbc/src/main/java/org/apache/zeppelin/jdbc/kyuubi/BeelineInPlaceUpdateStream.java b/jdbc/src/main/java/org/apache/zeppelin/jdbc/kyuubi/BeelineInPlaceUpdateStream.java new file mode 100644 index 00000000000..4218461373b --- /dev/null +++ b/jdbc/src/main/java/org/apache/zeppelin/jdbc/kyuubi/BeelineInPlaceUpdateStream.java @@ -0,0 +1,118 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.zeppelin.jdbc.kyuubi; + + +import org.apache.hadoop.hive.common.log.InPlaceUpdate; +import org.apache.hadoop.hive.common.log.ProgressMonitor; +import org.apache.kyuubi.jdbc.hive.logs.InPlaceUpdateStream; +import org.apache.kyuubi.shaded.hive.service.rpc.thrift.TJobExecutionStatus; +import org.apache.kyuubi.shaded.hive.service.rpc.thrift.TProgressUpdateResp; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.PrintStream; +import java.util.List; + +public class BeelineInPlaceUpdateStream implements InPlaceUpdateStream { + private static final Logger LOGGER = LoggerFactory.getLogger(BeelineInPlaceUpdateStream.class); + + private InPlaceUpdate inPlaceUpdate; + private EventNotifier notifier; + private long lastUpdateTimestamp; + + public BeelineInPlaceUpdateStream(PrintStream out, + EventNotifier notifier) { + this.inPlaceUpdate = new InPlaceUpdate(out); + this.notifier = notifier; + } + + @Override + public void update(TProgressUpdateResp response) { + if (response == null || response.getStatus().equals(TJobExecutionStatus.NOT_AVAILABLE)) { + /* + we set it to completed if there is nothing the server has to report + for example, DDL statements + */ + notifier.progressBarCompleted(); + } else if (notifier.isOperationLogUpdatedAtLeastOnce()) { + /* + try to render in place update progress bar only if the operations logs is update at + least once + as this will hopefully allow printing the metadata information like query id, + application id + etc. 
have to remove these notifiers when the operation logs get merged into + GetOperationStatus + */ + lastUpdateTimestamp = System.currentTimeMillis(); + LOGGER.info("update progress: {}", response.getProgressedPercentage()); + inPlaceUpdate.render(new ProgressMonitorWrapper(response)); + } + } + + public long getLastUpdateTimestamp() { + return lastUpdateTimestamp; + } + + @Override + public EventNotifier getEventNotifier() { + return notifier; + } + + static class ProgressMonitorWrapper implements ProgressMonitor { + private TProgressUpdateResp response; + + ProgressMonitorWrapper(TProgressUpdateResp response) { + this.response = response; + } + + @Override + public List headers() { + return response.getHeaderNames(); + } + + @Override + public List> rows() { + return response.getRows(); + } + + @Override + public String footerSummary() { + return response.getFooterSummary(); + } + + @Override + public long startTime() { + return response.getStartTime(); + } + + @Override + public String executionStatus() { + throw new UnsupportedOperationException( + "This should never be used for anything. All the required data is " + + "available via other methods" + ); + } + + @Override + public double progressedPercentage() { + return response.getProgressedPercentage(); + } + } +} + diff --git a/jdbc/src/main/java/org/apache/zeppelin/jdbc/kyuubi/KyuubiUtils.java b/jdbc/src/main/java/org/apache/zeppelin/jdbc/kyuubi/KyuubiUtils.java new file mode 100644 index 00000000000..1d10845c46e --- /dev/null +++ b/jdbc/src/main/java/org/apache/zeppelin/jdbc/kyuubi/KyuubiUtils.java @@ -0,0 +1,118 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.zeppelin.jdbc.kyuubi; + +import org.apache.commons.dbcp2.DelegatingConnection; +import org.apache.commons.dbcp2.DelegatingStatement; +import org.apache.commons.lang3.StringUtils; +import org.apache.kyuubi.jdbc.hive.KyuubiConnection; +import org.apache.kyuubi.jdbc.hive.KyuubiStatement; +import org.apache.zeppelin.interpreter.InterpreterContext; +import org.apache.zeppelin.jdbc.JDBCInterpreter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.sql.Connection; +import java.sql.Statement; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * This class include hive specific stuff. + * e.g. Display hive job execution info. + * + */ +public class KyuubiUtils { + + private static final Logger LOGGER = LoggerFactory.getLogger(KyuubiUtils.class); + private static final int DEFAULT_QUERY_PROGRESS_INTERVAL = 1000; + /** + * Display hive job execution info + */ + public static void startMonitorThread(Connection conn, + Statement stmt, + InterpreterContext context, + boolean displayLog, + JDBCInterpreter jdbcInterpreter) { + KyuubiConnection kyuubiConn = (KyuubiConnection) ((DelegatingConnection) + ((DelegatingConnection) conn).getDelegate()).getDelegate(); + KyuubiStatement kyuubiStmt = (KyuubiStatement) + ((DelegatingStatement) ((DelegatingStatement) stmt).getDelegate()).getDelegate(); + // need to use final variable progressBar in thread, so need progressBarTemp here. 
+ final ProgressBar progressBar = new ProgressBar(); + final long queryInterval = Long.parseLong( + jdbcInterpreter.getProperty("zeppelin.jdbc.kyuubi.monitor.query_interval", + DEFAULT_QUERY_PROGRESS_INTERVAL + "")); + Thread thread = new Thread(() -> { + String jobUrlTemplate = jdbcInterpreter.getProperty("zeppelin.jdbc.kyuubi.jobUrl.template"); + boolean jobUrlExtracted = false; + + try { + while (kyuubiStmt.hasMoreLogs() && !kyuubiStmt.isClosed() && !Thread.interrupted()) { + Thread.sleep(queryInterval); + List logs = kyuubiStmt.getExecLog(); + String logsOutput = StringUtils.join(logs, System.lineSeparator()); + LOGGER.debug("Kyuubi job output: {}", logsOutput); + boolean displayLogProperty = context.getBooleanLocalProperty("displayLog", displayLog); + if (displayLogProperty && !StringUtils.isBlank(logsOutput)) { + context.out.write(logsOutput + "\n"); + context.out.flush(); + progressBar.operationLogShowedToUser(); + } + + if (!jobUrlExtracted) { + String appId = kyuubiConn.getEngineId(); + String appUrl = kyuubiConn.getEngineUrl(); + String jobUrl = null; + // prefer to use customized template, and fallback to Kyuubi returned engine url + if (StringUtils.isNotBlank(jobUrlTemplate) && StringUtils.isNotBlank(appId)) { + jobUrl = jobUrlTemplate.replace("{{applicationId}}", appId); + } else if (StringUtils.isNotBlank(appUrl)) { + jobUrl = appUrl; + } + if (jobUrl != null) { + LOGGER.info("Detected Kyuubi engine URL: {}", jobUrl); + Map infos = new HashMap<>(); + infos.put("jobUrl", jobUrl); + infos.put("label", "KYUUBI JOB"); + infos.put("tooltip", "View Application Web UI"); + infos.put("noteId", context.getNoteId()); + infos.put("paraId", context.getParagraphId()); + context.getIntpEventClient().onParaInfosReceived(infos); + jobUrlExtracted = true; + } + } + } + } catch (InterruptedException e) { + LOGGER.warn("Kyuubi monitor thread is interrupted", e); + Thread.currentThread().interrupt(); + } catch (Exception e) { + LOGGER.warn("Fail to monitor 
KyuubiStatement", e); + } + + LOGGER.info("KyuubiMonitor-Thread is finished"); + }); + thread.setName("KyuubiMonitor-Thread"); + thread.setDaemon(true); + thread.start(); + LOGGER.info("Start KyuubiMonitor-Thread for sql: {}", kyuubiStmt); + + progressBar.setInPlaceUpdateStream(kyuubiStmt, context.out); + } +} diff --git a/jdbc/src/main/java/org/apache/zeppelin/jdbc/kyuubi/ProgressBar.java b/jdbc/src/main/java/org/apache/zeppelin/jdbc/kyuubi/ProgressBar.java new file mode 100644 index 00000000000..f80a4b0d094 --- /dev/null +++ b/jdbc/src/main/java/org/apache/zeppelin/jdbc/kyuubi/ProgressBar.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.zeppelin.jdbc.kyuubi; + +import org.apache.kyuubi.jdbc.hive.KyuubiStatement; +import org.apache.kyuubi.jdbc.hive.logs.InPlaceUpdateStream; + +import java.io.OutputStream; +import java.io.PrintStream; + +public class ProgressBar { + private InPlaceUpdateStream.EventNotifier eventNotifier; + private BeelineInPlaceUpdateStream beelineInPlaceUpdateStream; + + public ProgressBar() { + this.eventNotifier = new InPlaceUpdateStream.EventNotifier(); + } + + public void operationLogShowedToUser() { + this.eventNotifier.operationLogShowedToUser(); + } + + public BeelineInPlaceUpdateStream getInPlaceUpdateStream(OutputStream out) { + beelineInPlaceUpdateStream = new BeelineInPlaceUpdateStream( + new PrintStream(out), + eventNotifier + ); + return beelineInPlaceUpdateStream; + } + + public BeelineInPlaceUpdateStream getBeelineInPlaceUpdateStream() { + return beelineInPlaceUpdateStream; + } + + public void setInPlaceUpdateStream(KyuubiStatement kyuubiStmt, OutputStream out){ + kyuubiStmt.setInPlaceUpdateStream(this.getInPlaceUpdateStream(out)); + } +} diff --git a/jdbc/src/main/resources/interpreter-setting.json b/jdbc/src/main/resources/interpreter-setting.json index a723660d6e2..3a22ff0a9b9 100644 --- a/jdbc/src/main/resources/interpreter-setting.json +++ b/jdbc/src/main/resources/interpreter-setting.json @@ -150,6 +150,27 @@ "defaultValue": true, "description": "Set application tag for applications started by hive engines", "type": "checkbox" + }, + "zeppelin.jdbc.kyuubi.timeout.threshold": { + "envName": null, + "propertyName": "zeppelin.jdbc.kyuubi.timeout.threshold", + "defaultValue": "60000", + "description": "Timeout for kyuubi job timeout", + "type": "number" + }, + "zeppelin.jdbc.kyuubi.monitor.query_interval": { + "envName": null, + "propertyName": "zeppelin.jdbc.kyuubi.monitor.query_interval", + "defaultValue": "1000", + "description": "Query interval for kyuubi statement", + "type": "number" + }, + "zeppelin.jdbc.kyuubi.jobUrl.template": { 
+ "envName": null, + "propertyName": "zeppelin.jdbc.kyuubi.jobUrl.template", + "defaultValue": "", + "description": "The Kyuubi engine URL pattern, supports {{applicationId}} as placeholder", + "type": "string" } }, "editor": { diff --git a/k8s/zeppelin-server.yaml b/k8s/zeppelin-server.yaml index cd450ea6eee..754119309cc 100644 --- a/k8s/zeppelin-server.yaml +++ b/k8s/zeppelin-server.yaml @@ -29,7 +29,7 @@ data: # If you have your ingress controller configured to connect to `zeppelin-server` service and have a domain name for it (with wildcard subdomain point the same address), you can replace serviceDomain field with your own domain. SERVICE_DOMAIN: local.zeppelin-project.org:8080 ZEPPELIN_K8S_SPARK_CONTAINER_IMAGE: spark:3.4.1 - ZEPPELIN_K8S_CONTAINER_IMAGE: zeppelin-interpreter:0.11.1 + ZEPPELIN_K8S_CONTAINER_IMAGE: zeppelin-interpreter:0.11.2.3.3.6.0-1 ZEPPELIN_HOME: /opt/zeppelin ZEPPELIN_SERVER_RPC_PORTRANGE: 12320:12320 # default value of 'master' property for spark interpreter. @@ -115,7 +115,7 @@ spec: path: nginx.conf containers: - name: zeppelin-server - image: zeppelin-server:0.11.1 + image: zeppelin-server:0.11.2.3.3.6.0-1 command: ["sh", "-c", "$(ZEPPELIN_HOME)/bin/zeppelin.sh"] lifecycle: preStop: diff --git a/livy/README.md b/livy/README.md index 54311d93448..406af566ee9 100644 --- a/livy/README.md +++ b/livy/README.md @@ -2,14 +2,13 @@ Livy interpreter for Apache Zeppelin # Prerequisities -You can follow the instructions at [Livy Quick Start](http://livy.io/quickstart.html) to set up livy. +You can follow the instructions at [Livy Get Started](https://livy.apache.org/get-started/) to set up livy. # Run Integration Tests -You can add integration test to [LivyInterpreter.java](https://github.com/apache/zeppelin/blob/master/livy/src/test/java/org/apache/zeppelin/livy/LivyInterpreterIT.java) and run the integration test either via the CI environment or locally. 
You need to download livy-0.2 and spark-1.5.2 to local, then use the following script to run the integration test. +You can add integration test to [LivyInterpreter.java](https://github.com/apache/zeppelin/blob/master/livy/src/test/java/org/apache/zeppelin/livy/LivyInterpreterIT.java) and run the integration test either via the CI environment or locally. You need to download livy-0.8 and spark-2.4.8 to local, then use the following script to run the integration test. ```bash -#!/usr/bin/env bash -export LIVY_HOME= -export SPARK_HOME= +export LIVY_HOME= +export SPARK_HOME= ./mvnw clean verify -pl livy -DfailIfNoTests=false ``` diff --git a/livy/pom.xml b/livy/pom.xml index 94557801147..38476949bc1 100644 --- a/livy/pom.xml +++ b/livy/pom.xml @@ -23,7 +23,7 @@ zeppelin-interpreter-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../zeppelin-interpreter-parent/pom.xml @@ -37,111 +37,9 @@ 1.3 4.3.0.RELEASE 1.0.1.RELEASE - - - 0.7.1-incubating - 2.4.8 - ${hadoop2.7.version} - - org.apache.livy - livy-integration-test - ${livy.version} - test - - - org.xerial.snappy - snappy-java - - - org.apache.spark - spark-core_${scala.binary.version} - - - org.apache.spark - spark-sql_${scala.binary.version} - - - org.apache.spark - spark-streaming_${scala.binary.version} - - - org.apache.spark - spark-hive_${scala.binary.version} - - - org.apache.spark - spark-repl_${scala.binary.version} - - - org.apache.spark - spark-yarn_${scala.binary.version} - - - org.apache.hadoop - hadoop-auth - - - org.apache.hadoop - hadoop-common - - - org.apache.hadoop - hadoop-hdfs - - - org.apache.hadoop - hadoop-yarn-client - - - org.apache.hadoop - hadoop-client - - - org.apache.hadoop - hadoop-yarn-server-tests - - - - - org.apache.livy - livy-test-lib - ${livy.version} - test - - - org.xerial.snappy - snappy-java - - - org.apache.spark - spark-core_${scala.binary.version} - - - org.apache.spark - spark-sql_${scala.binary.version} - - - org.apache.spark - 
spark-streaming_${scala.binary.version} - - - org.apache.spark - spark-hive_${scala.binary.version} - - - org.apache.spark - spark-repl_${scala.binary.version} - - - org.apache.spark - spark-yarn_${scala.binary.version} - - - - org.apache.commons commons-exec @@ -176,125 +74,15 @@ - org.apache.hadoop - hadoop-auth - ${hadoop.version} - test - - - - org.apache.hadoop - hadoop-common - ${hadoop.version} - test - - - com.google.guava - guava - - - - - - org.apache.hadoop - hadoop-common - tests - ${hadoop.version} - test - - - com.google.guava - guava - - - - - - org.apache.hadoop - hadoop-hdfs - ${hadoop.version} - test - - - io.netty - netty - - - com.google.guava - guava - - - - - - org.apache.hadoop - hadoop-hdfs - tests - ${hadoop.version} - test - - - io.netty - netty - - - com.google.guava - guava - - - - - - org.apache.hadoop - hadoop-client - ${hadoop.version} - test - - - com.google.guava - guava - - - - - - org.apache.hadoop - hadoop-yarn-client - ${hadoop.version} - test - - - com.google.guava - guava - - - - - - org.apache.hadoop - hadoop-yarn-api - ${hadoop.version} + org.awaitility + awaitility test - - - com.google.guava - guava - - - org.apache.hadoop - hadoop-yarn-server-tests - tests - ${hadoop.version} + org.mockito + mockito-core test - - - com.google.guava - guava - - diff --git a/livy/src/test/java/org/apache/zeppelin/livy/LivyInterpreterIT.java b/livy/src/test/java/org/apache/zeppelin/livy/LivyInterpreterIT.java index 3a3a17c5afb..16c060011f3 100644 --- a/livy/src/test/java/org/apache/zeppelin/livy/LivyInterpreterIT.java +++ b/livy/src/test/java/org/apache/zeppelin/livy/LivyInterpreterIT.java @@ -18,8 +18,6 @@ package org.apache.zeppelin.livy; import org.apache.commons.io.IOUtils; -import org.apache.livy.test.framework.Cluster; -import org.apache.livy.test.framework.Cluster$; import org.apache.zeppelin.interpreter.Interpreter; import org.apache.zeppelin.interpreter.InterpreterContext; import org.apache.zeppelin.interpreter.InterpreterException; 
@@ -31,7 +29,6 @@ import org.apache.zeppelin.interpreter.LazyOpenInterpreter; import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion; import org.apache.zeppelin.user.AuthenticationInfo; -import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import org.slf4j.Logger; @@ -47,45 +44,23 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.mock; -public class LivyInterpreterIT { - private static final Logger LOGGER = LoggerFactory.getLogger(LivyInterpreterIT.class); - private static Cluster cluster; +public class LivyInterpreterIT extends WithLivyServer { + private static final Logger LOG = LoggerFactory.getLogger(LivyInterpreterIT.class); private static Properties properties; @BeforeAll - public static void setUp() { + public static void beforeAll() throws IOException { if (!checkPreCondition()) { return; } - cluster = Cluster$.MODULE$.get(); - LOGGER.info("Starting livy at {}", cluster.livyEndpoint()); + WithLivyServer.beforeAll(); properties = new Properties(); - properties.setProperty("zeppelin.livy.url", cluster.livyEndpoint()); + properties.setProperty("zeppelin.livy.url", LIVY_ENDPOINT); properties.setProperty("zeppelin.livy.session.create_timeout", "120"); properties.setProperty("zeppelin.livy.spark.sql.maxResult", "100"); properties.setProperty("zeppelin.livy.displayAppInfo", "false"); } - @AfterAll - public static void tearDown() { - if (cluster != null) { - LOGGER.info("Shutting down livy at {}", cluster.livyEndpoint()); - cluster.cleanUp(); - } - } - - public static boolean checkPreCondition() { - if (System.getenv("LIVY_HOME") == null) { - LOGGER.warn(("livy integration is skipped because LIVY_HOME is not set")); - return false; - } - if (System.getenv("SPARK_HOME") == null) { - LOGGER.warn(("livy integration is skipped because SPARK_HOME is not set")); - return false; - } - return true; - } - @Test void testSparkInterpreter() throws 
InterpreterException { @@ -139,7 +114,6 @@ private void testRDD(final LivySparkInterpreter sparkInterpreter, boolean isSpar .setAuthenticationInfo(authInfo) .setInterpreterOut(output) .build(); - ; InterpreterResult result = sparkInterpreter.interpret("sc.parallelize(1 to 10).sum()", context); assertEquals(InterpreterResult.Code.SUCCESS, result.code(), result.toString()); @@ -292,11 +266,10 @@ private void testDataFrame(LivySparkInterpreter sparkInterpreter, assertEquals(InterpreterResult.Code.ERROR, result.code()); assertEquals(InterpreterResult.Type.TEXT, result.message().get(0).getType()); - if (!isSpark2) { - assertTrue(result.message().get(0).getData().contains("Table not found")); - } else { - assertTrue(result.message().get(0).getData().contains("Table or view not found")); - } + String errMsg = result.message().get(0).getData(); + assertTrue(errMsg.contains("Table not found") || + errMsg.contains("Table or view not found") || + errMsg.contains("TABLE_OR_VIEW_NOT_FOUND")); // test sql cancel if (sqlInterpreter.getLivyVersion().newerThanEquals(LivyVersion.LIVY_0_3_0)) { @@ -429,7 +402,7 @@ void testPySparkInterpreter() throws InterpreterException { assertTrue(result.message().get(0).getData().contains("[Row(_1=u'hello', _2=20)]") || result.message().get(0).getData().contains("[Row(_1='hello', _2=20)]")); } else { - result = pysparkInterpreter.interpret("df=spark.createDataFrame([(\"hello\",20)])\n" + result = pysparkInterpreter.interpret("df=spark.createDataFrame([('hello',20)])\n" + "df.collect()", context); assertEquals(InterpreterResult.Code.SUCCESS, result.code(), result.toString()); assertEquals(1, result.message().size()); @@ -483,7 +456,7 @@ public void run() { } @Test - void testSparkInterpreterWithDisplayAppInfo_StringWithoutTruncation() + void testSparkInterpreterStringWithoutTruncation() throws InterpreterException { if (!checkPreCondition()) { return; @@ -491,7 +464,6 @@ void testSparkInterpreterWithDisplayAppInfo_StringWithoutTruncation() 
InterpreterGroup interpreterGroup = new InterpreterGroup("group_1"); interpreterGroup.put("session_1", new ArrayList()); Properties properties2 = new Properties(properties); - properties2.put("zeppelin.livy.displayAppInfo", "true"); // enable spark ui because it is disabled by livy integration test properties2.put("livy.spark.ui.enabled", "true"); properties2.put(LivySparkSQLInterpreter.ZEPPELIN_LIVY_SPARK_SQL_FIELD_TRUNCATE, "false"); @@ -517,21 +489,19 @@ void testSparkInterpreterWithDisplayAppInfo_StringWithoutTruncation() try { InterpreterResult result = sparkInterpreter.interpret("sc.version", context); assertEquals(InterpreterResult.Code.SUCCESS, result.code(), result.toString()); - assertEquals(2, result.message().size()); - // check yarn appId and ensure it is not null - assertTrue(result.message().get(1).getData().contains("Spark Application Id: application_")); + assertEquals(1, result.message().size(), result.toString()); // html output String htmlCode = "println(\"%html

    hello

    \")"; result = sparkInterpreter.interpret(htmlCode, context); assertEquals(InterpreterResult.Code.SUCCESS, result.code(), result.toString()); - assertEquals(2, result.message().size()); + assertEquals(1, result.message().size()); assertEquals(InterpreterResult.Type.HTML, result.message().get(0).getType()); // detect spark version result = sparkInterpreter.interpret("sc.version", context); assertEquals(InterpreterResult.Code.SUCCESS, result.code(), result.toString()); - assertEquals(2, result.message().size()); + assertEquals(1, result.message().size()); boolean isSpark2 = isSpark2(sparkInterpreter, context); @@ -550,7 +520,7 @@ void testSparkInterpreterWithDisplayAppInfo_StringWithoutTruncation() + ".toDF(\"col_1\", \"col_2\")\n" + "df.collect()", context); assertEquals(InterpreterResult.Code.SUCCESS, result.code(), result.toString()); - assertEquals(2, result.message().size()); + assertEquals(1, result.message().size()); assertTrue(result.message().get(0).getData() .contains("Array[org.apache.spark.sql.Row] = Array([12characters12characters,20])")); } @@ -671,7 +641,7 @@ void testLivyParams() throws InterpreterException { try { InterpreterResult result = sparkInterpreter.interpret("sc.version\n" + "assert(sc.getConf.get(\"spark.executor.cores\") == \"4\" && " + - "sc.getConf.get(\"spark.app.name\") == \"zeppelin-livy\")" + "sc.getConf.get(\"spark.app.name\") == \"zeppelin-livy\")" , context); assertEquals(InterpreterResult.Code.SUCCESS, result.code(), result.toString()); assertEquals(1, result.message().size()); diff --git a/livy/src/test/java/org/apache/zeppelin/livy/LivySQLInterpreterTest.java b/livy/src/test/java/org/apache/zeppelin/livy/LivySQLInterpreterTest.java index 6294473371f..3461c096a45 100644 --- a/livy/src/test/java/org/apache/zeppelin/livy/LivySQLInterpreterTest.java +++ b/livy/src/test/java/org/apache/zeppelin/livy/LivySQLInterpreterTest.java @@ -35,7 +35,7 @@ class LivySQLInterpreterTest { private LivySparkSQLInterpreter sqlInterpreter; 
@BeforeEach - public void setUp() { + public void beforeEach() { Properties properties = new Properties(); properties.setProperty("zeppelin.livy.url", "http://localhost:8998"); properties.setProperty("zeppelin.livy.session.create_timeout", "120"); diff --git a/livy/src/test/java/org/apache/zeppelin/livy/WithLivyServer.java b/livy/src/test/java/org/apache/zeppelin/livy/WithLivyServer.java new file mode 100644 index 00000000000..d0a77e427ba --- /dev/null +++ b/livy/src/test/java/org/apache/zeppelin/livy/WithLivyServer.java @@ -0,0 +1,143 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.zeppelin.livy; + +import org.apache.commons.io.FileUtils; +import org.apache.commons.lang3.StringUtils; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.Optional; +import java.util.stream.Stream; + +import static java.util.concurrent.TimeUnit.SECONDS; +import static org.awaitility.Awaitility.await; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +abstract class WithLivyServer { + private static final Logger LOG = LoggerFactory.getLogger(WithLivyServer.class); + + private static Optional livy = Optional.empty(); + + protected static final String LIVY_HOME = System.getenv("LIVY_HOME"); + protected static final String SPARK_HOME = System.getenv("SPARK_HOME"); + + protected static final File TMP_DIR = new File(System.getProperty("java.io.tmpdir"), "livy-it"); + protected static final File LIVY_CONF_DIR = new File(TMP_DIR, "conf"); + protected static final File LIVY_LOG_DIR = new File(TMP_DIR, "logs"); + + protected static String LIVY_ENDPOINT; + + public static boolean checkPreCondition() { + if (System.getenv("LIVY_HOME") == null) { + LOG.warn(("livy integration is skipped because LIVY_HOME is not set")); + return false; + } + if (System.getenv("SPARK_HOME") == null) { + LOG.warn(("livy integration is skipped because SPARK_HOME is not set")); + return false; + } + return true; + } + + @BeforeAll + public static void beforeAll() throws IOException { + if (!checkPreCondition()) { + return; + } + assertFalse(livy.isPresent()); + if (TMP_DIR.exists()) { + FileUtils.deleteQuietly(TMP_DIR); + } + assertTrue(LIVY_CONF_DIR.mkdirs()); + assertTrue(LIVY_LOG_DIR.mkdirs()); + Files.copy( + Paths.get(LIVY_HOME, "conf", 
"log4j.properties.template"), + LIVY_CONF_DIR.toPath().resolve("log4j.properties")); + + LOG.info("SPARK_HOME: {}", SPARK_HOME); + LOG.info("LIVY_HOME: {}", LIVY_HOME); + LOG.info("LIVY_CONF_DIR: {}", LIVY_CONF_DIR.getAbsolutePath()); + LOG.info("LIVY_LOG_DIR: {}", LIVY_LOG_DIR.getAbsolutePath()); + + File logFile = new File(TMP_DIR, "output.log"); + assertTrue(logFile.createNewFile()); + LOG.info("Redirect Livy's log to {}", logFile.getAbsolutePath()); + + ProcessBuilder pb = new ProcessBuilder(LIVY_HOME + "/bin/livy-server") + .directory(TMP_DIR) + .redirectErrorStream(true) + .redirectOutput(ProcessBuilder.Redirect.appendTo(logFile)); + + pb.environment().put("JAVA_HOME", System.getProperty("java.home")); + pb.environment().put("LIVY_CONF_DIR", LIVY_CONF_DIR.getAbsolutePath()); + pb.environment().put("LIVY_LOG_DIR", LIVY_LOG_DIR.getAbsolutePath()); + pb.environment().put("SPARK_LOCAL_IP", "127.0.0.1"); + Process livyProc = pb.start(); + + await().atMost(30, SECONDS).pollInterval(2, SECONDS).until(() -> { + try { + int exitCode = livyProc.exitValue(); + throw new IOException("Child process exited unexpectedly (exit code " + exitCode + ")"); + } catch (IllegalThreadStateException ignore) { + // Process does not exit, try again. 
+ } + try (Stream lines = Files.lines(logFile.toPath(), StandardCharsets.UTF_8)) { + // An example of bootstrap log: + // 24/03/24 05:51:38 INFO WebServer: Starting server on http://cheng-pan-mbp.lan:8998 + Optional started = + lines.filter(line -> line.contains("Starting server on ")).findFirst(); + started.ifPresent(line -> + LIVY_ENDPOINT = StringUtils.substringAfter(line, "Starting server on ").trim()); + return started.isPresent(); + } + }); + + LOG.info("Livy Server is started at {}", LIVY_ENDPOINT); + livy = Optional.of(livyProc); + } + + @AfterAll + public static void afterAll() { + livy.filter(Process::isAlive).ifPresent(proc -> { + try { + LOG.info("Stopping the Livy Server running at {}", LIVY_ENDPOINT); + proc.destroy(); + if (!proc.waitFor(10, SECONDS)) { + LOG.warn("Forcibly stopping the Livy Server running at {}", LIVY_ENDPOINT); + proc.destroyForcibly(); + assertFalse(proc.isAlive()); + } + } catch (InterruptedException ignore) { + if (proc.isAlive()) { + LOG.warn("Forcibly stopping the Livy Server running at {}", LIVY_ENDPOINT); + proc.destroyForcibly(); + assertFalse(proc.isAlive()); + } + } + }); + } +} diff --git a/markdown/pom.xml b/markdown/pom.xml index d0dd8c9b188..51d0fae694d 100644 --- a/markdown/pom.xml +++ b/markdown/pom.xml @@ -23,7 +23,7 @@ zeppelin-interpreter-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../zeppelin-interpreter-parent/pom.xml diff --git a/markdown/src/main/java/org/apache/zeppelin/markdown/FlexmarkParser.java b/markdown/src/main/java/org/apache/zeppelin/markdown/FlexmarkParser.java index 911a094bfc7..82c5b9d3541 100644 --- a/markdown/src/main/java/org/apache/zeppelin/markdown/FlexmarkParser.java +++ b/markdown/src/main/java/org/apache/zeppelin/markdown/FlexmarkParser.java @@ -44,8 +44,7 @@ public class FlexmarkParser implements MarkdownParser { private Parser parser; private HtmlRenderer renderer; - public FlexmarkParser() { - ZeppelinConfiguration zConf = ZeppelinConfiguration.create(); + public 
FlexmarkParser(ZeppelinConfiguration zConf) { MutableDataSet options = new MutableDataSet(); options.set(Parser.EXTENSIONS, Arrays.asList(StrikethroughExtension.create(), TablesExtension.create(), diff --git a/markdown/src/main/java/org/apache/zeppelin/markdown/Markdown.java b/markdown/src/main/java/org/apache/zeppelin/markdown/Markdown.java index d0f896a43c9..46733e43a0d 100644 --- a/markdown/src/main/java/org/apache/zeppelin/markdown/Markdown.java +++ b/markdown/src/main/java/org/apache/zeppelin/markdown/Markdown.java @@ -70,11 +70,11 @@ public Markdown(Properties property) { super(property); } - public static MarkdownParser createMarkdownParser(String parserType) { + public MarkdownParser createMarkdownParser(String parserType) { LOGGER.debug("Creating {} markdown interpreter", parserType); if (MarkdownParserType.FLEXMARK.toString().equals(parserType)) { - return new FlexmarkParser(); + return new FlexmarkParser(zConf); } else { // default parser return new Markdown4jParser(); diff --git a/markdown/src/test/java/org/apache/zeppelin/markdown/FlexmarkParserTest.java b/markdown/src/test/java/org/apache/zeppelin/markdown/FlexmarkParserTest.java index ef9327e89e1..01540b09c4c 100644 --- a/markdown/src/test/java/org/apache/zeppelin/markdown/FlexmarkParserTest.java +++ b/markdown/src/test/java/org/apache/zeppelin/markdown/FlexmarkParserTest.java @@ -17,6 +17,7 @@ package org.apache.zeppelin.markdown; +import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.interpreter.InterpreterResult; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; @@ -43,6 +44,7 @@ public void setUp() throws Exception { Properties props = new Properties(); props.put(Markdown.MARKDOWN_PARSER_TYPE, Markdown.PARSER_TYPE_FLEXMARK); md = new Markdown(props); + md.setZeppelinConfiguration(ZeppelinConfiguration.load()); md.open(); } diff --git a/mongodb/pom.xml b/mongodb/pom.xml index 79f6bdb1c4e..d4f5d831745 100644 --- a/mongodb/pom.xml +++ 
b/mongodb/pom.xml @@ -23,7 +23,7 @@ zeppelin-interpreter-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../zeppelin-interpreter-parent/pom.xml diff --git a/neo4j/pom.xml b/neo4j/pom.xml index 206e5810076..2cc2656cdc0 100644 --- a/neo4j/pom.xml +++ b/neo4j/pom.xml @@ -23,7 +23,7 @@ zeppelin-interpreter-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../zeppelin-interpreter-parent/pom.xml diff --git a/pom.xml b/pom.xml index cb52d39770b..ce9f1226e41 100644 --- a/pom.xml +++ b/pom.xml @@ -24,7 +24,7 @@ org.apache.zeppelin zeppelin pom - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 Zeppelin Zeppelin project https://zeppelin.apache.org @@ -50,47 +50,27 @@ 2013 - build-tools zeppelin-interpreter-parent zeppelin-interpreter zeppelin-interpreter-shaded zeppelin-zengine - rlang - zeppelin-jupyter-interpreter - zeppelin-jupyter-interpreter-shaded - groovy - spark - spark-submit - submarine + python markdown - mongodb + spark angular shell livy - hbase jdbc - file - flink - flink-cmd - influxdb - python - cassandra - elasticsearch - bigquery - alluxio - neo4j - java - sparql zeppelin-common zeppelin-client zeppelin-client-examples zeppelin-web zeppelin-server - zeppelin-jupyter zeppelin-plugins zeppelin-distribution + zeppelin-test @@ -101,8 +81,8 @@ ${java.version} ${java.version} - ${scala.2.11.version} - 2.11 + ${scala.2.12.version} + 2.12 2.11.12 2.12.17 3.2.15 @@ -116,6 +96,7 @@ 1.7.35 1.2.25 + 2.23.1 0.13.0 0.62.2 2.8.9 @@ -125,37 +106,32 @@ 4.4.1 4.5.13 4.0.2 - 1.21 - 3.12.0 + 1.26.1 + 3.14.0 1.10.0 - 2.8.0 + 2.10.1 1.3 - 1.14 - 2.7 + 1.16.1 + 2.15.1 3.2.2 1.4 - 1.10.0 + 1.13.0 1.70 3.6.3 4.1.14 1.6.0 + 3.0.2 - 2.7.7 - 3.0.3 - 3.1.3 3.2.4 - 3.3.6.3.3.6.0-1 - ${hadoop2.7.version} + 3.3.6 + ${hadoop3.3.version} provided - hadoop-client - hadoop-yarn-api - hadoop-client 2.3.2 1.5.4 1.16.1 3.21.7 - 1.51.0 + 1.55.1 2.14.0 @@ -179,7 +155,6 @@ 1.6.0 3.0.0-M3 1.6.0 - 2.17 4.0.0 1.6 3.2.0 @@ -195,7 +170,7 @@ 2.0.0 1.11.2 3.2.1 - 2.22.2 + 3.2.2 
1.4.1.Final 1.19.0 @@ -242,914 +217,171 @@
    - ch.qos.reload4j - reload4j - ${reload4j.version} - - - - - org.slf4j - jcl-over-slf4j - ${slf4j.version} - - - - org.apache.thrift - libthrift - ${libthrift.version} - - - javax.annotation - javax.annotation-api - - - - - - org.apache.httpcomponents - httpcore - ${httpcomponents.core.version} - - - - org.apache.httpcomponents - httpclient - ${httpcomponents.client.version} - - - - commons-logging - commons-logging - - - - - - org.apache.httpcomponents - httpasyncclient - ${httpcomponents.asyncclient.version} - - - - commons-logging - commons-logging - - - - - - org.apache.commons - commons-lang3 - ${commons.lang3.version} - - - - org.apache.commons - commons-text - ${commons.text.version} - - - - org.apache.commons - commons-exec - ${commons.exec.version} - - - - com.google.code.gson - gson - ${gson.version} - - - - org.json - json - ${org-json.version} - - - - org.danilopianini - gson-extras - ${gson-extras.version} - - - - org.apache.commons - commons-configuration2 - ${commons.configuration2.version} - - - - - commons-lang - commons-lang - 2.6 - - - - commons-codec - commons-codec - ${commons.codec.version} - - - - commons-io - commons-io - ${commons.io.version} - - - - commons-collections - commons-collections - ${commons.collections.version} - - - - commons-cli - commons-cli - ${commons.cli.version} - - - - - org.apache.shiro - shiro-core - ${shiro.version} - - - org.apache.shiro - shiro-web - ${shiro.version} - - - org.apache.shiro - shiro-config-core - ${shiro.version} - - - - org.bouncycastle - bcpkix-jdk15on - ${bouncycastle.version} - - - - org.codehaus.jettison - jettison - ${jettison.version} - - - - org.apache.hadoop - ${hadoop-client-api.artifact} - ${hadoop.version} - ${hadoop.deps.scope} - - - org.apache.zookeeper - zookeeper - - - org.apache.hadoop - hadoop-common - - - com.sun.jersey - jersey-core - - - com.sun.jersey - jersey-json - - - com.sun.jersey - jersey-client - - - com.sun.jersey - jersey-server - - - javax.servlet - servlet-api - - 
- org.apache.avro - avro - - - org.apache.jackrabbit - jackrabbit-webdav - - - io.netty - netty - - - io.netty - netty-all - - - commons-httpclient - commons-httpclient - - - org.eclipse.jgit - org.eclipse.jgit - - - com.jcraft - jsch - - - org.apache.commons - commons-compress - - - xml-apis - xml-apis - - - xerces - xercesImpl - - - com.google.guava - guava - - - com.google.code.findbugs - jsr305 - - - org.apache.commons - commons-math3 - - - com.fasterxml.jackson.core - jackson-annotations - - - com.nimbusds - nimbus-jose-jwt - - - org.eclipse.jetty - jetty-xml - - - org.eclipse.jetty - jetty-servlet - - - org.eclipse.jetty - jetty-util - - - commons-beanutils - commons-beanutils - - - org.apache.commons - commons-configuration2 - - - commons-beanutils - commons-beanutils-core - - - org.eclipse.jetty - jetty-webapp - - - com.fasterxml.jackson.module - jackson-module-jaxb-annotations - - - com.fasterxml.jackson.core - jackson-core - - - com.fasterxml.jackson.core - jackson-databind - - - - commons-logging - commons-logging - - - - - - org.apache.hadoop - hadoop-yarn-common - ${hadoop.version} - ${hadoop.deps.scope} - - - asm - asm - - - org.ow2.asm - asm - - - org.jboss.netty - netty - - - javax.servlet - servlet-api - - - commons-logging - commons-logging - - - com.sun.jersey - * - - - com.sun.jersey.jersey-test-framework - * - - - com.sun.jersey.contribs - * - - - com.google.guava - guava - - - org.apache.commons - commons-compress - - - - - - org.apache.hadoop - hadoop-yarn-client - ${hadoop.version} - ${hadoop.deps.scope} - - - javax.servlet - servlet-api - - - org.apache.avro - avro - - - org.apache.jackrabbit - jackrabbit-webdav - - - io.netty - netty - - - commons-httpclient - commons-httpclient - - - org.eclipse.jgit - org.eclipse.jgit - - - com.jcraft - jsch - - - org.apache.commons - commons-compress - - - xml-apis - xml-apis - - - xerces - xercesImpl - - - org.codehaus.jackson - jackson-mapper-asl - - - org.codehaus.jackson - jackson-core-asl - - - 
com.google.guava - guava - - - com.google.code.findbugs - jsr305 - - - org.apache.commons - commons-math3 - - - - commons-logging - commons-logging - - - log4j - log4j - - - - - - org.apache.hadoop - hadoop-yarn-api - ${hadoop.version} - ${hadoop.deps.scope} - - - javax.servlet - servlet-api - - - org.apache.avro - avro - - - org.apache.jackrabbit - jackrabbit-webdav - - - io.netty - netty - - - commons-httpclient - commons-httpclient - - - org.eclipse.jgit - org.eclipse.jgit - - - com.jcraft - jsch - - - org.apache.commons - commons-compress - - - xml-apis - xml-apis - - - xerces - xercesImpl - - - org.codehaus.jackson - jackson-mapper-asl - - - org.codehaus.jackson - jackson-core-asl - - - com.google.guava - guava - - - com.google.code.findbugs - jsr305 - - - org.apache.commons - commons-math3 - - - - commons-logging - commons-logging - - - - - - - - org.apache.hadoop - hadoop-yarn-server-tests - ${hadoop.version} - tests - test - - - org.apache.hadoop - hadoop-yarn-common - - - com.sun.jersey - jersey-core - - - com.sun.jersey - jersey-client - - - com.sun.jersey - jersey-server - - - javax.servlet - servlet-api - - - org.apache.avro - avro - - - org.apache.jackrabbit - jackrabbit-webdav - - - io.netty - netty - - - commons-httpclient - commons-httpclient - - - org.eclipse.jgit - org.eclipse.jgit - - - com.jcraft - jsch - - - org.apache.commons - commons-compress - - - xml-apis - xml-apis - - - xerces - xercesImpl - - - org.codehaus.jackson - jackson-core-asl - - - org.codehaus.jackson - jackson-jaxrs - - - org.codehaus.jackson - jackson-xc - - - org.codehaus.jackson - jackson-mapper-asl - - - com.google.guava - guava - - - javax.xml.bind - jaxb-api - - - com.fasterxml.jackson.core - jackson-core - - - org.eclipse.jetty - jetty-util - - - com.zaxxer - HikariCP-java7 - - - com.fasterxml.jackson.core - jackson-annotations - - - com.fasterxml.jackson.module - jackson-module-jaxb-annotations - - - - commons-logging - commons-logging - - - io.dropwizard.metrics - 
metrics-core - - - com.google.guava - guava - - - com.fasterxml.jackson.core - jackson-databind - - - - - - org.apache.hadoop - hadoop-common - ${hadoop.version} - ${hadoop.deps.scope} - - - com.sun.jersey - jersey-core - - - com.sun.jersey - jersey-json - - - com.sun.jersey - jersey-client - - - com.sun.jersey - jersey-server - - - javax.servlet - servlet-api - - - org.apache.avro - avro - - - org.apache.jackrabbit - jackrabbit-webdav - - - io.netty - netty - - - commons-httpclient - commons-httpclient - - - org.eclipse.jgit - org.eclipse.jgit - - - com.jcraft - jsch - - - org.apache.commons - commons-compress - - - xml-apis - xml-apis - - - xerces - xercesImpl - - - org.codehaus.jackson - jackson-mapper-asl - - - org.codehaus.jackson - jackson-core-asl - - - com.google.guava - guava - - - com.google.code.findbugs - jsr305 - - - org.apache.commons - commons-math3 - - - commons-beanutils - commons-beanutils - - - commons-beanutils - commons-beanutils-core - - - org.apache.commons - commons-configuration2 - - - org.apache.zookeeper - zookeeper - - - org.eclipse.jetty - jetty-servlet - - - org.eclipse.jetty - jetty-util - - - org.eclipse.jetty - jetty-webapp - - - org.eclipse.jetty - jetty-server - - - com.nimbusds - nimbus-jose-jwt - - - com.fasterxml.jackson.core - jackson-databind - - - - commons-logging - commons-logging - - - org.ow2.asm - asm - - - com.jamesmurty.utils - java-xmlbuilder - - - - - - org.apache.hadoop - hadoop-common - ${hadoop.version} - tests - test - - - com.sun.jersey - jersey-core - - - com.sun.jersey - jersey-json - - - com.sun.jersey - jersey-client - - - com.sun.jersey - jersey-server - - - javax.servlet - servlet-api - - - org.apache.avro - avro - - - org.apache.jackrabbit - jackrabbit-webdav - - - io.netty - netty - - - commons-httpclient - commons-httpclient - - - org.eclipse.jgit - org.eclipse.jgit - - - com.jcraft - jsch - - - org.apache.commons - commons-compress - - - xml-apis - xml-apis - - - xerces - xercesImpl - - - 
org.codehaus.jackson - jackson-mapper-asl - - - org.codehaus.jackson - jackson-core-asl - - - com.google.guava - guava - - - com.google.code.findbugs - jsr305 - - - org.apache.commons - commons-math3 - - - commons-beanutils - commons-beanutils - - - org.apache.commons - commons-configuration2 - - - org.apache.zookeeper - zookeeper - - - org.eclipse.jetty - jetty-servlet - - - org.eclipse.jetty - jetty-util - - - org.eclipse.jetty - jetty-webapp - - - org.eclipse.jetty - jetty-server - - - com.nimbusds - nimbus-jose-jwt - + ch.qos.reload4j + reload4j + ${reload4j.version} + + + + + org.slf4j + jcl-over-slf4j + ${slf4j.version} + + + + org.apache.thrift + libthrift + ${libthrift.version} + - com.fasterxml.jackson.core - jackson-databind + javax.annotation + javax.annotation-api + + + + + org.apache.httpcomponents + httpcore + ${httpcomponents.core.version} + + + + org.apache.httpcomponents + httpclient + ${httpcomponents.client.version} + commons-logging commons-logging + + + + + org.apache.httpcomponents + httpasyncclient + ${httpcomponents.asyncclient.version} + + - log4j - log4j - - - org.slf4j - slf4j-log4j12 - - - org.ow2.asm - asm + commons-logging + commons-logging + + org.apache.commons + commons-lang3 + ${commons.lang3.version} + + + + org.apache.commons + commons-text + ${commons.text.version} + + + + org.apache.commons + commons-exec + ${commons.exec.version} + + + + com.google.code.gson + gson + ${gson.version} + + + + org.json + json + ${org-json.version} + + + + org.danilopianini + gson-extras + ${gson-extras.version} + + + + org.apache.commons + commons-configuration2 + ${commons.configuration2.version} + + + + commons-codec + commons-codec + ${commons.codec.version} + + + + commons-io + commons-io + ${commons.io.version} + + + + commons-collections + commons-collections + ${commons.collections.version} + + + + commons-cli + commons-cli + ${commons.cli.version} + + + + + com.google.code.findbugs + jsr305 + ${findbugs.jsr305.version} + + + + + 
org.apache.shiro + shiro-core + ${shiro.version} + + + org.apache.shiro + shiro-web + ${shiro.version} + + + org.apache.shiro + shiro-config-core + ${shiro.version} + + + + org.bouncycastle + bcpkix-jdk15on + ${bouncycastle.version} + + + + org.codehaus.jettison + jettison + ${jettison.version} + + + + org.apache.hadoop + hadoop-client-api + ${hadoop.version} + ${hadoop.deps.scope} + + org.junit.jupiter @@ -1209,196 +441,22 @@ org.apache.hadoop - hadoop-hdfs - ${hadoop.version} - test - - - com.sun.jersey - jersey-json - - - com.sun.jersey - jersey-client - - - javax.servlet - servlet-api - - - org.apache.avro - avro - - - org.apache.jackrabbit - jackrabbit-webdav - - - io.netty - netty - - - commons-httpclient - commons-httpclient - - - org.eclipse.jgit - org.eclipse.jgit - - - com.jcraft - jsch - - - org.apache.commons - commons-compress - - - xml-apis - xml-apis - - - xerces - xercesImpl - - - com.google.guava - guava - - - io.netty - netty-all - - - org.eclipse.jetty - jetty-util - - - com.fasterxml.jackson.core - jackson-annotations - - - - commons-logging - commons-logging - - - com.fasterxml.jackson.core - jackson-databind - - - - - - org.apache.hadoop - hadoop-hdfs - ${hadoop.version} - tests - test - - - com.sun.jersey - jersey-json - - - com.sun.jersey - jersey-client - - - javax.servlet - servlet-api - - - org.apache.avro - avro - - - org.apache.jackrabbit - jackrabbit-webdav - - - io.netty - netty - - - commons-httpclient - commons-httpclient - - - org.eclipse.jgit - org.eclipse.jgit - - - com.jcraft - jsch - - - org.apache.commons - commons-compress - - - xml-apis - xml-apis - - - xerces - xercesImpl - - - com.google.guava - guava - - - io.netty - netty-all - - - org.eclipse.jetty - jetty-util - - - com.fasterxml.jackson.core - jackson-annotations - - - - commons-logging - commons-logging - - - log4j - log4j - - - com.fasterxml.jackson.core - jackson-databind - - - - - - org.apache.hadoop - ${hadoop-client-runtime.artifact} + hadoop-client-runtime 
${hadoop.version} ${hadoop.deps.scope} - commons-logging commons-logging - - - com.google.code.findbugs - jsr305 + commons-logging org.apache.hadoop - ${hadoop-client-minicluster.artifact} + hadoop-client-minicluster ${hadoop.version} test - @@ -1594,6 +652,8 @@ maven-surefire-plugin ${plugin.surefire.version} + false + false -Xmx2g -Xms1g -Dfile.encoding=UTF-8 true @@ -1683,7 +743,11 @@ org.apache.maven.plugins maven-failsafe-plugin - ${plugin.failsafe.version} + ${plugin.surefire.version} + + false + false + diff --git a/python/pom.xml b/python/pom.xml index 517704a776d..8e97d4c6c8d 100644 --- a/python/pom.xml +++ b/python/pom.xml @@ -23,7 +23,7 @@ zeppelin-interpreter-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../zeppelin-interpreter-parent/pom.xml diff --git a/python/src/test/java/org/apache/zeppelin/python/IPythonInterpreterTest.java b/python/src/test/java/org/apache/zeppelin/python/IPythonInterpreterTest.java index 0916130281d..8dfaa074069 100644 --- a/python/src/test/java/org/apache/zeppelin/python/IPythonInterpreterTest.java +++ b/python/src/test/java/org/apache/zeppelin/python/IPythonInterpreterTest.java @@ -265,7 +265,7 @@ void testIPythonPlotting() throws InterpreterException, InterruptedException, IO "x = [1, 2, 3, 4, 5]\n" + "y = [6, 7, 2, 4, 5]\n" + "p = figure(title=\"simple line example\", x_axis_label='x', y_axis_label='y')\n" + - "p.line(x, y, legend=\"Temp.\", line_width=2)\n" + + "p.line(x, y, legend_label=\"Temp.\", line_width=2)\n" + "show(p)", context); assertEquals(InterpreterResult.Code.SUCCESS, result.code(), context.out.toInterpreterResultMessage().toString()); diff --git a/rlang/pom.xml b/rlang/pom.xml index 457fb2a418f..f3943256e6c 100644 --- a/rlang/pom.xml +++ b/rlang/pom.xml @@ -23,7 +23,7 @@ zeppelin-interpreter-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../zeppelin-interpreter-parent/pom.xml @@ -34,7 +34,7 @@ r - 3.4.1 + 3.5.1 spark-${spark.version} @@ -105,12 +105,6 @@ org.apache.spark 
spark-core_2.12 ${spark.version} - - - org.apache.hadoop - hadoop-client-api - - @@ -121,22 +115,12 @@ org.apache.hadoop - hadoop-client - ${hadoop.version} - compile + hadoop-client-api org.apache.hadoop - hadoop-common - ${hadoop.version} - compile - - - log4j - log4j - - + hadoop-client-runtime @@ -145,7 +129,6 @@ 1.4.9 test - diff --git a/rlang/src/test/java/org/apache/zeppelin/r/RInterpreterTest.java b/rlang/src/test/java/org/apache/zeppelin/r/RInterpreterTest.java index 78cb1ea1d0f..7f1711c38f8 100644 --- a/rlang/src/test/java/org/apache/zeppelin/r/RInterpreterTest.java +++ b/rlang/src/test/java/org/apache/zeppelin/r/RInterpreterTest.java @@ -31,6 +31,7 @@ import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; @@ -128,7 +129,9 @@ void testInvalidR() throws InterpreterException { fail("Should fail to open SparkRInterpreter"); } catch (InterpreterException e) { String stacktrace = ExceptionUtils.getStackTrace(e); - assertTrue(stacktrace.contains("No such file or directory"), stacktrace); + assertNotNull(stacktrace); + // depends on JVM language + // assertTrue(stacktrace.contains("No such file or directory"), stacktrace); } } diff --git a/rlang/src/test/java/org/apache/zeppelin/r/ShinyInterpreterTest.java b/rlang/src/test/java/org/apache/zeppelin/r/ShinyInterpreterTest.java index 2df5d65f7b5..1afffa196e3 100644 --- a/rlang/src/test/java/org/apache/zeppelin/r/ShinyInterpreterTest.java +++ b/rlang/src/test/java/org/apache/zeppelin/r/ShinyInterpreterTest.java @@ -221,8 +221,11 @@ void testInvalidShinyApp() assertEquals(500, response.getStatus()); resultMessages = context2.out.toInterpreterResultMessage(); - assertTrue(resultMessages.get(1).getData().contains("object 'Invalid_code' not found"), - resultMessages.get(1).getData()); + 
assertTrue(resultMessages.get(1).getData().contains("Invalid_code"), + resultMessages.get(1).getData()); + // depends on JVM language + // assertTrue(resultMessages.get(1).getData().contains("object 'Invalid_code' not found"), + // resultMessages.get(1).getData()); // cancel paragraph to stop shiny app interpreter.cancel(getInterpreterContext()); diff --git a/scripts/docker/submarine/1.0.0/zeppelin-cpu-submarine-interpreter/Dockerfile b/scripts/docker/submarine/1.0.0/zeppelin-cpu-submarine-interpreter/Dockerfile deleted file mode 100644 index ec1ca6f5932..00000000000 --- a/scripts/docker/submarine/1.0.0/zeppelin-cpu-submarine-interpreter/Dockerfile +++ /dev/null @@ -1,20 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -FROM zeppelin-cpu-tensorflow_1.13.0-hadoop_3.1.2:1.0.0 -MAINTAINER Apache Software Foundation - -# expose interpreter thrift port -EXPOSE 29914 diff --git a/scripts/docker/submarine/1.0.0/zeppelin-cpu-submarine-interpreter/build.sh b/scripts/docker/submarine/1.0.0/zeppelin-cpu-submarine-interpreter/build.sh deleted file mode 100644 index 78350f71fe3..00000000000 --- a/scripts/docker/submarine/1.0.0/zeppelin-cpu-submarine-interpreter/build.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/bash -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -docker build -t zeppelin-cpu-submarine-interpreter:1.0.0 . diff --git a/scripts/docker/submarine/1.0.0/zeppelin-cpu-tensorflow_1.13.1-hadoop_3.1.2/Dockerfile b/scripts/docker/submarine/1.0.0/zeppelin-cpu-tensorflow_1.13.1-hadoop_3.1.2/Dockerfile deleted file mode 100644 index fe2ada6987d..00000000000 --- a/scripts/docker/submarine/1.0.0/zeppelin-cpu-tensorflow_1.13.1-hadoop_3.1.2/Dockerfile +++ /dev/null @@ -1,70 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. 
-# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -FROM ubuntu:16.04 -MAINTAINER Apache Software Foundation - -# Pick up some TF dependencies -RUN apt-get update && apt-get install -y --allow-downgrades --no-install-recommends \ - --allow-change-held-packages --allow-unauthenticated \ - build-essential libfreetype6-dev libpng12-dev \ - libzmq3-dev pkg-config python python-dev \ - rsync software-properties-common curl unzip wget grep sed vim iputils-ping net-tools gdb python2.7-dbg tzdata && \ - apt-get clean && \ - rm -rf /var/lib/apt/lists/* - -RUN export DEBIAN_FRONTEND=noninteractive && apt-get update && apt-get install -yq krb5-user libpam-krb5 && apt-get clean - -RUN curl -O https://bootstrap.pypa.io/get-pip.py && \ - python get-pip.py && \ - rm get-pip.py - -RUN echo "Install python related packages" && \ - apt-get -y update && \ - apt-get install -y gfortran && \ - # numerical/algebra packages - apt-get install -y libblas-dev libatlas-dev liblapack-dev && \ - # font, image for matplotlib - apt-get install -y libpng-dev libxft-dev && \ - # for tkinter - apt-get install -y python-tk libxml2-dev libxslt-dev zlib1g-dev - -RUN pip --no-cache-dir install Pillow h5py ipykernel jupyter matplotlib numpy pandas scipy sklearn && \ - python -m ipykernel.kernelspec - -# Set the locale -# disable bash: warning: setlocale: LC_ALL: cannot change locale (en_US.UTF-8) -RUN apt-get clean && apt-get update && apt-get install -y locales -RUN locale-gen 
en_US.UTF-8 - -# Install TensorFlow CPU version. -ENV TENSORFLOW_VERSION="1.13.1" -RUN pip --no-cache-dir install \ - http://storage.googleapis.com/tensorflow/linux/cpu/tensorflow-${TENSORFLOW_VERSION}-cp27-none-linux_x86_64.whl -RUN apt-get update && apt-get install git -y - -# Install hadoop -ENV HADOOP_VERSION="3.1.2" -RUN wget https://www.apache.org/dyn/closer.cgi/hadoop/common/hadoop-${HADOOP_VERSION}/hadoop-${HADOOP_VERSION}.tar.gz -RUN tar zxf hadoop-${HADOOP_VERSION}.tar.gz -RUN ln -s hadoop-${HADOOP_VERSION} hadoop-current -RUN chmod 777 -R hadoop-${HADOOP_VERSION} -RUN rm hadoop-${HADOOP_VERSION}.tar.gz - -ENV JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64 -RUN echo "$LOG_TAG Install java8" && \ - apt-get -y update && \ - apt-get install -y openjdk-8-jdk && \ - rm -rf /var/lib/apt/lists/* diff --git a/scripts/docker/submarine/1.0.0/zeppelin-cpu-tensorflow_1.13.1-hadoop_3.1.2/build.sh b/scripts/docker/submarine/1.0.0/zeppelin-cpu-tensorflow_1.13.1-hadoop_3.1.2/build.sh deleted file mode 100644 index 0eccdf4596b..00000000000 --- a/scripts/docker/submarine/1.0.0/zeppelin-cpu-tensorflow_1.13.1-hadoop_3.1.2/build.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/bash -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -docker build -t zeppelin-cpu-tensorflow_1.13.1-hadoop_3.1.2:1.0.0 . diff --git a/scripts/docker/submarine/1.0.0/zeppelin-gpu-submarine-interpreter/Dockerfile b/scripts/docker/submarine/1.0.0/zeppelin-gpu-submarine-interpreter/Dockerfile deleted file mode 100644 index ec9eec67387..00000000000 --- a/scripts/docker/submarine/1.0.0/zeppelin-gpu-submarine-interpreter/Dockerfile +++ /dev/null @@ -1,20 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -FROM zeppelin-gpu-tensorflow_1.13.0-hadoop_3.1.2:1.0.0 -MAINTAINER Apache Software Foundation - -# expose interpreter thrift port -EXPOSE 29914 diff --git a/scripts/docker/submarine/1.0.0/zeppelin-gpu-submarine-interpreter/build.sh b/scripts/docker/submarine/1.0.0/zeppelin-gpu-submarine-interpreter/build.sh deleted file mode 100644 index 786d5ccc612..00000000000 --- a/scripts/docker/submarine/1.0.0/zeppelin-gpu-submarine-interpreter/build.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/bash -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. 
-# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -docker build -t zeppelin-gpu-submarine-interpreter:1.0.0 . diff --git a/scripts/docker/submarine/1.0.0/zeppelin-gpu-tensorflow_1.13.1-hadoop_3.1.2/Dockerfile b/scripts/docker/submarine/1.0.0/zeppelin-gpu-tensorflow_1.13.1-hadoop_3.1.2/Dockerfile deleted file mode 100644 index f846aeb377c..00000000000 --- a/scripts/docker/submarine/1.0.0/zeppelin-gpu-tensorflow_1.13.1-hadoop_3.1.2/Dockerfile +++ /dev/null @@ -1,72 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -FROM nvidia/cuda:9.0-cudnn7-devel-ubuntu16.04 -MAINTAINER Apache Software Foundation - -# Pick up some TF dependencies -RUN apt-get update && apt-get install -y --allow-downgrades --no-install-recommends \ - --allow-change-held-packages --allow-unauthenticated \ - build-essential libfreetype6-dev libpng12-dev \ - libzmq3-dev pkg-config python python-dev \ - rsync software-properties-common curl unzip wget grep sed vim \ - iputils-ping net-tools gdb python2.7-dbg tzdata \ - cuda-cublas-9-0 cuda-cufft-9-0 cuda-curand-9-0 cuda-cusolver-9-0 \ - cuda-cusparse-9-0 libcudnn7=7.0.5.15-1+cuda9.0 && \ - apt-get clean && \ - rm -rf /var/lib/apt/lists/* - -RUN export DEBIAN_FRONTEND=noninteractive && apt-get update && apt-get install -yq krb5-user libpam-krb5 && apt-get clean - -RUN curl -O https://bootstrap.pypa.io/get-pip.py && \ - python get-pip.py && \ - rm get-pip.py - -RUN echo "Install python related packages" && \ - apt-get -y update && \ - apt-get install -y gfortran && \ - # numerical/algebra packages - apt-get install -y libblas-dev libatlas-dev liblapack-dev && \ - # font, image for matplotlib - apt-get install -y libpng-dev libxft-dev && \ - # for tkinter - apt-get install -y python-tk libxml2-dev libxslt-dev zlib1g-dev - -RUN pip --no-cache-dir install Pillow h5py ipykernel jupyter matplotlib numpy pandas scipy sklearn && \ - python -m ipykernel.kernelspec - -# Set the locale -# disable bash: warning: setlocale: LC_ALL: cannot change locale (en_US.UTF-8) -RUN apt-get clean && apt-get update && apt-get install -y locales -RUN locale-gen en_US.UTF-8 - -# Install TensorFlow GPU version. 
-ENV TENSORFLOW_VERSION="1.13.1" -RUN pip --no-cache-dir install \ - http://storage.googleapis.com/tensorflow/linux/gpu/tensorflow_gpu-${TENSORFLOW_VERSION}-cp27-none-linux_x86_64.whl -RUN apt-get update && apt-get install git -y - -# Install hadoop -ENV HADOOP_VERSION="3.1.2" -RUN wget https://www.apache.org/dyn/closer.cgi/hadoop/common/hadoop-${HADOOP_VERSION}/hadoop-${HADOOP_VERSION}.tar.gz -RUN tar zxf hadoop-${HADOOP_VERSION}.tar.gz -RUN ln -s hadoop-${HADOOP_VERSION} hadoop-current -RUN rm hadoop-${HADOOP_VERSION}.tar.gz - -ENV JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64 -RUN echo "$LOG_TAG Install java8" && \ - apt-get -y update && \ - apt-get install -y openjdk-8-jdk && \ - rm -rf /var/lib/apt/lists/* diff --git a/scripts/docker/submarine/1.0.0/zeppelin-gpu-tensorflow_1.13.1-hadoop_3.1.2/build.sh b/scripts/docker/submarine/1.0.0/zeppelin-gpu-tensorflow_1.13.1-hadoop_3.1.2/build.sh deleted file mode 100644 index 54af6b5523b..00000000000 --- a/scripts/docker/submarine/1.0.0/zeppelin-gpu-tensorflow_1.13.1-hadoop_3.1.2/build.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/bash -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -docker build -t zeppelin-gpu-tensorflow_1.13.1-hadoop_3.1.2:1.0.0 . 
diff --git a/scripts/docker/zeppelin-interpreter/Dockerfile b/scripts/docker/zeppelin-interpreter/Dockerfile index 87d9af8ebc5..e02b0f73663 100644 --- a/scripts/docker/zeppelin-interpreter/Dockerfile +++ b/scripts/docker/zeppelin-interpreter/Dockerfile @@ -20,7 +20,7 @@ FROM ubuntu:22.04 LABEL maintainer="Apache Software Foundation " -ARG version="0.11.1" +ARG version="0.11.2.3.3.6.0-1" ENV VERSION="${version}" \ ZEPPELIN_HOME="/opt/zeppelin" @@ -30,7 +30,7 @@ ENV VERSION="${version}" \ RUN set -ex && \ /usr/bin/apt-get update && \ DEBIAN_FRONTEND=noninteractive /usr/bin/apt-get install -y openjdk-11-jre-headless wget tini bzip2 && \ - /usr/bin/wget -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba && \ + /usr/bin/wget --remote-encoding=utf-8 -qO- https://micromamba.snakepit.net/api/micromamba/linux-64/latest | tar -xvj bin/micromamba && \ # Cleanup /usr/bin/apt-get clean && \ /bin/rm -rf /var/lib/apt/lists/* diff --git a/scripts/docker/zeppelin-server/Dockerfile b/scripts/docker/zeppelin-server/Dockerfile index 8ab088f0898..be4d17157c9 100644 --- a/scripts/docker/zeppelin-server/Dockerfile +++ b/scripts/docker/zeppelin-server/Dockerfile @@ -36,11 +36,11 @@ RUN set -ex && \ apt-get autoclean && \ apt-get clean -ARG version="0.11.1" +ARG version="0.11.2.3.3.6.0-1" ENV LANG=en_US.UTF-8 \ LC_ALL=en_US.UTF-8 \ - JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64 \ + JAVA_HOME=/usr/lib/jvm/java-11-openjdk-amd64 \ VERSION="${version}" \ HOME="/opt/zeppelin" \ ZEPPELIN_HOME="/opt/zeppelin" \ diff --git a/scripts/docker/zeppelin/bin/Dockerfile b/scripts/docker/zeppelin/bin/Dockerfile index d9f3b7bab3e..cc76efb5b8c 100644 --- a/scripts/docker/zeppelin/bin/Dockerfile +++ b/scripts/docker/zeppelin/bin/Dockerfile @@ -17,20 +17,20 @@ FROM ubuntu:20.04 LABEL maintainer="Apache Software Foundation " -ENV Z_VERSION="0.11.1" +ENV Z_VERSION="0.11.2.3.3.6.0-1" ENV LOG_TAG="[ZEPPELIN_${Z_VERSION}]:" \ ZEPPELIN_HOME="/opt/zeppelin" \ 
HOME="/opt/zeppelin" \ LANG=en_US.UTF-8 \ LC_ALL=en_US.UTF-8 \ - JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64 \ + JAVA_HOME=/usr/lib/jvm/java-11-openjdk-amd64 \ ZEPPELIN_ADDR="0.0.0.0" RUN echo "$LOG_TAG install basic packages" && \ apt-get -y update && \ # Switch back to install JRE instead of JDK when moving to JDK9 or later. - DEBIAN_FRONTEND=noninteractive apt-get install -y locales language-pack-en tini openjdk-8-jdk-headless wget unzip && \ + DEBIAN_FRONTEND=noninteractive apt-get install -y locales language-pack-en tini openjdk-11-jdk-headless wget unzip && \ # Cleanup rm -rf /var/lib/apt/lists/* && \ apt-get autoclean && \ diff --git a/shell/pom.xml b/shell/pom.xml index e46d2643cac..175f2984966 100644 --- a/shell/pom.xml +++ b/shell/pom.xml @@ -23,7 +23,7 @@ zeppelin-interpreter-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../zeppelin-interpreter-parent/pom.xml @@ -35,7 +35,7 @@ sh - 0.9.3 + 0.12.25 2.4.0 32.0.0-jre diff --git a/spark-submit/pom.xml b/spark-submit/pom.xml index fa8dc257bad..41397b73683 100644 --- a/spark-submit/pom.xml +++ b/spark-submit/pom.xml @@ -23,7 +23,7 @@ zeppelin-interpreter-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../zeppelin-interpreter-parent/pom.xml diff --git a/spark/interpreter/pom.xml b/spark/interpreter/pom.xml index 4ef71c640e6..97a59623438 100644 --- a/spark/interpreter/pom.xml +++ b/spark/interpreter/pom.xml @@ -23,7 +23,7 @@ spark-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../pom.xml @@ -45,7 +45,7 @@ 4.1.17 - 3.4.1 + 3.5.1 3.21.12 0.10.9.7 2.12.17 @@ -166,36 +166,6 @@ spark-core_${spark.scala.binary.version} ${spark.version} provided - - - org.apache.hadoop - hadoop-client - - - - - - org.apache.hadoop - hadoop-client - ${hadoop.version} - provided - - - - org.apache.hadoop - hadoop-common - ${hadoop.version} - provided - - - com.google.protobuf - protobuf-java - - - commons-lang - commons-lang - - @@ -593,7 +563,7 @@ spark-3.5 - 3.5.0 + 3.5.1 3.21.12 
0.10.9.7 diff --git a/spark/interpreter/src/test/java/org/apache/zeppelin/spark/SparkIRInterpreterTest.java b/spark/interpreter/src/test/java/org/apache/zeppelin/spark/SparkIRInterpreterTest.java deleted file mode 100644 index 73832a9b458..00000000000 --- a/spark/interpreter/src/test/java/org/apache/zeppelin/spark/SparkIRInterpreterTest.java +++ /dev/null @@ -1,144 +0,0 @@ - -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.zeppelin.spark; - -import org.apache.zeppelin.interpreter.Interpreter; -import org.apache.zeppelin.interpreter.InterpreterContext; -import org.apache.zeppelin.interpreter.InterpreterException; -import org.apache.zeppelin.interpreter.InterpreterGroup; -import org.apache.zeppelin.interpreter.InterpreterOutput; -import org.apache.zeppelin.interpreter.InterpreterResult; -import org.apache.zeppelin.interpreter.InterpreterResultMessage; -import org.apache.zeppelin.interpreter.LazyOpenInterpreter; -import org.apache.zeppelin.interpreter.remote.RemoteInterpreterEventClient; -import org.apache.zeppelin.r.IRInterpreterTest; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -import java.io.IOException; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Properties; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.junit.jupiter.api.Assertions.fail; -import static org.mockito.Matchers.any; -import static org.mockito.Mockito.atLeastOnce; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; - -public class SparkIRInterpreterTest extends IRInterpreterTest { - - private RemoteInterpreterEventClient mockRemoteIntpEventClient = mock(RemoteInterpreterEventClient.class); - - @Override - protected Interpreter createInterpreter(Properties properties) { - return new SparkIRInterpreter(properties); - } - - @Override - @BeforeEach - public void setUp() throws InterpreterException { - Properties properties = new Properties(); - properties.setProperty(SparkStringConstants.MASTER_PROP_NAME, "local"); - properties.setProperty(SparkStringConstants.APP_NAME_PROP_NAME, "test"); - properties.setProperty("zeppelin.spark.maxResult", "100"); - properties.setProperty("spark.r.backendConnectionTimeout", "10"); - properties.setProperty("zeppelin.spark.deprecatedMsg.show", "false"); - 
properties.setProperty("spark.sql.execution.arrow.sparkr.enabled", "false"); - - InterpreterContext context = getInterpreterContext(); - InterpreterContext.set(context); - interpreter = createInterpreter(properties); - - InterpreterGroup interpreterGroup = new InterpreterGroup(); - interpreterGroup.addInterpreterToSession(new LazyOpenInterpreter(interpreter), "session_1"); - interpreter.setInterpreterGroup(interpreterGroup); - - SparkInterpreter sparkInterpreter = new SparkInterpreter(properties); - interpreterGroup.addInterpreterToSession(new LazyOpenInterpreter(sparkInterpreter), "session_1"); - sparkInterpreter.setInterpreterGroup(interpreterGroup); - - interpreter.open(); - } - - - @Test - public void testSparkRInterpreter() throws InterpreterException, InterruptedException, IOException { - InterpreterContext context = getInterpreterContext(); - InterpreterResult result = interpreter.interpret("1+1", context); - assertEquals(InterpreterResult.Code.SUCCESS, result.code()); - List interpreterResultMessages = context.out.toInterpreterResultMessage(); - assertTrue(interpreterResultMessages.get(0).getData().contains("2")); - - context = getInterpreterContext(); - result = interpreter.interpret("sparkR.version()", context); - - assertEquals(InterpreterResult.Code.SUCCESS, result.code()); - interpreterResultMessages = context.out.toInterpreterResultMessage(); - if (interpreterResultMessages.get(0).getData().contains("2.2")) { - ENABLE_GOOGLEVIS_TEST = false; - } - context = getInterpreterContext(); - result = interpreter.interpret("df <- as.DataFrame(faithful)\nhead(df)", context); - interpreterResultMessages = context.out.toInterpreterResultMessage(); - assertEquals(InterpreterResult.Code.SUCCESS, result.code(), context.out.toString()); - assertTrue(interpreterResultMessages.get(0).getData().contains(">eruptions")); - // spark job url is sent - verify(mockRemoteIntpEventClient, atLeastOnce()).onParaInfosReceived(any(Map.class)); - - // cancel - final 
InterpreterContext context2 = getInterpreterContext(); - Thread thread = new Thread() { - @Override - public void run() { - try { - InterpreterResult result = interpreter.interpret("ldf <- dapplyCollect(\n" + - " df,\n" + - " function(x) {\n" + - " Sys.sleep(3)\n" + - " x <- cbind(x, \"waiting_secs\" = x$waiting * 60)\n" + - " })\n" + - "head(ldf, 3)", context2); - assertTrue(result.message().get(0).getData().contains("cancelled")); - } catch (InterpreterException e) { - fail("Should not throw InterpreterException"); - } - } - }; - thread.setName("Cancel-Thread"); - thread.start(); - Thread.sleep(1000); - interpreter.cancel(context2); - } - - @Override - protected InterpreterContext getInterpreterContext() { - InterpreterContext context = InterpreterContext.builder() - .setNoteId("note_1") - .setParagraphId("paragraph_1") - .setInterpreterOut(new InterpreterOutput()) - .setLocalProperties(new HashMap<>()) - .setIntpEventClient(mockRemoteIntpEventClient) - .build(); - return context; - } -} diff --git a/spark/interpreter/src/test/java/org/apache/zeppelin/spark/SparkRInterpreterTest.java b/spark/interpreter/src/test/java/org/apache/zeppelin/spark/SparkRInterpreterTest.java deleted file mode 100644 index 3f0baa72391..00000000000 --- a/spark/interpreter/src/test/java/org/apache/zeppelin/spark/SparkRInterpreterTest.java +++ /dev/null @@ -1,185 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.zeppelin.spark; - -import org.apache.commons.lang3.exception.ExceptionUtils; -import org.apache.zeppelin.interpreter.Interpreter; -import org.apache.zeppelin.interpreter.InterpreterContext; -import org.apache.zeppelin.interpreter.InterpreterException; -import org.apache.zeppelin.interpreter.InterpreterGroup; -import org.apache.zeppelin.interpreter.InterpreterOutput; -import org.apache.zeppelin.interpreter.InterpreterResult; -import org.apache.zeppelin.interpreter.LazyOpenInterpreter; -import org.apache.zeppelin.interpreter.remote.RemoteInterpreterEventClient; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -import java.util.HashMap; -import java.util.Map; -import java.util.Properties; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.junit.jupiter.api.Assertions.fail; -import static org.mockito.Matchers.any; -import static org.mockito.Mockito.atLeastOnce; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; - -class SparkRInterpreterTest { - - private SparkRInterpreter sparkRInterpreter; - private SparkInterpreter sparkInterpreter; - private RemoteInterpreterEventClient mockRemoteIntpEventClient = mock(RemoteInterpreterEventClient.class); - - @BeforeEach - public void setUp() throws InterpreterException { - Properties properties = new Properties(); - properties.setProperty(SparkStringConstants.MASTER_PROP_NAME, "local"); - 
properties.setProperty(SparkStringConstants.APP_NAME_PROP_NAME, "test"); - properties.setProperty("zeppelin.spark.maxResult", "100"); - properties.setProperty("zeppelin.R.knitr", "true"); - properties.setProperty("spark.r.backendConnectionTimeout", "10"); - properties.setProperty("zeppelin.spark.deprecatedMsg.show", "false"); - properties.setProperty("spark.sql.execution.arrow.sparkr.enabled", "false"); - - InterpreterContext context = getInterpreterContext(); - InterpreterContext.set(context); - sparkRInterpreter = new SparkRInterpreter(properties); - sparkInterpreter = new SparkInterpreter(properties); - - InterpreterGroup interpreterGroup = new InterpreterGroup(); - interpreterGroup.addInterpreterToSession(new LazyOpenInterpreter(sparkRInterpreter), "session_1"); - interpreterGroup.addInterpreterToSession(new LazyOpenInterpreter(sparkInterpreter), "session_1"); - sparkRInterpreter.setInterpreterGroup(interpreterGroup); - sparkInterpreter.setInterpreterGroup(interpreterGroup); - - sparkRInterpreter.open(); - } - - @AfterEach - public void tearDown() throws InterpreterException { - sparkInterpreter.close(); - } - - @Test - void testSparkRInterpreter() throws InterpreterException, InterruptedException { - InterpreterResult result = sparkRInterpreter.interpret("1+1", getInterpreterContext()); - assertEquals(InterpreterResult.Code.SUCCESS, result.code()); - assertTrue(result.message().get(0).getData().contains("2")); - - result = sparkRInterpreter.interpret("sparkR.version()", getInterpreterContext()); - assertEquals(InterpreterResult.Code.SUCCESS, result.code()); - - result = sparkRInterpreter.interpret("df <- as.DataFrame(faithful)\nhead(df)", getInterpreterContext()); - assertEquals(InterpreterResult.Code.SUCCESS, result.code()); - assertTrue(result.message().get(0).getData().contains("eruptions waiting"), result.toString()); - // spark job url is sent - verify(mockRemoteIntpEventClient, atLeastOnce()).onParaInfosReceived(any(Map.class)); - - // cancel - 
InterpreterContext context = getInterpreterContext(); - InterpreterContext finalContext = context; - Thread thread = new Thread() { - @Override - public void run() { - try { - InterpreterResult result = sparkRInterpreter.interpret("ldf <- dapplyCollect(\n" + - " df,\n" + - " function(x) {\n" + - " Sys.sleep(3)\n" + - " x <- cbind(x, \"waiting_secs\" = x$waiting * 60)\n" + - " })\n" + - "head(ldf, 3)", finalContext); - assertTrue(result.message().get(0).getData().contains("cancelled")); - } catch (InterpreterException e) { - fail("Should not throw InterpreterException"); - } - } - }; - thread.setName("Cancel-Thread"); - thread.start(); - Thread.sleep(1000); - sparkRInterpreter.cancel(context); - - // plotting - context = getInterpreterContext(); - context.getLocalProperties().put("imageWidth", "100"); - result = sparkRInterpreter.interpret("hist(mtcars$mpg)", context); - assertEquals(InterpreterResult.Code.SUCCESS, result.code()); - assertEquals(1, result.message().size()); - assertEquals(InterpreterResult.Type.HTML, result.message().get(0).getType()); - assertTrue(result.message().get(0).getData().contains("()) - .build(); - return context; - } -} - diff --git a/spark/interpreter/src/test/java/org/apache/zeppelin/spark/SparkShinyInterpreterTest.java b/spark/interpreter/src/test/java/org/apache/zeppelin/spark/SparkShinyInterpreterTest.java deleted file mode 100644 index 33b60f621b9..00000000000 --- a/spark/interpreter/src/test/java/org/apache/zeppelin/spark/SparkShinyInterpreterTest.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.zeppelin.spark; - -import com.mashape.unirest.http.HttpResponse; -import com.mashape.unirest.http.Unirest; -import com.mashape.unirest.http.exceptions.UnirestException; -import org.apache.commons.io.IOUtils; -import org.apache.zeppelin.interpreter.InterpreterContext; -import org.apache.zeppelin.interpreter.InterpreterException; -import org.apache.zeppelin.interpreter.InterpreterGroup; -import org.apache.zeppelin.interpreter.InterpreterResult; -import org.apache.zeppelin.interpreter.InterpreterResultMessage; -import org.apache.zeppelin.interpreter.LazyOpenInterpreter; -import org.apache.zeppelin.r.ShinyInterpreterTest; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.junit.jupiter.api.Assertions.fail; - -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.util.List; -import java.util.Properties; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -class SparkShinyInterpreterTest extends ShinyInterpreterTest { - - private SparkInterpreter sparkInterpreter; - - @Override - @BeforeEach - public void setUp() throws InterpreterException { - Properties properties = new Properties(); - properties.setProperty(SparkStringConstants.MASTER_PROP_NAME, "local[*]"); - properties.setProperty(SparkStringConstants.APP_NAME_PROP_NAME, "test"); - - InterpreterContext context = 
getInterpreterContext(); - InterpreterContext.set(context); - interpreter = new SparkShinyInterpreter(properties); - - InterpreterGroup interpreterGroup = new InterpreterGroup(); - interpreterGroup.addInterpreterToSession(new LazyOpenInterpreter(interpreter), "session_1"); - interpreter.setInterpreterGroup(interpreterGroup); - - sparkInterpreter = new SparkInterpreter(properties); - interpreterGroup.addInterpreterToSession(new LazyOpenInterpreter(sparkInterpreter), "session_1"); - sparkInterpreter.setInterpreterGroup(interpreterGroup); - - interpreter.open(); - } - - @Override - @AfterEach - public void tearDown() throws InterpreterException { - if (interpreter != null) { - interpreter.close(); - } - } - - @Test - void testSparkShinyApp() - throws IOException, InterpreterException, InterruptedException, UnirestException { - /****************** Launch Shiny app with default app name *****************************/ - InterpreterContext context = getInterpreterContext(); - context.getLocalProperties().put("type", "ui"); - InterpreterResult result = - interpreter.interpret( - IOUtils.toString(getClass().getResource("/spark_ui.R"), StandardCharsets.UTF_8), context); - assertEquals(InterpreterResult.Code.SUCCESS, result.code()); - - context = getInterpreterContext(); - context.getLocalProperties().put("type", "server"); - result = interpreter.interpret( - IOUtils.toString(getClass().getResource("/spark_server.R"), StandardCharsets.UTF_8), context); - assertEquals(InterpreterResult.Code.SUCCESS, result.code()); - - final InterpreterContext context2 = getInterpreterContext(); - context2.getLocalProperties().put("type", "run"); - Thread thread = new Thread(() -> { - try { - interpreter.interpret("", context2); - } catch (Exception e) { - e.printStackTrace(); - } - }); - thread.start(); - // wait for the shiny app start - Thread.sleep(5 * 1000); - // extract shiny url - List resultMessages = context2.out.toInterpreterResultMessage(); - assertEquals(1, resultMessages.size(), 
resultMessages.toString()); - assertEquals(InterpreterResult.Type.HTML, resultMessages.get(0).getType()); - String resultMessageData = resultMessages.get(0).getData(); - assertTrue(resultMessageData.contains(" response = Unirest.get(shinyURL).asString(); - assertEquals(200, response.getStatus()); - assertTrue(response.getBody().contains("Spark Version"), response.getBody()); - } -} diff --git a/spark/interpreter/src/test/java/org/apache/zeppelin/spark/SparkSqlInterpreterTest.java b/spark/interpreter/src/test/java/org/apache/zeppelin/spark/SparkSqlInterpreterTest.java index 20594c4bde2..36646cef361 100644 --- a/spark/interpreter/src/test/java/org/apache/zeppelin/spark/SparkSqlInterpreterTest.java +++ b/spark/interpreter/src/test/java/org/apache/zeppelin/spark/SparkSqlInterpreterTest.java @@ -290,6 +290,7 @@ public void run() { @Test void testDDL() throws InterpreterException, IOException { InterpreterContext context = getInterpreterContext(); + sqlInterpreter.interpret(("drop table if exists t1"), context); InterpreterResult ret = sqlInterpreter.interpret("create table t1(id int, name string)", context); assertEquals(InterpreterResult.Code.SUCCESS, ret.code(), context.out.toString()); // spark 1.x will still return DataFrame with non-empty columns. 
diff --git a/spark/pom.xml b/spark/pom.xml index 31282f51163..168660803fd 100644 --- a/spark/pom.xml +++ b/spark/pom.xml @@ -23,7 +23,7 @@ zeppelin-interpreter-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../zeppelin-interpreter-parent/pom.xml @@ -38,7 +38,7 @@ 3.2.10 - 3.4.1 + 3.5.1 2.5.0 0.10.9.7 2.12.17 diff --git a/spark/scala-2.12/pom.xml b/spark/scala-2.12/pom.xml index 6ace3371bce..d4bb22efb64 100644 --- a/spark/scala-2.12/pom.xml +++ b/spark/scala-2.12/pom.xml @@ -21,7 +21,7 @@ org.apache.zeppelin spark-scala-parent - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../spark-scala-parent/pom.xml @@ -31,7 +31,7 @@ Zeppelin: Spark Interpreter Scala_2.12 - 3.4.1 + 3.5.1 2.12.17 2.12 ${spark.scala.version} diff --git a/spark/scala-2.13/pom.xml b/spark/scala-2.13/pom.xml index 176c2710dba..f6bfbf5b697 100644 --- a/spark/scala-2.13/pom.xml +++ b/spark/scala-2.13/pom.xml @@ -21,7 +21,7 @@ org.apache.zeppelin spark-scala-parent - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../spark-scala-parent/pom.xml @@ -31,7 +31,7 @@ Zeppelin: Spark Interpreter Scala_2.13 - 3.4.1 + 3.5.1 2.13.8 2.13 ${spark.scala.version} diff --git a/spark/spark-scala-parent/pom.xml b/spark/spark-scala-parent/pom.xml index 9f907acce44..28f4ac2fea3 100644 --- a/spark/spark-scala-parent/pom.xml +++ b/spark/spark-scala-parent/pom.xml @@ -22,7 +22,7 @@ org.apache.zeppelin zeppelin-interpreter-parent - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../../zeppelin-interpreter-parent/pom.xml @@ -32,7 +32,7 @@ Zeppelin: Spark Scala Parent - 3.4.1 + 3.5.1 2.12 2.12.18 ${spark.scala.binary.version} @@ -80,21 +80,6 @@ provided - - - org.apache.hadoop - hadoop-client - ${hadoop.version} - provided - - - - org.apache.hadoop - hadoop-common - ${hadoop.version} - provided - - org.scala-lang scala-compiler diff --git a/spark/spark-shims/pom.xml b/spark/spark-shims/pom.xml index 783afabd505..41ba24a3395 100644 --- a/spark/spark-shims/pom.xml +++ b/spark/spark-shims/pom.xml @@ -22,7 +22,7 @@ spark-parent org.apache.zeppelin 
- 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../pom.xml @@ -38,8 +38,7 @@ --> org.apache.hadoop - hadoop-common - ${hadoop.version} + hadoop-client-api provided diff --git a/spark/spark3-shims/pom.xml b/spark/spark3-shims/pom.xml index ecb5db25835..dc32120538a 100644 --- a/spark/spark3-shims/pom.xml +++ b/spark/spark3-shims/pom.xml @@ -21,7 +21,7 @@ spark-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../pom.xml @@ -32,7 +32,7 @@ 2.12 - 3.4.1 + 3.5.1 diff --git a/sparql/pom.xml b/sparql/pom.xml index 8e519daf8ef..33c129fcf05 100644 --- a/sparql/pom.xml +++ b/sparql/pom.xml @@ -22,7 +22,7 @@ zeppelin-interpreter-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../zeppelin-interpreter-parent/pom.xml diff --git a/submarine/pom.xml b/submarine/pom.xml deleted file mode 100644 index ee5890ec043..00000000000 --- a/submarine/pom.xml +++ /dev/null @@ -1,185 +0,0 @@ - - - - 4.0.0 - - - zeppelin-interpreter-parent - org.apache.zeppelin - 0.11.1.3.3.6.0-1 - ../zeppelin-interpreter-parent/pom.xml - - - zeppelin-submarine - jar - Zeppelin: Submarine interpreter - - - - submarine - ${hadoop2.7.version} - 2.5.4 - 0.3.8 - 24.1.1-jre - - 1.3 - - - - - jetbrains-intellij-dependencies - JetBrains Pty4j Repository - https://packages.jetbrains.team/maven/p/ij/intellij-dependencies - - - - - - org.apache.zeppelin - zeppelin-python - ${project.version} - - - org.apache.zeppelin - zeppelin-jupyter-interpreter - ${project.version} - - - net.sf.py4j - py4j - - - - - org.apache.zeppelin - zeppelin-shell - ${project.version} - - - org.hamcrest - hamcrest-all - ${hamcrest.all.version} - test - - - org.apache.hadoop - hadoop-common - ${hadoop.version} - - - org.apache.commons - commons-compress - - - com.google.guava - guava - - - org.codehaus.jackson - jackson-mapper-asl - - - org.codehaus.jackson - jackson-xc - - - org.codehaus.jackson - jackson-jaxrs - - - org.codehaus.jackson - jackson-core-asl - - - log4j - log4j - - - org.slf4j - slf4j-log4j12 - - - - - 
org.apache.hadoop - hadoop-hdfs - ${hadoop.version} - - - com.google.guava - guava - - - io.netty - netty - - - - - com.hubspot.jinjava - jinjava - ${jinjava.version} - - - com.google.guava - guava - - - - - - - org.mockito - mockito-core - test - - - org.junit.jupiter - junit-jupiter-params - test - - - - - - - ${project.basedir}/src/test/resources - - - ${project.basedir}/src/main/resources - - - - - maven-enforcer-plugin - - - maven-resources-plugin - - - maven-shade-plugin - - - org.apache.maven.plugins - maven-checkstyle-plugin - - - - - diff --git a/submarine/src/main/java/org/apache/zeppelin/submarine/IPySubmarineInterpreter.java b/submarine/src/main/java/org/apache/zeppelin/submarine/IPySubmarineInterpreter.java deleted file mode 100644 index 41516764798..00000000000 --- a/submarine/src/main/java/org/apache/zeppelin/submarine/IPySubmarineInterpreter.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.zeppelin.submarine; - -import org.apache.zeppelin.interpreter.InterpreterException; -import org.apache.zeppelin.python.IPythonInterpreter; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Properties; - -public class IPySubmarineInterpreter extends IPythonInterpreter { - - private static final Logger LOGGER = LoggerFactory.getLogger(IPySubmarineInterpreter.class); - - private PySubmarineInterpreter pySubmarineInterpreter; - - public IPySubmarineInterpreter(Properties properties) { - super(properties); - } - - @Override - public void open() throws InterpreterException { - PySubmarineInterpreter pySparkInterpreter = - getInterpreterInTheSameSessionByClassName(PySubmarineInterpreter.class, false); - - pySubmarineInterpreter - = getInterpreterInTheSameSessionByClassName(PySubmarineInterpreter.class); - - super.open(); - } -} diff --git a/submarine/src/main/java/org/apache/zeppelin/submarine/PySubmarineInterpreter.java b/submarine/src/main/java/org/apache/zeppelin/submarine/PySubmarineInterpreter.java deleted file mode 100644 index 852891cb83a..00000000000 --- a/submarine/src/main/java/org/apache/zeppelin/submarine/PySubmarineInterpreter.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.zeppelin.submarine; - -import org.apache.zeppelin.interpreter.InterpreterContext; -import org.apache.zeppelin.interpreter.InterpreterException; -import org.apache.zeppelin.interpreter.InterpreterResult; -import org.apache.zeppelin.interpreter.thrift.ParagraphInfo; -import org.apache.zeppelin.python.IPythonInterpreter; -import org.apache.zeppelin.python.PythonInterpreter; -import org.apache.zeppelin.submarine.commons.SubmarineConstants; -import org.apache.zeppelin.submarine.job.SubmarineJob; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.List; -import java.util.Properties; - -public class PySubmarineInterpreter extends PythonInterpreter { - private static final Logger LOGGER = LoggerFactory.getLogger(PySubmarineInterpreter.class); - - private SubmarineInterpreter submarineInterpreter = null; - private SubmarineContext submarineContext = null; - - public PySubmarineInterpreter(Properties property) { - super(property); - submarineContext = SubmarineContext.getInstance(); - } - - @Override - public InterpreterResult interpret(String st, InterpreterContext context) - throws InterpreterException { - setParagraphConfig(context); - - // algorithm & checkpoint path support replaces ${username} with real user name - String algorithmPath = properties.getProperty( - SubmarineConstants.SUBMARINE_ALGORITHM_HDFS_PATH, ""); - if (algorithmPath.contains(SubmarineConstants.USERNAME_SYMBOL)) { - algorithmPath = algorithmPath.replace(SubmarineConstants.USERNAME_SYMBOL, userName); - properties.setProperty(SubmarineConstants.SUBMARINE_ALGORITHM_HDFS_PATH, algorithmPath); - } - String checkpointPath = properties.getProperty( - SubmarineConstants.TF_CHECKPOINT_PATH, ""); - if (checkpointPath.contains(SubmarineConstants.USERNAME_SYMBOL)) { - checkpointPath = checkpointPath.replace(SubmarineConstants.USERNAME_SYMBOL, userName); - properties.setProperty(SubmarineConstants.TF_CHECKPOINT_PATH, checkpointPath); - } - - if (null == 
submarineInterpreter) { - submarineInterpreter = getInterpreterInTheSameSessionByClassName(SubmarineInterpreter.class); - if (null != submarineInterpreter) { - submarineInterpreter.setPythonWorkDir(context.getNoteId(), getPythonWorkDir()); - } - } - - SubmarineJob submarineJob = submarineContext.addOrGetSubmarineJob(this.properties, context); - if (null != submarineJob && null != submarineJob.getHdfsClient()) { - try { - String noteId = context.getNoteId(); - List paragraphInfos = context.getIntpEventClient() - .getParagraphList(userName, noteId); - submarineJob.getHdfsClient().saveParagraphToFiles( - noteId, paragraphInfos, getPythonWorkDir().getAbsolutePath(), properties); - } catch (Exception e) { - LOGGER.error(e.getMessage(), e); - } - } - return super.interpret(st, context); - } - - private void setParagraphConfig(InterpreterContext context) { - context.getConfig().put("editorHide", false); - context.getConfig().put("title", true); - } - - @Override - protected IPythonInterpreter getIPythonInterpreter() throws InterpreterException { - return getInterpreterInTheSameSessionByClassName(IPySubmarineInterpreter.class, false); - } -} diff --git a/submarine/src/main/java/org/apache/zeppelin/submarine/SubmarineContext.java b/submarine/src/main/java/org/apache/zeppelin/submarine/SubmarineContext.java deleted file mode 100644 index 1a0ae13c1b6..00000000000 --- a/submarine/src/main/java/org/apache/zeppelin/submarine/SubmarineContext.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.zeppelin.submarine; - -import org.apache.zeppelin.interpreter.InterpreterContext; -import org.apache.zeppelin.submarine.job.SubmarineJob; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.HashMap; -import java.util.Iterator; -import java.util.Map; -import java.util.Properties; - -public class SubmarineContext { - private Logger LOGGER = LoggerFactory.getLogger(SubmarineContext.class); - - private static SubmarineContext instance = null; - - // noteId -> SubmarineJob - private Map mapSubmarineJob = new HashMap<>(); - - public static SubmarineContext getInstance() { - synchronized (SubmarineContext.class) { - if (instance == null) { - instance = new SubmarineContext(); - } - return instance; - } - } - - public SubmarineJob addOrGetSubmarineJob(Properties properties, InterpreterContext context) { - SubmarineJob submarineJob = null; - String noteId = context.getNoteId(); - if (!mapSubmarineJob.containsKey(noteId)) { - submarineJob = new SubmarineJob(context, properties); - mapSubmarineJob.put(noteId, submarineJob); - } else { - submarineJob = mapSubmarineJob.get(noteId); - } - // need update InterpreterContext - submarineJob.setIntpContext(context); - - return submarineJob; - } - - public SubmarineJob getSubmarineJob(String nodeId) { - if (!mapSubmarineJob.containsKey(nodeId)) { - return null; - } - - return mapSubmarineJob.get(nodeId); - } - - public void stopAllSubmarineJob() { - Iterator> iterator = mapSubmarineJob.entrySet().iterator(); - - while (iterator.hasNext()) { - Map.Entry entry = iterator.next(); - - entry.getValue().stopRunning(); - } - } -} diff --git a/submarine/src/main/java/org/apache/zeppelin/submarine/SubmarineInterpreter.java b/submarine/src/main/java/org/apache/zeppelin/submarine/SubmarineInterpreter.java deleted file mode 100644 index 3708e7166b8..00000000000 --- 
a/submarine/src/main/java/org/apache/zeppelin/submarine/SubmarineInterpreter.java +++ /dev/null @@ -1,268 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.zeppelin.submarine; - -import com.google.common.annotations.VisibleForTesting; -import org.apache.commons.lang3.StringUtils; -import org.apache.zeppelin.display.ui.OptionInput.ParamOption; -import org.apache.zeppelin.interpreter.Interpreter; -import org.apache.zeppelin.interpreter.InterpreterContext; -import org.apache.zeppelin.interpreter.InterpreterResult; -import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion; -import org.apache.zeppelin.scheduler.Scheduler; -import org.apache.zeppelin.scheduler.SchedulerFactory; -import org.apache.zeppelin.submarine.commons.SubmarineCommand; -import org.apache.zeppelin.submarine.commons.SubmarineConstants; -import org.apache.zeppelin.submarine.job.SubmarineJob; -import org.apache.zeppelin.submarine.commons.SubmarineUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.util.List; -import java.util.Properties; - -import static org.apache.zeppelin.submarine.commons.SubmarineCommand.CLEAN_RUNTIME_CACHE; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.CHECKPOINT_PATH; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.CLEAN_CHECKPOINT; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.COMMAND_CLEAN; -import 
static org.apache.zeppelin.submarine.commons.SubmarineConstants.COMMAND_JOB_RUN; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.COMMAND_JOB_SHOW; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.COMMAND_TYPE; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.COMMAND_USAGE; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.INPUT_PATH; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.MACHINELEARNING_DISTRIBUTED_ENABLE; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.OPERATION_TYPE; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.PS_LAUNCH_CMD; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.SUBMARINE_ALGORITHM_HDFS_PATH; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.TF_CHECKPOINT_PATH; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.USERNAME_SYMBOL; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.WORKER_LAUNCH_CMD; -import static org.apache.zeppelin.submarine.commons.SubmarineUtils.unifyKey; - -/** - * SubmarineInterpreter of Hadoop Submarine implementation. - * Support for Hadoop Submarine cli. All the commands documented here - * https://github.com/apache/hadoop/tree/trunk/hadoop-submarine/hadoop-submarine-core - * /src/site/markdown/QuickStart.md is supported. 
- */ -public class SubmarineInterpreter extends Interpreter { - private Logger LOGGER = LoggerFactory.getLogger(SubmarineInterpreter.class); - - // Number of submarines executed in parallel for each interpreter instance - protected int concurrentExecutedMax = 1; - - private boolean needUpdateConfig = true; - private String currentReplName = ""; - - private SubmarineContext submarineContext = null; - - public SubmarineInterpreter(Properties properties) { - super(properties); - - String concurrentMax = getProperty(SubmarineConstants.SUBMARINE_CONCURRENT_MAX, "1"); - concurrentExecutedMax = Integer.parseInt(concurrentMax); - - submarineContext = SubmarineContext.getInstance(); - } - - @Override - public void open() { - LOGGER.info("SubmarineInterpreter open()"); - } - - @Override - public void close() { - submarineContext.stopAllSubmarineJob(); - } - - private void setParagraphConfig(InterpreterContext context) { - context.getConfig().put("editorHide", true); - context.getConfig().put("title", false); - } - - @Override - public InterpreterResult interpret(String script, InterpreterContext context) { - try { - setParagraphConfig(context); - - // algorithm & checkpoint path support replaces ${username} with real user name - String algorithmPath = properties.getProperty(SUBMARINE_ALGORITHM_HDFS_PATH, ""); - if (algorithmPath.contains(USERNAME_SYMBOL)) { - algorithmPath = algorithmPath.replace(USERNAME_SYMBOL, userName); - properties.setProperty(SUBMARINE_ALGORITHM_HDFS_PATH, algorithmPath); - } - String checkpointPath = properties.getProperty(TF_CHECKPOINT_PATH, ""); - if (checkpointPath.contains(USERNAME_SYMBOL)) { - checkpointPath = checkpointPath.replace(USERNAME_SYMBOL, userName); - properties.setProperty(TF_CHECKPOINT_PATH, checkpointPath); - } - - SubmarineJob submarineJob = submarineContext.addOrGetSubmarineJob(properties, context); - - LOGGER.debug("Run shell command '" + script + "'"); - String command = "", operation = "", cleanCheckpoint = ""; - String 
inputPath = "", chkPntPath = "", psLaunchCmd = "", workerLaunchCmd = ""; - String noteId = context.getNoteId(); - String noteName = context.getNoteName(); - - if (script.equalsIgnoreCase(COMMAND_CLEAN)) { - // Clean Registry Angular Object - command = CLEAN_RUNTIME_CACHE.getCommand(); - } else { - operation = SubmarineUtils.getAgulObjValue(context, OPERATION_TYPE); - if (!StringUtils.isEmpty(operation)) { - SubmarineUtils.removeAgulObjValue(context, OPERATION_TYPE); - command = operation; - } else { - command = SubmarineUtils.getAgulObjValue(context, COMMAND_TYPE); - } - } - - String distributed = this.properties.getProperty(MACHINELEARNING_DISTRIBUTED_ENABLE, "false"); - SubmarineUtils.setAgulObjValue(context, unifyKey(MACHINELEARNING_DISTRIBUTED_ENABLE), - distributed); - - inputPath = SubmarineUtils.getAgulObjValue(context, INPUT_PATH); - cleanCheckpoint = SubmarineUtils.getAgulObjValue(context, CLEAN_CHECKPOINT); - chkPntPath = submarineJob.getJobDefaultCheckpointPath(); - SubmarineUtils.setAgulObjValue(context, CHECKPOINT_PATH, chkPntPath); - psLaunchCmd = SubmarineUtils.getAgulObjValue(context, PS_LAUNCH_CMD); - workerLaunchCmd = SubmarineUtils.getAgulObjValue(context, WORKER_LAUNCH_CMD); - properties.put(INPUT_PATH, inputPath != null ? inputPath : ""); - properties.put(CHECKPOINT_PATH, chkPntPath != null ? chkPntPath : ""); - properties.put(PS_LAUNCH_CMD, psLaunchCmd != null ? psLaunchCmd : ""); - properties.put(WORKER_LAUNCH_CMD, workerLaunchCmd != null ? 
workerLaunchCmd : ""); - - SubmarineCommand submarineCmd = SubmarineCommand.fromCommand(command); - switch (submarineCmd) { - case USAGE: - submarineJob.showUsage(); - break; - case JOB_RUN: - if (StringUtils.equals(cleanCheckpoint, "true")) { - submarineJob.cleanJobDefaultCheckpointPath(); - } - submarineJob.runJob(); - break; - case JOB_STOP: - String jobName = SubmarineUtils.getJobName(userName, noteId); - submarineJob.deleteJob(jobName); - break; - case TENSORBOARD_RUN: - submarineJob.runTensorBoard(); - break; - case TENSORBOARD_STOP: - String user = context.getAuthenticationInfo().getUser(); - String tensorboardName = SubmarineUtils.getTensorboardName(user); - submarineJob.deleteJob(tensorboardName); - break; - case OLD_UI: - createOldGUI(context); - break; - case CLEAN_RUNTIME_CACHE: - submarineJob.cleanRuntimeCache(); - break; - default: - submarineJob.onDashboard(); - break; - } - } catch (Exception e) { - LOGGER.error(e.getMessage(), e); - return new InterpreterResult(InterpreterResult.Code.ERROR, e.getMessage()); - } - - return new InterpreterResult(InterpreterResult.Code.SUCCESS); - } - - @Override - public void cancel(InterpreterContext context) { - SubmarineJob submarineJob = submarineContext.addOrGetSubmarineJob(properties, context); - String userName = context.getAuthenticationInfo().getUser(); - String noteId = context.getNoteId(); - String jobName = SubmarineUtils.getJobName(userName, noteId); - submarineJob.deleteJob(jobName); - } - - @Override - public FormType getFormType() { - return FormType.SIMPLE; - } - - @Override - public int getProgress(InterpreterContext context) { - return 0; - } - - @Override - public Scheduler getScheduler() { - String schedulerName = SubmarineInterpreter.class.getName() + this.hashCode(); - if (concurrentExecutedMax > 1) { - return SchedulerFactory.singleton().createOrGetParallelScheduler(schedulerName, - concurrentExecutedMax); - } else { - return 
SchedulerFactory.singleton().createOrGetFIFOScheduler(schedulerName); - } - } - - @Override - public List completion( - String buf, int cursor, InterpreterContext intpContext) { - return null; - } - - public void setPythonWorkDir(String noteId, File pythonWorkDir) { - SubmarineJob submarineJob = submarineContext.getSubmarineJob(noteId); - if (null != submarineJob) { - submarineJob.setPythonWorkDir(pythonWorkDir); - } - } - - private String createOldGUI(InterpreterContext context) { - // submarine command - Format - ParamOption[] commandOptions = new ParamOption[4]; - commandOptions[0] = new ParamOption(COMMAND_JOB_RUN, COMMAND_JOB_RUN); - commandOptions[1] = new ParamOption(COMMAND_JOB_SHOW, COMMAND_JOB_SHOW); - commandOptions[2] = new ParamOption(COMMAND_USAGE, COMMAND_USAGE); - String command = (String) context.getGui(). - select("Submarine Command", commandOptions, ""); - - String distributed = this.properties.getProperty(MACHINELEARNING_DISTRIBUTED_ENABLE, "false"); - - if (command.equals(COMMAND_JOB_RUN)) { - String inputPath = (String) context.getGui().textbox("Input Path(input_path)"); - String checkpoinkPath = (String) context.getGui().textbox("Checkpoint Path(checkpoint_path)"); - if (distributed.equals("true")) { - String psLaunchCmd = (String) context.getGui().textbox("PS Launch Command"); - } - String workerLaunchCmd = (String) context.getGui().textbox("Worker Launch Command"); - } - - /* Active - ParamOption[] auditOptions = new ParamOption[1]; - auditOptions[0] = new ParamOption("Active", "Active command"); - List flags = intpContext.getGui().checkbox("Active", Arrays.asList(""), auditOptions); - boolean activeChecked = flags.contains("Active"); - intpContext.getResourcePool().put(intpContext.getNoteId(), - intpContext.getParagraphId(), "Active", activeChecked);*/ - - return command; - } - - @VisibleForTesting - public SubmarineContext getSubmarineContext() { - return submarineContext; - } -} diff --git 
a/submarine/src/main/java/org/apache/zeppelin/submarine/SubmarineShellInterpreter.java b/submarine/src/main/java/org/apache/zeppelin/submarine/SubmarineShellInterpreter.java deleted file mode 100644 index f282fee298b..00000000000 --- a/submarine/src/main/java/org/apache/zeppelin/submarine/SubmarineShellInterpreter.java +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.zeppelin.submarine; - -import org.apache.commons.exec.CommandLine; -import org.apache.commons.exec.DefaultExecutor; -import org.apache.commons.lang3.StringUtils; -import org.apache.zeppelin.interpreter.InterpreterContext; -import org.apache.zeppelin.interpreter.InterpreterException; -import org.apache.zeppelin.interpreter.InterpreterResult; -import org.apache.zeppelin.shell.ShellInterpreter; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Properties; - -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.SUBMARINE_ALGORITHM_HDFS_PATH; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.SUBMARINE_HADOOP_KEYTAB; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.SUBMARINE_HADOOP_PRINCIPAL; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.TF_CHECKPOINT_PATH; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.USERNAME_SYMBOL; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.ZEPPELIN_SUBMARINE_AUTH_TYPE; - -/** - * Submarine Shell interpreter for Zeppelin. - */ -public class SubmarineShellInterpreter extends ShellInterpreter { - private static final Logger LOGGER = LoggerFactory.getLogger(SubmarineShellInterpreter.class); - - private final boolean isWindows = System.getProperty("os.name").startsWith("Windows"); - private final String shell = isWindows ? 
"cmd /c" : "bash -c"; - - public SubmarineShellInterpreter(Properties property) { - super(property); - } - - @Override - public InterpreterResult internalInterpret(String cmd, InterpreterContext context) { - setParagraphConfig(context); - - // algorithm path & checkpoint path support replaces ${username} with real user name - String algorithmPath = properties.getProperty(SUBMARINE_ALGORITHM_HDFS_PATH, ""); - if (algorithmPath.contains(USERNAME_SYMBOL)) { - algorithmPath = algorithmPath.replace(USERNAME_SYMBOL, userName); - properties.setProperty(SUBMARINE_ALGORITHM_HDFS_PATH, algorithmPath); - } - String checkpointPath = properties.getProperty( - TF_CHECKPOINT_PATH, ""); - if (checkpointPath.contains(USERNAME_SYMBOL)) { - checkpointPath = checkpointPath.replace(USERNAME_SYMBOL, userName); - properties.setProperty(TF_CHECKPOINT_PATH, checkpointPath); - } - - return super.internalInterpret(cmd, context); - } - - private void setParagraphConfig(InterpreterContext context) { - context.getConfig().put("editorHide", false); - context.getConfig().put("title", true); - } - - @Override - protected boolean runKerberosLogin() { - try { - createSecureConfiguration(); - return true; - } catch (Exception e) { - LOGGER.error("Unable to run kinit for zeppelin", e); - } - return false; - } - - public void createSecureConfiguration() throws InterpreterException { - Properties properties = getProperties(); - CommandLine cmdLine = CommandLine.parse(shell); - cmdLine.addArgument("-c", false); - String kinitCommand = String.format("kinit -k -t %s %s", - properties.getProperty(SUBMARINE_HADOOP_KEYTAB), - properties.getProperty(SUBMARINE_HADOOP_PRINCIPAL)); - cmdLine.addArgument(kinitCommand, false); - DefaultExecutor executor = new DefaultExecutor(); - try { - executor.execute(cmdLine); - } catch (Exception e) { - LOGGER.error("Unable to run kinit for zeppelin user " + kinitCommand, e); - throw new InterpreterException(e); - } - } - - @Override - protected boolean isKerboseEnabled() { - 
String authType = getProperty(ZEPPELIN_SUBMARINE_AUTH_TYPE, ""); - if (StringUtils.equals(authType, "kerberos")) { - return true; - } - return false; - } -} diff --git a/submarine/src/main/java/org/apache/zeppelin/submarine/commons/SubmarineCommand.java b/submarine/src/main/java/org/apache/zeppelin/submarine/commons/SubmarineCommand.java deleted file mode 100644 index 7a499da121d..00000000000 --- a/submarine/src/main/java/org/apache/zeppelin/submarine/commons/SubmarineCommand.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.zeppelin.submarine.commons; - -public enum SubmarineCommand { - DASHBOARD("DASHBOARD"), - USAGE("USAGE"), - JOB_RUN("JOB_RUN"), - JOB_SHOW("JOB_SHOW"), - JOB_LIST("JOB_LIST"), - JOB_STOP("JOB_STOP"), - CLEAN_RUNTIME_CACHE("CLEAN_RUNTIME_CACHE"), - OLD_UI("OLD_UI"), - TENSORBOARD_RUN("TENSORBOARD_RUN"), - TENSORBOARD_STOP("TENSORBOARD_STOP"), - UNKNOWN("Unknown"); - - private String command; - - SubmarineCommand(String command){ - this.command = command; - } - - public String getCommand(){ - return command; - } - - public static SubmarineCommand fromCommand(String command) { - for (SubmarineCommand type : SubmarineCommand.values()) { - if (type.getCommand().equals(command)) { - return type; - } - } - - return UNKNOWN; - } -} diff --git a/submarine/src/main/java/org/apache/zeppelin/submarine/commons/SubmarineConstants.java b/submarine/src/main/java/org/apache/zeppelin/submarine/commons/SubmarineConstants.java deleted file mode 100644 index 7202fcf924b..00000000000 --- a/submarine/src/main/java/org/apache/zeppelin/submarine/commons/SubmarineConstants.java +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.zeppelin.submarine.commons; - -/* - * NOTE: use lowercase + "_" for the option name - */ -public class SubmarineConstants { - // Docker container Environmental variable at `submarine-job-run-tf.jinja` - // and `/bin/interpreter.sh` - public static final String DOCKER_HADOOP_HDFS_HOME = "DOCKER_HADOOP_HDFS_HOME"; - public static final String DOCKER_JAVA_HOME = "DOCKER_JAVA_HOME"; - public static final String DOCKER_CONTAINER_TIME_ZONE = "DOCKER_CONTAINER_TIME_ZONE"; - public static final String INTERPRETER_LAUNCH_MODE = "INTERPRETER_LAUNCH_MODE"; - - // interpreter.sh Environmental variable - public static final String SUBMARINE_HADOOP_HOME = "SUBMARINE_HADOOP_HOME"; - public static final String HADOOP_YARN_SUBMARINE_JAR = "HADOOP_YARN_SUBMARINE_JAR"; - public static final String SUBMARINE_INTERPRETER_DOCKER_IMAGE - = "SUBMARINE_INTERPRETER_DOCKER_IMAGE"; - - public static final String ZEPPELIN_SUBMARINE_AUTH_TYPE = "zeppelin.submarine.auth.type"; - public static final String SUBMARINE_HADOOP_CONF_DIR = "SUBMARINE_HADOOP_CONF_DIR"; - public static final String SUBMARINE_HADOOP_KEYTAB = "SUBMARINE_HADOOP_KEYTAB"; - public static final String SUBMARINE_HADOOP_PRINCIPAL = "SUBMARINE_HADOOP_PRINCIPAL"; - public static final String SUBMARINE_HADOOP_KRB5_CONF = "submarine.hadoop.krb5.conf"; - - public static final String JOB_NAME = "JOB_NAME"; - public static final String CLEAN_CHECKPOINT = "CLEAN_CHECKPOINT"; - public static final String INPUT_PATH = "INPUT_PATH"; - public static final String CHECKPOINT_PATH = "CHECKPOINT_PATH"; - public static final String PS_LAUNCH_CMD = "PS_LAUNCH_CMD"; - public static final String WORKER_LAUNCH_CMD = "WORKER_LAUNCH_CMD"; - public static final String MACHINELEARNING_DISTRIBUTED_ENABLE - = "machinelearning.distributed.enable"; - - public static final String DOCKER_CONTAINER_NETWORK = "docker.container.network"; - public static final String SUBMARINE_YARN_QUEUE = "submarine.yarn.queue"; - public static final 
String SUBMARINE_CONCURRENT_MAX = "submarine.concurrent.max"; - - public static final String SUBMARINE_ALGORITHM_HDFS_PATH = "submarine.algorithm.hdfs.path"; - public static final String SUBMARINE_ALGORITHM_HDFS_FILES = "submarine.algorithm.hdfs.files"; - - public static final String TF_PARAMETER_SERVICES_DOCKER_IMAGE - = "tf.parameter.services.docker.image"; - public static final String TF_PARAMETER_SERVICES_NUM = "tf.parameter.services.num"; - public static final String TF_PARAMETER_SERVICES_GPU = "tf.parameter.services.gpu"; - public static final String TF_PARAMETER_SERVICES_CPU = "tf.parameter.services.cpu"; - public static final String TF_PARAMETER_SERVICES_MEMORY = "tf.parameter.services.memory"; - - public static final String TF_WORKER_SERVICES_DOCKER_IMAGE = "tf.worker.services.docker.image"; - public static final String TF_WORKER_SERVICES_NUM = "tf.worker.services.num"; - public static final String TF_WORKER_SERVICES_GPU = "tf.worker.services.gpu"; - public static final String TF_WORKER_SERVICES_CPU = "tf.worker.services.cpu"; - public static final String TF_WORKER_SERVICES_MEMORY = "tf.worker.services.memory"; - - public static final String TF_TENSORBOARD_ENABLE = "tf.tensorboard.enable"; - public static final String TF_CHECKPOINT_PATH = "tf.checkpoint.path"; - - public static final String COMMAND_TYPE = "COMMAND_TYPE"; - public static final String OPERATION_TYPE = "OPERATION_TYPE"; - public static final String COMMAND_USAGE = "USAGE"; - public static final String COMMAND_JOB_LIST = "JOB LIST"; - public static final String COMMAND_JOB_SHOW = "JOB SHOW"; - public static final String COMMAND_JOB_RUN = "JOB RUN"; - public static final String COMMAND_JOB_STOP = "JOB STOP"; - public static final String COMMAND_CLEAN = "CLEAN"; - public static final String COMMAND_ACTIVE = "COMMAND_ACTIVE"; - - public static final String PARAGRAPH_ID = "PARAGRAPH_ID"; - - public static final String COMMANDLINE_OPTIONS = "COMMANDLINE_OPTIONS"; - - // YARN - public static final 
String YARN_WEB_HTTP_ADDRESS - = "yarn.webapp.http.address"; - - public static final String YARN_APPLICATION_ID = "YARN_APPLICATION_ID"; - public static final String YARN_APPLICATION_NAME = "YARN_APPLICATION_NAME"; - public static final String YARN_APPLICATION_URL = "YARN_APPLICATION_URL"; - public static final String YARN_APPLICATION_STATUS = "YARN_APPLICATION_STATUS"; - public static final String YARN_APPLICATION_FINAL_STATUS = "YARN_APPLICATION_FINAL_STATUS"; - public static final String YARN_TENSORBOARD_URL = "YARN_TENSORBOARD_URL"; - public static final String TENSORBOARD_URL = "TENSORBOARD_URL"; - public static final String YARN_APP_STARTED_TIME = "YARN_APP_STARTED_TIME"; - public static final String YARN_APP_LAUNCH_TIME = "YARN_APP_LAUNCH_TIME"; - public static final String YARN_APP_FINISHED_TIME = "YARN_APP_FINISHED_TIME"; - public static final String YARN_APP_ELAPSED_TIME = "YARN_APP_ELAPSED_TIME"; - public static final String YARN_APP_STATE_NAME = "state"; - public static final String YARN_APP_FINAL_STATUS_NAME = "finalStatus"; - public static final String YARN_APP_STARTEDTIME_NAME = "startedTime"; - public static final String YARN_APP_LAUNCHTIME_NAME = "launchTime"; - - public static final String YARN_APPLICATION_STATUS_ACCEPT = "ACCEPT"; - public static final String YARN_APPLICATION_STATUS_RUNNING = "RUNNING"; - public static final String YARN_APPLICATION_STATUS_FINISHED = "EXECUTE_SUBMARINE_FINISHED"; - public static final String YARN_APPLICATION_STATUS_FAILED = "FAILED"; - - public static final String JOB_STATUS = "JOB_STATUS"; - - - // submarine.algorithm.hdfs.path support for replacing ${username} with real user name - public static final String USERNAME_SYMBOL = "${username}"; -} diff --git a/submarine/src/main/java/org/apache/zeppelin/submarine/commons/SubmarineUI.java b/submarine/src/main/java/org/apache/zeppelin/submarine/commons/SubmarineUI.java deleted file mode 100644 index 9c6b5404d87..00000000000 --- 
a/submarine/src/main/java/org/apache/zeppelin/submarine/commons/SubmarineUI.java +++ /dev/null @@ -1,244 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.zeppelin.submarine.commons; - -import com.google.common.base.Charsets; -import com.google.common.io.Resources; -import com.google.gson.Gson; -import com.google.gson.reflect.TypeToken; -import com.hubspot.jinjava.Jinjava; -import org.apache.commons.lang3.StringUtils; -import org.apache.zeppelin.interpreter.InterpreterContext; -import org.apache.zeppelin.interpreter.InterpreterResult; -import org.apache.zeppelin.interpreter.InterpreterResultMessageOutput; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStreamReader; -import java.net.URL; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; - -import static org.apache.zeppelin.submarine.commons.SubmarineUtils.unifyKey; - -public class SubmarineUI { - private Logger LOGGER = LoggerFactory.getLogger(SubmarineUI.class); - - InterpreterContext intpContext; - - private static final String SUBMARINE_DASHBOARD_JINJA - = "ui_templates/submarine-dashboard.jinja"; - //private static final String SUBMARINE_COMMAND_RUN_JINJA - // = "ui_templates/submarine-command-run.jinja"; - private static final String SUBMARINE_USAGE_JINJA - = "ui_templates/submarine-usage.jinja"; - private static final String 
SUBMARINE_COMMAND_OPTIONS_JSON - = "ui_templates/submarine-command-options.json"; - private static final String SUBMARINE_LOG_HEAD_JINJA - = "ui_templates/submarine-log-head.jinja"; - - public SubmarineUI(InterpreterContext context) { - intpContext = context; - - createUILayer(); - } - - // context.out::List resultMessageOutputs - // resultMessageOutputs[0] is UI - // resultMessageOutputs[1] is Execution information - // resultMessageOutputs[2] is Execution LOG - public void createUILayer() { - try { - // The first one is the UI interface. - intpContext.out.setType(InterpreterResult.Type.ANGULAR); - // The second is Execution information. - intpContext.out.setType(InterpreterResult.Type.ANGULAR); - } catch (IOException e) { - LOGGER.error(e.getMessage(), e); - } - } - - public void createSubmarineUI(SubmarineCommand submarineCmd) { - try { - HashMap mapParams = new HashMap(); - mapParams.put(unifyKey(SubmarineConstants.PARAGRAPH_ID), intpContext.getParagraphId()); - mapParams.put(unifyKey(SubmarineConstants.COMMAND_TYPE), submarineCmd.getCommand()); - - String templateName = ""; - switch (submarineCmd) { - case USAGE: - templateName = SUBMARINE_USAGE_JINJA; - List commandlineOptions = getCommandlineOptions(); - mapParams.put(SubmarineConstants.COMMANDLINE_OPTIONS, commandlineOptions); - break; - default: - templateName = SUBMARINE_DASHBOARD_JINJA; - break; - } - - URL urlTemplate = Resources.getResource(templateName); - String template = Resources.toString(urlTemplate, Charsets.UTF_8); - Jinjava jinjava = new Jinjava(); - String submarineUsage = jinjava.render(template, mapParams); - - // UI - InterpreterResultMessageOutput outputUI = intpContext.out.getOutputAt(0); - outputUI.clear(); - outputUI.write(submarineUsage); - outputUI.flush(); - - // UI update, log needs to be cleaned at the same time - InterpreterResultMessageOutput outputLOG = intpContext.out.getOutputAt(1); - outputLOG.clear(); - outputLOG.flush(); - } catch (IOException e) { - LOGGER.error("Can't 
print usage", e); - } - } - - public List getCommandlineOptions() throws IOException { - List commandlineOptions = new ArrayList<>(); - - BufferedReader bufferedReader = new BufferedReader(new InputStreamReader( - this.getClass().getResourceAsStream("/" + SUBMARINE_COMMAND_OPTIONS_JSON))); - - String line; - StringBuffer strbuf = new StringBuffer(); - int licensedLineCount = 14; - while ((line = bufferedReader.readLine()) != null) { - if (licensedLineCount-- > 0) { - continue; - } - strbuf.append(line); - } - Gson gson = new Gson(); - commandlineOptions = gson.fromJson(strbuf.toString(), - new TypeToken>() { - }.getType()); - - return commandlineOptions; - } - - public void createUsageUI() { - try { - List commandlineOptions = getCommandlineOptions(); - HashMap mapParams = new HashMap(); - mapParams.put(unifyKey(SubmarineConstants.PARAGRAPH_ID), intpContext.getParagraphId()); - mapParams.put(SubmarineConstants.COMMANDLINE_OPTIONS, commandlineOptions); - - URL urlTemplate = Resources.getResource(SUBMARINE_USAGE_JINJA); - String template = Resources.toString(urlTemplate, Charsets.UTF_8); - Jinjava jinjava = new Jinjava(); - String submarineUsage = jinjava.render(template, mapParams); - - InterpreterResultMessageOutput outputUI = intpContext.out.getOutputAt(0); - outputUI.clear(); - outputUI.write(submarineUsage); - outputUI.flush(); - - // UI update, log needs to be cleaned at the same time - InterpreterResultMessageOutput outputLOG = intpContext.out.getOutputAt(1); - outputLOG.clear(); - outputLOG.flush(); - } catch (IOException e) { - LOGGER.error("Can't print usage", e); - } - } - - public void createLogHeadUI() { - try { - HashMap mapParams = new HashMap(); - URL urlTemplate = Resources.getResource(SUBMARINE_LOG_HEAD_JINJA); - String template = Resources.toString(urlTemplate, Charsets.UTF_8); - Jinjava jinjava = new Jinjava(); - String submarineUsage = jinjava.render(template, mapParams); - - InterpreterResultMessageOutput outputUI = 
intpContext.out.getOutputAt(1); - outputUI.clear(); - outputUI.write(submarineUsage); - outputUI.flush(); - } catch (IOException e) { - LOGGER.error("Can't print usage", e); - } - } - - public void printCommnadUI(SubmarineCommand submarineCmd) { - try { - HashMap mapParams = new HashMap(); - mapParams.put(unifyKey(SubmarineConstants.PARAGRAPH_ID), intpContext.getParagraphId()); - - URL urlTemplate = Resources.getResource(SUBMARINE_DASHBOARD_JINJA); - String template = Resources.toString(urlTemplate, Charsets.UTF_8); - Jinjava jinjava = new Jinjava(); - String submarineUsage = jinjava.render(template, mapParams); - - InterpreterResultMessageOutput outputUI = intpContext.out.getOutputAt(0); - outputUI.clear(); - outputUI.write(submarineUsage); - outputUI.flush(); - - // UI update, log needs to be cleaned at the same time - InterpreterResultMessageOutput outputLOG = intpContext.out.getOutputAt(1); - outputLOG.clear(); - outputLOG.flush(); - } catch (IOException e) { - LOGGER.error("Can't print usage", e); - } - } - - public void outputLog(String title, String message) { - try { - StringBuffer formatMsg = new StringBuffer(); - InterpreterResultMessageOutput output = null; - output = intpContext.out.getOutputAt(1); - formatMsg.append("
    "); - if (!StringUtils.isEmpty(title)) { - formatMsg.append(title); - } - formatMsg.append("
    ");
    -      formatMsg.append(message);
    -      formatMsg.append("
    "); - formatMsg.append("
    \n"); - output.write(formatMsg.toString()); - output.flush(); - } catch (IOException e) { - LOGGER.error(e.getMessage(), e); - } - } - - /** - * submarine Commandline Option - */ - class CommandlineOption { - private String name; - private String description; - - CommandlineOption(String name, String description) { - this.name = name; - this.description = description; - } - - public String getName() { - return name; - } - - public String getDescription() { - return description; - } - } -} diff --git a/submarine/src/main/java/org/apache/zeppelin/submarine/commons/SubmarineUtils.java b/submarine/src/main/java/org/apache/zeppelin/submarine/commons/SubmarineUtils.java deleted file mode 100644 index 2c6aa7cd567..00000000000 --- a/submarine/src/main/java/org/apache/zeppelin/submarine/commons/SubmarineUtils.java +++ /dev/null @@ -1,301 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.zeppelin.submarine.commons; - -import org.apache.commons.lang3.StringUtils; -import org.apache.hadoop.fs.Path; -import org.apache.zeppelin.display.AngularObject; -import org.apache.zeppelin.interpreter.InterpreterContext; -import org.apache.zeppelin.submarine.job.SubmarineJob; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.io.IOException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Properties; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.CHECKPOINT_PATH; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.DOCKER_CONTAINER_NETWORK; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.DOCKER_CONTAINER_TIME_ZONE; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.DOCKER_HADOOP_HDFS_HOME; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.DOCKER_JAVA_HOME; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.HADOOP_YARN_SUBMARINE_JAR; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.INPUT_PATH; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.INTERPRETER_LAUNCH_MODE; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.JOB_NAME; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.MACHINELEARNING_DISTRIBUTED_ENABLE; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.PS_LAUNCH_CMD; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.SUBMARINE_ALGORITHM_HDFS_FILES; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.SUBMARINE_ALGORITHM_HDFS_PATH; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.SUBMARINE_HADOOP_CONF_DIR; -import static 
org.apache.zeppelin.submarine.commons.SubmarineConstants.SUBMARINE_HADOOP_HOME; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.SUBMARINE_HADOOP_KEYTAB; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.SUBMARINE_HADOOP_PRINCIPAL; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.SUBMARINE_YARN_QUEUE; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.TF_CHECKPOINT_PATH; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.TF_PARAMETER_SERVICES_CPU; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.TF_PARAMETER_SERVICES_DOCKER_IMAGE; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.TF_PARAMETER_SERVICES_GPU; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.TF_PARAMETER_SERVICES_MEMORY; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.TF_PARAMETER_SERVICES_NUM; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.TF_TENSORBOARD_ENABLE; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.TF_WORKER_SERVICES_CPU; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.TF_WORKER_SERVICES_DOCKER_IMAGE; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.TF_WORKER_SERVICES_GPU; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.TF_WORKER_SERVICES_MEMORY; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.TF_WORKER_SERVICES_NUM; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.WORKER_LAUNCH_CMD; - -public class SubmarineUtils { - private static Logger LOGGER = LoggerFactory.getLogger(SubmarineUI.class); - - public static String unifyKey(String key) { - key = key.replace(".", "_").toUpperCase(); - return key; - } - - // 1. yarn application match the pattern [a-z][a-z0-9-]* - // 2. 
yarn limit appName can not be greater than 30 characters - public static String getJobName(String userName, String noteId) - throws RuntimeException { - userName = userName.toLowerCase(); - userName = userName.replace("_", "-"); - userName = userName.replace(".", "-"); - - noteId = noteId.toLowerCase(); - noteId = noteId.replace("_", "-"); - noteId = noteId.replace(".", "-"); - - String jobName = userName + "-" + noteId; - - String yarnAppPatternString = "[a-z][a-z0-9-]*"; - Pattern pattern = Pattern.compile(yarnAppPatternString); - Matcher matcher = pattern.matcher(jobName); - boolean matches = matcher.matches(); - if (false == matches) { - throw new RuntimeException("Job Name(`noteName`-`noteId`) " + - "does not matcher the `[a-z][a-z0-9-]*` Pattern!"); - } - - if (jobName.length() > 30) { - throw new RuntimeException("Job Name can not be greater than 30 characters"); - } - - return jobName; - } - - // 1. Yarn application match the pattern [a-z][a-z0-9-]* - // 2. Yarn registry dns Hostname can not be greater than 64 characters, - // The name needs to be short. 
- public static String getTensorboardName(String user) { - return user.toLowerCase() + "-tb"; - } - - public static String getAgulObjValue(InterpreterContext context, String name) { - String value = ""; - AngularObject angularObject = context.getAngularObjectRegistry() - .get(name, context.getNoteId(), context.getParagraphId()); - if (null != angularObject && null != angularObject.get()) { - value = angularObject.get().toString(); - } - return value; - } - - public static void setAgulObjValue(InterpreterContext context, String name, Object value) { - AngularObject angularObject = context.getAngularObjectRegistry() - .add(name, value, context.getNoteId(), context.getParagraphId(), true); - } - - public static void removeAgulObjValue(InterpreterContext context, String name) { - context.getAngularObjectRegistry().remove(name, context.getNoteId(), - context.getParagraphId(), true); - } - - private static String getProperty(Properties properties, String key, - boolean outputLog, StringBuffer sbMessage) { - String value = properties.getProperty(key, ""); - if (StringUtils.isEmpty(value) && outputLog) { - sbMessage.append("EXECUTE_SUBMARINE_ERROR: " + - "Please set the submarine interpreter properties : "); - sbMessage.append(key).append("\n"); - } - - return value; - } - - // Convert properties to Map and check that the variable cannot be empty - public static HashMap propertiesToJinjaParams(Properties properties, SubmarineJob submarineJob, - boolean outLog) - throws IOException { - StringBuffer sbMessage = new StringBuffer(); - String noteId = submarineJob.getNoteId(); - - // Check user-set job variables - String machinelearingDistributed = getProperty(properties, MACHINELEARNING_DISTRIBUTED_ENABLE, - outLog, sbMessage); - String inputPath = getProperty(properties, INPUT_PATH, outLog, sbMessage); - String checkPointPath = getProperty(properties, CHECKPOINT_PATH, outLog, sbMessage); - String psLaunchCmd = ""; - if (StringUtils.equals(machinelearingDistributed, "true")) { 
- psLaunchCmd = getProperty(properties, PS_LAUNCH_CMD, outLog, sbMessage); - } - - String workerLaunchCmd = getProperty(properties, WORKER_LAUNCH_CMD, outLog, sbMessage); - - // Check interpretere set Properties - String submarineHadoopHome; - submarineHadoopHome = getProperty(properties, SUBMARINE_HADOOP_HOME, outLog, sbMessage); - File file = new File(submarineHadoopHome); - if (!file.exists()) { - sbMessage.append(SUBMARINE_HADOOP_HOME + ": " - + submarineHadoopHome + " is not a valid file path!\n"); - } - - String submarineJar = getProperty(properties, HADOOP_YARN_SUBMARINE_JAR, outLog, sbMessage); - file = new File(submarineJar); - if (!file.exists()) { - sbMessage.append(HADOOP_YARN_SUBMARINE_JAR + ":" - + submarineJar + " is not a valid file path!\n"); - } - String submarineYarnQueue = getProperty(properties, SUBMARINE_YARN_QUEUE, outLog, sbMessage); - String containerNetwork = getProperty(properties, DOCKER_CONTAINER_NETWORK, outLog, sbMessage); - String parameterServicesImage = getProperty(properties, TF_PARAMETER_SERVICES_DOCKER_IMAGE, - outLog, sbMessage); - String parameterServicesNum = getProperty(properties, TF_PARAMETER_SERVICES_NUM, - outLog, sbMessage); - String parameterServicesGpu = getProperty(properties, TF_PARAMETER_SERVICES_GPU, - outLog, sbMessage); - String parameterServicesCpu = getProperty(properties, TF_PARAMETER_SERVICES_CPU, - outLog, sbMessage); - String parameterServicesMemory = getProperty(properties, TF_PARAMETER_SERVICES_MEMORY, - outLog, sbMessage); - String workerServicesImage = getProperty(properties, TF_WORKER_SERVICES_DOCKER_IMAGE, - outLog, sbMessage); - String workerServicesNum = getProperty(properties, TF_WORKER_SERVICES_NUM, outLog, sbMessage); - String workerServicesGpu = getProperty(properties, TF_WORKER_SERVICES_GPU, outLog, sbMessage); - String workerServicesCpu = getProperty(properties, TF_WORKER_SERVICES_CPU, outLog, sbMessage); - String workerServicesMemory = getProperty(properties, TF_WORKER_SERVICES_MEMORY, - 
outLog, sbMessage); - String algorithmUploadPath = getProperty(properties, SUBMARINE_ALGORITHM_HDFS_PATH, - outLog, sbMessage); - String submarineHadoopKeytab = getProperty(properties, SUBMARINE_HADOOP_KEYTAB, - outLog, sbMessage); - file = new File(submarineHadoopKeytab); - if (!file.exists()) { - sbMessage.append(SUBMARINE_HADOOP_KEYTAB + ":" - + submarineHadoopKeytab + " is not a valid file path!\n"); - } - String submarineHadoopPrincipal = getProperty(properties, SUBMARINE_HADOOP_PRINCIPAL, - outLog, sbMessage); - String dockerHadoopHdfsHome = getProperty(properties, DOCKER_HADOOP_HDFS_HOME, - outLog, sbMessage); - String dockerJavaHome = getProperty(properties, DOCKER_JAVA_HOME, outLog, sbMessage); - String intpLaunchMode = getProperty(properties, INTERPRETER_LAUNCH_MODE, outLog, sbMessage); - if (StringUtils.isEmpty(intpLaunchMode)) { - intpLaunchMode = "local"; // default - } - String tensorboardEnable = getProperty(properties, TF_TENSORBOARD_ENABLE, outLog, sbMessage); - if (StringUtils.isEmpty(tensorboardEnable)) { - tensorboardEnable = "false"; // default - } - - // check - String tensorboardCheckpoint = getProperty(properties, TF_CHECKPOINT_PATH, outLog, sbMessage); - if (StringUtils.equals(tensorboardEnable, "true") - && StringUtils.isEmpty(tensorboardCheckpoint)) { - sbMessage.append("Tensorboard checkpoint path cannot be empty!\n"); - } - String userTensorboardCheckpoint = submarineJob.getUserTensorboardPath(); - Path chkpntPath = new Path(userTensorboardCheckpoint); - if (chkpntPath.depth() <= 3) { - sbMessage.append("Checkpoint path depth must be greater than 3!\n"); - } - - String sumbarineHadoopConfDir = getProperty(properties, SUBMARINE_HADOOP_CONF_DIR, - outLog, sbMessage); - - String dockerContainerTimezone = getProperty(properties, DOCKER_CONTAINER_TIME_ZONE, - outLog, sbMessage); - - String notePath = algorithmUploadPath + File.separator + noteId; - List arrayHdfsFiles = new ArrayList<>(); - List hdfsFiles = 
submarineJob.getHdfsClient().list(new Path(notePath + "/*")); - if (hdfsFiles.size() == 0) { - sbMessage.append("EXECUTE_SUBMARINE_ERROR: The " + notePath - + " file directory was is empty in HDFS!\n"); - } else { - if (outLog) { - StringBuffer sbCommitFiles = new StringBuffer(); - sbCommitFiles.append("INFO: You commit total of " + hdfsFiles.size() - + " algorithm files.\n"); - for (int i = 0; i < hdfsFiles.size(); i++) { - String filePath = hdfsFiles.get(i).toUri().toString(); - arrayHdfsFiles.add(filePath); - sbCommitFiles.append("INFO: [" + hdfsFiles.get(i).getName() + "] -> " + filePath + "\n"); - } - submarineJob.getSubmarineUI().outputLog("Execution information", - sbCommitFiles.toString()); - } - } - - // Found null variable, throw exception - if (!StringUtils.isEmpty(sbMessage.toString()) && outLog) { - throw new RuntimeException(sbMessage.toString()); - } - - // Save user-set variables and interpreter configuration parameters - String jobName = SubmarineUtils.getJobName(submarineJob.getUserName(), - submarineJob.getNoteId()); - HashMap mapParams = new HashMap(); - mapParams.put(unifyKey(INTERPRETER_LAUNCH_MODE), intpLaunchMode); - mapParams.put(unifyKey(SUBMARINE_HADOOP_HOME), submarineHadoopHome); - mapParams.put(unifyKey(SUBMARINE_HADOOP_CONF_DIR), sumbarineHadoopConfDir); - mapParams.put(unifyKey(DOCKER_HADOOP_HDFS_HOME), dockerHadoopHdfsHome); - mapParams.put(unifyKey(DOCKER_JAVA_HOME), dockerJavaHome); - mapParams.put(unifyKey(DOCKER_CONTAINER_TIME_ZONE), dockerContainerTimezone); - mapParams.put(unifyKey(HADOOP_YARN_SUBMARINE_JAR), submarineJar); - mapParams.put(unifyKey(JOB_NAME), jobName); - mapParams.put(unifyKey(DOCKER_CONTAINER_NETWORK), containerNetwork); - mapParams.put(unifyKey(SUBMARINE_YARN_QUEUE), submarineYarnQueue); - mapParams.put(unifyKey(SUBMARINE_HADOOP_KEYTAB), submarineHadoopKeytab); - mapParams.put(unifyKey(SUBMARINE_HADOOP_PRINCIPAL), submarineHadoopPrincipal); - mapParams.put(unifyKey(MACHINELEARNING_DISTRIBUTED_ENABLE), 
machinelearingDistributed); - mapParams.put(unifyKey(SUBMARINE_ALGORITHM_HDFS_PATH), notePath); - mapParams.put(unifyKey(SUBMARINE_ALGORITHM_HDFS_FILES), arrayHdfsFiles); - mapParams.put(unifyKey(INPUT_PATH), inputPath); - mapParams.put(unifyKey(CHECKPOINT_PATH), checkPointPath); - mapParams.put(unifyKey(PS_LAUNCH_CMD), psLaunchCmd); - mapParams.put(unifyKey(WORKER_LAUNCH_CMD), workerLaunchCmd); - mapParams.put(unifyKey(TF_PARAMETER_SERVICES_DOCKER_IMAGE), parameterServicesImage); - mapParams.put(unifyKey(TF_PARAMETER_SERVICES_NUM), parameterServicesNum); - mapParams.put(unifyKey(TF_PARAMETER_SERVICES_GPU), parameterServicesGpu); - mapParams.put(unifyKey(TF_PARAMETER_SERVICES_CPU), parameterServicesCpu); - mapParams.put(unifyKey(TF_PARAMETER_SERVICES_MEMORY), parameterServicesMemory); - mapParams.put(unifyKey(TF_WORKER_SERVICES_DOCKER_IMAGE), workerServicesImage); - mapParams.put(unifyKey(TF_WORKER_SERVICES_NUM), workerServicesNum); - mapParams.put(unifyKey(TF_WORKER_SERVICES_GPU), workerServicesGpu); - mapParams.put(unifyKey(TF_WORKER_SERVICES_CPU), workerServicesCpu); - mapParams.put(unifyKey(TF_WORKER_SERVICES_MEMORY), workerServicesMemory); - mapParams.put(unifyKey(TF_TENSORBOARD_ENABLE), tensorboardEnable); - mapParams.put(unifyKey(TF_CHECKPOINT_PATH), userTensorboardCheckpoint); - - return mapParams; - } -} diff --git a/submarine/src/main/java/org/apache/zeppelin/submarine/hadoop/FinalApplicationStatus.java b/submarine/src/main/java/org/apache/zeppelin/submarine/hadoop/FinalApplicationStatus.java deleted file mode 100644 index a9bfdbb7ed0..00000000000 --- a/submarine/src/main/java/org/apache/zeppelin/submarine/hadoop/FinalApplicationStatus.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.zeppelin.submarine.hadoop; - -// org/apache/hadoop/yarn/api/records/FinalApplicationStatus.class -public enum FinalApplicationStatus { - UNDEFINED, - SUCCEEDED, - FAILED, - KILLED, - ENDED; -} diff --git a/submarine/src/main/java/org/apache/zeppelin/submarine/hadoop/HdfsClient.java b/submarine/src/main/java/org/apache/zeppelin/submarine/hadoop/HdfsClient.java deleted file mode 100644 index 3fc2b33eb2a..00000000000 --- a/submarine/src/main/java/org/apache/zeppelin/submarine/hadoop/HdfsClient.java +++ /dev/null @@ -1,379 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.zeppelin.submarine.hadoop; - -import org.apache.commons.lang3.StringUtils; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.io.IOUtils; -import org.apache.hadoop.security.UserGroupInformation; -import org.apache.zeppelin.conf.ZeppelinConfiguration; -import org.apache.zeppelin.interpreter.thrift.ParagraphInfo; -import org.apache.zeppelin.submarine.commons.SubmarineConstants; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.io.FileWriter; -import java.io.IOException; -import java.io.ByteArrayOutputStream; -import java.io.ByteArrayInputStream; -import java.io.InputStream; -import java.net.URI; -import java.net.URISyntaxException; -import java.security.PrivilegedExceptionAction; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -/** - * Hadoop FileSystem wrapper. Support both secure and no-secure mode - */ -public class HdfsClient { - private static Logger LOGGER = LoggerFactory.getLogger(HdfsClient.class); - - private ZeppelinConfiguration zConf = ZeppelinConfiguration.create(); - private Configuration hadoopConf; - private boolean isSecurityEnabled; - private FileSystem fs; - - private static Pattern REPL_PATTERN = - Pattern.compile("(\\s*)%([\\w\\.]+)(\\(.*?\\))?.*", Pattern.DOTALL); - - public HdfsClient(Properties properties) { - String krb5conf = properties.getProperty(SubmarineConstants.SUBMARINE_HADOOP_KRB5_CONF, ""); - if (!StringUtils.isEmpty(krb5conf)) { - System.setProperty("java.security.krb5.conf", krb5conf); - } - - this.hadoopConf = new Configuration(); - // disable checksum for local file system. 
because interpreter.json may be updated by - // non-hadoop filesystem api - // disable caching for file:// scheme to avoid getting LocalFS which does CRC checks - // this.hadoopConf.setBoolean("fs.file.impl.disable.cache", true); - this.hadoopConf.set("fs.file.impl", "org.apache.hadoop.hdfs.DistributedFileSystem"); - // UserGroupInformation.setConfiguration(hadoopConf); - this.isSecurityEnabled = UserGroupInformation.isSecurityEnabled(); - - if (isSecurityEnabled) { - String keytab = properties.getProperty( - SubmarineConstants.SUBMARINE_HADOOP_KEYTAB, ""); - String principal = properties.getProperty( - SubmarineConstants.SUBMARINE_HADOOP_PRINCIPAL, ""); - - ZeppelinConfiguration zConf = ZeppelinConfiguration.create(); - if (StringUtils.isEmpty(keytab)) { - keytab = zConf.getString( - ZeppelinConfiguration.ConfVars.ZEPPELIN_SERVER_KERBEROS_KEYTAB); - } - if (StringUtils.isEmpty(principal)) { - principal = zConf.getString( - ZeppelinConfiguration.ConfVars.ZEPPELIN_SERVER_KERBEROS_PRINCIPAL); - } - if (StringUtils.isBlank(keytab) || StringUtils.isBlank(principal)) { - throw new RuntimeException("keytab and principal can not be empty, keytab: " + keytab - + ", principal: " + principal); - } - try { - UserGroupInformation.loginUserFromKeytab(principal, keytab); - } catch (IOException e) { - throw new RuntimeException("Fail to login via keytab:" + keytab + - ", principal:" + principal, e); - } catch (Exception e) { - LOGGER.error(e.getMessage(), e); - } - } - - try { - this.fs = FileSystem.get(new URI("/"), this.hadoopConf); - } catch (IOException e) { - LOGGER.error(e.getMessage(), e); - } catch (URISyntaxException e) { - LOGGER.error(e.getMessage(), e); - } - } - - public FileSystem getFs() { - return fs; - } - - public Path makeQualified(Path path) { - return fs.makeQualified(path); - } - - public boolean exists(final Path path) throws IOException { - return callHdfsOperation(new HdfsOperation() { - - @Override - public Boolean call() throws IOException { - return 
fs.exists(path); - } - }); - } - - public void tryMkDir(final Path dir) throws IOException { - callHdfsOperation(new HdfsOperation() { - @Override - public Void call() throws IOException { - if (!fs.exists(dir)) { - fs.mkdirs(dir); - LOGGER.info("Create dir {} in hdfs", dir.toString()); - } - if (fs.isFile(dir)) { - throw new IOException(dir.toString() + " is file instead of directory, please remove " + - "it or specify another directory"); - } - fs.mkdirs(dir); - return null; - } - }); - } - - public List list(final Path path) throws IOException { - return callHdfsOperation(new HdfsOperation>() { - @Override - public List call() throws IOException { - List paths = new ArrayList<>(); - for (FileStatus status : fs.globStatus(path)) { - paths.add(status.getPath()); - } - return paths; - } - }); - } - - // recursive search path, (list folder in sub folder on demand, instead of load all - // data when zeppelin server start) - public List listAll(final Path path) throws IOException { - return callHdfsOperation(new HdfsOperation>() { - @Override - public List call() throws IOException { - List paths = new ArrayList<>(); - collectNoteFiles(path, paths); - return paths; - } - - private void collectNoteFiles(Path folder, List noteFiles) throws IOException { - FileStatus[] paths = fs.listStatus(folder); - for (FileStatus path : paths) { - if (path.isDirectory()) { - collectNoteFiles(path.getPath(), noteFiles); - } else { - if (path.getPath().getName().endsWith(".zpln")) { - noteFiles.add(path.getPath()); - } else { - LOGGER.warn("Unknown file: " + path.getPath()); - } - } - } - } - }); - } - - public boolean delete(final Path path) throws IOException { - return callHdfsOperation(new HdfsOperation() { - @Override - public Boolean call() throws IOException { - return fs.delete(path, true); - } - }); - } - - public String readFile(final Path file) throws IOException { - return callHdfsOperation(new HdfsOperation() { - @Override - public String call() throws IOException { - 
LOGGER.debug("Read from file: " + file); - ByteArrayOutputStream noteBytes = new ByteArrayOutputStream(); - IOUtils.copyBytes(fs.open(file), noteBytes, hadoopConf); - return new String(noteBytes.toString( - zConf.getString(ZeppelinConfiguration.ConfVars.ZEPPELIN_ENCODING))); - } - }); - } - - public void writeFile(final String content, final Path file) - throws IOException { - callHdfsOperation(new HdfsOperation() { - @Override - public Void call() throws IOException { - InputStream in = new ByteArrayInputStream(content.getBytes( - zConf.getString(ZeppelinConfiguration.ConfVars.ZEPPELIN_ENCODING))); - Path tmpFile = new Path(file.toString() + ".tmp"); - IOUtils.copyBytes(in, fs.create(tmpFile), hadoopConf); - fs.delete(file, true); - fs.rename(tmpFile, file); - return null; - } - }); - } - - public void move(Path src, Path dest) throws IOException { - callHdfsOperation(() -> { - fs.rename(src, dest); - return null; - }); - } - - private interface HdfsOperation { - T call() throws IOException; - } - - private synchronized T callHdfsOperation(final HdfsOperation func) throws IOException { - if (isSecurityEnabled) { - try { - return UserGroupInformation.getCurrentUser().doAs(new PrivilegedExceptionAction() { - @Override - public T run() throws Exception { - return func.call(); - } - }); - } catch (InterruptedException e) { - throw new IOException(e); - } - } else { - return func.call(); - } - } - - public String parseText(String text) { - String script = "", intpText = ""; - - // parse text to get interpreter component - if (text != null) { - Matcher matcher = REPL_PATTERN.matcher(text); - if (matcher.matches()) { - String headingSpace = matcher.group(1); - intpText = matcher.group(2); - - if (matcher.groupCount() == 3 && matcher.group(3) != null) { - String localPropertiesText = matcher.group(3); - String[] splits = localPropertiesText.substring(1, localPropertiesText.length() - 1) - .split(","); - for (String split : splits) { - String[] kv = split.split("="); - if 
(StringUtils.isBlank(split) || kv.length == 0) { - continue; - } - if (kv.length > 2) { - throw new RuntimeException("Invalid paragraph properties format: " + split); - } - } - script = text.substring(headingSpace.length() + intpText.length() + - localPropertiesText.length() + 1).trim(); - } else { - script = text.substring(headingSpace.length() + intpText.length() + 1).trim(); - } - } else { - script = text.trim(); - } - } - - return script; - } - - public String saveParagraphToFiles(String noteId, List paragraphInfos, - String dirName, Properties properties) - throws Exception { - StringBuffer outputMsg = new StringBuffer(); - - String hdfsUploadPath = properties.getProperty( - SubmarineConstants.SUBMARINE_ALGORITHM_HDFS_PATH, ""); - - HashMap mapParagraph = new HashMap<>(); - for (int i = 0; i < paragraphInfos.size(); i++) { - ParagraphInfo paragraph = paragraphInfos.get(i); - String paragraphTitle = paragraph.getParagraphTitle(); - if (org.apache.commons.lang3.StringUtils.isEmpty(paragraphTitle)) { - String message = "WARN: The title of the [" + i - + "] paragraph is empty and was not submitted to HDFS.\n"; - LOGGER.warn(message); - outputMsg.append(message); - continue; - } - if (!mapParagraph.containsKey(paragraphTitle)) { - StringBuffer mergeScript = new StringBuffer(); - mapParagraph.put(paragraphTitle, mergeScript); - } - StringBuffer mergeScript = mapParagraph.get(paragraphTitle); - String parapraphText = paragraph.getParagraphText(); - String text = parseText(parapraphText); - mergeScript.append(text + "\n\n"); - } - - // Clear all files in the local noteId directory - if (!org.apache.commons.lang3.StringUtils.isEmpty(dirName)) { - String noteDir = dirName + "/" + noteId; - File fileNoteDir = new File(noteDir); - if (fileNoteDir.exists()) { - fileNoteDir.delete(); - } - fileNoteDir.mkdirs(); - } - - // Clear all files in the noteid directory in HDFS - if (!org.apache.commons.lang3.StringUtils.isEmpty(hdfsUploadPath)) { - Path hdfsPath = new 
Path(hdfsUploadPath + "/" + noteId); - try { - if (exists(hdfsPath)) { - delete(hdfsPath); - tryMkDir(hdfsPath); - } - } catch (IOException e) { - LOGGER.error(e.getMessage(), e); - throw new Exception(e); - } - } - - for (Map.Entry entry : mapParagraph.entrySet()) { - try { - String fileName = entry.getKey(); - String fileContext = entry.getValue().toString(); - String paragraphFile = dirName + "/" + noteId + "/" + fileName; - - // save to local file - if (!org.apache.commons.lang3.StringUtils.isEmpty(dirName)) { - File fileParagraph = new File(paragraphFile); - if (!fileParagraph.exists()) { - fileParagraph.createNewFile(); - } - FileWriter writer = new FileWriter(paragraphFile); - writer.write(fileContext); - writer.close(); - } - - // save to hdfs - if (!StringUtils.isEmpty(hdfsUploadPath)) { - String fileDir = hdfsUploadPath + "/" + noteId + "/" + fileName; - // upload algorithm file - LOGGER.info("Commit algorithm to HDFS: {}", fileDir); - Path filePath = new Path(fileDir); - writeFile(fileContext, filePath); - } - } catch (IOException e) { - LOGGER.error(e.getMessage(), e); - throw new Exception(e); - } - } - - return outputMsg.toString(); - } -} diff --git a/submarine/src/main/java/org/apache/zeppelin/submarine/hadoop/YarnApplicationState.java b/submarine/src/main/java/org/apache/zeppelin/submarine/hadoop/YarnApplicationState.java deleted file mode 100644 index 50d27428342..00000000000 --- a/submarine/src/main/java/org/apache/zeppelin/submarine/hadoop/YarnApplicationState.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.zeppelin.submarine.hadoop; - -// org/apache/hadoop/yarn/api/records/YarnApplicationState.class -public enum YarnApplicationState { - NEW, - NEW_SAVING, - SUBMITTED, - ACCEPTED, - RUNNING, - FINISHED, - FAILED, - KILLED; -} diff --git a/submarine/src/main/java/org/apache/zeppelin/submarine/hadoop/YarnClient.java b/submarine/src/main/java/org/apache/zeppelin/submarine/hadoop/YarnClient.java deleted file mode 100644 index 9e9dab892e2..00000000000 --- a/submarine/src/main/java/org/apache/zeppelin/submarine/hadoop/YarnClient.java +++ /dev/null @@ -1,623 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.zeppelin.submarine.hadoop; - -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.security.Principal; -import java.security.PrivilegedAction; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.Set; -import java.util.stream.Collectors; - -import com.google.gson.Gson; -import com.google.gson.JsonArray; -import com.google.gson.JsonElement; -import com.google.gson.JsonIOException; -import com.google.gson.JsonObject; -import com.google.gson.JsonParser; -import com.google.gson.JsonPrimitive; -import com.google.gson.JsonSyntaxException; -import com.google.gson.reflect.TypeToken; -import org.apache.commons.lang3.StringUtils; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.security.UserGroupInformation; - -import org.apache.http.HttpResponse; -import org.apache.http.auth.AuthSchemeProvider; -import org.apache.http.auth.AuthScope; -import org.apache.http.auth.Credentials; -import org.apache.http.client.HttpClient; -import org.apache.http.client.config.AuthSchemes; -import org.apache.http.client.config.CookieSpecs; -import org.apache.http.client.config.RequestConfig; -import org.apache.http.client.methods.HttpDelete; -import org.apache.http.client.methods.HttpGet; -import org.apache.http.client.methods.HttpPost; -import org.apache.http.client.methods.HttpUriRequest; -import org.apache.http.config.Lookup; -import org.apache.http.config.RegistryBuilder; -import org.apache.http.impl.auth.SPNegoSchemeFactory; -import org.apache.http.impl.client.BasicCredentialsProvider; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.HttpClientBuilder; -import org.apache.zeppelin.conf.ZeppelinConfiguration; -import 
org.apache.zeppelin.submarine.commons.SubmarineConstants; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import javax.security.auth.Subject; -import javax.security.auth.kerberos.KerberosPrincipal; -import javax.security.auth.login.AppConfigurationEntry; -import javax.security.auth.login.LoginContext; - -public class YarnClient { - private Logger LOGGER = LoggerFactory.getLogger(YarnClient.class); - - private Configuration hadoopConf; - private String yarnWebHttpAddr; - private String principal = ""; - private String keytab = ""; - - public static final String YARN_REST_APPATTEMPTS = "appAttempts"; - public static final String YARN_REST_CONTAINER = "container"; - public static final String YARN_REST_APPATTEMPT = "appAttempt"; - public static final String YARN_REST_APPATTEMPTID = "appAttemptId"; - - public static final String YARN_REST_EXPOSEDPORTS = "EXPOSEDPORTS"; - public static final String CONTAINER_IP = "CONTAINER_IP"; - public static final String CONTAINER_PORT = "CONTAINER_PORT"; - public static final String HOST_IP = "HOST_IP"; - public static final String HOST_PORT = "HOST_PORT"; - - String SERVICE_PATH = "/services/{service_name}"; - - private boolean hadoopSecurityEnabled = true; // simple or kerberos - - public YarnClient(Properties properties) { - this.hadoopConf = new Configuration(); - - String hadoopAuthType = properties.getProperty( - SubmarineConstants.ZEPPELIN_SUBMARINE_AUTH_TYPE, "kerberos"); - if (StringUtils.equals(hadoopAuthType, "simple")) { - hadoopSecurityEnabled = false; - } - - yarnWebHttpAddr = properties.getProperty(SubmarineConstants.YARN_WEB_HTTP_ADDRESS, ""); - boolean isSecurityEnabled = UserGroupInformation.isSecurityEnabled(); - if (isSecurityEnabled || hadoopSecurityEnabled) { - String krb5conf = properties.getProperty(SubmarineConstants.SUBMARINE_HADOOP_KRB5_CONF, ""); - if (StringUtils.isEmpty(krb5conf)) { - krb5conf = "/etc/krb5.conf"; - System.setProperty("java.security.krb5.conf", krb5conf); - } - - String 
keytab = properties.getProperty( - SubmarineConstants.SUBMARINE_HADOOP_KEYTAB, ""); - String principal = properties.getProperty( - SubmarineConstants.SUBMARINE_HADOOP_PRINCIPAL, ""); - - ZeppelinConfiguration zConf = ZeppelinConfiguration.create(); - if (StringUtils.isEmpty(keytab)) { - keytab = zConf.getString( - ZeppelinConfiguration.ConfVars.ZEPPELIN_SERVER_KERBEROS_KEYTAB); - } - if (StringUtils.isEmpty(principal)) { - principal = zConf.getString( - ZeppelinConfiguration.ConfVars.ZEPPELIN_SERVER_KERBEROS_PRINCIPAL); - } - if (StringUtils.isBlank(keytab) || StringUtils.isBlank(principal)) { - throw new RuntimeException("keytab and principal can not be empty, keytab: " - + keytab + ", principal: " + principal); - } - - this.principal = principal; - this.keytab = keytab; - if (LOGGER.isDebugEnabled()) { - System.setProperty("sun.security.spnego.debug", "true"); - System.setProperty("sun.security.krb5.debug", "true"); - } - } - } - - // http://yarn-web-http-address/app/v1/services/{service_name} - public void deleteService(String serviceName) { - String appUrl = this.yarnWebHttpAddr + "/app/v1/services/" + serviceName - + "?_=" + System.currentTimeMillis(); - - InputStream inputStream = null; - try { - HttpResponse response = callRestUrl(appUrl, principal, HTTP.DELETE); - inputStream = response.getEntity().getContent(); - String result = new BufferedReader(new InputStreamReader(inputStream)) - .lines().collect(Collectors.joining(System.lineSeparator())); - if (response.getStatusLine().getStatusCode() != 200 /*success*/) { - LOGGER.warn("Status code " + response.getStatusLine().getStatusCode()); - LOGGER.warn("message is :" + Arrays.deepToString(response.getAllHeaders())); - LOGGER.warn("result:\n" + result); - } - } catch (Exception exp) { - exp.printStackTrace(); - } finally { - try { - if (null != inputStream) { - inputStream.close(); - } - } catch (Exception e) { - LOGGER.error(e.getMessage(), e); - } - } - } - - // 
http://yarn-web-http-address/app/v1/services/{appIdOrName} - // test/resources/app-v1-services-app_name.json - public Map getAppServices(String appIdOrName) { - Map mapStatus = new HashMap<>(); - String appUrl = this.yarnWebHttpAddr + "/app/v1/services/" + appIdOrName - + "?_=" + System.currentTimeMillis(); - - InputStream inputStream = null; - try { - HttpResponse response = callRestUrl(appUrl, principal, HTTP.GET); - inputStream = response.getEntity().getContent(); - String result = new BufferedReader(new InputStreamReader(inputStream)) - .lines().collect(Collectors.joining(System.lineSeparator())); - if (response.getStatusLine().getStatusCode() != 200 /*success*/ - && response.getStatusLine().getStatusCode() != 404 /*Not found*/) { - LOGGER.warn("Status code " + response.getStatusLine().getStatusCode()); - LOGGER.warn("message is :" + Arrays.deepToString(response.getAllHeaders())); - LOGGER.warn("result:\n" + result); - } - - // parse app status json - mapStatus = parseAppServices(result); - } catch (Exception exp) { - exp.printStackTrace(); - } finally { - try { - if (null != inputStream) { - inputStream.close(); - } - } catch (Exception e) { - LOGGER.error(e.getMessage(), e); - } - } - - return mapStatus; - } - - // http://yarn-web-http-address/ws/v1/cluster/apps/{appId} - // test/resources/ws-v1-cluster-apps-application_id-failed.json - // test/resources/ws-v1-cluster-apps-application_id-finished.json - // test/resources/ws-v1-cluster-apps-application_id-running.json - public Map getClusterApps(String appId) { - Map appAttempts = new HashMap<>(); - String appUrl = this.yarnWebHttpAddr + "/ws/v1/cluster/apps/" + appId - + "?_=" + System.currentTimeMillis(); - - InputStream inputStream = null; - try { - HttpResponse response = callRestUrl(appUrl, principal, HTTP.GET); - inputStream = response.getEntity().getContent(); - String result = new BufferedReader(new InputStreamReader(inputStream)) - .lines().collect(Collectors.joining(System.lineSeparator())); - if 
(response.getStatusLine().getStatusCode() != 200 /*success*/) { - LOGGER.warn("Status code " + response.getStatusLine().getStatusCode()); - LOGGER.warn("message is :" + Arrays.deepToString(response.getAllHeaders())); - LOGGER.warn("result:\n" + result); - } - // parse app status json - appAttempts = parseClusterApps(result); - - return appAttempts; - } catch (Exception exp) { - exp.printStackTrace(); - } finally { - try { - if (null != inputStream) { - inputStream.close(); - } - } catch (Exception e) { - LOGGER.error(e.getMessage(), e); - } - } - - return appAttempts; - } - - public Map parseClusterApps(String jsonContent) { - Map appAttempts = new HashMap<>(); - - try { - JsonParser jsonParser = new JsonParser(); - JsonObject jsonObject = (JsonObject) jsonParser.parse(jsonContent); - - JsonObject jsonAppAttempts = jsonObject.get("app").getAsJsonObject(); - if (null == jsonAppAttempts) { - return appAttempts; - } - for (Map.Entry entry : jsonAppAttempts.entrySet()) { - String key = entry.getKey(); - if (null != entry.getValue() && entry.getValue() instanceof JsonPrimitive) { - Object value = entry.getValue().getAsString(); - appAttempts.put(key, value); - } - } - } catch (JsonIOException e) { - LOGGER.error(e.getMessage(), e); - } catch (JsonSyntaxException e) { - LOGGER.error(e.getMessage(), e); - } - - return appAttempts; - } - - // http://yarn-web-http-address/ws/v1/cluster/apps/{appId}/appattempts - // test/resources/ws-v1-cluster-apps-application_id-appattempts.json - public List> getAppAttempts(String appId) { - List> appAttempts = new ArrayList<>(); - String appUrl = this.yarnWebHttpAddr + "/ws/v1/cluster/apps/" + appId - + "/appattempts?_=" + System.currentTimeMillis(); - - InputStream inputStream = null; - try { - HttpResponse response = callRestUrl(appUrl, principal, HTTP.GET); - inputStream = response.getEntity().getContent(); - String result = new BufferedReader(new InputStreamReader(inputStream)) - 
.lines().collect(Collectors.joining(System.lineSeparator())); - if (response.getStatusLine().getStatusCode() != 200 /*success*/) { - LOGGER.warn("Status code " + response.getStatusLine().getStatusCode()); - LOGGER.warn("message is :" + Arrays.deepToString(response.getAllHeaders())); - LOGGER.warn("result:\n" + result); - } - - // parse app status json - appAttempts = parseAppAttempts(result); - } catch (Exception exp) { - exp.printStackTrace(); - } finally { - try { - if (null != inputStream) { - inputStream.close(); - } - } catch (Exception e) { - LOGGER.error(e.getMessage(), e); - } - } - - return appAttempts; - } - - // http://yarn-web-http-address/ws/v1/cluster/apps/{appId}/appattempts/{appAttemptId}/containers - // test/resources/ws-v1-cluster-apps-application_id-appattempts-appattempt_id-containers.json - public List> getAppAttemptsContainers(String appId, String appAttemptId) { - List> appAttemptsContainers = new ArrayList<>(); - String appUrl = this.yarnWebHttpAddr + "/ws/v1/cluster/apps/" + appId - + "/appattempts/" + appAttemptId + "/containers?_=" + System.currentTimeMillis(); - - InputStream inputStream = null; - try { - HttpResponse response = callRestUrl(appUrl, principal, HTTP.GET); - inputStream = response.getEntity().getContent(); - String result = new BufferedReader(new InputStreamReader(inputStream)) - .lines().collect(Collectors.joining(System.lineSeparator())); - if (response.getStatusLine().getStatusCode() != 200 /*success*/) { - LOGGER.warn("Status code " + response.getStatusLine().getStatusCode()); - LOGGER.warn("message is :" + Arrays.deepToString(response.getAllHeaders())); - LOGGER.warn("result:\n" + result); - } - - // parse app status json - appAttemptsContainers = parseAppAttemptsContainers(result); - } catch (Exception exp) { - exp.printStackTrace(); - } finally { - try { - if (null != inputStream) { - inputStream.close(); - } - } catch (Exception e) { - LOGGER.error(e.getMessage(), e); - } - } - - return appAttemptsContainers; - } - 
- public List> getAppAttemptsContainersExportPorts(String appId) { - List> listExportPorts = new ArrayList<>(); - - // appId -> appAttemptId - List> listAppAttempts = getAppAttempts(appId); - for (Map mapAppAttempts : listAppAttempts) { - if (mapAppAttempts.containsKey(YARN_REST_APPATTEMPTID)) { - String appAttemptId = (String) mapAppAttempts.get(YARN_REST_APPATTEMPTID); - List> exportPorts = getAppAttemptsContainers(appId, appAttemptId); - if (exportPorts.size() > 0) { - listExportPorts.addAll(exportPorts); - } - } - } - - return listExportPorts; - } - - // Kerberos authentication for simulated curling - private static HttpClient buildSpengoHttpClient() { - HttpClientBuilder builder = HttpClientBuilder.create(); - Lookup authSchemeRegistry - = RegistryBuilder.create().register( - AuthSchemes.SPNEGO, new SPNegoSchemeFactory(true)).build(); - builder.setDefaultAuthSchemeRegistry(authSchemeRegistry); - BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider(); - credentialsProvider.setCredentials(new AuthScope(null, -1, null), new Credentials() { - @Override - public Principal getUserPrincipal() { - return null; - } - - @Override - public String getPassword() { - return null; - } - }); - builder.setDefaultCredentialsProvider(credentialsProvider); - - // Avoid output WARN: Cookie rejected - RequestConfig globalConfig = RequestConfig.custom().setCookieSpec(CookieSpecs.IGNORE_COOKIES) - .build(); - builder.setDefaultRequestConfig(globalConfig); - - CloseableHttpClient httpClient = builder.build(); - - return httpClient; - } - - public HttpResponse callRestUrl(final String url, final String userId, HTTP operation) { - if (LOGGER.isDebugEnabled()) { - LOGGER.debug(String.format("Calling YarnClient %s %s %s", - this.principal, this.keytab, url)); - } - javax.security.auth.login.Configuration config = new javax.security.auth.login.Configuration() { - @SuppressWarnings("serial") - @Override - public AppConfigurationEntry[] getAppConfigurationEntry(String 
name) { - return new AppConfigurationEntry[]{new AppConfigurationEntry( - "com.sun.security.auth.module.Krb5LoginModule", - AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, - new HashMap() { - { - put("useTicketCache", "false"); - put("useKeyTab", "true"); - put("keyTab", keytab); - // Krb5 in GSS API needs to be refreshed so it does not throw the error - // Specified version of key is not available - put("refreshKrb5Config", "true"); - put("principal", principal); - put("storeKey", "true"); - put("doNotPrompt", "true"); - put("isInitiator", "true"); - if (LOGGER.isDebugEnabled()) { - put("debug", "true"); - } - } - })}; - } - }; - - Set principals = new HashSet(1); - principals.add(new KerberosPrincipal(userId)); - Subject sub = new Subject(false, principals, new HashSet(), new HashSet()); - try { - // Authentication module: Krb5Login - LoginContext loginContext = new LoginContext("Krb5Login", sub, null, config); - loginContext.login(); - Subject serviceSubject = loginContext.getSubject(); - return Subject.doAs(serviceSubject, new PrivilegedAction() { - HttpResponse httpResponse = null; - - @Override - public HttpResponse run() { - try { - HttpUriRequest request = null; - switch (operation) { - case DELETE: - request = new HttpDelete(url); - break; - case POST: - request = new HttpPost(url); - break; - default: - request = new HttpGet(url); - break; - } - - HttpClient spengoClient = buildSpengoHttpClient(); - httpResponse = spengoClient.execute(request); - return httpResponse; - } catch (IOException e) { - LOGGER.error(e.getMessage(), e); - } - return httpResponse; - } - }); - } catch (Exception e) { - LOGGER.error(e.getMessage(), e); - } - return null; - } - - private Map parseAppServices(String appJson) { - Map mapStatus = new HashMap<>(); - - try { - JsonParser jsonParser = new JsonParser(); - JsonObject jsonObject = (JsonObject) jsonParser.parse(appJson); - - JsonElement elementAppId = jsonObject.get("id"); - JsonElement elementAppState = 
jsonObject.get("state"); - JsonElement elementAppName = jsonObject.get("name"); - - String appId = (elementAppId == null) ? "" : elementAppId.getAsString(); - String appState = (elementAppState == null) ? "" : elementAppState.getAsString(); - String appName = (elementAppName == null) ? "" : elementAppName.getAsString(); - - if (!StringUtils.isEmpty(appId)) { - mapStatus.put(SubmarineConstants.YARN_APPLICATION_ID, appId); - } - if (!StringUtils.isEmpty(appName)) { - mapStatus.put(SubmarineConstants.YARN_APPLICATION_NAME, appName); - } - if (!StringUtils.isEmpty(appState)) { - mapStatus.put(SubmarineConstants.YARN_APPLICATION_STATUS, appState); - } - } catch (JsonIOException e) { - LOGGER.error(e.getMessage(), e); - } catch (JsonSyntaxException e) { - LOGGER.error(e.getMessage(), e); - } - - return mapStatus; - } - - // appJson format : submarine/src/test/resources/appAttempts.json - public List> parseAppAttempts(String jsonContent) { - List> appAttempts = new ArrayList<>(); - - try { - JsonParser jsonParser = new JsonParser(); - JsonObject jsonObject = (JsonObject) jsonParser.parse(jsonContent); - - JsonObject jsonAppAttempts = jsonObject.get(YARN_REST_APPATTEMPTS).getAsJsonObject(); - if (null == jsonAppAttempts) { - return appAttempts; - } - JsonArray jsonAppAttempt = jsonAppAttempts.get(YARN_REST_APPATTEMPT).getAsJsonArray(); - if (null == jsonAppAttempt) { - return appAttempts; - } - for (int i = 0; i < jsonAppAttempt.size(); i++) { - Map mapAppAttempt = new HashMap<>(); - - JsonObject jsonParagraph = jsonAppAttempt.get(i).getAsJsonObject(); - - JsonElement jsonElement = jsonParagraph.get("id"); - String id = (jsonElement == null) ? "" : jsonElement.getAsString(); - mapAppAttempt.put("id", id); - - jsonElement = jsonParagraph.get(YARN_REST_APPATTEMPTID); - String appAttemptId = (jsonElement == null) ? 
"" : jsonElement.getAsString(); - mapAppAttempt.put(YARN_REST_APPATTEMPTID, appAttemptId); - - appAttempts.add(mapAppAttempt); - } - } catch (JsonIOException e) { - LOGGER.error(e.getMessage(), e); - } catch (JsonSyntaxException e) { - LOGGER.error(e.getMessage(), e); - } - - return appAttempts; - } - - // appJson format : submarine/src/test/resources/appAttempts.json - public List> parseAppAttemptsContainers(String jsonContent) { - List> appContainers = new ArrayList<>(); - - try { - JsonParser jsonParser = new JsonParser(); - JsonObject jsonObject = (JsonObject) jsonParser.parse(jsonContent); - - JsonArray jsonContainers = jsonObject.get(YARN_REST_CONTAINER).getAsJsonArray(); - for (int i = 0; i < jsonContainers.size(); i++) { - String hostIp = ""; - - JsonObject jsonContainer = jsonContainers.get(i).getAsJsonObject(); - - JsonElement jsonElement = jsonContainer.get("nodeId"); - String nodeId = (jsonElement == null) ? "" : jsonElement.getAsString(); - String[] nodeIdParts = nodeId.split(":"); - if (nodeIdParts.length == 2) { - hostIp = nodeIdParts[0]; - } - - jsonElement = jsonContainer.get("exposedPorts"); - String exposedPorts = (jsonElement == null) ? 
"" : jsonElement.getAsString(); - - Gson gson = new Gson(); - Map>> listExposedPorts = gson.fromJson(exposedPorts, - new TypeToken>>>() { - }.getType()); - if (null == listExposedPorts) { - continue; - } - for (Map.Entry>> entry : listExposedPorts.entrySet()) { - String containerPort = entry.getKey(); - String[] containerPortParts = containerPort.split("/"); - if (containerPortParts.length == 2) { - List> hostIps = entry.getValue(); - for (Map hostAttrib : hostIps) { - Map containerExposedPort = new HashMap<>(); - String hostPort = hostAttrib.get("HostPort"); - containerExposedPort.put(HOST_IP, hostIp); - containerExposedPort.put(HOST_PORT, hostPort); - containerExposedPort.put(CONTAINER_PORT, containerPortParts[0]); - appContainers.add(containerExposedPort); - } - } - } - } - } catch (JsonIOException e) { - LOGGER.error(e.getMessage(), e); - } catch (JsonSyntaxException e) { - LOGGER.error(e.getMessage(), e); - } - - return appContainers; - } - - public List> getAppExportPorts(String name) { - // Query the IP and port of the submarine interpreter process through the yarn client - Map mapAppStatus = getAppServices(name); - if (mapAppStatus.containsKey(SubmarineConstants.YARN_APPLICATION_ID) - && mapAppStatus.containsKey(SubmarineConstants.YARN_APPLICATION_NAME) - && mapAppStatus.containsKey(SubmarineConstants.YARN_APPLICATION_STATUS)) { - String appId = mapAppStatus.get(SubmarineConstants.YARN_APPLICATION_ID).toString(); - String appStatus = mapAppStatus.get(SubmarineConstants.YARN_APPLICATION_STATUS).toString(); - - // if (StringUtils.equals(appStatus, SubmarineJob.YarnApplicationState.RUNNING.toString())) { - List> mapAppAttempts = getAppAttemptsContainersExportPorts(appId); - return mapAppAttempts; - //} - } - - return new ArrayList>() { - }; - } - - public enum HTTP { - GET, - POST, - DELETE; - } -} diff --git a/submarine/src/main/java/org/apache/zeppelin/submarine/job/SubmarineJob.java 
b/submarine/src/main/java/org/apache/zeppelin/submarine/job/SubmarineJob.java deleted file mode 100644 index aec20a676d3..00000000000 --- a/submarine/src/main/java/org/apache/zeppelin/submarine/job/SubmarineJob.java +++ /dev/null @@ -1,502 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.zeppelin.submarine.job; - -import com.google.common.annotations.VisibleForTesting; -import org.apache.commons.lang3.StringUtils; -import org.apache.hadoop.fs.Path; -import org.apache.zeppelin.interpreter.InterpreterContext; -import org.apache.zeppelin.submarine.hadoop.HdfsClient; -import org.apache.zeppelin.submarine.job.thread.JobRunThread; -import org.apache.zeppelin.submarine.commons.SubmarineCommand; -import org.apache.zeppelin.submarine.commons.SubmarineConstants; -import org.apache.zeppelin.submarine.commons.SubmarineUI; -import org.apache.zeppelin.submarine.commons.SubmarineUtils; -import org.apache.zeppelin.submarine.job.thread.TensorboardRunThread; -import org.apache.zeppelin.submarine.hadoop.FinalApplicationStatus; -import org.apache.zeppelin.submarine.hadoop.YarnApplicationState; -import org.apache.zeppelin.submarine.hadoop.YarnClient; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.io.IOException; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.concurrent.atomic.AtomicBoolean; - 
-import static org.apache.zeppelin.submarine.commons.SubmarineConstants.JOB_STATUS; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.TENSORBOARD_URL; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.TF_TENSORBOARD_ENABLE; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.YARN_APPLICATION_FINAL_STATUS; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.YARN_APPLICATION_ID; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.YARN_APPLICATION_NAME; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.YARN_APPLICATION_STATUS; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.YARN_APPLICATION_URL; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.YARN_APP_ELAPSED_TIME; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.YARN_APP_FINAL_STATUS_NAME; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.YARN_APP_FINISHED_TIME; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.YARN_APP_LAUNCHTIME_NAME; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.YARN_APP_LAUNCH_TIME; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.YARN_APP_STARTEDTIME_NAME; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.YARN_APP_STARTED_TIME; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.YARN_APP_STATE_NAME; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.YARN_TENSORBOARD_URL; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.YARN_WEB_HTTP_ADDRESS; -import static org.apache.zeppelin.submarine.job.SubmarineJobStatus.EXECUTE_SUBMARINE; - -public class SubmarineJob extends Thread { - - private Logger LOGGER = LoggerFactory.getLogger(SubmarineJob.class); - - private AtomicBoolean running = new AtomicBoolean(true); - - private static 
final long SYNC_SUBMARINE_RUNTIME_CYCLE = 3000; - - private YarnClient yarnClient = null; - - private SubmarineUI submarineUI = null; - - private Properties properties = null; - - private HdfsClient hdfsClient = null; - - private File pythonWorkDir = null; - - private String noteId = null; - private String noteName = null; - private String userName = null; - private String applicationId = null; - private YarnApplicationState yarnApplicationState = null; - private FinalApplicationStatus finalApplicationStatus = null; - private long startTime = 0; - private long launchTime = 0; - private long finishTime = 0; - private float progress = 0; // [0 ~ 100] - private SubmarineJobStatus currentJobStatus = EXECUTE_SUBMARINE; - - private InterpreterContext intpContext = null; - - JobRunThread jobRunThread = null; - TensorboardRunThread tensorboardRunThread = null; - - public static final String DIRECTORY_USER_HOME = "shell.working.directory.userName.home"; - private static final boolean isWindows = System.getProperty("os.name").startsWith("Windows"); - public static final String shell = isWindows ? 
"cmd /c" : "bash -c"; - public static final String TIMEOUT_PROPERTY = "submarine.command.timeout.millisecond"; - public static final String defaultTimeout = "100000"; - - public static final String SUBMARINE_JOBRUN_TF_JINJA - = "jinja_templates/submarine-job-run-tf.jinja"; - public static final String SUBMARINE_COMMAND_JINJA - = "jinja_templates/submarine-command.jinja"; - public static final String SUBMARINE_TENSORBOARD_JINJA - = "jinja_templates/submarine-tensorboard.jinja"; - - public SubmarineJob(InterpreterContext context, Properties properties) { - this.intpContext = context; - this.properties = properties; - this.noteId = context.getNoteId(); - this.noteName = context.getNoteName(); - this.userName = context.getAuthenticationInfo().getUser(); - this.yarnClient = new YarnClient(properties); - this.hdfsClient = new HdfsClient(properties); - this.submarineUI = new SubmarineUI(intpContext); - - this.start(); - } - - // 1. Synchronize submarine runtime state - @Override - public void run() { - while (running.get()) { - String jobName = SubmarineUtils.getJobName(userName, noteId); - updateJobStateByYarn(jobName); - - getTensorboardStatus(); - - try { - Thread.sleep(SYNC_SUBMARINE_RUNTIME_CYCLE); - } catch (InterruptedException e) { - LOGGER.error(e.getMessage(), e); - } - } - } - - @VisibleForTesting - public boolean getRunning() { - return running.get(); - } - - // Stop SubmarineJob - public void stopRunning() { - running.set(false); - - // stop JobRunThread - if (null != jobRunThread && jobRunThread.isAlive()) { - jobRunThread.stopRunning(); - } - - // stop TensorboardRunThread - if (null != tensorboardRunThread && tensorboardRunThread.isAlive()) { - tensorboardRunThread.stopRunning(); - } - } - - public String getUserTensorboardPath() { - String tfCheckpointPath = properties.getProperty(SubmarineConstants.TF_CHECKPOINT_PATH, ""); - return tfCheckpointPath; - } - - public String getJobDefaultCheckpointPath() { - String userTensorboardPath = 
getUserTensorboardPath(); - return userTensorboardPath + "/" + noteId; - } - - public void cleanJobDefaultCheckpointPath() { - String jobCheckpointPath = getJobDefaultCheckpointPath(); - Path notePath = new Path(jobCheckpointPath); - if (notePath.depth() <= 3) { - submarineUI.outputLog("ERROR", "Checkpoint path depth must be greater than 3"); - return; - } - try { - String message = "Clean up the checkpoint directory: " + jobCheckpointPath; - submarineUI.outputLog("", message); - hdfsClient.delete(notePath); - } catch (IOException e) { - LOGGER.error(e.getMessage(), e); - } - } - - public Properties getProperties() { - return properties; - } - - public HdfsClient getHdfsClient() { - return hdfsClient; - } - - public SubmarineUI getSubmarineUI() { - return submarineUI; - } - - public void setPythonWorkDir(File pythonWorkDir) { - this.pythonWorkDir = pythonWorkDir; - } - - public File getPythonWorkDir() { - return this.pythonWorkDir; - } - - public void onDashboard() { - submarineUI.createSubmarineUI(SubmarineCommand.DASHBOARD); - } - - public void runJob() { - // Need to display the UI when the page is reloaded, don't create it in the thread - submarineUI.createSubmarineUI(SubmarineCommand.JOB_RUN); - submarineUI.createLogHeadUI(); - - // Check if job already exists - String jobName = SubmarineUtils.getJobName(userName, noteId); - Map mapAppStatus = getJobStateByYarn(jobName); - if (mapAppStatus.size() == 0) { - if (null == jobRunThread || !jobRunThread.isAlive()) { - jobRunThread = new JobRunThread(this); - jobRunThread.start(); - } else { - submarineUI.outputLog("INFO", "JOB " + jobName + " being start up."); - } - } else { - submarineUI.outputLog("INFO", "JOB " + jobName + " already running."); - } - } - - public void deleteJob(String serviceName) { - submarineUI.createSubmarineUI(SubmarineCommand.JOB_STOP); - yarnClient.deleteService(serviceName); - } - - public void runTensorBoard() { - submarineUI.createSubmarineUI(SubmarineCommand.TENSORBOARD_RUN); - 
submarineUI.createLogHeadUI(); - - String tensorboardName = SubmarineUtils.getTensorboardName(userName); - Map mapAppStatus = getJobStateByYarn(tensorboardName); - if (mapAppStatus.size() == 0) { - if (null == tensorboardRunThread || !tensorboardRunThread.isAlive()) { - tensorboardRunThread = new TensorboardRunThread(this); - tensorboardRunThread.start(); - } else { - submarineUI.outputLog("INFO", "Tensorboard being start up."); - } - } else { - submarineUI.outputLog("INFO", "Tensorboard already running."); - } - } - - // Check if tensorboard already exists - public boolean getTensorboardStatus() { - String enableTensorboard = properties.getProperty(TF_TENSORBOARD_ENABLE, "false"); - boolean tensorboardExist = false; - if (StringUtils.equals(enableTensorboard, "true")) { - String tensorboardName = SubmarineUtils.getTensorboardName(userName); - - // create tensorboard link of YARN - Map mapAppStatus = getJobStateByYarn(tensorboardName); - String appId = ""; - if (mapAppStatus.containsKey(YARN_APPLICATION_ID)) { - appId = mapAppStatus.get(YARN_APPLICATION_ID).toString(); - StringBuffer sbUrl = new StringBuffer(); - String yarnBaseUrl = properties.getProperty(YARN_WEB_HTTP_ADDRESS, ""); - sbUrl.append(yarnBaseUrl).append("/ui2/#/yarn-app/").append(appId); - sbUrl.append("/components?service=").append(tensorboardName); - SubmarineUtils.setAgulObjValue(intpContext, YARN_TENSORBOARD_URL, sbUrl.toString()); - - // Detection tensorboard Container export port - List> listExportPorts = yarnClient.getAppExportPorts(tensorboardName); - for (Map exportPorts : listExportPorts) { - if (exportPorts.containsKey(YarnClient.HOST_IP) - && exportPorts.containsKey(YarnClient.HOST_PORT) - && exportPorts.containsKey(YarnClient.CONTAINER_PORT)) { - String intpAppHostIp = (String) exportPorts.get(YarnClient.HOST_IP); - String intpAppHostPort = (String) exportPorts.get(YarnClient.HOST_PORT); - String intpAppContainerPort = (String) exportPorts.get(YarnClient.CONTAINER_PORT); - if 
(StringUtils.equals("6006", intpAppContainerPort)) { - tensorboardExist = true; - - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Detection tensorboard Container hostIp:{}, hostPort:{}, " + - "containerPort:{}.", intpAppHostIp, intpAppHostPort, intpAppContainerPort); - } - - // show tensorboard link button - String tensorboardUrl = "http://" + intpAppHostIp + ":" + intpAppHostPort; - SubmarineUtils.setAgulObjValue(intpContext, TENSORBOARD_URL, tensorboardUrl); - break; - } - } - } - } else { - SubmarineUtils.removeAgulObjValue(intpContext, YARN_TENSORBOARD_URL); - } - - if (false == tensorboardExist) { - SubmarineUtils.removeAgulObjValue(intpContext, TENSORBOARD_URL); - } - } - - return tensorboardExist; - } - - public void showUsage() { - submarineUI.createSubmarineUI(SubmarineCommand.USAGE); - } - - public void cleanRuntimeCache() { - intpContext.getAngularObjectRegistry().removeAll(noteId, intpContext.getParagraphId()); - submarineUI.createSubmarineUI(SubmarineCommand.DASHBOARD); - } - - public String getNoteId() { - return noteId; - } - - public String getUserName() { - return this.userName; - } - - // from state to state - public void setCurrentJobState(SubmarineJobStatus toStatus) { - SubmarineUtils.setAgulObjValue(intpContext, JOB_STATUS, - toStatus.getStatus()); - currentJobStatus = toStatus; - } - - public Map getJobStateByYarn(String jobName) { - Map mapAppStatus = new HashMap<>(); - Map mapStatus = yarnClient.getAppServices(jobName); - - if (mapStatus.containsKey(YARN_APPLICATION_ID) - && mapStatus.containsKey(YARN_APPLICATION_NAME)) { - String appId = mapStatus.get(YARN_APPLICATION_ID).toString(); - mapAppStatus = yarnClient.getClusterApps(appId); - - mapAppStatus.putAll(mapStatus); - } - - return mapAppStatus; - } - - public void updateJobStateByYarn(String appName) { - Map mapAppStatus = getJobStateByYarn(appName); - - if (mapAppStatus.size() == 0) { - SubmarineUtils.removeAgulObjValue(intpContext, YARN_APPLICATION_ID); - 
SubmarineUtils.removeAgulObjValue(intpContext, YARN_APPLICATION_STATUS); - SubmarineUtils.removeAgulObjValue(intpContext, YARN_APPLICATION_URL); - SubmarineUtils.removeAgulObjValue(intpContext, YARN_APP_STARTED_TIME); - SubmarineUtils.removeAgulObjValue(intpContext, YARN_APP_LAUNCH_TIME); - SubmarineUtils.removeAgulObjValue(intpContext, YARN_APP_FINISHED_TIME); - SubmarineUtils.removeAgulObjValue(intpContext, YARN_APP_ELAPSED_TIME); - - // TODO(Xun Liu) Not wait job run ??? - SubmarineUtils.removeAgulObjValue(intpContext, JOB_STATUS); - } else { - String state = "", finalStatus = "", appId = ""; - if (mapAppStatus.containsKey(YARN_APPLICATION_ID)) { - appId = mapAppStatus.get(YARN_APPLICATION_ID).toString(); - } - if (mapAppStatus.containsKey(YARN_APP_STATE_NAME)) { - state = mapAppStatus.get(YARN_APP_STATE_NAME).toString(); - SubmarineUtils.setAgulObjValue(intpContext, YARN_APPLICATION_STATUS, state); - } - if (mapAppStatus.containsKey(YARN_APP_FINAL_STATUS_NAME)) { - finalStatus = mapAppStatus.get(YARN_APP_FINAL_STATUS_NAME).toString(); - SubmarineUtils.setAgulObjValue(intpContext, - YARN_APPLICATION_FINAL_STATUS, finalStatus); - } - SubmarineJobStatus jobStatus = convertYarnState(state, finalStatus); - setCurrentJobState(jobStatus); - try { - if (mapAppStatus.containsKey(YARN_APP_STARTEDTIME_NAME)) { - String startedTime = mapAppStatus.get(YARN_APP_STARTEDTIME_NAME).toString(); - long lStartedTime = Long.parseLong(startedTime); - if (lStartedTime > 0) { - Date startedDate = new Date(lStartedTime); - SubmarineUtils.setAgulObjValue(intpContext, YARN_APP_STARTED_TIME, - startedDate.toString()); - } - } - if (mapAppStatus.containsKey(YARN_APP_LAUNCHTIME_NAME)) { - String launchTime = mapAppStatus.get(YARN_APP_LAUNCHTIME_NAME).toString(); - long lLaunchTime = Long.parseLong(launchTime); - if (lLaunchTime > 0) { - Date launchDate = new Date(lLaunchTime); - SubmarineUtils.setAgulObjValue(intpContext, YARN_APP_LAUNCH_TIME, - launchDate.toString()); - } - } - if 
(mapAppStatus.containsKey("finishedTime")) { - String finishedTime = mapAppStatus.get("finishedTime").toString(); - long lFinishedTime = Long.parseLong(finishedTime); - if (lFinishedTime > 0) { - Date finishedDate = new Date(lFinishedTime); - SubmarineUtils.setAgulObjValue(intpContext, YARN_APP_FINISHED_TIME, - finishedDate.toString()); - } - } - if (mapAppStatus.containsKey("elapsedTime")) { - String elapsedTime = mapAppStatus.get("elapsedTime").toString(); - long lElapsedTime = Long.parseLong(elapsedTime); - if (lElapsedTime > 0) { - String finishedDate = org.apache.hadoop.util.StringUtils.formatTime(lElapsedTime); - SubmarineUtils.setAgulObjValue(intpContext, YARN_APP_ELAPSED_TIME, finishedDate); - } - } - } catch (NumberFormatException e) { - LOGGER.error(e.getMessage()); - } - - // create YARN UI link - StringBuffer sbUrl = new StringBuffer(); - String yarnBaseUrl = properties.getProperty(YARN_WEB_HTTP_ADDRESS, ""); - sbUrl.append(yarnBaseUrl).append("/ui2/#/yarn-app/").append(appId); - sbUrl.append("/components?service=").append(appName); - - SubmarineUtils.setAgulObjValue(intpContext, YARN_APPLICATION_ID, appId); - - SubmarineUtils.setAgulObjValue(intpContext, YARN_APPLICATION_URL, sbUrl.toString()); - } - } - - private SubmarineJobStatus convertYarnState(String status, String finalStatus) { - SubmarineJobStatus submarineJobStatus = SubmarineJobStatus.UNKNOWN; - switch (status) { - case "NEW": - submarineJobStatus = SubmarineJobStatus.YARN_NEW; - break; - case "NEW_SAVING": - submarineJobStatus = SubmarineJobStatus.YARN_NEW_SAVING; - break; - case "SUBMITTED": - submarineJobStatus = SubmarineJobStatus.YARN_SUBMITTED; - break; - case "ACCEPTED": - submarineJobStatus = SubmarineJobStatus.YARN_ACCEPTED; - break; - case "RUNNING": - submarineJobStatus = SubmarineJobStatus.YARN_RUNNING; - break; - case "FINISHED": - submarineJobStatus = SubmarineJobStatus.YARN_FINISHED; - break; - case "FAILED": - submarineJobStatus = SubmarineJobStatus.YARN_FAILED; - break; - 
case "KILLED": - submarineJobStatus = SubmarineJobStatus.YARN_KILLED; - break; - case "STOPPED": - submarineJobStatus = SubmarineJobStatus.YARN_STOPPED; - } - switch (finalStatus) { - case "NEW": - submarineJobStatus = SubmarineJobStatus.YARN_NEW; - break; - case "NEW_SAVING": - submarineJobStatus = SubmarineJobStatus.YARN_NEW_SAVING; - break; - case "SUBMITTED": - submarineJobStatus = SubmarineJobStatus.YARN_SUBMITTED; - break; - case "ACCEPTED": - submarineJobStatus = SubmarineJobStatus.YARN_ACCEPTED; - break; - case "RUNNING": - submarineJobStatus = SubmarineJobStatus.YARN_RUNNING; - break; - case "FINISHED": - submarineJobStatus = SubmarineJobStatus.YARN_FINISHED; - break; - case "FAILED": - submarineJobStatus = SubmarineJobStatus.YARN_FAILED; - break; - case "KILLED": - submarineJobStatus = SubmarineJobStatus.YARN_KILLED; - break; - case "STOPPED": - submarineJobStatus = SubmarineJobStatus.YARN_STOPPED; - break; - default: // UNDEFINED - break; - } - - return submarineJobStatus; - } - - public InterpreterContext getIntpContext() { - return intpContext; - } - - public void setIntpContext(InterpreterContext intpContext) { - this.intpContext = intpContext; - this.submarineUI = new SubmarineUI(intpContext); - } -} diff --git a/submarine/src/main/java/org/apache/zeppelin/submarine/job/SubmarineJobStatus.java b/submarine/src/main/java/org/apache/zeppelin/submarine/job/SubmarineJobStatus.java deleted file mode 100644 index 2c8bf76a50a..00000000000 --- a/submarine/src/main/java/org/apache/zeppelin/submarine/job/SubmarineJobStatus.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.zeppelin.submarine.job; - -public enum SubmarineJobStatus { - UNKNOWN("UNKNOWN"), - READY("READY"), - EXECUTE_SUBMARINE("EXECUTE SUBMARINE"), - EXECUTE_SUBMARINE_ERROR("EXECUTE SUBMARINE ERROR"), - EXECUTE_SUBMARINE_FINISHED("EXECUTE SUBMARINE FINISHED"), - YARN_NEW("YARN NEW"), - YARN_NEW_SAVING("YARN NEW SAVING"), - YARN_SUBMITTED("YARN SUBMITTED"), - YARN_ACCEPTED("YARN ACCEPTED"), - YARN_RUNNING("YARN RUNNING"), - YARN_FINISHED("YARN FINISHED"), - YARN_FAILED("YARN FAILED"), - YARN_STOPPED("YARN STOPPED"), - YARN_KILLED("YARN KILLED"); - - private String status; - - SubmarineJobStatus(String status){ - this.status = status; - } - - public String getStatus(){ - return status; - } - - public static SubmarineJobStatus fromState(String status) { - for (SubmarineJobStatus noteStatus : SubmarineJobStatus.values()) { - if (noteStatus.getStatus().equals(status)) { - return noteStatus; - } - } - - return EXECUTE_SUBMARINE_ERROR; - } -} diff --git a/submarine/src/main/java/org/apache/zeppelin/submarine/job/thread/JobRunThread.java b/submarine/src/main/java/org/apache/zeppelin/submarine/job/thread/JobRunThread.java deleted file mode 100644 index f70485437d9..00000000000 --- a/submarine/src/main/java/org/apache/zeppelin/submarine/job/thread/JobRunThread.java +++ /dev/null @@ -1,242 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.zeppelin.submarine.job.thread; - -import com.google.common.io.Resources; -import com.hubspot.jinjava.Jinjava; -import org.apache.commons.exec.CommandLine; -import org.apache.commons.exec.DefaultExecuteResultHandler; -import org.apache.commons.exec.DefaultExecutor; -import org.apache.commons.exec.ExecuteException; -import org.apache.commons.exec.ExecuteWatchdog; -import org.apache.commons.exec.LogOutputStream; -import org.apache.commons.exec.PumpStreamHandler; -import org.apache.commons.io.Charsets; -import org.apache.commons.lang3.StringUtils; -import org.apache.zeppelin.interpreter.InterpreterContext; -import org.apache.zeppelin.interpreter.thrift.ParagraphInfo; -import org.apache.zeppelin.submarine.hadoop.HdfsClient; -import org.apache.zeppelin.submarine.commons.SubmarineConstants; -import org.apache.zeppelin.submarine.commons.SubmarineUI; -import org.apache.zeppelin.submarine.commons.SubmarineUtils; -import org.apache.zeppelin.submarine.job.SubmarineJob; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.net.URL; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.locks.Lock; -import java.util.concurrent.locks.ReentrantLock; - -import static org.apache.zeppelin.submarine.job.SubmarineJobStatus.EXECUTE_SUBMARINE; -import static org.apache.zeppelin.submarine.job.SubmarineJobStatus.EXECUTE_SUBMARINE_ERROR; -import static 
org.apache.zeppelin.submarine.job.SubmarineJobStatus.EXECUTE_SUBMARINE_FINISHED; - -public class JobRunThread extends Thread { - private Logger LOGGER = LoggerFactory.getLogger(JobRunThread.class); - - private SubmarineJob submarineJob; - - private AtomicBoolean running = new AtomicBoolean(false); - - private Lock lockRunning = new ReentrantLock(); - - public JobRunThread(SubmarineJob submarineJob) { - this.submarineJob = submarineJob; - } - - public void run() { - boolean tryLock = lockRunning.tryLock(); - if (false == tryLock) { - LOGGER.warn("Can not get JobRunThread lockRunning!"); - return; - } - - SubmarineUI submarineUI = submarineJob.getSubmarineUI(); - try { - InterpreterContext intpContext = submarineJob.getIntpContext(); - String noteId = intpContext.getNoteId(); - String userName = intpContext.getAuthenticationInfo().getUser(); - String jobName = SubmarineUtils.getJobName(userName, noteId); - - if (true == running.get()) { - String message = String.format("Job %s already running.", jobName); - submarineUI.outputLog("WARN", message); - LOGGER.warn(message); - return; - } - running.set(true); - - Properties properties = submarineJob.getProperties(); - HdfsClient hdfsClient = submarineJob.getHdfsClient(); - File pythonWorkDir = submarineJob.getPythonWorkDir(); - - submarineJob.setCurrentJobState(EXECUTE_SUBMARINE); - - String algorithmPath = properties.getProperty( - SubmarineConstants.SUBMARINE_ALGORITHM_HDFS_PATH, ""); - if (!algorithmPath.startsWith("hdfs://")) { - String message = "Algorithm file upload HDFS path, " + - "Must be `hdfs://` prefix. now setting " + algorithmPath; - submarineUI.outputLog("Configuration error", message); - return; - } - - List paragraphInfos = intpContext.getIntpEventClient() - .getParagraphList(userName, noteId); - String outputMsg = hdfsClient.saveParagraphToFiles(noteId, paragraphInfos, - pythonWorkDir == null ? 
"" : pythonWorkDir.getAbsolutePath(), properties); - if (!StringUtils.isEmpty(outputMsg)) { - submarineUI.outputLog("Save algorithm file", outputMsg); - } - - HashMap jinjaParams = SubmarineUtils.propertiesToJinjaParams( - properties, submarineJob, true); - - URL urlTemplate = Resources.getResource(SubmarineJob.SUBMARINE_JOBRUN_TF_JINJA); - String template = Resources.toString(urlTemplate, Charsets.UTF_8); - Jinjava jinjava = new Jinjava(); - String submarineCmd = jinjava.render(template, jinjaParams); - // If the first line is a newline, delete the newline - int firstLineIsNewline = submarineCmd.indexOf("\n"); - if (firstLineIsNewline == 0) { - submarineCmd = submarineCmd.replaceFirst("\n", ""); - } - - StringBuffer sbLogs = new StringBuffer(submarineCmd); - submarineUI.outputLog("Submarine submit command", sbLogs.toString()); - - long timeout = Long.valueOf(properties.getProperty(SubmarineJob.TIMEOUT_PROPERTY, - SubmarineJob.defaultTimeout)); - CommandLine cmdLine = CommandLine.parse(SubmarineJob.shell); - cmdLine.addArgument(submarineCmd, false); - DefaultExecutor executor = new DefaultExecutor(); - ExecuteWatchdog watchDog = new ExecuteWatchdog(timeout); - executor.setWatchdog(watchDog); - StringBuffer sbLogOutput = new StringBuffer(); - executor.setStreamHandler(new PumpStreamHandler(new LogOutputStream() { - @Override - protected void processLine(String line, int level) { - line = line.trim(); - if (!StringUtils.isEmpty(line)) { - sbLogOutput.append(line + "\n"); - } - } - })); - - if (Boolean.valueOf(properties.getProperty(SubmarineJob.DIRECTORY_USER_HOME))) { - executor.setWorkingDirectory(new File(System.getProperty("user.home"))); - } - - Map env = new HashMap<>(); - String launchMode = (String) jinjaParams.get(SubmarineConstants.INTERPRETER_LAUNCH_MODE); - if (StringUtils.equals(launchMode, "yarn")) { - // Set environment variables in the submarine interpreter container run on yarn - String javaHome, hadoopHome, hadoopConf; - javaHome = (String) 
jinjaParams.get(SubmarineConstants.DOCKER_JAVA_HOME); - hadoopHome = (String) jinjaParams.get(SubmarineConstants.DOCKER_HADOOP_HDFS_HOME); - hadoopConf = (String) jinjaParams.get(SubmarineConstants.SUBMARINE_HADOOP_CONF_DIR); - env.put("JAVA_HOME", javaHome); - env.put("HADOOP_HOME", hadoopHome); - env.put("HADOOP_HDFS_HOME", hadoopHome); - env.put("HADOOP_CONF_DIR", hadoopConf); - env.put("YARN_CONF_DIR", hadoopConf); - env.put("CLASSPATH", "`$HADOOP_HDFS_HOME/bin/hadoop classpath --glob`"); - env.put("ZEPPELIN_FORCE_STOP", "true"); - } - - LOGGER.info("Execute EVN: {}, Command: {} ", env.toString(), submarineCmd); - AtomicBoolean cmdLineRunning = new AtomicBoolean(true); - executor.execute(cmdLine, env, new DefaultExecuteResultHandler() { - @Override - public void onProcessComplete(int exitValue) { - String message = String.format( - "jobName %s ProcessComplete exit value is : %d", jobName, exitValue); - LOGGER.info(message); - submarineUI.outputLog("JOR RUN COMPLETE", message); - cmdLineRunning.set(false); - submarineJob.setCurrentJobState(EXECUTE_SUBMARINE_FINISHED); - } - @Override - public void onProcessFailed(ExecuteException e) { - String message = String.format( - "jobName %s ProcessFailed exit value is : %d, exception is : %s", - jobName, e.getExitValue(), e.getMessage()); - LOGGER.error(message); - submarineUI.outputLog("JOR RUN FAILED", message); - cmdLineRunning.set(false); - submarineJob.setCurrentJobState(EXECUTE_SUBMARINE_ERROR); - } - }); - int loopCount = 100; - while ((loopCount-- > 0) && cmdLineRunning.get() && running.get()) { - Thread.sleep(1000); - } - if (watchDog.isWatching()) { - watchDog.destroyProcess(); - Thread.sleep(1000); - } - if (watchDog.isWatching()) { - watchDog.killedProcess(); - } - - // Check if it has been submitted to YARN - Map jobState = submarineJob.getJobStateByYarn(jobName); - loopCount = 50; - while ((loopCount-- > 0) && !jobState.containsKey("state") && running.get()) { - Thread.sleep(3000); - jobState = 
submarineJob.getJobStateByYarn(jobName); - } - - if (!jobState.containsKey("state")) { - String message = String.format("JOB %s was not submitted to YARN!", jobName); - LOGGER.error(message); - submarineUI.outputLog("JOR RUN FAILED", message); - submarineJob.setCurrentJobState(EXECUTE_SUBMARINE_ERROR); - } - } catch (Exception e) { - LOGGER.error(e.getMessage(), e); - submarineJob.setCurrentJobState(EXECUTE_SUBMARINE_ERROR); - submarineUI.outputLog("Exception", e.getMessage()); - } finally { - running.set(false); - lockRunning.unlock(); - } - } - - public void stopRunning() { - try { - running.set(false); - - // If can not get the lockRunning, the thread is executed. - boolean tryLock = lockRunning.tryLock(); - int loop = 0; - while (false == tryLock && loop++ < 100) { - LOGGER.warn("Can not get the JobRunThread lockRunning [{}] !", loop); - Thread.sleep(500); - tryLock = lockRunning.tryLock(); - } - } catch (Exception e) { - LOGGER.error(e.getMessage(), e); - } finally { - lockRunning.unlock(); - } - } -} diff --git a/submarine/src/main/java/org/apache/zeppelin/submarine/job/thread/TensorboardRunThread.java b/submarine/src/main/java/org/apache/zeppelin/submarine/job/thread/TensorboardRunThread.java deleted file mode 100644 index 046abfac06c..00000000000 --- a/submarine/src/main/java/org/apache/zeppelin/submarine/job/thread/TensorboardRunThread.java +++ /dev/null @@ -1,203 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.zeppelin.submarine.job.thread; - -import com.google.common.io.Resources; -import com.hubspot.jinjava.Jinjava; -import org.apache.commons.exec.CommandLine; -import org.apache.commons.exec.DefaultExecuteResultHandler; -import org.apache.commons.exec.DefaultExecutor; -import org.apache.commons.exec.ExecuteException; -import org.apache.commons.exec.ExecuteWatchdog; -import org.apache.commons.exec.LogOutputStream; -import org.apache.commons.exec.PumpStreamHandler; -import org.apache.commons.io.Charsets; -import org.apache.commons.lang3.StringUtils; -import org.apache.zeppelin.submarine.commons.SubmarineConstants; -import org.apache.zeppelin.submarine.commons.SubmarineUI; -import org.apache.zeppelin.submarine.commons.SubmarineUtils; -import org.apache.zeppelin.submarine.job.SubmarineJob; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.net.URL; -import java.util.HashMap; -import java.util.Map; -import java.util.Properties; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.locks.Lock; -import java.util.concurrent.locks.ReentrantLock; - -public class TensorboardRunThread extends Thread { - private Logger LOGGER = LoggerFactory.getLogger(TensorboardRunThread.class); - - private SubmarineJob submarineJob; - - private AtomicBoolean running = new AtomicBoolean(false); - - private Lock lockRunning = new ReentrantLock(); - - public TensorboardRunThread(SubmarineJob submarineJob) { - this.submarineJob = submarineJob; - } - - public void run() { - SubmarineUI submarineUI = submarineJob.getSubmarineUI(); - - boolean tryLock = lockRunning.tryLock(); - - try { - Properties properties = submarineJob.getProperties(); - String tensorboardName = SubmarineUtils.getTensorboardName(submarineJob.getUserName()); - if (true == running.get()) { - String message = String.format("tensorboard %s already running.", tensorboardName); - submarineUI.outputLog("WARN", message); - 
LOGGER.warn(message); - return; - } - running.set(true); - - HashMap jinjaParams = SubmarineUtils.propertiesToJinjaParams( - properties, submarineJob, false); - // update jobName -> tensorboardName - jinjaParams.put(SubmarineConstants.JOB_NAME, tensorboardName); - - URL urlTemplate = Resources.getResource(SubmarineJob.SUBMARINE_TENSORBOARD_JINJA); - String template = Resources.toString(urlTemplate, Charsets.UTF_8); - Jinjava jinjava = new Jinjava(); - String submarineCmd = jinjava.render(template, jinjaParams); - // If the first line is a newline, delete the newline - int firstLineIsNewline = submarineCmd.indexOf("\n"); - if (firstLineIsNewline == 0) { - submarineCmd = submarineCmd.replaceFirst("\n", ""); - } - StringBuffer sbLogs = new StringBuffer(submarineCmd); - submarineUI.outputLog("Submarine submit command", sbLogs.toString()); - - long timeout = Long.valueOf(properties.getProperty(SubmarineJob.TIMEOUT_PROPERTY, - SubmarineJob.defaultTimeout)); - CommandLine cmdLine = CommandLine.parse(SubmarineJob.shell); - cmdLine.addArgument(submarineCmd, false); - DefaultExecutor executor = new DefaultExecutor(); - ExecuteWatchdog watchDog = new ExecuteWatchdog(timeout); - executor.setWatchdog(watchDog); - StringBuffer sbLogOutput = new StringBuffer(); - executor.setStreamHandler(new PumpStreamHandler(new LogOutputStream() { - @Override - protected void processLine(String line, int level) { - line = line.trim(); - if (!StringUtils.isEmpty(line)) { - sbLogOutput.append(line + "\n"); - } - } - })); - - if (Boolean.valueOf(properties.getProperty(SubmarineJob.DIRECTORY_USER_HOME))) { - executor.setWorkingDirectory(new File(System.getProperty("user.home"))); - } - - Map env = new HashMap<>(); - String launchMode = (String) jinjaParams.get(SubmarineConstants.INTERPRETER_LAUNCH_MODE); - if (StringUtils.equals(launchMode, "yarn")) { - // Set environment variables in the container - String javaHome, hadoopHome, hadoopConf; - javaHome = (String) 
jinjaParams.get(SubmarineConstants.DOCKER_JAVA_HOME); - hadoopHome = (String) jinjaParams.get(SubmarineConstants.DOCKER_HADOOP_HDFS_HOME); - hadoopConf = (String) jinjaParams.get(SubmarineConstants.SUBMARINE_HADOOP_CONF_DIR); - env.put("JAVA_HOME", javaHome); - env.put("HADOOP_HOME", hadoopHome); - env.put("HADOOP_HDFS_HOME", hadoopHome); - env.put("HADOOP_CONF_DIR", hadoopConf); - env.put("YARN_CONF_DIR", hadoopConf); - env.put("CLASSPATH", "`$HADOOP_HDFS_HOME/bin/hadoop classpath --glob`"); - } - - LOGGER.info("Execute EVN: {}, Command: {} ", env.toString(), submarineCmd); - - AtomicBoolean cmdLineRunning = new AtomicBoolean(true); - executor.execute(cmdLine, env, new DefaultExecuteResultHandler() { - @Override - public void onProcessComplete(int exitValue) { - String message = String.format( - "jobName %s ProcessComplete exit value is : %d", tensorboardName, exitValue); - LOGGER.info(message); - submarineUI.outputLog("TENSORBOARD RUN COMPLETE", message); - cmdLineRunning.set(false); - } - @Override - public void onProcessFailed(ExecuteException e) { - String message = String.format( - "jobName %s ProcessFailed exit value is : %d, exception is : %s", - tensorboardName, e.getExitValue(), e.getMessage()); - LOGGER.error(message); - submarineUI.outputLog("TENSORBOARD RUN FAILED", message); - cmdLineRunning.set(false); - } - }); - int loopCount = 100; - while ((loopCount-- > 0) && cmdLineRunning.get() && running.get()) { - Thread.sleep(1000); - } - if (watchDog.isWatching()) { - watchDog.destroyProcess(); - Thread.sleep(1000); - } - if (watchDog.isWatching()) { - watchDog.killedProcess(); - } - - // Check if it has been submitted to YARN - Map jobState = submarineJob.getJobStateByYarn(tensorboardName); - loopCount = 50; - while ((loopCount-- > 0) && !jobState.containsKey("state") && running.get()) { - Thread.sleep(3000); - jobState = submarineJob.getJobStateByYarn(tensorboardName); - } - - if (!jobState.containsKey("state")) { - String message - = 
String.format("tensorboard %s was not submitted to YARN!", tensorboardName); - LOGGER.error(message); - submarineUI.outputLog("JOR RUN FAILED", message); - } - } catch (Exception e) { - LOGGER.error(e.getMessage(), e); - submarineUI.outputLog("Exception", e.getMessage()); - } finally { - running.set(false); - lockRunning.unlock(); - } - } - - public void stopRunning() { - try { - running.set(false); - - // If can not get the lock, the thread is executed. - boolean tryLock = lockRunning.tryLock(); - int loop = 0; - while (false == tryLock && loop++ < 100) { - LOGGER.warn("Can not get the TensorboardRunThread lock [{}] !", loop); - Thread.sleep(500); - tryLock = lockRunning.tryLock(); - } - } catch (Exception e) { - LOGGER.error(e.getMessage(), e); - } finally { - lockRunning.unlock(); - } - } -} diff --git a/submarine/src/main/resources/interpreter-setting.json b/submarine/src/main/resources/interpreter-setting.json deleted file mode 100644 index ffabdc112a8..00000000000 --- a/submarine/src/main/resources/interpreter-setting.json +++ /dev/null @@ -1,314 +0,0 @@ -[ - { - "group": "submarine", - "name": "submarine", - "className": "org.apache.zeppelin.submarine.SubmarineInterpreter", - "defaultInterpreter": true, - "properties": { - "zeppelin.submarine.auth.type": { - "envName": "ZEPPELIN_SUBMARINE_AUTH_TYPE", - "propertyName": "zeppelin.submarine.auth.type", - "defaultValue": "kerberos", - "description": "simple or kerberos", - "type": "string" - }, - "yarn.webapp.http.address": { - "envName": "YARN_WEBAPP_HTTP_ADDRESS", - "propertyName": "yarn.webapp.http.address", - "defaultValue": "", - "description": "YARN web ui address", - "type": "string" - }, - "INTERPRETER_LAUNCH_MODE": { - "envName": "INTERPRETER_LAUNCH_MODE", - "propertyName": "interpreter.launch.mode", - "defaultValue": "local", - "description": "Submarine interpreter launch in local/yarn", - "type": "string" - }, - "HADOOP_YARN_SUBMARINE_JAR": { - "envName": "HADOOP_YARN_SUBMARINE_JAR", - "propertyName": 
"hadoop.yarn.submarine.jar", - "defaultValue": "", - "description": "Submarine executive full path, ex) ../hadoop/share/hadoop/yarn/hadoop-yarn-submarine-x.x.x.jar", - "type": "string" - }, - "DOCKER_HADOOP_HDFS_HOME": { - "envName": "DOCKER_HADOOP_HDFS_HOME", - "propertyName": "DOCKER_HADOOP_HDFS_HOME", - "defaultValue": "", - "description": "hadoop home in docker container", - "type": "string" - }, - "DOCKER_JAVA_HOME": { - "envName": "DOCKER_JAVA_HOME", - "propertyName": "DOCKER_JAVA_HOME", - "defaultValue": "", - "description": "java home in docker container", - "type": "string" - }, - "SUBMARINE_INTERPRETER_DOCKER_IMAGE": { - "envName": "SUBMARINE_INTERPRETER_DOCKER_IMAGE", - "propertyName": "SUBMARINE_INTERPRETER_DOCKER_IMAGE", - "defaultValue": "", - "description": "Docker image of submarine interpreter", - "type": "string" - }, - "zeppelin.interpreter.rpc.portRange": { - "envName": "ZEPPELIN_INTERPRETER_RPC_PORTRANGE", - "propertyName": "zeppelin.interpreter.rpc.portRange", - "defaultValue": "29914", - "description": "The process port of the submarine interpreter container mapping defined in the `scripts/docker/interpreter/submarine/tensorflow_gpu/Dockerfile` file", - "type": "string" - }, - "submarine.yarn.queue": { - "envName": "SUBMARINE_YARN_QUEUE", - "propertyName": "submarine.yarn.queue", - "defaultValue": "root.default", - "description": "submarine queue name of yarn", - "type": "string" - }, - "submarine.hadoop.home": { - "envName": "SUBMARINE_HADOOP_HOME", - "propertyName": "submarine.hadoop.home", - "defaultValue": "", - "description": "submarine user-defined HADOOP_HOME", - "type": "string" - }, - "SUBMARINE_HADOOP_CONF_DIR": { - "envName": "SUBMARINE_HADOOP_CONF_DIR", - "propertyName": "submarine.hadoop.conf.dir", - "defaultValue": "", - "description": "submarine user-defined HADOOP_CONF_DIR", - "type": "string" - }, - "submarine.hadoop.krb5.conf": { - "envName": "SUBMARINE_HADOOP_KRB5_CONF", - "propertyName": "submarine.hadoop.krb5.conf", - 
"defaultValue": "/etc/krb5.conf", - "description": "submarine user-defined hdfs/yarn kerberos authentication krb5.conf", - "type": "string" - }, - "SUBMARINE_HADOOP_KEYTAB": { - "envName": "SUBMARINE_HADOOP_KEYTAB", - "propertyName": "submarine.hadoop.keytab", - "defaultValue": "", - "description": "submarine hdfs/yarn kerberos authentication", - "type": "string" - }, - "SUBMARINE_HADOOP_PRINCIPAL": { - "envName": "SUBMARINE_HADOOP_PRINCIPAL", - "propertyName": "submarine.hadoop.principal", - "defaultValue": "", - "description": "submarine hdfs/yarn kerberos authentication", - "type": "string" - }, - "docker.container.network": { - "envName": "DOCKER_CONTAINER_NETWORK", - "propertyName": "docker.container.network", - "defaultValue": "", - "description": "Network name in the docker container", - "type": "string" - }, - "DOCKER_CONTAINER_TIME_ZONE": { - "envName": "DOCKER_CONTAINER_TIME_ZONE", - "propertyName": "docker.container.time.zone", - "defaultValue": "Etc/UTC", - "description": "docker container time zone", - "type": "string" - }, - "zeppelin.interpreter.connect.timeout": { - "envName": "ZEPPELIN_INTERPRETER_CONNECT_TIMEOUT", - "propertyName": "zeppelin.interpreter.connect.timeout", - "defaultValue": "100000", - "description": "zeppelin interpreter connect timeout", - "type": "number" - }, - "submarine.algorithm.hdfs.path": { - "envName": "SUBMARINE_ALGORITHM_HDFS_PATH", - "propertyName": "submarine.algorithm.hdfs.path", - "defaultValue": "hdfs://...", - "description": "Algorithm file upload HDFS path, Support ${username} variable symbol, For example: hdfs:///usr/${username}", - "type": "string" - }, - "tf.parameter.services.num": { - "envName": "TF_PARAMETER_SERVICES_NUM", - "propertyName": "tf.parameter.services.num", - "defaultValue": "1", - "description": "Number of parameter services", - "type": "number" - }, - "tf.worker.services.num": { - "envName": "TF_WORKER_SERVICES_NUM", - "propertyName": "tf.worker.services.num", - "defaultValue": "0", - 
"description": "Number of worker services", - "type": "number" - }, - "tf.parameter.services.docker.image": { - "envName": "TF_PARAMETER_SERVICES_DOCKER_IMAGE", - "propertyName": "tf.parameter.services.docker.image", - "defaultValue": "", - "description": "Docker image of parameter services", - "type": "string" - }, - "tf.parameter.services.gpu": { - "envName": "TF_PARAMETER_SERVICES_GPU", - "propertyName": "tf.parameter.services.gpu", - "defaultValue": "0", - "description": "GPU number of parameter services", - "type": "number" - }, - "tf.parameter.services.cpu": { - "envName": "TF_PARAMETER_SERVICES_CPU", - "propertyName": "tf.parameter.services.cpu", - "defaultValue": "2", - "description": "CPU number of parameter services", - "type": "number" - }, - "tf.parameter.services.memory": { - "envName": "TF_PARAMETER_SERVICES_MEMORY", - "propertyName": "tf.parameter.services.memory", - "defaultValue": "2G", - "description": "Memory number of parameter services", - "type": "string" - }, - "tf.worker.services.docker.image": { - "envName": "TF_WORKER_SERVICES_DOCKER_IMAGE", - "propertyName": "tf.worker.services.docker.image", - "defaultValue": "", - "description": "Docker image of worker services", - "type": "string" - }, - "tf.worker.services.gpu": { - "envName": "TF_WORKER_SERVICES_GPU", - "propertyName": "tf.worker.services.gpu", - "defaultValue": "0", - "description": "GPU number of worker services", - "type": "number" - }, - "tf.worker.services.cpu": { - "envName": "TF_WORKER_SERVICES_CPU", - "propertyName": "tf.worker.services.cpu", - "defaultValue": "2", - "description": "CPU number of worker services", - "type": "number" - }, - "tf.worker.services.memory": { - "envName": "TF_WORKER_SERVICES_MEMORY", - "propertyName": "tf.worker.services.memory", - "defaultValue": "4G", - "description": "Memory number of worker services", - "type": "string" - }, - "tf.tensorboard.enable": { - "envName": "TF_TENSORBOARD_ENABLE", - "propertyName": "tf.tensorboard.enable", - 
"defaultValue": true, - "description": "Whether to enable tensorboard", - "type": "checkbox" - }, - "tf.checkpoint.path": { - "envName": "TF_CHECKPOINT_PATH", - "propertyName": "tf.checkpoint.path", - "defaultValue": "", - "description": "tensorflow checkpoint path", - "type": "string" - } - }, - "editor": { - "language": "sh", - "editOnDblClick": false, - "completionSupport": false - }, - "config": { - "fontSize": 9, - "colWidth": 12, - "runOnSelectionChange": false, - "title": true - } - }, - { - "group": "submarine", - "name": "python", - "className": "org.apache.zeppelin.submarine.PySubmarineInterpreter", - "properties": { - "zeppelin.python": { - "envName": null, - "propertyName": "zeppelin.python", - "defaultValue": "python", - "description": "Python directory. It is set to python by default.(assume python is in your $PATH)", - "type": "string" - }, - "zeppelin.python.maxResult": { - "envName": null, - "propertyName": "zeppelin.python.maxResult", - "defaultValue": "1000", - "description": "Max number of dataframe rows to display.", - "type": "number" - }, - "zeppelin.python.useIPython": { - "propertyName": "zeppelin.python.useIPython", - "defaultValue": false, - "description": "whether use IPython when it is available", - "type": "checkbox" - }, - "machinelearning.distributed.enable": { - "envName": "MACHINELEARNING_DISTRIBUTED", - "propertyName": "machinelearning.distributed.enable", - "defaultValue": false, - "description": "Running distributed machine learning", - "type": "checkbox" - } - }, - "editor": { - "language": "python", - "editOnDblClick": false, - "completionSupport": true - } - }, - { - "group": "python", - "name": "ipython", - "className": "org.apache.zeppelin.submarine.IPySubmarineInterpreter", - "properties": { - "zeppelin.ipython.launch.timeout": { - "propertyName": "zeppelin.ipython.launch.timeout", - "defaultValue": "30000", - "description": "time out for ipython launch", - "type": "number" - }, - "zeppelin.ipython.grpc.message_size": { - 
"propertyName": "zeppelin.ipython.grpc.message_size", - "defaultValue": "33554432", - "description": "grpc message size, default is 32M", - "type": "number" - } - }, - "editor": { - "language": "python", - "editOnDblClick": false, - "completionKey": "TAB", - "completionSupport": true - } - }, - { - "group": "submarine", - "name": "sh", - "className": "org.apache.zeppelin.submarine.SubmarineShellInterpreter", - "properties": { - "shell.command.timeout.millisecs": { - "envName": "SHELL_COMMAND_TIMEOUT", - "propertyName": "shell.command.timeout.millisecs", - "defaultValue": "60000", - "description": "Shell command time out in millisecs. Default = 60000", - "type": "number" - } - }, - "editor": { - "language": "sh", - "editOnDblClick": false, - "completionSupport": false - } - } -] diff --git a/submarine/src/main/resources/jinja_templates/submarine-job-run-tf.jinja b/submarine/src/main/resources/jinja_templates/submarine-job-run-tf.jinja deleted file mode 100644 index 8fe4676b7fc..00000000000 --- a/submarine/src/main/resources/jinja_templates/submarine-job-run-tf.jinja +++ /dev/null @@ -1,55 +0,0 @@ -{# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#} -{%- if INTERPRETER_LAUNCH_MODE and INTERPRETER_LAUNCH_MODE == 'yarn' -%} -{{ DOCKER_HADOOP_HDFS_HOME }}/bin/yarn jar {{ HADOOP_YARN_SUBMARINE_JAR }} \ -{%- else -%} -{{ SUBMARINE_HADOOP_HOME }}/bin/yarn jar {{ HADOOP_YARN_SUBMARINE_JAR }} \ -{%- endif %} - job run \ - --name {{ JOB_NAME }} \ - --env DOCKER_JAVA_HOME={{ DOCKER_JAVA_HOME }} \ - --env DOCKER_HADOOP_HDFS_HOME={{ DOCKER_HADOOP_HDFS_HOME }} \ - --env PYTHONPATH="./submarine_algorithm:$PYTHONPATH" \ - --env YARN_CONTAINER_RUNTIME_DOCKER_CONTAINER_NETWORK={{ DOCKER_CONTAINER_NETWORK }} \ - --env HADOOP_LOG_DIR=/tmp \ - --env TZ="{{ DOCKER_CONTAINER_TIME_ZONE }}" \ - --input_path {{ INPUT_PATH }} \ - --checkpoint_path {{ CHECKPOINT_PATH }} \ - --queue {{ SUBMARINE_YARN_QUEUE }} \ -{%- if MACHINELEARNING_DISTRIBUTED_ENABLE and MACHINELEARNING_DISTRIBUTED_ENABLE == 'true' %} - --num_ps {{ TF_PARAMETER_SERVICES_NUM }} \ - --ps_docker_image {{ TF_PARAMETER_SERVICES_DOCKER_IMAGE }} \ - --ps_resources memory={{ TF_PARAMETER_SERVICES_MEMORY }},vcores={{ TF_PARAMETER_SERVICES_CPU }},gpu={{ TF_PARAMETER_SERVICES_GPU }} \ - --ps_launch_cmd "{{ PS_LAUNCH_CMD }}" \ - --num_workers {{ TF_WORKER_SERVICES_NUM }} \ -{%- else -%} - --num_workers 1 \ -{%- endif %} - --worker_docker_image {{ TF_WORKER_SERVICES_DOCKER_IMAGE }} \ - --worker_resources memory={{ TF_WORKER_SERVICES_MEMORY }},vcores={{ TF_WORKER_SERVICES_CPU }},gpu={{ TF_WORKER_SERVICES_GPU }} \ - --worker_launch_cmd "{{ WORKER_LAUNCH_CMD }}" \ -{%- for file in SUBMARINE_ALGORITHM_HDFS_FILES %} - --localization "{{ file }}:." 
\ -{%- endfor %} - --localization "{{ SUBMARINE_ALGORITHM_HDFS_PATH }}:./submarine_algorithm" \ -{%- if SUBMARINE_HADOOP_CONF_DIR %} - --localization "{{ SUBMARINE_HADOOP_CONF_DIR }}:{{ SUBMARINE_HADOOP_CONF_DIR }}" \ -{%- endif %} - --keytab {{ SUBMARINE_HADOOP_KEYTAB }} \ - --principal {{ SUBMARINE_HADOOP_PRINCIPAL }} \ - --distribute_keytab \ - --verbose \ No newline at end of file diff --git a/submarine/src/main/resources/jinja_templates/submarine-tensorboard.jinja b/submarine/src/main/resources/jinja_templates/submarine-tensorboard.jinja deleted file mode 100644 index 5b5f8d539ce..00000000000 --- a/submarine/src/main/resources/jinja_templates/submarine-tensorboard.jinja +++ /dev/null @@ -1,36 +0,0 @@ -{# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#} -{%- if INTERPRETER_LAUNCH_MODE and INTERPRETER_LAUNCH_MODE == 'yarn' -%} -{{ DOCKER_HADOOP_HDFS_HOME }}/bin/yarn jar \ -{%- else -%} -{{ SUBMARINE_HADOOP_HOME }}/bin/yarn jar \ -{%- endif %} - {{ HADOOP_YARN_SUBMARINE_JAR }} job run \ - --name {{ JOB_NAME }} \ - --env DOCKER_JAVA_HOME={{ DOCKER_JAVA_HOME }} \ - --env DOCKER_HADOOP_HDFS_HOME={{ DOCKER_HADOOP_HDFS_HOME }} \ - --env YARN_CONTAINER_RUNTIME_DOCKER_CONTAINER_NETWORK=bridge \ - --env TZ="{{ DOCKER_CONTAINER_TIME_ZONE }}" \ - --checkpoint_path {{ CHECKPOINT_PATH }} \ - --queue {{ SUBMARINE_YARN_QUEUE }} \ - --num_workers 0 \ - --tensorboard \ - --tensorboard_docker_image {{ TF_PARAMETER_SERVICES_DOCKER_IMAGE }} \ - --keytab {{ SUBMARINE_HADOOP_KEYTAB }} \ - --principal {{ SUBMARINE_HADOOP_PRINCIPAL }} \ - --distribute_keytab \ - --verbose \ No newline at end of file diff --git a/submarine/src/main/resources/ui_templates/submarine-command-options.json b/submarine/src/main/resources/ui_templates/submarine-command-options.json deleted file mode 100644 index 4843b187ec3..00000000000 --- a/submarine/src/main/resources/ui_templates/submarine-command-options.json +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -[ - { - "name": "-checkpoint_path ", - "description": "Training output directory of the job, could be local or other FS directory. 
This typically includes checkpoint files and exported model" - }, - { - "name": "-docker_image ", - "description": "Docker image name/tag" - }, - { - "name": "-env ", - "description": "Common environment variable of worker/ps" - }, - { - "name": "-input_path ", - "description": "Input of the job, could be local or other FS directory" - }, - { - "name": "-name ", - "description": "Name of the job" - }, - { - "name": "-num_ps ", - "description": "Number of PS tasks of the job, by default it's 0" - }, - { - "name": "-num_workers ", - "description": "Numnber of worker tasks of the job, by default it's 1" - }, - { - "name": "-ps_docker_image ", - "description": "Specify docker image for PS, when this is not specified, PS uses --docker_image as default." - }, - { - "name": "-ps_launch_cmd ", - "description": "Commandline of worker, arguments will be directly used to launch the PS" - }, - { - "name": "-ps_resources ", - "description": "Resource of each PS, for example memory-mb=2048,vcores=2,yarn.io/gpu=2" - }, - { - "name": "-queue ", - "description": "Name of queue to run the job, by default it uses default queue" - }, - { - "name": "-saved_model_path ", - "description": "Model exported path (savedmodel) of the job, which is needed when exported model is not placed under ${checkpoint_path}could be local or other FS directory. This will be used to serve." - }, - { - "name": "-tensorboard ", - "description": "Should we run TensorBoard for this job? By default it's true" - }, - { - "name": "-verbose", - "description": "Print verbose log for troubleshooting" - }, - { - "name": "-wait_job_finish", - "description": "Specified when user want to wait the job finish" - }, - { - "name": "-worker_docker_image ", - "description": "Specify docker image for WORKER, when this is not specified, WORKER uses --docker_image as default." 
- }, - { - "name": "-worker_resources ", - "description": "Resource of each worker, for example memory-mb=2048,vcores=2,yarn.io/gpu=2" - }, - { - "name": "-localization ", - "description": "Specify localization to remote/local file/directory available to all container(Docker). Argument format is \"RemoteUri:LocalFilePath[:rw]\" (ro permission is not supported yet).
    The RemoteUri can be a file or directory in local or HDFS or s3 or abfs or http .etc.
    The LocalFilePath can be absolute or relative. If relative, it'll be under container's implied working directory.
    This option can be set mutiple times.
    Examples are
    -localization \"hdfs:///user/yarn/mydir2:/opt/data\"
    -localization \"s3a:///a/b/myfile1:./\"
    -localization \"https:///a/b/myfile2:./myfile\"
    -localization \"/user/yarn/mydir3:/opt/mydir3\"
    -localization \"./mydir1:.\"" - } -] \ No newline at end of file diff --git a/submarine/src/main/resources/ui_templates/submarine-dashboard.jinja b/submarine/src/main/resources/ui_templates/submarine-dashboard.jinja deleted file mode 100644 index d998592e2ad..00000000000 --- a/submarine/src/main/resources/ui_templates/submarine-dashboard.jinja +++ /dev/null @@ -1,157 +0,0 @@ -{# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. --#} -
    -
    -
    -
    -

    - Submarine Dashboard -

    -
    -
    -
    -
    - You can execute the selected command by entering the parameters in the interface below. - - - Tensorboard - - - Tensorboard on YARN - - - - Job on YARN - - - - -
    -
    -
    - -
    - -
    -
    -
    - - - - - - -
    -
    -
    -
    - -
    -
    -
    - - - -
    -
    - -
    -
    - - -
    -
    - -
    -
    -
    - - -
    -
    -
    - -
    -
    - - -
    -
    -
    - -
    -
    - - - {%raw%}{{JOB_STATUS}}{%endraw%} - {%raw%}{{JOB_STATUS}}{%endraw%} - {%raw%}{{JOB_STATUS}}{%endraw%} - {%raw%}{{JOB_STATUS}}{%endraw%} - - Started at: {%raw%}{{YARN_APP_STARTED_TIME}}{%endraw%} - Launch at: {%raw%}{{YARN_APP_LAUNCH_TIME}}{%endraw%} - Finishe at: {%raw%}{{YARN_APP_FINISHED_TIME}}{%endraw%} - Elapsed time: {%raw%}{{YARN_APP_ELAPSED_TIME}}{%endraw%} - - -
    -
    -
    -
    -
    - -
    -
    diff --git a/submarine/src/main/resources/ui_templates/submarine-log-head.jinja b/submarine/src/main/resources/ui_templates/submarine-log-head.jinja deleted file mode 100644 index d742c2a7ba2..00000000000 --- a/submarine/src/main/resources/ui_templates/submarine-log-head.jinja +++ /dev/null @@ -1,24 +0,0 @@ -{# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. --#} -
    -
    -
    -

    - Sumbrine execution log -

    -
    -
    diff --git a/submarine/src/main/resources/ui_templates/submarine-usage.jinja b/submarine/src/main/resources/ui_templates/submarine-usage.jinja deleted file mode 100644 index 5695044a11a..00000000000 --- a/submarine/src/main/resources/ui_templates/submarine-usage.jinja +++ /dev/null @@ -1,111 +0,0 @@ -{# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. --#} -
    -
    -
    -
    -

    - Submarine Usage -

    -
    -
    -
    -
    - You can execute the selected command by entering the parameters in the interface below. - - Submarine dashboard - -
    -
    -
    - -
    - -
    - Launch Standalone Tensorflow Application: -
    -yarn jar path-to/hadoop-yarn-applications-submarine-3.2.0-SNAPSHOT.jar job run \
    -  --env DOCKER_JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64/jre/ \
    -  --env DOCKER_HADOOP_HDFS_HOME=/hadoop-3.1.0 --name tf-job-001 \
    -  --docker_image  \
    -  --input_path hdfs://default/dataset/cifar-10-data  \
    -  --checkpoint_path hdfs://default/tmp/cifar-10-jobdir \
    -  --worker_resources memory=4G,vcores=2,gpu=2  \
    -  --worker_launch_cmd "python ... (Your training application cmd)" \
    -  --tensorboard # this will launch a companion tensorboard container for monitoring
    -
    -
    -
    - Launch Distributed Tensorflow Application: -
    -yarn jar hadoop-yarn-applications-submarine-${version}.jar job run \
    - --name tf-job-001 --docker_image  \
    - --input_path hdfs://default/dataset/cifar-10-data \
    - --checkpoint_path hdfs://default/tmp/cifar-10-jobdir \
    - --env DOCKER_JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64/jre/ \
    - --env DOCKER_HADOOP_HDFS_HOME=/hadoop-3.1.0 \
    - --num_workers 2 \
    - --worker_resources memory=8G,vcores=2,gpu=1 --worker_launch_cmd "cmd for worker ..." \
    - --num_ps 2 \
    - --ps_resources memory=4G,vcores=2,gpu=0 --ps_launch_cmd "cmd for ps"
    -
    -
    - -
    - Commandline options -
    -
    -
    -
    -
    - - - - - - - - -{% for item in COMMANDLINE_OPTIONS %} - - - - -{% endfor %} - -
    namevalue
    {{item.name}} -
    - {{item.description}} -
    -
    -
    -
    -
    -
    - -
    -
    -
    - -
    -
    -
    - -
    diff --git a/submarine/src/test/java/org/apache/zeppelin/submarine/BaseInterpreterTest.java b/submarine/src/test/java/org/apache/zeppelin/submarine/BaseInterpreterTest.java deleted file mode 100644 index 25061fd144d..00000000000 --- a/submarine/src/test/java/org/apache/zeppelin/submarine/BaseInterpreterTest.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.zeppelin.submarine; - -import org.apache.zeppelin.display.AngularObject; -import org.apache.zeppelin.display.AngularObjectRegistry; -import org.apache.zeppelin.display.AngularObjectRegistryListener; -import org.apache.zeppelin.interpreter.InterpreterContext; -import org.apache.zeppelin.interpreter.InterpreterException; -import org.apache.zeppelin.interpreter.InterpreterOutput; -import org.apache.zeppelin.interpreter.remote.RemoteInterpreterEventClient; -import org.apache.zeppelin.user.AuthenticationInfo; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; - -import java.util.concurrent.atomic.AtomicInteger; - -import static org.mockito.Mockito.mock; - -public abstract class BaseInterpreterTest { - - @BeforeEach - public abstract void setUp() throws InterpreterException; - - @AfterEach - public abstract void tearDown() throws InterpreterException; - - protected InterpreterContext getIntpContext() { - final AtomicInteger onAdd = new AtomicInteger(0); - final AtomicInteger onUpdate = new AtomicInteger(0); - final AtomicInteger onRemove = new AtomicInteger(0); - AngularObjectRegistry registry = new AngularObjectRegistry("intpId", - new AngularObjectRegistryListener() { - - @Override - public void onAddAngularObject(String interpreterGroupId, - AngularObject angularObject) { - onAdd.incrementAndGet(); - } - - @Override - public void onUpdateAngularObject(String interpreterGroupId, - AngularObject angularObject) { - onUpdate.incrementAndGet(); - } - - @Override - public void onRemoveAngularObject(String interpreterGroupId, - AngularObject angularObject) { - onRemove.incrementAndGet(); - } - }); - - AuthenticationInfo authenticationInfo = new AuthenticationInfo("user"); - - return InterpreterContext.builder() - .setNoteId("noteId") - .setNoteName("noteName") - .setParagraphId("paragraphId") - .setAuthenticationInfo(authenticationInfo) - .setAngularObjectRegistry(registry) - .setInterpreterOut(new 
InterpreterOutput()) - .setIntpEventClient(mock(RemoteInterpreterEventClient.class)) - .build(); - } -} diff --git a/submarine/src/test/java/org/apache/zeppelin/submarine/HdfsClientTest.java b/submarine/src/test/java/org/apache/zeppelin/submarine/HdfsClientTest.java deleted file mode 100644 index 25378510e0a..00000000000 --- a/submarine/src/test/java/org/apache/zeppelin/submarine/HdfsClientTest.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.zeppelin.submarine; - -import org.apache.zeppelin.submarine.hadoop.HdfsClient; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.ValueSource; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import java.io.IOException; -import java.util.Properties; - -public class HdfsClientTest { - private static Logger LOGGER = LoggerFactory.getLogger(HdfsClientTest.class); - - private static HdfsClient hdfsClient = null; - - @BeforeAll - public static void initEnv() { - Properties properties = new Properties(); - hdfsClient = new HdfsClient(properties); - } - - @ParameterizedTest - @ValueSource(strings = { - "abc", - "%submarine abc", - "%submarine.sh abc", - "%submarine.sh(k1=v1,k2=v2) abc" }) - void testParseText0(String text) throws IOException { - String script = 
hdfsClient.parseText(text); - LOGGER.info(script); - assertEquals("abc", script); - } - - @Test - void testParseText5() throws IOException { - String text = ""; - String script = hdfsClient.parseText(text); - LOGGER.info(script); - assertEquals("", script); - } -} diff --git a/submarine/src/test/java/org/apache/zeppelin/submarine/JinjaTemplatesTest.java b/submarine/src/test/java/org/apache/zeppelin/submarine/JinjaTemplatesTest.java deleted file mode 100644 index 0d89bd4851c..00000000000 --- a/submarine/src/test/java/org/apache/zeppelin/submarine/JinjaTemplatesTest.java +++ /dev/null @@ -1,473 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.zeppelin.submarine; - -import com.google.common.io.Resources; -import com.hubspot.jinjava.Jinjava; -import org.apache.zeppelin.submarine.commons.SubmarineConstants; -import org.apache.zeppelin.submarine.job.SubmarineJob; -import org.junit.jupiter.api.Test; -import org.apache.zeppelin.submarine.commons.SubmarineUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import java.io.IOException; -import java.net.URL; -import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; - -class JinjaTemplatesTest { - private static Logger LOGGER = LoggerFactory.getLogger(JinjaTemplatesTest.class); - - @Test - void jobRunJinjaTemplateTest1() throws IOException { - String str = jobRunJinjaTemplateTest(Boolean.TRUE, Boolean.TRUE); - - StringBuffer sbCheck = new StringBuffer(); - sbCheck.append("DOCKER_HADOOP_HDFS_HOME_VALUE/bin/yarn jar " + - "HADOOP_YARN_SUBMARINE_JAR_VALUE \\\n" + - " job run \\\n" + - " --name JOB_NAME_VALUE \\\n" + - " --env DOCKER_JAVA_HOME=DOCKER_JAVA_HOME_VALUE \\\n" + - " --env DOCKER_HADOOP_HDFS_HOME=DOCKER_HADOOP_HDFS_HOME_VALUE \\\n" + - " --env PYTHONPATH=\"./submarine_algorithm:$PYTHONPATH\" \\\n" + - " --env YARN_CONTAINER_RUNTIME_DOCKER_CONTAINER_NETWORK=" + - "DOCKER_CONTAINER_NETWORK_VALUE \\\n" + - " --env HADOOP_LOG_DIR=/tmp \\\n" + - " --env TZ=\"\" \\\n" + - " --input_path INPUT_PATH_VALUE \\\n" + - " --checkpoint_path CHECKPOINT_PATH_VALUE \\\n" + - " --queue SUBMARINE_YARN_QUEUE \\\n" + - " --num_ps TF_PARAMETER_SERVICES_NUM_VALUE \\\n" + - " --ps_docker_image TF_PARAMETER_SERVICES_DOCKER_IMAGE_VALUE \\\n" + - " --ps_resources memory=TF_PARAMETER_SERVICES_MEMORY_VALUE," + - "vcores=TF_PARAMETER_SERVICES_CPU_VALUE,gpu=TF_PARAMETER_SERVICES_GPU_VALUE \\\n" + - " --ps_launch_cmd \"PS_LAUNCH_CMD_VALUE\" \\\n" + - " --num_workers TF_WORKER_SERVICES_NUM_VALUE \\\n" + - " 
--worker_docker_image TF_WORKER_SERVICES_DOCKER_IMAGE_VALUE \\\n" + - " --worker_resources memory=TF_WORKER_SERVICES_MEMORY_VALUE," + - "vcores=TF_WORKER_SERVICES_CPU_VALUE,gpu=TF_WORKER_SERVICES_GPU_VALUE \\\n" + - " --worker_launch_cmd \"WORKER_LAUNCH_CMD_VALUE\" \\\n" + - " --localization \"hdfs://file1:.\" \\\n" + - " --localization \"hdfs://file2:.\" \\\n" + - " --localization \"hdfs://file3:.\" \\\n" + - " --localization \"SUBMARINE_ALGORITHM_HDFS_PATH_VALUE:./submarine_algorithm\" \\\n" + - " --localization \"SUBMARINE_HADOOP_CONF_DIR_VALUE:" + - "SUBMARINE_HADOOP_CONF_DIR_VALUE\" \\\n" + - " --keytab SUBMARINE_HADOOP_KEYTAB_VALUE \\\n" + - " --principal SUBMARINE_HADOOP_PRINCIPAL_VALUE \\\n" + - " --distribute_keytab \\\n" + - " --verbose"); - - assertEquals(str, sbCheck.toString()); - } - - @Test - void jobRunJinjaTemplateTest2() throws IOException { - String str = jobRunJinjaTemplateTest(Boolean.TRUE, Boolean.FALSE); - StringBuffer sbCheck = new StringBuffer(); - sbCheck.append("SUBMARINE_HADOOP_HOME_VALUE/bin/yarn jar HADOOP_YARN_SUBMARINE_JAR_VALUE \\\n" + - " job run \\\n" + - " --name JOB_NAME_VALUE \\\n" + - " --env DOCKER_JAVA_HOME=DOCKER_JAVA_HOME_VALUE \\\n" + - " --env DOCKER_HADOOP_HDFS_HOME=DOCKER_HADOOP_HDFS_HOME_VALUE \\\n" + - " --env PYTHONPATH=\"./submarine_algorithm:$PYTHONPATH\" \\\n" + - " --env YARN_CONTAINER_RUNTIME_DOCKER_CONTAINER_NETWORK=" + - "DOCKER_CONTAINER_NETWORK_VALUE \\\n" + - " --env HADOOP_LOG_DIR=/tmp \\\n" + - " --env TZ=\"\" \\\n" + - " --input_path INPUT_PATH_VALUE \\\n" + - " --checkpoint_path CHECKPOINT_PATH_VALUE \\\n" + - " --queue SUBMARINE_YARN_QUEUE \\\n" + - " --num_ps TF_PARAMETER_SERVICES_NUM_VALUE \\\n" + - " --ps_docker_image TF_PARAMETER_SERVICES_DOCKER_IMAGE_VALUE \\\n" + - " --ps_resources memory=TF_PARAMETER_SERVICES_MEMORY_VALUE," + - "vcores=TF_PARAMETER_SERVICES_CPU_VALUE,gpu=TF_PARAMETER_SERVICES_GPU_VALUE \\\n" + - " --ps_launch_cmd \"PS_LAUNCH_CMD_VALUE\" \\\n" + - " --num_workers 
TF_WORKER_SERVICES_NUM_VALUE \\\n" + - " --worker_docker_image TF_WORKER_SERVICES_DOCKER_IMAGE_VALUE \\\n" + - " --worker_resources memory=TF_WORKER_SERVICES_MEMORY_VALUE," + - "vcores=TF_WORKER_SERVICES_CPU_VALUE,gpu=TF_WORKER_SERVICES_GPU_VALUE \\\n" + - " --worker_launch_cmd \"WORKER_LAUNCH_CMD_VALUE\" \\\n" + - " --localization \"hdfs://file1:.\" \\\n" + - " --localization \"hdfs://file2:.\" \\\n" + - " --localization \"hdfs://file3:.\" \\\n" + - " --localization \"SUBMARINE_ALGORITHM_HDFS_PATH_VALUE:./submarine_algorithm\" \\\n" + - " --localization \"SUBMARINE_HADOOP_CONF_DIR_VALUE:" + - "SUBMARINE_HADOOP_CONF_DIR_VALUE\" \\\n" + - " --keytab SUBMARINE_HADOOP_KEYTAB_VALUE \\\n" + - " --principal SUBMARINE_HADOOP_PRINCIPAL_VALUE \\\n" + - " --distribute_keytab \\\n" + - " --verbose"); - assertEquals(str, sbCheck.toString()); - } - - @Test - void jobRunJinjaTemplateTest3() throws IOException { - String str = jobRunJinjaTemplateTest(Boolean.TRUE, null); - StringBuffer sbCheck = new StringBuffer(); - sbCheck.append("SUBMARINE_HADOOP_HOME_VALUE/bin/yarn jar HADOOP_YARN_SUBMARINE_JAR_VALUE \\\n" + - " job run \\\n" + - " --name JOB_NAME_VALUE \\\n" + - " --env DOCKER_JAVA_HOME=DOCKER_JAVA_HOME_VALUE \\\n" + - " --env DOCKER_HADOOP_HDFS_HOME=DOCKER_HADOOP_HDFS_HOME_VALUE \\\n" + - " --env PYTHONPATH=\"./submarine_algorithm:$PYTHONPATH\" \\\n" + - " --env YARN_CONTAINER_RUNTIME_DOCKER_CONTAINER_NETWORK=" + - "DOCKER_CONTAINER_NETWORK_VALUE \\\n" + - " --env HADOOP_LOG_DIR=/tmp \\\n" + - " --env TZ=\"\" \\\n" + - " --input_path INPUT_PATH_VALUE \\\n" + - " --checkpoint_path CHECKPOINT_PATH_VALUE \\\n" + - " --queue SUBMARINE_YARN_QUEUE \\\n" + - " --num_ps TF_PARAMETER_SERVICES_NUM_VALUE \\\n" + - " --ps_docker_image TF_PARAMETER_SERVICES_DOCKER_IMAGE_VALUE \\\n" + - " --ps_resources memory=TF_PARAMETER_SERVICES_MEMORY_VALUE," + - "vcores=TF_PARAMETER_SERVICES_CPU_VALUE,gpu=TF_PARAMETER_SERVICES_GPU_VALUE \\\n" + - " --ps_launch_cmd \"PS_LAUNCH_CMD_VALUE\" \\\n" + - " 
--num_workers TF_WORKER_SERVICES_NUM_VALUE \\\n" + - " --worker_docker_image TF_WORKER_SERVICES_DOCKER_IMAGE_VALUE \\\n" + - " --worker_resources memory=TF_WORKER_SERVICES_MEMORY_VALUE," + - "vcores=TF_WORKER_SERVICES_CPU_VALUE,gpu=TF_WORKER_SERVICES_GPU_VALUE \\\n" + - " --worker_launch_cmd \"WORKER_LAUNCH_CMD_VALUE\" \\\n" + - " --localization \"hdfs://file1:.\" \\\n" + - " --localization \"hdfs://file2:.\" \\\n" + - " --localization \"hdfs://file3:.\" \\\n" + - " --localization \"SUBMARINE_ALGORITHM_HDFS_PATH_VALUE:./submarine_algorithm\" \\\n" + - " --localization \"SUBMARINE_HADOOP_CONF_DIR_VALUE:" + - "SUBMARINE_HADOOP_CONF_DIR_VALUE\" \\\n" + - " --keytab SUBMARINE_HADOOP_KEYTAB_VALUE \\\n" + - " --principal SUBMARINE_HADOOP_PRINCIPAL_VALUE \\\n" + - " --distribute_keytab \\\n" + - " --verbose"); - assertEquals(str, sbCheck.toString()); - } - - @Test - void jobRunJinjaTemplateTest4() throws IOException { - String str = jobRunJinjaTemplateTest(Boolean.FALSE, Boolean.TRUE); - StringBuffer sbCheck = new StringBuffer(); - sbCheck.append("DOCKER_HADOOP_HDFS_HOME_VALUE/bin/yarn jar " + - "HADOOP_YARN_SUBMARINE_JAR_VALUE \\\n" + - " job run \\\n" + - " --name JOB_NAME_VALUE \\\n" + - " --env DOCKER_JAVA_HOME=DOCKER_JAVA_HOME_VALUE \\\n" + - " --env DOCKER_HADOOP_HDFS_HOME=DOCKER_HADOOP_HDFS_HOME_VALUE \\\n" + - " --env PYTHONPATH=\"./submarine_algorithm:$PYTHONPATH\" \\\n" + - " --env YARN_CONTAINER_RUNTIME_DOCKER_CONTAINER_NETWORK=" + - "DOCKER_CONTAINER_NETWORK_VALUE \\\n" + - " --env HADOOP_LOG_DIR=/tmp \\\n" + - " --env TZ=\"\" \\\n" + - " --input_path INPUT_PATH_VALUE \\\n" + - " --checkpoint_path CHECKPOINT_PATH_VALUE \\\n" + - " --queue SUBMARINE_YARN_QUEUE \\\n" + - " --num_workers 1 \\\n" + - " --worker_docker_image TF_WORKER_SERVICES_DOCKER_IMAGE_VALUE \\\n" + - " --worker_resources memory=TF_WORKER_SERVICES_MEMORY_VALUE," + - "vcores=TF_WORKER_SERVICES_CPU_VALUE,gpu=TF_WORKER_SERVICES_GPU_VALUE \\\n" + - " --worker_launch_cmd \"WORKER_LAUNCH_CMD_VALUE\" 
\\\n" + - " --localization \"hdfs://file1:.\" \\\n" + - " --localization \"hdfs://file2:.\" \\\n" + - " --localization \"hdfs://file3:.\" \\\n" + - " --localization \"SUBMARINE_ALGORITHM_HDFS_PATH_VALUE:./submarine_algorithm\" \\\n" + - " --localization \"SUBMARINE_HADOOP_CONF_DIR_VALUE:" + - "SUBMARINE_HADOOP_CONF_DIR_VALUE\" \\\n" + - " --keytab SUBMARINE_HADOOP_KEYTAB_VALUE \\\n" + - " --principal SUBMARINE_HADOOP_PRINCIPAL_VALUE \\\n" + - " --distribute_keytab \\\n" + - " --verbose"); - assertEquals(str, sbCheck.toString()); - } - - @Test - void jobRunJinjaTemplateTest5() throws IOException { - String str = jobRunJinjaTemplateTest(Boolean.FALSE, Boolean.FALSE); - StringBuffer sbCheck = new StringBuffer(); - sbCheck.append("SUBMARINE_HADOOP_HOME_VALUE/bin/yarn jar " + - "HADOOP_YARN_SUBMARINE_JAR_VALUE \\\n" + - " job run \\\n" + - " --name JOB_NAME_VALUE \\\n" + - " --env DOCKER_JAVA_HOME=DOCKER_JAVA_HOME_VALUE \\\n" + - " --env DOCKER_HADOOP_HDFS_HOME=DOCKER_HADOOP_HDFS_HOME_VALUE \\\n" + - " --env PYTHONPATH=\"./submarine_algorithm:$PYTHONPATH\" \\\n" + - " --env YARN_CONTAINER_RUNTIME_DOCKER_CONTAINER_NETWORK=" + - "DOCKER_CONTAINER_NETWORK_VALUE \\\n" + - " --env HADOOP_LOG_DIR=/tmp \\\n" + - " --env TZ=\"\" \\\n" + - " --input_path INPUT_PATH_VALUE \\\n" + - " --checkpoint_path CHECKPOINT_PATH_VALUE \\\n" + - " --queue SUBMARINE_YARN_QUEUE \\\n" + - " --num_workers 1 \\\n" + - " --worker_docker_image TF_WORKER_SERVICES_DOCKER_IMAGE_VALUE \\\n" + - " --worker_resources memory=TF_WORKER_SERVICES_MEMORY_VALUE," + - "vcores=TF_WORKER_SERVICES_CPU_VALUE,gpu=TF_WORKER_SERVICES_GPU_VALUE \\\n" + - " --worker_launch_cmd \"WORKER_LAUNCH_CMD_VALUE\" \\\n" + - " --localization \"hdfs://file1:.\" \\\n" + - " --localization \"hdfs://file2:.\" \\\n" + - " --localization \"hdfs://file3:.\" \\\n" + - " --localization \"SUBMARINE_ALGORITHM_HDFS_PATH_VALUE:./submarine_algorithm\" \\\n" + - " --localization \"SUBMARINE_HADOOP_CONF_DIR_VALUE:" + - 
"SUBMARINE_HADOOP_CONF_DIR_VALUE\" \\\n" + - " --keytab SUBMARINE_HADOOP_KEYTAB_VALUE \\\n" + - " --principal SUBMARINE_HADOOP_PRINCIPAL_VALUE \\\n" + - " --distribute_keytab \\\n" + - " --verbose"); - assertEquals(str, sbCheck.toString()); - } - - @Test - void jobRunJinjaTemplateTest6() throws IOException { - String str = jobRunJinjaTemplateTest(null, Boolean.FALSE); - StringBuffer sbCheck = new StringBuffer(); - sbCheck.append("SUBMARINE_HADOOP_HOME_VALUE/bin/yarn jar HADOOP_YARN_SUBMARINE_JAR_VALUE \\\n" + - " job run \\\n" + - " --name JOB_NAME_VALUE \\\n" + - " --env DOCKER_JAVA_HOME=DOCKER_JAVA_HOME_VALUE \\\n" + - " --env DOCKER_HADOOP_HDFS_HOME=DOCKER_HADOOP_HDFS_HOME_VALUE \\\n" + - " --env PYTHONPATH=\"./submarine_algorithm:$PYTHONPATH\" \\\n" + - " --env YARN_CONTAINER_RUNTIME_DOCKER_CONTAINER_NETWORK=" + - "DOCKER_CONTAINER_NETWORK_VALUE \\\n" + - " --env HADOOP_LOG_DIR=/tmp \\\n" + - " --env TZ=\"\" \\\n" + - " --input_path INPUT_PATH_VALUE \\\n" + - " --checkpoint_path CHECKPOINT_PATH_VALUE \\\n" + - " --queue SUBMARINE_YARN_QUEUE \\\n" + - " --num_workers 1 \\\n" + - " --worker_docker_image TF_WORKER_SERVICES_DOCKER_IMAGE_VALUE \\\n" + - " --worker_resources memory=TF_WORKER_SERVICES_MEMORY_VALUE," + - "vcores=TF_WORKER_SERVICES_CPU_VALUE,gpu=TF_WORKER_SERVICES_GPU_VALUE \\\n" + - " --worker_launch_cmd \"WORKER_LAUNCH_CMD_VALUE\" \\\n" + - " --localization \"hdfs://file1:.\" \\\n" + - " --localization \"hdfs://file2:.\" \\\n" + - " --localization \"hdfs://file3:.\" \\\n" + - " --localization \"SUBMARINE_ALGORITHM_HDFS_PATH_VALUE:./submarine_algorithm\" \\\n" + - " --localization \"SUBMARINE_HADOOP_CONF_DIR_VALUE:" + - "SUBMARINE_HADOOP_CONF_DIR_VALUE\" \\\n" + - " --keytab SUBMARINE_HADOOP_KEYTAB_VALUE \\\n" + - " --principal SUBMARINE_HADOOP_PRINCIPAL_VALUE \\\n" + - " --distribute_keytab \\\n" + - " --verbose"); - assertEquals(str, sbCheck.toString()); - } - - @Test - void jobRunJinjaTemplateTest7() throws IOException { - String str = 
jobRunJinjaTemplateTest(null, null); - StringBuffer sbCheck = new StringBuffer(); - sbCheck.append("SUBMARINE_HADOOP_HOME_VALUE/bin/yarn jar HADOOP_YARN_SUBMARINE_JAR_VALUE \\\n" + - " job run \\\n" + - " --name JOB_NAME_VALUE \\\n" + - " --env DOCKER_JAVA_HOME=DOCKER_JAVA_HOME_VALUE \\\n" + - " --env DOCKER_HADOOP_HDFS_HOME=DOCKER_HADOOP_HDFS_HOME_VALUE \\\n" + - " --env PYTHONPATH=\"./submarine_algorithm:$PYTHONPATH\" \\\n" + - " --env YARN_CONTAINER_RUNTIME_DOCKER_CONTAINER_NETWORK=" + - "DOCKER_CONTAINER_NETWORK_VALUE \\\n" + - " --env HADOOP_LOG_DIR=/tmp \\\n" + - " --env TZ=\"\" \\\n" + - " --input_path INPUT_PATH_VALUE \\\n" + - " --checkpoint_path CHECKPOINT_PATH_VALUE \\\n" + - " --queue SUBMARINE_YARN_QUEUE \\\n" + - " --num_workers 1 \\\n" + - " --worker_docker_image TF_WORKER_SERVICES_DOCKER_IMAGE_VALUE \\\n" + - " --worker_resources memory=TF_WORKER_SERVICES_MEMORY_VALUE," + - "vcores=TF_WORKER_SERVICES_CPU_VALUE,gpu=TF_WORKER_SERVICES_GPU_VALUE \\\n" + - " --worker_launch_cmd \"WORKER_LAUNCH_CMD_VALUE\" \\\n" + - " --localization \"hdfs://file1:.\" \\\n" + - " --localization \"hdfs://file2:.\" \\\n" + - " --localization \"hdfs://file3:.\" \\\n" + - " --localization \"SUBMARINE_ALGORITHM_HDFS_PATH_VALUE:./submarine_algorithm\" \\\n" + - " --localization \"SUBMARINE_HADOOP_CONF_DIR_VALUE:" + - "SUBMARINE_HADOOP_CONF_DIR_VALUE\" \\\n" + - " --keytab SUBMARINE_HADOOP_KEYTAB_VALUE \\\n" + - " --principal SUBMARINE_HADOOP_PRINCIPAL_VALUE \\\n" + - " --distribute_keytab \\\n" + - " --verbose"); - assertEquals(str, sbCheck.toString()); - } - - @Test - void jobRunJinjaTemplateTest8() throws IOException { - String str = tensorboardJinjaTemplateTest(Boolean.TRUE, Boolean.TRUE); - StringBuffer sbCheck = new StringBuffer(); - sbCheck.append("DOCKER_HADOOP_HDFS_HOME_VALUE/bin/yarn jar \\\n" + - " HADOOP_YARN_SUBMARINE_JAR_VALUE job run \\\n" + - " --name JOB_NAME_VALUE \\\n" + - " --env DOCKER_JAVA_HOME=DOCKER_JAVA_HOME_VALUE \\\n" + - " --env 
DOCKER_HADOOP_HDFS_HOME=DOCKER_HADOOP_HDFS_HOME_VALUE \\\n" + - " --env YARN_CONTAINER_RUNTIME_DOCKER_CONTAINER_NETWORK=bridge \\\n" + - " --env TZ=\"\" \\\n" + - " --checkpoint_path CHECKPOINT_PATH_VALUE \\\n" + - " --queue SUBMARINE_YARN_QUEUE \\\n" + - " --num_workers 0 \\\n" + - " --tensorboard \\\n" + - " --tensorboard_docker_image TF_PARAMETER_SERVICES_DOCKER_IMAGE_VALUE \\\n" + - " --keytab SUBMARINE_HADOOP_KEYTAB_VALUE \\\n" + - " --principal SUBMARINE_HADOOP_PRINCIPAL_VALUE \\\n" + - " --distribute_keytab \\\n" + - " --verbose"); - assertEquals(str, sbCheck.toString()); - } - - @Test - void jobRunJinjaTemplateTest9() throws IOException { - String str = tensorboardJinjaTemplateTest(Boolean.TRUE, Boolean.FALSE); - StringBuffer sbCheck = new StringBuffer(); - sbCheck.append("SUBMARINE_HADOOP_HOME_VALUE/bin/yarn jar \\\n" + - " HADOOP_YARN_SUBMARINE_JAR_VALUE job run \\\n" + - " --name JOB_NAME_VALUE \\\n" + - " --env DOCKER_JAVA_HOME=DOCKER_JAVA_HOME_VALUE \\\n" + - " --env DOCKER_HADOOP_HDFS_HOME=DOCKER_HADOOP_HDFS_HOME_VALUE \\\n" + - " --env YARN_CONTAINER_RUNTIME_DOCKER_CONTAINER_NETWORK=bridge \\\n" + - " --env TZ=\"\" \\\n" + - " --checkpoint_path CHECKPOINT_PATH_VALUE \\\n" + - " --queue SUBMARINE_YARN_QUEUE \\\n" + - " --num_workers 0 \\\n" + - " --tensorboard \\\n" + - " --tensorboard_docker_image TF_PARAMETER_SERVICES_DOCKER_IMAGE_VALUE \\\n" + - " --keytab SUBMARINE_HADOOP_KEYTAB_VALUE \\\n" + - " --principal SUBMARINE_HADOOP_PRINCIPAL_VALUE \\\n" + - " --distribute_keytab \\\n" + - " --verbose"); - assertEquals(str, sbCheck.toString()); - } - - public String jobRunJinjaTemplateTest(Boolean dist, Boolean launchMode) throws IOException { - URL urlTemplate = Resources.getResource(SubmarineJob.SUBMARINE_JOBRUN_TF_JINJA); - String template = Resources.toString(urlTemplate, StandardCharsets.UTF_8); - Jinjava jinjava = new Jinjava(); - HashMap jinjaParams = initJinjaParams(dist, launchMode); - - String submarineCmd = jinjava.render(template, 
jinjaParams); - int pos = submarineCmd.indexOf("\n"); - if (pos == 0) { - submarineCmd = submarineCmd.replaceFirst("\n", ""); - } - - LOGGER.info("------------------------"); - LOGGER.info(submarineCmd); - LOGGER.info("------------------------"); - - return submarineCmd; - } - - public String tensorboardJinjaTemplateTest(Boolean dist, Boolean launchMode) throws IOException { - URL urlTemplate = Resources.getResource(SubmarineJob.SUBMARINE_TENSORBOARD_JINJA); - String template = Resources.toString(urlTemplate, StandardCharsets.UTF_8); - Jinjava jinjava = new Jinjava(); - HashMap jinjaParams = initJinjaParams(dist, launchMode); - - String submarineCmd = jinjava.render(template, jinjaParams); - int pos = submarineCmd.indexOf("\n"); - if (pos == 0) { - submarineCmd = submarineCmd.replaceFirst("\n", ""); - } - LOGGER.info("------------------------"); - LOGGER.info(submarineCmd); - LOGGER.info("------------------------"); - - return submarineCmd; - } - - private HashMap initJinjaParams(Boolean dist, Boolean launchMode) { - HashMap jinjaParams = new HashMap<>(); - - if (launchMode == Boolean.TRUE) { - jinjaParams.put(SubmarineUtils.unifyKey(SubmarineConstants.INTERPRETER_LAUNCH_MODE), "yarn"); - } else if (launchMode == Boolean.FALSE) { - jinjaParams.put(SubmarineUtils.unifyKey(SubmarineConstants.INTERPRETER_LAUNCH_MODE), "local"); - } - - if (dist == Boolean.TRUE) { - jinjaParams.put(SubmarineUtils.unifyKey( - SubmarineConstants.MACHINELEARNING_DISTRIBUTED_ENABLE), "true"); - } else if (dist == Boolean.FALSE) { - jinjaParams.put(SubmarineUtils.unifyKey( - SubmarineConstants.MACHINELEARNING_DISTRIBUTED_ENABLE), "false"); - } - - jinjaParams.put(SubmarineUtils.unifyKey( - SubmarineConstants.TF_TENSORBOARD_ENABLE), "true"); - - List arrayHdfsFiles = new ArrayList<>(); - arrayHdfsFiles.add("hdfs://file1"); - arrayHdfsFiles.add("hdfs://file2"); - arrayHdfsFiles.add("hdfs://file3"); - jinjaParams.put(SubmarineUtils.unifyKey( - 
SubmarineConstants.SUBMARINE_ALGORITHM_HDFS_FILES), arrayHdfsFiles); - - // mock - jinjaParams.put(SubmarineUtils.unifyKey( - SubmarineConstants.DOCKER_HADOOP_HDFS_HOME), "DOCKER_HADOOP_HDFS_HOME_VALUE"); - jinjaParams.put(SubmarineUtils.unifyKey( - SubmarineConstants.JOB_NAME), "JOB_NAME_VALUE"); - jinjaParams.put(SubmarineUtils.unifyKey( - SubmarineConstants.SUBMARINE_YARN_QUEUE), "SUBMARINE_YARN_QUEUE"); - jinjaParams.put(SubmarineUtils.unifyKey( - SubmarineConstants.HADOOP_YARN_SUBMARINE_JAR), "HADOOP_YARN_SUBMARINE_JAR_VALUE"); - jinjaParams.put(SubmarineUtils.unifyKey( - SubmarineConstants.SUBMARINE_HADOOP_HOME), "SUBMARINE_HADOOP_HOME_VALUE"); - jinjaParams.put(SubmarineUtils.unifyKey( - SubmarineConstants.DOCKER_JAVA_HOME), "DOCKER_JAVA_HOME_VALUE"); - jinjaParams.put(SubmarineUtils.unifyKey( - SubmarineConstants.DOCKER_CONTAINER_NETWORK), "DOCKER_CONTAINER_NETWORK_VALUE"); - jinjaParams.put(SubmarineUtils.unifyKey( - SubmarineConstants.INPUT_PATH), "INPUT_PATH_VALUE"); - jinjaParams.put(SubmarineUtils.unifyKey( - SubmarineConstants.CHECKPOINT_PATH), "CHECKPOINT_PATH_VALUE"); - jinjaParams.put(SubmarineUtils.unifyKey( - SubmarineConstants.TF_PARAMETER_SERVICES_NUM), "TF_PARAMETER_SERVICES_NUM_VALUE"); - jinjaParams.put(SubmarineUtils.unifyKey( - SubmarineConstants.TF_PARAMETER_SERVICES_DOCKER_IMAGE), - "TF_PARAMETER_SERVICES_DOCKER_IMAGE_VALUE"); - jinjaParams.put(SubmarineUtils.unifyKey( - SubmarineConstants.TF_PARAMETER_SERVICES_MEMORY), "TF_PARAMETER_SERVICES_MEMORY_VALUE"); - jinjaParams.put(SubmarineUtils.unifyKey( - SubmarineConstants.TF_PARAMETER_SERVICES_CPU), "TF_PARAMETER_SERVICES_CPU_VALUE"); - jinjaParams.put(SubmarineUtils.unifyKey( - SubmarineConstants.TF_PARAMETER_SERVICES_GPU), "TF_PARAMETER_SERVICES_GPU_VALUE"); - jinjaParams.put(SubmarineUtils.unifyKey( - SubmarineConstants.PS_LAUNCH_CMD), "PS_LAUNCH_CMD_VALUE"); - jinjaParams.put(SubmarineUtils.unifyKey( - SubmarineConstants.TF_WORKER_SERVICES_DOCKER_IMAGE), - 
"TF_WORKER_SERVICES_DOCKER_IMAGE_VALUE"); - jinjaParams.put(SubmarineUtils.unifyKey( - SubmarineConstants.TF_WORKER_SERVICES_NUM), "TF_WORKER_SERVICES_NUM_VALUE"); - jinjaParams.put(SubmarineUtils.unifyKey( - SubmarineConstants.TF_WORKER_SERVICES_DOCKER_IMAGE), - "TF_WORKER_SERVICES_DOCKER_IMAGE_VALUE"); - jinjaParams.put(SubmarineUtils.unifyKey( - SubmarineConstants.TF_WORKER_SERVICES_MEMORY), "TF_WORKER_SERVICES_MEMORY_VALUE"); - jinjaParams.put(SubmarineUtils.unifyKey( - SubmarineConstants.TF_WORKER_SERVICES_CPU), "TF_WORKER_SERVICES_CPU_VALUE"); - jinjaParams.put(SubmarineUtils.unifyKey( - SubmarineConstants.TF_WORKER_SERVICES_GPU), "TF_WORKER_SERVICES_GPU_VALUE"); - jinjaParams.put(SubmarineUtils.unifyKey( - SubmarineConstants.WORKER_LAUNCH_CMD), "WORKER_LAUNCH_CMD_VALUE"); - jinjaParams.put(SubmarineUtils.unifyKey( - SubmarineConstants.SUBMARINE_ALGORITHM_HDFS_PATH), - "SUBMARINE_ALGORITHM_HDFS_PATH_VALUE"); - jinjaParams.put(SubmarineUtils.unifyKey( - SubmarineConstants.SUBMARINE_HADOOP_CONF_DIR), "SUBMARINE_HADOOP_CONF_DIR_VALUE"); - jinjaParams.put(SubmarineUtils.unifyKey( - SubmarineConstants.SUBMARINE_HADOOP_KEYTAB), "SUBMARINE_HADOOP_KEYTAB_VALUE"); - jinjaParams.put(SubmarineUtils.unifyKey( - SubmarineConstants.SUBMARINE_HADOOP_PRINCIPAL), "SUBMARINE_HADOOP_PRINCIPAL_VALUE"); - - return jinjaParams; - } -} diff --git a/submarine/src/test/java/org/apache/zeppelin/submarine/PySubmarineInterpreterTest.java b/submarine/src/test/java/org/apache/zeppelin/submarine/PySubmarineInterpreterTest.java deleted file mode 100644 index ebe5ab197d6..00000000000 --- a/submarine/src/test/java/org/apache/zeppelin/submarine/PySubmarineInterpreterTest.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.zeppelin.submarine; - -import org.apache.zeppelin.interpreter.Interpreter; -import org.apache.zeppelin.interpreter.InterpreterContext; -import org.apache.zeppelin.interpreter.InterpreterException; -import org.apache.zeppelin.interpreter.InterpreterGroup; -import org.apache.zeppelin.interpreter.InterpreterResult; -import org.apache.zeppelin.submarine.commons.SubmarineConstants; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -import java.util.LinkedList; -import java.util.Properties; - -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.ZEPPELIN_SUBMARINE_AUTH_TYPE; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; - -class PySubmarineInterpreterTest extends BaseInterpreterTest { - - PySubmarineInterpreter pySubmarineIntp; - protected InterpreterGroup intpGroup; - - @Override - @BeforeEach - public void setUp() throws InterpreterException { - intpGroup = new InterpreterGroup(); - Properties properties = new Properties(); - properties.setProperty(ZEPPELIN_SUBMARINE_AUTH_TYPE, "simple"); - properties.setProperty("zeppelin.python.useIPython", "false"); - properties.setProperty("zeppelin.python.gatewayserver_address", "127.0.0.1"); - properties.setProperty(SubmarineConstants.SUBMARINE_HADOOP_PRINCIPAL, 
"user"); - - pySubmarineIntp = new PySubmarineInterpreter(properties); - - intpGroup.put("note", new LinkedList()); - intpGroup.get("note").add(pySubmarineIntp); - pySubmarineIntp.setInterpreterGroup(intpGroup); - - InterpreterContext.set(getIntpContext()); - pySubmarineIntp.open(); - } - - @Test - void testTensorflow() throws InterpreterException { - String callTensorflowFunc = "import tensorflow as tf\n" + - "print('Installed TensorFlow version:' + tf.__version__)"; - - InterpreterContext intpContext = getIntpContext(); - InterpreterResult intpResult = pySubmarineIntp.interpret(callTensorflowFunc, intpContext); - - // Check if the SubmarineInterpreter performs the tensorlfow function whether successfully. - assertEquals(InterpreterResult.Code.SUCCESS, intpResult.code()); - - // Successfully execute tensorflow to get the version function, - // otherwise it will trigger an exception. - String tfVersionInfo = intpContext.out().getCurrentOutput().toString(); - boolean getVersion = tfVersionInfo.contains("Installed TensorFlow version:"); - assertTrue(getVersion, tfVersionInfo); - } - - @Override - @AfterEach - public void tearDown() throws InterpreterException { - pySubmarineIntp.close(); - intpGroup.close(); - } -} diff --git a/submarine/src/test/java/org/apache/zeppelin/submarine/SubmarineInterpreterTest.java b/submarine/src/test/java/org/apache/zeppelin/submarine/SubmarineInterpreterTest.java deleted file mode 100644 index 2021ac44ac6..00000000000 --- a/submarine/src/test/java/org/apache/zeppelin/submarine/SubmarineInterpreterTest.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.zeppelin.submarine; - -import org.apache.zeppelin.interpreter.InterpreterContext; -import org.apache.zeppelin.interpreter.InterpreterException; -import org.apache.zeppelin.interpreter.InterpreterResult; -import org.apache.zeppelin.submarine.commons.SubmarineConstants; -import org.apache.zeppelin.submarine.job.SubmarineJob; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Properties; - -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.OPERATION_TYPE; -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.ZEPPELIN_SUBMARINE_AUTH_TYPE; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; - -class SubmarineInterpreterTest extends BaseInterpreterTest { - private static Logger LOGGER = LoggerFactory.getLogger(SubmarineInterpreterTest.class); - - private SubmarineInterpreter submarineIntp; - - @Override - @BeforeEach - public void setUp() throws InterpreterException { - Properties properties = new Properties(); - properties.setProperty(ZEPPELIN_SUBMARINE_AUTH_TYPE, "simple"); - properties.setProperty("zeppelin.python.useIPython", "false"); - properties.setProperty("zeppelin.python.gatewayserver_address", "127.0.0.1"); - properties.setProperty(SubmarineConstants.SUBMARINE_HADOOP_KEYTAB, "keytab"); - properties.setProperty(SubmarineConstants.SUBMARINE_HADOOP_PRINCIPAL, "user"); - - 
submarineIntp = new SubmarineInterpreter(properties); - - InterpreterContext.set(getIntpContext()); - submarineIntp.open(); - } - - @Test - void testDashboard() throws InterpreterException { - String script = "dashboard"; - InterpreterContext intpContext = getIntpContext(); - - InterpreterResult interpreterResult = submarineIntp.interpret(script, intpContext); - String message = interpreterResult.toJson(); - LOGGER.info(message); - - assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code()); - assertTrue(intpContext.out().size() >= 2); - - String dashboardTemplate = intpContext.out().getOutputAt(0).toString(); - LOGGER.info(dashboardTemplate); - assertTrue(dashboardTemplate.length() > 500, "Did not generate template!"); - } - - @Test - void testJobRun() throws InterpreterException { - String script = "JOB_RUN"; - InterpreterContext intpContext = getIntpContext(); - - intpContext.getAngularObjectRegistry().add(OPERATION_TYPE, "JOB_RUN", "noteId", "paragraphId"); - - InterpreterResult interpreterResult = submarineIntp.interpret(script, intpContext); - String message = interpreterResult.toJson(); - LOGGER.info(message); - - assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code()); - assertTrue(intpContext.out().size() >= 2); - String template = intpContext.out().getOutputAt(0).toString(); - assertTrue(template.length() > 500, "Did not generate template!"); - - SubmarineJob job = submarineIntp.getSubmarineContext().getSubmarineJob("noteId"); - int loop = 10; - while (loop-- > 0 && !job.getRunning()) { - try { - Thread.sleep(1000); - } catch (InterruptedException e) { - LOGGER.error(e.getMessage(), e); - } - } - LOGGER.info("job.getRunning() = " + job.getRunning()); - } - - @Override - @AfterEach - public void tearDown() throws InterpreterException { - submarineIntp.close(); - } -} diff --git a/submarine/src/test/java/org/apache/zeppelin/submarine/SubmarineJobTest.java 
b/submarine/src/test/java/org/apache/zeppelin/submarine/SubmarineJobTest.java deleted file mode 100644 index d14f9572133..00000000000 --- a/submarine/src/test/java/org/apache/zeppelin/submarine/SubmarineJobTest.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.zeppelin.submarine; - -import com.google.common.io.Resources; -import org.apache.commons.exec.CommandLine; -import org.apache.commons.exec.DefaultExecuteResultHandler; -import org.apache.commons.exec.DefaultExecutor; -import org.apache.commons.exec.ExecuteException; -import org.apache.commons.exec.LogOutputStream; -import org.apache.commons.exec.PumpStreamHandler; -import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.net.URL; -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.atomic.AtomicBoolean; - -public class SubmarineJobTest { - private static Logger LOGGER = LoggerFactory.getLogger(SubmarineJobTest.class); - - private final boolean isWindows = System.getProperty("os.name").startsWith("Windows"); - private final String shell = isWindows ? 
"cmd /c" : "bash -c"; - - private static final String DEFAULT_EXECUTOR_TEST = "DefaultExecutorTest.sh"; - - @Test - void defaultExecutorTest() throws IOException { - DefaultExecutor executor = new DefaultExecutor(); - CommandLine cmdLine = CommandLine.parse(shell); - - URL urlTemplate = Resources.getResource(DEFAULT_EXECUTOR_TEST); - - cmdLine.addArgument(urlTemplate.getFile(), false); - - Map env = new HashMap<>(); - env.put("CLASSPATH", "`$HADOOP_HDFS_HOME/bin/hadoop classpath --glob`"); - env.put("EVN", "test"); - - AtomicBoolean cmdLineRunning = new AtomicBoolean(true); - StringBuffer sbLogOutput = new StringBuffer(); - executor.setStreamHandler(new PumpStreamHandler(new LogOutputStream() { - @Override - protected void processLine(String line, int level) { - //LOGGER.info(line); - sbLogOutput.append(line + "\n"); - } - })); - - executor.execute(cmdLine, env, new DefaultExecuteResultHandler() { - @Override - public void onProcessComplete(int exitValue) { - cmdLineRunning.set(false); - } - @Override - public void onProcessFailed(ExecuteException e) { - cmdLineRunning.set(false); - LOGGER.error(e.getMessage()); - } - }); - int loopCount = 100; - while ((loopCount-- > 0) && cmdLineRunning.get()) { - try { - Thread.sleep(1000); - } catch (InterruptedException e) { - e.printStackTrace(); - } - } - - LOGGER.info(sbLogOutput.toString()); - } -} diff --git a/submarine/src/test/java/org/apache/zeppelin/submarine/YarnClientTest.java b/submarine/src/test/java/org/apache/zeppelin/submarine/YarnClientTest.java deleted file mode 100644 index bcfd0c36c88..00000000000 --- a/submarine/src/test/java/org/apache/zeppelin/submarine/YarnClientTest.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.zeppelin.submarine; - -import com.google.common.base.Charsets; -import com.google.common.io.Resources; -import org.apache.zeppelin.submarine.commons.SubmarineConstants; -import org.apache.zeppelin.submarine.hadoop.YarnClient; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; - -import java.io.IOException; -import java.net.URL; -import java.util.List; -import java.util.Map; -import java.util.Properties; - -import static org.apache.zeppelin.submarine.commons.SubmarineConstants.ZEPPELIN_SUBMARINE_AUTH_TYPE; -import static org.junit.jupiter.api.Assertions.assertNotNull; - -public class YarnClientTest { - - private static YarnClient yarnClient = null; - - @BeforeAll - public static void initEnv() { - Properties properties = new Properties(); - properties.setProperty(ZEPPELIN_SUBMARINE_AUTH_TYPE, "simple"); - properties.setProperty("zeppelin.python.useIPython", "false"); - properties.setProperty("zeppelin.python.gatewayserver_address", "127.0.0.1"); - properties.setProperty(SubmarineConstants.SUBMARINE_HADOOP_KEYTAB, "keytab"); - properties.setProperty(SubmarineConstants.SUBMARINE_HADOOP_PRINCIPAL, "user"); - yarnClient = new YarnClient(properties); - } - - @Test - void testParseAppAttempts() throws IOException { - String jsonFile = "ws-v1-cluster-apps-application_id-appattempts.json"; - URL urlJson = Resources.getResource(jsonFile); - String jsonContent = Resources.toString(urlJson, Charsets.UTF_8); - - List> list = yarnClient.parseAppAttempts(jsonContent); - - assertNotNull(list); - } - - @Test - 
void testParseAppAttemptsContainers() throws IOException { - String jsonFile = "ws-v1-cluster-apps-application_id-appattempts-appattempt_id-containers.json"; - URL urlJson = Resources.getResource(jsonFile); - String jsonContent = Resources.toString(urlJson, Charsets.UTF_8); - - List> list = yarnClient.parseAppAttemptsContainers(jsonContent); - - assertNotNull(list.get(0).get(YarnClient.HOST_IP)); - assertNotNull(list.get(0).get(YarnClient.HOST_PORT)); - assertNotNull(list.get(0).get(YarnClient.CONTAINER_PORT)); - - assertNotNull(list); - } - - @Test - void testParseClusterApps() throws IOException { - String jsonFile = "ws-v1-cluster-apps-application_id-finished.json"; - URL urlJson = Resources.getResource(jsonFile); - String jsonContent = Resources.toString(urlJson, Charsets.UTF_8); - - Map list = yarnClient.parseClusterApps(jsonContent); - - assertNotNull(list); - } -} diff --git a/submarine/src/test/resources/DefaultExecutorTest.sh b/submarine/src/test/resources/DefaultExecutorTest.sh deleted file mode 100755 index 15c76e89355..00000000000 --- a/submarine/src/test/resources/DefaultExecutorTest.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/bash -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -echo $CLASSPATH -echo $EVN diff --git a/submarine/src/test/resources/app-v1-services-app_name.json b/submarine/src/test/resources/app-v1-services-app_name.json deleted file mode 100644 index 97480a2af6a..00000000000 --- a/submarine/src/test/resources/app-v1-services-app_name.json +++ /dev/null @@ -1,145 +0,0 @@ -{ - "name":"hzliuxun-tb", - "id":"application_1550650473498_0020", - "artifact":{ - "type":"DOCKER" - }, - "lifetime":-1, - "components":[ - { - "name":"tensorboard", - "dependencies":[ - - ], - "artifact":{ - "id":"10.120.196.232:5000/dockerfile-cpu-tf1.8.0-with-models:1.1", - "type":"DOCKER" - }, - "resource":{ - "cpus":1, - "memory":"4096", - "additional":{ - "memory-mb":{ - "value":4096, - "unit":"Mi", - "attributes":{ - - }, - "tags":[ - - ] - }, - "yarn.io/gpu":{ - "value":0, - "unit":"", - "attributes":{ - - }, - "tags":[ - - ] - }, - "vcores":{ - "value":1, - "unit":"", - "attributes":{ - - }, - "tags":[ - - ] - } - } - }, - "state":"STABLE", - "configuration":{ - "properties":{ - - }, - "env":{ - "_TASK_TYPE":"TENSORBOARD", - "_TASK_INDEX":"${COMPONENT_ID}", - "DOCKER_JAVA_HOME":"/usr/lib/jvm/java-8-openjdk-amd64/jre", - "TZ":"Asia/Shanghai", - "YARN_CONTAINER_RUNTIME_DOCKER_CONTAINER_NETWORK":"bridge", - "DOCKER_HADOOP_HDFS_HOME":"/hadoop-current", - "YARN_CONTAINER_RUNTIME_DOCKER_MOUNTS":"/etc/passwd:/etc/passwd:ro,/etc/krb5.conf:/etc/krb5.conf:ro" - }, - "files":[ - { - "type":"STATIC", - "properties":{ - - }, - "dest_file":"core-site.xml", - "src_file":"hdfs://mldev/user/bdms_hzliuxun/submarine/jobs/hzliuxun-tb/staging/core-site.xml" - }, - { - "type":"STATIC", - "properties":{ - - }, - "dest_file":"hdfs-site.xml", - "src_file":"hdfs://mldev/user/bdms_hzliuxun/submarine/jobs/hzliuxun-tb/staging/hdfs-site.xml" - }, - { - "type":"STATIC", - "properties":{ - - }, - "dest_file":"run-TENSORBOARD.sh", - "src_file":"hdfs://mldev/user/bdms_hzliuxun/submarine/jobs/hzliuxun-tb/staging/TENSORBOARD-launch-script1579958783280532432.sh" - } - ] - }, - 
"quicklinks":[ - - ], - "containers":[ - { - "id":"container_e324_1550650473498_0020_01_000002", - "ip":"172.18.0.3", - "hostname":"tensorboard-0.hzliuxun-tb.bdms-hzliuxun.submarinecluster", - "state":"READY", - "launch_time":1551419055511, - "bare_host":"ml2.jd.163.org", - "component_instance_name":"tensorboard-0" - } - ], - "launch_command":"./run-TENSORBOARD.sh", - "number_of_containers":1, - "decommissioned_instances":[ - - ], - "run_privileged_container":false, - "restart_policy":"NEVER" - } - ], - "configuration":{ - "properties":{ - - }, - "env":{ - "DOCKER_JAVA_HOME":"/usr/lib/jvm/java-8-openjdk-amd64/jre", - "TZ":"Asia/Shanghai", - "YARN_CONTAINER_RUNTIME_DOCKER_CONTAINER_NETWORK":"bridge", - "DOCKER_HADOOP_HDFS_HOME":"/hadoop-current", - "YARN_CONTAINER_RUNTIME_DOCKER_MOUNTS":"/etc/passwd:/etc/passwd:ro,/etc/krb5.conf:/etc/krb5.conf:ro" - }, - "files":[ - - ] - }, - "state":"STARTED", - "quicklinks":{ - "Tensorboard":"http://tensorboard-0.hzliuxun-tb.hadoop.submarinecluster:6006" - }, - "version":"1551419039827", - "kerberos_principal":{ - "principal_name":"bdms_hzliuxun/dev@BDMS.163.COM", - "keytab":"hdfs://mldev/user/bdms_hzliuxun/submarine/jobs/hzliuxun-tb/staging/hzliuxun.keytab" - }, - "dependencies":[ - - ] -} \ No newline at end of file diff --git a/submarine/src/test/resources/log4j.properties b/submarine/src/test/resources/log4j.properties deleted file mode 100644 index 9c22fdc83bd..00000000000 --- a/submarine/src/test/resources/log4j.properties +++ /dev/null @@ -1,47 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# Direct log messages to stdout -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.Target=System.out -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=%d{ABSOLUTE} %5p %c:%L - %m%n -#log4j.appender.stdout.layout.ConversionPattern= -#%5p [%t] (%F:%L) - %m%n -#%-4r [%t] %-5p %c %x - %m%n -# - -# Root logger option -log4j.rootLogger=INFO, stdout - -#mute some noisy guys -log4j.logger.org.apache.hadoop.mapred=WARN -log4j.logger.org.apache.hadoop.hive.ql=WARN -log4j.logger.org.apache.hadoop.hive.metastore=WARN -log4j.logger.org.apache.haadoop.hive.service.HiveServer=WARN - -log4j.logger.org.quartz=WARN -log4j.logger.DataNucleus=WARN -log4j.logger.DataNucleus.MetaData=ERROR -log4j.logger.DataNucleus.Datastore=ERROR - -# Log all JDBC parameters -log4j.logger.org.hibernate.type=ALL -log4j.logger.org.apache.hadoop=WARN - -log4j.logger.org.apache.zeppelin.interpreter=DEBUG -log4j.logger.org.apache.zeppelin.scheduler=DEBUG diff --git a/submarine/src/test/resources/ws-v1-cluster-apps-application_id-appattempts-appattempt_id-containers.json b/submarine/src/test/resources/ws-v1-cluster-apps-application_id-appattempts-appattempt_id-containers.json deleted file mode 100644 index bdeb18cb3cd..00000000000 --- a/submarine/src/test/resources/ws-v1-cluster-apps-application_id-appattempts-appattempt_id-containers.json +++ /dev/null @@ -1,68 +0,0 @@ -{ - "container": [ - { - "containerId": "container_e312_1547632280649_0002_01_000002", - "allocatedMB": "8192", - "allocatedVCores": "2", - 
"assignedNodeId": "ml2.jd.163.org:34216", - "priority": "0", - "startedTime": "1547637739965", - "finishedTime": "0", - "elapsedTime": "13690019", - "logUrl": "http://ml2.jd.163.org:8042/node/containerlogs/container_e312_1547632280649_0002_01_000002/hadoop", - "containerExitStatus": "0", - "containerState": "RUNNING", - "nodeHttpAddress": "http://ml2.jd.163.org:8042", - "nodeId": "ml2.jd.163.org:34216", - "allocatedResources": { - "entry": [ - { - "key": "memory-mb", - "value": "8192" - }, - { - "key": "yarn.io/gpu", - "value": "1" - }, - { - "key": "vcores", - "value": "2" - } - ] - }, - "exposedPorts": "{\"29914/tcp\":[{\"HostIp\":\"0.0.0.0\",\"HostPort\":\"32816\"}],\"8080/tcp\":[{\"HostIp\":\"0.0.0.0\",\"HostPort\":\"32817\"}]}" - }, - { - "containerId": "container_e312_1547632280649_0002_01_000001", - "allocatedMB": "1024", - "allocatedVCores": "1", - "assignedNodeId": "ml2.jd.163.org:34216", - "priority": "0", - "startedTime": "1547637729953", - "finishedTime": "0", - "elapsedTime": "13700031", - "logUrl": "http://ml2.jd.163.org:8042/node/containerlogs/container_e312_1547632280649_0002_01_000001/hadoop", - "containerExitStatus": "0", - "containerState": "RUNNING", - "nodeHttpAddress": "http://ml2.jd.163.org:8042", - "nodeId": "ml2.jd.163.org:34216", - "allocatedResources": { - "entry": [ - { - "key": "memory-mb", - "value": "1024" - }, - { - "key": "yarn.io/gpu", - "value": "0" - }, - { - "key": "vcores", - "value": "1" - } - ] - }, - "exposedPorts": "" - } - ] -} diff --git a/submarine/src/test/resources/ws-v1-cluster-apps-application_id-appattempts.json b/submarine/src/test/resources/ws-v1-cluster-apps-application_id-appattempts.json deleted file mode 100644 index 15e823f174b..00000000000 --- a/submarine/src/test/resources/ws-v1-cluster-apps-application_id-appattempts.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "appAttempts": { - "appAttempt": [ - { - "id": 1, - "startTime": 1547637729548, - "finishedTime": 0, - "containerId": 
"container_e312_1547632280649_0002_01_000001", - "nodeHttpAddress": "ml2.jd.163.org:8042", - "nodeId": "ml2.jd.163.org:34216", - "logsLink": "http://ml2.jd.163.org:8042/node/containerlogs/container_e312_1547632280649_0002_01_000001/hadoop", - "blacklistedNodes": "", - "nodesBlacklistedBySystem": "", - "appAttemptId": "appattempt_1547632280649_0002_000001", - "exportPorts": "null" - } - ] - } -} diff --git a/submarine/src/test/resources/ws-v1-cluster-apps-application_id-failed.json b/submarine/src/test/resources/ws-v1-cluster-apps-application_id-failed.json deleted file mode 100644 index f1cd047aa1f..00000000000 --- a/submarine/src/test/resources/ws-v1-cluster-apps-application_id-failed.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "app": { - "id": "application_1550650473498_0003", - "user": "bdms_hzliuxun", - "name": "hzliuxun-2e3fdwarx", - "queue": "default", - "state": "FAILED", - "finalStatus": "FAILED", - "progress": 0, - "trackingUI": "History", - "trackingUrl": "http://ml2.jd.163.org:8188/applicationhistory/app/application_1550650473498_0003", - "diagnostics": "Application application_1550650473498_0003 failed 3 times due to AM Container for appattempt_1550650473498_0003_000003 exited with exitCode: -1000\nFailing this attempt.Diagnostics: [2019-02-27 13:57:19.178]Application application_1550650473498_0003 initialization failed (exitCode=255) with output: main : command provided 0\nmain : run as user is bdms_hzliuxun\nmain : requested yarn user is bdms_hzliuxun\nUser bdms_hzliuxun not found\n\nFor more detailed output, check the application tracking page: http://ml2.jd.163.org:8188/applicationhistory/app/application_1550650473498_0003 Then click on links to logs of each attempt.\n. 
Failing the application.", - "clusterId": 1550650473498, - "applicationType": "yarn-service", - "applicationTags": "name: hzliuxun-2e3fdwarx", - "priority": 0, - "startedTime": 1551247036457, - "launchTime": 1551247037087, - "finishedTime": 1551247039182, - "elapsedTime": 2725, - "amContainerLogs": "http://ml2.jd.163.org:8042/node/containerlogs/container_e324_1550650473498_0003_03_000001/bdms_hzliuxun", - "amHostHttpAddress": "ml2.jd.163.org:8042", - "masterNodeId": "ml2.jd.163.org:36797", - "allocatedMB": -1, - "allocatedVCores": -1, - "reservedMB": -1, - "reservedVCores": -1, - "runningContainers": -1, - "memorySeconds": 195, - "vcoreSeconds": 0, - "queueUsagePercentage": 0, - "clusterUsagePercentage": 0, - "resourceSecondsMap": { - "entry": { - "key": "vcores", - "value": "0" - } - }, - "preemptedResourceMB": 0, - "preemptedResourceVCores": 0, - "numNonAMContainerPreempted": 0, - "numAMContainerPreempted": 0, - "preemptedMemorySeconds": 0, - "preemptedVcoreSeconds": 0, - "preemptedResourceSecondsMap": {}, - "logAggregationStatus": "SUCCEEDED", - "unmanagedApplication": false, - "amNodeLabelExpression": "", - "timeouts": { - "timeout": [ - { - "type": "LIFETIME", - "expiryTime": "UNLIMITED", - "remainingTimeInSeconds": -1 - } - ] - } - } -} \ No newline at end of file diff --git a/submarine/src/test/resources/ws-v1-cluster-apps-application_id-finished.json b/submarine/src/test/resources/ws-v1-cluster-apps-application_id-finished.json deleted file mode 100644 index 23ee4ee8123..00000000000 --- a/submarine/src/test/resources/ws-v1-cluster-apps-application_id-finished.json +++ /dev/null @@ -1,60 +0,0 @@ -{ - "app": { - "id": "application_1550650473498_0015", - "user": "bdms_hzliuxun", - "name": "hzliuxun-2e3fdwarx", - "queue": "default", - "state": "FINISHED", - "finalStatus": "SUCCEEDED", - "progress": 100, - "trackingUI": "History", - "trackingUrl": "http://ml2.jd.163.org:8088/proxy/application_1550650473498_0015/", - "diagnostics": "", - "clusterId": 
1550650473498, - "applicationType": "yarn-service", - "applicationTags": "name: hzliuxun-2e3fdwarx", - "priority": 0, - "startedTime": 1551346859832, - "launchTime": 1551346860606, - "finishedTime": 1551347003968, - "elapsedTime": 144136, - "amContainerLogs": "http://ml2.jd.163.org:8042/node/containerlogs/container_e324_1550650473498_0015_01_000001/bdms_hzliuxun", - "amHostHttpAddress": "ml2.jd.163.org:8042", - "amRPCAddress": "ml2.jd.163.org:34038", - "masterNodeId": "ml2.jd.163.org:36797", - "allocatedMB": -1, - "allocatedVCores": -1, - "reservedMB": -1, - "reservedVCores": -1, - "runningContainers": -1, - "memorySeconds": 700242, - "vcoreSeconds": 300, - "queueUsagePercentage": 0, - "clusterUsagePercentage": 0, - "resourceSecondsMap": { - "entry": { - "key": "vcores", - "value": "300" - } - }, - "preemptedResourceMB": 0, - "preemptedResourceVCores": 0, - "numNonAMContainerPreempted": 0, - "numAMContainerPreempted": 0, - "preemptedMemorySeconds": 0, - "preemptedVcoreSeconds": 0, - "preemptedResourceSecondsMap": {}, - "logAggregationStatus": "SUCCEEDED", - "unmanagedApplication": false, - "amNodeLabelExpression": "", - "timeouts": { - "timeout": [ - { - "type": "LIFETIME", - "expiryTime": "UNLIMITED", - "remainingTimeInSeconds": -1 - } - ] - } - } -} \ No newline at end of file diff --git a/submarine/src/test/resources/ws-v1-cluster-apps-application_id-running.json b/submarine/src/test/resources/ws-v1-cluster-apps-application_id-running.json deleted file mode 100644 index 66c5cbc4fd7..00000000000 --- a/submarine/src/test/resources/ws-v1-cluster-apps-application_id-running.json +++ /dev/null @@ -1,277 +0,0 @@ -{ - "app": { - "id": "application_1550650473498_0018", - "user": "bdms_hzliuxun", - "name": "submarine-shared-process", - "queue": "default", - "state": "RUNNING", - "finalStatus": "UNDEFINED", - "progress": 100, - "trackingUI": "ApplicationMaster", - "trackingUrl": "http://ml2.jd.163.org:8088/proxy/application_1550650473498_0018/", - "diagnostics": "", - 
"clusterId": 1550650473498, - "applicationType": "yarn-service", - "applicationTags": "name: submarine-shared-process", - "priority": 0, - "startedTime": 1551407952905, - "launchTime": 1551407953270, - "finishedTime": 0, - "elapsedTime": 4357547, - "amContainerLogs": "http://ml2.jd.163.org:8042/node/containerlogs/container_e324_1550650473498_0018_01_000001/bdms_hzliuxun", - "amHostHttpAddress": "ml2.jd.163.org:8042", - "amRPCAddress": "ml2.jd.163.org:40948", - "masterNodeId": "ml2.jd.163.org:36797", - "allocatedMB": 9216, - "allocatedVCores": 2, - "reservedMB": 0, - "reservedVCores": 0, - "runningContainers": 2, - "memorySeconds": 40047400, - "vcoreSeconds": 8698, - "queueUsagePercentage": 5, - "clusterUsagePercentage": 5, - "resourceSecondsMap": { - "entry": { - "key": "vcores", - "value": "8698" - } - }, - "preemptedResourceMB": 0, - "preemptedResourceVCores": 0, - "numNonAMContainerPreempted": 0, - "numAMContainerPreempted": 0, - "preemptedMemorySeconds": 0, - "preemptedVcoreSeconds": 0, - "preemptedResourceSecondsMap": {}, - "logAggregationStatus": "NOT_START", - "unmanagedApplication": false, - "amNodeLabelExpression": "", - "resourceInfo": { - "resourceUsagesByPartition": [ - { - "partitionName": "", - "used": { - "memory": 9216, - "vCores": 2, - "resourceInformations": { - "resourceInformation": [ - { - "attributes": {}, - "maximumAllocation": 9223372036854776000, - "minimumAllocation": 0, - "name": "memory-mb", - "resourceType": "COUNTABLE", - "units": "Mi", - "value": 9216 - }, - { - "attributes": {}, - "maximumAllocation": 9223372036854776000, - "minimumAllocation": 0, - "name": "vcores", - "resourceType": "COUNTABLE", - "units": "", - "value": 2 - }, - { - "attributes": {}, - "maximumAllocation": 9223372036854776000, - "minimumAllocation": 0, - "name": "yarn.io/gpu", - "resourceType": "COUNTABLE", - "units": "", - "value": 0 - } - ] - } - }, - "reserved": { - "memory": 0, - "vCores": 0, - "resourceInformations": { - "resourceInformation": [ - { - 
"attributes": {}, - "maximumAllocation": 9223372036854776000, - "minimumAllocation": 0, - "name": "memory-mb", - "resourceType": "COUNTABLE", - "units": "Mi", - "value": 0 - }, - { - "attributes": {}, - "maximumAllocation": 9223372036854776000, - "minimumAllocation": 0, - "name": "vcores", - "resourceType": "COUNTABLE", - "units": "", - "value": 0 - }, - { - "attributes": {}, - "maximumAllocation": 9223372036854776000, - "minimumAllocation": 0, - "name": "yarn.io/gpu", - "resourceType": "COUNTABLE", - "units": "", - "value": 0 - } - ] - } - }, - "pending": { - "memory": 0, - "vCores": 0, - "resourceInformations": { - "resourceInformation": [ - { - "attributes": {}, - "maximumAllocation": 9223372036854776000, - "minimumAllocation": 0, - "name": "memory-mb", - "resourceType": "COUNTABLE", - "units": "Mi", - "value": 0 - }, - { - "attributes": {}, - "maximumAllocation": 9223372036854776000, - "minimumAllocation": 0, - "name": "vcores", - "resourceType": "COUNTABLE", - "units": "", - "value": 0 - }, - { - "attributes": {}, - "maximumAllocation": 9223372036854776000, - "minimumAllocation": 0, - "name": "yarn.io/gpu", - "resourceType": "COUNTABLE", - "units": "", - "value": 0 - } - ] - } - }, - "amUsed": { - "memory": 1024, - "vCores": 1, - "resourceInformations": { - "resourceInformation": [ - { - "attributes": {}, - "maximumAllocation": 9223372036854776000, - "minimumAllocation": 0, - "name": "memory-mb", - "resourceType": "COUNTABLE", - "units": "Mi", - "value": 1024 - }, - { - "attributes": {}, - "maximumAllocation": 9223372036854776000, - "minimumAllocation": 0, - "name": "vcores", - "resourceType": "COUNTABLE", - "units": "", - "value": 1 - }, - { - "attributes": {}, - "maximumAllocation": 9223372036854776000, - "minimumAllocation": 0, - "name": "yarn.io/gpu", - "resourceType": "COUNTABLE", - "units": "", - "value": 0 - } - ] - } - }, - "amLimit": { - "memory": 0, - "vCores": 0, - "resourceInformations": { - "resourceInformation": [ - { - "attributes": {}, - 
"maximumAllocation": 9223372036854776000, - "minimumAllocation": 0, - "name": "memory-mb", - "resourceType": "COUNTABLE", - "units": "Mi", - "value": 0 - }, - { - "attributes": {}, - "maximumAllocation": 9223372036854776000, - "minimumAllocation": 0, - "name": "vcores", - "resourceType": "COUNTABLE", - "units": "", - "value": 0 - }, - { - "attributes": {}, - "maximumAllocation": 9223372036854776000, - "minimumAllocation": 0, - "name": "yarn.io/gpu", - "resourceType": "COUNTABLE", - "units": "", - "value": 0 - } - ] - } - }, - "userAmLimit": { - "memory": 0, - "vCores": 0, - "resourceInformations": { - "resourceInformation": [ - { - "attributes": {}, - "maximumAllocation": 9223372036854776000, - "minimumAllocation": 0, - "name": "memory-mb", - "resourceType": "COUNTABLE", - "units": "Mi", - "value": 0 - }, - { - "attributes": {}, - "maximumAllocation": 9223372036854776000, - "minimumAllocation": 0, - "name": "vcores", - "resourceType": "COUNTABLE", - "units": "", - "value": 0 - }, - { - "attributes": {}, - "maximumAllocation": 9223372036854776000, - "minimumAllocation": 0, - "name": "yarn.io/gpu", - "resourceType": "COUNTABLE", - "units": "", - "value": 0 - } - ] - } - } - } - ] - }, - "timeouts": { - "timeout": [ - { - "type": "LIFETIME", - "expiryTime": "UNLIMITED", - "remainingTimeInSeconds": -1 - } - ] - } - } -} \ No newline at end of file diff --git a/testing/downloadLivy.sh b/testing/downloadLivy.sh index 7f2faf3ffe0..f09837a7574 100755 --- a/testing/downloadLivy.sh +++ b/testing/downloadLivy.sh @@ -16,13 +16,21 @@ # limitations under the License. # -if [[ "$#" -ne 1 ]]; then - echo "usage) $0 [livy version]" - echo " eg) $0 0.2" +if [[ "$#" -ne 1 && "$#" -ne 2 ]]; then + echo "usage) $0 [scala version]" + echo " eg) $0 0.7.1-incubating" + echo " $0 0.8.0-incubating 2.11" exit 0 fi +# See simple version normalization: +# http://stackoverflow.com/questions/16989598/bash-comparing-version-numbers +function version { echo "$@" | awk -F. 
'{ printf("%03d%03d%03d\n", $1,$2,$3); }'; } LIVY_VERSION="${1}" +SCALA_VERSION_SUFFIX="" +if [ $(version $LIVY_VERSION) -ge $(version "0.8.0") ]; then + SCALA_VERSION_SUFFIX="_${2}" +fi set -xe @@ -49,7 +57,7 @@ download_with_retry() { } LIVY_CACHE=".livy-dist" -LIVY_ARCHIVE="apache-livy-${LIVY_VERSION}-bin" +LIVY_ARCHIVE="apache-livy-${LIVY_VERSION}${SCALA_VERSION_SUFFIX}-bin" export LIVY_HOME="${ZEPPELIN_HOME}/livy-server-$LIVY_VERSION" echo "LIVY_HOME is ${LIVY_HOME}" diff --git a/zeppelin-client-examples/pom.xml b/zeppelin-client-examples/pom.xml index 549aa939f32..8ebc19bd7c9 100644 --- a/zeppelin-client-examples/pom.xml +++ b/zeppelin-client-examples/pom.xml @@ -24,7 +24,7 @@ zeppelin org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../pom.xml diff --git a/zeppelin-client/pom.xml b/zeppelin-client/pom.xml index ac94364825d..8b6929fef64 100644 --- a/zeppelin-client/pom.xml +++ b/zeppelin-client/pom.xml @@ -24,7 +24,7 @@ zeppelin org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../pom.xml diff --git a/zeppelin-common/pom.xml b/zeppelin-common/pom.xml index 1a2441ddc98..f1b5773419f 100644 --- a/zeppelin-common/pom.xml +++ b/zeppelin-common/pom.xml @@ -24,7 +24,7 @@ zeppelin org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../pom.xml diff --git a/zeppelin-distribution/pom.xml b/zeppelin-distribution/pom.xml index b652b5f9a17..8d88f55ea7b 100644 --- a/zeppelin-distribution/pom.xml +++ b/zeppelin-distribution/pom.xml @@ -23,7 +23,7 @@ zeppelin org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../pom.xml diff --git a/zeppelin-distribution/src/bin_license/LICENSE b/zeppelin-distribution/src/bin_license/LICENSE index 805fb747a74..0d5a9ce3260 100644 --- a/zeppelin-distribution/src/bin_license/LICENSE +++ b/zeppelin-distribution/src/bin_license/LICENSE @@ -5,9 +5,9 @@ The following components are provided under Apache License. 
(Apache 2.0) Amazon Web Services SDK for Java v1.11.736 (https://aws.amazon.com/sdk-for-java/) - https://raw.githubusercontent.com/aws/aws-sdk-java/1.11.736/LICENSE.txt (Apache 2.0) JavaEWAH v0.7.9 (https://github.com/lemire/javaewah) - https://github.com/lemire/javaewah/blob/master/LICENSE-2.0.txt (Apache 2.0) Apache Commons Logging (commons-logging:commons-logging:1.1.1 - http://commons.apache.org/proper/commons-logging/) - (Apache 2.0) Apache Commons Codec (commons-codec:commons-codec:1.5 - http://commons.apache.org/proper/commons-codec/) + (Apache 2.0) Apache Commons Codec (commons-codec:commons-codec:1.16.1 - http://commons.apache.org/proper/commons-codec/) (Apache 2.0) Apache Commons Collections (commons-collections:commons-collections:3.2.2 - http://commons.apache.org/proper/commons-configuration/) - (Apache 2.0) Apache Commons Compress (org.apache.commons:commons-compress:1.21 - http://commons.apache.org/proper/commons-compress/) + (Apache 2.0) Apache Commons Compress (org.apache.commons:commons-compress:1.26.1 - http://commons.apache.org/proper/commons-compress/) (Apache 2.0) Apache Commons Configuration (org.apache.commons:commons-configuration2:2.8.0 - http://commons.apache.org/configuration/) (Apache 2.0) Apache Commons CLI (commons-cli:commons-cli:1.2 - http://commons.apache.org/cli/) (Apache 2.0) Apache Commons Exec (commons-exec:commons-exec:1.3 - http://commons.apache.org/exec/) @@ -15,12 +15,12 @@ The following components are provided under Apache License. 
(Apache 2.0) Http Components (org.apache.httpcomponents:httpclient:4.3.6 - https://github.com/apache/httpclient) (Apache 2.0) Http Components (org.apache.httpcomponents:httpasyncclient:4.0.2 - https://github.com/apache/httpclient) (Apache 2.0) Apache Commons Lang (org.apache.commons:commons-lang:2.6 - http://commons.apache.org/proper/commons-lang/) - (Apache 2.0) Apache Commons Lang 3 (org.apache.commons:commons-lang3:3.10 - http://commons.apache.org/proper/commons-lang/) + (Apache 2.0) Apache Commons Lang 3 (org.apache.commons:commons-lang3:3.14.0 - http://commons.apache.org/proper/commons-lang/) (Apache 2.0) Apache Commons Math 3 (org.apache.commons:commons-math3:3.6.1 - http://commons.apache.org/proper/commons-math/) (Apache 2.0) Apache Commons Net (commons-net:commons-net:2.2 - http://commons.apache.org/proper/commons-net/) (Apache 2.0) Apache Commons Pool2 (commons-exec:commons-pool2:2.3 - https://commons.apache.org/proper/commons-pool/) (Apache 2.0) Apache Commons FileUpload (commons-fileupload:commons-fileupload:1.3.1 - http://commons.apache.org/fileupload/) - (Apache 2.0) Apache Commons IO (commons-io:commons-io:2.7 - http://commons.apache.org/io/) + (Apache 2.0) Apache Commons IO (commons-io:commons-io:2.15.1 - http://commons.apache.org/io/) (Apache 2.0) Apache Commons VFS2 (org.apache.commons:commons-vfs2:2.0 - https://commons.apache.org/proper/commons-vfs/) (Apache 2.0) Apache Jackrabbit webdav (org.apache.jackrabbit:jackrabbit-webdav:jar:1.5.2 - https://jackrabbit.apache.org/jcr/components/jackrabbit-webdav-library.html) (Apache 2.0) Apache Jackrabbit JCR commons - http://jackrabbit.apache.org/jcr/components/jackrabbit-jcr-commons.html) @@ -305,15 +305,15 @@ The text of each license is also included at licenses/LICENSE-[project]-[version (New BSD license) Protocol Buffer Java API (com.google.protobuf:protobuf-java-util:3.21.7 - https://developers.google.com/protocol-buffers/) (New BSD license) Protocol Buffer JavaNano API 
(com.google.protobuf.nano:protobuf-javanano:3.21.7 - https://developers.google.com/protocol-buffers/) (BSD) JSch (com.jcraft:jsch:0.1.42 - http://www.jcraft.com/jsch/) - (BSD 3-Clause) io.grpc:grpc-all (io.grpc:grpc-all:1.51.0 - https://github.com/grpc/grpc-java) - (BSD 3-Clause) io.grpc:grpc-auth (io.grpc:grpc-auth:1.51.0 - https://github.com/grpc/grpc-java) - (BSD 3-Clause) io.grpc:grpc-core (io.grpc:grpc-core:1.51.0 - https://github.com/grpc/grpc-java) - (BSD 3-Clause) io.grpc:grpc-netty (io.grpc:grpc-netty:1.51.0 - https://github.com/grpc/grpc-java) - (BSD 3-Clause) io.grpc:grpc-okhttp (io.grpc:grpc-okhttp:1.51.0 - https://github.com/grpc/grpc-java) - (BSD 3-Clause) io.grpc:grpc-protobuf (io.grpc:grpc-protobuf:1.51.0 - https://github.com/grpc/grpc-java) - (BSD 3-Clause) io.grpc:grpc-protobuf-lite (io.grpc:grpc-protobuf-lite:1.51.0 - https://github.com/grpc/grpc-java) - (BSD 3-Clause) io.grpc:grpc-protobuf-nano (io.grpc:grpc-protobuf-nano:1.51.0 - https://github.com/grpc/grpc-java) - (BSD 3-Clause) io.grpc:grpc-stub (io.grpc:grpc-stub:1.51.0 - https://github.com/grpc/grpc-java) + (BSD 3-Clause) io.grpc:grpc-all (io.grpc:grpc-all:1.55.1 - https://github.com/grpc/grpc-java) + (BSD 3-Clause) io.grpc:grpc-auth (io.grpc:grpc-auth:1.55.1 - https://github.com/grpc/grpc-java) + (BSD 3-Clause) io.grpc:grpc-core (io.grpc:grpc-core:1.55.1 - https://github.com/grpc/grpc-java) + (BSD 3-Clause) io.grpc:grpc-netty (io.grpc:grpc-netty:1.55.1 - https://github.com/grpc/grpc-java) + (BSD 3-Clause) io.grpc:grpc-okhttp (io.grpc:grpc-okhttp:1.55.1 - https://github.com/grpc/grpc-java) + (BSD 3-Clause) io.grpc:grpc-protobuf (io.grpc:grpc-protobuf:1.55.1 - https://github.com/grpc/grpc-java) + (BSD 3-Clause) io.grpc:grpc-protobuf-lite (io.grpc:grpc-protobuf-lite:1.55.1 - https://github.com/grpc/grpc-java) + (BSD 3-Clause) io.grpc:grpc-protobuf-nano (io.grpc:grpc-protobuf-nano:1.55.1 - https://github.com/grpc/grpc-java) + (BSD 3-Clause) io.grpc:grpc-stub (io.grpc:grpc-stub:1.55.1 - 
https://github.com/grpc/grpc-java) (BSD-3-Clause) Kryo 3.0.3 (com.esotericsoftware:kryo-shaded:3.0.3 - https://github.com/EsotericSoftware/kryo) (BSD-3-Clause) Minilog 1.3.0 (com.esotericsoftware:minlog:1.3.0 - https://github.com/EsotericSoftware/minlog) diff --git a/zeppelin-examples/pom.xml b/zeppelin-examples/pom.xml index c26f3845da7..f12cca18ca2 100644 --- a/zeppelin-examples/pom.xml +++ b/zeppelin-examples/pom.xml @@ -23,7 +23,7 @@ zeppelin org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 .. diff --git a/zeppelin-examples/zeppelin-example-clock/pom.xml b/zeppelin-examples/zeppelin-example-clock/pom.xml index 981b73ba10e..6dd8a3d87e7 100644 --- a/zeppelin-examples/zeppelin-example-clock/pom.xml +++ b/zeppelin-examples/zeppelin-example-clock/pom.xml @@ -23,13 +23,13 @@ zeppelin-examples org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 .. zeppelin-example-clock jar - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 Zeppelin: Example application - Clock diff --git a/zeppelin-examples/zeppelin-example-horizontalbar/pom.xml b/zeppelin-examples/zeppelin-example-horizontalbar/pom.xml index bf841d29f98..9e0aeef2e79 100644 --- a/zeppelin-examples/zeppelin-example-horizontalbar/pom.xml +++ b/zeppelin-examples/zeppelin-example-horizontalbar/pom.xml @@ -23,13 +23,13 @@ zeppelin-examples org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 .. zeppelin-example-horizontalbar jar - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 Zeppelin: Example application - Horizontal Bar chart diff --git a/zeppelin-examples/zeppelin-example-spell-echo/pom.xml b/zeppelin-examples/zeppelin-example-spell-echo/pom.xml index c5ad8985a38..c5a4c667356 100644 --- a/zeppelin-examples/zeppelin-example-spell-echo/pom.xml +++ b/zeppelin-examples/zeppelin-example-spell-echo/pom.xml @@ -23,13 +23,13 @@ zeppelin-examples org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 .. 
zeppelin-example-spell-echo jar - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 Zeppelin: Example Spell - Echo diff --git a/zeppelin-examples/zeppelin-example-spell-flowchart/pom.xml b/zeppelin-examples/zeppelin-example-spell-flowchart/pom.xml index 663df12c56b..7c33112a549 100644 --- a/zeppelin-examples/zeppelin-example-spell-flowchart/pom.xml +++ b/zeppelin-examples/zeppelin-example-spell-flowchart/pom.xml @@ -23,13 +23,13 @@ zeppelin-examples org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 .. zeppelin-example-spell-flowchart jar - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 Zeppelin: Example Spell - Flowchart diff --git a/zeppelin-examples/zeppelin-example-spell-markdown/pom.xml b/zeppelin-examples/zeppelin-example-spell-markdown/pom.xml index b19c4781fbc..5e68c7c99dc 100644 --- a/zeppelin-examples/zeppelin-example-spell-markdown/pom.xml +++ b/zeppelin-examples/zeppelin-example-spell-markdown/pom.xml @@ -23,13 +23,13 @@ zeppelin-examples org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 .. zeppelin-example-spell-markdown jar - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 Zeppelin: Example Spell - Markdown diff --git a/zeppelin-examples/zeppelin-example-spell-translator/pom.xml b/zeppelin-examples/zeppelin-example-spell-translator/pom.xml index 4debfd35328..4e9f3f6bedf 100644 --- a/zeppelin-examples/zeppelin-example-spell-translator/pom.xml +++ b/zeppelin-examples/zeppelin-example-spell-translator/pom.xml @@ -23,13 +23,13 @@ zeppelin-examples org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 .. zeppelin-example-spell-translator jar - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 Zeppelin: Example Spell - Translator diff --git a/zeppelin-integration/pom.xml b/zeppelin-integration/pom.xml index 83570ea4ef0..e3e88bec528 100644 --- a/zeppelin-integration/pom.xml +++ b/zeppelin-integration/pom.xml @@ -23,7 +23,7 @@ zeppelin org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 .. 
@@ -101,8 +101,8 @@ spark-interpreter ${project.version} test - - + + com.google.guava guava @@ -126,6 +126,12 @@ ${project.version} test + + org.apache.zeppelin + zeppelin-jdbc + ${project.version} + test + org.apache.zeppelin zeppelin-markdown @@ -151,6 +157,26 @@ ${project.version} test + + org.apache.zeppelin + zeppelin-server + ${project.version} + test + tests + + + org.apache.zeppelin + zeppelin-zengine + ${project.version} + test + tests + + + org.apache.zeppelin + zeppelin-test + ${project.version} + test + @@ -158,6 +184,11 @@ junit-jupiter-engine test + + org.awaitility + awaitility + test + @@ -189,42 +220,6 @@ - - org.apache.maven.plugins - maven-antrun-plugin - - - start-zeppelin - pre-integration-test - - - - - - - - - run - - - - stop-zeppelin - post-integration-test - - - - - - - - - run - - - - org.apache.maven.plugins maven-dependency-plugin diff --git a/zeppelin-integration/src/test/java/org/apache/zeppelin/AbstractZeppelinIT.java b/zeppelin-integration/src/test/java/org/apache/zeppelin/AbstractZeppelinIT.java index 046976e5e5b..6bfc27e50b2 100644 --- a/zeppelin-integration/src/test/java/org/apache/zeppelin/AbstractZeppelinIT.java +++ b/zeppelin-integration/src/test/java/org/apache/zeppelin/AbstractZeppelinIT.java @@ -20,6 +20,8 @@ import com.google.common.base.Function; import java.io.File; +import java.net.URI; +import java.net.URISyntaxException; import java.time.Duration; import java.time.temporal.ChronoUnit; import org.apache.commons.codec.binary.Base64; @@ -52,6 +54,39 @@ abstract public class AbstractZeppelinIT { protected static final long MAX_BROWSER_TIMEOUT_SEC = 30; protected static final long MAX_PARAGRAPH_TIMEOUT_SEC = 120; + protected void authenticationUser(String userName, String password) { + pollingWait( + By.xpath("//div[contains(@class, 'navbar-collapse')]//li//button[contains(.,'Login')]"), + MAX_BROWSER_TIMEOUT_SEC).click(); + + ZeppelinITUtils.sleep(1000, false); + + pollingWait(By.xpath("//*[@id='userName']"), 
MAX_BROWSER_TIMEOUT_SEC).sendKeys(userName); + pollingWait(By.xpath("//*[@id='password']"), MAX_BROWSER_TIMEOUT_SEC).sendKeys(password); + pollingWait( + By.xpath("//*[@id='loginModalContent']//button[contains(.,'Login')]"), + MAX_BROWSER_TIMEOUT_SEC).click(); + + ZeppelinITUtils.sleep(1000, false); + } + + protected void logoutUser(String userName) throws URISyntaxException { + ZeppelinITUtils.sleep(500, false); + manager.getWebDriver().findElement( + By.xpath("//div[contains(@class, 'navbar-collapse')]//li[contains(.,'" + userName + "')]")).click(); + ZeppelinITUtils.sleep(500, false); + manager.getWebDriver().findElement( + By.xpath("//div[contains(@class, 'navbar-collapse')]//li[contains(.,'" + userName + "')]//a[@ng-click='navbar.logout()']")).click(); + ZeppelinITUtils.sleep(2000, false); + if (manager.getWebDriver().findElement( + By.xpath("//*[@id='loginModal']//div[contains(@class, 'modal-header')]/button")).isDisplayed()) { + manager.getWebDriver().findElement( + By.xpath("//*[@id='loginModal']//div[contains(@class, 'modal-header')]/button")).click(); + } + manager.getWebDriver().get(new URI(manager.getWebDriver().getCurrentUrl()).resolve("/#/").toString()); + ZeppelinITUtils.sleep(500, false); + } + protected void setTextOfParagraph(int paragraphNo, String text) { String paragraphXpath = getParagraphXPath(paragraphNo); @@ -123,12 +158,7 @@ protected WebElement pollingWait(final By locator, final long timeWait) { .pollingEvery(Duration.of(1, ChronoUnit.SECONDS)) .ignoring(NoSuchElementException.class); - return wait.until(new Function() { - @Override - public WebElement apply(WebDriver driver) { - return driver.findElement(locator); - } - }); + return wait.until((Function) driver -> driver.findElement(locator)); } protected void createNewNote() { diff --git a/zeppelin-integration/src/test/java/org/apache/zeppelin/ProcessData.java b/zeppelin-integration/src/test/java/org/apache/zeppelin/ProcessData.java index 39a8a1384f4..82d71543513 100644 --- 
a/zeppelin-integration/src/test/java/org/apache/zeppelin/ProcessData.java +++ b/zeppelin-integration/src/test/java/org/apache/zeppelin/ProcessData.java @@ -41,7 +41,6 @@ public enum Types_Of_Data { private Process checked_process; private boolean printToConsole = false; - private boolean removeRedundantOutput = true; public ProcessData(Process connected_process, boolean printToConsole, int silenceTimeout, TimeUnit timeUnit) { this.checked_process = connected_process; diff --git a/zeppelin-integration/src/test/java/org/apache/zeppelin/WebDriverManager.java b/zeppelin-integration/src/test/java/org/apache/zeppelin/WebDriverManager.java index 38cabe3f6b1..abf46907586 100644 --- a/zeppelin-integration/src/test/java/org/apache/zeppelin/WebDriverManager.java +++ b/zeppelin-integration/src/test/java/org/apache/zeppelin/WebDriverManager.java @@ -17,13 +17,18 @@ package org.apache.zeppelin; -import static org.junit.jupiter.api.Assertions.fail; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.Closeable; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.time.Duration; +import java.util.*; +import java.util.function.Supplier; +import java.util.stream.Stream; + +import org.apache.commons.lang3.SystemUtils; import org.openqa.selenium.By; import org.openqa.selenium.TimeoutException; import org.openqa.selenium.WebDriver; @@ -39,7 +44,17 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; - +// This class auto discovery the available WebDriver in the following priority: +// Chrome, Firefox, Safari. +// +// You can also use the environment variable ZEPPELIN_SELENIUM_BROWSER to choose a specific one. 
+// For example, unlike Chromium and Firefox drivers, Safari's WebDriver is pre-installed on macOS, +// to enable automation on Safari, just simply run the following command from the terminal: +// safaridriver --enable +// and then specify Safari for Zeppelin Selenium integration tests: +// export ZEPPELIN_SELENIUM_BROWSER=Safari +// +// To learn more about WebDriver, visit: https://www.selenium.dev/documentation/webdriver/ public class WebDriverManager implements Closeable { public final static Logger LOG = LoggerFactory.getLogger(WebDriverManager.class); @@ -49,58 +64,74 @@ public class WebDriverManager implements Closeable { final Path downloadDir; final WebDriver driver; - public WebDriverManager(boolean deleteTempFiles) throws IOException { + public WebDriverManager(boolean deleteTempFiles, int port) throws IOException { this.deleteTempFiles = deleteTempFiles; this.downloadDir = Files.createTempFile("browser", ".download"); this.logDir = Files.createTempFile("logdir", ".download"); - this.driver = constructWebDriver(); + this.driver = constructWebDriver(port); } - public WebDriverManager() throws IOException { - this(true); + public WebDriverManager(int port) throws IOException { + this(true, port); } public WebDriver getWebDriver() { return this.driver; } - private WebDriver constructWebDriver() { - WebDriver driver = null; - if (driver == null) { + private WebDriver constructWebDriver(int port) { + Supplier chromeDriverSupplier = () -> { try { ChromeOptions options = new ChromeOptions(); - driver = new ChromeDriver(options); + return new ChromeDriver(options); } catch (Exception e) { LOG.error("Exception in WebDriverManager while ChromeDriver ", e); + return null; } - } - if (driver == null) { + }; + Supplier firefoxDriverSupplier = () -> { try { - driver = getFirefoxDriver(); + return getFirefoxDriver(); } catch (Exception e) { LOG.error("Exception in WebDriverManager while FireFox Driver ", e); + return null; } - } - if (driver == null) { + }; + Supplier 
safariDriverSupplier = () -> { try { - driver = new SafariDriver(); + return new SafariDriver(); } catch (Exception e) { LOG.error("Exception in WebDriverManager while SafariDriver ", e); + return null; } - } + }; - String url; - if (System.getenv("url") != null) { - url = System.getenv("url"); - } else { - url = "http://localhost:8080"; + WebDriver driver; + switch (SystemUtils.getEnvironmentVariable("ZEPPELIN_SELENIUM_BROWSER", "").toLowerCase(Locale.ROOT)) { + case "chrome": + driver = chromeDriverSupplier.get(); + break; + case "firefox": + driver = firefoxDriverSupplier.get(); + break; + case "safari": + driver = safariDriverSupplier.get(); + break; + default: + driver = Stream.of(chromeDriverSupplier, firefoxDriverSupplier, safariDriverSupplier) + .map(Supplier::get) + .filter(Objects::nonNull) + .findFirst() + .orElse(null); + } + if (driver == null) { + throw new RuntimeException("No available WebDriver"); } + String url = "http://localhost:" + port; + long start = System.currentTimeMillis(); boolean loaded = false; - if (driver == null) { - throw new RuntimeException("No webdriver"); - } driver.manage().timeouts() .implicitlyWait(Duration.ofSeconds(AbstractZeppelinIT.MAX_IMPLICIT_WAIT)); driver.get(url); @@ -123,15 +154,13 @@ public Boolean apply(WebDriver d) { } } - if (loaded == false) { - fail(); - } + assertTrue(loaded); driver.manage().window().maximize(); return driver; } - public WebDriver getFirefoxDriver() throws IOException { + private WebDriver getFirefoxDriver() { FirefoxProfile profile = new FirefoxProfile(); profile.setPreference("browser.download.folderList", 2); diff --git a/zeppelin-integration/src/test/java/org/apache/zeppelin/ZeppelinITUtils.java b/zeppelin-integration/src/test/java/org/apache/zeppelin/ZeppelinITUtils.java index 416e58b40a7..3089f991475 100644 --- a/zeppelin-integration/src/test/java/org/apache/zeppelin/ZeppelinITUtils.java +++ b/zeppelin-integration/src/test/java/org/apache/zeppelin/ZeppelinITUtils.java @@ -43,12 +43,6 
@@ public static void sleep(long millis, boolean logOutput) { } } - public static void restartZeppelin() { - CommandExecutor.executeCommandLocalHost("../bin/zeppelin-daemon.sh restart", - false, ProcessData.Types_Of_Data.OUTPUT); - //wait for server to start. - sleep(5000, false); - } public static void turnOffImplicitWaits(WebDriver driver) { driver.manage().timeouts().implicitlyWait(Duration.ofSeconds(0)); diff --git a/zeppelin-integration/src/test/java/org/apache/zeppelin/integration/AuthenticationIT.java b/zeppelin-integration/src/test/java/org/apache/zeppelin/integration/AuthenticationIT.java index 0499f5e83d3..8ffdf6aea75 100644 --- a/zeppelin-integration/src/test/java/org/apache/zeppelin/integration/AuthenticationIT.java +++ b/zeppelin-integration/src/test/java/org/apache/zeppelin/integration/AuthenticationIT.java @@ -21,39 +21,31 @@ import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; -import java.io.File; import java.io.IOException; import java.net.URI; -import java.net.URISyntaxException; import java.util.List; -import org.apache.commons.io.FileUtils; -import org.apache.commons.lang3.StringUtils; import org.apache.zeppelin.AbstractZeppelinIT; +import org.apache.zeppelin.MiniZeppelinServer; import org.apache.zeppelin.WebDriverManager; -import org.apache.zeppelin.ZeppelinITUtils; -import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.openqa.selenium.By; import org.openqa.selenium.Keys; import org.openqa.selenium.TimeoutException; import org.openqa.selenium.WebElement; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; /** * Created for org.apache.zeppelin.integration on 13/06/16. 
*/ public class AuthenticationIT extends AbstractZeppelinIT { - private static final Logger LOG = LoggerFactory.getLogger(AuthenticationIT.class); - static String shiroPath; - static String authShiro = "[users]\n" + + private static MiniZeppelinServer zepServer; + + private static final String AUTH_SHIRO = "[users]\n" + "admin = password1, admin\n" + "finance1 = finance1, finance\n" + "finance2 = finance2, finance\n" + @@ -75,12 +67,17 @@ public class AuthenticationIT extends AbstractZeppelinIT { "/api/interpreter/** = authc, anyofrolesuser[admin, finance]\n" + "/** = authc"; - static String originalShiro = ""; - + @BeforeAll + static void init() throws Exception { + zepServer = new MiniZeppelinServer(AuthenticationIT.class.getSimpleName()); + zepServer.addConfigFile("shiro.ini", AUTH_SHIRO); + zepServer.addInterpreter("jdbc"); + zepServer.start(true, AuthenticationIT.class.getSimpleName()); + } @BeforeEach public void startUpManager() throws IOException { - manager = new WebDriverManager(); + manager = new WebDriverManager(zepServer.getZeppelinConfiguration().getServerPort()); } @AfterEach @@ -88,85 +85,20 @@ public void tearDownManager() throws IOException { manager.close(); } - @BeforeAll - public static void startUp() throws IOException { - try { - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_HOME.getVarName(), new File("../").getAbsolutePath()); - ZeppelinConfiguration conf = ZeppelinConfiguration.create(); - shiroPath = conf.getAbsoluteDir(String.format("%s/shiro.ini", conf.getConfDir())); - File file = new File(shiroPath); - if (file.exists()) { - originalShiro = StringUtils.join(FileUtils.readLines(file, "UTF-8"), "\n"); - } - FileUtils.write(file, authShiro, "UTF-8"); - } catch (IOException e) { - LOG.error("Error in AuthenticationIT startUp::", e); - } - ZeppelinITUtils.restartZeppelin(); - } - - @AfterAll - public static void tearDown() throws IOException { - try { - if (!StringUtils.isBlank(shiroPath)) { - File file = new File(shiroPath); 
- if (StringUtils.isBlank(originalShiro)) { - FileUtils.deleteQuietly(file); - } else { - FileUtils.write(file, originalShiro, "UTF-8"); - } - } - } catch (IOException e) { - LOG.error("Error in AuthenticationIT tearDown::", e); - } - ZeppelinITUtils.restartZeppelin(); - } - - public void authenticationUser(String userName, String password) { - pollingWait(By.xpath( - "//div[contains(@class, 'navbar-collapse')]//li//button[contains(.,'Login')]"), - MAX_BROWSER_TIMEOUT_SEC).click(); - ZeppelinITUtils.sleep(1000, false); - pollingWait(By.xpath("//*[@id='userName']"), MAX_BROWSER_TIMEOUT_SEC).sendKeys(userName); - pollingWait(By.xpath("//*[@id='password']"), MAX_BROWSER_TIMEOUT_SEC).sendKeys(password); - pollingWait(By.xpath("//*[@id='loginModalContent']//button[contains(.,'Login')]"), - MAX_BROWSER_TIMEOUT_SEC).click(); - ZeppelinITUtils.sleep(1000, false); - } - - public void logoutUser(String userName) throws URISyntaxException { - ZeppelinITUtils.sleep(500, false); - manager.getWebDriver() - .findElement(By.xpath("//div[contains(@class, 'navbar-collapse')]//li[contains(.,'" + - userName + "')]")).click(); - ZeppelinITUtils.sleep(500, false); - manager.getWebDriver() - .findElement(By.xpath("//div[contains(@class, 'navbar-collapse')]//li[contains(.,'" + - userName + "')]//a[@ng-click='navbar.logout()']")).click(); - ZeppelinITUtils.sleep(2000, false); - if (manager.getWebDriver() - .findElement(By.xpath("//*[@id='loginModal']//div[contains(@class, 'modal-header')]/button")) - .isDisplayed()) { - manager.getWebDriver().findElement( - By.xpath("//*[@id='loginModal']//div[contains(@class, 'modal-header')]/button")).click(); - } - manager.getWebDriver() - .get(new URI(manager.getWebDriver().getCurrentUrl()).resolve("/#/").toString()); - ZeppelinITUtils.sleep(500, false); + public static void tearDown() throws Exception { + zepServer.destroy(); } @Test - @Disabled void testSimpleAuthentication() throws Exception { try { - AuthenticationIT authenticationIT = new 
AuthenticationIT(); - authenticationIT.authenticationUser("admin", "password1"); + authenticationUser("admin", "password1"); assertTrue(manager.getWebDriver().findElement(By.partialLinkText("Create new note")) .isDisplayed(), "Check is user logged in"); - authenticationIT.logoutUser("admin"); + logoutUser("admin"); } catch (Exception e) { handleException("Exception in AuthenticationIT while testCreateNewButton ", e); } @@ -175,8 +107,7 @@ void testSimpleAuthentication() throws Exception { @Test void testAnyOfRolesUser() throws Exception { try { - AuthenticationIT authenticationIT = new AuthenticationIT(); - authenticationIT.authenticationUser("admin", "password1"); + authenticationUser("admin", "password1"); pollingWait(By.xpath("//div/button[contains(@class, 'nav-btn dropdown-toggle ng-scope')]"), MAX_BROWSER_TIMEOUT_SEC).click(); @@ -186,9 +117,9 @@ void testAnyOfRolesUser() throws Exception { "//div[@id='main']/div/div[2]"), MIN_IMPLICIT_WAIT).isDisplayed(), "Check is user has permission to view this page"); - authenticationIT.logoutUser("admin"); + logoutUser("admin"); - authenticationIT.authenticationUser("finance1", "finance1"); + authenticationUser("finance1", "finance1"); pollingWait(By.xpath("//div/button[contains(@class, 'nav-btn dropdown-toggle ng-scope')]"), MAX_BROWSER_TIMEOUT_SEC).click(); @@ -198,9 +129,9 @@ void testAnyOfRolesUser() throws Exception { pollingWait(By.xpath("//div[@id='main']/div/div[2]"), MIN_IMPLICIT_WAIT).isDisplayed(), "Check is user has permission to view this page"); - authenticationIT.logoutUser("finance1"); + logoutUser("finance1"); - authenticationIT.authenticationUser("hr1", "hr1"); + authenticationUser("hr1", "hr1"); pollingWait(By.xpath("//div/button[contains(@class, 'nav-btn dropdown-toggle ng-scope')]"), MAX_BROWSER_TIMEOUT_SEC).click(); @@ -214,7 +145,7 @@ void testAnyOfRolesUser() throws Exception { } catch (TimeoutException e) { throw new Exception("Expected ngToast not found", e); } - 
authenticationIT.logoutUser("hr1"); + logoutUser("hr1"); } catch (Exception e) { handleException("Exception in AuthenticationIT while testAnyOfRolesUser ", e); @@ -224,8 +155,7 @@ void testAnyOfRolesUser() throws Exception { @Test void testGroupPermission() throws Exception { try { - AuthenticationIT authenticationIT = new AuthenticationIT(); - authenticationIT.authenticationUser("finance1", "finance1"); + authenticationUser("finance1", "finance1"); createNewNote(); String noteId = manager.getWebDriver().getCurrentUrl() @@ -247,9 +177,9 @@ void testGroupPermission() throws Exception { pollingWait(By.xpath("//div[@class='modal-dialog'][contains(.,'Permissions Saved ')]" + "//div[@class='modal-footer']//button[contains(.,'OK')]"), MAX_BROWSER_TIMEOUT_SEC).click(); - authenticationIT.logoutUser("finance1"); + logoutUser("finance1"); - authenticationIT.authenticationUser("hr1", "hr1"); + authenticationUser("hr1", "hr1"); try { WebElement element = pollingWait(By.xpath("//*[@id='notebook-names']//a[contains(@href, '" + noteId + "')]"), MAX_BROWSER_TIMEOUT_SEC); @@ -268,9 +198,9 @@ void testGroupPermission() throws Exception { manager.getWebDriver().findElement( By.xpath("//div[@class='modal-content'][contains(.,'Insufficient privileges')]" + "//div[@class='modal-footer']//button[2]")).click(); - authenticationIT.logoutUser("hr1"); + logoutUser("hr1"); - authenticationIT.authenticationUser("finance2", "finance2"); + authenticationUser("finance2", "finance2"); try { WebElement element = pollingWait(By.xpath("//*[@id='notebook-names']//a[contains(@href, '" + noteId + "')]"), MAX_BROWSER_TIMEOUT_SEC); @@ -287,7 +217,7 @@ void testGroupPermission() throws Exception { "//div[contains(.,'Insufficient privileges')]")); assertEquals(0, privilegesModal.size(), "Check is user has permission to view this note"); deleteTestNotebook(manager.getWebDriver()); - authenticationIT.logoutUser("finance2"); + logoutUser("finance2"); } catch (Exception e) { diff --git 
a/zeppelin-integration/src/test/java/org/apache/zeppelin/integration/InterpreterIT.java b/zeppelin-integration/src/test/java/org/apache/zeppelin/integration/InterpreterIT.java index 055b3dbbbb9..5fe0ff0291d 100644 --- a/zeppelin-integration/src/test/java/org/apache/zeppelin/integration/InterpreterIT.java +++ b/zeppelin-integration/src/test/java/org/apache/zeppelin/integration/InterpreterIT.java @@ -22,9 +22,11 @@ import java.io.IOException; import org.apache.zeppelin.AbstractZeppelinIT; +import org.apache.zeppelin.MiniZeppelinServer; import org.apache.zeppelin.WebDriverManager; - +import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.openqa.selenium.By; @@ -32,16 +34,30 @@ class InterpreterIT extends AbstractZeppelinIT { + private static MiniZeppelinServer zepServer; + + @BeforeAll + static void init() throws Exception { + zepServer = new MiniZeppelinServer(InterpreterIT.class.getSimpleName()); + zepServer.addInterpreter("spark"); + zepServer.start(true, InterpreterIT.class.getSimpleName()); + } + @BeforeEach public void startUp() throws IOException { - manager = new WebDriverManager(); + manager = new WebDriverManager(zepServer.getZeppelinConfiguration().getServerPort()); } @AfterEach - public void tearDown() throws IOException { + public void tearDownManager() throws IOException { manager.close(); } + @AfterAll + public static void tearDown() throws Exception { + zepServer.destroy(); + } + @Test void testShowDescriptionOnInterpreterCreate() throws Exception { try { diff --git a/zeppelin-integration/src/test/java/org/apache/zeppelin/integration/InterpreterModeActionsIT.java b/zeppelin-integration/src/test/java/org/apache/zeppelin/integration/InterpreterModeActionsIT.java index 103709d41a1..b8927edccb6 100644 --- a/zeppelin-integration/src/test/java/org/apache/zeppelin/integration/InterpreterModeActionsIT.java +++ 
b/zeppelin-integration/src/test/java/org/apache/zeppelin/integration/InterpreterModeActionsIT.java @@ -16,13 +16,12 @@ */ package org.apache.zeppelin.integration; -import org.apache.commons.lang3.StringUtils; import org.apache.zeppelin.CommandExecutor; +import org.apache.zeppelin.MiniZeppelinServer; import org.apache.zeppelin.ProcessData; import org.apache.zeppelin.AbstractZeppelinIT; import org.apache.zeppelin.WebDriverManager; import org.apache.zeppelin.ZeppelinITUtils; -import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; @@ -37,24 +36,18 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.apache.commons.io.FileUtils; - import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; -import java.io.File; import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; import java.time.Duration; public class InterpreterModeActionsIT extends AbstractZeppelinIT { private static final Logger LOG = LoggerFactory.getLogger(InterpreterModeActionsIT.class); - static String shiroPath; - static String authShiro = "[users]\n" + + private final static String AUTH_SHIRO = "[users]\n" + "admin = password1, admin\n" + "user1 = password2, admin\n" + "user2 = password3, admin\n" + @@ -70,17 +63,26 @@ public class InterpreterModeActionsIT extends AbstractZeppelinIT { "/api/cluster/address = anon\n" + "/** = authc"; - static String originalShiro = ""; - static String interpreterOptionPath = ""; - static String originalInterpreterOption = ""; - - static String cmdPsPython = "ps aux | grep 'kernel_server.py' | grep -v 'grep' | wc -l"; - static String cmdPsInterpreter = "ps aux | grep 'zeppelin/interpreter/python/*' |" + + private final static String CMD_PS_PYTHON = + "ps aux | grep 'kernel_server.py' | grep -v 
'grep' | wc -l"; + private final static String CMD_PS_INTERPRETER_PYTHON = "ps aux | grep 'interpreter/python/*' |" + " sed -E '/grep/d' | wc -l"; + private static MiniZeppelinServer zepServer; + + @BeforeAll + static void init() throws Exception { + zepServer = new MiniZeppelinServer(InterpreterModeActionsIT.class.getSimpleName()); + zepServer.addConfigFile("shiro.ini", AUTH_SHIRO); + zepServer.addInterpreter("python"); + zepServer.copyLogProperties(); + zepServer.copyBinDir(); + zepServer.start(true, InterpreterModeActionsIT.class.getSimpleName()); + } + @BeforeEach - public void startUpManager() throws IOException { - manager = new WebDriverManager(); + public void startUp() throws IOException { + manager = new WebDriverManager(zepServer.getZeppelinConfiguration().getServerPort()); } @AfterEach @@ -88,85 +90,9 @@ public void tearDownManager() throws IOException { manager.close(); } - @BeforeAll - public static void startUp() throws IOException { - try { - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_HOME.getVarName(), new File("../").getAbsolutePath()); - ZeppelinConfiguration conf = ZeppelinConfiguration.create(); - shiroPath = conf.getAbsoluteDir(String.format("%s/shiro.ini", conf.getConfDir())); - interpreterOptionPath = conf.getAbsoluteDir(String.format("%s/interpreter.json", conf.getConfDir())); - File shiroFile = new File(shiroPath); - if (shiroFile.exists()) { - originalShiro = StringUtils.join(FileUtils.readLines(shiroFile, "UTF-8"), "\n"); - } - FileUtils.write(shiroFile, authShiro, "UTF-8"); - - File interpreterOptionFile = new File(interpreterOptionPath); - if (interpreterOptionFile.exists()) { - originalInterpreterOption = StringUtils.join(FileUtils.readLines(interpreterOptionFile, "UTF-8"), "\n"); - } - } catch (IOException e) { - LOG.error("Error in InterpreterModeActionsIT startUp::", e); - } - ZeppelinITUtils.restartZeppelin(); - } - @AfterAll - public static void tearDown() throws IOException { - try { - if 
(!StringUtils.isBlank(shiroPath)) { - File shiroFile = new File(shiroPath); - if (StringUtils.isBlank(originalShiro)) { - FileUtils.deleteQuietly(shiroFile); - } else { - FileUtils.write(shiroFile, originalShiro, "UTF-8"); - } - } - if (!StringUtils.isBlank(interpreterOptionPath)) { - File interpreterOptionFile = new File(interpreterOptionPath); - if (StringUtils.isBlank(originalInterpreterOption)) { - FileUtils.deleteQuietly(interpreterOptionFile); - } else { - FileUtils.write(interpreterOptionFile, originalInterpreterOption, "UTF-8"); - } - } - } catch (IOException e) { - LOG.error("Error in InterpreterModeActionsIT tearDown::", e); - } - ZeppelinITUtils.restartZeppelin(); - } - - private void authenticationUser(String userName, String password) { - pollingWait(By.xpath( - "//div[contains(@class, 'navbar-collapse')]//li//button[contains(.,'Login')]"), - MAX_BROWSER_TIMEOUT_SEC).click(); - ZeppelinITUtils.sleep(500, false); - pollingWait(By.xpath("//*[@id='userName']"), MAX_BROWSER_TIMEOUT_SEC).sendKeys(userName); - pollingWait(By.xpath("//*[@id='password']"), MAX_BROWSER_TIMEOUT_SEC).sendKeys(password); - pollingWait(By.xpath("//*[@id='loginModalContent']//button[contains(.,'Login')]"), - MAX_BROWSER_TIMEOUT_SEC).click(); - ZeppelinITUtils.sleep(1000, false); - } - - private void logoutUser(String userName) throws URISyntaxException { - ZeppelinITUtils.sleep(500, false); - manager.getWebDriver() - .findElement(By.xpath("//div[contains(@class, 'navbar-collapse')]//li[contains(.,'" + - userName + "')]")).click(); - ZeppelinITUtils.sleep(500, false); - manager.getWebDriver() - .findElement(By.xpath("//div[contains(@class, 'navbar-collapse')]//li[contains(.,'" + - userName + "')]//a[@ng-click='navbar.logout()']")).click(); - ZeppelinITUtils.sleep(2000, false); - if (manager.getWebDriver() - .findElement(By.xpath("//*[@id='loginModal']//div[contains(@class, 'modal-header')]/button")) - .isDisplayed()) { - manager.getWebDriver().findElement( - 
By.xpath("//*[@id='loginModal']//div[contains(@class, 'modal-header')]/button")).click(); - } - manager.getWebDriver() - .get(new URI(manager.getWebDriver().getCurrentUrl()).resolve("/#/").toString()); - ZeppelinITUtils.sleep(500, false); + public static void tearDown() throws Exception { + zepServer.destroy(); } private void setPythonParagraph(int num, String text) { @@ -184,8 +110,7 @@ private void setPythonParagraph(int num, String text) { void testGloballyAction() throws Exception { try { //step 1: (admin) login, set 'globally in shared' mode of python interpreter, logout - InterpreterModeActionsIT interpreterModeActionsIT = new InterpreterModeActionsIT(); - interpreterModeActionsIT.authenticationUser("admin", "password1"); + authenticationUser("admin", "password1"); pollingWait(By.xpath("//div/button[contains(@class, 'nav-btn dropdown-toggle ng-scope')]"), MAX_BROWSER_TIMEOUT_SEC).click(); clickAndWait(By.xpath("//li/a[contains(@href, '#/interpreter')]")); @@ -203,12 +128,12 @@ void testGloballyAction() throws Exception { clickAndWait(By.xpath( "//div[@class='modal-dialog']//div[@class='bootstrap-dialog-footer-buttons']//button[contains(., 'OK')]")); clickAndWait(By.xpath("//a[@class='navbar-brand navbar-title'][contains(@href, '#/')]")); - interpreterModeActionsIT.logoutUser("admin"); + logoutUser("admin"); //step 2: (user1) login, create a new note, run two paragraph with 'python', check result, check process, logout //paragraph: Check if the result is 'user1' in the second paragraph //System: Check if the number of python interpreter process is '1' //System: Check if the number of python process is '1' - interpreterModeActionsIT.authenticationUser("user1", "password2"); + authenticationUser("user1", "password2"); By locator = By.xpath("//div[contains(@class, 'col-md-4')]/div/h5/a[contains(.,'Create new" + " note')]"); WebElement element = @@ -220,31 +145,31 @@ void testGloballyAction() throws Exception { String user1noteId = 
manager.getWebDriver().getCurrentUrl() .substring(manager.getWebDriver().getCurrentUrl().lastIndexOf("/") + 1); waitForParagraph(1, "READY"); - interpreterModeActionsIT.setPythonParagraph(1, "user=\"user1\""); + setPythonParagraph(1, "user=\"user1\""); waitForParagraph(2, "READY"); - interpreterModeActionsIT.setPythonParagraph(2, "print(user)"); + setPythonParagraph(2, "print(user)"); assertEquals("user1", manager.getWebDriver() .findElement( By.xpath(getParagraphXPath(2) + "//div[contains(@class, 'text plainTextContent')]")) - .getText(), + .getText().trim(), "The output field paragraph contains"); - String resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(cmdPsPython, + String resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(CMD_PS_PYTHON, false, ProcessData.Types_Of_Data.OUTPUT); resultProcessNum = resultProcessNum.trim().replaceAll("\n", ""); assertEquals("1", resultProcessNum, "The number of python process"); - resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(cmdPsInterpreter, + resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(CMD_PS_INTERPRETER_PYTHON, false, ProcessData.Types_Of_Data.OUTPUT); resultProcessNum = resultProcessNum.trim().replaceAll("\n", ""); assertEquals("1", resultProcessNum, "The number of python interpreter process is wrong"); - interpreterModeActionsIT.logoutUser("user1"); + logoutUser("user1"); //step 3: (user2) login, create a new note, run two paragraph with 'python', check result, check process, logout //paragraph: Check if the result is 'user2' in the second paragraph //System: Check if the number of python interpreter process is '1' //System: Check if the number of python process is '1' - interpreterModeActionsIT.authenticationUser("user2", "password3"); + authenticationUser("user2", "password3"); locator = By.xpath("//div[contains(@class, 'col-md-4')]/div/h5/a[contains(.,'Create new" + " note')]"); element = @@ -254,28 +179,28 @@ void testGloballyAction() 
throws Exception { createNewNote(); } waitForParagraph(1, "READY"); - interpreterModeActionsIT.setPythonParagraph(1, "user=\"user2\""); + setPythonParagraph(1, "user=\"user2\""); waitForParagraph(2, "READY"); - interpreterModeActionsIT.setPythonParagraph(2, "print(user)"); + setPythonParagraph(2, "print(user)"); assertEquals("user2", manager.getWebDriver().findElement(By.xpath( - getParagraphXPath(2) + "//div[contains(@class, 'text plainTextContent')]")).getText(), + getParagraphXPath(2) + "//div[contains(@class, 'text plainTextContent')]")).getText().trim(), "The output field paragraph contains"); - resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(cmdPsPython, + resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(CMD_PS_PYTHON, false, ProcessData.Types_Of_Data.OUTPUT); resultProcessNum = resultProcessNum.trim().replaceAll("\n", ""); assertEquals("1", resultProcessNum, "The number of python process is wrong"); - resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(cmdPsInterpreter, + resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(CMD_PS_INTERPRETER_PYTHON, false, ProcessData.Types_Of_Data.OUTPUT); resultProcessNum = resultProcessNum.trim().replaceAll("\n", ""); assertEquals("1", resultProcessNum, "The number of python interpreter process is wrong"); - interpreterModeActionsIT.logoutUser("user2"); + logoutUser("user2"); //step 4: (user1) login, come back note user1 made, run second paragraph, check result, check process, //restart python interpreter, check process again, logout //paragraph: Check if the result is 'user2' in the second paragraph //System: Check if the number of python interpreter process is '1' //System: Check if the number of python process is '1' - interpreterModeActionsIT.authenticationUser("user1", "password2"); + authenticationUser("user1", "password2"); locator = By.xpath("//*[@id='notebook-names']//a[contains(@href, '" + user1noteId + "')]"); element = (new 
WebDriverWait(manager.getWebDriver(), Duration.ofSeconds(MAX_BROWSER_TIMEOUT_SEC))) @@ -292,11 +217,11 @@ void testGloballyAction() throws Exception { waitForParagraph(2, "ERROR"); fail("Exception in InterpreterModeActionsIT while running Python Paragraph", e); } - resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(cmdPsPython, + resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(CMD_PS_PYTHON, false, ProcessData.Types_Of_Data.OUTPUT); resultProcessNum = resultProcessNum.trim().replaceAll("\n", ""); assertEquals("1", resultProcessNum, "The number of python process wrong"); - resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(cmdPsInterpreter, + resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(CMD_PS_INTERPRETER_PYTHON, false, ProcessData.Types_Of_Data.OUTPUT); resultProcessNum = resultProcessNum.trim().replaceAll("\n", ""); assertEquals("1", resultProcessNum, "The number of python interpreter process is wrong"); @@ -321,15 +246,15 @@ void testGloballyAction() throws Exception { if (element.isDisplayed()) { clickAndWait(By.xpath("//*[@id='actionbar']//span[contains(@uib-tooltip, 'Interpreter binding')]")); } - resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(cmdPsPython, + resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(CMD_PS_PYTHON, false, ProcessData.Types_Of_Data.OUTPUT); resultProcessNum = resultProcessNum.trim().replaceAll("\n", ""); assertEquals("0", resultProcessNum, "The number of python process is wrong"); - resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(cmdPsInterpreter, + resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(CMD_PS_INTERPRETER_PYTHON, false, ProcessData.Types_Of_Data.OUTPUT); resultProcessNum = resultProcessNum.trim().replaceAll("\n", ""); assertEquals("0", resultProcessNum, "The number of python interpreter process is wrong"); - interpreterModeActionsIT.logoutUser("user1"); + 
logoutUser("user1"); } catch (Exception e) { handleException("Exception in InterpreterModeActionsIT while testGloballyAction ", e); } @@ -339,8 +264,7 @@ void testGloballyAction() throws Exception { void testPerUserScopedAction() throws Exception { try { //step 1: (admin) login, set 'Per user in scoped' mode of python interpreter, logout - InterpreterModeActionsIT interpreterModeActionsIT = new InterpreterModeActionsIT(); - interpreterModeActionsIT.authenticationUser("admin", "password1"); + authenticationUser("admin", "password1"); pollingWait(By.xpath("//div/button[contains(@class, 'nav-btn dropdown-toggle ng-scope')]"), MAX_BROWSER_TIMEOUT_SEC).click(); @@ -364,13 +288,13 @@ void testPerUserScopedAction() throws Exception { "//div[@class='modal-dialog']//div[@class='bootstrap-dialog-footer-buttons']//button[contains(., 'OK')]")); clickAndWait(By.xpath("//a[@class='navbar-brand navbar-title'][contains(@href, '#/')]")); - interpreterModeActionsIT.logoutUser("admin"); + logoutUser("admin"); //step 2: (user1) login, create a new note, run two paragraph with 'python', check result, check process, logout //paragraph: Check if the result is 'user1' in the second paragraph //System: Check if the number of python interpreter process is '1' //System: Check if the number of python process is '1' - interpreterModeActionsIT.authenticationUser("user1", "password2"); + authenticationUser("user1", "password2"); By locator = By.xpath("//div[contains(@class, 'col-md-4')]/div/h5/a[contains(.,'Create new" + " note')]"); WebElement element = @@ -383,30 +307,30 @@ void testPerUserScopedAction() throws Exception { .substring(manager.getWebDriver().getCurrentUrl().lastIndexOf("/") + 1); waitForParagraph(1, "READY"); - interpreterModeActionsIT.setPythonParagraph(1, "user=\"user1\""); + setPythonParagraph(1, "user=\"user1\""); waitForParagraph(2, "READY"); - interpreterModeActionsIT.setPythonParagraph(2, "print(user)"); + setPythonParagraph(2, "print(user)"); assertEquals("user1", 
manager.getWebDriver().findElement(By.xpath( getParagraphXPath(2) + "//div[contains(@class, 'text plainTextContent')]")).getText(), "The output field paragraph contains"); - String resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(cmdPsPython, + String resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(CMD_PS_PYTHON, false, ProcessData.Types_Of_Data.OUTPUT); resultProcessNum = resultProcessNum.trim().replaceAll("\n", ""); assertEquals("1", resultProcessNum, "The number of python process is wrong"); - resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(cmdPsInterpreter, + resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(CMD_PS_INTERPRETER_PYTHON, false, ProcessData.Types_Of_Data.OUTPUT); resultProcessNum = resultProcessNum.trim().replaceAll("\n", ""); assertEquals("1", resultProcessNum, "The number of python interpreter process is wrong"); - interpreterModeActionsIT.logoutUser("user1"); + logoutUser("user1"); //step 3: (user2) login, create a new note, run two paragraph with 'python', check result, check process, logout // paragraph: Check if the result is 'user2' in the second paragraph //System: Check if the number of python interpreter process is '1' //System: Check if the number of python process is '2' - interpreterModeActionsIT.authenticationUser("user2", "password3"); + authenticationUser("user2", "password3"); locator = By.xpath("//div[contains(@class, 'col-md-4')]/div/h5/a[contains(.,'Create new" + " note')]"); element = @@ -418,29 +342,29 @@ void testPerUserScopedAction() throws Exception { String user2noteId = manager.getWebDriver().getCurrentUrl() .substring(manager.getWebDriver().getCurrentUrl().lastIndexOf("/") + 1); waitForParagraph(1, "READY"); - interpreterModeActionsIT.setPythonParagraph(1, "user=\"user2\""); + setPythonParagraph(1, "user=\"user2\""); waitForParagraph(2, "READY"); - interpreterModeActionsIT.setPythonParagraph(2, "print(user)"); + setPythonParagraph(2, 
"print(user)"); assertEquals("user2", manager.getWebDriver().findElement(By.xpath( getParagraphXPath(2) + "//div[contains(@class, 'text plainTextContent')]")).getText(), "The output field paragraph contains"); - resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(cmdPsPython, + resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(CMD_PS_PYTHON, false, ProcessData.Types_Of_Data.OUTPUT); resultProcessNum = resultProcessNum.trim().replaceAll("\n", ""); assertEquals("2", resultProcessNum, "The number of python process is wrong"); - resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(cmdPsInterpreter, + resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(CMD_PS_INTERPRETER_PYTHON, false, ProcessData.Types_Of_Data.OUTPUT); resultProcessNum = resultProcessNum.trim().replaceAll("\n", ""); assertEquals("1", resultProcessNum, "The number of python interpreter process is wrong"); - interpreterModeActionsIT.logoutUser("user2"); + logoutUser("user2"); //step 4: (user1) login, come back note user1 made, run second paragraph, check result, // restart python interpreter in note, check process again, logout //paragraph: Check if the result is 'user1' in the second paragraph //System: Check if the number of python interpreter process is '1' //System: Check if the number of python process is '1' - interpreterModeActionsIT.authenticationUser("user1", "password2"); + authenticationUser("user1", "password2"); locator = By.xpath("//*[@id='notebook-names']//a[contains(@href, '" + user1noteId + "')]"); element = (new WebDriverWait(manager.getWebDriver(), Duration.ofSeconds(MAX_BROWSER_TIMEOUT_SEC))) @@ -481,20 +405,20 @@ void testPerUserScopedAction() throws Exception { clickAndWait(By.xpath("//*[@id='actionbar']//span[contains(@uib-tooltip, 'Interpreter binding')]")); } - resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(cmdPsPython, + resultProcessNum = (String) 
CommandExecutor.executeCommandLocalHost(CMD_PS_PYTHON, false, ProcessData.Types_Of_Data.OUTPUT); resultProcessNum = resultProcessNum.trim().replaceAll("\n", ""); assertEquals("1", resultProcessNum, "The number of python process is wrong"); - resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(cmdPsInterpreter, + resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(CMD_PS_INTERPRETER_PYTHON, false, ProcessData.Types_Of_Data.OUTPUT); resultProcessNum = resultProcessNum.trim().replaceAll("\n", ""); assertEquals("1", resultProcessNum, "The number of python interpreter process is wrong"); - interpreterModeActionsIT.logoutUser("user1"); + logoutUser("user1"); //step 5: (user2) login, come back note user2 made, restart python interpreter in note, check process, logout //System: Check if the number of python interpreter process is '0' //System: Check if the number of python process is '0' - interpreterModeActionsIT.authenticationUser("user2", "password3"); + authenticationUser("user2", "password3"); locator = By.xpath("//*[@id='notebook-names']//a[contains(@href, '" + user2noteId + "')]"); element = (new WebDriverWait(manager.getWebDriver(), Duration.ofSeconds(MAX_BROWSER_TIMEOUT_SEC))) @@ -523,21 +447,21 @@ void testPerUserScopedAction() throws Exception { clickAndWait(By.xpath("//*[@id='actionbar']//span[contains(@uib-tooltip, 'Interpreter binding')]")); } - resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(cmdPsPython, + resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(CMD_PS_PYTHON, false, ProcessData.Types_Of_Data.OUTPUT); resultProcessNum = resultProcessNum.trim().replaceAll("\n", ""); assertEquals("0", resultProcessNum, "The number of python process is wrong"); - resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(cmdPsInterpreter, + resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(CMD_PS_INTERPRETER_PYTHON, false, ProcessData.Types_Of_Data.OUTPUT); 
resultProcessNum = resultProcessNum.trim().replaceAll("\n", ""); assertEquals("0", resultProcessNum, "The number of python process interpreter is wrong"); - interpreterModeActionsIT.logoutUser("user2"); + logoutUser("user2"); //step 6: (user1) login, come back note user1 made, run first paragraph,logout // (user2) login, come back note user2 made, run first paragraph, check process, logout //System: Check if the number of python process is '2' //System: Check if the number of python interpreter process is '1' - interpreterModeActionsIT.authenticationUser("user1", "password2"); + authenticationUser("user1", "password2"); locator = By.xpath("//*[@id='notebook-names']//a[contains(@href, '" + user1noteId + "')]"); element = (new WebDriverWait(manager.getWebDriver(), Duration.ofSeconds(MAX_BROWSER_TIMEOUT_SEC))) @@ -554,9 +478,9 @@ void testPerUserScopedAction() throws Exception { waitForParagraph(1, "ERROR"); fail("Exception in InterpreterModeActionsIT while running Python Paragraph"); } - interpreterModeActionsIT.logoutUser("user1"); + logoutUser("user1"); - interpreterModeActionsIT.authenticationUser("user2", "password3"); + authenticationUser("user2", "password3"); locator = By.xpath("//*[@id='notebook-names']//a[contains(@href, '" + user2noteId + "')]"); element = (new WebDriverWait(manager.getWebDriver(), Duration.ofSeconds(MAX_BROWSER_TIMEOUT_SEC))) @@ -572,20 +496,20 @@ void testPerUserScopedAction() throws Exception { waitForParagraph(1, "ERROR"); fail("Exception in InterpreterModeActionsIT while running Python Paragraph", e); } - resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(cmdPsPython, + resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(CMD_PS_PYTHON, false, ProcessData.Types_Of_Data.OUTPUT); resultProcessNum = resultProcessNum.trim().replaceAll("\n", ""); assertEquals("2", resultProcessNum, "The number of python process is wrong"); - resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(cmdPsInterpreter, 
+ resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(CMD_PS_INTERPRETER_PYTHON, false, ProcessData.Types_Of_Data.OUTPUT); resultProcessNum = resultProcessNum.trim().replaceAll("\n", ""); assertEquals("1", resultProcessNum, "The number of python interpreter process is wrong"); - interpreterModeActionsIT.logoutUser("user2"); + logoutUser("user2"); //step 7: (admin) login, restart python interpreter in interpreter tab, check process, logout //System: Check if the number of python interpreter process is 0 //System: Check if the number of python process is 0 - interpreterModeActionsIT.authenticationUser("admin", "password1"); + authenticationUser("admin", "password1"); pollingWait(By.xpath("//div/button[contains(@class, 'nav-btn dropdown-toggle ng-scope')]"), MAX_BROWSER_TIMEOUT_SEC).click(); @@ -608,16 +532,16 @@ void testPerUserScopedAction() throws Exception { assertTrue(invisibilityStatus, "interpreter setting dialog visibility status"); } - resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(cmdPsPython, + resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(CMD_PS_PYTHON, false, ProcessData.Types_Of_Data.OUTPUT); resultProcessNum = resultProcessNum.trim().replaceAll("\n", ""); assertEquals("0", resultProcessNum, "The number of python process is wrong"); - resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(cmdPsInterpreter, + resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(CMD_PS_INTERPRETER_PYTHON, false, ProcessData.Types_Of_Data.OUTPUT); resultProcessNum = resultProcessNum.trim().replaceAll("\n", ""); assertEquals("0", resultProcessNum, "The number of python interpreter process is wrong"); - interpreterModeActionsIT.logoutUser("admin"); + logoutUser("admin"); } catch (Exception e) { handleException("Exception in InterpreterModeActionsIT while testPerUserScopedAction ", e); @@ -628,8 +552,7 @@ void testPerUserScopedAction() throws Exception { void testPerUserIsolatedAction() 
throws Exception { try { //step 1: (admin) login, set 'Per user in isolated' mode of python interpreter, logout - InterpreterModeActionsIT interpreterModeActionsIT = new InterpreterModeActionsIT(); - interpreterModeActionsIT.authenticationUser("admin", "password1"); + authenticationUser("admin", "password1"); pollingWait(By.xpath("//div/button[contains(@class, 'nav-btn dropdown-toggle ng-scope')]"), MAX_BROWSER_TIMEOUT_SEC).click(); clickAndWait(By.xpath("//li/a[contains(@href, '#/interpreter')]")); @@ -649,13 +572,13 @@ void testPerUserIsolatedAction() throws Exception { clickAndWait(By.xpath( "//div[@class='modal-dialog']//div[@class='bootstrap-dialog-footer-buttons']//button[contains(., 'OK')]")); clickAndWait(By.xpath("//a[@class='navbar-brand navbar-title'][contains(@href, '#/')]")); - interpreterModeActionsIT.logoutUser("admin"); + logoutUser("admin"); //step 2: (user1) login, create a new note, run two paragraph with 'python', check result, check process, logout //paragraph: Check if the result is 'user1' in the second paragraph //System: Check if the number of python interpreter process is '1' //System: Check if the number of python process is '1' - interpreterModeActionsIT.authenticationUser("user1", "password2"); + authenticationUser("user1", "password2"); By locator = By.xpath("//div[contains(@class, 'col-md-4')]/div/h5/a[contains(.,'Create new" + " note')]"); WebElement element = @@ -667,29 +590,29 @@ void testPerUserIsolatedAction() throws Exception { String user1noteId = manager.getWebDriver().getCurrentUrl() .substring(manager.getWebDriver().getCurrentUrl().lastIndexOf("/") + 1); waitForParagraph(1, "READY"); - interpreterModeActionsIT.setPythonParagraph(1, "user=\"user1\""); + setPythonParagraph(1, "user=\"user1\""); waitForParagraph(2, "READY"); - interpreterModeActionsIT.setPythonParagraph(2, "print(user)"); + setPythonParagraph(2, "print(user)"); assertEquals("user1", manager.getWebDriver().findElement(By.xpath( - getParagraphXPath(2) + 
"//div[contains(@class, 'text plainTextContent')]")).getText(), + getParagraphXPath(2) + "//div[contains(@class, 'text plainTextContent')]")).getText().trim(), "The output field paragraph contains"); - String resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(cmdPsPython, + String resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(CMD_PS_PYTHON, false, ProcessData.Types_Of_Data.OUTPUT); resultProcessNum = resultProcessNum.trim().replaceAll("\n", ""); assertEquals("1", resultProcessNum, "The number of python process is wrong"); - resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(cmdPsInterpreter, + resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(CMD_PS_INTERPRETER_PYTHON, false, ProcessData.Types_Of_Data.OUTPUT); resultProcessNum = resultProcessNum.trim().replaceAll("\n", ""); assertEquals("1", resultProcessNum, "The number of python interpreter process is wrong"); - interpreterModeActionsIT.logoutUser("user1"); + logoutUser("user1"); //step 3: (user2) login, create a new note, run two paragraph with 'python', check result, check process, logout // paragraph: Check if the result is 'user2' in the second paragraph //System: Check if the number of python interpreter process is '2' //System: Check if the number of python process is '2' - interpreterModeActionsIT.authenticationUser("user2", "password3"); + authenticationUser("user2", "password3"); locator = By.xpath("//div[contains(@class, 'col-md-4')]/div/h5/a[contains(.,'Create new" + " note')]"); element = @@ -701,30 +624,30 @@ void testPerUserIsolatedAction() throws Exception { String user2noteId = manager.getWebDriver().getCurrentUrl() .substring(manager.getWebDriver().getCurrentUrl().lastIndexOf("/") + 1); waitForParagraph(1, "READY"); - interpreterModeActionsIT.setPythonParagraph(1, "user=\"user2\""); + setPythonParagraph(1, "user=\"user2\""); waitForParagraph(2, "READY"); - interpreterModeActionsIT.setPythonParagraph(2, "print(user)"); + 
setPythonParagraph(2, "print(user)"); assertEquals("user2", manager.getWebDriver().findElement(By.xpath( - getParagraphXPath(2) + "//div[contains(@class, 'text plainTextContent')]")).getText(), + getParagraphXPath(2) + "//div[contains(@class, 'text plainTextContent')]")).getText().trim(), "The output field paragraph contains"); - resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(cmdPsPython, + resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(CMD_PS_PYTHON, false, ProcessData.Types_Of_Data.OUTPUT); resultProcessNum = resultProcessNum.trim().replaceAll("\n", ""); assertEquals("2", resultProcessNum, "The number of python process is wrong"); - resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(cmdPsInterpreter, + resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(CMD_PS_INTERPRETER_PYTHON, false, ProcessData.Types_Of_Data.OUTPUT); resultProcessNum = resultProcessNum.trim().replaceAll("\n", ""); assertEquals("2", resultProcessNum, "The number of python interpreter process is wrong"); - interpreterModeActionsIT.logoutUser("user2"); + logoutUser("user2"); //step 4: (user1) login, come back note user1 made, run second paragraph, check result, // restart python interpreter in note, check process again, logout //paragraph: Check if the result is 'user1' in the second paragraph //System: Check if the number of python interpreter process is '1' //System: Check if the number of python process is '1' - interpreterModeActionsIT.authenticationUser("user1", "password2"); + authenticationUser("user1", "password2"); locator = By.xpath("//*[@id='notebook-names']//a[contains(@href, '" + user1noteId + "')]"); element = (new WebDriverWait(manager.getWebDriver(), Duration.ofSeconds(MAX_BROWSER_TIMEOUT_SEC))) @@ -741,7 +664,7 @@ void testPerUserIsolatedAction() throws Exception { fail("Exception in InterpreterModeActionsIT while running Python Paragraph"); } assertEquals("user1", 
manager.getWebDriver().findElement(By.xpath( - getParagraphXPath(2) + "//div[contains(@class, 'text plainTextContent')]")).getText(), + getParagraphXPath(2) + "//div[contains(@class, 'text plainTextContent')]")).getText().trim(), "The output field paragraph contains"); clickAndWait(By.xpath("//*[@id='actionbar']//span[contains(@uib-tooltip, 'Interpreter binding')]")); @@ -766,20 +689,20 @@ void testPerUserIsolatedAction() throws Exception { clickAndWait(By.xpath("//*[@id='actionbar']//span[contains(@uib-tooltip, 'Interpreter binding')]")); } - resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(cmdPsPython, + resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(CMD_PS_PYTHON, false, ProcessData.Types_Of_Data.OUTPUT); resultProcessNum = resultProcessNum.trim().replaceAll("\n", ""); assertEquals("1", resultProcessNum, "The number of python process is wrong"); - resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(cmdPsInterpreter, + resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(CMD_PS_INTERPRETER_PYTHON, false, ProcessData.Types_Of_Data.OUTPUT); resultProcessNum = resultProcessNum.trim().replaceAll("\n", ""); assertEquals("1", resultProcessNum, "The number of python interpreter process is wrong"); - interpreterModeActionsIT.logoutUser("user1"); + logoutUser("user1"); //step 5: (user2) login, come back note user2 made, restart python interpreter in note, check process, logout //System: Check if the number of python interpreter process is '0' //System: Check if the number of python process is '0' - interpreterModeActionsIT.authenticationUser("user2", "password3"); + authenticationUser("user2", "password3"); locator = By.xpath("//*[@id='notebook-names']//a[contains(@href, '" + user2noteId + "')]"); element = (new WebDriverWait(manager.getWebDriver(), Duration.ofSeconds(MAX_BROWSER_TIMEOUT_SEC))) @@ -809,21 +732,21 @@ void testPerUserIsolatedAction() throws Exception { 
clickAndWait(By.xpath("//*[@id='actionbar']//span[contains(@uib-tooltip, 'Interpreter binding')]")); } - resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(cmdPsPython, + resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(CMD_PS_PYTHON, false, ProcessData.Types_Of_Data.OUTPUT); resultProcessNum = resultProcessNum.trim().replaceAll("\n", ""); assertEquals("0", resultProcessNum, "The number of python process is wrong"); - resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(cmdPsInterpreter, + resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(CMD_PS_INTERPRETER_PYTHON, false, ProcessData.Types_Of_Data.OUTPUT); resultProcessNum = resultProcessNum.trim().replaceAll("\n", ""); assertEquals("0", resultProcessNum, "The number of python interpreter process is wrong"); - interpreterModeActionsIT.logoutUser("user2"); + logoutUser("user2"); //step 6: (user1) login, come back note user1 made, run first paragraph,logout // (user2) login, come back note user2 made, run first paragraph, check process, logout //System: Check if the number of python process is '2' //System: Check if the number of python interpreter process is '2' - interpreterModeActionsIT.authenticationUser("user1", "password2"); + authenticationUser("user1", "password2"); locator = By.xpath("//*[@id='notebook-names']//a[contains(@href, '" + user1noteId + "')]"); element = (new WebDriverWait(manager.getWebDriver(), Duration.ofSeconds(MAX_BROWSER_TIMEOUT_SEC))) @@ -840,9 +763,9 @@ void testPerUserIsolatedAction() throws Exception { waitForParagraph(1, "ERROR"); fail("Exception in InterpreterModeActionsIT while running Python Paragraph"); } - interpreterModeActionsIT.logoutUser("user1"); + logoutUser("user1"); - interpreterModeActionsIT.authenticationUser("user2", "password3"); + authenticationUser("user2", "password3"); locator = By.xpath("//*[@id='notebook-names']//a[contains(@href, '" + user2noteId + "')]"); element = (new 
WebDriverWait(manager.getWebDriver(), Duration.ofSeconds(MAX_BROWSER_TIMEOUT_SEC))) @@ -858,20 +781,20 @@ void testPerUserIsolatedAction() throws Exception { waitForParagraph(1, "ERROR"); fail("Exception in InterpreterModeActionsIT while running Python Paragraph"); } - resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(cmdPsPython, + resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(CMD_PS_PYTHON, false, ProcessData.Types_Of_Data.OUTPUT); resultProcessNum = resultProcessNum.trim().replaceAll("\n", ""); assertEquals("2", resultProcessNum, "The number of python process is wrong"); - resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(cmdPsInterpreter, + resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(CMD_PS_INTERPRETER_PYTHON, false, ProcessData.Types_Of_Data.OUTPUT); resultProcessNum = resultProcessNum.trim().replaceAll("\n", ""); assertEquals("2", resultProcessNum, "The number of python interpreter process is wrong"); - interpreterModeActionsIT.logoutUser("user2"); + logoutUser("user2"); //step 7: (admin) login, restart python interpreter in interpreter tab, check process, logout //System: Check if the number of python interpreter process is 0 //System: Check if the number of python process is 0 - interpreterModeActionsIT.authenticationUser("admin", "password1"); + authenticationUser("admin", "password1"); pollingWait(By.xpath("//div/button[contains(@class, 'nav-btn dropdown-toggle ng-scope')]"), MAX_BROWSER_TIMEOUT_SEC).click(); @@ -894,15 +817,15 @@ void testPerUserIsolatedAction() throws Exception { assertTrue(invisibilityStatus, "interpreter setting dialog visibility status"); } - resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(cmdPsPython, + resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(CMD_PS_PYTHON, false, ProcessData.Types_Of_Data.OUTPUT); resultProcessNum = resultProcessNum.trim().replaceAll("\n", ""); assertEquals("0", resultProcessNum, "The 
number of python process is wrong"); - resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(cmdPsInterpreter, + resultProcessNum = (String) CommandExecutor.executeCommandLocalHost(CMD_PS_INTERPRETER_PYTHON, false, ProcessData.Types_Of_Data.OUTPUT); resultProcessNum = resultProcessNum.trim().replaceAll("\n", ""); assertEquals("0", resultProcessNum, "The number of python interpreter process is wrong"); - interpreterModeActionsIT.logoutUser("admin"); + logoutUser("admin"); } catch (Exception e) { handleException("Exception in InterpreterModeActionsIT while testPerUserIsolatedAction ", e); } diff --git a/zeppelin-integration/src/test/java/org/apache/zeppelin/integration/ParagraphActionsIT.java b/zeppelin-integration/src/test/java/org/apache/zeppelin/integration/ParagraphActionsIT.java index de21495762a..3863d7df372 100644 --- a/zeppelin-integration/src/test/java/org/apache/zeppelin/integration/ParagraphActionsIT.java +++ b/zeppelin-integration/src/test/java/org/apache/zeppelin/integration/ParagraphActionsIT.java @@ -26,9 +26,13 @@ import org.apache.commons.lang3.StringUtils; import org.apache.zeppelin.AbstractZeppelinIT; +import org.apache.zeppelin.MiniZeppelinServer; import org.apache.zeppelin.WebDriverManager; import org.apache.zeppelin.ZeppelinITUtils; +import org.apache.zeppelin.test.DownloadUtils; +import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; @@ -40,16 +44,37 @@ class ParagraphActionsIT extends AbstractZeppelinIT { + private static MiniZeppelinServer zepServer; + + @BeforeAll + static void init() throws Exception { + String sparkHome = DownloadUtils.downloadSpark(); + + zepServer = new MiniZeppelinServer(ParagraphActionsIT.class.getSimpleName()); + zepServer.addInterpreter("md"); + zepServer.addInterpreter("sh"); + zepServer.addInterpreter("spark"); + 
zepServer.copyLogProperties(); + zepServer.copyBinDir(); + zepServer.start(true, ParagraphActionsIT.class.getSimpleName()); + TestHelper.configureSparkInterpreter(zepServer, sparkHome); + } + @BeforeEach public void startUp() throws IOException { - manager = new WebDriverManager(); + manager = new WebDriverManager(zepServer.getZeppelinConfiguration().getServerPort()); } @AfterEach - public void tearDown() throws IOException { + public void tearDownManager() throws IOException { manager.close(); } + @AfterAll + public static void tearDown() throws Exception { + zepServer.destroy(); + } + @Test void testCreateNewButton() throws Exception { try { @@ -598,6 +623,7 @@ void testShowAndHideLineNumbers() throws Exception { } @Test + @Disabled("flaky") void testEditOnDoubleClick() throws Exception { try { createNewNote(); @@ -655,7 +681,6 @@ void testEditOnDoubleClick() throws Exception { void testSingleDynamicFormTextInput() throws Exception { try { createNewNote(); - setTextOfParagraph(1, "%spark println(\"Hello \"+z.textbox(\"name\", \"world\")) "); runParagraph(1); diff --git a/zeppelin-integration/src/test/java/org/apache/zeppelin/integration/PersonalizeActionsIT.java b/zeppelin-integration/src/test/java/org/apache/zeppelin/integration/PersonalizeActionsIT.java index c9721cd6823..6b66197963c 100644 --- a/zeppelin-integration/src/test/java/org/apache/zeppelin/integration/PersonalizeActionsIT.java +++ b/zeppelin-integration/src/test/java/org/apache/zeppelin/integration/PersonalizeActionsIT.java @@ -16,11 +16,10 @@ */ package org.apache.zeppelin.integration; -import org.apache.commons.lang3.StringUtils; import org.apache.zeppelin.AbstractZeppelinIT; +import org.apache.zeppelin.MiniZeppelinServer; import org.apache.zeppelin.WebDriverManager; -import org.apache.zeppelin.ZeppelinITUtils; -import org.apache.zeppelin.conf.ZeppelinConfiguration; +import org.apache.zeppelin.test.DownloadUtils; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import 
org.junit.jupiter.api.BeforeAll; @@ -31,23 +30,15 @@ import org.openqa.selenium.support.ui.WebDriverWait; import org.openqa.selenium.support.ui.ExpectedConditions; import org.openqa.selenium.TimeoutException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import org.apache.commons.io.FileUtils; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.fail; -import java.io.File; import java.io.IOException; import java.time.Duration; public class PersonalizeActionsIT extends AbstractZeppelinIT { - private static final Logger LOG = LoggerFactory.getLogger(PersonalizeActionsIT.class); - - static String shiroPath; - static String authShiro = "[users]\n" + + private static final String AUTH_SHIRO = "[users]\n" + "admin = password1, admin\n" + "user1 = password2, user\n" + "[main]\n" + @@ -65,9 +56,26 @@ public class PersonalizeActionsIT extends AbstractZeppelinIT { static String originalShiro = ""; + private static MiniZeppelinServer zepServer; + + @BeforeAll + static void init() throws Exception { + String sparkHome = DownloadUtils.downloadSpark(); + + zepServer = new MiniZeppelinServer(PersonalizeActionsIT.class.getSimpleName()); + zepServer.addConfigFile("shiro.ini", AUTH_SHIRO); + zepServer.addInterpreter("md"); + zepServer.addInterpreter("python"); + zepServer.addInterpreter("spark"); + zepServer.copyLogProperties(); + zepServer.copyBinDir(); + zepServer.start(true, PersonalizeActionsIT.class.getSimpleName()); + TestHelper.configureSparkInterpreter(zepServer, sparkHome); + } + @BeforeEach - public void startUpManager() throws IOException { - manager = new WebDriverManager(); + public void startUp() throws IOException { + manager = new WebDriverManager(zepServer.getZeppelinConfiguration().getServerPort()); } @AfterEach @@ -75,40 +83,12 @@ public void tearDownManager() throws IOException { manager.close(); } - @BeforeAll - public static void startUp() throws IOException { - try { - 
System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_HOME.getVarName(), new File("../").getAbsolutePath()); - ZeppelinConfiguration conf = ZeppelinConfiguration.create(); - shiroPath = conf.getAbsoluteDir(String.format("%s/shiro.ini", conf.getConfDir())); - File file = new File(shiroPath); - if (file.exists()) { - originalShiro = StringUtils.join(FileUtils.readLines(file, "UTF-8"), "\n"); - } - FileUtils.write(file, authShiro, "UTF-8"); - } catch (IOException e) { - LOG.error("Error in PersonalizeActionsIT startUp::", e); - } - ZeppelinITUtils.restartZeppelin(); - } - @AfterAll - public static void tearDown() throws IOException { - try { - if (!StringUtils.isBlank(shiroPath)) { - File file = new File(shiroPath); - if (StringUtils.isBlank(originalShiro)) { - FileUtils.deleteQuietly(file); - } else { - FileUtils.write(file, originalShiro, "UTF-8"); - } - } - } catch (IOException e) { - LOG.error("Error in PersonalizeActionsIT tearDown::", e); - } - ZeppelinITUtils.restartZeppelin(); + public static void tearDown() throws Exception { + zepServer.destroy(); } + private void setParagraphText(String text) { setTextOfParagraph(1, "%md\\n # " + text); runParagraph(1); @@ -119,9 +99,7 @@ private void setParagraphText(String text) { void testSimpleAction() throws Exception { try { // step 1 : (admin) create a new note, run a paragraph and turn on personalized mode - AuthenticationIT authenticationIT = new AuthenticationIT(); - PersonalizeActionsIT personalizeActionsIT = new PersonalizeActionsIT(); - authenticationIT.authenticationUser("admin", "password1"); + authenticationUser("admin", "password1"); By locator = By.xpath("//div[contains(@class, \"col-md-4\")]/div/h5/a[contains(.,'Create new" + " note')]"); WebDriverWait wait = @@ -133,7 +111,7 @@ void testSimpleAction() throws Exception { String noteId = manager.getWebDriver().getCurrentUrl() .substring(manager.getWebDriver().getCurrentUrl().lastIndexOf("/") + 1); waitForParagraph(1, "READY"); - 
personalizeActionsIT.setParagraphText("Before"); + setParagraphText("Before"); assertEquals("Before", manager.getWebDriver() .findElement( By.xpath(getParagraphXPath(1) + "//div[contains(@class, 'markdown-body')]")) @@ -142,10 +120,10 @@ void testSimpleAction() throws Exception { "//button[contains(@uib-tooltip, 'Switch to personal mode')]"), MAX_BROWSER_TIMEOUT_SEC).click(); clickAndWait(By.xpath("//div[@class='modal-dialog'][contains(.,'Do you want to personalize your analysis?')" + "]//div[@class='modal-footer']//button[contains(.,'OK')]")); - authenticationIT.logoutUser("admin"); + logoutUser("admin"); // step 2 : (user1) make sure it is on personalized mode and 'Before' in result of paragraph - authenticationIT.authenticationUser("user1", "password2"); + authenticationUser("user1", "password2"); locator = By.xpath("//*[@id='notebook-names']//a[contains(@href, '" + noteId + "')]"); wait = new WebDriverWait(manager.getWebDriver(), Duration.ofSeconds(MAX_BROWSER_TIMEOUT_SEC)); element = wait.until(ExpectedConditions.visibilityOfElementLocated(locator)); @@ -163,25 +141,25 @@ void testSimpleAction() throws Exception { .findElement( By.xpath(getParagraphXPath(1) + "//div[contains(@class, 'markdown-body')]")) .getText()); - authenticationIT.logoutUser("user1"); + logoutUser("user1"); // step 3 : (admin) change paragraph contents to 'After' and check result of paragraph - authenticationIT.authenticationUser("admin", "password1"); + authenticationUser("admin", "password1"); locator = By.xpath("//*[@id='notebook-names']//a[contains(@href, '" + noteId + "')]"); element = wait.until(ExpectedConditions.visibilityOfElementLocated(locator)); if (element.isDisplayed()) { pollingWait(By.xpath("//*[@id='notebook-names']//a[contains(@href, '" + noteId + "')]"), MAX_BROWSER_TIMEOUT_SEC).click(); } waitForParagraph(1, "FINISHED"); - personalizeActionsIT.setParagraphText("After"); + setParagraphText("After"); assertEquals("After", manager.getWebDriver() .findElement( 
By.xpath(getParagraphXPath(1) + "//div[contains(@class, 'markdown-body')]")) .getText()); - authenticationIT.logoutUser("admin"); + logoutUser("admin"); // step 4 : (user1) check whether result is 'Before' or not - authenticationIT.authenticationUser("user1", "password2"); + authenticationUser("user1", "password2"); locator = By.xpath("//*[@id='notebook-names']//a[contains(@href, '" + noteId + "')]"); element = wait.until(ExpectedConditions.visibilityOfElementLocated(locator)); if (element.isDisplayed()) { @@ -191,7 +169,7 @@ void testSimpleAction() throws Exception { .findElement( By.xpath(getParagraphXPath(1) + "//div[contains(@class, 'markdown-body')]")) .getText()); - authenticationIT.logoutUser("user1"); + logoutUser("user1"); } catch (Exception e) { handleException("Exception in PersonalizeActionsIT while testSimpleAction ", e); } @@ -201,8 +179,7 @@ void testSimpleAction() throws Exception { void testGraphAction() throws Exception { try { // step 1 : (admin) create a new note, run a paragraph, change active graph to 'Bar chart', turn on personalized mode - AuthenticationIT authenticationIT = new AuthenticationIT(); - authenticationIT.authenticationUser("admin", "password1"); + authenticationUser("admin", "password1"); By locator = By.xpath("//div[contains(@class, \"col-md-4\")]/div/h5/a[contains(.,'Create new" + " note')]"); WebDriverWait wait = @@ -239,12 +216,12 @@ void testGraphAction() throws Exception { + "//button[contains(@class," + "'btn btn-default btn-sm ng-binding ng-scope active')]//i")).getAttribute("class")); - authenticationIT.logoutUser("admin"); + logoutUser("admin"); manager.getWebDriver().navigate().refresh(); // step 2 : (user1) make sure it is on personalized mode and active graph is 'Bar chart', // try to change active graph to 'Table' and then check result - authenticationIT.authenticationUser("user1", "password2"); + authenticationUser("user1", "password2"); locator = By.xpath("//*[@id='notebook-names']//a[contains(@href, '" + noteId 
+ "')]"); element = wait.until(ExpectedConditions.visibilityOfElementLocated(locator)); if (element.isDisplayed()) { @@ -265,11 +242,11 @@ void testGraphAction() throws Exception { assertEquals("fa fa-table", manager.getWebDriver().findElement(By.xpath(getParagraphXPath(1) + "//button[contains(@class," + "'btn btn-default btn-sm ng-binding ng-scope active')]//i")).getAttribute("class")); - authenticationIT.logoutUser("user1"); + logoutUser("user1"); manager.getWebDriver().navigate().refresh(); // step 3: (admin) Admin view is still table because of it's personalized! - authenticationIT.authenticationUser("admin", "password1"); + authenticationUser("admin", "password1"); locator = By.xpath("//*[@id='notebook-names']//a[contains(@href, '" + noteId + "')]"); element = wait.until(ExpectedConditions.visibilityOfElementLocated(locator)); if (element.isDisplayed()) { @@ -281,7 +258,7 @@ void testGraphAction() throws Exception { + "//button[contains(@class," + "'btn btn-default btn-sm ng-binding ng-scope active')]//i")).getAttribute("class")); - authenticationIT.logoutUser("admin"); + logoutUser("admin"); } catch (Exception e) { handleException("Exception in PersonalizeActionsIT while testGraphAction ", e); } @@ -291,8 +268,7 @@ void testGraphAction() throws Exception { void testDynamicFormAction() throws Exception { try { // step 1 : (admin) login, create a new note, run a paragraph with data of spark tutorial, logout. 
- AuthenticationIT authenticationIT = new AuthenticationIT(); - authenticationIT.authenticationUser("admin", "password1"); + authenticationUser("admin", "password1"); By locator = By.xpath("//div[contains(@class, \"col-md-4\")]/div/h5/a[contains(.,'Create new" + " note')]"); WebDriverWait wait = @@ -319,11 +295,11 @@ void testDynamicFormAction() throws Exception { "//button[contains(@uib-tooltip, 'Switch to personal mode')]"), MAX_BROWSER_TIMEOUT_SEC).click(); clickAndWait(By.xpath("//div[@class='modal-dialog'][contains(.,'Do you want to personalize your analysis?')" + "]//div[@class='modal-footer']//button[contains(.,'OK')]")); - authenticationIT.logoutUser("admin"); + logoutUser("admin"); // step 2 : (user1) make sure it is on personalized mode and dynamic form value is 'Before', // try to change dynamic form value to 'After' and then check result - authenticationIT.authenticationUser("user1", "password2"); + authenticationUser("user1", "password2"); locator = By.xpath("//*[@id='notebook-names']//a[contains(@href, '" + noteId + "')]"); element = wait.until(ExpectedConditions.visibilityOfElementLocated(locator)); if (element.isDisplayed()) { @@ -356,7 +332,7 @@ void testDynamicFormAction() throws Exception { .findElement(By .xpath(getParagraphXPath(1) + "//div[contains(@class, 'text plainTextContent')]")) .getText()); - authenticationIT.logoutUser("user1"); + logoutUser("user1"); } catch (Exception e) { handleException("Exception in PersonalizeActionsIT while testGraphAction ", e); diff --git a/zeppelin-integration/src/test/java/org/apache/zeppelin/integration/SparkParagraphIT.java b/zeppelin-integration/src/test/java/org/apache/zeppelin/integration/SparkParagraphIT.java index 98b77a2e677..a89a4c4db06 100644 --- a/zeppelin-integration/src/test/java/org/apache/zeppelin/integration/SparkParagraphIT.java +++ b/zeppelin-integration/src/test/java/org/apache/zeppelin/integration/SparkParagraphIT.java @@ -19,8 +19,12 @@ import org.apache.zeppelin.AbstractZeppelinIT; 
+import org.apache.zeppelin.MiniZeppelinServer; import org.apache.zeppelin.ZeppelinITUtils; +import org.apache.zeppelin.test.DownloadUtils; +import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.apache.zeppelin.WebDriverManager; @@ -38,19 +42,38 @@ class SparkParagraphIT extends AbstractZeppelinIT { + private static MiniZeppelinServer zepServer; + + @BeforeAll + static void init() throws Exception { + String sparkHome = DownloadUtils.downloadSpark(); + + zepServer = new MiniZeppelinServer(SparkParagraphIT.class.getSimpleName()); + zepServer.addInterpreter("spark"); + zepServer.copyLogProperties(); + zepServer.copyBinDir(); + zepServer.start(true, SparkParagraphIT.class.getSimpleName()); + TestHelper.configureSparkInterpreter(zepServer, sparkHome); + } + @BeforeEach public void startUp() throws IOException { - manager = new WebDriverManager(); + manager = new WebDriverManager(zepServer.getZeppelinConfiguration().getServerPort()); createNewNote(); waitForParagraph(1, "READY"); } @AfterEach - public void tearDown() throws IOException { + public void tearDownManager() throws IOException { deleteTestNotebook(manager.getWebDriver()); manager.close(); } + @AfterAll + public static void tearDown() throws Exception { + zepServer.destroy(); + } + @Test void testSpark() throws Exception { try { diff --git a/zeppelin-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinIT.java b/zeppelin-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinIT.java index f035979ec80..3403e486559 100644 --- a/zeppelin-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinIT.java +++ b/zeppelin-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinIT.java @@ -24,9 +24,12 @@ import java.io.IOException; import org.apache.zeppelin.AbstractZeppelinIT; +import org.apache.zeppelin.MiniZeppelinServer; 
import org.apache.zeppelin.WebDriverManager; import org.apache.zeppelin.ZeppelinITUtils; +import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; @@ -52,17 +55,35 @@ class ZeppelinIT extends AbstractZeppelinIT { private static final Logger LOG = LoggerFactory.getLogger(ZeppelinIT.class); + private static MiniZeppelinServer zepServer; + + @BeforeAll + static void init() throws Exception { + + zepServer = new MiniZeppelinServer(ZeppelinIT.class.getSimpleName()); + zepServer.addInterpreter("md"); + zepServer.addInterpreter("angular"); + zepServer.addInterpreter("sh"); + zepServer.copyLogProperties(); + zepServer.copyBinDir(); + zepServer.start(true, ZeppelinIT.class.getSimpleName()); + } @BeforeEach public void startUp() throws IOException { - manager = new WebDriverManager(); + manager = new WebDriverManager(zepServer.getZeppelinConfiguration().getServerPort()); } @AfterEach - public void tearDown() throws IOException { + public void tearDownManager() throws IOException { manager.close(); } + @AfterAll + public static void tearDown() throws Exception { + zepServer.destroy(); + } + @Test @Disabled("Doesnt work") void testAngularDisplay() throws Exception { diff --git a/zeppelin-interpreter-integration/pom.xml b/zeppelin-interpreter-integration/pom.xml index 32a2000da0d..f902e0dbffc 100644 --- a/zeppelin-interpreter-integration/pom.xml +++ b/zeppelin-interpreter-integration/pom.xml @@ -23,11 +23,11 @@ zeppelin org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 zeppelin-interpreter-integration - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 jar Zeppelin: Interpreter Integration Test @@ -58,10 +58,6 @@ zeppelin-zengine ${project.version} - - com.google.guava - guava - org.ow2.asm asm @@ -69,12 +65,6 @@ - - com.google.guava - guava - 20.0 - - org.apache.zeppelin zeppelin-server @@ -99,12 +89,6 @@ 
${project.version} tests test - - - com.google.guava - guava - - @@ -115,6 +99,13 @@ test + + org.apache.zeppelin + zeppelin-test + ${project.version} + test + + org.junit.jupiter junit-jupiter-engine @@ -134,6 +125,20 @@ test + + org.awaitility + awaitility + test + + + + org.hamcrest + hamcrest + + + + + @@ -178,21 +183,19 @@ ${hadoop3.3.version} - hadoop-client-runtime - hadoop-client-minicluster org.apache.hadoop - ${hadoop-client-runtime.artifact} + hadoop-client-runtime test org.apache.hadoop - ${hadoop-client-minicluster.artifact} + hadoop-client-minicluster test diff --git a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/FlinkIntegrationTest.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/FlinkIntegrationTest.java index 72dff00c9e4..6d6eae07af6 100644 --- a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/FlinkIntegrationTest.java +++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/FlinkIntegrationTest.java @@ -18,11 +18,14 @@ package org.apache.zeppelin.integration; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnException; +import org.apache.zeppelin.test.DownloadUtils; +import org.apache.zeppelin.MiniZeppelinServer; import org.apache.zeppelin.interpreter.ExecutionContext; import org.apache.zeppelin.interpreter.Interpreter; import org.apache.zeppelin.interpreter.InterpreterContext; @@ -31,10 +34,9 @@ import org.apache.zeppelin.interpreter.InterpreterResult; import org.apache.zeppelin.interpreter.InterpreterSetting; import 
org.apache.zeppelin.interpreter.InterpreterSettingManager; -import org.apache.zeppelin.interpreter.integration.DownloadUtils; import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -50,12 +52,11 @@ public abstract class FlinkIntegrationTest { private static Logger LOGGER = LoggerFactory.getLogger(FlinkIntegrationTest.class); private static MiniHadoopCluster hadoopCluster; - private static MiniZeppelin zeppelin; + private static MiniZeppelinServer zepServer; private static InterpreterFactory interpreterFactory; private static InterpreterSettingManager interpreterSettingManager; private String flinkVersion; - private String scalaVersion; private String hadoopHome; private String flinkHome; @@ -63,34 +64,42 @@ public void download(String flinkVersion, String scalaVersion) throws IOExceptio LOGGER.info("Testing FlinkVersion: " + flinkVersion); LOGGER.info("Testing ScalaVersion: " + scalaVersion); this.flinkVersion = flinkVersion; - this.scalaVersion = scalaVersion; this.flinkHome = DownloadUtils.downloadFlink(flinkVersion, scalaVersion); - this.hadoopHome = DownloadUtils.downloadHadoop("2.7.7"); + this.hadoopHome = DownloadUtils.downloadHadoop("3.3.6"); } @BeforeAll - public static void setUp() throws IOException { + public static void setUp() throws Exception { Configuration conf = new Configuration(); conf.setBoolean(YarnConfiguration.YARN_MINICLUSTER_FIXED_PORTS, true); + conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, "target/hadoop-minicluster"); hadoopCluster = new MiniHadoopCluster(conf); hadoopCluster.start(); - zeppelin = new MiniZeppelin(); - zeppelin.start(FlinkIntegrationTest.class); - interpreterFactory = zeppelin.getInterpreterFactory(); - interpreterSettingManager = zeppelin.getInterpreterSettingManager(); + zepServer = new 
MiniZeppelinServer(FlinkIntegrationTest.class.getSimpleName()); + zepServer.addInterpreter("flink"); + zepServer.addInterpreter("flink-cmd"); + zepServer.copyLogProperties(); + zepServer.copyBinDir(); + zepServer.addLauncher("YarnInterpreterLauncher"); + zepServer.addLauncher("FlinkInterpreterLauncher"); + zepServer.start(); } @AfterAll - public static void tearDown() throws IOException { - if (zeppelin != null) { - zeppelin.stop(); - } + public static void tearDown() throws Exception { + zepServer.destroy(); if (hadoopCluster != null) { hadoopCluster.stop(); } } + @BeforeEach + void setup() { + interpreterSettingManager = zepServer.getServiceLocator().getService(InterpreterSettingManager.class); + interpreterFactory = new InterpreterFactory(interpreterSettingManager); + } + private void testInterpreterBasics() throws IOException, InterpreterException { // test FlinkInterpreter Interpreter flinkInterpreter = interpreterFactory.getInterpreter("flink", new ExecutionContext("user1", "note1", "flink")); @@ -128,7 +137,7 @@ public void testFlinkCmd() throws InterpreterException { public void testLocalMode() throws IOException, YarnException, InterpreterException { InterpreterSetting flinkInterpreterSetting = interpreterSettingManager.getInterpreterSettingByName("flink"); flinkInterpreterSetting.setProperty("FLINK_HOME", flinkHome); - flinkInterpreterSetting.setProperty("ZEPPELIN_CONF_DIR", zeppelin.getZeppelinConfDir().getAbsolutePath()); + flinkInterpreterSetting.setProperty("ZEPPELIN_CONF_DIR", zepServer.getZeppelinConfiguration().getConfDir()); flinkInterpreterSetting.setProperty("flink.execution.mode", "local"); testInterpreterBasics(); @@ -141,14 +150,13 @@ public void testLocalMode() throws IOException, YarnException, InterpreterExcept interpreterSettingManager.close(); } - // TODO(zjffdu) enable it when make yarn integration test work @Test public void testYarnMode() throws IOException, InterpreterException, YarnException { InterpreterSetting 
flinkInterpreterSetting = interpreterSettingManager.getInterpreterSettingByName("flink"); flinkInterpreterSetting.setProperty("HADOOP_CONF_DIR", hadoopCluster.getConfigPath()); flinkInterpreterSetting.setProperty("FLINK_HOME", flinkHome); flinkInterpreterSetting.setProperty("PATH", hadoopHome + "/bin:" + System.getenv("PATH")); - flinkInterpreterSetting.setProperty("ZEPPELIN_CONF_DIR", zeppelin.getZeppelinConfDir().getAbsolutePath()); + flinkInterpreterSetting.setProperty("ZEPPELIN_CONF_DIR", zepServer.getZeppelinConfiguration().getConfDir()); flinkInterpreterSetting.setProperty("flink.execution.mode", "yarn"); flinkInterpreterSetting.setProperty("zeppelin.flink.run.asLoginUser", "false"); @@ -172,7 +180,7 @@ public void testYarnApplicationMode() throws IOException, InterpreterException, flinkInterpreterSetting.setProperty("HADOOP_CONF_DIR", hadoopCluster.getConfigPath()); flinkInterpreterSetting.setProperty("FLINK_HOME", flinkHome); flinkInterpreterSetting.setProperty("PATH", hadoopHome + "/bin:" + System.getenv("PATH")); - flinkInterpreterSetting.setProperty("ZEPPELIN_CONF_DIR", zeppelin.getZeppelinConfDir().getAbsolutePath()); + flinkInterpreterSetting.setProperty("ZEPPELIN_CONF_DIR", zepServer.getZeppelinConfiguration().getConfDir()); flinkInterpreterSetting.setProperty("flink.execution.mode", "yarn-application"); // parameters with whitespace flinkInterpreterSetting.setProperty("flink.yarn.appName", "hello flink"); diff --git a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/JdbcIntegrationTest.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/JdbcIntegrationTest.java index c0457a80799..92178c27460 100644 --- a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/JdbcIntegrationTest.java +++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/JdbcIntegrationTest.java @@ -17,6 +17,7 @@ package org.apache.zeppelin.integration; +import 
org.apache.zeppelin.MiniZeppelinServer; import org.apache.zeppelin.dep.Dependency; import org.apache.zeppelin.interpreter.ExecutionContext; import org.apache.zeppelin.interpreter.Interpreter; @@ -29,35 +30,41 @@ import org.apache.zeppelin.user.AuthenticationInfo; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; -import java.io.IOException; import java.util.Arrays; public class JdbcIntegrationTest { - private static MiniZeppelin zeppelin; private static InterpreterFactory interpreterFactory; private static InterpreterSettingManager interpreterSettingManager; + private static MiniZeppelinServer zepServer; @BeforeAll - public static void setUp() throws IOException { - zeppelin = new MiniZeppelin(); - zeppelin.start(JdbcIntegrationTest.class); - interpreterFactory = zeppelin.getInterpreterFactory(); - interpreterSettingManager = zeppelin.getInterpreterSettingManager(); + public static void setUp() throws Exception { + zepServer = new MiniZeppelinServer(JdbcIntegrationTest.class.getSimpleName()); + zepServer.addInterpreter("jdbc"); + zepServer.addInterpreter("python"); + zepServer.copyBinDir(); + zepServer.copyLogProperties(); + zepServer.start(); + } + + @BeforeEach + void setup() { + interpreterSettingManager = zepServer.getServiceLocator().getService(InterpreterSettingManager.class); + interpreterFactory = new InterpreterFactory(interpreterSettingManager); } @AfterAll - public static void tearDown() throws IOException { - if (zeppelin != null) { - zeppelin.stop(); - } + public static void tearDown() throws Exception { + zepServer.destroy(); } @Test @@ -68,7 +75,7 @@ void testMySql() throws InterpreterException, InterruptedException { interpreterSetting.setProperty("default.user", "root"); interpreterSetting.setProperty("default.password", 
"root"); - Dependency dependency = new Dependency("mysql:mysql-connector-java:5.1.46"); + Dependency dependency = new Dependency("mysql:mysql-connector-java:5.1.49"); interpreterSetting.setDependencies(Arrays.asList(dependency)); interpreterSettingManager.restart(interpreterSetting.getId()); interpreterSetting.waitForReady(60 * 1000); diff --git a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/MiniZeppelin.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/MiniZeppelin.java deleted file mode 100644 index 0931fe82a05..00000000000 --- a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/MiniZeppelin.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.zeppelin.integration; - -import org.apache.commons.io.FileUtils; -import org.apache.zeppelin.conf.ZeppelinConfiguration; -import org.apache.zeppelin.display.AngularObjectRegistryListener; -import org.apache.zeppelin.helium.ApplicationEventListener; -import org.apache.zeppelin.interpreter.InterpreterFactory; -import org.apache.zeppelin.interpreter.InterpreterSettingManager; -import org.apache.zeppelin.interpreter.remote.RemoteInterpreterProcessListener; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.io.IOException; - -import static org.mockito.Mockito.mock; - -public class MiniZeppelin { - - protected static final Logger LOGGER = LoggerFactory.getLogger(MiniZeppelin.class); - - protected InterpreterSettingManager interpreterSettingManager; - protected InterpreterFactory interpreterFactory; - protected File zeppelinHome; - private File confDir; - private File notebookDir; - protected ZeppelinConfiguration conf; - - public void start(Class clazz) throws IOException { - zeppelinHome = new File(".."); - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_HOME.getVarName(), - zeppelinHome.getAbsolutePath()); - confDir = new File(zeppelinHome, "conf_" + clazz.getSimpleName()); - notebookDir = new File(zeppelinHome, "notebook_" + clazz.getSimpleName()); - confDir.mkdirs(); - notebookDir.mkdirs(); - LOGGER.info("ZEPPELIN_HOME: " + zeppelinHome.getAbsolutePath()); - FileUtils.copyFile(new File(zeppelinHome, "conf/log4j.properties"), new File(confDir, "log4j.properties")); - FileUtils.copyFile(new File(zeppelinHome, "conf/log4j2.properties"), new File(confDir, "log4j2.properties")); - FileUtils.copyFile(new File(zeppelinHome, "conf/log4j_yarn_cluster.properties"), new File(confDir, "log4j_yarn_cluster.properties")); - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_CONF_DIR.getVarName(), confDir.getAbsolutePath()); - 
System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_DIR.getVarName(), notebookDir.getAbsolutePath()); - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_CONNECT_TIMEOUT.getVarName(), "120000"); - conf = ZeppelinConfiguration.create(); - interpreterSettingManager = new InterpreterSettingManager(conf, - mock(AngularObjectRegistryListener.class), mock(RemoteInterpreterProcessListener.class), mock(ApplicationEventListener.class)); - interpreterFactory = new InterpreterFactory(interpreterSettingManager); - } - - public void stop() throws IOException { - interpreterSettingManager.close(); - FileUtils.deleteDirectory(confDir); - FileUtils.deleteDirectory(notebookDir); - } - - public File getZeppelinHome() { - return zeppelinHome; - } - - public File getZeppelinConfDir() { - return confDir; - } - - public InterpreterFactory getInterpreterFactory() { - return interpreterFactory; - } - - public InterpreterSettingManager getInterpreterSettingManager() { - return interpreterSettingManager; - } -} diff --git a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ShellIntegrationTest.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ShellIntegrationTest.java index f3a24b86bed..6af3dbf573f 100644 --- a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ShellIntegrationTest.java +++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ShellIntegrationTest.java @@ -17,6 +17,7 @@ package org.apache.zeppelin.integration; +import org.apache.zeppelin.MiniZeppelinServer; import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.notebook.Notebook; import org.apache.zeppelin.notebook.Paragraph; @@ -26,26 +27,35 @@ import org.apache.zeppelin.utils.TestUtils; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; 
import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; -public class ShellIntegrationTest extends AbstractTestRestApi { +class ShellIntegrationTest extends AbstractTestRestApi { + private static MiniZeppelinServer zepServer; @BeforeAll - public static void setUp() throws Exception { - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_HELIUM_REGISTRY.getVarName(), - "helium"); - AbstractTestRestApi.startUp(ShellIntegrationTest.class.getSimpleName()); + static void init() throws Exception { + zepServer = new MiniZeppelinServer(ShellIntegrationTest.class.getSimpleName()); + zepServer.addInterpreter("sh"); + zepServer.copyBinDir(); + zepServer.getZeppelinConfiguration().setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_HELIUM_REGISTRY.getVarName(), + "helium"); + zepServer.start(); } @AfterAll - public static void destroy() throws Exception { - AbstractTestRestApi.shutDown(); + static void destroy() throws Exception { + zepServer.destroy(); + } + + @BeforeEach + void setup() { + conf = zepServer.getZeppelinConfiguration(); } @Test @@ -67,7 +77,6 @@ void testBasicShell() throws IOException { p.setText("%sh invalid_cmd"); note.run(p.getId(), true); assertEquals(Job.Status.ERROR, p.getStatus()); - assertTrue(p.getReturn().message().get(0).getData().contains("command not found"), p.getReturn().toString()); // test shell environment variable p.setText("%sh a='hello world'\n" + diff --git a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest.java index d6117956873..4286f824f11 100644 --- a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest.java +++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest.java @@ -17,7 +17,6 @@ 
package org.apache.zeppelin.integration; -import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest; import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse; @@ -27,6 +26,9 @@ import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.maven.model.Model; import org.apache.maven.model.io.xpp3.MavenXpp3Reader; +import org.apache.zeppelin.test.DownloadUtils; +import org.apache.zeppelin.MiniZeppelinServer; +import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.interpreter.ExecutionContext; import org.apache.zeppelin.interpreter.Interpreter; import org.apache.zeppelin.interpreter.InterpreterContext; @@ -36,7 +38,6 @@ import org.apache.zeppelin.interpreter.InterpreterResult; import org.apache.zeppelin.interpreter.InterpreterSetting; import org.apache.zeppelin.interpreter.InterpreterSettingManager; -import org.apache.zeppelin.interpreter.integration.DownloadUtils; import org.codehaus.plexus.util.xml.pull.XmlPullParserException; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; @@ -61,7 +62,6 @@ public abstract class SparkIntegrationTest { private static Logger LOGGER = LoggerFactory.getLogger(SparkIntegrationTest.class); private static MiniHadoopCluster hadoopCluster; - private static MiniZeppelin zeppelin; private static InterpreterFactory interpreterFactory; protected static InterpreterSettingManager interpreterSettingManager; @@ -69,6 +69,8 @@ public abstract class SparkIntegrationTest { private String hadoopVersion; private String sparkHome; + private static MiniZeppelinServer zepServer; + public void prepareSpark(String sparkVersion, String hadoopVersion) { LOGGER.info("Testing Spark Version: " + sparkVersion); LOGGER.info("Testing Hadoop Version: " + hadoopVersion); @@ -78,21 +80,26 @@ public void prepareSpark(String sparkVersion, String hadoopVersion) { } @BeforeAll - public static void 
setUp() throws IOException { + static void init() throws Exception { hadoopCluster = new MiniHadoopCluster(); hadoopCluster.start(); - zeppelin = new MiniZeppelin(); - zeppelin.start(SparkIntegrationTest.class); - interpreterFactory = zeppelin.getInterpreterFactory(); - interpreterSettingManager = zeppelin.getInterpreterSettingManager(); + zepServer = new MiniZeppelinServer(SparkIntegrationTest.class.getSimpleName()); + zepServer.addInterpreter("sh"); + zepServer.addInterpreter("spark"); + zepServer.addInterpreter("spark-submit"); + zepServer.copyBinDir(); + zepServer.copyLogProperties(); + zepServer.getZeppelinConfiguration().setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_HELIUM_REGISTRY.getVarName(), + "helium"); + zepServer.start(); + interpreterSettingManager = zepServer.getServiceLocator().getService(InterpreterSettingManager.class); + interpreterFactory = zepServer.getServiceLocator().getService(InterpreterFactory.class); } @AfterAll - public static void tearDown() throws IOException { - if (zeppelin != null) { - zeppelin.stop(); - } + public static void tearDown() throws Exception { + zepServer.destroy(); if (hadoopCluster != null) { hadoopCluster.stop(); } @@ -122,8 +129,8 @@ protected void setUpSparkInterpreterSetting(InterpreterSetting interpreterSettin * @throws IOException */ private void setupIvySettings(InterpreterSetting interpreterSetting) throws IOException { - File ivysettings = new File(zeppelin.getZeppelinConfDir(), "ivysettings.xml"); - FileUtils.copyToFile(SparkIntegrationTest.class.getResourceAsStream("/ivysettings.xml"), ivysettings); + String ivysettingsContent = IOUtils.toString(SparkIntegrationTest.class.getResourceAsStream("/ivysettings.xml"), StandardCharsets.UTF_8.name()); + File ivysettings = zepServer.addConfigFile("ivysettings.xml", ivysettingsContent); interpreterSetting.setProperty("spark.jars.ivySettings", ivysettings.getAbsolutePath()); } @@ -197,7 +204,7 @@ public void testLocalMode() throws IOException, YarnException, 
InterpreterExcept InterpreterSetting sparkInterpreterSetting = interpreterSettingManager.getInterpreterSettingByName("spark"); sparkInterpreterSetting.setProperty("spark.master", "local[*]"); sparkInterpreterSetting.setProperty("SPARK_HOME", sparkHome); - sparkInterpreterSetting.setProperty("ZEPPELIN_CONF_DIR", zeppelin.getZeppelinConfDir().getAbsolutePath()); + sparkInterpreterSetting.setProperty("ZEPPELIN_CONF_DIR", zepServer.getZeppelinConfiguration().getConfDir()); sparkInterpreterSetting.setProperty("zeppelin.spark.useHiveContext", "false"); sparkInterpreterSetting.setProperty("zeppelin.pyspark.useIPython", "false"); sparkInterpreterSetting.setProperty("zeppelin.spark.scala.color", "false"); @@ -226,7 +233,7 @@ public void testYarnClientMode() throws IOException, YarnException, InterruptedE sparkInterpreterSetting.setProperty("spark.master", "yarn-client"); sparkInterpreterSetting.setProperty("HADOOP_CONF_DIR", hadoopCluster.getConfigPath()); sparkInterpreterSetting.setProperty("SPARK_HOME", sparkHome); - sparkInterpreterSetting.setProperty("ZEPPELIN_CONF_DIR", zeppelin.getZeppelinConfDir().getAbsolutePath()); + sparkInterpreterSetting.setProperty("ZEPPELIN_CONF_DIR", zepServer.getZeppelinConfiguration().getConfDir()); sparkInterpreterSetting.setProperty("zeppelin.spark.useHiveContext", "false"); sparkInterpreterSetting.setProperty("zeppelin.pyspark.useIPython", "false"); sparkInterpreterSetting.setProperty("PYSPARK_PYTHON", getPythonExec()); @@ -235,6 +242,7 @@ public void testYarnClientMode() throws IOException, YarnException, InterruptedE sparkInterpreterSetting.setProperty("zeppelin.spark.deprecatedMsg.show", "false"); sparkInterpreterSetting.setProperty("spark.user.name", "#{user}"); sparkInterpreterSetting.setProperty("zeppelin.spark.run.asLoginUser", "false"); + sparkInterpreterSetting.setProperty("spark.r.command", getRScriptExec()); try { setUpSparkInterpreterSetting(sparkInterpreterSetting); @@ -279,10 +287,13 @@ public void testYarnClusterMode() 
throws IOException, YarnException, Interrupted sparkInterpreterSetting.setProperty("spark.master", "yarn-cluster"); sparkInterpreterSetting.setProperty("HADOOP_CONF_DIR", hadoopCluster.getConfigPath()); sparkInterpreterSetting.setProperty("SPARK_HOME", sparkHome); - sparkInterpreterSetting.setProperty("ZEPPELIN_CONF_DIR", zeppelin.getZeppelinConfDir().getAbsolutePath()); + sparkInterpreterSetting.setProperty("ZEPPELIN_CONF_DIR", zepServer.getZeppelinConfiguration().getConfDir()); sparkInterpreterSetting.setProperty("zeppelin.spark.useHiveContext", "false"); sparkInterpreterSetting.setProperty("zeppelin.pyspark.useIPython", "false"); sparkInterpreterSetting.setProperty("PYSPARK_PYTHON", getPythonExec()); + sparkInterpreterSetting.setProperty("spark.pyspark.python", getPythonExec()); + sparkInterpreterSetting.setProperty("zeppelin.R.cmd", getRExec()); + sparkInterpreterSetting.setProperty("spark.r.command", getRScriptExec()); sparkInterpreterSetting.setProperty("spark.driver.memory", "512m"); sparkInterpreterSetting.setProperty("zeppelin.spark.scala.color", "false"); sparkInterpreterSetting.setProperty("zeppelin.spark.deprecatedMsg.show", "false"); @@ -346,7 +357,7 @@ public void testScopedMode() throws InterpreterException { sparkInterpreterSetting.setProperty("spark.master", "local[*]"); sparkInterpreterSetting.setProperty("spark.submit.deployMode", "client"); sparkInterpreterSetting.setProperty("SPARK_HOME", sparkHome); - sparkInterpreterSetting.setProperty("ZEPPELIN_CONF_DIR", zeppelin.getZeppelinConfDir().getAbsolutePath()); + sparkInterpreterSetting.setProperty("ZEPPELIN_CONF_DIR", zepServer.getZeppelinConfiguration().getConfDir()); sparkInterpreterSetting.setProperty("zeppelin.spark.useHiveContext", "false"); sparkInterpreterSetting.setProperty("zeppelin.pyspark.useIPython", "false"); sparkInterpreterSetting.setProperty("zeppelin.spark.scala.color", "false"); @@ -389,4 +400,20 @@ private String getPythonExec() throws IOException, InterruptedException { } 
return IOUtils.toString(process.getInputStream(), StandardCharsets.UTF_8).trim(); } + + private String getRScriptExec() throws IOException, InterruptedException { + Process process = Runtime.getRuntime().exec(new String[]{"which", "Rscript"}); + if (process.waitFor() != 0) { + throw new RuntimeException("Fail to run command: which Rscript."); + } + return IOUtils.toString(process.getInputStream(), StandardCharsets.UTF_8).trim(); + } + + private String getRExec() throws IOException, InterruptedException { + Process process = Runtime.getRuntime().exec(new String[]{"which", "R"}); + if (process.waitFor() != 0) { + throw new RuntimeException("Fail to run command: which R."); + } + return IOUtils.toString(process.getInputStream(), StandardCharsets.UTF_8).trim(); + } } diff --git a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest32.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest32.java index dfd9be366da..27c511e64e1 100644 --- a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest32.java +++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest32.java @@ -17,7 +17,6 @@ package org.apache.zeppelin.integration; -import org.apache.zeppelin.interpreter.InterpreterSetting; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Nested; @@ -25,16 +24,6 @@ public class SparkIntegrationTest32 { - @Nested - @DisplayName("Hadoop2") - public class Hadoop2 extends SparkIntegrationTest { - - @BeforeEach - public void downloadSpark() throws IOException { - prepareSpark("3.2.0", "2.7"); - } - } - @Nested @DisplayName("Hadoop3") public class Hadoop3 extends SparkIntegrationTest { diff --git a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest33.java 
b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest33.java index 73846fcf647..9183257184c 100644 --- a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest33.java +++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest33.java @@ -24,16 +24,6 @@ public class SparkIntegrationTest33 { - @Nested - @DisplayName("Hadoop2") - public class Hadoop2 extends SparkIntegrationTest { - - @BeforeEach - public void downloadSpark() throws IOException { - prepareSpark("3.3.0", "2"); - } - } - @Nested @DisplayName("Hadoop3") public class Hadoop3 extends SparkIntegrationTest { diff --git a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest34.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest34.java index 8af3ee6d5e7..d66bdad0536 100644 --- a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest34.java +++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkIntegrationTest34.java @@ -22,17 +22,7 @@ import org.junit.jupiter.api.Nested; import java.io.IOException; -public class SparkIntegrationTest34 extends SparkIntegrationTest { - - @Nested - @DisplayName("Hadoop2") - public class Hadoop2 extends SparkIntegrationTest { - - @BeforeEach - public void downloadSpark() throws IOException { - prepareSpark("3.4.0", "2"); - } - } +public class SparkIntegrationTest34 { @Nested @DisplayName("Hadoop3") diff --git a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkSubmitIntegrationTest.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkSubmitIntegrationTest.java index e0af99abfee..05e6af37783 100644 --- 
a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkSubmitIntegrationTest.java +++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/SparkSubmitIntegrationTest.java @@ -25,6 +25,9 @@ import org.apache.hadoop.yarn.api.records.ApplicationReport; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.exceptions.YarnException; +import org.apache.zeppelin.test.DownloadUtils; +import org.apache.zeppelin.MiniZeppelinServer; +import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.interpreter.ExecutionContext; import org.apache.zeppelin.interpreter.Interpreter; import org.apache.zeppelin.interpreter.InterpreterContext; @@ -33,7 +36,6 @@ import org.apache.zeppelin.interpreter.InterpreterResult; import org.apache.zeppelin.interpreter.InterpreterSetting; import org.apache.zeppelin.interpreter.InterpreterSettingManager; -import org.apache.zeppelin.interpreter.integration.DownloadUtils; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; @@ -43,7 +45,6 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; -import java.io.IOException; import java.util.EnumSet; import java.util.List; import java.util.concurrent.TimeoutException; @@ -54,40 +55,38 @@ public class SparkSubmitIntegrationTest { private static Logger LOGGER = LoggerFactory.getLogger(SparkSubmitIntegrationTest.class); private static MiniHadoopCluster hadoopCluster; - private static MiniZeppelin zeppelin; private static InterpreterFactory interpreterFactory; private static InterpreterSettingManager interpreterSettingManager; private static String sparkHome; + private static MiniZeppelinServer zepServer; @BeforeAll - public static void setUp() throws IOException { - String sparkVersion = "3.4.1"; - String hadoopVersion = "3"; - LOGGER.info("Testing Spark Version: " 
+ sparkVersion); - LOGGER.info("Testing Hadoop Version: " + hadoopVersion); - sparkHome = DownloadUtils.downloadSpark(sparkVersion, hadoopVersion); - + static void init() throws Exception { + LOGGER.info("Testing Spark Version: " + DownloadUtils.DEFAULT_SPARK_VERSION); + LOGGER.info("Testing Hadoop Version: " + DownloadUtils.DEFAULT_SPARK_HADOOP_VERSION); + sparkHome = DownloadUtils.downloadSpark(); hadoopCluster = new MiniHadoopCluster(); hadoopCluster.start(); - zeppelin = new MiniZeppelin(); - zeppelin.start(SparkIntegrationTest.class); - interpreterFactory = zeppelin.getInterpreterFactory(); - interpreterSettingManager = zeppelin.getInterpreterSettingManager(); - - InterpreterSetting sparkSubmitInterpreterSetting = - interpreterSettingManager.getInterpreterSettingByName("spark-submit"); + zepServer = new MiniZeppelinServer(SparkSubmitIntegrationTest.class.getSimpleName()); + zepServer.addInterpreter("sh"); + zepServer.addInterpreter("spark-submit"); + zepServer.copyBinDir(); + zepServer.getZeppelinConfiguration().setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_HELIUM_REGISTRY.getVarName(), + "helium"); + zepServer.start(); + interpreterSettingManager = zepServer.getServiceLocator().getService(InterpreterSettingManager.class); + interpreterFactory = zepServer.getServiceLocator().getService(InterpreterFactory.class); + InterpreterSetting sparkSubmitInterpreterSetting = interpreterSettingManager.getInterpreterSettingByName("spark-submit"); sparkSubmitInterpreterSetting.setProperty("SPARK_HOME", sparkHome); sparkSubmitInterpreterSetting.setProperty("HADOOP_CONF_DIR", hadoopCluster.getConfigPath()); sparkSubmitInterpreterSetting.setProperty("YARN_CONF_DIR", hadoopCluster.getConfigPath()); } @AfterAll - public static void tearDown() throws IOException { - if (zeppelin != null) { - zeppelin.stop(); - } + public static void tearDown() throws Exception { + zepServer.destroy(); if (hadoopCluster != null) { hadoopCluster.stop(); } @@ -102,7 +101,7 @@ void 
testLocalMode() throws InterpreterException, YarnException { InterpreterContext context = new InterpreterContext.Builder().setNoteId("note1").setParagraphId("paragraph_1").build(); InterpreterResult interpreterResult = sparkSubmitInterpreter.interpret("--master local --class org.apache.spark.examples.SparkPi --deploy-mode client " + - sparkHome + "/examples/jars/spark-examples_2.12-3.4.1.jar", context); + sparkHome + "/examples/jars/spark-examples_2.12-" + DownloadUtils.DEFAULT_SPARK_VERSION + ".jar", context); assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code(), interpreterResult.toString()); // no yarn application launched @@ -126,7 +125,7 @@ void testYarnMode() throws InterpreterException, YarnException { sparkSubmitInterpreter.interpret("--master yarn --deploy-mode cluster --class org.apache.spark.examples.SparkPi " + "--conf spark.app.name=" + yarnAppName + " --conf spark.driver.memory=512m " + "--conf spark.executor.memory=512m " + - sparkHome + "/examples/jars/spark-examples_2.12-3.4.1.jar", context); + sparkHome + "/examples/jars/spark-examples_2.12-" + DownloadUtils.DEFAULT_SPARK_VERSION + ".jar", context); assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code(), interpreterResult.toString()); GetApplicationsRequest request = GetApplicationsRequest.newInstance(EnumSet.of(YarnApplicationState.FINISHED)); @@ -159,7 +158,7 @@ public void run() { sparkSubmitInterpreter.interpret("--master yarn --deploy-mode cluster --class org.apache.spark.examples.SparkPi " + "--conf spark.app.name=" + yarnAppName + " --conf spark.driver.memory=512m " + "--conf spark.executor.memory=512m " + - sparkHome + "/examples/jars/spark-examples_2.12-3.4.1.jar", context); + sparkHome + "/examples/jars/spark-examples_2.12-" + DownloadUtils.DEFAULT_SPARK_VERSION + ".jar", context); assertEquals(InterpreterResult.Code.INCOMPLETE, interpreterResult.code(), interpreterResult.toString()); assertTrue(interpreterResult.toString().contains("Paragraph received a 
SIGTERM"), interpreterResult.toString()); } catch (InterpreterException e) { diff --git a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/YarnInterpreterLauncherIntegrationTest.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/YarnInterpreterLauncherIntegrationTest.java index 0866b257702..2a39e04715f 100644 --- a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/YarnInterpreterLauncherIntegrationTest.java +++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/YarnInterpreterLauncherIntegrationTest.java @@ -22,6 +22,8 @@ import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.exceptions.YarnException; +import org.apache.zeppelin.MiniZeppelinServer; +import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.dep.Dependency; import org.apache.zeppelin.interpreter.ExecutionContext; import org.apache.zeppelin.interpreter.Interpreter; @@ -34,6 +36,7 @@ import org.apache.zeppelin.user.AuthenticationInfo; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -41,8 +44,6 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; - -import java.io.IOException; import java.util.Arrays; import java.util.EnumSet; @@ -51,29 +52,38 @@ public class YarnInterpreterLauncherIntegrationTest { private static final Logger LOGGER = LoggerFactory.getLogger(YarnInterpreterLauncherIntegrationTest.class); private static MiniHadoopCluster hadoopCluster; - private static MiniZeppelin zeppelin; + private static MiniZeppelinServer zepServer; private static 
InterpreterFactory interpreterFactory; private static InterpreterSettingManager interpreterSettingManager; - private String hadoopHome; - @BeforeAll - public static void setUp() throws IOException { + static void init() throws Exception { Configuration conf = new Configuration(); hadoopCluster = new MiniHadoopCluster(conf); hadoopCluster.start(); - zeppelin = new MiniZeppelin(); - zeppelin.start(YarnInterpreterLauncherIntegrationTest.class); - interpreterFactory = zeppelin.getInterpreterFactory(); - interpreterSettingManager = zeppelin.getInterpreterSettingManager(); + zepServer = new MiniZeppelinServer(YarnInterpreterLauncherIntegrationTest.class.getSimpleName()); + zepServer.addInterpreter("sh"); + zepServer.addInterpreter("jdbc"); + zepServer.addInterpreter("python"); + zepServer.addLauncher("YarnInterpreterLauncher"); + zepServer.copyLogProperties(); + zepServer.copyBinDir(); + zepServer.getZeppelinConfiguration().setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_HELIUM_REGISTRY.getVarName(), + "helium"); + zepServer.getZeppelinConfiguration().setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_ALLOWED_ORIGINS.getVarName(), "*"); + zepServer.getZeppelinConfiguration().setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_CONNECT_TIMEOUT.getVarName(), "120000"); + + zepServer.start(); + } + @BeforeEach + void setup() { + interpreterSettingManager = zepServer.getServiceLocator().getService(InterpreterSettingManager.class); + interpreterFactory = new InterpreterFactory(interpreterSettingManager); } - @AfterAll - public static void tearDown() throws IOException { - if (zeppelin != null) { - zeppelin.stop(); - } + public static void tearDown() throws Exception { + zepServer.destroy(); if (hadoopCluster != null) { hadoopCluster.stop(); } diff --git a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZSessionIntegrationTest.java 
b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZSessionIntegrationTest.java index 68320512e62..6c4029955d8 100644 --- a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZSessionIntegrationTest.java +++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZSessionIntegrationTest.java @@ -18,19 +18,20 @@ package org.apache.zeppelin.integration; import org.apache.commons.io.IOUtils; +import org.apache.zeppelin.test.DownloadUtils; +import org.apache.zeppelin.MiniZeppelinServer; import org.apache.zeppelin.client.ClientConfig; import org.apache.zeppelin.client.ExecuteResult; import org.apache.zeppelin.client.websocket.SimpleMessageHandler; import org.apache.zeppelin.client.Status; import org.apache.zeppelin.client.ZSession; import org.apache.zeppelin.conf.ZeppelinConfiguration; -import org.apache.zeppelin.interpreter.integration.DownloadUtils; import org.apache.zeppelin.interpreter.lifecycle.TimeoutLifecycleManager; import org.apache.zeppelin.notebook.Notebook; import org.apache.zeppelin.rest.AbstractTestRestApi; -import org.apache.zeppelin.utils.TestUtils; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -50,31 +51,47 @@ public class ZSessionIntegrationTest extends AbstractTestRestApi { private static Notebook notebook; private static String sparkHome; private static String flinkHome; + private static MiniZeppelinServer zepServer; - private ClientConfig clientConfig = new ClientConfig("http://localhost:8080"); - + private ClientConfig clientConfig; @BeforeAll - public static void setUp() throws Exception { - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_HELIUM_REGISTRY.getVarName(), - "helium"); - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_ALLOWED_ORIGINS.getVarName(), "*"); - 
System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_SESSION_CHECK_INTERVAL.getVarName(), "5000"); - - AbstractTestRestApi.startUp(ZSessionIntegrationTest.class.getSimpleName()); - ZeppelinConfiguration zConf = ZeppelinConfiguration.create(); + static void init() throws Exception { + zepServer = new MiniZeppelinServer(ZSessionIntegrationTest.class.getSimpleName()); + zepServer.addInterpreter("sh"); + zepServer.addInterpreter("spark"); + zepServer.addInterpreter("spark-submit"); + zepServer.addInterpreter("flink"); + zepServer.addInterpreter("flink-cmd"); + zepServer.addInterpreter("python"); + zepServer.copyBinDir(); + zepServer.addLauncher("FlinkInterpreterLauncher"); + ZeppelinConfiguration zConf = zepServer.getZeppelinConfiguration(); + zConf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_HELIUM_REGISTRY.getVarName(), + "helium"); + zConf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_ALLOWED_ORIGINS.getVarName(), "*"); + zConf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_SESSION_CHECK_INTERVAL.getVarName(), "5000"); + zConf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_HELIUM_REGISTRY.getVarName(), + "helium"); zConf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_LIFECYCLE_MANAGER_CLASS.getVarName(), TimeoutLifecycleManager.class.getName()); zConf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_LIFECYCLE_MANAGER_TIMEOUT_CHECK_INTERVAL.getVarName(), "5000"); zConf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_LIFECYCLE_MANAGER_TIMEOUT_THRESHOLD.getVarName(), "10000"); - - notebook = TestUtils.getInstance(Notebook.class); - sparkHome = DownloadUtils.downloadSpark("3.4.1", "3"); + sparkHome = DownloadUtils.downloadSpark(); flinkHome = DownloadUtils.downloadFlink("1.17.1", "2.12"); + zepServer.start(); + notebook = zepServer.getServiceLocator().getService(Notebook.class); + } @AfterAll - public static void destroy() throws Exception { - AbstractTestRestApi.shutDown(); + static void 
destroy() throws Exception { + zepServer.destroy(); + } + + @BeforeEach + void setup() { + conf = zepServer.getZeppelinConfiguration(); + clientConfig = new ClientConfig("http://localhost:" + conf.getServerPort()); } @Test @@ -106,7 +123,8 @@ void testZSession_Shell() throws Exception { assertEquals(Status.ERROR, result.getStatus()); assertEquals(2, result.getResults().size()); assertEquals("TEXT", result.getResults().get(0).getType()); - assertTrue(result.getResults().get(0).getData().contains("command not found"), result.getResults().get(0).getData()); + // Depends on JVM language + //assertTrue(result.getResults().get(0).getData().contains("command not found"), result.getResults().get(0).getData()); assertEquals("TEXT", result.getResults().get(1).getType()); assertTrue(result.getResults().get(1).getData().contains("ExitValue"), result.getResults().get(1).getData()); @@ -152,7 +170,8 @@ void testZSession_Shell_Submit() throws Exception { assertEquals(Status.ERROR, result.getStatus()); assertEquals(2, result.getResults().size()); assertEquals("TEXT", result.getResults().get(0).getType()); - assertTrue(result.getResults().get(0).getData().contains("command not found"), result.getResults().get(0).getData()); + // depends of JVM language + // assertTrue(result.getResults().get(0).getData().contains("command not found"), result.getResults().get(0).getData()); assertEquals("TEXT", result.getResults().get(1).getType()); assertTrue(result.getResults().get(1).getData().contains("ExitValue"), result.getResults().get(1).getData()); @@ -189,7 +208,7 @@ void testZSession_Spark() throws Exception { assertEquals(Status.FINISHED, result.getStatus(), result.toString()); assertEquals(1, result.getResults().size()); assertEquals("TEXT", result.getResults().get(0).getType()); - assertTrue(result.getResults().get(0).getData().contains("3.4.1"), result.getResults().get(0).getData()); + assertTrue(result.getResults().get(0).getData().contains(DownloadUtils.DEFAULT_SPARK_VERSION), 
result.getResults().get(0).getData()); assertEquals(0, result.getJobUrls().size()); // pyspark @@ -258,7 +277,7 @@ void testZSession_Spark_Submit() throws Exception { assertEquals(Status.FINISHED, result.getStatus(), result.toString()); assertEquals(1, result.getResults().size()); assertEquals("TEXT", result.getResults().get(0).getType()); - assertTrue(result.getResults().get(0).getData().contains("3.4.1"), result.getResults().get(0).getData()); + assertTrue(result.getResults().get(0).getData().contains(DownloadUtils.DEFAULT_SPARK_VERSION), result.getResults().get(0).getData()); assertEquals(0, result.getJobUrls().size()); // pyspark diff --git a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinClientIntegrationTest.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinClientIntegrationTest.java index 5d193b05ade..fb0fbdf5d21 100644 --- a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinClientIntegrationTest.java +++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinClientIntegrationTest.java @@ -18,6 +18,7 @@ package org.apache.zeppelin.integration; import org.apache.commons.io.IOUtils; +import org.apache.zeppelin.MiniZeppelinServer; import org.apache.zeppelin.client.ClientConfig; import org.apache.zeppelin.client.NoteResult; import org.apache.zeppelin.client.ParagraphResult; @@ -29,10 +30,12 @@ import org.apache.zeppelin.common.SessionInfo; import org.apache.zeppelin.notebook.Notebook; import org.apache.zeppelin.rest.AbstractTestRestApi; -import org.apache.zeppelin.utils.TestUtils; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; 
@@ -45,28 +48,39 @@ import java.util.HashMap; import java.util.Map; -public class ZeppelinClientIntegrationTest extends AbstractTestRestApi { +class ZeppelinClientIntegrationTest extends AbstractTestRestApi { + private static final Logger LOG = LoggerFactory.getLogger(ZeppelinClientIntegrationTest.class); private static Notebook notebook; private static ClientConfig clientConfig; private static ZeppelinClient zeppelinClient; + private static MiniZeppelinServer zepServer; @BeforeAll - public static void setUp() throws Exception { - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_HELIUM_REGISTRY.getVarName(), - "helium"); - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_ALLOWED_ORIGINS.getVarName(), "*"); - - AbstractTestRestApi.startUp(ZeppelinClientIntegrationTest.class.getSimpleName()); - notebook = TestUtils.getInstance(Notebook.class); - - clientConfig = new ClientConfig("http://localhost:8080"); + static void init() throws Exception { + zepServer = new MiniZeppelinServer(ZeppelinClientIntegrationTest.class.getSimpleName()); + zepServer.addInterpreter("md"); + zepServer.addInterpreter("sh"); + zepServer.copyBinDir(); + zepServer.copyLogProperties(); + ZeppelinConfiguration zConf = zepServer.getZeppelinConfiguration(); + zConf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_HELIUM_REGISTRY.getVarName(), + "helium"); + zConf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_ALLOWED_ORIGINS.getVarName(), "*"); + zepServer.start(); + clientConfig = new ClientConfig("http://localhost:" + zepServer.getZeppelinConfiguration().getServerPort()); zeppelinClient = new ZeppelinClient(clientConfig); + notebook = zepServer.getServiceLocator().getService(Notebook.class); } @AfterAll - public static void destroy() throws Exception { - AbstractTestRestApi.shutDown(); + static void destroy() throws Exception { + zepServer.destroy(); + } + + @BeforeEach + void setup() { + conf = zepServer.getZeppelinConfiguration(); } @Test @@ -241,7 +255,8 @@ void 
testExecuteParagraph() throws Exception { assertEquals(Status.ERROR, paragraphResult.getStatus()); assertEquals(2, paragraphResult.getResults().size()); assertEquals("TEXT", paragraphResult.getResults().get(0).getType()); - assertTrue(paragraphResult.getResults().get(0).getData().contains("command not found"), paragraphResult.getResults().get(0).getData()); + // depends on JVM language + // assertTrue(paragraphResult.getResults().get(0).getData().contains("command not found"), paragraphResult.getResults().get(0).getData()); assertEquals("TEXT", paragraphResult.getResults().get(1).getType()); assertTrue(paragraphResult.getResults().get(1).getData().contains("ExitValue"), paragraphResult.getResults().get(1).getData()); @@ -305,7 +320,8 @@ void testSubmitParagraph() throws Exception { assertEquals(Status.ERROR, paragraphResult.getStatus()); assertEquals(2, paragraphResult.getResults().size()); assertEquals("TEXT", paragraphResult.getResults().get(0).getType()); - assertTrue(paragraphResult.getResults().get(0).getData().contains("command not found"), paragraphResult.getResults().get(0).getData()); + // depends on JVM language + // assertTrue(paragraphResult.getResults().get(0).getData().contains("command not found"), paragraphResult.getResults().get(0).getData()); assertEquals("TEXT", paragraphResult.getResults().get(1).getType()); assertTrue(paragraphResult.getResults().get(1).getData().contains("ExitValue"), paragraphResult.getResults().get(1).getData()); @@ -388,7 +404,8 @@ void testExecuteNote() throws Exception { ParagraphResult p1 = noteResult.getParagraphResultList().get(1); assertEquals(Status.ERROR, p1.getStatus()); assertEquals("TEXT", p1.getResults().get(0).getType()); - assertTrue(p1.getResults().get(0).getData().contains("command not found"), p1.getResults().get(0).getData()); + // depends on JVM language + //assertTrue(p1.getResults().get(0).getData().contains("command not found"), p1.getResults().get(0).getData()); assertEquals("TEXT", 
p1.getResults().get(1).getType()); assertTrue(p1.getResults().get(1).getData().contains("ExitValue"), p1.getResults().get(1).getData()); @@ -462,7 +479,8 @@ void testSubmitNote() throws Exception { ParagraphResult p1 = noteResult.getParagraphResultList().get(1); assertEquals(Status.ERROR, p1.getStatus()); assertEquals("TEXT", p1.getResults().get(0).getType()); - assertTrue(p1.getResults().get(0).getData().contains("command not found"), p1.getResults().get(0).getData()); + // depends on JVM language + // assertTrue(p1.getResults().get(0).getData().contains("command not found"), p1.getResults().get(0).getData()); assertEquals("TEXT", p1.getResults().get(1).getType()); assertTrue(p1.getResults().get(1).getData().contains("ExitValue"), p1.getResults().get(1).getData()); diff --git a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinClientWithAuthIntegrationTest.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinClientWithAuthIntegrationTest.java index f96eb47c9d6..6c223ed4f93 100644 --- a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinClientWithAuthIntegrationTest.java +++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinClientWithAuthIntegrationTest.java @@ -22,34 +22,48 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; +import org.apache.zeppelin.MiniZeppelinServer; import org.apache.zeppelin.client.ClientConfig; import org.apache.zeppelin.client.ZeppelinClient; import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.rest.AbstractTestRestApi; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -public class ZeppelinClientWithAuthIntegrationTest extends 
AbstractTestRestApi { +class ZeppelinClientWithAuthIntegrationTest extends AbstractTestRestApi { + + private static final Logger LOG = LoggerFactory.getLogger(ZeppelinClientWithAuthIntegrationTest.class); private static ClientConfig clientConfig; private static ZeppelinClient zeppelinClient; + private static MiniZeppelinServer zepServer; @BeforeAll - public static void setUp() throws Exception { - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_HELIUM_REGISTRY.getVarName(), - "helium"); - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_ALLOWED_ORIGINS.getVarName(), "*"); - - AbstractTestRestApi.startUpWithAuthenticationEnable(ZeppelinClientWithAuthIntegrationTest.class.getSimpleName()); - - clientConfig = new ClientConfig("http://localhost:8080"); + static void init() throws Exception { + zepServer = new MiniZeppelinServer(ZeppelinClientWithAuthIntegrationTest.class.getSimpleName()); + zepServer.addInterpreter("md"); + zepServer.addConfigFile("shiro.ini", AbstractTestRestApi.ZEPPELIN_SHIRO); + zepServer.copyBinDir(); + zepServer.getZeppelinConfiguration().setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_HELIUM_REGISTRY.getVarName(), + "helium"); + zepServer.getZeppelinConfiguration().setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_ALLOWED_ORIGINS.getVarName(), "*"); + zepServer.start(); + clientConfig = new ClientConfig("http://localhost:" + zepServer.getZeppelinConfiguration().getServerPort()); zeppelinClient = new ZeppelinClient(clientConfig); } @AfterAll - public static void destroy() throws Exception { - AbstractTestRestApi.shutDown(); + static void destroy() throws Exception { + zepServer.destroy(); + } + + @BeforeEach + void setup() { + conf = zepServer.getZeppelinConfiguration(); } @Test diff --git a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinFlinkClusterTest.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinFlinkClusterTest.java index 
ac1952494d4..65060ee49ad 100644 --- a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinFlinkClusterTest.java +++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinFlinkClusterTest.java @@ -18,8 +18,9 @@ package org.apache.zeppelin.integration; import org.apache.commons.io.IOUtils; +import org.apache.zeppelin.test.DownloadUtils; +import org.apache.zeppelin.MiniZeppelinServer; import org.apache.zeppelin.conf.ZeppelinConfiguration; -import org.apache.zeppelin.interpreter.integration.DownloadUtils; import org.apache.zeppelin.notebook.Notebook; import org.apache.zeppelin.notebook.Paragraph; import org.apache.zeppelin.rest.AbstractTestRestApi; @@ -28,6 +29,7 @@ import org.apache.zeppelin.utils.TestUtils; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.slf4j.Logger; @@ -45,26 +47,37 @@ public abstract class ZeppelinFlinkClusterTest extends AbstractTestRestApi { private static final Logger LOGGER = LoggerFactory.getLogger(ZeppelinFlinkClusterTest.class); - private String flinkVersion; private String flinkHome; public void download(String flinkVersion, String scalaVersion) { - this.flinkVersion = flinkVersion; LOGGER.info("Testing FlinkVersion: " + flinkVersion); LOGGER.info("Testing ScalaVersion: " + scalaVersion); this.flinkHome = DownloadUtils.downloadFlink(flinkVersion, scalaVersion); } + private static MiniZeppelinServer zepServer; + @BeforeAll - public static void setUp() throws Exception { - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_HELIUM_REGISTRY.getVarName(), - "helium"); - AbstractTestRestApi.startUp(ZeppelinFlinkClusterTest.class.getSimpleName()); + static void init() throws Exception { + zepServer = new MiniZeppelinServer(ZeppelinFlinkClusterTest.class.getSimpleName()); + zepServer.addInterpreter("sh"); + 
zepServer.addInterpreter("flink"); + zepServer.addInterpreter("flink-cmd"); + zepServer.copyBinDir(); + zepServer.addLauncher("FlinkInterpreterLauncher"); + zepServer.getZeppelinConfiguration().setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_HELIUM_REGISTRY.getVarName(), + "helium"); + zepServer.start(); } @AfterAll - public static void destroy() throws Exception { - AbstractTestRestApi.shutDown(); + static void destroy() throws Exception { + zepServer.destroy(); + } + + @BeforeEach + void setup() { + conf = zepServer.getZeppelinConfiguration(); } @Disabled("(zjffdu) Disable Temporary") diff --git a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest.java index 3366f58a8b2..2fd93d331bb 100644 --- a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest.java +++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest.java @@ -17,6 +17,8 @@ package org.apache.zeppelin.integration; import org.apache.commons.io.IOUtils; +import org.apache.zeppelin.test.DownloadUtils; +import org.apache.zeppelin.MiniZeppelinServer; import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.display.AngularObject; import org.apache.zeppelin.display.Input; @@ -29,9 +31,7 @@ import org.apache.zeppelin.interpreter.InterpreterResult; import org.apache.zeppelin.interpreter.InterpreterSetting; import org.apache.zeppelin.interpreter.InterpreterSettingManager; -import org.apache.zeppelin.interpreter.integration.DownloadUtils; import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion; -import org.apache.zeppelin.notebook.Note; import org.apache.zeppelin.notebook.Notebook; import org.apache.zeppelin.notebook.Paragraph; import org.apache.zeppelin.rest.AbstractTestRestApi; @@ -39,7 +39,6 @@ import 
org.apache.zeppelin.user.AuthenticationInfo; import org.apache.zeppelin.utils.TestUtils; import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -52,17 +51,20 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; import static org.junit.jupiter.api.Assumptions.assumeTrue; +import static org.awaitility.Awaitility.await; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.StringReader; +import java.time.Duration; import java.util.Arrays; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.Callable; /** * Test against spark cluster. @@ -78,6 +80,7 @@ public abstract class ZeppelinSparkClusterTest extends AbstractTestRestApi { //TODO(zjffdu) remove this after we upgrade it to junit 4.13 (ZEPPELIN-3341) private static Set verifiedSparkVersions = new HashSet<>(); + private static MiniZeppelinServer zepServer; private String sparkVersion; private String sparkHome; @@ -108,7 +111,7 @@ public void setupSparkInterpreter(String sparkHome) throws InterpreterException Map sparkProperties = (Map) sparkIntpSetting.getProperties(); - LOG.info("SPARK HOME detected " + sparkHome); + LOGGER.info("SPARK HOME detected " + sparkHome); String masterEnv = System.getenv("SPARK_MASTER"); sparkProperties.put(SPARK_MASTER_PROPERTY_NAME, new InterpreterProperty(SPARK_MASTER_PROPERTY_NAME, masterEnv == null ? 
"local[2]" : masterEnv)); @@ -127,41 +130,38 @@ public void setupSparkInterpreter(String sparkHome) throws InterpreterException new InterpreterProperty("zeppelin.spark.scala.color", "false")); sparkProperties.put("zeppelin.spark.deprecatedMsg.show", new InterpreterProperty("zeppelin.spark.deprecatedMsg.show", "false")); - TestUtils.getInstance(Notebook.class).getInterpreterSettingManager().restart(sparkIntpSetting.getId()); + zepServer.getServiceLocator().getService(Notebook.class).getInterpreterSettingManager().restart(sparkIntpSetting.getId()); } @BeforeAll - public static void setUp() throws Exception { - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_HELIUM_REGISTRY.getVarName(), - "helium"); - AbstractTestRestApi.startUp(ZeppelinSparkClusterTest.class.getSimpleName()); + static void init() throws Exception { + zepServer = new MiniZeppelinServer(ZeppelinSparkClusterTest.class.getSimpleName()); + zepServer.addInterpreter("md"); + zepServer.addInterpreter("spark"); + zepServer.addInterpreter("python"); + zepServer.copyBinDir(); + zepServer.copyLogProperties(); + zepServer.getZeppelinConfiguration().setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_HELIUM_REGISTRY.getVarName(), + "helium"); + zepServer.start(); } @AfterAll - public static void destroy() throws Exception { - AbstractTestRestApi.shutDown(); + static void destroy() throws Exception { + zepServer.destroy(); } - private void waitForFinish(Paragraph p) { - while (p.getStatus() != Status.FINISHED - && p.getStatus() != Status.ERROR - && p.getStatus() != Status.ABORT) { - try { - Thread.sleep(100); - } catch (InterruptedException e) { - LOG.error("Exception in WebDriverManager while getWebDriver ", e); - } - } + @BeforeEach + void setup() { + conf = zepServer.getZeppelinConfiguration(); } - private void waitForRunning(Paragraph p) { - while (p.getStatus() != Status.RUNNING) { - try { - Thread.sleep(100); - } catch (InterruptedException e) { - LOG.error("Exception in WebDriverManager while 
getWebDriver ", e); - } - } + private Callable isParagraphFinish(Paragraph p) { + return () -> Status.FINISHED.equals(p.getStatus()) || Status.ERROR.equals(p.getStatus()) || Status.ABORT.equals(p.getStatus()); + } + + private Callable isParagraphRunning(Paragraph p) { + return () -> Status.RUNNING.equals(p.getStatus()); } @Test @@ -210,9 +210,9 @@ public void scalaOutputTest() throws IOException, InterruptedException { // test cancel p.setText("%spark sc.range(1,10).map(e=>{Thread.sleep(1000); e}).collect()"); note.run(p.getId(), false); - waitForRunning(p); + await().pollInterval(Duration.ofMillis(100)).atMost(Duration.ofSeconds(30)).until(isParagraphRunning(p)); p.abort(); - waitForFinish(p); + await().pollInterval(Duration.ofMillis(100)).atMost(Duration.ofSeconds(30)).until(isParagraphFinish(p)); assertEquals(Status.ABORT, p.getStatus()); return null; }); @@ -501,7 +501,7 @@ public void zRunTest() throws IOException, InterruptedException { assertEquals(Status.FINISHED, p0.getStatus()); // z.run is not blocking call. So p1 may not be finished when p0 is done. 
- waitForFinish(p1); + await().pollInterval(Duration.ofMillis(100)).atMost(Duration.ofSeconds(30)).until(isParagraphFinish(p1)); assertEquals(Status.FINISHED, p1.getStatus()); note.run(p2.getId(), true); assertEquals(Status.FINISHED, p2.getStatus()); @@ -513,10 +513,10 @@ public void zRunTest() throws IOException, InterruptedException { // run current Node, z.runNote(noteId) p0.setText(String.format("%%spark z.runNote(\"%s\")", note.getId())); note.run(p0.getId()); - waitForFinish(p0); - waitForFinish(p1); - waitForFinish(p2); - waitForFinish(p3); + await().pollInterval(Duration.ofMillis(100)).atMost(Duration.ofSeconds(30)).until(isParagraphFinish(p0)); + await().pollInterval(Duration.ofMillis(100)).atMost(Duration.ofSeconds(30)).until(isParagraphFinish(p1)); + await().pollInterval(Duration.ofMillis(100)).atMost(Duration.ofSeconds(30)).until(isParagraphFinish(p2)); + await().pollInterval(Duration.ofMillis(100)).atMost(Duration.ofSeconds(30)).until(isParagraphFinish(p3)); assertEquals(Status.FINISHED, p3.getStatus()); String p3result = p3.getReturn().message().get(0).getData(); @@ -527,13 +527,7 @@ public void zRunTest() throws IOException, InterruptedException { p3.setText("%spark println(\"END\")"); note.run(p0.getId(), true); - // Sleep 1 second to ensure p3 start running - try { - Thread.sleep(1000); - } catch (InterruptedException e) { - fail(); - } - waitForFinish(p3); + await().pollDelay(Duration.ofSeconds(1)).pollInterval(Duration.ofMillis(100)).atMost(Duration.ofSeconds(30)).until(isParagraphFinish(p3)); assertEquals(Status.FINISHED, p3.getStatus()); assertEquals("END\n", p3.getReturn().message().get(0).getData()); @@ -549,14 +543,14 @@ public void zRunTest() throws IOException, InterruptedException { // run p20 of note2 via paragraph in note1 p0.setText(String.format("%%spark.pyspark z.run(\"%s\", \"%s\")", note2.getId(), p20.getId())); note.run(p0.getId(), true); - waitForFinish(p20); + 
await().pollInterval(Duration.ofMillis(100)).atMost(Duration.ofSeconds(30)).until(isParagraphFinish(p20)); assertEquals(Status.FINISHED, p20.getStatus()); assertEquals(Status.READY, p21.getStatus()); p0.setText(String.format("%%spark z.runNote(\"%s\")", note2.getId())); note.run(p0.getId(), true); - waitForFinish(p20); - waitForFinish(p21); + await().pollInterval(Duration.ofMillis(100)).atMost(Duration.ofSeconds(30)).until(isParagraphFinish(p20)); + await().pollInterval(Duration.ofMillis(100)).atMost(Duration.ofSeconds(30)).until(isParagraphFinish(p21)); assertEquals(Status.FINISHED, p20.getStatus()); assertEquals(Status.FINISHED, p21.getStatus()); assertEquals("1", p21.getReturn().message().get(0).getData()); @@ -683,13 +677,13 @@ private void verifySparkVersionNumber() throws IOException { p.setText("%spark print(sc.version)"); note.run(p.getId()); - waitForFinish(p); + await().pollInterval(Duration.ofMillis(100)).atMost(Duration.ofSeconds(30)).until(isParagraphFinish(p)); assertEquals(Status.FINISHED, p.getStatus()); assertEquals(sparkVersion, p.getReturn().message().get(0).getData()); p.setText("%spark.pyspark sc.version"); note.run(p.getId()); - waitForFinish(p); + await().pollInterval(Duration.ofMillis(100)).atMost(Duration.ofSeconds(30)).until(isParagraphFinish(p)); assertEquals(Status.FINISHED, p.getStatus()); assertTrue(p.getReturn().message().get(0).getData().contains(sparkVersion), p.getReturn().toString()); @@ -721,7 +715,7 @@ public void testSparkZeppelinContextDynamicForms() throws IOException { "println(items(0))"; p.setText(code); note.run(p.getId()); - waitForFinish(p); + await().pollInterval(Duration.ofMillis(100)).timeout(Duration.ofSeconds(30)).until(isParagraphFinish(p)); assertEquals(Status.FINISHED, p.getStatus()); Iterator formIter = p.settings.getForms().keySet().iterator(); @@ -1143,14 +1137,16 @@ public void testFailtoLaunchSpark() throws IOException { p1.setText("%spark\nsc.version"); note.run(p1.getId(), true); 
assertEquals(Status.ERROR, p1.getStatus()); - assertTrue(p1.getReturn().message().get(0).getData().contains("No such file or directory"), - "Actual error message: " + p1.getReturn().message().get(0).getData()); + // depends on JVM language +// assertTrue(p1.getReturn().message().get(0).getData().contains("No such file or directory"), +// "Actual error message: " + p1.getReturn().message().get(0).getData()); // run it again, and get the same error note.run(p1.getId(), true); assertEquals(Status.ERROR, p1.getStatus()); - assertTrue(p1.getReturn().message().get(0).getData().contains("No such file or directory"), - "Actual error message: " + p1.getReturn().message().get(0).getData()); + // depends on JVM language +// assertTrue(p1.getReturn().message().get(0).getData().contains("No such file or directory"), +// Actual error message: " + p1.getReturn().message().get(0).getData()); return null; }); } finally { diff --git a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest32.java b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest32.java index 18d5b701777..1f1b7692450 100644 --- a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest32.java +++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest32.java @@ -23,16 +23,6 @@ public class ZeppelinSparkClusterTest32 { - @Nested - @DisplayName("Hadoop2") - public class Hadoop2 extends ZeppelinSparkClusterTest { - - @BeforeEach - public void downloadSpark() throws Exception { - prepareSpark("3.2.0", "2.7"); - } - } - @Nested @DisplayName("Hadoop3") public class Hadoop3 extends ZeppelinSparkClusterTest { diff --git a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest33.java 
b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest33.java index dbfacd9e400..43eb620f2d6 100644 --- a/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest33.java +++ b/zeppelin-interpreter-integration/src/test/java/org/apache/zeppelin/integration/ZeppelinSparkClusterTest33.java @@ -23,16 +23,6 @@ public class ZeppelinSparkClusterTest33 { - @Nested - @DisplayName("Hadoop2") - public class Hadoop2 extends ZeppelinSparkClusterTest { - - @BeforeEach - public void downloadSpark() throws Exception { - prepareSpark("3.3.0", "2"); - } - } - @Nested @DisplayName("Hadoop3") public class Hadoop3 extends ZeppelinSparkClusterTest { diff --git a/zeppelin-interpreter-parent/pom.xml b/zeppelin-interpreter-parent/pom.xml index 4813a083df5..85295ed26ae 100644 --- a/zeppelin-interpreter-parent/pom.xml +++ b/zeppelin-interpreter-parent/pom.xml @@ -23,7 +23,7 @@ zeppelin org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../pom.xml diff --git a/zeppelin-interpreter-shaded/pom.xml b/zeppelin-interpreter-shaded/pom.xml index 2ba6a76e038..cc952701bbe 100644 --- a/zeppelin-interpreter-shaded/pom.xml +++ b/zeppelin-interpreter-shaded/pom.xml @@ -24,7 +24,7 @@ zeppelin org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 zeppelin-interpreter-shaded @@ -40,7 +40,7 @@ org.apache.zeppelin zeppelin-interpreter - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 true diff --git a/zeppelin-interpreter/pom.xml b/zeppelin-interpreter/pom.xml index 3bc0584398a..0d8c0102667 100644 --- a/zeppelin-interpreter/pom.xml +++ b/zeppelin-interpreter/pom.xml @@ -24,7 +24,7 @@ zeppelin org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../pom.xml @@ -253,74 +253,19 @@ - hadoop2 - + hadoop3 true - ${hadoop2.7.version} - hadoop-client - hadoop-yarn-api - hadoop-client + ${hadoop3.3.version} - - org.apache.hadoop - hadoop-common - - - log4j - log4j - - - org.slf4j - slf4j-log4j12 - - - - - - 
org.apache.hadoop - hadoop-yarn-client - - - log4j - log4j - - - - - - - - hadoop3 - - - ${hadoop3.2.version} - hadoop-client-api - hadoop-client-runtime - hadoop-client-minicluster - - - - - - org.apache.hadoop - ${hadoop-client-runtime.artifact} - org.apache.hadoop - ${hadoop-client-minicluster.artifact} - test - - - junit - junit - - + hadoop-client-runtime diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/cluster/ClusterManager.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/cluster/ClusterManager.java index 6e586afb7f0..d0240d9c322 100644 --- a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/cluster/ClusterManager.java +++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/cluster/ClusterManager.java @@ -131,7 +131,7 @@ public abstract class ClusterManager { private static final Logger LOGGER = LoggerFactory.getLogger(ClusterManager.class); - public ZeppelinConfiguration zConf; + protected final ZeppelinConfiguration zConf; protected Collection clusterNodes = new ArrayList<>(); @@ -158,8 +158,8 @@ public abstract class ClusterManager { protected boolean isTest = false; public ClusterManager(ZeppelinConfiguration zConf) { + this.zConf = zConf; try { - this.zConf = zConf; zeplServerHost = RemoteInterpreterUtils.findAvailableHostAddress(); String clusterAddr = this.zConf.getClusterAddress(); if (!StringUtils.isEmpty(clusterAddr)) { diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/cluster/ClusterManagerClient.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/cluster/ClusterManagerClient.java index 9526cc782b6..fd19a721c82 100644 --- a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/cluster/ClusterManagerClient.java +++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/cluster/ClusterManagerClient.java @@ -63,10 +63,11 @@ public void start(String metaKey) { super.start(); // Instantiated cluster monitoring class - clusterMonitor = new ClusterMonitor(this); + clusterMonitor = 
new ClusterMonitor(this, zConf); clusterMonitor.start(INTP_PROCESS_META, metaKey); } + @Override public void shutdown() { if (!zConf.isClusterMode()) { return; diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/cluster/ClusterManagerServer.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/cluster/ClusterManagerServer.java index b394b25f25b..5c4ec0be82c 100644 --- a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/cluster/ClusterManagerServer.java +++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/cluster/ClusterManagerServer.java @@ -100,7 +100,7 @@ public void start() { // Instantiated raftServer monitoring class String clusterName = getClusterNodeName(); - clusterMonitor = new ClusterMonitor(this); + clusterMonitor = new ClusterMonitor(this, zConf); clusterMonitor.start(SERVER_META, clusterName); super.start(); diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/cluster/ClusterMonitor.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/cluster/ClusterMonitor.java index 133f8b69101..6451c263eb5 100644 --- a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/cluster/ClusterMonitor.java +++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/cluster/ClusterMonitor.java @@ -68,10 +68,12 @@ public class ClusterMonitor { // and the interperterGroupID when monitoring the interperter processes private String metaKey; - public ClusterMonitor(ClusterManager clusterManagerServer) { + private final ZeppelinConfiguration zconf; + + public ClusterMonitor(ClusterManager clusterManagerServer, ZeppelinConfiguration zconf) { this.clusterManager = clusterManagerServer; + this.zconf = zconf; - ZeppelinConfiguration zconf = ZeppelinConfiguration.create(); heartbeatInterval = zconf.getClusterHeartbeatInterval(); heartbeatTimeout = zconf.getClusterHeartbeatTimeout(); diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/conf/ZeppelinConfiguration.java 
b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/conf/ZeppelinConfiguration.java index 438b2f3d817..5fa0a9d00ba 100644 --- a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/conf/ZeppelinConfiguration.java +++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/conf/ZeppelinConfiguration.java @@ -29,6 +29,9 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; +import java.util.OptionalInt; +import java.util.OptionalLong; import java.util.TimeZone; import java.util.function.Predicate; import java.util.stream.Collectors; @@ -66,8 +69,6 @@ public class ZeppelinConfiguration { private Boolean anonymousAllowed; - private static ZeppelinConfiguration conf; - private static final EnvironmentConfiguration envConfig = new EnvironmentConfiguration(); private static final SystemConfiguration sysConfig = new SystemConfiguration(); @@ -117,37 +118,25 @@ private void loadXMLConfig(@Nullable String filename) throws ConfigurationExcept } } - public static ZeppelinConfiguration create() { - if (conf != null) { - return conf; - } - return ZeppelinConfiguration.create(null); + public static ZeppelinConfiguration load() { + return ZeppelinConfiguration.load(null); } /** * Load from via filename. 
*/ - public static synchronized ZeppelinConfiguration create(@Nullable String filename) { - if (conf != null) { - return conf; - } - - conf = new ZeppelinConfiguration(filename); - + public static ZeppelinConfiguration load(@Nullable String filename) { + return new ZeppelinConfiguration(filename); + } - LOGGER.info("Server Host: {}", conf.getServerAddress()); - if (conf.useSsl()) { - LOGGER.info("Server SSL Port: {}", conf.getServerSslPort()); + public void printShortInfo() { + LOGGER.info("Server Host: {}", getServerAddress()); + if (useSsl()) { + LOGGER.info("Server SSL Port: {}", getServerSslPort()); } else { - LOGGER.info("Server Port: {}", conf.getServerPort()); + LOGGER.info("Server Port: {}", getServerPort()); } - LOGGER.info("Context Path: {}", conf.getServerContextPath()); + LOGGER.info("Context Path: {}", getServerContextPath()); LOGGER.info("Zeppelin Version: {}", Util.getVersion()); - - return conf; - } - - public static void reset() { - conf = null; } public void setProperty(String name, String value) { @@ -212,42 +201,67 @@ public String getString(ConfVars c) { return getString(c.name(), c.getVarName(), c.getStringValue()); } - public String getString(String envName, String propertyName, String defaultValue) { + public static String getStaticString(ConfVars c) { + return getStaticString(c.name(), c.getVarName()).orElse(c.getStringValue()); + } + + public static Optional getStaticString(String envName, String propertyName) { if (envConfig.containsKey(envName)) { - return envConfig.getString(envName); + return Optional.of(envConfig.getString(envName)); } if (sysConfig.containsKey(propertyName)) { - return sysConfig.getString(propertyName); + return Optional.of(sysConfig.getString(propertyName)); } - return getStringValue(propertyName, defaultValue); + return Optional.empty(); + } + public String getString(String envName, String propertyName, String defaultValue) { + return getStaticString(envName, propertyName) + .orElseGet(() -> 
getStringValue(propertyName, defaultValue)); } public int getInt(ConfVars c) { return getInt(c.name(), c.getVarName(), c.getIntValue()); } - public int getInt(String envName, String propertyName, int defaultValue) { + public static int getStaticInt(ConfVars c) { + return getStaticInt(c.name(), c.getVarName()).orElse(c.getIntValue()); + } + + public static OptionalInt getStaticInt(String envName, String propertyName) { if (envConfig.containsKey(envName)) { - return envConfig.getInt(envName); + return OptionalInt.of(envConfig.getInt(envName)); } if (sysConfig.containsKey(propertyName)) { - return sysConfig.getInt(propertyName); + return OptionalInt.of(sysConfig.getInt(propertyName)); } - return getIntValue(propertyName, defaultValue); + return OptionalInt.empty(); + } + + public int getInt(String envName, String propertyName, int defaultValue) { + return getStaticInt(envName, propertyName) + .orElseGet(() -> getIntValue(propertyName, defaultValue)); } public long getLong(ConfVars c) { return getLong(c.name(), c.getVarName(), c.getLongValue()); } - public long getLong(String envName, String propertyName, long defaultValue) { + public static long getStaticLong(ConfVars c) { + return getStaticLong(c.name(), c.getVarName()).orElse(c.getLongValue()); + } + + public static OptionalLong getStaticLong(String envName, String propertyName) { if (envConfig.containsKey(envName)) { - return envConfig.getLong(envName); + return OptionalLong.of(envConfig.getLong(envName)); } if (sysConfig.containsKey(propertyName)) { - return sysConfig.getLong(propertyName); + return OptionalLong.of(sysConfig.getLong(propertyName)); } - return getLongValue(propertyName, defaultValue); + return OptionalLong.empty(); + } + public long getLong(String envName, String propertyName, long defaultValue) { + return getStaticLong(envName, propertyName) + .orElseGet(() -> getLongValue(propertyName, defaultValue)); } /** @@ -785,7 +799,7 @@ public boolean isZeppelinNotebookMarkdownEscapeHtml() { return 
getBoolean(ConfVars.ZEPPELIN_NOTEBOOK_MARKDOWN_ESCAPE_HTML); } - public Boolean isZeppelinNotebookCollaborativeModeEnable() { + public boolean isZeppelinNotebookCollaborativeModeEnable() { return getBoolean(ConfVars.ZEPPELIN_NOTEBOOK_COLLABORATIVE_MODE_ENABLE); } diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/dep/AbstractDependencyResolver.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/dep/AbstractDependencyResolver.java index 623dba7ec97..c43737105b5 100644 --- a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/dep/AbstractDependencyResolver.java +++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/dep/AbstractDependencyResolver.java @@ -49,8 +49,7 @@ public abstract class AbstractDependencyResolver { protected RepositorySystemSession session; private Proxy proxy = null; - public AbstractDependencyResolver(String localRepoPath) { - ZeppelinConfiguration conf = ZeppelinConfiguration.create(); + protected AbstractDependencyResolver(String localRepoPath, ZeppelinConfiguration conf) { if (conf.getZeppelinProxyUrl() != null) { try { URL proxyUrl = new URL(conf.getZeppelinProxyUrl()); @@ -61,14 +60,15 @@ public AbstractDependencyResolver(String localRepoPath) { } } session = Booter.newRepositorySystemSession(system, localRepoPath); - repos.addAll(Booter.newCentralRepositorys(proxy)); // add maven central + repos.addAll(Booter.newCentralRepositorys(proxy, conf)); // add maven central repos.add(Booter.newLocalRepository()); } - public AbstractDependencyResolver(String localRepoPath, Proxy proxy) { + protected AbstractDependencyResolver(String localRepoPath, Proxy proxy, + ZeppelinConfiguration conf) { this.proxy = proxy; session = Booter.newRepositorySystemSession(system, localRepoPath); - repos.addAll(Booter.newCentralRepositorys(proxy)); // add maven central + repos.addAll(Booter.newCentralRepositorys(proxy, conf)); // add maven central repos.add(Booter.newLocalRepository()); } diff --git 
a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/dep/Booter.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/dep/Booter.java index 18cef292863..9a00ce970f4 100644 --- a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/dep/Booter.java +++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/dep/Booter.java @@ -82,11 +82,11 @@ static String resolveLocalRepoPath(String localRepoPath) { return Paths.get(home).resolve(localRepoPath).toAbsolutePath().toString(); } - public static List newCentralRepositorys(Proxy proxy) { + public static List newCentralRepositorys(Proxy proxy, + ZeppelinConfiguration conf) { String mvnRepoEnv = System.getenv("ZEPPELIN_INTERPRETER_DEP_MVNREPO"); if (mvnRepoEnv == null) { - mvnRepoEnv = ZeppelinConfiguration.create().getString( - ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_DEP_MVNREPO); + mvnRepoEnv = conf.getString(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_DEP_MVNREPO); } if (mvnRepoEnv == null) { mvnRepoEnv = "https://repo1.maven.org/maven2/"; diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/dep/DependencyResolver.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/dep/DependencyResolver.java index 8033bd92f1f..3c2a00c77eb 100644 --- a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/dep/DependencyResolver.java +++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/dep/DependencyResolver.java @@ -27,6 +27,7 @@ import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.StringUtils; +import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.eclipse.aether.RepositoryException; import org.eclipse.aether.artifact.Artifact; import org.eclipse.aether.artifact.DefaultArtifact; @@ -55,12 +56,12 @@ public class DependencyResolver extends AbstractDependencyResolver { "org.apache.zeppelin:zeppelin-interpreter", "org.apache.zeppelin:zeppelin-server"}; - public DependencyResolver(String localRepoPath) { - super(localRepoPath); + public 
DependencyResolver(String localRepoPath, ZeppelinConfiguration conf) { + super(localRepoPath, conf); } - public DependencyResolver(String localRepoPath, Proxy proxy) { - super(localRepoPath, proxy); + public DependencyResolver(String localRepoPath, Proxy proxy, ZeppelinConfiguration conf) { + super(localRepoPath, proxy, conf); } public List load(String artifact) diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/helium/Application.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/helium/Application.java index d138595fe7d..a2e87670c58 100644 --- a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/helium/Application.java +++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/helium/Application.java @@ -31,7 +31,7 @@ public abstract class Application { private final ApplicationContext context; - public Application(ApplicationContext context) { + protected Application(ApplicationContext context) { this.context = context; } @@ -129,7 +129,7 @@ public void printStringAsJavascript(String js) throws IOException { } private void beginJavascript() throws IOException { - StringBuffer js = new StringBuffer(); + StringBuilder js = new StringBuilder(); js.append("\n\n"); context.out.write(js.toString()); diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/Interpreter.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/Interpreter.java index bf09cb7106b..9c3c9e59f50 100644 --- a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/Interpreter.java +++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/Interpreter.java @@ -22,6 +22,7 @@ import org.apache.commons.lang3.reflect.FieldUtils; import org.apache.zeppelin.annotation.Experimental; import org.apache.zeppelin.annotation.ZeppelinApi; +import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion; import org.apache.zeppelin.scheduler.Scheduler; 
import org.apache.zeppelin.scheduler.SchedulerFactory; @@ -149,6 +150,7 @@ public Scheduler getScheduler() { private URL[] classloaderUrls; protected Properties properties; protected String userName; + protected ZeppelinConfiguration zConf; @ZeppelinApi public Interpreter(Properties properties) { @@ -198,6 +200,10 @@ public String getUserName() { return this.userName; } + public void setZeppelinConfiguration(ZeppelinConfiguration zConf) { + this.zConf = zConf; + } + public void setInterpreterGroup(InterpreterGroup interpreterGroup) { this.interpreterGroup = interpreterGroup; } diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterOption.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterOption.java index 82cf8722116..d77216bd014 100644 --- a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterOption.java +++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterOption.java @@ -20,7 +20,6 @@ import java.util.ArrayList; import java.util.List; -import com.google.gson.Gson; import org.apache.zeppelin.conf.ZeppelinConfiguration; /** @@ -30,7 +29,7 @@ public class InterpreterOption { public static final transient String SHARED = "shared"; public static final transient String SCOPED = "scoped"; public static final transient String ISOLATED = "isolated"; - private static ZeppelinConfiguration conf = ZeppelinConfiguration.create(); + private transient ZeppelinConfiguration conf; // always set it as true, keep this field just for backward compatibility boolean remote = true; @@ -61,6 +60,10 @@ public void setHost(String host) { this.host = host; } + public void setConf(ZeppelinConfiguration conf) { + this.conf = conf; + } + public boolean permissionIsSet() { return setPermission; } @@ -71,7 +74,7 @@ public void setUserPermission(boolean setPermission) { public List getOwners() { if (null != owners && conf.isUsernameForceLowerCase()) { - List 
lowerCaseUsers = new ArrayList(); + List lowerCaseUsers = new ArrayList<>(); for (String owner : owners) { lowerCaseUsers.add(owner.toLowerCase()); } @@ -113,8 +116,8 @@ public static InterpreterOption fromInterpreterOption(InterpreterOption other) { option.isExistingProcess = other.isExistingProcess; option.setPermission = other.setPermission; option.owners = (null == other.owners) ? - new ArrayList() : new ArrayList<>(other.owners); - + new ArrayList<>() : new ArrayList<>(other.owners); + option.conf = other.conf; return option; } diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/LazyOpenInterpreter.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/LazyOpenInterpreter.java index f794a582837..ec236ce1f28 100644 --- a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/LazyOpenInterpreter.java +++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/LazyOpenInterpreter.java @@ -21,6 +21,7 @@ import java.util.List; import java.util.Properties; +import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion; import org.apache.zeppelin.scheduler.Scheduler; @@ -204,6 +205,11 @@ public void setUserName(String userName) { this.intp.setUserName(userName); } + @Override + public void setZeppelinConfiguration(ZeppelinConfiguration zConf) { + this.intp.setZeppelinConfiguration(zConf); + } + @Override public String getUserName() { return this.intp.getUserName(); diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterServer.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterServer.java index dad3074c51a..f84aceb9bcb 100644 --- a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterServer.java +++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterServer.java @@ 
-207,7 +207,7 @@ public void run() { @Override public void init(Map properties) throws InterpreterRPCException, TException { - this.zConf = ZeppelinConfiguration.create(); + this.zConf = ZeppelinConfiguration.load(); for (Map.Entry entry : properties.entrySet()) { this.zConf.setProperty(entry.getKey(), entry.getValue()); } @@ -373,7 +373,7 @@ public void createInterpreter(String interpreterGroupId, String sessionId, Strin properties.get("zeppelin.interpreter.output.limit")); } - depLoader = new DependencyResolver(localRepoPath); + depLoader = new DependencyResolver(localRepoPath, zConf); appLoader = new ApplicationLoader(resourcePool, depLoader); resultCacheInSeconds = @@ -399,6 +399,7 @@ public void createInterpreter(String interpreterGroupId, String sessionId, Strin interpreter.setInterpreterGroup(interpreterGroup); interpreter.setUserName(userName); + interpreter.setZeppelinConfiguration(zConf); interpreterGroup.addInterpreterToSession(new LazyOpenInterpreter(interpreter), sessionId); diff --git a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/SchedulerFactory.java b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/SchedulerFactory.java index 24057803f7c..34609b49169 100644 --- a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/SchedulerFactory.java +++ b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/scheduler/SchedulerFactory.java @@ -50,9 +50,8 @@ public static SchedulerFactory singleton() { } private SchedulerFactory() { - ZeppelinConfiguration zConf = ZeppelinConfiguration.create(); - int threadPoolSize = - zConf.getInt(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_SCHEDULER_POOL_SIZE); + int threadPoolSize = ZeppelinConfiguration + .getStaticInt(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_SCHEDULER_POOL_SIZE); LOGGER.info("Scheduler Thread Pool Size: {}", threadPoolSize); executor = ExecutorFactory.singleton().createOrGet(SCHEDULER_EXECUTOR_NAME, threadPoolSize); } diff --git 
a/zeppelin-interpreter/src/test/java/org/apache/zeppelin/cluster/ClusterMultiNodeTest.java b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/cluster/ClusterMultiNodeTest.java index 8f4108e4083..62dea69ad85 100644 --- a/zeppelin-interpreter/src/test/java/org/apache/zeppelin/cluster/ClusterMultiNodeTest.java +++ b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/cluster/ClusterMultiNodeTest.java @@ -57,7 +57,7 @@ static void startCluster() throws IOException, InterruptedException { clusterAddrList += ","; } } - zconf = ZeppelinConfiguration.create(); + zconf = ZeppelinConfiguration.load(); zconf.setClusterAddress(clusterAddrList); // mock cluster manager server @@ -118,7 +118,6 @@ static void stopCluster() { for (ClusterManagerServer clusterServer : clusterServers) { clusterServer.shutdown(); } - ZeppelinConfiguration.reset(); LOGGER.info("stopCluster <<<"); } diff --git a/zeppelin-interpreter/src/test/java/org/apache/zeppelin/cluster/ClusterSingleNodeTest.java b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/cluster/ClusterSingleNodeTest.java index e1cc4a17641..8815a467c7f 100644 --- a/zeppelin-interpreter/src/test/java/org/apache/zeppelin/cluster/ClusterSingleNodeTest.java +++ b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/cluster/ClusterSingleNodeTest.java @@ -50,7 +50,7 @@ class ClusterSingleNodeTest { static void startCluster() throws IOException, InterruptedException { LOGGER.info("startCluster >>>"); - zconf = ZeppelinConfiguration.create("zeppelin-site-test.xml"); + zconf = ZeppelinConfiguration.load("zeppelin-site-test.xml"); // Set the cluster IP and port zServerHost = RemoteInterpreterUtils.findAvailableHostAddress(); @@ -92,7 +92,6 @@ static void stopCluster() { if (null != clusterClient) { clusterServer.shutdown(); } - ZeppelinConfiguration.reset(); LOGGER.info("stopCluster"); } diff --git a/zeppelin-interpreter/src/test/java/org/apache/zeppelin/dep/BooterTest.java 
b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/dep/BooterTest.java index 1a9e98e16e8..70ff074d54b 100644 --- a/zeppelin-interpreter/src/test/java/org/apache/zeppelin/dep/BooterTest.java +++ b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/dep/BooterTest.java @@ -56,9 +56,8 @@ void should_throw_exception_for_null() { @Test void getInterpreterMvnRepoPathTest() { - ZeppelinConfiguration.reset(); - ZeppelinConfiguration.create("zeppelin-site-test.xml"); - List remoteRepositories = Booter.newCentralRepositorys(null); + ZeppelinConfiguration conf = ZeppelinConfiguration.load("zeppelin-site-test.xml"); + List remoteRepositories = Booter.newCentralRepositorys(null, conf); assertNotNull(remoteRepositories); assertEquals(2, remoteRepositories.size()); assertEquals("https://repo1.maven.org/maven2/", remoteRepositories.get(0).getUrl()); diff --git a/zeppelin-interpreter/src/test/java/org/apache/zeppelin/dep/DependencyResolverTest.java b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/dep/DependencyResolverTest.java index c43ce8e4107..f51b683a75d 100644 --- a/zeppelin-interpreter/src/test/java/org/apache/zeppelin/dep/DependencyResolverTest.java +++ b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/dep/DependencyResolverTest.java @@ -18,6 +18,7 @@ package org.apache.zeppelin.dep; import org.apache.commons.io.FileUtils; +import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.eclipse.aether.RepositoryException; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; @@ -44,7 +45,7 @@ static void setUp() throws Exception { System.currentTimeMillis()); testPath = tmpDir.getAbsolutePath() + "/test-repo"; testCopyPath = new File(tmpDir, "test-copy-repo"); - resolver = new DependencyResolver(testPath); + resolver = new DependencyResolver(testPath, ZeppelinConfiguration.load()); } @AfterAll @@ -108,7 +109,8 @@ void should_throw_exception_if_dependency_not_found() throws Exception { FileNotFoundException exception = 
assertThrows(FileNotFoundException.class, () -> { resolver.load("one.two:1.0", testCopyPath); }); - assertEquals("Source 'one.two:1.0' does not exist", exception.getMessage()); + assertEquals("File system element for parameter 'source' does not exist: 'one.two:1.0'", + exception.getMessage()); } } diff --git a/zeppelin-interpreter/src/test/java/org/apache/zeppelin/helium/ApplicationLoaderTest.java b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/helium/ApplicationLoaderTest.java index 52508b48dd2..3344a38338f 100644 --- a/zeppelin-interpreter/src/test/java/org/apache/zeppelin/helium/ApplicationLoaderTest.java +++ b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/helium/ApplicationLoaderTest.java @@ -18,6 +18,7 @@ package org.apache.zeppelin.helium; import org.apache.commons.io.FileUtils; +import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.dep.DependencyResolver; import org.apache.zeppelin.interpreter.InterpreterOutput; import org.apache.zeppelin.resource.LocalResourcePool; @@ -52,7 +53,8 @@ void tearDown() throws IOException { void loadUnloadApplication() throws Exception { // given LocalResourcePool resourcePool = new LocalResourcePool("pool1"); - DependencyResolver dep = new DependencyResolver(tmpDir.getAbsolutePath()); + DependencyResolver dep = + new DependencyResolver(tmpDir.getAbsolutePath(), ZeppelinConfiguration.load()); ApplicationLoader appLoader = new ApplicationLoader(resourcePool, dep); HeliumPackage pkg1 = createPackageInfo(MockApplication1.class.getName(), "artifact1"); diff --git a/zeppelin-interpreter/src/test/java/org/apache/zeppelin/helium/HeliumPackageTest.java b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/helium/HeliumPackageTest.java index 4efeda7dca6..737e0186566 100644 --- a/zeppelin-interpreter/src/test/java/org/apache/zeppelin/helium/HeliumPackageTest.java +++ b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/helium/HeliumPackageTest.java @@ -19,6 +19,7 @@ import static 
org.apache.commons.text.StringEscapeUtils.escapeHtml4; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; import java.util.Map; @@ -31,7 +32,7 @@ void parseSpellPackageInfo() { String examplePackage = "{\n" + " \"type\" : \"SPELL\",\n" + " \"name\" : \"echo-spell\",\n" + - " \"description\" : \"'%echo' - return just what receive (example)\",\n" + + " \"description\" : \"'%echo' - return just what receive (example)i\",\n" + " \"artifact\" : \"./zeppelin-examples/zeppelin-example-spell-echo\",\n" + " \"license\" : \"Apache-2.0\",\n" + " \"icon\" : \"\",\n" + @@ -44,6 +45,7 @@ void parseSpellPackageInfo() { HeliumPackage p = HeliumPackage.fromJson(examplePackage); assertEquals("%echo", p.getSpellInfo().getMagic()); assertEquals(escapeHtml4("%echo "), p.getSpellInfo().getUsage()); + assertNotEquals("'%echo' - return just what receive (example)i", p.getDescription()); } @Test diff --git a/zeppelin-jupyter-interpreter-shaded/pom.xml b/zeppelin-jupyter-interpreter-shaded/pom.xml index c10fe84b66d..1070d5ae5a2 100644 --- a/zeppelin-jupyter-interpreter-shaded/pom.xml +++ b/zeppelin-jupyter-interpreter-shaded/pom.xml @@ -23,7 +23,7 @@ zeppelin org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 zeppelin-jupyter-interpreter-shaded diff --git a/zeppelin-jupyter-interpreter/pom.xml b/zeppelin-jupyter-interpreter/pom.xml index 58a5d484b3a..aa7ab7fd176 100644 --- a/zeppelin-jupyter-interpreter/pom.xml +++ b/zeppelin-jupyter-interpreter/pom.xml @@ -23,7 +23,7 @@ zeppelin-interpreter-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../zeppelin-interpreter-parent/pom.xml diff --git a/zeppelin-jupyter-interpreter/src/test/java/org/apache/zeppelin/jupyter/IPythonKernelTest.java b/zeppelin-jupyter-interpreter/src/test/java/org/apache/zeppelin/jupyter/IPythonKernelTest.java index 3e77facca6b..09ef103e75a 100644 --- 
a/zeppelin-jupyter-interpreter/src/test/java/org/apache/zeppelin/jupyter/IPythonKernelTest.java +++ b/zeppelin-jupyter-interpreter/src/test/java/org/apache/zeppelin/jupyter/IPythonKernelTest.java @@ -126,7 +126,7 @@ void testPythonBasics() throws InterpreterException, InterruptedException, IOExc // multiple output context = getInterpreterContext(); result = interpreter.interpret("print('hello world')\nprint('hello world2')", context); - Thread.sleep(100); + Thread.sleep(5000); assertEquals(InterpreterResult.Code.SUCCESS, result.code()); interpreterResultMessages = context.out.toInterpreterResultMessage(); assertEquals(1, interpreterResultMessages.size()); diff --git a/zeppelin-jupyter-interpreter/src/test/java/org/apache/zeppelin/jupyter/IRKernelTest.java b/zeppelin-jupyter-interpreter/src/test/java/org/apache/zeppelin/jupyter/IRKernelTest.java deleted file mode 100644 index 47dcb27b9d1..00000000000 --- a/zeppelin-jupyter-interpreter/src/test/java/org/apache/zeppelin/jupyter/IRKernelTest.java +++ /dev/null @@ -1,165 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - - -package org.apache.zeppelin.jupyter; - -import org.apache.zeppelin.interpreter.Interpreter; -import org.apache.zeppelin.interpreter.InterpreterContext; -import org.apache.zeppelin.interpreter.InterpreterException; -import org.apache.zeppelin.interpreter.InterpreterGroup; -import org.apache.zeppelin.interpreter.InterpreterOutput; -import org.apache.zeppelin.interpreter.InterpreterResult; -import org.apache.zeppelin.interpreter.InterpreterResultMessage; -import org.apache.zeppelin.interpreter.LazyOpenInterpreter; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; - -import java.io.IOException; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Properties; - -/** - * This test class is also used in the module rlang - * - * @author pdallig - */ -@SuppressWarnings("java:S5786") -public class IRKernelTest { - - protected Interpreter interpreter; - protected static boolean ENABLE_GOOGLEVIS_TEST = true; - - protected Interpreter createInterpreter(Properties properties) { - return new JupyterInterpreter(properties); - } - - @BeforeEach - public void setUp() throws InterpreterException { - Properties properties = new Properties(); - - InterpreterContext context = getInterpreterContext(); - InterpreterContext.set(context); - interpreter = createInterpreter(properties); - - InterpreterGroup interpreterGroup = new InterpreterGroup(); - interpreterGroup.addInterpreterToSession(new LazyOpenInterpreter(interpreter), "session_1"); - interpreter.setInterpreterGroup(interpreterGroup); - - interpreter.open(); - } - - @AfterEach - public void tearDown() throws InterpreterException { - if (interpreter != null) { - interpreter.close(); - } - } - - @Test - void testIRInterpreter() throws InterpreterException, IOException { - InterpreterContext 
context = getInterpreterContext(); - InterpreterResult result = interpreter.interpret("1+1", context); - assertEquals(InterpreterResult.Code.SUCCESS, result.code()); - List resultMessages = context.out.toInterpreterResultMessage(); - assertEquals(1, resultMessages.size()); - assertEquals(InterpreterResult.Type.HTML, resultMessages.get(0).getType(), - resultMessages.toString()); - assertEquals("2", resultMessages.get(0).getData(), resultMessages.toString()); - - // error - context = getInterpreterContext(); - result = interpreter.interpret("unknown_var", context); - assertEquals(InterpreterResult.Code.ERROR, result.code()); - resultMessages = context.out.toInterpreterResultMessage(); - assertEquals(1, resultMessages.size()); - assertEquals(InterpreterResult.Type.TEXT, resultMessages.get(0).getType(), result.toString()); - assertTrue(resultMessages.get(0).getData().contains("object 'unknown_var' not found"), - resultMessages.toString()); - - context = getInterpreterContext(); - result = interpreter.interpret("foo <- TRUE\n" + - "print(foo)\n" + - "bare <- c(1, 2.5, 4)\n" + - "print(bare)\n" + - "double <- 15.0\n" + - "print(double)", context); - assertEquals(InterpreterResult.Code.SUCCESS, result.code()); - resultMessages = context.out.toInterpreterResultMessage(); - assertEquals(1, resultMessages.size()); - assertEquals(InterpreterResult.Type.TEXT, resultMessages.get(0).getType(), result.toString()); - assertTrue(resultMessages.get(0).getData().contains("[1] TRUE\n" + - "[1] 1.0 2.5 4.0\n" + - "[1] 15\n"), resultMessages.toString()); - - // plotting - context = getInterpreterContext(); - result = interpreter.interpret("hist(mtcars$mpg)", context); - assertEquals(InterpreterResult.Code.SUCCESS, result.code()); - resultMessages = context.out.toInterpreterResultMessage(); - assertEquals(1, resultMessages.size()); - assertEquals(InterpreterResult.Type.IMG, resultMessages.get(0).getType(), - resultMessages.toString()); - - // ggplot2 - result = 
interpreter.interpret("library(ggplot2)\n" + - "ggplot(diamonds, aes(x=carat, y=price, color=cut)) + geom_point()", - getInterpreterContext()); - assertEquals(InterpreterResult.Code.SUCCESS, result.code()); - resultMessages = context.out.toInterpreterResultMessage(); - assertEquals(1, resultMessages.size()); - assertEquals(InterpreterResult.Type.IMG, resultMessages.get(0).getType(), - resultMessages.toString()); - - // googlevis - // TODO(zjffdu) It is weird that googlevis doesn't work with spark 2.2 - if (ENABLE_GOOGLEVIS_TEST) { - context = getInterpreterContext(); - result = interpreter.interpret("library(googleVis)\n" + - "df=data.frame(country=c(\"US\", \"GB\", \"BR\"), \n" + - " val1=c(10,13,14), \n" + - " val2=c(23,12,32))\n" + - "Bar <- gvisBarChart(df)\n" + - "print(Bar, tag = 'chart')", context); - assertEquals(InterpreterResult.Code.SUCCESS, result.code()); - resultMessages = context.out.toInterpreterResultMessage(); - assertEquals(2, resultMessages.size()); - assertEquals(InterpreterResult.Type.HTML, resultMessages.get(1).getType(), - resultMessages.toString()); - assertTrue(resultMessages.get(1).getData().contains("javascript"), - resultMessages.get(1).getData()); - } - } - - protected InterpreterContext getInterpreterContext() { - Map localProperties = new HashMap<>(); - localProperties.put("kernel", "ir"); - InterpreterContext context = InterpreterContext.builder() - .setNoteId("note_1") - .setParagraphId("paragraph_1") - .setInterpreterOut(new InterpreterOutput()) - .setLocalProperties(localProperties) - .build(); - return context; - } -} diff --git a/zeppelin-jupyter/pom.xml b/zeppelin-jupyter/pom.xml index 642e2a0ce91..8b36fceb45e 100644 --- a/zeppelin-jupyter/pom.xml +++ b/zeppelin-jupyter/pom.xml @@ -24,7 +24,7 @@ zeppelin org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 zeppelin-jupyter diff --git a/zeppelin-plugins/launcher/cluster/pom.xml b/zeppelin-plugins/launcher/cluster/pom.xml index c620fbacb9c..dcaf9ce1be9 100644 --- 
a/zeppelin-plugins/launcher/cluster/pom.xml +++ b/zeppelin-plugins/launcher/cluster/pom.xml @@ -25,7 +25,7 @@ zengine-plugins-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../../../zeppelin-plugins @@ -42,7 +42,7 @@ org.apache.zeppelin launcher-docker - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 diff --git a/zeppelin-plugins/launcher/cluster/src/main/java/org/apache/zeppelin/interpreter/launcher/ClusterInterpreterCheckThread.java b/zeppelin-plugins/launcher/cluster/src/main/java/org/apache/zeppelin/interpreter/launcher/ClusterInterpreterCheck.java similarity index 83% rename from zeppelin-plugins/launcher/cluster/src/main/java/org/apache/zeppelin/interpreter/launcher/ClusterInterpreterCheckThread.java rename to zeppelin-plugins/launcher/cluster/src/main/java/org/apache/zeppelin/interpreter/launcher/ClusterInterpreterCheck.java index f428bc2346b..932de4dabfb 100644 --- a/zeppelin-plugins/launcher/cluster/src/main/java/org/apache/zeppelin/interpreter/launcher/ClusterInterpreterCheckThread.java +++ b/zeppelin-plugins/launcher/cluster/src/main/java/org/apache/zeppelin/interpreter/launcher/ClusterInterpreterCheck.java @@ -30,28 +30,30 @@ // Metadata registered in the cluster by the interpreter process, // Keep the interpreter process started -public class ClusterInterpreterCheckThread extends Thread { +public class ClusterInterpreterCheck implements Runnable { private static final Logger LOGGER - = LoggerFactory.getLogger(ClusterInterpreterCheckThread.class); + = LoggerFactory.getLogger(ClusterInterpreterCheck.class); - private InterpreterClient intpProcess; - private String intpGroupId; - private int connectTimeout; + private final InterpreterClient intpProcess; + private final String intpGroupId; + private final int connectTimeout; + private final ZeppelinConfiguration zConf; - ClusterInterpreterCheckThread(InterpreterClient intpProcess, - String intpGroupId, - int connectTimeout) { + ClusterInterpreterCheck(InterpreterClient intpProcess, + String 
intpGroupId, + int connectTimeout, + ZeppelinConfiguration zConf) { this.intpProcess = intpProcess; this.intpGroupId = intpGroupId; this.connectTimeout = connectTimeout; + this.zConf = zConf; } @Override public void run() { LOGGER.info("ClusterInterpreterCheckThread run() >>>"); - ClusterManagerServer clusterServer = ClusterManagerServer.getInstance( - ZeppelinConfiguration.create()); + ClusterManagerServer clusterServer = ClusterManagerServer.getInstance(zConf); clusterServer.getIntpProcessStatus(intpGroupId, connectTimeout, new ClusterCallback>() { diff --git a/zeppelin-plugins/launcher/cluster/src/main/java/org/apache/zeppelin/interpreter/launcher/ClusterInterpreterLauncher.java b/zeppelin-plugins/launcher/cluster/src/main/java/org/apache/zeppelin/interpreter/launcher/ClusterInterpreterLauncher.java index 9b8841c6a3b..70f2a350a12 100644 --- a/zeppelin-plugins/launcher/cluster/src/main/java/org/apache/zeppelin/interpreter/launcher/ClusterInterpreterLauncher.java +++ b/zeppelin-plugins/launcher/cluster/src/main/java/org/apache/zeppelin/interpreter/launcher/ClusterInterpreterLauncher.java @@ -36,6 +36,7 @@ import java.io.IOException; import java.util.HashMap; import java.util.Map; +import java.util.concurrent.Executors; import static org.apache.zeppelin.cluster.event.ClusterEvent.CREATE_INTP_PROCESS; import static org.apache.zeppelin.cluster.meta.ClusterMeta.INTP_TSERVER_HOST; @@ -229,9 +230,9 @@ private InterpreterClient createInterpreterProcess(InterpreterLaunchContext cont } // must first step start check interpreter thread - ClusterInterpreterCheckThread intpCheckThread = new ClusterInterpreterCheckThread( - intpProcess, context.getInterpreterGroupId(), getConnectTimeout(context)); - intpCheckThread.start(); + ClusterInterpreterCheck intpCheck = new ClusterInterpreterCheck( + intpProcess, context.getInterpreterGroupId(), getConnectTimeout(context), zConf); + Executors.newSingleThreadExecutor().execute(intpCheck); return intpProcess; } diff --git 
a/zeppelin-plugins/launcher/cluster/src/test/java/org/apache/zeppelin/interpreter/launcher/ClusterInterpreterLauncherTest.java b/zeppelin-plugins/launcher/cluster/src/test/java/org/apache/zeppelin/interpreter/launcher/ClusterInterpreterLauncherTest.java index f41d4d879ed..a9467fa93fa 100644 --- a/zeppelin-plugins/launcher/cluster/src/test/java/org/apache/zeppelin/interpreter/launcher/ClusterInterpreterLauncherTest.java +++ b/zeppelin-plugins/launcher/cluster/src/test/java/org/apache/zeppelin/interpreter/launcher/ClusterInterpreterLauncherTest.java @@ -23,8 +23,6 @@ import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -32,9 +30,7 @@ import java.io.IOException; import java.util.Properties; -public class ClusterInterpreterLauncherTest extends ClusterMockTest { - private static final Logger LOGGER = - LoggerFactory.getLogger(ClusterInterpreterLauncherTest.class); +class ClusterInterpreterLauncherTest extends ClusterMockTest { @BeforeAll static void startTest() throws IOException, InterruptedException { @@ -77,6 +73,7 @@ void testConnectExistOnlineIntpProcess() throws IOException { assertEquals("127.0.0.1", interpreterProcess.getHost()); assertEquals("name", interpreterProcess.getInterpreterSettingName()); assertEquals(5000, interpreterProcess.getConnectTimeout()); + interpreterProcess.close(); } @Test @@ -104,6 +101,7 @@ void testConnectExistOfflineIntpProcess() throws IOException { assertEquals(zconf.getInterpreterRemoteRunnerPath(), interpreterProcess.getInterpreterRunner()); assertTrue(interpreterProcess.getEnv().size() >= 1); assertEquals(true, interpreterProcess.isUserImpersonated()); + interpreterProcess.close(); } @Test diff --git 
a/zeppelin-plugins/launcher/cluster/src/test/java/org/apache/zeppelin/interpreter/launcher/ClusterMockTest.java b/zeppelin-plugins/launcher/cluster/src/test/java/org/apache/zeppelin/interpreter/launcher/ClusterMockTest.java index e66abcedf56..5d5371ab97f 100644 --- a/zeppelin-plugins/launcher/cluster/src/test/java/org/apache/zeppelin/interpreter/launcher/ClusterMockTest.java +++ b/zeppelin-plugins/launcher/cluster/src/test/java/org/apache/zeppelin/interpreter/launcher/ClusterMockTest.java @@ -54,7 +54,7 @@ public class ClusterMockTest { public static void startCluster() throws IOException, InterruptedException { LOGGER.info("startCluster >>>"); - zconf = ZeppelinConfiguration.create(); + zconf = ZeppelinConfiguration.load(); // Set the cluster IP and port zServerHost = RemoteInterpreterUtils.findAvailableHostAddress(); @@ -105,7 +105,6 @@ public static void stopCluster() { } tSocket.close(); - ZeppelinConfiguration.reset(); LOGGER.info("stopCluster <<<"); } diff --git a/zeppelin-plugins/launcher/docker/pom.xml b/zeppelin-plugins/launcher/docker/pom.xml index 4691ea89bf9..75a6c969d34 100644 --- a/zeppelin-plugins/launcher/docker/pom.xml +++ b/zeppelin-plugins/launcher/docker/pom.xml @@ -25,7 +25,7 @@ zengine-plugins-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../../../zeppelin-plugins diff --git a/zeppelin-plugins/launcher/docker/src/main/java/org/apache/zeppelin/interpreter/launcher/DockerInterpreterProcess.java b/zeppelin-plugins/launcher/docker/src/main/java/org/apache/zeppelin/interpreter/launcher/DockerInterpreterProcess.java index a643f96bd3f..84d2b5c03ee 100644 --- a/zeppelin-plugins/launcher/docker/src/main/java/org/apache/zeppelin/interpreter/launcher/DockerInterpreterProcess.java +++ b/zeppelin-plugins/launcher/docker/src/main/java/org/apache/zeppelin/interpreter/launcher/DockerInterpreterProcess.java @@ -432,9 +432,8 @@ public String getErrorMessage() { return null; } - // upload configure file to submarine interpreter container + // 
upload keytab and configuration file to interpreter container: // keytab file & zeppelin-site.xml & krb5.conf - // The submarine configures the mount file into the container through `localization` // NOTE: The path to the file uploaded to the container, // Can not be repeated, otherwise it will lead to failure. private void copyRunFileToContainer(String containerId) @@ -476,22 +475,18 @@ private void copyRunFileToContainer(String containerId) intpKeytab = properties.getProperty("spark.yarn.keytab", ""); } if (StringUtils.isBlank(intpKeytab)) { - // 3.3) submarine interpreter properties keytab file - intpKeytab = properties.getProperty("submarine.hadoop.keytab", ""); - } - if (StringUtils.isBlank(intpKeytab)) { - // 3.4) livy interpreter properties keytab file + // 3.3) livy interpreter properties keytab file intpKeytab = properties.getProperty("zeppelin.livy.keytab", ""); } if (StringUtils.isBlank(intpKeytab)) { - // 3.5) jdbc interpreter properties keytab file + // 3.4) jdbc interpreter properties keytab file intpKeytab = properties.getProperty("zeppelin.jdbc.keytab.location", ""); } if (!StringUtils.isBlank(intpKeytab)) { LOGGER.info("intpKeytab : {}", intpKeytab); copyFiles.putIfAbsent(intpKeytab, intpKeytab); } - // 3.6) zeppelin server keytab file + // 3.5) zeppelin server keytab file String zeppelinServerKeytab = zconf.getString(ZEPPELIN_SERVER_KERBEROS_KEYTAB); if (!StringUtils.isBlank(zeppelinServerKeytab)) { copyFiles.putIfAbsent(zeppelinServerKeytab, zeppelinServerKeytab); diff --git a/zeppelin-plugins/launcher/docker/src/test/java/org/apache/zeppelin/interpreter/launcher/DockerInterpreterProcessTest.java b/zeppelin-plugins/launcher/docker/src/test/java/org/apache/zeppelin/interpreter/launcher/DockerInterpreterProcessTest.java index 1bf55412685..8ae12b3d8a8 100644 --- a/zeppelin-plugins/launcher/docker/src/test/java/org/apache/zeppelin/interpreter/launcher/DockerInterpreterProcessTest.java +++ 
b/zeppelin-plugins/launcher/docker/src/test/java/org/apache/zeppelin/interpreter/launcher/DockerInterpreterProcessTest.java @@ -36,7 +36,7 @@ class DockerInterpreterProcessTest { - protected static ZeppelinConfiguration zconf = spy(ZeppelinConfiguration.create()); + protected static ZeppelinConfiguration zconf = spy(ZeppelinConfiguration.load()); @Test void testCreateIntpProcess() throws IOException { diff --git a/zeppelin-plugins/launcher/flink/pom.xml b/zeppelin-plugins/launcher/flink/pom.xml index 8869681d48b..04d08c0d9d8 100644 --- a/zeppelin-plugins/launcher/flink/pom.xml +++ b/zeppelin-plugins/launcher/flink/pom.xml @@ -25,7 +25,7 @@ zengine-plugins-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../../../zeppelin-plugins diff --git a/zeppelin-plugins/launcher/flink/src/main/java/org/apache/zeppelin/interpreter/launcher/FlinkInterpreterLauncher.java b/zeppelin-plugins/launcher/flink/src/main/java/org/apache/zeppelin/interpreter/launcher/FlinkInterpreterLauncher.java index aa22ab64608..847cef2bff4 100644 --- a/zeppelin-plugins/launcher/flink/src/main/java/org/apache/zeppelin/interpreter/launcher/FlinkInterpreterLauncher.java +++ b/zeppelin-plugins/launcher/flink/src/main/java/org/apache/zeppelin/interpreter/launcher/FlinkInterpreterLauncher.java @@ -42,6 +42,7 @@ public class FlinkInterpreterLauncher extends StandardInterpreterLauncher { private static final Set FLINK_EXECUTION_MODES = Sets.newHashSet( "local", "remote", "yarn", "yarn-application", "kubernetes-application"); + public FlinkInterpreterLauncher(ZeppelinConfiguration zConf, RecoveryStorage recoveryStorage) { super(zConf, recoveryStorage); } @@ -156,7 +157,7 @@ private String chooseFlinkAppJar(String flinkHome) throws IOException { } final String flinkScalaVersion = scalaVersion; File flinkInterpreterFolder = - new File(ZeppelinConfiguration.create().getInterpreterDir(), "flink"); + new File(zConf.getInterpreterDir(), "flink"); List flinkScalaJars = 
Arrays.stream(flinkInterpreterFolder .listFiles(file -> file.getName().endsWith(".jar"))) diff --git a/zeppelin-plugins/launcher/k8s-standard/pom.xml b/zeppelin-plugins/launcher/k8s-standard/pom.xml index 49e5727c03a..55404647fab 100644 --- a/zeppelin-plugins/launcher/k8s-standard/pom.xml +++ b/zeppelin-plugins/launcher/k8s-standard/pom.xml @@ -24,7 +24,7 @@ zengine-plugins-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../../../zeppelin-plugins diff --git a/zeppelin-plugins/launcher/k8s-standard/src/test/java/org/apache/zeppelin/interpreter/launcher/K8sStandardInterpreterLauncherTest.java b/zeppelin-plugins/launcher/k8s-standard/src/test/java/org/apache/zeppelin/interpreter/launcher/K8sStandardInterpreterLauncherTest.java index 160c191ff33..651f4a9e6fc 100644 --- a/zeppelin-plugins/launcher/k8s-standard/src/test/java/org/apache/zeppelin/interpreter/launcher/K8sStandardInterpreterLauncherTest.java +++ b/zeppelin-plugins/launcher/k8s-standard/src/test/java/org/apache/zeppelin/interpreter/launcher/K8sStandardInterpreterLauncherTest.java @@ -42,7 +42,7 @@ void setUp() { @Test void testK8sLauncher() throws IOException { // given - ZeppelinConfiguration zConf = ZeppelinConfiguration.create(); + ZeppelinConfiguration zConf = ZeppelinConfiguration.load(); K8sStandardInterpreterLauncher launcher = new K8sStandardInterpreterLauncher(zConf, null); Properties properties = new Properties(); properties.setProperty("ENV_1", "VALUE_1"); @@ -71,7 +71,7 @@ void testK8sLauncher() throws IOException { @Test void testK8sLauncherWithSparkAndUserImpersonate() throws IOException { // given - ZeppelinConfiguration zConf = ZeppelinConfiguration.create(); + ZeppelinConfiguration zConf = ZeppelinConfiguration.load(); K8sStandardInterpreterLauncher launcher = new K8sStandardInterpreterLauncher(zConf, null); Properties properties = new Properties(); properties.setProperty("ENV_1", "VALUE_1"); @@ -105,7 +105,7 @@ void testK8sLauncherWithSparkAndUserImpersonate() throws 
IOException { @Test void testK8sLauncherWithSparkAndWithoutUserImpersonate() throws IOException { // given - ZeppelinConfiguration zConf = ZeppelinConfiguration.create(); + ZeppelinConfiguration zConf = ZeppelinConfiguration.load(); K8sStandardInterpreterLauncher launcher = new K8sStandardInterpreterLauncher(zConf, null); Properties properties = new Properties(); properties.setProperty("ENV_1", "VALUE_1"); diff --git a/zeppelin-plugins/launcher/yarn/pom.xml b/zeppelin-plugins/launcher/yarn/pom.xml index d95cfd8c904..fc1a39ff3ce 100644 --- a/zeppelin-plugins/launcher/yarn/pom.xml +++ b/zeppelin-plugins/launcher/yarn/pom.xml @@ -25,7 +25,7 @@ zengine-plugins-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../../../zeppelin-plugins @@ -73,44 +73,18 @@ - hadoop2 - + hadoop3 true - - - ${hadoop2.7.version} - - - - org.apache.hadoop - hadoop-common - provided - ${hadoop.version} - - - - org.apache.hadoop - hadoop-client - provided - ${hadoop.version} - - - - - - hadoop3 - - ${hadoop3.2.version} - hadoop-client-runtime + ${hadoop3.3.version} org.apache.hadoop - hadoop-client + hadoop-client-api provided ${hadoop.version} diff --git a/zeppelin-plugins/launcher/yarn/src/main/java/org/apache/zeppelin/interpreter/launcher/YarnInterpreterLauncher.java b/zeppelin-plugins/launcher/yarn/src/main/java/org/apache/zeppelin/interpreter/launcher/YarnInterpreterLauncher.java index f0ff5ab73d6..3cd445ea36f 100644 --- a/zeppelin-plugins/launcher/yarn/src/main/java/org/apache/zeppelin/interpreter/launcher/YarnInterpreterLauncher.java +++ b/zeppelin-plugins/launcher/yarn/src/main/java/org/apache/zeppelin/interpreter/launcher/YarnInterpreterLauncher.java @@ -47,7 +47,8 @@ public InterpreterClient launchDirectly(InterpreterLaunchContext context) throws context.getProperties(), buildEnvFromProperties(context), getConnectTimeout(context), - getConnectPoolSize(context)); + getConnectPoolSize(context), + zConf); } protected Map buildEnvFromProperties(InterpreterLaunchContext 
context) { diff --git a/zeppelin-plugins/launcher/yarn/src/main/java/org/apache/zeppelin/interpreter/launcher/YarnRemoteInterpreterProcess.java b/zeppelin-plugins/launcher/yarn/src/main/java/org/apache/zeppelin/interpreter/launcher/YarnRemoteInterpreterProcess.java index 974e4c37461..f47eaf68d76 100644 --- a/zeppelin-plugins/launcher/yarn/src/main/java/org/apache/zeppelin/interpreter/launcher/YarnRemoteInterpreterProcess.java +++ b/zeppelin-plugins/launcher/yarn/src/main/java/org/apache/zeppelin/interpreter/launcher/YarnRemoteInterpreterProcess.java @@ -78,7 +78,7 @@ public class YarnRemoteInterpreterProcess extends RemoteInterpreterProcess { private String host; private int port = -1; - private ZeppelinConfiguration zConf; + private final ZeppelinConfiguration zConf; private final InterpreterLaunchContext launchContext; private final Properties properties; private final Map envs; @@ -102,12 +102,13 @@ public YarnRemoteInterpreterProcess( Properties properties, Map envs, int connectTimeout, - int connectionPoolSize) { + int connectionPoolSize, + ZeppelinConfiguration zConf) { super(connectTimeout, connectionPoolSize, launchContext.getIntpEventServerHost(), launchContext.getIntpEventServerPort()); - this.zConf = ZeppelinConfiguration.create(); + this.zConf = zConf; this.launchContext = launchContext; this.properties = properties; this.envs = envs; @@ -491,7 +492,7 @@ private File createInterpreterZip() throws IOException { try (ZipOutputStream interpreterZipStream = new ZipOutputStream(new FileOutputStream(interpreterArchive))) { interpreterZipStream.setLevel(0); - String zeppelinHomeEnv = System.getenv("ZEPPELIN_HOME"); + String zeppelinHomeEnv = zConf.getZeppelinHome(); if (org.apache.commons.lang3.StringUtils.isBlank(zeppelinHomeEnv)) { throw new IOException("ZEPPELIN_HOME is not specified"); } diff --git a/zeppelin-plugins/notebookrepo/azure/pom.xml b/zeppelin-plugins/notebookrepo/azure/pom.xml index 416a4ae8ee1..8cf0bf6fb6a 100644 --- 
a/zeppelin-plugins/notebookrepo/azure/pom.xml +++ b/zeppelin-plugins/notebookrepo/azure/pom.xml @@ -24,7 +24,7 @@ zengine-plugins-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../../../zeppelin-plugins diff --git a/zeppelin-plugins/notebookrepo/azure/src/main/java/org/apache/zeppelin/notebook/repo/AzureNotebookRepo.java b/zeppelin-plugins/notebookrepo/azure/src/main/java/org/apache/zeppelin/notebook/repo/AzureNotebookRepo.java index 490f8189436..902d7b2109a 100644 --- a/zeppelin-plugins/notebookrepo/azure/src/main/java/org/apache/zeppelin/notebook/repo/AzureNotebookRepo.java +++ b/zeppelin-plugins/notebookrepo/azure/src/main/java/org/apache/zeppelin/notebook/repo/AzureNotebookRepo.java @@ -37,6 +37,7 @@ import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.notebook.Note; import org.apache.zeppelin.notebook.NoteInfo; +import org.apache.zeppelin.notebook.NoteParser; import org.apache.zeppelin.user.AuthenticationInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -44,7 +45,7 @@ /** * Azure storage backend for notebooks */ -public class AzureNotebookRepo implements NotebookRepo { +public class AzureNotebookRepo extends AbstractNotebookRepo { private static final Logger LOGGER = LoggerFactory.getLogger(AzureNotebookRepo.class); private ZeppelinConfiguration conf; @@ -57,8 +58,8 @@ public AzureNotebookRepo() { } @Override - public void init(ZeppelinConfiguration conf) throws IOException { - this.conf = conf; + public void init(ZeppelinConfiguration conf, NoteParser noteParser) throws IOException { + super.init(conf, noteParser); user = conf.getString(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_AZURE_USER); shareName = conf.getString(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_AZURE_SHARE); @@ -125,7 +126,7 @@ public Note get(String noteId, String notePath, AuthenticationInfo subject) thro String json = IOUtils.toString(ins, conf.getString(ZeppelinConfiguration.ConfVars.ZEPPELIN_ENCODING)); 
ins.close(); - return Note.fromJson(noteId, json); + return noteParser.fromJson(noteId, json); } @Override diff --git a/zeppelin-plugins/notebookrepo/filesystem/pom.xml b/zeppelin-plugins/notebookrepo/filesystem/pom.xml index 243f515770b..3a8cdb10d23 100644 --- a/zeppelin-plugins/notebookrepo/filesystem/pom.xml +++ b/zeppelin-plugins/notebookrepo/filesystem/pom.xml @@ -24,7 +24,7 @@ zengine-plugins-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../../../zeppelin-plugins @@ -49,42 +49,15 @@ - hadoop2 + hadoop3 true - ${hadoop2.7.version} - - - - org.apache.hadoop - hadoop-common - provided - ${hadoop.version} - - - org.apache.hadoop - hadoop-client - provided - ${hadoop.version} - - - - - - hadoop3 - - ${hadoop3.2.version} + ${hadoop3.3.version} - - org.apache.hadoop - hadoop-client - provided - ${hadoop.version} - org.apache.hadoop hadoop-client-runtime @@ -100,85 +73,10 @@ - - hadoop2-azure - - ${hadoop2.7.version} - - - - org.apache.hadoop - hadoop-azure - ${hadoop.version} - - - com.fasterxml.jackson.core - jackson-core - - - com.google.guava - guava - - - org.apache.commons - commons-lang3 - - - com.jcraf - jsch - - - org.apache.commons - commons-compress - - - - - com.microsoft.azure - azure-data-lake-store-sdk - ${adl.sdk.version} - - - com.fasterxml.jackson.core - jackson-core - - - - - - - - hadoop2-aws - - ${hadoop2.7.version} - - - - org.apache.hadoop - hadoop-aws - ${hadoop.version} - - - com.fasterxml.jackson.core - jackson-annotations - - - com.fasterxml.jackson.core - jackson-core - - - com.fasterxml.jackson.core - jackson-databind - - - - - - hadoop3-azure - ${hadoop3.0.version} + ${hadoop3.3.version} @@ -257,7 +155,7 @@ hadoop3-aws - ${hadoop3.0.version} + ${hadoop3.3.version} diff --git a/zeppelin-plugins/notebookrepo/filesystem/src/main/java/org/apache/zeppelin/notebook/repo/FileSystemNotebookRepo.java b/zeppelin-plugins/notebookrepo/filesystem/src/main/java/org/apache/zeppelin/notebook/repo/FileSystemNotebookRepo.java index 
efa92eb19dd..8168061ead4 100644 --- a/zeppelin-plugins/notebookrepo/filesystem/src/main/java/org/apache/zeppelin/notebook/repo/FileSystemNotebookRepo.java +++ b/zeppelin-plugins/notebookrepo/filesystem/src/main/java/org/apache/zeppelin/notebook/repo/FileSystemNotebookRepo.java @@ -22,6 +22,7 @@ import org.apache.zeppelin.notebook.FileSystemStorage; import org.apache.zeppelin.notebook.Note; import org.apache.zeppelin.notebook.NoteInfo; +import org.apache.zeppelin.notebook.NoteParser; import org.apache.zeppelin.user.AuthenticationInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -36,7 +37,7 @@ * NotebookRepos for hdfs. * */ -public class FileSystemNotebookRepo implements NotebookRepo { +public class FileSystemNotebookRepo extends AbstractNotebookRepo { private static final Logger LOGGER = LoggerFactory.getLogger(FileSystemNotebookRepo.class); private FileSystemStorage fs; @@ -47,10 +48,12 @@ public FileSystemNotebookRepo() { } @Override - public void init(ZeppelinConfiguration zConf) throws IOException { - this.fs = new FileSystemStorage(zConf, zConf.getString(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_DIR)); + public void init(ZeppelinConfiguration conf, NoteParser noteParser) throws IOException { + super.init(conf, noteParser); + this.fs = new FileSystemStorage(conf, + conf.getString(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_DIR)); LOGGER.info("Creating FileSystem: {}", this.fs.getFs().getClass().getName()); - this.notebookDir = this.fs.makeQualified(new Path(zConf.getNotebookDir())); + this.notebookDir = this.fs.makeQualified(new Path(conf.getNotebookDir())); LOGGER.info("Using folder {} to store notebook", notebookDir); this.fs.tryMkDir(notebookDir); } @@ -75,7 +78,7 @@ public Map list(AuthenticationInfo subject) throws IOException public Note get(String noteId, String notePath, AuthenticationInfo subject) throws IOException { String content = this.fs.readFile( new Path(notebookDir, buildNoteFileName(noteId, notePath))); - return 
Note.fromJson(noteId, content); + return noteParser.fromJson(noteId, content); } @Override diff --git a/zeppelin-plugins/notebookrepo/filesystem/src/test/java/org/apache/zeppelin/notebook/repo/FileSystemNotebookRepoTest.java b/zeppelin-plugins/notebookrepo/filesystem/src/test/java/org/apache/zeppelin/notebook/repo/FileSystemNotebookRepoTest.java index 26cfcf874af..1d7f8d19def 100644 --- a/zeppelin-plugins/notebookrepo/filesystem/src/test/java/org/apache/zeppelin/notebook/repo/FileSystemNotebookRepoTest.java +++ b/zeppelin-plugins/notebookrepo/filesystem/src/test/java/org/apache/zeppelin/notebook/repo/FileSystemNotebookRepoTest.java @@ -23,8 +23,10 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.zeppelin.conf.ZeppelinConfiguration; +import org.apache.zeppelin.notebook.GsonNoteParser; import org.apache.zeppelin.notebook.Note; import org.apache.zeppelin.notebook.NoteInfo; +import org.apache.zeppelin.notebook.NoteParser; import org.apache.zeppelin.user.AuthenticationInfo; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; @@ -47,16 +49,19 @@ class FileSystemNotebookRepoTest { private FileSystemNotebookRepo hdfsNotebookRepo; private String notebookDir; private AuthenticationInfo authInfo = AuthenticationInfo.ANONYMOUS; + private NoteParser noteParser; @BeforeEach void setUp() throws IOException { notebookDir = Files.createTempDirectory("FileSystemNotebookRepoTest").toFile().getAbsolutePath(); - zConf = ZeppelinConfiguration.create(); - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_DIR.getVarName(), notebookDir); + zConf = ZeppelinConfiguration.load(); + noteParser = new GsonNoteParser(zConf); + zConf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_DIR.getVarName(), + notebookDir); hadoopConf = new Configuration(); fs = FileSystem.get(hadoopConf); hdfsNotebookRepo = new FileSystemNotebookRepo(); - hdfsNotebookRepo.init(zConf); + hdfsNotebookRepo.init(zConf, 
noteParser); } @AfterEach @@ -70,6 +75,8 @@ void testBasics() throws IOException { // create a new note Note note = new Note(); + note.setZeppelinConfiguration(zConf); + note.setNoteParser(noteParser); note.setPath("/title_1"); Map config = new HashMap<>(); @@ -104,6 +111,8 @@ void testBasics() throws IOException { // create another new note under folder note = new Note(); + note.setZeppelinConfiguration(zConf); + note.setNoteParser(noteParser); note.setPath("/folder1/title_1"); note.setConfig(config); hdfsNotebookRepo.save(note, authInfo); @@ -132,6 +141,8 @@ void testComplicatedScenarios() throws IOException { // scenario_2: note_folder is existed. // create a new note Note note = new Note(); + note.setZeppelinConfiguration(zConf); + note.setNoteParser(noteParser); note.setPath("/title_1"); Map config = new HashMap<>(); config.put("config_1", "value_1"); diff --git a/zeppelin-plugins/notebookrepo/gcs/pom.xml b/zeppelin-plugins/notebookrepo/gcs/pom.xml index 792297642b3..ad468cd6182 100644 --- a/zeppelin-plugins/notebookrepo/gcs/pom.xml +++ b/zeppelin-plugins/notebookrepo/gcs/pom.xml @@ -24,7 +24,7 @@ zengine-plugins-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../../../zeppelin-plugins diff --git a/zeppelin-plugins/notebookrepo/gcs/src/main/java/org/apache/zeppelin/notebook/repo/GCSNotebookRepo.java b/zeppelin-plugins/notebookrepo/gcs/src/main/java/org/apache/zeppelin/notebook/repo/GCSNotebookRepo.java index 083f564b050..5c10cd5149d 100644 --- a/zeppelin-plugins/notebookrepo/gcs/src/main/java/org/apache/zeppelin/notebook/repo/GCSNotebookRepo.java +++ b/zeppelin-plugins/notebookrepo/gcs/src/main/java/org/apache/zeppelin/notebook/repo/GCSNotebookRepo.java @@ -28,7 +28,6 @@ import com.google.cloud.storage.StorageOptions; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; -import com.google.gson.JsonParseException; import java.io.FileInputStream; import java.io.IOException; @@ -47,6 +46,8 @@ import 
org.apache.zeppelin.conf.ZeppelinConfiguration.ConfVars; import org.apache.zeppelin.notebook.Note; import org.apache.zeppelin.notebook.NoteInfo; +import org.apache.zeppelin.notebook.NoteParser; +import org.apache.zeppelin.notebook.exception.CorruptedNoteException; import org.apache.zeppelin.user.AuthenticationInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -65,7 +66,7 @@ * @see * google-auth-library-java. */ -public class GCSNotebookRepo implements NotebookRepo { +public class GCSNotebookRepo extends AbstractNotebookRepo { private static final Logger LOGGER = LoggerFactory.getLogger(GCSNotebookRepo.class); private String encoding; @@ -78,15 +79,24 @@ public GCSNotebookRepo() { } @VisibleForTesting - public GCSNotebookRepo(ZeppelinConfiguration zConf, Storage storage) throws IOException { - init(zConf); + public GCSNotebookRepo(ZeppelinConfiguration conf, NoteParser noteParser, Storage storage) + throws IOException + { + try { + init(conf, noteParser); + } catch (IOException e) { + // Skip Credentials Exception during tests + if (!e.getMessage().contains("Default Credentials")) { + throw e; + } + } this.storage = storage; } @Override - public void init(ZeppelinConfiguration zConf) throws IOException { + public void init(ZeppelinConfiguration zConf, NoteParser noteParser) throws IOException { + super.init(zConf, noteParser); this.encoding = zConf.getString(ConfVars.ZEPPELIN_ENCODING); - String gcsStorageDir = zConf.getGCSStorageDir(); if (gcsStorageDir.isEmpty()) { throw new IOException("GCS storage directory must be set using 'zeppelin.notebook.gcs.dir'"); @@ -182,8 +192,8 @@ public Note get(String noteId, String notePath, AuthenticationInfo subject) thro } try { - return Note.fromJson(noteId, new String(contents, encoding)); - } catch (JsonParseException jpe) { + return noteParser.fromJson(noteId, new String(contents, encoding)); + } catch (CorruptedNoteException jpe) { throw new IOException( "Could note parse as json " + blobId.toString() + 
jpe.getMessage(), jpe); } diff --git a/zeppelin-plugins/notebookrepo/gcs/src/test/java/org/apache/zeppelin/notebook/repo/GCSNotebookRepoTest.java b/zeppelin-plugins/notebookrepo/gcs/src/test/java/org/apache/zeppelin/notebook/repo/GCSNotebookRepoTest.java index 58d3b888137..ff13c9bed26 100644 --- a/zeppelin-plugins/notebookrepo/gcs/src/test/java/org/apache/zeppelin/notebook/repo/GCSNotebookRepoTest.java +++ b/zeppelin-plugins/notebookrepo/gcs/src/test/java/org/apache/zeppelin/notebook/repo/GCSNotebookRepoTest.java @@ -26,7 +26,9 @@ import com.google.cloud.storage.Storage; import com.google.cloud.storage.contrib.nio.testing.LocalStorageHelper; import com.google.common.collect.ImmutableMap; + import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -36,25 +38,28 @@ import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.conf.ZeppelinConfiguration.ConfVars; +import org.apache.zeppelin.notebook.GsonNoteParser; import org.apache.zeppelin.notebook.Note; import org.apache.zeppelin.notebook.NoteInfo; +import org.apache.zeppelin.notebook.NoteParser; import org.apache.zeppelin.notebook.Paragraph; import org.apache.zeppelin.scheduler.Job.Status; import org.apache.zeppelin.user.AuthenticationInfo; +import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; -//TODO(zjffdu) This test fails due to some changes in google, need to fix -@Disabled class GCSNotebookRepoTest { private static final AuthenticationInfo AUTH_INFO = AuthenticationInfo.ANONYMOUS; + private static final String DEFAULT_URL = "gs://bucketname"; private GCSNotebookRepo notebookRepo; private Storage storage; + private ZeppelinConfiguration zConf; 
+ private NoteParser noteParser; private static Stream buckets() { return Stream.of( @@ -69,12 +74,23 @@ private static Stream buckets() { @BeforeEach void setUp() throws Exception { - this.runningNote = makeRunningNote(); + this.zConf = ZeppelinConfiguration.load(); + this.noteParser = new GsonNoteParser(zConf); + this.runningNote = makeRunningNote(zConf, noteParser); this.storage = LocalStorageHelper.getOptions().getService(); } - private static Note makeRunningNote() { + @AfterEach + void tearDown() { + if (notebookRepo != null) { + notebookRepo.close(); + } + } + + private static Note makeRunningNote(ZeppelinConfiguration zConf, NoteParser noteParser) { Note note = new Note(); + note.setZeppelinConfiguration(zConf); + note.setNoteParser(noteParser); note.setPath("/test_note"); note.setConfig(ImmutableMap.of("key", "value")); @@ -89,16 +105,16 @@ private static Note makeRunningNote() { @ParameterizedTest @MethodSource("buckets") void testList_nonexistent(String bucketName, Optional basePath, String uriPath) throws Exception { - System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR.getVarName(), uriPath); - this.notebookRepo = new GCSNotebookRepo(ZeppelinConfiguration.create(), storage); + zConf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR.getVarName(), uriPath); + this.notebookRepo = new GCSNotebookRepo(zConf, noteParser, storage); assertThat(notebookRepo.list(AUTH_INFO)).isEmpty(); } @ParameterizedTest @MethodSource("buckets") void testList(String bucketName, Optional basePath, String uriPath) throws Exception { - System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR.getVarName(), uriPath); - this.notebookRepo = new GCSNotebookRepo(ZeppelinConfiguration.create(), storage); + zConf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR.getVarName(), uriPath); + this.notebookRepo = new GCSNotebookRepo(zConf, noteParser, storage); createAt(runningNote, "note.zpln", bucketName, basePath); createAt(runningNote, "/note.zpln", bucketName, 
basePath); createAt(runningNote, "validid/my_12.zpln", bucketName, basePath); @@ -117,17 +133,18 @@ void testList(String bucketName, Optional basePath, String uriPath) thro @Test void testGet_nonexistent() throws Exception { - try { + zConf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR.getVarName(), DEFAULT_URL); + this.notebookRepo = new GCSNotebookRepo(zConf, noteParser, storage); + assertThrows(IOException.class, () -> { notebookRepo.get("id", "", AUTH_INFO); - fail(); - } catch (IOException e) {} + }); } @ParameterizedTest @MethodSource("buckets") void testGet(String bucketName, Optional basePath, String uriPath) throws Exception { - System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR.getVarName(), uriPath); - this.notebookRepo = new GCSNotebookRepo(ZeppelinConfiguration.create(), storage); + zConf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR.getVarName(), uriPath); + this.notebookRepo = new GCSNotebookRepo(zConf, noteParser, storage); create(runningNote, bucketName, basePath); // Status of saved running note is removed in get() @@ -142,20 +159,19 @@ void testGet(String bucketName, Optional basePath, String uriPath) throw @ParameterizedTest @MethodSource("buckets") void testGet_malformed(String bucketName, Optional basePath, String uriPath) throws Exception { - System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR.getVarName(), uriPath); - this.notebookRepo = new GCSNotebookRepo(ZeppelinConfiguration.create(), storage); + zConf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR.getVarName(), uriPath); + this.notebookRepo = new GCSNotebookRepo(zConf, noteParser, storage); createMalformed("id", "/name", bucketName, basePath); - try { + assertThrows(IOException.class, () -> { notebookRepo.get("id", "/name", AUTH_INFO); - fail(); - } catch (IOException e) {} + }); } @ParameterizedTest @MethodSource("buckets") void testSave_create(String bucketName, Optional basePath, String uriPath) throws Exception { - 
System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR.getVarName(), uriPath); - this.notebookRepo = new GCSNotebookRepo(ZeppelinConfiguration.create(), storage); + zConf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR.getVarName(), uriPath); + this.notebookRepo = new GCSNotebookRepo(zConf, noteParser, storage); notebookRepo.save(runningNote, AUTH_INFO); // Output is saved assertThat(storage.readAllBytes(makeBlobId(runningNote.getId(), runningNote.getPath(), bucketName, basePath))) @@ -165,8 +181,8 @@ void testSave_create(String bucketName, Optional basePath, String uriPat @ParameterizedTest @MethodSource("buckets") void testSave_update(String bucketName, Optional basePath, String uriPath) throws Exception { - System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR.getVarName(), uriPath); - this.notebookRepo = new GCSNotebookRepo(ZeppelinConfiguration.create(), storage); + zConf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR.getVarName(), uriPath); + this.notebookRepo = new GCSNotebookRepo(zConf, noteParser, storage); notebookRepo.save(runningNote, AUTH_INFO); // Change name of runningNote runningNote.setPath("/new-name"); @@ -177,17 +193,18 @@ void testSave_update(String bucketName, Optional basePath, String uriPat @Test void testRemove_nonexistent() throws Exception { - try { + zConf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR.getVarName(), DEFAULT_URL); + this.notebookRepo = new GCSNotebookRepo(zConf, noteParser, storage); + assertThrows(IOException.class, () -> { notebookRepo.remove("id", "/name", AUTH_INFO); - fail(); - } catch (IOException e) {} + }); } @ParameterizedTest @MethodSource("buckets") void testRemove(String bucketName, Optional basePath, String uriPath) throws Exception { - System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR.getVarName(), uriPath); - this.notebookRepo = new GCSNotebookRepo(ZeppelinConfiguration.create(), storage); + 
zConf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR.getVarName(), uriPath); + this.notebookRepo = new GCSNotebookRepo(zConf, noteParser, storage); create(runningNote, bucketName, basePath); notebookRepo.remove(runningNote.getId(), runningNote.getPath(), AUTH_INFO); assertThat(storage.get(makeBlobId(runningNote.getId(), runningNote.getPath(), bucketName, basePath))).isNull(); @@ -195,22 +212,23 @@ void testRemove(String bucketName, Optional basePath, String uriPath) th @Test void testRemoveFolder_nonexistent() throws Exception { + zConf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR.getVarName(), DEFAULT_URL); + this.notebookRepo = new GCSNotebookRepo(zConf, noteParser, storage); assertThrows(IOException.class, () -> { notebookRepo.remove("id", "/name", AUTH_INFO); fail(); }); - } @ParameterizedTest @MethodSource("buckets") void testRemoveFolder(String bucketName, Optional basePath, String uriPath) throws Exception { - System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR.getVarName(), uriPath); - this.notebookRepo = new GCSNotebookRepo(ZeppelinConfiguration.create(), storage); - Note firstNote = makeRunningNote(); + zConf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR.getVarName(), uriPath); + this.notebookRepo = new GCSNotebookRepo(zConf, noteParser, storage); + Note firstNote = makeRunningNote(zConf, noteParser); firstNote.setPath("/folder/test_note"); create(firstNote, bucketName, basePath); - Note secondNote = makeRunningNote(); + Note secondNote = makeRunningNote(zConf, noteParser); secondNote.setPath("/folder/sub_folder/test_note_second"); create(secondNote, bucketName, basePath); notebookRepo.remove("/folder", AUTH_INFO); @@ -220,18 +238,19 @@ void testRemoveFolder(String bucketName, Optional basePath, String uriPa @Test - void testMove_nonexistent() { - try { + void testMove_nonexistent() throws IOException { + zConf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR.getVarName(), DEFAULT_URL); + this.notebookRepo = new 
GCSNotebookRepo(zConf, noteParser, storage); + assertThrows(IOException.class, () -> { notebookRepo.move("id", "/name", "/name_new", AUTH_INFO); - fail(); - } catch (IOException e) {} + }); } @ParameterizedTest @MethodSource("buckets") void testMove(String bucketName, Optional basePath, String uriPath) throws Exception { - System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR.getVarName(), uriPath); - this.notebookRepo = new GCSNotebookRepo(ZeppelinConfiguration.create(), storage); + zConf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR.getVarName(), uriPath); + this.notebookRepo = new GCSNotebookRepo(zConf, noteParser, storage); create(runningNote, bucketName, basePath); notebookRepo.move(runningNote.getId(), runningNote.getPath(), runningNote.getPath() + "_new", AUTH_INFO); assertThat(storage.get(makeBlobId(runningNote.getId(), runningNote.getPath(), bucketName, basePath))).isNull(); @@ -239,21 +258,22 @@ void testMove(String bucketName, Optional basePath, String uriPath) thro @Test void testMoveFolder_nonexistent() throws Exception { + zConf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR.getVarName(), DEFAULT_URL); + this.notebookRepo = new GCSNotebookRepo(zConf, noteParser, storage); assertThrows(IOException.class, () -> { notebookRepo.move("/name", "/name_new", AUTH_INFO); - fail(); }); } @ParameterizedTest @MethodSource("buckets") void testMoveFolder(String bucketName, Optional basePath, String uriPath) throws Exception { - System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR.getVarName(), uriPath); - this.notebookRepo = new GCSNotebookRepo(ZeppelinConfiguration.create(), storage); - Note firstNote = makeRunningNote(); + zConf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR.getVarName(), uriPath); + this.notebookRepo = new GCSNotebookRepo(zConf, noteParser, storage); + Note firstNote = makeRunningNote(zConf, noteParser); firstNote.setPath("/folder/test_note"); create(firstNote, bucketName, basePath); - Note secondNote = 
makeRunningNote(); + Note secondNote = makeRunningNote(zConf, noteParser); secondNote.setPath("/folder/sub_folder/test_note_second"); create(secondNote, bucketName, basePath); notebookRepo.move("/folder", "/folder_new", AUTH_INFO); @@ -282,40 +302,38 @@ private BlobId makeBlobId(String noteId, String notePath, String bucketName, Opt private void createAt(Note note, String relativePath, String bucketName, Optional basePath) throws IOException { BlobId id = BlobId.of(bucketName, makeName(relativePath, basePath)); BlobInfo info = BlobInfo.newBuilder(id).setContentType("application/json").build(); - storage.create(info, note.toJson().getBytes("UTF-8")); + storage.create(info, note.toJson().getBytes(StandardCharsets.UTF_8)); } private void create(Note note, String bucketName, Optional basePath) throws IOException { BlobInfo info = BlobInfo.newBuilder(makeBlobId(note.getId(), note.getPath(), bucketName, basePath)) .setContentType("application/json") .build(); - storage.create(info, note.toJson().getBytes("UTF-8")); + storage.create(info, note.toJson().getBytes(StandardCharsets.UTF_8)); } private void createMalformed(String noteId, String notePath, String bucketName, Optional basePath) throws IOException { BlobInfo info = BlobInfo.newBuilder(makeBlobId(noteId, notePath, bucketName, basePath)) .setContentType("application/json") .build(); - storage.create(info, "{ invalid-json }".getBytes("UTF-8")); + storage.create(info, "{ invalid-json }".getBytes(StandardCharsets.UTF_8)); } /* These tests test path parsing for illegal paths, and do not use the parameterized vars */ @Test void testInitialization_pathNotSet() throws Exception { - try { - System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR.getVarName(), ""); - new GCSNotebookRepo(ZeppelinConfiguration.create(), storage); - fail(); - } catch (IOException e) {} + zConf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR.getVarName(), ""); + assertThrows(IOException.class, () -> { + notebookRepo = new 
GCSNotebookRepo(zConf, noteParser, storage); + }); } @Test void testInitialization_malformedPath() throws Exception { - try { - System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR.getVarName(), "foo"); - new GCSNotebookRepo(ZeppelinConfiguration.create(), storage); - fail(); - } catch (IOException e) {} + zConf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_GCS_STORAGE_DIR.getVarName(), "foo"); + assertThrows(IOException.class, () -> { + notebookRepo = new GCSNotebookRepo(zConf, noteParser, storage); + }); } } diff --git a/zeppelin-plugins/notebookrepo/github/pom.xml b/zeppelin-plugins/notebookrepo/github/pom.xml index 2dfb7252efe..f68ac6525c9 100644 --- a/zeppelin-plugins/notebookrepo/github/pom.xml +++ b/zeppelin-plugins/notebookrepo/github/pom.xml @@ -24,7 +24,7 @@ zengine-plugins-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../../../zeppelin-plugins diff --git a/zeppelin-plugins/notebookrepo/github/src/main/java/org/apache/zeppelin/notebook/repo/GitHubNotebookRepo.java b/zeppelin-plugins/notebookrepo/github/src/main/java/org/apache/zeppelin/notebook/repo/GitHubNotebookRepo.java index 010d9985044..2d146adfbc9 100644 --- a/zeppelin-plugins/notebookrepo/github/src/main/java/org/apache/zeppelin/notebook/repo/GitHubNotebookRepo.java +++ b/zeppelin-plugins/notebookrepo/github/src/main/java/org/apache/zeppelin/notebook/repo/GitHubNotebookRepo.java @@ -18,6 +18,7 @@ package org.apache.zeppelin.notebook.repo; import org.apache.zeppelin.conf.ZeppelinConfiguration; +import org.apache.zeppelin.notebook.NoteParser; import org.apache.zeppelin.user.AuthenticationInfo; import org.eclipse.jgit.api.Git; import org.eclipse.jgit.api.PullCommand; @@ -53,8 +54,8 @@ public class GitHubNotebookRepo extends GitNotebookRepo { private Git git; @Override - public void init(ZeppelinConfiguration conf) throws IOException { - super.init(conf); + public void init(ZeppelinConfiguration conf, NoteParser noteParser) throws IOException { + super.init(conf, noteParser); 
LOG.debug("initializing GitHubNotebookRepo"); this.git = super.getGit(); this.zeppelinConfiguration = conf; diff --git a/zeppelin-plugins/notebookrepo/github/src/test/java/org/apache/zeppelin/notebook/repo/GitHubNotebookRepoTest.java b/zeppelin-plugins/notebookrepo/github/src/test/java/org/apache/zeppelin/notebook/repo/GitHubNotebookRepoTest.java index dd19e902dcc..bf32d1dd9b7 100644 --- a/zeppelin-plugins/notebookrepo/github/src/test/java/org/apache/zeppelin/notebook/repo/GitHubNotebookRepoTest.java +++ b/zeppelin-plugins/notebookrepo/github/src/test/java/org/apache/zeppelin/notebook/repo/GitHubNotebookRepoTest.java @@ -22,7 +22,9 @@ import org.apache.commons.io.FileUtils; import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.interpreter.InterpreterFactory; +import org.apache.zeppelin.notebook.GsonNoteParser; import org.apache.zeppelin.notebook.Note; +import org.apache.zeppelin.notebook.NoteParser; import org.apache.zeppelin.notebook.Paragraph; import org.apache.zeppelin.user.AuthenticationInfo; import org.eclipse.jgit.api.Git; @@ -33,11 +35,10 @@ import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import java.io.File; import java.io.IOException; +import java.nio.file.Files; import java.util.Iterator; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -50,50 +51,41 @@ * 2. 
The second repository is considered as the local notebook repository */ class GitHubNotebookRepoTest { - private static final Logger LOG = LoggerFactory.getLogger(GitHubNotebookRepoTest.class); - private static final String TEST_NOTE_ID = "2A94M5J1Z"; private static final String TEST_NOTE_PATH = "/my_project/my_note1"; private File remoteZeppelinDir; private File localZeppelinDir; - private String localNotebooksDir; - private String remoteNotebooksDir; + private File localNotebooksDir; + private File remoteNotebooksDir; private ZeppelinConfiguration conf; private GitHubNotebookRepo gitHubNotebookRepo; private RevCommit firstCommitRevision; private Git remoteGit; + private NoteParser noteParser; @BeforeEach void setUp() throws Exception { - conf = ZeppelinConfiguration.create(); - - String remoteRepositoryPath = System.getProperty("java.io.tmpdir") + "/ZeppelinTestRemote_" + - System.currentTimeMillis(); - String localRepositoryPath = System.getProperty("java.io.tmpdir") + "/ZeppelinTest_" + - System.currentTimeMillis(); + conf = ZeppelinConfiguration.load(); + noteParser = new GsonNoteParser(conf); // Create a fake remote notebook Git repository locally in another directory - remoteZeppelinDir = new File(remoteRepositoryPath); - remoteZeppelinDir.mkdirs(); - + remoteZeppelinDir = + Files.createTempDirectory(this.getClass().getSimpleName() + "remote").toFile(); // Create a local repository for notebooks - localZeppelinDir = new File(localRepositoryPath); - localZeppelinDir.mkdirs(); + localZeppelinDir = + Files.createTempDirectory(this.getClass().getSimpleName() + "local").toFile(); // Notebooks directory (for both the remote and local directories) - localNotebooksDir = String.join(File.separator, localRepositoryPath, "notebook"); - remoteNotebooksDir = String.join(File.separator, remoteRepositoryPath, "notebook"); - - File notebookDir = new File(localNotebooksDir); - notebookDir.mkdirs(); + localNotebooksDir = new File(localZeppelinDir, "notebook"); + 
remoteNotebooksDir = new File(remoteZeppelinDir, "notebook"); FileUtils.copyDirectory( new File(GitHubNotebookRepoTest.class.getResource("/notebook").getFile()), - new File(remoteNotebooksDir)); + remoteNotebooksDir); // Create the fake remote Git repository - Repository remoteRepository = new FileRepository(String.join(File.separator, remoteNotebooksDir, ".git")); + Repository remoteRepository = new FileRepository(new File(remoteNotebooksDir, ".git")); remoteRepository.create(); remoteGit = new Git(remoteRepository); @@ -101,33 +93,33 @@ void setUp() throws Exception { firstCommitRevision = remoteGit.commit().setMessage("First commit from remote repository").call(); // Set the Git and Git configurations - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_HOME.getVarName(), remoteZeppelinDir.getAbsolutePath()); - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_DIR.getVarName(), notebookDir.getAbsolutePath()); + conf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_HOME.getVarName(), + remoteZeppelinDir.getAbsolutePath()); + conf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_DIR.getVarName(), + localNotebooksDir.getAbsolutePath()); // Set the GitHub configurations - System.setProperty( + conf.setProperty( ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_STORAGE.getVarName(), "org.apache.zeppelin.notebook.repo.GitHubNotebookRepo"); - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_GIT_REMOTE_URL.getVarName(), + conf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_GIT_REMOTE_URL.getVarName(), remoteNotebooksDir + File.separator + ".git"); - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_GIT_REMOTE_USERNAME.getVarName(), "token"); - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_GIT_REMOTE_ACCESS_TOKEN.getVarName(), + conf.setProperty( + ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_GIT_REMOTE_USERNAME.getVarName(), "token"); + conf.setProperty( 
+ ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_GIT_REMOTE_ACCESS_TOKEN.getVarName(), "access-token"); // Create the Notebook repository (configured for the local repository) gitHubNotebookRepo = new GitHubNotebookRepo(); - gitHubNotebookRepo.init(conf); + gitHubNotebookRepo.init(conf, noteParser); } @AfterEach void tearDown() throws Exception { // Cleanup the temporary folders uses as Git repositories - File[] temporaryFolders = { remoteZeppelinDir, localZeppelinDir }; - - for(File temporaryFolder : temporaryFolders) { - if (!FileUtils.deleteQuietly(temporaryFolder)) - LOG.error("Failed to delete {} ", temporaryFolder.getName()); - } + FileUtils.deleteDirectory(localZeppelinDir); + FileUtils.deleteDirectory(remoteZeppelinDir); } @Test diff --git a/zeppelin-plugins/notebookrepo/mongo/pom.xml b/zeppelin-plugins/notebookrepo/mongo/pom.xml index 20a778f94ba..cf6765e594f 100644 --- a/zeppelin-plugins/notebookrepo/mongo/pom.xml +++ b/zeppelin-plugins/notebookrepo/mongo/pom.xml @@ -25,7 +25,7 @@ org.apache.zeppelin zengine-plugins-parent - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../../../zeppelin-plugins diff --git a/zeppelin-plugins/notebookrepo/mongo/src/main/java/org/apache/zeppelin/notebook/repo/MongoNotebookRepo.java b/zeppelin-plugins/notebookrepo/mongo/src/main/java/org/apache/zeppelin/notebook/repo/MongoNotebookRepo.java index 8ed23a583d2..0935e71cb16 100644 --- a/zeppelin-plugins/notebookrepo/mongo/src/main/java/org/apache/zeppelin/notebook/repo/MongoNotebookRepo.java +++ b/zeppelin-plugins/notebookrepo/mongo/src/main/java/org/apache/zeppelin/notebook/repo/MongoNotebookRepo.java @@ -33,6 +33,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; + import com.mongodb.MongoClient; import com.mongodb.MongoClientURI; import com.mongodb.client.AggregateIterable; @@ -45,17 +46,16 @@ import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.notebook.Note; import org.apache.zeppelin.notebook.NoteInfo; +import 
org.apache.zeppelin.notebook.NoteParser; import org.apache.zeppelin.user.AuthenticationInfo; /** * Backend for storing Notebook on MongoDB. */ -public class MongoNotebookRepo implements NotebookRepo { +public class MongoNotebookRepo extends AbstractNotebookRepo { private static final Logger LOG = LoggerFactory.getLogger(MongoNotebookRepo.class); - private ZeppelinConfiguration conf; - private MongoClient client; private MongoDatabase db; @@ -72,8 +72,8 @@ public MongoNotebookRepo() { } @Override - public void init(ZeppelinConfiguration zConf) throws IOException { - this.conf = zConf; + public void init(ZeppelinConfiguration conf, NoteParser noteParser) throws IOException { + super.init(conf, noteParser); client = new MongoClient(new MongoClientURI(conf.getMongoUri())); db = client.getDatabase(conf.getMongoDatabase()); notes = db.getCollection(conf.getMongoCollection()); @@ -92,18 +92,17 @@ public void init(ZeppelinConfiguration zConf) throws IOException { * If a note already exists in MongoDB, skip it. 
*/ private void insertFileSystemNotes() throws IOException { - NotebookRepo vfsRepo = new VFSNotebookRepo(); - vfsRepo.init(this.conf); - Map infos = vfsRepo.list(null); - - try (AutoLock autoLock = lock.lockForWrite()) { - for (NoteInfo info : infos.values()) { - Note note = vfsRepo.get(info.getId(), info.getPath(), null); - saveOrIgnore(note, null); + try (NotebookRepo vfsRepo = new VFSNotebookRepo()) { + vfsRepo.init(conf, noteParser); + Map infos = vfsRepo.list(null); + + try (AutoLock autoLock = lock.lockForWrite()) { + for (NoteInfo info : infos.values()) { + Note note = vfsRepo.get(info.getId(), info.getPath(), null); + saveOrIgnore(note, null); + } } } - - vfsRepo.close(); } @Override @@ -436,7 +435,7 @@ private Note documentToNote(String noteId, Document doc) throws IOException { // document to JSON String json = doc.toJson(); // JSON to note - return Note.fromJson(noteId, json); + return noteParser.fromJson(noteId, json); } /** diff --git a/zeppelin-plugins/notebookrepo/mongo/src/test/java/org/apache/zeppelin/notebook/repo/MongoNotebookRepoTest.java b/zeppelin-plugins/notebookrepo/mongo/src/test/java/org/apache/zeppelin/notebook/repo/MongoNotebookRepoTest.java deleted file mode 100644 index d3cc3701004..00000000000 --- a/zeppelin-plugins/notebookrepo/mongo/src/test/java/org/apache/zeppelin/notebook/repo/MongoNotebookRepoTest.java +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.zeppelin.notebook.repo; - -import static org.apache.zeppelin.conf.ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_MONGO_URI; -import static org.junit.jupiter.api.Assertions.assertEquals; - -import java.io.IOException; -import java.net.ServerSocket; -import java.util.Map; -import de.flapdoodle.embed.mongo.MongodExecutable; -import de.flapdoodle.embed.mongo.MongodStarter; -import de.flapdoodle.embed.mongo.config.MongodConfig; -import de.flapdoodle.embed.mongo.config.Net; -import de.flapdoodle.embed.mongo.distribution.Version; -import de.flapdoodle.embed.process.runtime.Network; -import org.apache.zeppelin.conf.ZeppelinConfiguration; -import org.apache.zeppelin.notebook.Note; -import org.apache.zeppelin.notebook.NoteInfo; -import org.apache.zeppelin.notebook.Paragraph; -import org.apache.zeppelin.user.AuthenticationInfo; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -class MongoNotebookRepoTest { - - private MongodExecutable mongodExecutable; - - private ZeppelinConfiguration zConf; - - private MongoNotebookRepo notebookRepo; - - @BeforeEach - void setUp() throws IOException { - String bindIp = "localhost"; - ServerSocket socket = new ServerSocket(0); - int port = socket.getLocalPort(); - socket.close(); - - MongodConfig mongodConfig = MongodConfig.builder() - .version(Version.Main.PRODUCTION) - .net(new Net(bindIp, port, Network.localhostIsIPv6())) - .build(); - - mongodExecutable = MongodStarter.getDefaultInstance() - .prepare(mongodConfig); - 
mongodExecutable.start(); - - System.setProperty(ZEPPELIN_NOTEBOOK_MONGO_URI.getVarName(), "mongodb://" + bindIp + ":" + port); - zConf = ZeppelinConfiguration.create(); - notebookRepo = new MongoNotebookRepo(); - notebookRepo.init(zConf); - } - - @AfterEach - void tearDown() throws IOException { - if (mongodExecutable != null) { - mongodExecutable.stop(); - } - } - - @Test - void testBasics() throws IOException { - assertEquals(0, notebookRepo.list(AuthenticationInfo.ANONYMOUS).size()); - - // create note1 - Note note1 = new Note(); - note1.setPath("/my_project/my_note1"); - Paragraph p1 = note1.insertNewParagraph(0, AuthenticationInfo.ANONYMOUS); - p1.setText("%md hello world"); - p1.setTitle("my title"); - notebookRepo.save(note1, AuthenticationInfo.ANONYMOUS); - - Map noteInfos = notebookRepo.list(AuthenticationInfo.ANONYMOUS); - assertEquals(1, noteInfos.size()); - Note note1Loaded = notebookRepo.get(note1.getId(), note1.getPath(), AuthenticationInfo.ANONYMOUS); - assertEquals(note1.getId(), note1Loaded.getId()); - assertEquals(note1.getName(), note1Loaded.getName()); - - // create note2 - Note note2 = new Note(); - note2.setPath("/my_note2"); - Paragraph p2 = note2.insertNewParagraph(0, AuthenticationInfo.ANONYMOUS); - p2.setText("%md hello world2"); - p2.setTitle("my title2"); - notebookRepo.save(note2, AuthenticationInfo.ANONYMOUS); - - noteInfos = notebookRepo.list(AuthenticationInfo.ANONYMOUS); - assertEquals(2, noteInfos.size()); - - // move note2 - String newPath = "/my_project2/my_note2"; - notebookRepo.move(note2.getId(), note2.getPath(), "/my_project2/my_note2", AuthenticationInfo.ANONYMOUS); - - Note note3 = notebookRepo.get(note2.getId(), newPath, AuthenticationInfo.ANONYMOUS); - assertEquals(note2, note3); - - // move folder - notebookRepo.move("/my_project2", "/my_project3/my_project2", AuthenticationInfo.ANONYMOUS); - noteInfos = notebookRepo.list(AuthenticationInfo.ANONYMOUS); - assertEquals(2, noteInfos.size()); - - Note note4 = 
notebookRepo.get(note3.getId(), "/my_project3/my_project2/my_note2", AuthenticationInfo.ANONYMOUS); - assertEquals(note3, note4); - - // remove note1 - notebookRepo.remove(note1.getId(), note1.getPath(), AuthenticationInfo.ANONYMOUS); - assertEquals(1, notebookRepo.list(AuthenticationInfo.ANONYMOUS).size()); - - notebookRepo.remove("/my_project3", AuthenticationInfo.ANONYMOUS); - assertEquals(0, notebookRepo.list(AuthenticationInfo.ANONYMOUS).size()); - } - - @Test - void testGetNotePath() throws IOException { - assertEquals(0, notebookRepo.list(AuthenticationInfo.ANONYMOUS).size()); - - Note note = new Note(); - String notePath = "/folder1/folder2/folder3/folder4/folder5/my_note"; - note.setPath(notePath); - notebookRepo.save(note, AuthenticationInfo.ANONYMOUS); - - notebookRepo.init(zConf); - Map noteInfos = notebookRepo.list(AuthenticationInfo.ANONYMOUS); - assertEquals(1, notebookRepo.list(AuthenticationInfo.ANONYMOUS).size()); - assertEquals(notePath, noteInfos.get(note.getId()).getPath()); - - notebookRepo.remove(note.getId(), note.getPath(), AuthenticationInfo.ANONYMOUS); - assertEquals(0, notebookRepo.list(AuthenticationInfo.ANONYMOUS).size()); - } -} diff --git a/zeppelin-plugins/notebookrepo/oss/pom.xml b/zeppelin-plugins/notebookrepo/oss/pom.xml index ff16cb19a15..f1662f25dcb 100644 --- a/zeppelin-plugins/notebookrepo/oss/pom.xml +++ b/zeppelin-plugins/notebookrepo/oss/pom.xml @@ -24,7 +24,7 @@ zengine-plugins-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 ../../../zeppelin-plugins diff --git a/zeppelin-plugins/notebookrepo/oss/src/main/java/org/apache/zeppelin/notebook/repo/OSSNotebookRepo.java b/zeppelin-plugins/notebookrepo/oss/src/main/java/org/apache/zeppelin/notebook/repo/OSSNotebookRepo.java index 5fddf9904bc..46789f47d10 100644 --- a/zeppelin-plugins/notebookrepo/oss/src/main/java/org/apache/zeppelin/notebook/repo/OSSNotebookRepo.java +++ 
b/zeppelin-plugins/notebookrepo/oss/src/main/java/org/apache/zeppelin/notebook/repo/OSSNotebookRepo.java @@ -20,6 +20,7 @@ import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.notebook.Note; import org.apache.zeppelin.notebook.NoteInfo; +import org.apache.zeppelin.notebook.NoteParser; import org.apache.zeppelin.notebook.repo.storage.OSSOperator; import org.apache.zeppelin.notebook.repo.storage.RemoteStorageOperator; import org.apache.zeppelin.user.AuthenticationInfo; @@ -35,7 +36,8 @@ /** * NotebookRepo for Aliyun OSS (https://cn.aliyun.com/product/oss) */ -public class OSSNotebookRepo implements NotebookRepoWithVersionControl { +public class OSSNotebookRepo extends AbstractNotebookRepo + implements NotebookRepoWithVersionControl { private static final Logger LOGGER = LoggerFactory.getLogger(OSSNotebookRepo.class); private String bucketName; @@ -49,7 +51,8 @@ public OSSNotebookRepo() { } @Override - public void init(ZeppelinConfiguration conf) throws IOException { + public void init(ZeppelinConfiguration conf, NoteParser noteParser) throws IOException { + super.init(conf, noteParser); String endpoint = conf.getOSSEndpoint(); bucketName = conf.getOSSBucketName(); rootFolder = conf.getNotebookDir(); @@ -100,7 +103,7 @@ public Map list(AuthenticationInfo subject) throws IOException public Note getByOSSPath(String noteId, String ossPath) throws IOException { String noteText = ossOperator.getTextObject(bucketName, ossPath); - return Note.fromJson(noteId, noteText); + return noteParser.fromJson(noteId, noteText); } diff --git a/zeppelin-plugins/notebookrepo/oss/src/test/java/org/apache/zeppelin/notebook/repo/MockStorageOperator.java b/zeppelin-plugins/notebookrepo/oss/src/test/java/org/apache/zeppelin/notebook/repo/MockStorageOperator.java index 3aa8d6d1967..aaca3181d9c 100644 --- a/zeppelin-plugins/notebookrepo/oss/src/test/java/org/apache/zeppelin/notebook/repo/MockStorageOperator.java +++ 
b/zeppelin-plugins/notebookrepo/oss/src/test/java/org/apache/zeppelin/notebook/repo/MockStorageOperator.java @@ -24,8 +24,8 @@ import java.io.File; import java.io.IOException; import java.io.InputStream; +import java.nio.charset.StandardCharsets; import java.nio.file.Files; -import java.nio.file.Path; import java.util.ArrayList; import java.util.Collection; import java.util.List; @@ -33,26 +33,25 @@ public class MockStorageOperator implements RemoteStorageOperator { - private String mockRootFolder; + private File mockRootFolder; public MockStorageOperator() throws IOException { - Path tempDirectory = Files.createTempDirectory("zeppelin_mock_storage_dir_"); - mockRootFolder = tempDirectory.toString() + "/"; + mockRootFolder = Files.createTempDirectory("zeppelin_mock_storage_dir_").toFile(); } @Override public void createBucket(String bucketName) throws IOException { - FileUtils.forceMkdir(new File(mockRootFolder + bucketName)); + FileUtils.forceMkdir(new File(mockRootFolder, bucketName)); } @Override public void deleteBucket(String bucketName) throws IOException { - FileUtils.deleteDirectory(new File(mockRootFolder + bucketName)); + FileUtils.deleteDirectory(new File(mockRootFolder, bucketName)); } @Override public boolean doesObjectExist(String bucketName, String key) throws IOException { - File file = new File(mockRootFolder + bucketName + "/" + key); + File file = new File(mockRootFolder, bucketName + File.separator + key); return file.exists() && !file.isDirectory(); } @@ -61,20 +60,21 @@ public String getTextObject(String bucketName, String key) throws IOException { if (!doesObjectExist(bucketName, key)) { throw new IOException("Note or its revision not found"); } - return FileUtils.readFileToString(new File(mockRootFolder + bucketName + "/" + key), "UTF-8"); + return FileUtils.readFileToString(new File(mockRootFolder, bucketName + File.separator + key), + StandardCharsets.UTF_8); } @Override public void putTextObject(String bucketName, String key, InputStream 
inputStream) throws IOException { - File destination = new File(mockRootFolder + bucketName + "/" + key); + File destination = new File(mockRootFolder, bucketName + File.separator + key); destination.getParentFile().mkdirs(); FileUtils.copyInputStreamToFile(inputStream, destination); } @Override public void moveObject(String bucketName, String sourceKey, String destKey) throws IOException { - FileUtils.moveFile(new File(mockRootFolder + bucketName + "/" + sourceKey), - new File(mockRootFolder + bucketName + "/" + destKey)); + FileUtils.moveFile(new File(mockRootFolder, bucketName + File.separator + sourceKey), + new File(mockRootFolder, bucketName + File.separator + destKey)); } @Override @@ -93,7 +93,7 @@ public void deleteDir(String bucketName, String dirname) throws IOException { @Override public void deleteFile(String bucketName, String objectKey) throws IOException { - FileUtils.forceDelete(new File(mockRootFolder + bucketName + "/" + objectKey)); + FileUtils.forceDelete(new File(mockRootFolder, bucketName + File.separator + objectKey)); } @Override @@ -107,16 +107,23 @@ public void deleteFiles(String bucketName, List objectKeys) throws IOExc @Override public List listDirObjects(String bucketName, String dirname) { - File directory = new File(mockRootFolder + bucketName + "/" + dirname); + File directory = new File(mockRootFolder, bucketName + File.separator + dirname); if (!directory.isDirectory()) { return new ArrayList<>(); } Collection files = FileUtils.listFiles(directory, TrueFileFilter.INSTANCE, TrueFileFilter.INSTANCE); - return files.stream().map(file -> file.getPath().substring((mockRootFolder + bucketName + "/").length())).collect(Collectors.toList()); + return files.stream().map( + file -> file.getPath() + .substring((mockRootFolder + File.separator + bucketName + File.separator).length())) + .collect(Collectors.toList()); } @Override public void shutdown() { - + try { + FileUtils.deleteDirectory(mockRootFolder); + } catch (IOException e) { + // 
Test case + } } } diff --git a/zeppelin-plugins/notebookrepo/oss/src/test/java/org/apache/zeppelin/notebook/repo/OSSNotebookRepoTest.java b/zeppelin-plugins/notebookrepo/oss/src/test/java/org/apache/zeppelin/notebook/repo/OSSNotebookRepoTest.java index 0de6ff0a192..1b1ef3d6904 100644 --- a/zeppelin-plugins/notebookrepo/oss/src/test/java/org/apache/zeppelin/notebook/repo/OSSNotebookRepoTest.java +++ b/zeppelin-plugins/notebookrepo/oss/src/test/java/org/apache/zeppelin/notebook/repo/OSSNotebookRepoTest.java @@ -18,8 +18,10 @@ package org.apache.zeppelin.notebook.repo; import org.apache.zeppelin.conf.ZeppelinConfiguration; +import org.apache.zeppelin.notebook.GsonNoteParser; import org.apache.zeppelin.notebook.Note; import org.apache.zeppelin.notebook.NoteInfo; +import org.apache.zeppelin.notebook.NoteParser; import org.apache.zeppelin.notebook.Paragraph; import org.apache.zeppelin.notebook.repo.storage.RemoteStorageOperator; import org.apache.zeppelin.scheduler.Job; @@ -45,10 +47,13 @@ class OSSNotebookRepoTest { private String bucket; private static int OSS_VERSION_MAX = 30; - + private ZeppelinConfiguration conf; + private NoteParser noteParser; @BeforeEach void setUp() throws IOException { + ZeppelinConfiguration conf = ZeppelinConfiguration.load(); + noteParser = new GsonNoteParser(conf); bucket = "zeppelin-test-bucket"; String endpoint = "yourEndpoint"; String accessKeyId = "yourAccessKeyId"; @@ -56,18 +61,19 @@ void setUp() throws IOException { ossOperator = new MockStorageOperator(); ossOperator.createBucket(bucket); notebookRepo = new OSSNotebookRepo(); - ZeppelinConfiguration conf = ZeppelinConfiguration.create(); - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_OSS_ENDPOINT.getVarName(), + + conf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_OSS_ENDPOINT.getVarName(), endpoint); - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_OSS_BUCKET.getVarName(), + 
conf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_OSS_BUCKET.getVarName(), bucket); - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_OSS_ACCESSKEYID.getVarName(), + conf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_OSS_ACCESSKEYID.getVarName(), accessKeyId); - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_OSS_ACCESSKEYSECRET.getVarName(), + conf.setProperty( + ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_OSS_ACCESSKEYSECRET.getVarName(), accessKeySecret); - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_OSS_VERSION_MAX.getVarName(), + conf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_OSS_VERSION_MAX.getVarName(), OSS_VERSION_MAX + ""); - notebookRepo.init(conf); + notebookRepo.init(conf, noteParser); notebookRepo.setOssOperator(ossOperator); } @@ -92,6 +98,8 @@ void testNotebookRepo() throws IOException { // create Note note1 Note note1 = new Note(); + note1.setZeppelinConfiguration(conf); + note1.setNoteParser(noteParser); note1.setPath("/spark/note_1"); notebookRepo.save(note1, anonymous); @@ -124,6 +132,8 @@ void testNotebookRepo() throws IOException { // create another Note note2 Note note2 = new Note(); + note2.setZeppelinConfiguration(conf); + note2.setNoteParser(noteParser); note2.setPath("/spark/note_2"); notebookRepo.save(note2, anonymous); @@ -168,6 +178,8 @@ void testNotebookRepoWithVersionControl() throws IOException { // create Note note1 Note note1 = new Note(); + note1.setZeppelinConfiguration(conf); + note1.setNoteParser(noteParser); note1.setPath("/version_control/note_1"); List revisionList = new ArrayList<>(); diff --git a/zeppelin-plugins/notebookrepo/s3/pom.xml b/zeppelin-plugins/notebookrepo/s3/pom.xml index 9d1aa51c6db..837ba05df3f 100644 --- a/zeppelin-plugins/notebookrepo/s3/pom.xml +++ b/zeppelin-plugins/notebookrepo/s3/pom.xml @@ -24,7 +24,7 @@ zengine-plugins-parent org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 
../../../zeppelin-plugins @@ -66,30 +66,6 @@ - - - com.google.inject - guice - 5.0.1 - - - - org.apache.hadoop - hadoop-yarn-client - ${hadoop2.7.version} - - - javax.xml.bind - jaxb-api - - - diff --git a/zeppelin-plugins/notebookrepo/s3/src/main/java/org/apache/zeppelin/notebook/repo/S3NotebookRepo.java b/zeppelin-plugins/notebookrepo/s3/src/main/java/org/apache/zeppelin/notebook/repo/S3NotebookRepo.java index 497203c5faa..4d841cea1a5 100644 --- a/zeppelin-plugins/notebookrepo/s3/src/main/java/org/apache/zeppelin/notebook/repo/S3NotebookRepo.java +++ b/zeppelin-plugins/notebookrepo/s3/src/main/java/org/apache/zeppelin/notebook/repo/S3NotebookRepo.java @@ -35,6 +35,7 @@ import org.apache.zeppelin.conf.ZeppelinConfiguration.ConfVars; import org.apache.zeppelin.notebook.Note; import org.apache.zeppelin.notebook.NoteInfo; +import org.apache.zeppelin.notebook.NoteParser; import org.apache.zeppelin.user.AuthenticationInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -65,7 +66,7 @@ /** * Backend for storing Notebooks on S3 */ -public class S3NotebookRepo implements NotebookRepo { +public class S3NotebookRepo extends AbstractNotebookRepo { private static final Logger LOGGER = LoggerFactory.getLogger(S3NotebookRepo.class); // Use a credential provider chain so that instance profiles can be utilized @@ -87,15 +88,15 @@ public class S3NotebookRepo implements NotebookRepo { private String user; private boolean useServerSideEncryption; private CannedAccessControlList objectCannedAcl; - private ZeppelinConfiguration conf; private String rootFolder; public S3NotebookRepo() { } - public void init(ZeppelinConfiguration conf) throws IOException { - this.conf = conf; + @Override + public void init(ZeppelinConfiguration conf, NoteParser noteParser) throws IOException { + super.init(conf, noteParser); bucketName = conf.getS3BucketName(); user = conf.getS3User(); rootFolder = user + "/notebook"; @@ -114,7 +115,7 @@ public void init(ZeppelinConfiguration conf) throws 
IOException { } ClientConfiguration cliConf = createClientConfiguration(); - + // see if we should be encrypting data in S3 String kmsKeyID = conf.getS3KMSKeyID(); if (kmsKeyID != null) { @@ -224,7 +225,7 @@ public Note get(String noteId, String notePath, AuthenticationInfo subject) thro } try (InputStream ins = s3object.getObjectContent()) { String json = IOUtils.toString(ins, conf.getString(ConfVars.ZEPPELIN_ENCODING)); - return Note.fromJson(noteId, json); + return noteParser.fromJson(noteId, json); } } @@ -234,9 +235,9 @@ public void save(Note note, AuthenticationInfo subject) throws IOException { String key = rootFolder + "/" + buildNoteFileName(note); File file = File.createTempFile("note", "zpln"); try { - Writer writer = new OutputStreamWriter(new FileOutputStream(file)); - writer.write(json); - writer.close(); + try (Writer writer = new OutputStreamWriter(new FileOutputStream(file))) { + writer.write(json); + } PutObjectRequest putRequest = new PutObjectRequest(bucketName, key, file); if (useServerSideEncryption) { // Request server-side encryption. 
@@ -334,5 +335,4 @@ public List getSettings(AuthenticationInfo subject) { public void updateSettings(Map settings, AuthenticationInfo subject) { LOGGER.warn("Method not implemented"); } - } diff --git a/zeppelin-plugins/notebookrepo/s3/src/test/java/org/apache/zeppelin/notebook/repo/S3NotebookRepoTest.java b/zeppelin-plugins/notebookrepo/s3/src/test/java/org/apache/zeppelin/notebook/repo/S3NotebookRepoTest.java index 3c31329af6d..e9d44b5d699 100644 --- a/zeppelin-plugins/notebookrepo/s3/src/test/java/org/apache/zeppelin/notebook/repo/S3NotebookRepoTest.java +++ b/zeppelin-plugins/notebookrepo/s3/src/test/java/org/apache/zeppelin/notebook/repo/S3NotebookRepoTest.java @@ -23,9 +23,12 @@ import com.amazonaws.regions.Regions; import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.AmazonS3ClientBuilder; + import org.apache.zeppelin.conf.ZeppelinConfiguration; +import org.apache.zeppelin.notebook.GsonNoteParser; import org.apache.zeppelin.notebook.Note; import org.apache.zeppelin.notebook.NoteInfo; +import org.apache.zeppelin.notebook.NoteParser; import org.apache.zeppelin.user.AuthenticationInfo; import org.gaul.s3proxy.junit.S3ProxyExtension; import org.junit.jupiter.api.AfterEach; @@ -45,6 +48,9 @@ class S3NotebookRepoTest { private AuthenticationInfo anonymous = AuthenticationInfo.ANONYMOUS; private S3NotebookRepo notebookRepo; + private ZeppelinConfiguration conf; + private NoteParser noteParser; + @RegisterExtension static S3ProxyExtension s3Proxy = S3ProxyExtension.builder() .withCredentials("access", "secret") @@ -55,15 +61,16 @@ class S3NotebookRepoTest { void setUp() throws IOException { String bucket = "test-bucket"; notebookRepo = new S3NotebookRepo(); - ZeppelinConfiguration conf = ZeppelinConfiguration.create(); - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_S3_ENDPOINT.getVarName(), + conf = ZeppelinConfiguration.load(); + noteParser = new GsonNoteParser(conf); + 
conf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_S3_ENDPOINT.getVarName(), s3Proxy.getUri().toString()); - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_S3_BUCKET.getVarName(), + conf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_S3_BUCKET.getVarName(), bucket); System.setProperty("aws.accessKeyId", s3Proxy.getAccessKey()); System.setProperty("aws.secretKey", s3Proxy.getSecretKey()); - notebookRepo.init(conf); + notebookRepo.init(conf, noteParser); // create bucket for notebook AmazonS3 s3Client = AmazonS3ClientBuilder @@ -98,6 +105,8 @@ void testNotebookRepo() throws IOException { // create Note note1 Note note1 = new Note(); + note1.setZeppelinConfiguration(conf); + note1.setNoteParser(noteParser); note1.setPath("/spark/note_1"); notebookRepo.save(note1, anonymous); @@ -119,6 +128,8 @@ void testNotebookRepo() throws IOException { // create another Note note2 Note note2 = new Note(); + note2.setZeppelinConfiguration(conf); + note2.setNoteParser(noteParser); note2.setPath("/spark/note_2"); notebookRepo.save(note2, anonymous); diff --git a/zeppelin-plugins/pom.xml b/zeppelin-plugins/pom.xml index cacb7a98e00..98fd3d24a13 100644 --- a/zeppelin-plugins/pom.xml +++ b/zeppelin-plugins/pom.xml @@ -24,7 +24,7 @@ zeppelin org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 zengine-plugins-parent diff --git a/zeppelin-server/pom.xml b/zeppelin-server/pom.xml index bd71725d033..21c375ab4c6 100644 --- a/zeppelin-server/pom.xml +++ b/zeppelin-server/pom.xml @@ -23,7 +23,7 @@ zeppelin org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 zeppelin-server @@ -38,8 +38,8 @@ 2.1 1.11 4.1.0 - 2.12.6.1 - 9.13 + 2.12.7.1 + 9.37.2 2.0.0-M15 @@ -310,16 +310,9 @@ org.apache.zeppelin - zeppelin-zengine + zeppelin-test ${project.version} - tests test - - - com.google.guava - guava - - @@ -384,6 +377,18 @@ + + org.awaitility + awaitility + test + + + + org.hamcrest + hamcrest + + + xml-apis @@ -417,7 +422,7 @@ 
maven-surefire-plugin - + 1 false -Xmx3g -Xms1g -Dfile.encoding=UTF-8 @@ -475,29 +480,13 @@ - - hadoop2 - - - ${hadoop2.7.version} - - - - org.apache.hadoop - hadoop-common - ${hadoop.version} - ${hadoop.deps.scope} - - - - hadoop3 true - ${hadoop3.2.version} + ${hadoop3.3.version} hadoop-client-api hadoop-client-runtime diff --git a/zeppelin-server/src/main/java/org/apache/zeppelin/rest/ClusterRestApi.java b/zeppelin-server/src/main/java/org/apache/zeppelin/rest/ClusterRestApi.java index 31d936e0776..feb9a707f3d 100644 --- a/zeppelin-server/src/main/java/org/apache/zeppelin/rest/ClusterRestApi.java +++ b/zeppelin-server/src/main/java/org/apache/zeppelin/rest/ClusterRestApi.java @@ -55,9 +55,11 @@ public class ClusterRestApi { // Do not modify, Use by `zeppelin-web/src/app/cluster/cluster.html` private static final String PROPERTIES = "properties"; + private final ZeppelinConfiguration zConf; @Inject public ClusterRestApi(ZeppelinConfiguration zConf) { + this.zConf = zConf; if (zConf.isClusterMode()) { clusterManagerServer = ClusterManagerServer.getInstance(zConf); } else { @@ -69,8 +71,7 @@ public ClusterRestApi(ZeppelinConfiguration zConf) { @Path("/address") @ZeppelinApi public Response getClusterAddress() { - ZeppelinConfiguration zconf = ZeppelinConfiguration.create(); - String clusterAddr = zconf.getClusterAddress(); + String clusterAddr = zConf.getClusterAddress(); Map data = new HashMap<>(); data.put("clusterAddr", clusterAddr); diff --git a/zeppelin-server/src/main/java/org/apache/zeppelin/rest/SessionRestApi.java b/zeppelin-server/src/main/java/org/apache/zeppelin/rest/SessionRestApi.java index a6474d1d167..d6009e8a599 100644 --- a/zeppelin-server/src/main/java/org/apache/zeppelin/rest/SessionRestApi.java +++ b/zeppelin-server/src/main/java/org/apache/zeppelin/rest/SessionRestApi.java @@ -22,6 +22,7 @@ import org.apache.zeppelin.interpreter.InterpreterSettingManager; import org.apache.zeppelin.notebook.Notebook; import 
org.apache.zeppelin.common.SessionInfo; +import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.rest.exception.SessionNoteFoundException; import org.apache.zeppelin.server.JsonResponse; import org.apache.zeppelin.service.SessionManagerService; @@ -53,8 +54,10 @@ public class SessionRestApi { private final SessionManagerService sessionManagerService; @Inject - public SessionRestApi(Notebook notebook, InterpreterSettingManager interpreterSettingManager) { - this.sessionManagerService = new SessionManagerService(notebook, interpreterSettingManager); + public SessionRestApi(Notebook notebook, InterpreterSettingManager interpreterSettingManager, + ZeppelinConfiguration zConf) { + this.sessionManagerService = + new SessionManagerService(notebook, interpreterSettingManager, zConf); } /** diff --git a/zeppelin-server/src/main/java/org/apache/zeppelin/rest/exception/WebApplicationExceptionMapper.java b/zeppelin-server/src/main/java/org/apache/zeppelin/rest/exception/WebApplicationExceptionMapper.java index 3a1cb1ccaa1..fb55955e1c5 100644 --- a/zeppelin-server/src/main/java/org/apache/zeppelin/rest/exception/WebApplicationExceptionMapper.java +++ b/zeppelin-server/src/main/java/org/apache/zeppelin/rest/exception/WebApplicationExceptionMapper.java @@ -30,7 +30,7 @@ @Provider public class WebApplicationExceptionMapper implements ExceptionMapper { - private static final Logger LOGGER = LoggerFactory.getLogger(WebApplicationException.class); + private static final Logger LOGGER = LoggerFactory.getLogger(WebApplicationExceptionMapper.class); private final Gson gson; diff --git a/zeppelin-server/src/main/java/org/apache/zeppelin/server/CorsFilter.java b/zeppelin-server/src/main/java/org/apache/zeppelin/server/CorsFilter.java index 8688716006c..59718ec1e5e 100644 --- a/zeppelin-server/src/main/java/org/apache/zeppelin/server/CorsFilter.java +++ b/zeppelin-server/src/main/java/org/apache/zeppelin/server/CorsFilter.java @@ -37,6 +37,12 @@ public class 
CorsFilter implements Filter { private static final Logger LOGGER = LoggerFactory.getLogger(CorsFilter.class); + private final ZeppelinConfiguration conf; + + public CorsFilter(ZeppelinConfiguration conf) { + this.conf = conf; + } + @Override public void doFilter(ServletRequest request, ServletResponse response, FilterChain filterChain) throws IOException, ServletException { @@ -44,7 +50,7 @@ public void doFilter(ServletRequest request, ServletResponse response, FilterCha String origin = ""; try { - if (CorsUtils.isValidOrigin(sourceHost, ZeppelinConfiguration.create())) { + if (CorsUtils.isValidOrigin(sourceHost, conf)) { origin = sourceHost; } } catch (URISyntaxException e) { @@ -70,13 +76,12 @@ private void addCorsHeaders(HttpServletResponse response, String origin) { response.setHeader("Access-Control-Allow-Headers", "authorization,Content-Type"); response.setHeader("Access-Control-Allow-Methods", "POST, GET, OPTIONS, PUT, HEAD, DELETE"); - ZeppelinConfiguration zeppelinConfiguration = ZeppelinConfiguration.create(); - response.setHeader("X-FRAME-OPTIONS", zeppelinConfiguration.getXFrameOptions()); - if (zeppelinConfiguration.useSsl()) { - response.setHeader("Strict-Transport-Security", zeppelinConfiguration.getStrictTransport()); + response.setHeader("X-FRAME-OPTIONS", conf.getXFrameOptions()); + if (conf.useSsl()) { + response.setHeader("Strict-Transport-Security", conf.getStrictTransport()); } - response.setHeader("X-XSS-Protection", zeppelinConfiguration.getXxssProtection()); - response.setHeader("X-Content-Type-Options", zeppelinConfiguration.getXContentTypeOptions()); + response.setHeader("X-XSS-Protection", conf.getXxssProtection()); + response.setHeader("X-Content-Type-Options", conf.getXContentTypeOptions()); } @Override diff --git a/zeppelin-server/src/main/java/org/apache/zeppelin/server/RestApiApplication.java b/zeppelin-server/src/main/java/org/apache/zeppelin/server/RestApiApplication.java index 56e29e87422..502e3275107 100644 --- 
a/zeppelin-server/src/main/java/org/apache/zeppelin/server/RestApiApplication.java +++ b/zeppelin-server/src/main/java/org/apache/zeppelin/server/RestApiApplication.java @@ -54,6 +54,8 @@ public Set> getClasses() { // add ExceptionMapper s.add(WebApplicationExceptionMapper.class); + // add JSON-Consumer and Producer + s.add(GsonProvider.class); return s; } } diff --git a/zeppelin-server/src/main/java/org/apache/zeppelin/server/ZeppelinServer.java b/zeppelin-server/src/main/java/org/apache/zeppelin/server/ZeppelinServer.java index 86322ebb3bd..d9f4e91af10 100644 --- a/zeppelin-server/src/main/java/org/apache/zeppelin/server/ZeppelinServer.java +++ b/zeppelin-server/src/main/java/org/apache/zeppelin/server/ZeppelinServer.java @@ -41,18 +41,21 @@ import java.io.File; import java.io.IOException; import java.lang.management.ManagementFactory; +import java.net.MalformedURLException; import java.nio.file.Files; import java.security.GeneralSecurityException; import java.util.Base64; import java.util.HashSet; import java.util.List; +import java.util.Optional; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.EnumSet; -import javax.inject.Inject; import javax.inject.Singleton; import javax.management.remote.JMXServiceURL; import javax.servlet.DispatcherType; import javax.servlet.ServletContextEvent; import javax.servlet.ServletContextListener; +import javax.websocket.server.ServerEndpointConfig; import org.apache.commons.lang3.StringUtils; import org.apache.shiro.web.env.EnvironmentLoaderListener; @@ -67,7 +70,6 @@ import org.apache.zeppelin.helium.HeliumApplicationFactory; import org.apache.zeppelin.helium.HeliumBundleFactory; import org.apache.zeppelin.interpreter.InterpreterFactory; -import org.apache.zeppelin.interpreter.InterpreterOutput; import org.apache.zeppelin.interpreter.InterpreterSetting; import org.apache.zeppelin.interpreter.InterpreterSettingManager; import org.apache.zeppelin.interpreter.recovery.RecoveryStorage; @@ -76,8 +78,10 @@ import 
org.apache.zeppelin.metric.PrometheusServlet; import org.apache.zeppelin.notebook.NoteEventListener; import org.apache.zeppelin.notebook.NoteManager; +import org.apache.zeppelin.notebook.NoteParser; import org.apache.zeppelin.notebook.Notebook; import org.apache.zeppelin.notebook.AuthorizationService; +import org.apache.zeppelin.notebook.GsonNoteParser; import org.apache.zeppelin.notebook.Paragraph; import org.apache.zeppelin.notebook.repo.NotebookRepo; import org.apache.zeppelin.notebook.repo.NotebookRepoSync; @@ -93,6 +97,8 @@ import org.apache.zeppelin.service.AuthenticationService; import org.apache.zeppelin.socket.ConnectionManager; import org.apache.zeppelin.socket.NotebookServer; +import org.apache.zeppelin.socket.SessionConfigurator; +import org.apache.zeppelin.storage.ConfigStorage; import org.apache.zeppelin.user.AuthenticationInfo; import org.apache.zeppelin.user.Credentials; import org.apache.zeppelin.util.ReflectionUtils; @@ -115,7 +121,6 @@ import org.eclipse.jetty.webapp.WebAppContext; import org.eclipse.jetty.websocket.jsr356.server.deploy.WebSocketServerContainerInitializer; import org.eclipse.jetty.websocket.servlet.WebSocketServlet; -import org.glassfish.hk2.api.Immediate; import org.glassfish.hk2.api.ServiceLocator; import org.glassfish.hk2.api.ServiceLocatorFactory; import org.glassfish.hk2.utilities.ServiceLocatorUtilities; @@ -125,58 +130,64 @@ import org.slf4j.LoggerFactory; /** Main class of Zeppelin. 
*/ -public class ZeppelinServer { +public class ZeppelinServer implements AutoCloseable { private static final Logger LOG = LoggerFactory.getLogger(ZeppelinServer.class); private static final String WEB_APP_CONTEXT_NEXT = "/next"; + public static final String DEFAULT_SERVICE_LOCATOR_NAME = "shared-locator"; - public static final String SERVICE_LOCATOR_NAME= "shared-locator"; + private final AtomicBoolean duringShutdown = new AtomicBoolean(false); + private final ZeppelinConfiguration conf; + private final Optional promMetricRegistry; + private final Server jettyWebServer; + private final ServiceLocator sharedServiceLocator; + private final ConfigStorage storage; - @Inject - public ZeppelinServer(ZeppelinConfiguration conf) { - LOG.info("Instantiated ZeppelinServer"); - InterpreterOutput.LIMIT = conf.getInt(ConfVars.ZEPPELIN_INTERPRETER_OUTPUT_LIMIT); + public ZeppelinServer(ZeppelinConfiguration conf) throws IOException { + this(conf, DEFAULT_SERVICE_LOCATOR_NAME); } - public static void main(String[] args) throws IOException { - ZeppelinConfiguration conf = ZeppelinConfiguration.create(); - - Server jettyWebServer = setupJettyServer(conf); - - PrometheusMeterRegistry promMetricRegistry = null; + public ZeppelinServer(ZeppelinConfiguration conf, String serviceLocatorName) throws IOException { + LOG.info("Instantiated ZeppelinServer"); + this.conf = conf; if (conf.isPrometheusMetricEnabled()) { - promMetricRegistry = new PrometheusMeterRegistry(PrometheusConfig.DEFAULT); - Metrics.addRegistry(promMetricRegistry); + promMetricRegistry = Optional.of(new PrometheusMeterRegistry(PrometheusConfig.DEFAULT)); + } else { + promMetricRegistry = Optional.empty(); } - initMetrics(conf); + jettyWebServer = setupJettyServer(); + sharedServiceLocator = ServiceLocatorFactory.getInstance().create(serviceLocatorName); + storage = ConfigStorage.createConfigStorage(conf); + } + + public void startZeppelin() { + initMetrics(); TimedHandler timedHandler = new 
TimedHandler(Metrics.globalRegistry, Tags.empty()); jettyWebServer.setHandler(timedHandler); ContextHandlerCollection contexts = new ContextHandlerCollection(); timedHandler.setHandler(contexts); - - ServiceLocator sharedServiceLocator = ServiceLocatorFactory.getInstance().create(SERVICE_LOCATOR_NAME); ServiceLocatorUtilities.enableImmediateScope(sharedServiceLocator); ServiceLocatorUtilities.addClasses(sharedServiceLocator, - NotebookRepoSync.class, ImmediateErrorHandlerImpl.class); ImmediateErrorHandlerImpl handler = sharedServiceLocator.getService(ImmediateErrorHandlerImpl.class); + ServiceLocatorUtilities.bind( sharedServiceLocator, new AbstractBinder() { @Override protected void configure() { - Credentials credentials = new Credentials(conf); + bind(storage).to(ConfigStorage.class); + bindAsContract(PluginManager.class).in(Singleton.class); + bind(GsonNoteParser.class).to(NoteParser.class).in(Singleton.class); bindAsContract(InterpreterFactory.class).in(Singleton.class); - bindAsContract(NotebookRepoSync.class).to(NotebookRepo.class).in(Immediate.class); + bindAsContract(NotebookRepoSync.class).to(NotebookRepo.class).in(Singleton.class); bindAsContract(Helium.class).in(Singleton.class); bind(conf).to(ZeppelinConfiguration.class); bindAsContract(InterpreterSettingManager.class).in(Singleton.class); bindAsContract(InterpreterService.class).in(Singleton.class); - bind(credentials).to(Credentials.class); - bindAsContract(GsonProvider.class).in(Singleton.class); - bindAsContract(WebApplicationExceptionMapper.class).in(Singleton.class); + bindAsContract(Credentials.class).in(Singleton.class); bindAsContract(AdminService.class).in(Singleton.class); bindAsContract(AuthorizationService.class).in(Singleton.class); bindAsContract(ConnectionManager.class).in(Singleton.class); @@ -218,30 +229,25 @@ protected void configure() { final WebAppContext defaultWebApp = setupWebAppContext(contexts, conf, conf.getString(ConfVars.ZEPPELIN_WAR), conf.getServerContextPath()); final 
WebAppContext nextWebApp = setupWebAppContext(contexts, conf, conf.getString(ConfVars.ZEPPELIN_ANGULAR_WAR), WEB_APP_CONTEXT_NEXT); - initWebApp(defaultWebApp, conf, sharedServiceLocator, promMetricRegistry); - initWebApp(nextWebApp, conf, sharedServiceLocator, promMetricRegistry); - // Cluster Manager Server - setupClusterManagerServer(sharedServiceLocator, conf); + initWebApp(defaultWebApp); + initWebApp(nextWebApp); - // JMX Enable - if (conf.isJMXEnabled()) { - int port = conf.getJMXPort(); - // Setup JMX - MBeanContainer mbeanContainer = new MBeanContainer(ManagementFactory.getPlatformMBeanServer()); - jettyWebServer.addBean(mbeanContainer); - JMXServiceURL jmxURL = - new JMXServiceURL( - String.format( - "service:jmx:rmi://0.0.0.0:%d/jndi/rmi://0.0.0.0:%d/jmxrmi", - port, port)); - ConnectorServer jmxServer = new ConnectorServer(jmxURL, "org.eclipse.jetty.jmx:name=rmiconnectorserver"); - jettyWebServer.addBean(jmxServer); - LOG.info("JMX Enabled with port: {}", port); + NotebookRepo repo = + ServiceLocatorUtilities.getService(sharedServiceLocator, NotebookRepo.class.getName()); + NoteParser noteParser = + ServiceLocatorUtilities.getService(sharedServiceLocator, NoteParser.class.getName()); + try { + repo.init(conf, noteParser); + } catch (IOException e) { + LOG.error("Failed to init NotebookRepo", e); } + // Cluster Manager Server + setupClusterManagerServer(); - runNoteOnStart(conf, jettyWebServer, sharedServiceLocator); + initJMX(); - Runtime.getRuntime().addShutdownHook(new Thread(() -> shutdown(conf, jettyWebServer, sharedServiceLocator))); + runNoteOnStart(sharedServiceLocator); + Runtime.getRuntime().addShutdownHook(new Thread(this::shutdown)); // Try to get Notebook from ServiceLocator, because Notebook instantiation is lazy, it is // created when user open zeppelin in browser if we don't get it explicitly here. 
@@ -281,10 +287,10 @@ protected void configure() { } catch (InterruptedException e) { // Many fast unit tests interrupt the Zeppelin server at this point LOG.error("Interrupt while waiting for construction errors - init shutdown", e); - shutdown(conf, jettyWebServer, sharedServiceLocator); + shutdown(); Thread.currentThread().interrupt(); - } + if (jettyWebServer.isStopped() || jettyWebServer.isStopping()) { LOG.debug("jetty server is stopped {} - is stopping {}", jettyWebServer.isStopped(), jettyWebServer.isStopping()); } else { @@ -292,19 +298,48 @@ protected void configure() { jettyWebServer.join(); } catch (InterruptedException e) { LOG.error("Interrupt while waiting for jetty threads - init shutdown", e); - shutdown(conf, jettyWebServer, sharedServiceLocator); + shutdown(); Thread.currentThread().interrupt(); } } - if (!conf.isRecoveryEnabled()) { - sharedServiceLocator.getService(InterpreterSettingManager.class).close(); + } + + public static void main(String[] args) throws Exception { + ZeppelinConfiguration conf = ZeppelinConfiguration.load(); + conf.printShortInfo(); + try (ZeppelinServer server = new ZeppelinServer(conf)) { + server.startZeppelin(); } } - private static void initMetrics(ZeppelinConfiguration conf) { + private void initJMX() { + // JMX Enable + if (conf.isJMXEnabled()) { + int port = conf.getJMXPort(); + // Setup JMX + MBeanContainer mbeanContainer = new MBeanContainer(ManagementFactory.getPlatformMBeanServer()); + jettyWebServer.addBean(mbeanContainer); + JMXServiceURL jmxURL; + try { + jmxURL = new JMXServiceURL( + String.format( + "service:jmx:rmi://0.0.0.0:%d/jndi/rmi://0.0.0.0:%d/jmxrmi", + port, port)); + ConnectorServer jmxServer = new ConnectorServer(jmxURL, "org.eclipse.jetty.jmx:name=rmiconnectorserver"); + jettyWebServer.addBean(jmxServer); + LOG.info("JMX Enabled with port: {}", port); + } catch (MalformedURLException e) { + LOG.error("Invalid JMXServiceURL - JMX Disabled", e); + } + } + } + private void initMetrics() { if 
(conf.isJMXEnabled()) { Metrics.addRegistry(new JmxMeterRegistry(JmxConfig.DEFAULT, Clock.SYSTEM)); } + if (promMetricRegistry.isPresent()) { + Metrics.addRegistry(promMetricRegistry.get()); + } new ClassLoaderMetrics().bindTo(Metrics.globalRegistry); new JvmMemoryMetrics().bindTo(Metrics.globalRegistry); new JvmThreadMetrics().bindTo(Metrics.globalRegistry); @@ -314,35 +349,44 @@ private static void initMetrics(ZeppelinConfiguration conf) { new JVMInfoBinder().bindTo(Metrics.globalRegistry); } - private static void shutdown(ZeppelinConfiguration conf, Server jettyWebServer, ServiceLocator sharedServiceLocator) { - LOG.info("Shutting down Zeppelin Server ... "); - try { - if (jettyWebServer != null) { - jettyWebServer.stop(); - } - if (sharedServiceLocator != null) { - if (!conf.isRecoveryEnabled()) { - sharedServiceLocator.getService(InterpreterSettingManager.class).close(); + public void shutdown(int exitCode) { + if (!duringShutdown.getAndSet(true)) { + LOG.info("Shutting down Zeppelin Server ... 
- ExitCode {}", exitCode); + try { + if (jettyWebServer != null) { + jettyWebServer.stop(); + } + if (sharedServiceLocator != null) { + if (!conf.isRecoveryEnabled()) { + sharedServiceLocator.getService(InterpreterSettingManager.class).close(); + } + sharedServiceLocator.getService(Notebook.class).close(); } - sharedServiceLocator.getService(Notebook.class).close(); + } catch (Exception e) { + LOG.error("Error while stopping servlet container", e); + } + LOG.info("Bye"); + if (exitCode != 0) { + System.exit(exitCode); } - } catch (Exception e) { - LOG.error("Error while stopping servlet container", e); } - LOG.info("Bye"); } - private static Server setupJettyServer(ZeppelinConfiguration conf) { + public void shutdown() { + shutdown(0); + } + + private Server setupJettyServer() { InstrumentedQueuedThreadPool threadPool = new InstrumentedQueuedThreadPool(Metrics.globalRegistry, Tags.empty(), conf.getInt(ConfVars.ZEPPELIN_SERVER_JETTY_THREAD_POOL_MAX), conf.getInt(ConfVars.ZEPPELIN_SERVER_JETTY_THREAD_POOL_MIN), conf.getInt(ConfVars.ZEPPELIN_SERVER_JETTY_THREAD_POOL_TIMEOUT)); final Server server = new Server(threadPool); - initServerConnector(server, conf); + initServerConnector(server); return server; } - private static void initServerConnector(Server server, ZeppelinConfiguration conf) { + private void initServerConnector(Server server) { ServerConnector connector; HttpConfiguration httpConfig = new HttpConfiguration(); @@ -382,7 +426,7 @@ private static void initServerConnector(Server server, ZeppelinConfiguration con server.addConnector(connector); } - private static void runNoteOnStart(ZeppelinConfiguration conf, Server jettyWebServer, ServiceLocator sharedServiceLocator) throws IOException { + private void runNoteOnStart(ServiceLocator sharedServiceLocator) { String noteIdToRun = conf.getNotebookRunId(); if (!StringUtils.isEmpty(noteIdToRun)) { LOG.info("Running note {} on start", noteIdToRun); @@ -393,68 +437,75 @@ private static void 
runNoteOnStart(ZeppelinConfiguration conf, Server jettyWebSe String base64EncodedJsonSerializedServiceContext = conf.getNotebookRunServiceContext(); if (StringUtils.isEmpty(base64EncodedJsonSerializedServiceContext)) { LOG.info("No service context provided. use ANONYMOUS"); - serviceContext = new ServiceContext(AuthenticationInfo.ANONYMOUS, new HashSet() {}); + serviceContext = new ServiceContext(AuthenticationInfo.ANONYMOUS, new HashSet<>()); } else { serviceContext = new Gson().fromJson( new String(Base64.getDecoder().decode(base64EncodedJsonSerializedServiceContext)), ServiceContext.class); } - boolean success = notebookService.runAllParagraphs(noteIdToRun, null, serviceContext, new ServiceCallback() { - @Override - public void onStart(String message, ServiceContext context) throws IOException { - } + try { + boolean success = notebookService.runAllParagraphs(noteIdToRun, null, serviceContext, new ServiceCallback() { + @Override + public void onStart(String message, ServiceContext context) throws IOException { + } - @Override - public void onSuccess(Paragraph result, ServiceContext context) throws IOException { - } + @Override + public void onSuccess(Paragraph result, ServiceContext context) throws IOException { + } - @Override - public void onFailure(Exception ex, ServiceContext context) throws IOException { + @Override + public void onFailure(Exception ex, ServiceContext context) throws IOException { + } + }); + if (conf.getNotebookRunAutoShutdown()) { + shutdown(success ? 0 : 1); } - }); - - if (conf.getNotebookRunAutoShutdown()) { - shutdown(conf, jettyWebServer, sharedServiceLocator); - System.exit(success ? 
0 : 1); + } catch (IOException e) { + LOG.error("Error during Paragraph Execution", e); } } } - private static void setupNotebookServer(WebAppContext webapp, ZeppelinConfiguration conf) { + private void setupNotebookServer(WebAppContext webapp) { String maxTextMessageSize = conf.getWebsocketMaxTextMessageSize(); WebSocketServerContainerInitializer .configure(webapp, (servletContext, wsContainer) -> { wsContainer.setDefaultMaxTextMessageBufferSize(Integer.parseInt(maxTextMessageSize)); - wsContainer.addEndpoint(NotebookServer.class); + wsContainer.addEndpoint(ServerEndpointConfig.Builder.create(NotebookServer.class, "/ws") + .configurator(new SessionConfigurator(sharedServiceLocator)).build()); }); } - private static void setupClusterManagerServer(ServiceLocator serviceLocator, ZeppelinConfiguration conf) { + private void setupClusterManagerServer() { if (conf.isClusterMode()) { LOG.info("Cluster mode is enabled, starting ClusterManagerServer"); ClusterManagerServer clusterManagerServer = ClusterManagerServer.getInstance(conf); - NotebookServer notebookServer = serviceLocator.getService(NotebookServer.class); + NotebookServer notebookServer = sharedServiceLocator.getService(NotebookServer.class); clusterManagerServer.addClusterEventListeners(ClusterManagerServer.CLUSTER_NOTE_EVENT_TOPIC, notebookServer); - AuthorizationService authorizationService = serviceLocator.getService(AuthorizationService.class); + AuthorizationService authorizationService = + sharedServiceLocator.getService(AuthorizationService.class); clusterManagerServer.addClusterEventListeners(ClusterManagerServer.CLUSTER_AUTH_EVENT_TOPIC, authorizationService); - InterpreterSettingManager interpreterSettingManager = serviceLocator.getService(InterpreterSettingManager.class); + InterpreterSettingManager interpreterSettingManager = + sharedServiceLocator.getService(InterpreterSettingManager.class); clusterManagerServer.addClusterEventListeners(ClusterManagerServer.CLUSTER_INTP_SETTING_EVENT_TOPIC, 
interpreterSettingManager); // Since the ClusterInterpreterLauncher is lazy, dynamically generated, So in cluster mode, // when the zeppelin service starts, Create a ClusterInterpreterLauncher object, // This allows the ClusterInterpreterLauncher to listen for cluster events. try { - InterpreterSettingManager intpSettingManager = serviceLocator.getService(InterpreterSettingManager.class); + InterpreterSettingManager intpSettingManager = + sharedServiceLocator.getService(InterpreterSettingManager.class); RecoveryStorage recoveryStorage = ReflectionUtils.createClazzInstance( conf.getRecoveryStorageClass(), new Class[] {ZeppelinConfiguration.class, InterpreterSettingManager.class}, new Object[] {conf, intpSettingManager}); recoveryStorage.init(); - PluginManager.get().loadInterpreterLauncher(InterpreterSetting.CLUSTER_INTERPRETER_LAUNCHER_NAME, recoveryStorage); + sharedServiceLocator.getService(PluginManager.class).loadInterpreterLauncher( + InterpreterSetting.CLUSTER_INTERPRETER_LAUNCHER_NAME, recoveryStorage); } catch (IOException e) { LOG.error(e.getMessage(), e); } @@ -537,7 +588,7 @@ private static void setupTruststoreWithPemFiles(SslContextFactory.Server sslCont } } - private static void setupRestApiContextHandler(WebAppContext webapp, ZeppelinConfiguration conf) { + private void setupRestApiContextHandler(WebAppContext webapp) { final ServletHolder servletHolder = new ServletHolder(new org.glassfish.jersey.servlet.ServletContainer()); @@ -555,8 +606,10 @@ private static void setupRestApiContextHandler(WebAppContext webapp, ZeppelinCon } } - private static void setupPrometheusContextHandler(WebAppContext webapp, PrometheusMeterRegistry promMetricRegistry) { - webapp.addServlet(new ServletHolder(new PrometheusServlet(promMetricRegistry)), "/metrics"); + private void setupPrometheusContextHandler(WebAppContext webapp) { + if (promMetricRegistry.isPresent()) { + webapp.addServlet(new ServletHolder(new PrometheusServlet(promMetricRegistry.get())), "/metrics"); + } 
} private static void setupHealthCheckContextHandler(WebAppContext webapp) { @@ -588,7 +641,7 @@ private static WebAppContext setupWebAppContext( webApp.addServlet(new ServletHolder(new IndexHtmlServlet(conf)), "/index.html"); contexts.addHandler(webApp); - webApp.addFilter(new FilterHolder(CorsFilter.class), "/*", EnumSet.allOf(DispatcherType.class)); + webApp.addFilter(new FilterHolder(new CorsFilter(conf)), "/*", EnumSet.allOf(DispatcherType.class)); webApp.setInitParameter( "org.eclipse.jetty.servlet.Default.dirAllowed", @@ -596,7 +649,7 @@ private static WebAppContext setupWebAppContext( return webApp; } - private static void initWebApp(WebAppContext webApp, ZeppelinConfiguration conf, ServiceLocator sharedServiceLocator, PrometheusMeterRegistry promMetricRegistry) { + private void initWebApp(WebAppContext webApp) { webApp.addEventListener( new ServletContextListener() { @Override @@ -611,16 +664,19 @@ public void contextDestroyed(ServletContextEvent servletContextEvent) {} }); // Create `ZeppelinServer` using reflection and setup REST Api - setupRestApiContextHandler(webApp, conf); + setupRestApiContextHandler(webApp); // prometheus endpoint - if (promMetricRegistry != null) { - setupPrometheusContextHandler(webApp, promMetricRegistry); - } + setupPrometheusContextHandler(webApp); // health endpoints setupHealthCheckContextHandler(webApp); // Notebook server - setupNotebookServer(webApp, conf); + setupNotebookServer(webApp); + } + + @Override + public void close() throws Exception { + shutdown(); } } diff --git a/zeppelin-server/src/main/java/org/apache/zeppelin/service/InterpreterService.java b/zeppelin-server/src/main/java/org/apache/zeppelin/service/InterpreterService.java index 70b5bfa0d04..7ed09aaa46c 100644 --- a/zeppelin-server/src/main/java/org/apache/zeppelin/service/InterpreterService.java +++ b/zeppelin-server/src/main/java/org/apache/zeppelin/service/InterpreterService.java @@ -71,7 +71,7 @@ public void installInterpreter( String 
interpreterBaseDir = conf.getInterpreterDir(); String localRepoPath = conf.getInterpreterLocalRepoPath(); - final DependencyResolver dependencyResolver = new DependencyResolver(localRepoPath); + final DependencyResolver dependencyResolver = new DependencyResolver(localRepoPath, conf); // TODO(jl): Make a rule between an interpreter name and an installation directory List possibleInterpreterDirectories = new ArrayList<>(); diff --git a/zeppelin-server/src/main/java/org/apache/zeppelin/service/SessionManagerService.java b/zeppelin-server/src/main/java/org/apache/zeppelin/service/SessionManagerService.java index ed5dbc3a610..0aa096a847c 100644 --- a/zeppelin-server/src/main/java/org/apache/zeppelin/service/SessionManagerService.java +++ b/zeppelin-server/src/main/java/org/apache/zeppelin/service/SessionManagerService.java @@ -53,13 +53,16 @@ public class SessionManagerService { private final InterpreterSettingManager interpreterSettingManager; private final Notebook notebook; private final ScheduledExecutorService sessionCheckerExecutor; + private final ZeppelinConfiguration zConf; - public SessionManagerService(Notebook notebook, InterpreterSettingManager interpreterSettingManager) { + public SessionManagerService(Notebook notebook, + InterpreterSettingManager interpreterSettingManager, ZeppelinConfiguration zConf) { this.notebook = notebook; this.interpreterSettingManager = interpreterSettingManager; + this.zConf = zConf; this.sessionCheckerExecutor = ExecutorFactory.singleton().createOrGetScheduled("Session-Checker-Executor", 1); - int sessionCheckerInterval = ZeppelinConfiguration.create() - .getInt(ZeppelinConfiguration.ConfVars.ZEPPELIN_SESSION_CHECK_INTERVAL); + int sessionCheckerInterval = + zConf.getInt(ZeppelinConfiguration.ConfVars.ZEPPELIN_SESSION_CHECK_INTERVAL); this.sessionCheckerExecutor.scheduleAtFixedRate(() -> { LOGGER.info("Start session check task"); Iterator> iter = sessions.entrySet().iterator(); @@ -156,7 +159,7 @@ public void 
stopSession(String sessionId) throws Exception { public SessionInfo getSessionInfo(String sessionId) throws Exception { SessionInfo sessionInfo = sessions.get(sessionId); if (sessionInfo == null) { - LOGGER.warn("No such session: " + sessionId); + LOGGER.warn("No such session: {}", sessionId); return null; } ManagedInterpreterGroup interpreterGroup = this.interpreterSettingManager.getInterpreterGroupById(sessionId); diff --git a/zeppelin-server/src/main/java/org/apache/zeppelin/socket/ConnectionManager.java b/zeppelin-server/src/main/java/org/apache/zeppelin/socket/ConnectionManager.java index c323a87b3cb..4635bb5bec1 100644 --- a/zeppelin-server/src/main/java/org/apache/zeppelin/socket/ConnectionManager.java +++ b/zeppelin-server/src/main/java/org/apache/zeppelin/socket/ConnectionManager.java @@ -82,16 +82,14 @@ public class ConnectionManager { final Queue watcherSockets = new ConcurrentLinkedQueue<>(); private final HashSet collaborativeModeList = Metrics.gaugeCollectionSize("zeppelin_collaborative_modes", Tags.empty(),new HashSet<>()); - private final Boolean collaborativeModeEnable = ZeppelinConfiguration - .create() - .isZeppelinNotebookCollaborativeModeEnable(); - private final AuthorizationService authorizationService; + private final ZeppelinConfiguration zConf; @Inject - public ConnectionManager(AuthorizationService authorizationService) { + public ConnectionManager(AuthorizationService authorizationService, ZeppelinConfiguration zConf) { this.authorizationService = authorizationService; + this.zConf = zConf; } public void addConnection(NotebookSocket conn) { @@ -191,7 +189,7 @@ public String getAssociatedNoteId(NotebookSocket socket) { } private void checkCollaborativeStatus(String noteId, Set socketList) { - if (!collaborativeModeEnable.booleanValue()) { + if (!zConf.isZeppelinNotebookCollaborativeModeEnable()) { return; } boolean collaborativeStatusNew = socketList.size() > 1; diff --git 
a/zeppelin-server/src/main/java/org/apache/zeppelin/socket/NotebookServer.java b/zeppelin-server/src/main/java/org/apache/zeppelin/socket/NotebookServer.java index ef3cff10ca8..662206dc59a 100644 --- a/zeppelin-server/src/main/java/org/apache/zeppelin/socket/NotebookServer.java +++ b/zeppelin-server/src/main/java/org/apache/zeppelin/socket/NotebookServer.java @@ -43,6 +43,7 @@ import javax.inject.Inject; import javax.inject.Provider; import javax.websocket.CloseReason; +import javax.websocket.Endpoint; import javax.websocket.EndpointConfig; import javax.websocket.OnClose; import javax.websocket.OnError; @@ -80,10 +81,12 @@ import org.apache.zeppelin.notebook.Note; import org.apache.zeppelin.notebook.NoteEventListener; import org.apache.zeppelin.notebook.NoteInfo; +import org.apache.zeppelin.notebook.NoteParser; import org.apache.zeppelin.notebook.Notebook; import org.apache.zeppelin.notebook.NotebookImportDeserializer; import org.apache.zeppelin.notebook.Paragraph; import org.apache.zeppelin.notebook.ParagraphJobListener; +import org.apache.zeppelin.notebook.exception.CorruptedNoteException; import org.apache.zeppelin.notebook.repo.NotebookRepoWithVersionControl.Revision; import org.apache.zeppelin.rest.exception.ForbiddenException; import org.apache.zeppelin.scheduler.Job; @@ -115,7 +118,7 @@ * no-parameter constructor */ @ManagedObject -@ServerEndpoint(value = "/ws", configurator = SessionConfigurator.class) +@ServerEndpoint(value = "/ws") public class NotebookServer implements AngularObjectRegistryListener, RemoteInterpreterProcessListener, ApplicationEventListener, @@ -139,9 +142,6 @@ String getKey() { } } - private final Boolean collaborativeModeEnable = ZeppelinConfiguration - .create() - .isZeppelinNotebookCollaborativeModeEnable(); private static final Logger LOG = LoggerFactory.getLogger(NotebookServer.class); private static final Gson gson = new GsonBuilder() .setDateFormat("yyyy-MM-dd'T'HH:mm:ssZ") @@ -151,14 +151,13 @@ String getKey() { private static 
final AtomicReference self = new AtomicReference<>(); private final ExecutorService executorService = Executors.newFixedThreadPool(10); - private final boolean sendParagraphStatusToFrontend = ZeppelinConfiguration.create().getBoolean( - ZeppelinConfiguration.ConfVars.ZEPPELIN_WEBSOCKET_PARAGRAPH_STATUS_PROGRESS); // TODO(jl): This will be removed by handling session directly private final Map sessionIdNotebookSocketMap = Metrics.gaugeMapSize("zeppelin_session_id_notebook_sockets", Tags.empty(), new ConcurrentHashMap<>()); private ConnectionManager connectionManager; private ZeppelinConfiguration zeppelinConfiguration; private Provider notebookProvider; + private Provider noteParser; private Provider notebookServiceProvider; private AuthorizationService authorizationService; private Provider configurationServiceProvider; @@ -174,6 +173,12 @@ public void setZeppelinConfiguration(ZeppelinConfiguration zeppelinConfiguration this.zeppelinConfiguration = zeppelinConfiguration; } + @Inject + public void setNoteParser(Provider noteParser) { + this.noteParser = noteParser; + LOG.info("Injected NoteParser"); + } + @Inject public void setServiceLocator(ServiceLocator serviceLocator) { LOG.info("Injected ServiceLocator: {}", serviceLocator); @@ -308,8 +313,7 @@ public void onMessage(NotebookSocket conn, String msg) { return; } - ZeppelinConfiguration conf = ZeppelinConfiguration.create(); - boolean allowAnonymous = conf.isAnonymousAllowed(); + boolean allowAnonymous = zeppelinConfiguration.isAnonymousAllowed(); if (!allowAnonymous && receivedMessage.principal.equals("anonymous")) { LOG.warn("Anonymous access not allowed."); return; @@ -515,6 +519,10 @@ private void removeConnection(NotebookSocket notebookSocket) { connectionManager.removeUserConnection(notebookSocket.getUser(), notebookSocket); } + private boolean sendParagraphStatusToFrontend() { + return zeppelinConfiguration.getBoolean(ZeppelinConfiguration.ConfVars.ZEPPELIN_WEBSOCKET_PARAGRAPH_STATUS_PROGRESS); + } + 
@OnError public void onError(Session session, Throwable error) { if (session != null) { @@ -720,8 +728,7 @@ public void broadcastNoteList(AuthenticationInfo subject, Set userAndRol // broadcast ClusterEvent private void broadcastClusterEvent(ClusterEvent event, String msgId, Object... objects) { - ZeppelinConfiguration conf = ZeppelinConfiguration.create(); - if (!conf.isClusterMode()) { + if (!zeppelinConfiguration.isClusterMode()) { return; } @@ -753,7 +760,7 @@ private void broadcastClusterEvent(ClusterEvent event, String msgId, Object... o } String msg = ClusterMessage.serializeMessage(clusterMessage); - ClusterManagerServer.getInstance(conf).broadcastClusterEvent( + ClusterManagerServer.getInstance(zeppelinConfiguration).broadcastClusterEvent( ClusterManagerServer.CLUSTER_NOTE_EVENT_TOPIC, msg); } @@ -776,12 +783,12 @@ public void onClusterEvent(String msg) { authenticationInfo = AuthenticationInfo.fromJson(json); } else if (StringUtils.equals(key, "Note")) { try { - note = Note.fromJson(null, json); - } catch (IOException e) { + note = noteParser.get().fromJson(null, json); + } catch (CorruptedNoteException e) { LOG.warn("Fail to parse note json", e); } } else if (StringUtils.equals(key, "Paragraph")) { - paragraph = Paragraph.fromJson(json); + paragraph = noteParser.get().fromJson(json); } else if (StringUtils.equals(key, "Set")) { Gson gson = new Gson(); userAndRoles = gson.fromJson(json, new TypeToken>() { @@ -791,7 +798,7 @@ public void onClusterEvent(String msg) { userParagraphMap = gson.fromJson(json, new TypeToken>() { }.getType()); } else { - LOG.error("Unknown key:{}, json:{}!" 
+ key, json); + LOG.error("Unknown key:{}, json:{}!", key, json); } } @@ -1201,7 +1208,7 @@ public void onSuccess(Paragraph p, ServiceContext context) throws IOException { private void patchParagraph(NotebookSocket conn, ServiceContext context, Message fromMessage) throws IOException { - if (!collaborativeModeEnable) { + if (!zeppelinConfiguration.isZeppelinNotebookCollaborativeModeEnable()) { return; } String paragraphId = fromMessage.getType("id", LOG); @@ -1784,7 +1791,7 @@ public void onSuccess(Note note, ServiceContext context) throws IOException { */ @Override public void onOutputAppend(String noteId, String paragraphId, int index, String output) { - if (!sendParagraphStatusToFrontend) { + if (!sendParagraphStatusToFrontend()) { return; } Message msg = new Message(OP.PARAGRAPH_APPEND_OUTPUT) @@ -1803,7 +1810,7 @@ public void onOutputAppend(String noteId, String paragraphId, int index, String @Override public void onOutputUpdated(String noteId, String paragraphId, int index, InterpreterResult.Type type, String output) { - if (!sendParagraphStatusToFrontend) { + if (!sendParagraphStatusToFrontend()) { return; } Message msg = new Message(OP.PARAGRAPH_UPDATE_OUTPUT) @@ -1841,7 +1848,7 @@ public void onOutputUpdated(String noteId, String paragraphId, int index, */ @Override public void onOutputClear(String noteId, String paragraphId) { - if (!sendParagraphStatusToFrontend) { + if (!sendParagraphStatusToFrontend()) { return; } @@ -2050,7 +2057,7 @@ public void onSuccess(List notesJobInfo, public void onProgressUpdate(Job job, int progress) { if (job instanceof Paragraph) { final Paragraph p = (Paragraph) job; - if (!sendParagraphStatusToFrontend) { + if (!sendParagraphStatusToFrontend()) { return; } connectionManager.broadcast(p.getNote().getId(), diff --git a/zeppelin-server/src/main/java/org/apache/zeppelin/socket/SessionConfigurator.java b/zeppelin-server/src/main/java/org/apache/zeppelin/socket/SessionConfigurator.java index dc4a933e341..ada3053c1a5 100644 --- 
a/zeppelin-server/src/main/java/org/apache/zeppelin/socket/SessionConfigurator.java +++ b/zeppelin-server/src/main/java/org/apache/zeppelin/socket/SessionConfigurator.java @@ -21,16 +21,23 @@ import javax.websocket.HandshakeResponse; import javax.websocket.server.HandshakeRequest; import javax.websocket.server.ServerEndpointConfig; +import javax.websocket.server.ServerEndpointConfig.Configurator; -import org.apache.zeppelin.server.ZeppelinServer; import org.apache.zeppelin.util.WatcherSecurityKey; import org.apache.zeppelin.utils.CorsUtils; -import org.glassfish.hk2.api.ServiceLocatorFactory; +import org.glassfish.hk2.api.ServiceLocator; /** * This class set headers to websocket sessions and inject hk2 when initiating instances by ServerEndpoint annotation. */ -public class SessionConfigurator extends ServerEndpointConfig.Configurator { +public class SessionConfigurator extends Configurator { + + private final ServiceLocator serviceLocator; + + public SessionConfigurator(ServiceLocator serviceLocator) { + this.serviceLocator = serviceLocator; + } + @Override public void modifyHandshake(ServerEndpointConfig sec, HandshakeRequest request, HandshakeResponse response) { @@ -45,6 +52,6 @@ public void modifyHandshake(ServerEndpointConfig sec, HandshakeRequest request, @Override public T getEndpointInstance(Class endpointClass) throws InstantiationException { - return ServiceLocatorFactory.getInstance().find(ZeppelinServer.SERVICE_LOCATOR_NAME).getService(endpointClass); + return serviceLocator.getService(endpointClass); } } diff --git a/zeppelin-server/src/main/java/org/apache/zeppelin/utils/CorsUtils.java b/zeppelin-server/src/main/java/org/apache/zeppelin/utils/CorsUtils.java index 38afff19b01..7b4c2add772 100644 --- a/zeppelin-server/src/main/java/org/apache/zeppelin/utils/CorsUtils.java +++ b/zeppelin-server/src/main/java/org/apache/zeppelin/utils/CorsUtils.java @@ -23,8 +23,13 @@ import org.apache.zeppelin.conf.ZeppelinConfiguration; public class CorsUtils { + + 
private CorsUtils() { + // Helper Class + } + public static final String HEADER_ORIGIN = "Origin"; - public static Boolean isValidOrigin(String sourceHost, ZeppelinConfiguration conf) + public static boolean isValidOrigin(String sourceHost, ZeppelinConfiguration conf) throws UnknownHostException, URISyntaxException { String sourceUriHost = ""; diff --git a/zeppelin-server/src/main/java/org/apache/zeppelin/utils/TestUtils.java b/zeppelin-server/src/main/java/org/apache/zeppelin/utils/TestUtils.java index 0172759d92c..470b3dce493 100644 --- a/zeppelin-server/src/main/java/org/apache/zeppelin/utils/TestUtils.java +++ b/zeppelin-server/src/main/java/org/apache/zeppelin/utils/TestUtils.java @@ -25,12 +25,12 @@ public class TestUtils { public static T getInstance(Class clazz) { checkCalledByTestMethod(); - return getInstance(ServiceLocatorFactory.getInstance().find(ZeppelinServer.SERVICE_LOCATOR_NAME), clazz); + return getInstance(ServiceLocatorFactory.getInstance().find(ZeppelinServer.DEFAULT_SERVICE_LOCATOR_NAME), clazz); } public static void clearInstances() { checkCalledByTestMethod(); - ServiceLocatorFactory.getInstance().destroy(ZeppelinServer.SERVICE_LOCATOR_NAME); + ServiceLocatorFactory.getInstance().destroy(ZeppelinServer.DEFAULT_SERVICE_LOCATOR_NAME); } static T getInstance(ServiceLocator serviceLocator, Class clazz) { diff --git a/zeppelin-server/src/test/java/org/apache/zeppelin/MiniZeppelinServer.java b/zeppelin-server/src/test/java/org/apache/zeppelin/MiniZeppelinServer.java new file mode 100644 index 00000000000..456782df826 --- /dev/null +++ b/zeppelin-server/src/test/java/org/apache/zeppelin/MiniZeppelinServer.java @@ -0,0 +1,320 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.zeppelin; + +import org.apache.commons.io.FileUtils; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpGet; +import org.apache.zeppelin.conf.ZeppelinConfiguration; +import org.apache.zeppelin.conf.ZeppelinConfiguration.ConfVars; +import org.apache.zeppelin.rest.AbstractTestRestApi; +import org.apache.zeppelin.server.ZeppelinServer; +import org.glassfish.hk2.api.ServiceLocator; +import org.glassfish.hk2.api.ServiceLocatorFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.IOException; +import java.net.ServerSocket; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.time.Duration; + +import static org.awaitility.Awaitility.await; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; + +import java.util.Optional; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.TimeUnit; + +public class MiniZeppelinServer implements AutoCloseable { + protected static final Logger LOG = LoggerFactory.getLogger(MiniZeppelinServer.class); + + private final File zeppelinHome; + private final File confDir; + private final 
File notebookDir; + private final String classname; + private ZeppelinServer zepServer; + private final ZeppelinConfiguration zConf; + private String serviceLocator; + + private ExecutorService executor; + protected final Runnable SERVER = new Runnable() { + @Override + public void run() { + try { + zepServer.startZeppelin(); + } catch (Exception e) { + LOG.error("Exception in MiniZeppelinServer", e); + throw new RuntimeException(e); + } + } + }; + + public MiniZeppelinServer(String classname) throws IOException { + this(classname, null); + } + + public MiniZeppelinServer(String classname, String zeppelinConfiguration) throws IOException { + this.classname = classname; + zeppelinHome = Files.createTempDirectory(classname).toFile(); + LOG.info("ZEPPELIN_HOME: " + zeppelinHome.getAbsolutePath()); + confDir = new File(zeppelinHome, "conf"); + confDir.mkdirs(); + LOG.info("ZEPPELIN_CONF_DIR: " + confDir.getAbsolutePath()); + notebookDir = new File(zeppelinHome, "notebook"); + notebookDir.mkdirs(); + + zConf = ZeppelinConfiguration.load(zeppelinConfiguration); + zConf.setProperty(ConfVars.ZEPPELIN_HOME.getVarName(), + zeppelinHome.getAbsoluteFile().toString()); + Optional webWar = getWebWar(); + Optional webAngularWar = getWebAngularWar(); + if (webWar.isPresent()) { + zConf.setProperty(ConfVars.ZEPPELIN_WAR.getVarName(), webWar.get().getAbsolutePath()); + } else { + // some test profile does not build zeppelin-web. 
+ // to prevent zeppelin starting up fail, create zeppelin-web/dist directory + File dummyWebDir = new File(zeppelinHome, "zeppelin-web" + File.separator + "dist"); + dummyWebDir.mkdirs(); + zConf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_WAR.getVarName(), + dummyWebDir.getAbsolutePath()); + } + if (webAngularWar.isPresent()) { + zConf.setProperty(ConfVars.ZEPPELIN_ANGULAR_WAR.getVarName(), + webAngularWar.get().getAbsolutePath()); + } else { + File dummyWebDir = new File(zeppelinHome, "zeppelin-web-angular" + File.separator + "dist"); + dummyWebDir.mkdirs(); + zConf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_ANGULAR_WAR.getVarName(), + dummyWebDir.getAbsolutePath()); + } + } + + private Optional getWebWar() { + File webWarTargetFolder = new File(".." + File.separator + "zeppelin-web" + File.separator + "target"); + if (webWarTargetFolder.exists()) { + for (File targetFile : webWarTargetFolder.listFiles()) { + if (targetFile.getName().contains("zeppelin-web") + && targetFile.getName().contains(".war")) { + return Optional.of(targetFile); + } + } + } + return Optional.empty(); + } + + private Optional getWebAngularWar() { + File webWarTargetFolder = + new File(".." + File.separator + "zeppelin-web-angular" + File.separator + "target"); + if (webWarTargetFolder.exists()) { + for (File targetFile : webWarTargetFolder.listFiles()) { + if (targetFile.getName().contains("zeppelin-web-angular") + && targetFile.getName().contains(".war")) { + return Optional.of(targetFile); + } + } + } + return Optional.empty(); + } + + public void copyBinDir() throws IOException { + File binDirDest = new File(zeppelinHome, "bin"); + File binDirSrc = new File(".." 
+ File.separator + "bin"); + LOG.info("Copy {} to {}", binDirSrc.getAbsolutePath(), + binDirDest.getAbsolutePath()); + FileUtils.copyDirectory(binDirSrc, binDirDest); + } + + public File addConfigFile(String file, String data) throws IOException { + File configfile = new File(confDir, file); + FileUtils.writeStringToFile(configfile, data, StandardCharsets.UTF_8); + return configfile; + } + + public void addInterpreter(String interpreterName) throws IOException { + copyMainInterpreter(); + File interpreterDirDest = new File(zeppelinHome, "interpreter" + File.separator + interpreterName); + File interpreterDirSrc = + new File(".." + File.separator + "interpreter" + File.separator + interpreterName); + LOG.info("Copy {}-Interpreter from {} to {}", interpreterName, + interpreterDirSrc.getAbsolutePath(), + interpreterDirDest.getAbsolutePath()); + FileUtils.copyDirectory(interpreterDirSrc, interpreterDirDest); + } + + public void addLauncher(String launcherName) throws IOException { + File launcherDirDest = new File(zeppelinHome, + "plugins" + File.separator + "Launcher" + File.separator + launcherName); + File launcherDirSrc = + new File(".." + File.separator + "plugins" + File.separator + "Launcher" + File.separator + + launcherName); + LOG.info("Copy {} from {} to {}", launcherName, launcherDirSrc.getAbsolutePath(), + launcherDirDest.getAbsolutePath()); + FileUtils.copyDirectory(launcherDirSrc, launcherDirDest); + } + + public void copyLogProperties() throws IOException { + File confDir = new File(".." 
+ File.separator + "conf"); + for (String conffile : confDir.list()) { + if (conffile.contains("log")) { + File logPropertiesSrc = new File(confDir, conffile); + File logPropertiesDest = + new File(zeppelinHome, "conf" + File.separator + conffile); + if (!logPropertiesDest.exists()) { + LOG.info("Copy {} to {}", logPropertiesSrc.getAbsolutePath(), + logPropertiesDest.getAbsolutePath()); + FileUtils.copyFile(logPropertiesSrc, logPropertiesDest); + } + } + } + } + + public void copyMainInterpreter() throws IOException { + File interpreterDir = new File(".." + File.separator + "interpreter"); + for (String interpreterfile : interpreterDir.list()) { + if (interpreterfile.contains("zeppelin-interpreter-shaded")) { + File zeppelinInterpreterShadedSrc = new File(interpreterDir, interpreterfile); + File zeppelinInterpreterShadedDest = + new File(zeppelinHome, "interpreter" + File.separator + interpreterfile); + if (!zeppelinInterpreterShadedDest.exists()) { + LOG.info("Copy {} to {}", zeppelinInterpreterShadedSrc.getAbsolutePath(), + zeppelinInterpreterShadedDest.getAbsolutePath()); + FileUtils.copyFile(zeppelinInterpreterShadedSrc, zeppelinInterpreterShadedDest); + } + } + } + } + + public String getZeppelinHome() { + return zeppelinHome.getAbsolutePath(); + } + + public ZeppelinConfiguration getZeppelinConfiguration() { + return zConf; + } + + public int getFreePort() { + try (ServerSocket serverSocket = new ServerSocket(0)) { + assertNotNull(serverSocket); + assertTrue(serverSocket.getLocalPort()> 0); + return serverSocket.getLocalPort(); + } catch (IOException e) { + fail("Port is not available"); + } + return 0; + } + + public void start() throws Exception { + start(false); + } + + public ServiceLocator getServiceLocator() { + return ServiceLocatorFactory.getInstance().find(serviceLocator); + } + + public void start(boolean deleteNotebookData) throws Exception { + this.start(deleteNotebookData, + ZeppelinServer.DEFAULT_SERVICE_LOCATOR_NAME); + } + + public void 
start(boolean deleteNotebookData, String serviceLocator) + throws Exception { + LOG.info("Starting ZeppelinServer testClassName: {}", classname); + // copy the resources files to a temp folder + zConf.setServerPort(getFreePort()); + zConf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_HOME.getVarName(), + zeppelinHome.getAbsolutePath()); + zConf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_SEARCH_INDEX_PATH.getVarName(), + new File(zeppelinHome, "index").getAbsolutePath()); + + zConf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_CONF_DIR.getVarName(), + confDir.getAbsolutePath()); + zConf.setProperty( + ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_GROUP_DEFAULT.getVarName(), + "spark"); + if (deleteNotebookData) { + FileUtils.deleteDirectory(notebookDir); + notebookDir.mkdirs(); + } + zConf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_DIR.getVarName(), + notebookDir.getPath()); + LOG.info("Staring ZeppelinServerMock Zeppelin up..."); + this.serviceLocator = serviceLocator; + zepServer = new ZeppelinServer(zConf, serviceLocator); + executor = Executors.newSingleThreadExecutor(); + executor.submit(SERVER); + await().pollDelay(Duration.ofSeconds(2)).atMost(Duration.ofMinutes(3)) + .until(checkIfServerIsRunningCallable()); + LOG.info("ZeppelinServerMock started."); + } + + public boolean checkIfServerIsRunning() { + boolean isRunning = false; + HttpGet httpGet = new HttpGet("http://localhost:" + zConf.getServerPort() + "/api/version"); + try (CloseableHttpResponse response = AbstractTestRestApi.getHttpClient().execute(httpGet)) { + isRunning = response.getStatusLine().getStatusCode() == 200; + } catch (IOException e) { + // Ignore + } + return isRunning; + } + + private Callable checkIfServerIsRunningCallable() { + return () -> checkIfServerIsRunning(); + } + + private Callable checkIfServerIsNotRunningCallable() { + return () -> !checkIfServerIsRunning(); + } + + public void shutDown() throws Exception { + shutDown(false); + } 
+ + public void shutDown(final boolean deleteConfDir) throws Exception { + if (!executor.isShutdown()) { + LOG.info("ZeppelinServerMock shutDown..."); + zepServer.close(); + executor.shutdown(); + executor.shutdownNow(); + await().pollDelay(2, TimeUnit.SECONDS).atMost(3, TimeUnit.MINUTES) + .until(checkIfServerIsNotRunningCallable()); + + LOG.info("ZeppelinServerMock terminated."); + + if (deleteConfDir) { + FileUtils.deleteQuietly(confDir); + } + } + } + + @Override + public void close() throws Exception { + shutDown(); + FileUtils.deleteDirectory(zeppelinHome); + } + + public void destroy() throws Exception { + close(); + } +} diff --git a/zeppelin-server/src/test/java/org/apache/zeppelin/cluster/ClusterEventTest.java b/zeppelin-server/src/test/java/org/apache/zeppelin/cluster/ClusterEventTest.java index 5c72abab44e..79a65c7615d 100644 --- a/zeppelin-server/src/test/java/org/apache/zeppelin/cluster/ClusterEventTest.java +++ b/zeppelin-server/src/test/java/org/apache/zeppelin/cluster/ClusterEventTest.java @@ -24,6 +24,7 @@ import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.util.EntityUtils; import org.apache.thrift.TException; +import org.apache.zeppelin.MiniZeppelinServer; import org.apache.zeppelin.cluster.meta.ClusterMetaType; import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.interpreter.InterpreterResult; @@ -68,8 +69,8 @@ import static org.mockito.Mockito.spy; import static org.mockito.Mockito.when; -public class ClusterEventTest extends ZeppelinServerMock { - private static Logger LOGGER = LoggerFactory.getLogger(ClusterEventTest.class); +public class ClusterEventTest extends AbstractTestRestApi { + private static final Logger LOGGER = LoggerFactory.getLogger(ClusterEventTest.class); private static List clusterAuthEventListenerTests = new ArrayList<>(); private static List clusterNoteEventListenerTests = new ArrayList<>(); @@ -88,14 +89,23 @@ public class ClusterEventTest extends 
ZeppelinServerMock { Gson gson = new Gson(); - @BeforeAll - static void init() throws Exception { - ZeppelinConfiguration zconf = genZeppelinConf(); + private static MiniZeppelinServer zepServer; - ZeppelinServerMock.startUp("ClusterEventTest", zconf); - notebook = TestUtils.getInstance(Notebook.class); - authorizationService = TestUtils.getInstance(AuthorizationService.class); + @BeforeEach + void setup() { + conf = zepServer.getZeppelinConfiguration(); + } + @BeforeAll + static void init() throws Exception { + zepServer = new MiniZeppelinServer(ClusterEventTest.class.getSimpleName()); + zepServer.addInterpreter("md"); + zepServer.addInterpreter("sh"); + genClusterAddressConf(zepServer.getZeppelinConfiguration()); + zepServer.start(); + notebook = zepServer.getServiceLocator().getService(Notebook.class); + authorizationService = zepServer.getServiceLocator().getService(AuthorizationService.class); + ZeppelinConfiguration zconf = zepServer.getZeppelinConfiguration(); schedulerService = new QuartzSchedulerService(zconf, notebook); notebook.initNotebook(); notebook.waitForFinishInit(1, TimeUnit.MINUTES); @@ -136,30 +146,24 @@ static void init() throws Exception { @AfterAll static void destroy() throws Exception { - try { - if (null != clusterClient) { - clusterClient.shutdown(); - } - for (ClusterManagerServer clusterServer : clusterServers) { - clusterServer.shutdown(); - } - - ZeppelinServerMock.shutDown(); - } finally { - // Because zconf is a single instance, it needs clean cluster address - ZeppelinConfiguration zconf = ZeppelinConfiguration.create(); - zconf.setClusterAddress(""); + if (null != clusterClient) { + clusterClient.shutdown(); } - ZeppelinConfiguration.reset(); + for (ClusterManagerServer clusterServer : clusterServers) { + clusterServer.shutdown(); + } + + zepServer.destroy(); LOGGER.info("stopCluster <<<"); } @BeforeEach void setUp() { anonymous = new AuthenticationInfo("anonymous"); + conf = zepServer.getZeppelinConfiguration(); } - private 
static ZeppelinConfiguration genZeppelinConf() + private static void genClusterAddressConf(ZeppelinConfiguration zconf) throws IOException, InterruptedException { String clusterAddrList = ""; String zServerHost = RemoteInterpreterUtils.findAvailableHostAddress(); @@ -171,11 +175,8 @@ private static ZeppelinConfiguration genZeppelinConf() clusterAddrList += ","; } } - ZeppelinConfiguration zconf = ZeppelinConfiguration.create(); zconf.setClusterAddress(clusterAddrList); LOGGER.info("clusterAddrList = {}", clusterAddrList); - - return zconf; } public static ClusterManagerServer startClusterSingleNode(String clusterAddrList, @@ -298,7 +299,7 @@ void testRenameNoteEvent() throws IOException { final String newName = "testName"; String jsonRequest = "{\"name\": " + newName + "}"; - CloseableHttpResponse put = AbstractTestRestApi.httpPut("/notebook/" + noteId + "/rename/", jsonRequest); + CloseableHttpResponse put = httpPut("/notebook/" + noteId + "/rename/", jsonRequest); assertThat("test testRenameNote:", put, AbstractTestRestApi.isAllowed()); put.close(); @@ -331,8 +332,8 @@ void testCloneNoteEvent() throws IOException { note1Id = TestUtils.getInstance(Notebook.class).createNote("note1", anonymous); Thread.sleep(1000); - CloseableHttpResponse post = AbstractTestRestApi.httpPost("/notebook/" + note1Id, ""); - LOG.info("testCloneNote response\n" + post.getStatusLine().getReasonPhrase()); + CloseableHttpResponse post = httpPost("/notebook/" + note1Id, ""); + LOGGER.info("testCloneNote response\n" + post.getStatusLine().getReasonPhrase()); assertThat(post, AbstractTestRestApi.isAllowed()); Map resp = gson.fromJson(EntityUtils.toString(post.getEntity(), StandardCharsets.UTF_8), @@ -341,7 +342,7 @@ void testCloneNoteEvent() throws IOException { post.close(); Thread.sleep(1000); - CloseableHttpResponse get = AbstractTestRestApi.httpGet("/notebook/" + clonedNoteId); + CloseableHttpResponse get = httpGet("/notebook/" + clonedNoteId); assertThat(get, 
AbstractTestRestApi.isAllowed()); Map resp2 = gson.fromJson(EntityUtils.toString(get.getEntity(), StandardCharsets.UTF_8), new TypeToken>() {}.getType()); @@ -383,8 +384,10 @@ void insertParagraphEvent() throws IOException { // insert new paragraph NewParagraphRequest newParagraphRequest = new NewParagraphRequest("Test", null, null, null); - CloseableHttpResponse post = AbstractTestRestApi.httpPost("/notebook/" + noteId + "/paragraph", gson.toJson(newParagraphRequest)); - LOG.info("test clear paragraph output response\n" + EntityUtils.toString(post.getEntity(), StandardCharsets.UTF_8)); + CloseableHttpResponse post = + httpPost("/notebook/" + noteId + "/paragraph", gson.toJson(newParagraphRequest)); + LOGGER.info("test clear paragraph output response\n" + + EntityUtils.toString(post.getEntity(), StandardCharsets.UTF_8)); assertThat(post, AbstractTestRestApi.isAllowed()); post.close(); @@ -488,15 +491,15 @@ void testInterpreterEvent() throws IOException, InterruptedException { " \"exclusions\":[]\n" + " }]," + "\"option\": { \"remote\": true, \"session\": false }}"; - CloseableHttpResponse post = AbstractTestRestApi.httpPost("/interpreter/setting", reqBody1); + CloseableHttpResponse post = httpPost("/interpreter/setting", reqBody1); String postResponse = EntityUtils.toString(post.getEntity(), StandardCharsets.UTF_8); - LOG.info("testCreatedInterpreterDependencies create response\n" + postResponse); + LOGGER.info("testCreatedInterpreterDependencies create response\n" + postResponse); InterpreterSetting created = convertResponseToInterpreterSetting(postResponse); assertThat("test create method:", post, AbstractTestRestApi.isAllowed()); post.close(); // 1. 
Call settings API - CloseableHttpResponse get = AbstractTestRestApi.httpGet("/interpreter/setting"); + CloseableHttpResponse get = httpGet("/interpreter/setting"); String rawResponse = EntityUtils.toString(get.getEntity(), StandardCharsets.UTF_8); get.close(); @@ -538,8 +541,10 @@ void testInterpreterEvent() throws IOException, InterruptedException { jsonObject.addProperty("value", "this is new prop"); jsonObject.addProperty("type", "textarea"); jsonRequest.getAsJsonObject("properties").add("propname2", jsonObject); - CloseableHttpResponse put = AbstractTestRestApi.httpPut("/interpreter/setting/" + created.getId(), jsonRequest.toString()); - LOG.info("testSettingCRUD update response\n" + EntityUtils.toString(put.getEntity(), StandardCharsets.UTF_8)); + CloseableHttpResponse put = + httpPut("/interpreter/setting/" + created.getId(), jsonRequest.toString()); + LOGGER.info("testSettingCRUD update response\n" + + EntityUtils.toString(put.getEntity(), StandardCharsets.UTF_8)); // then: call update setting API assertThat("test update method:", put, AbstractTestRestApi.isAllowed()); put.close(); @@ -547,8 +552,9 @@ void testInterpreterEvent() throws IOException, InterruptedException { checkClusterIntpSettingEventListener(); // 3: call delete setting API - CloseableHttpResponse delete = AbstractTestRestApi.httpDelete("/interpreter/setting/" + created.getId()); - LOG.info("testSettingCRUD delete response\n" + EntityUtils.toString(delete.getEntity(), StandardCharsets.UTF_8)); + CloseableHttpResponse delete = httpDelete("/interpreter/setting/" + created.getId()); + LOGGER.info("testSettingCRUD delete response\n" + + EntityUtils.toString(delete.getEntity(), StandardCharsets.UTF_8)); // then: call delete setting API assertThat("Test delete method:", delete, AbstractTestRestApi.isAllowed()); delete.close(); diff --git a/zeppelin-server/src/test/java/org/apache/zeppelin/cluster/ClusterNoteEventListenerTest.java 
b/zeppelin-server/src/test/java/org/apache/zeppelin/cluster/ClusterNoteEventListenerTest.java index 8279e7d2ae5..ccc8a2b3226 100644 --- a/zeppelin-server/src/test/java/org/apache/zeppelin/cluster/ClusterNoteEventListenerTest.java +++ b/zeppelin-server/src/test/java/org/apache/zeppelin/cluster/ClusterNoteEventListenerTest.java @@ -20,9 +20,13 @@ import com.google.gson.reflect.TypeToken; import org.apache.zeppelin.cluster.event.ClusterEventListener; import org.apache.zeppelin.cluster.event.ClusterMessage; +import org.apache.zeppelin.conf.ZeppelinConfiguration; +import org.apache.zeppelin.notebook.GsonNoteParser; import org.apache.zeppelin.notebook.Note; +import org.apache.zeppelin.notebook.NoteParser; import org.apache.zeppelin.notebook.Paragraph; import org.apache.zeppelin.user.AuthenticationInfo; +import org.junit.jupiter.api.BeforeEach; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -37,6 +41,15 @@ public class ClusterNoteEventListenerTest implements ClusterEventListener { public String receiveMsg = null; + private ZeppelinConfiguration conf; + private NoteParser noteParser; + + @BeforeEach + void setup() { + conf = ZeppelinConfiguration.load(); + noteParser = new GsonNoteParser(conf); + } + @Override public void onClusterEvent(String msg) { receiveMsg = msg; @@ -56,13 +69,13 @@ public void onClusterEvent(String msg) { LOGGER.debug(authenticationInfo.toJson()); } else if (key.equals("Note")) { try { - note = Note.fromJson(null, json); + note = noteParser.fromJson(null, json); + LOGGER.debug(note.toJson()); } catch (IOException e) { LOGGER.warn("Fail to parse note json", e); } - LOGGER.debug(note.toJson()); } else if (key.equals("Paragraph")) { - paragraph = Paragraph.fromJson(json); + paragraph = noteParser.fromJson(json); LOGGER.debug(paragraph.toJson()); } else if (key.equals("Set")) { Gson gson = new Gson(); diff --git a/zeppelin-server/src/test/java/org/apache/zeppelin/cluster/ZeppelinServerMock.java 
b/zeppelin-server/src/test/java/org/apache/zeppelin/cluster/ZeppelinServerMock.java deleted file mode 100644 index 4ea51ef2807..00000000000 --- a/zeppelin-server/src/test/java/org/apache/zeppelin/cluster/ZeppelinServerMock.java +++ /dev/null @@ -1,185 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.zeppelin.cluster; - -import org.apache.commons.io.FileUtils; -import org.apache.zeppelin.conf.ZeppelinConfiguration; -import org.apache.zeppelin.interpreter.InterpreterSetting; -import org.apache.zeppelin.notebook.Notebook; -import org.apache.zeppelin.plugin.PluginManager; -import org.apache.zeppelin.rest.AbstractTestRestApi; -import org.apache.zeppelin.server.ZeppelinServer; -import org.apache.zeppelin.utils.TestUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.util.List; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; - -public class ZeppelinServerMock { - protected static final Logger LOG = LoggerFactory.getLogger(ZeppelinServerMock.class); - - protected static final boolean WAS_RUNNING = AbstractTestRestApi.checkIfServerIsRunning(); - - protected static File zeppelinHome; - protected static File confDir; - protected static File notebookDir; - - private String getUrl(String path) { - String url; - if (System.getProperty("url") != null) { - url = System.getProperty("url"); - } else { - url = "http://localhost:8080"; - } - url += AbstractTestRestApi.REST_API_URL; - if (path != null) { - url += path; - } - - return url; - } - - static ExecutorService executor; - protected static final Runnable SERVER = new Runnable() { - @Override - public void run() { - try { - TestUtils.clearInstances(); - ZeppelinServer.main(new String[]{""}); - } catch (Exception e) { - LOG.error("Exception in WebDriverManager while getWebDriver ", e); - throw new RuntimeException(e); - } - } - }; - - private static void start(String testClassName, boolean cleanData, ZeppelinConfiguration zconf) - throws Exception { - LOG.info("Starting ZeppelinServer testClassName: {}", testClassName); - - if (!WAS_RUNNING) { - // copy the resources files to a temp folder - zeppelinHome = new File(".."); - LOG.info("ZEPPELIN_HOME: " + zeppelinHome.getAbsolutePath()); - confDir = new 
File(zeppelinHome, "conf_" + testClassName); - confDir.mkdirs(); - zconf.save(confDir + "/zeppelin-site.xml"); - - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_HOME.getVarName(), - zeppelinHome.getAbsolutePath()); - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_WAR.getVarName(), - new File("../zeppelin-web/dist").getAbsolutePath()); - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_CONF_DIR.getVarName(), - confDir.getAbsolutePath()); - System.setProperty( - ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_GROUP_DEFAULT.getVarName(), - "spark"); - notebookDir = new File(zeppelinHome.getAbsolutePath() + "/notebook_" + testClassName); - if (cleanData) { - FileUtils.deleteDirectory(notebookDir); - } - System.setProperty( - ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_DIR.getVarName(), - notebookDir.getPath() - ); - LOG.info("zconf.getClusterAddress() = {}", zconf.getClusterAddress()); - - // some test profile does not build zeppelin-web. - // to prevent zeppelin starting up fail, create zeppelin-web/dist directory - new File("../zeppelin-web/dist").mkdirs(); - - LOG.info("Staring ZeppelinServerMock Zeppelin up..."); - - executor = Executors.newSingleThreadExecutor(); - executor.submit(SERVER); - long s = System.currentTimeMillis(); - boolean started = false; - while (System.currentTimeMillis() - s < 1000 * 60 * 3) { // 3 minutes - Thread.sleep(2000); - started = AbstractTestRestApi.checkIfServerIsRunning(); - if (started == true) { - break; - } - } - if (started == false) { - throw new RuntimeException("Can not start Zeppelin server"); - } - //ZeppelinServer.notebook.setParagraphJobListener(NotebookServer.getInstance()); - LOG.info("ZeppelinServerMock stared."); - } - } - - protected static void startUp(String testClassName, ZeppelinConfiguration zconf) throws Exception { - start(testClassName, true, zconf); - } - - protected static void shutDown() throws Exception { - shutDown(true); - } - - protected static void 
shutDown(final boolean deleteConfDir) throws Exception { - if (!WAS_RUNNING && TestUtils.getInstance(Notebook.class) != null) { - // restart interpreter to stop all interpreter processes - List settingList = TestUtils.getInstance(Notebook.class).getInterpreterSettingManager() - .get(); - if (!TestUtils.getInstance(Notebook.class).getConf().isRecoveryEnabled()) { - for (InterpreterSetting setting : settingList) { - TestUtils.getInstance(Notebook.class).getInterpreterSettingManager().restart(setting.getId()); - } - } - LOG.info("ZeppelinServerMock shutDown..."); - executor.shutdown(); - executor.shutdownNow(); - System.clearProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_HOME.getVarName()); - System.clearProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_WAR.getVarName()); - System.clearProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_CONF_DIR.getVarName()); - System.clearProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_CONFIG_FS_DIR.getVarName()); - System.clearProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_GROUP_DEFAULT.getVarName()); - System.clearProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_DIR.getVarName()); - - long s = System.currentTimeMillis(); - boolean started = true; - while (System.currentTimeMillis() - s < 1000 * 60 * 3) { // 3 minutes - Thread.sleep(2000); - started = AbstractTestRestApi.checkIfServerIsRunning(); - if (started == false) { - break; - } - } - if (started == true) { - throw new RuntimeException("Can not stop Zeppelin server"); - } - - ClusterManagerServer clusterManagerServer = - ClusterManagerServer.getInstance(ZeppelinConfiguration.create()); - clusterManagerServer.shutdown(); - - LOG.info("ZeppelinServerMock terminated."); - - if (deleteConfDir) { - FileUtils.deleteDirectory(confDir); - } - PluginManager.reset(); - ZeppelinConfiguration.reset(); - } - } - - -} diff --git a/zeppelin-server/src/test/java/org/apache/zeppelin/configuration/RequestHeaderSizeTest.java 
b/zeppelin-server/src/test/java/org/apache/zeppelin/configuration/RequestHeaderSizeTest.java index 68d14400d96..60f6d17c10d 100644 --- a/zeppelin-server/src/test/java/org/apache/zeppelin/configuration/RequestHeaderSizeTest.java +++ b/zeppelin-server/src/test/java/org/apache/zeppelin/configuration/RequestHeaderSizeTest.java @@ -25,40 +25,49 @@ import org.apache.http.client.methods.HttpGet; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; - -import org.apache.zeppelin.conf.ZeppelinConfiguration; +import org.apache.zeppelin.MiniZeppelinServer; +import org.apache.zeppelin.conf.ZeppelinConfiguration.ConfVars; import org.apache.zeppelin.rest.AbstractTestRestApi; -import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; class RequestHeaderSizeTest extends AbstractTestRestApi { private static final int REQUEST_HEADER_MAX_SIZE = 20000; - @BeforeEach - void startZeppelin() throws Exception { - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_SERVER_JETTY_REQUEST_HEADER_SIZE - .getVarName(), String.valueOf(REQUEST_HEADER_MAX_SIZE)); - startUp(RequestHeaderSizeTest.class.getSimpleName()); + private static MiniZeppelinServer zep; + + @BeforeAll + static void init() throws Exception { + zep = new MiniZeppelinServer(RequestHeaderSizeTest.class.getSimpleName()); + zep.getZeppelinConfiguration().setProperty(ConfVars.ZEPPELIN_SERVER_JETTY_REQUEST_HEADER_SIZE + .getVarName(), String.valueOf(REQUEST_HEADER_MAX_SIZE)); + zep.start(); } - @AfterEach - void stopZeppelin() throws Exception { - shutDown(); + @AfterAll + static void destroy() throws Exception { + zep.destroy(); + } + + @BeforeEach + void setup() { + conf = zep.getZeppelinConfiguration(); } @Test void increased_request_header_size_do_not_cause_431_when_request_size_is_over_8K() throws Exception { CloseableHttpClient client 
= HttpClients.createDefault(); - HttpGet httpGet = new HttpGet(getUrlToTest() + "/version"); + HttpGet httpGet = new HttpGet(getUrlToTest(conf) + "/version"); String headerValue = RandomStringUtils.randomAlphanumeric(REQUEST_HEADER_MAX_SIZE - 2000); httpGet.setHeader("not_too_large_header", headerValue); CloseableHttpResponse response = client.execute(httpGet); assertThat(response.getStatusLine().getStatusCode(), is(HttpStatus.SC_OK)); response.close(); - httpGet = new HttpGet(getUrlToTest() + "/version"); + httpGet = new HttpGet(getUrlToTest(conf) + "/version"); headerValue = RandomStringUtils.randomAlphanumeric(REQUEST_HEADER_MAX_SIZE + 2000); httpGet.setHeader("too_large_header", headerValue); response = client.execute(httpGet); diff --git a/zeppelin-server/src/test/java/org/apache/zeppelin/integration/TestHelper.java b/zeppelin-server/src/test/java/org/apache/zeppelin/integration/TestHelper.java new file mode 100644 index 00000000000..e4d270c9489 --- /dev/null +++ b/zeppelin-server/src/test/java/org/apache/zeppelin/integration/TestHelper.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.zeppelin.integration; + +import java.util.concurrent.Callable; + +import org.apache.zeppelin.MiniZeppelinServer; +import org.apache.zeppelin.interpreter.InterpreterSetting; +import org.apache.zeppelin.interpreter.InterpreterSettingManager; + +import static org.awaitility.Awaitility.await; + +public class TestHelper { + + private TestHelper() { + // Helper class + } + + public static void configureSparkInterpreter(MiniZeppelinServer zepServer, String sparkHome) { + await().until(interpreterSettingManagerAvailable(zepServer)); + InterpreterSetting sparkInterpreterSetting = zepServer.getServiceLocator() + .getService(InterpreterSettingManager.class).getInterpreterSettingByName("spark"); + sparkInterpreterSetting.setProperty("spark.master", "local[*]"); + sparkInterpreterSetting.setProperty("SPARK_HOME", sparkHome); + sparkInterpreterSetting.close(); + } + + private static Callable interpreterSettingManagerAvailable( + MiniZeppelinServer zepServer) { + return () -> zepServer.getServiceLocator().getService(InterpreterSettingManager.class) != null; + } +} diff --git a/zeppelin-server/src/test/java/org/apache/zeppelin/metric/MetricEndpointTest.java b/zeppelin-server/src/test/java/org/apache/zeppelin/metric/MetricEndpointTest.java index bd69d7e407b..ab20594e5ce 100644 --- a/zeppelin-server/src/test/java/org/apache/zeppelin/metric/MetricEndpointTest.java +++ b/zeppelin-server/src/test/java/org/apache/zeppelin/metric/MetricEndpointTest.java @@ -26,24 +26,29 @@ import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpGet; import org.apache.http.util.EntityUtils; +import org.apache.zeppelin.MiniZeppelinServer; import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.rest.AbstractTestRestApi; +import org.apache.zeppelin.rest.NotebookRepoRestApiTest; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; class 
MetricEndpointTest extends AbstractTestRestApi { + private static MiniZeppelinServer zepServer; + @BeforeAll static void setUp() throws Exception { - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_METRIC_ENABLE_PROMETHEUS.getVarName(), - "true"); - AbstractTestRestApi.startUp(MetricEndpointTest.class.getSimpleName()); + zepServer = new MiniZeppelinServer(NotebookRepoRestApiTest.class.getSimpleName()); + zepServer.getZeppelinConfiguration().setProperty( + ZeppelinConfiguration.ConfVars.ZEPPELIN_METRIC_ENABLE_PROMETHEUS.getVarName(), "true"); + zepServer.start(); } @AfterAll static void destroy() throws Exception { - AbstractTestRestApi.shutDown(); + zepServer.destroy(); } /** @@ -52,18 +57,16 @@ static void destroy() throws Exception { */ @Test void testPrometheusMetricJVM() throws IOException { - CloseableHttpResponse get = getHttpClient().execute(new HttpGet(getUrlToTest() + "/metrics")); - assertEquals(200, get.getStatusLine().getStatusCode()); - String response = EntityUtils.toString(get.getEntity()); - assertTrue(response.contains("jvm_memory"), "Contains JVM metric"); - get.close(); + try ( + CloseableHttpResponse get = getHttpClient().execute( + new HttpGet(getUrlToTest(zepServer.getZeppelinConfiguration()) + "/metrics"))) { + assertEquals(200, get.getStatusLine().getStatusCode()); + String response = EntityUtils.toString(get.getEntity()); + assertTrue(response.contains("jvm_memory"), "Contains JVM metric"); + } } - protected static String getUrlToTest() { - String url = "http://localhost:8080"; - if (System.getProperty("url") != null) { - url = System.getProperty("url"); - } - return url; + protected static String getUrlToTest(ZeppelinConfiguration zConf) { + return "http://localhost:" + zConf.getServerPort(); } } diff --git a/zeppelin-server/src/test/java/org/apache/zeppelin/recovery/RecoveryTest.java b/zeppelin-server/src/test/java/org/apache/zeppelin/recovery/RecoveryTest.java index b130364eb46..bd69ecd71fa 100644 --- 
a/zeppelin-server/src/test/java/org/apache/zeppelin/recovery/RecoveryTest.java +++ b/zeppelin-server/src/test/java/org/apache/zeppelin/recovery/RecoveryTest.java @@ -21,6 +21,7 @@ import org.apache.commons.io.FileUtils; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.util.EntityUtils; +import org.apache.zeppelin.MiniZeppelinServer; import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.interpreter.InterpreterException; import org.apache.zeppelin.interpreter.InterpreterSetting; @@ -32,52 +33,68 @@ import org.apache.zeppelin.notebook.Paragraph; import org.apache.zeppelin.rest.AbstractTestRestApi; import org.apache.zeppelin.scheduler.Job; -import org.apache.zeppelin.server.ZeppelinServer; import org.apache.zeppelin.user.AuthenticationInfo; import org.apache.zeppelin.utils.TestUtils; -import org.glassfish.hk2.api.ServiceLocatorFactory; -import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.fail; import static org.hamcrest.MatcherAssert.assertThat; +import static org.awaitility.Awaitility.await; import java.io.File; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.util.Map; +import java.util.concurrent.Callable; class RecoveryTest extends AbstractTestRestApi { - + private static final Logger LOG = LoggerFactory.getLogger(RecoveryTest.class); private Gson gson = new Gson(); private static File recoveryDir = null; + private static MiniZeppelinServer zepServer; private Notebook notebook; private AuthenticationInfo anonymous = new AuthenticationInfo("anonymous"); - @BeforeEach - void init() throws Exception { - 
System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_RECOVERY_STORAGE_CLASS.getVarName(), - FileSystemRecoveryStorage.class.getName()); + @BeforeAll + static void init() throws Exception { + zepServer = new MiniZeppelinServer(RecoveryTest.class.getSimpleName()); + zepServer.addInterpreter("sh"); + zepServer.addInterpreter("python"); + zepServer.copyLogProperties(); + zepServer.copyBinDir(); + zepServer.getZeppelinConfiguration().setProperty( + ZeppelinConfiguration.ConfVars.ZEPPELIN_RECOVERY_STORAGE_CLASS.getVarName(), + FileSystemRecoveryStorage.class.getName()); recoveryDir = Files.createTempDirectory("recovery").toFile(); - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_RECOVERY_DIR.getVarName(), - recoveryDir.getAbsolutePath()); - startUp(RecoveryTest.class.getSimpleName()); + zepServer.getZeppelinConfiguration().setProperty( + ZeppelinConfiguration.ConfVars.ZEPPELIN_RECOVERY_DIR.getVarName(), + recoveryDir.getAbsolutePath()); + zepServer.start(); - notebook = ServiceLocatorFactory.getInstance().find(ZeppelinServer.SERVICE_LOCATOR_NAME).getService(Notebook.class); } - @AfterEach - void destroy() throws Exception { - shutDown(true, true); + @AfterAll + static void destroy() throws Exception { + zepServer.destroy(); FileUtils.deleteDirectory(recoveryDir); - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_RECOVERY_STORAGE_CLASS.getVarName(), - ZeppelinConfiguration.ConfVars.ZEPPELIN_RECOVERY_STORAGE_CLASS.getStringValue()); } + @BeforeEach + void setUp() { + conf = zepServer.getZeppelinConfiguration(); + notebook = zepServer.getServiceLocator().getService(Notebook.class); + anonymous = new AuthenticationInfo("anonymous"); + } + + @Test void testRecovery() throws Exception { LOG.info("Test testRecovery"); @@ -107,8 +124,8 @@ void testRecovery() throws Exception { }); // shutdown zeppelin and restart it - shutDown(); - startUp(RecoveryTest.class.getSimpleName(), false); + zepServer.shutDown(false); + zepServer.start(); // run the 
paragraph again, but change the text to print variable `user` Thread.sleep(10 * 1000); @@ -177,8 +194,8 @@ void testRecovery_2() throws Exception { }); // shutdown zeppelin and restart it - shutDown(); - startUp(RecoveryTest.class.getSimpleName(), false); + zepServer.shutDown(); + zepServer.start(); Thread.sleep(5 * 1000); // run the paragraph again, but change the text to print variable `user`. @@ -237,11 +254,9 @@ void testRecovery_3() throws Exception { }); // shutdown zeppelin and restart it - shutDown(); - StopInterpreter.main(new String[]{}); - - startUp(RecoveryTest.class.getSimpleName(), false); - + zepServer.shutDown(); + new StopInterpreter(conf); + zepServer.start(); Thread.sleep(5 * 1000); // run the paragraph again, but change the text to print variable `user`. // can not recover the python interpreter, because it has been shutdown. @@ -271,12 +286,21 @@ void testRecovery_3() throws Exception { } } + private Callable hasParagraphStatus(Paragraph p, Job.Status status) { + return () -> p.getStatus().equals(status); + } + + private Callable isParagraphTerminated(Paragraph p) { + return () -> p.isTerminated(); + } + @Test void testRecovery_Running_Paragraph_sh() throws Exception { LOG.info("Test testRecovery_Running_Paragraph_sh"); String note1Id = null; try { - note1Id = TestUtils.getInstance(Notebook.class).createNote("note4", AuthenticationInfo.ANONYMOUS); + note1Id = zepServer.getServiceLocator().getService(Notebook.class).createNote("note4", + AuthenticationInfo.ANONYMOUS); Paragraph p1 = TestUtils.getInstance(Notebook.class).processNote(note1Id, note1 -> { return note1.addNewParagraph(AuthenticationInfo.ANONYMOUS); @@ -286,31 +310,20 @@ void testRecovery_Running_Paragraph_sh() throws Exception { CloseableHttpResponse post = httpPost("/notebook/job/" + note1Id + "/" + p1.getId(), ""); assertThat(post, isAllowed()); post.close(); - long start = System.currentTimeMillis(); // wait until paragraph is RUNNING - while((System.currentTimeMillis() - start) < 
10 * 1000) { - if (p1.getStatus() == Job.Status.RUNNING) { - break; - } - Thread.sleep(1000); - } + await().until(hasParagraphStatus(p1, Job.Status.RUNNING)); if (p1.getStatus() != Job.Status.RUNNING) { fail("Fail to run paragraph: " + p1.getReturn()); } // shutdown zeppelin and restart it - shutDown(); - startUp(RecoveryTest.class.getSimpleName(), false); - + zepServer.shutDown(); + zepServer.start(); + Thread.sleep(5000); // wait until paragraph is finished - start = System.currentTimeMillis(); - while((System.currentTimeMillis() - start) < 10 * 1000) { - if (p1.isTerminated()) { - break; - } - Thread.sleep(1000); - } - + await().until(isParagraphTerminated(p1)); + // Wait because paragraph is re submited + Thread.sleep(11 * 1000); assertEquals(Job.Status.FINISHED, p1.getStatus()); assertEquals("hello\n", p1.getReturn().message().get(0).getData()); Thread.sleep(5 * 1000); @@ -355,11 +368,11 @@ void testRecovery_Finished_Paragraph_python() throws Exception { } // shutdown zeppelin and restart it - shutDown(); + zepServer.shutDown(); // sleep 15 seconds to make sure the paragraph is finished Thread.sleep(15 * 1000); - startUp(RecoveryTest.class.getSimpleName(), false); + zepServer.start(); // sleep 10 seconds to make sure recovering is finished Thread.sleep(10 * 1000); diff --git a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java index 210d2fdf691..df1432d57c2 100644 --- a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java +++ b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java @@ -23,7 +23,6 @@ import org.apache.commons.exec.CommandLine; import org.apache.commons.exec.DefaultExecutor; import org.apache.commons.exec.PumpStreamHandler; -import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.StringUtils; import org.apache.http.Header; import org.apache.http.HttpResponse; @@ -42,41 
+41,27 @@ import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; import org.apache.http.message.BasicNameValuePair; -import org.apache.zeppelin.notebook.Notebook; -import org.apache.zeppelin.plugin.PluginManager; -import org.apache.zeppelin.utils.TestUtils; import org.hamcrest.Description; import org.hamcrest.Matcher; import org.hamcrest.TypeSafeMatcher; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.File; import java.io.IOException; import java.lang.ref.WeakReference; -import java.net.InetAddress; -import java.net.UnknownHostException; import java.nio.charset.StandardCharsets; import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; import java.util.regex.Pattern; import org.apache.zeppelin.conf.ZeppelinConfiguration; -import org.apache.zeppelin.interpreter.InterpreterSetting; -import org.apache.zeppelin.server.ZeppelinServer; public abstract class AbstractTestRestApi { - protected static final Logger LOG = LoggerFactory.getLogger(AbstractTestRestApi.class); + private static final Logger LOG = LoggerFactory.getLogger(AbstractTestRestApi.class); public static final String REST_API_URL = "/api"; - static final String URL = getUrlToTest(); - protected static final boolean WAS_RUNNING = checkIfServerIsRunning(); - static boolean isRunningWithAuth = false; + protected ZeppelinConfiguration conf; - private static File shiroIni = null; - private static String zeppelinShiro = + protected static final String ZEPPELIN_SHIRO = "[users]\n" + "admin = password1, admin\n" + "user1 = password2, role1, role2\n" + @@ -97,7 +82,7 @@ public abstract class AbstractTestRestApi { "/api/cluster/address = anon\n" + "/** = authc"; - private static String zeppelinShiroKnox = + protected static final String ZEPPELIN_SHIRO_KNOX = "[main]\n" + "knoxJwtRealm = org.apache.zeppelin.realm.jwt.KnoxJwtRealm\n" + "knoxJwtRealm.providerUrl = 
https://domain.example.com/\n" + @@ -118,8 +103,7 @@ public abstract class AbstractTestRestApi { "/api/cluster/address = anon\n" + "/** = authc"; - private static File knoxSsoPem = null; - private static String knoxSsoPemCertificate = + protected static final String KNOW_SSO_PEM_CERTIFICATE = "-----BEGIN CERTIFICATE-----\n" + "MIIChjCCAe+gAwIBAgIJALYrdDEXKwcqMA0GCSqGSIb3DQEBBQUAMIGEMQswCQYD\n" + "VQQGEwJVUzENMAsGA1UECBMEVGVzdDENMAsGA1UEBxMEVGVzdDEPMA0GA1UEChMG\n" @@ -137,10 +121,6 @@ public abstract class AbstractTestRestApi { + "/bA8TFNPblPxavIOcd+R+RfFmT1YKfYIhco=\n" + "-----END CERTIFICATE-----"; - protected static File zeppelinHome; - protected static File confDir; - protected static File notebookDir; - private static CloseableHttpClient httpClient; public static CloseableHttpClient getHttpClient() { @@ -150,239 +130,25 @@ public static CloseableHttpClient getHttpClient() { return httpClient; } - private String getUrl(String path) { - String url; - if (System.getProperty("url") != null) { - url = System.getProperty("url"); - } else { - url = "http://localhost:8080"; - } - url += REST_API_URL; - if (path != null) { - url += path; - } - - return url; - } - - protected static String getUrlToTest() { - String url = "http://localhost:8080" + REST_API_URL; - if (System.getProperty("url") != null) { - url = System.getProperty("url"); - } - return url; - } - - static ExecutorService executor; - protected static final Runnable SERVER = new Runnable() { - @Override - public void run() { - try { - TestUtils.clearInstances(); - ZeppelinServer.main(new String[]{""}); - } catch (Throwable e) { - LOG.error("Exception in WebDriverManager while getWebDriver ", e); - throw new RuntimeException(e); - } - } - }; - - private static void start(boolean withAuth, - String testClassName, - boolean withKnox, - boolean cleanData) - throws Exception { - LOG.info("Starting ZeppelinServer withAuth: {}, testClassName: {}, withKnox: {}", - withAuth, testClassName, withKnox); - - if 
(!WAS_RUNNING) { - ZeppelinConfiguration.reset(); - // copy the resources files to a temp folder - zeppelinHome = new File(".."); - LOG.info("ZEPPELIN_HOME: " + zeppelinHome.getAbsolutePath()); - confDir = new File(zeppelinHome, "conf_" + testClassName); - FileUtils.deleteDirectory(confDir); - LOG.info("ZEPPELIN_CONF_DIR: " + confDir.getAbsolutePath()); - confDir.mkdirs(); - - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_HOME.getVarName(), - zeppelinHome.getAbsolutePath()); - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_WAR.getVarName(), - new File("../zeppelin-web/dist").getAbsolutePath()); - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_ANGULAR_WAR.getVarName(), - new File("../zeppelin-web-angular/dist").getAbsolutePath()); - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_CONF_DIR.getVarName(), - confDir.getAbsolutePath()); - System.setProperty( - ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_GROUP_DEFAULT.getVarName(), - "spark"); - - notebookDir = new File(zeppelinHome.getAbsolutePath() + "/notebook_" + testClassName); - if (cleanData) { - FileUtils.deleteDirectory(notebookDir); - } - System.setProperty( - ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_DIR.getVarName(), - notebookDir.getPath() - ); - - // some test profile does not build zeppelin-web. - // to prevent zeppelin starting up fail, create zeppelin-web/dist directory - new File("../zeppelin-web/dist").mkdirs(); - new File("../zeppelin-web-angular/dist").mkdirs(); - - LOG.info("Starting Zeppelin Server..."); - ZeppelinConfiguration conf = ZeppelinConfiguration.create(); - LOG.info("zconf.getClusterAddress() = {}", conf.getClusterAddress()); - - if (withAuth) { - isRunningWithAuth = true; - - // Create a shiro env test. 
- shiroIni = new File(confDir, "shiro.ini"); - if (!shiroIni.exists()) { - shiroIni.createNewFile(); - } - if (withKnox) { - FileUtils.writeStringToFile(shiroIni, - zeppelinShiroKnox.replaceAll("knox-sso.pem", confDir + "/knox-sso.pem"), - StandardCharsets.UTF_8); - knoxSsoPem = new File(confDir, "knox-sso.pem"); - if (!knoxSsoPem.exists()) { - knoxSsoPem.createNewFile(); - } - FileUtils.writeStringToFile(knoxSsoPem, knoxSsoPemCertificate, StandardCharsets.UTF_8); - } else { - FileUtils.writeStringToFile(shiroIni, zeppelinShiro, StandardCharsets.UTF_8); - } - - } - - executor = Executors.newSingleThreadExecutor(); - executor.submit(SERVER); - long s = System.currentTimeMillis(); - boolean started = false; - while (System.currentTimeMillis() - s < 1000 * 60 * 3) { // 3 minutes - Thread.sleep(2000); - started = checkIfServerIsRunning(); - if (started == true) { - break; - } - } - if (started == false) { - throw new RuntimeException("Can not start Zeppelin server"); - } - - LOG.info("Zeppelin Server is started."); - } - } - - protected static void startUpWithKnoxEnable(String testClassName) throws Exception { - start(true, testClassName, true, true); + protected static String getUrlToTest(ZeppelinConfiguration conf) { + return "http://localhost:" + conf.getServerPort() + REST_API_URL; } - protected static void startUpWithAuthenticationEnable(String testClassName) throws Exception { - start(true, testClassName, false, true); - } - - protected static void startUp(String testClassName) throws Exception { - start(false, testClassName, false, true); - } - - protected static void startUp(String testClassName, boolean cleanData) throws Exception { - start(false, testClassName, false, cleanData); - } - - private static String getHostname() { - try { - return InetAddress.getLocalHost().getHostName(); - } catch (UnknownHostException e) { - LOG.error("Exception in WebDriverManager while getWebDriver ", e); - return "localhost"; - } - } - - protected static void shutDown() throws 
Exception { - shutDown(true); - } - - protected static void shutDown(final boolean deleteConfDir) throws Exception { - shutDown(deleteConfDir, false); - } - - protected static void shutDown(final boolean deleteConfDir, - boolean forceShutdownInterpreter) throws Exception { - - if (!WAS_RUNNING && TestUtils.getInstance(Notebook.class) != null) { - // restart interpreter to stop all interpreter processes - List settingList = TestUtils.getInstance(Notebook.class).getInterpreterSettingManager() - .get(); - if (!TestUtils.getInstance(Notebook.class).getConf().isRecoveryEnabled() || forceShutdownInterpreter) { - for (InterpreterSetting setting : settingList) { - TestUtils.getInstance(Notebook.class).getInterpreterSettingManager().restart(setting.getId()); - } - } - if (shiroIni != null) { - FileUtils.deleteQuietly(shiroIni); - } - LOG.info("Terminating Zeppelin Server..."); - executor.shutdown(); - executor.shutdownNow(); - - long s = System.currentTimeMillis(); - boolean started = true; - while (System.currentTimeMillis() - s < 1000 * 60 * 3) { // 3 minutes - Thread.sleep(2000); - started = checkIfServerIsRunning(); - if (started == false) { - break; - } - } - if (started == true) { - throw new RuntimeException("Can not stop Zeppelin server"); - } - - LOG.info("Zeppelin Server is terminated."); - - if (isRunningWithAuth) { - isRunningWithAuth = shiroIni.exists(); - } - - if (deleteConfDir && !TestUtils.getInstance(Notebook.class).getConf().isRecoveryEnabled()) { - // don't delete interpreter.json when recovery is enabled. 
otherwise the interpreter setting - // id will change after zeppelin restart, then we can not recover interpreter process - // properly - FileUtils.deleteDirectory(confDir); - } - TestUtils.clearInstances(); - } - } - - public static boolean checkIfServerIsRunning() { - boolean isRunning; - try (CloseableHttpResponse response = httpGet("/version")) { - isRunning = response.getStatusLine().getStatusCode() == 200; - } catch (IOException e) { - LOG.error("AbstractTestRestApi.checkIfServerIsRunning() fails .. ZeppelinServer is not " + - "running"); - isRunning = false; - } - return isRunning; - } - - public static CloseableHttpResponse httpGet(String path) throws IOException { + public CloseableHttpResponse httpGet(String path) + throws IOException { return httpGet(path, StringUtils.EMPTY, StringUtils.EMPTY); } - public static CloseableHttpResponse httpGet(String path, String user, String pwd) throws IOException { + public CloseableHttpResponse httpGet(String path, String user, String pwd) + throws IOException { return httpGet(path, user, pwd, StringUtils.EMPTY); } - public static CloseableHttpResponse httpGet(String path, String user, String pwd, String cookies) + public CloseableHttpResponse httpGet(String path, String user, String pwd, String cookies) throws IOException { - LOG.info("Connecting to {}", URL + path); - HttpGet httpGet = new HttpGet(URL + path); - httpGet.addHeader("Origin", URL); + LOG.info("Connecting to {}", getUrlToTest(conf) + path); + HttpGet httpGet = new HttpGet(getUrlToTest(conf) + path); + httpGet.addHeader("Origin", getUrlToTest(conf)); if (userAndPasswordAreNotBlank(user, pwd)) { httpGet.setHeader("Cookie", "JSESSIONID=" + getCookie(user, pwd)); } @@ -394,15 +160,16 @@ public static CloseableHttpResponse httpGet(String path, String user, String pwd return response; } - public static CloseableHttpResponse httpDelete(String path) throws IOException { + public CloseableHttpResponse httpDelete(String path) + throws IOException { return 
httpDelete(path, StringUtils.EMPTY, StringUtils.EMPTY); } - public static CloseableHttpResponse httpDelete(String path, String user, String pwd) + public CloseableHttpResponse httpDelete(String path, String user, String pwd) throws IOException { - LOG.info("Connecting to {}", URL + path); - HttpDelete httpDelete = new HttpDelete(URL + path); - httpDelete.addHeader("Origin", URL); + LOG.info("Connecting to {}", getUrlToTest(conf) + path); + HttpDelete httpDelete = new HttpDelete(getUrlToTest(conf) + path); + httpDelete.addHeader("Origin", getUrlToTest(conf)); if (userAndPasswordAreNotBlank(user, pwd)) { httpDelete.setHeader("Cookie", "JSESSIONID=" + getCookie(user, pwd)); } @@ -411,18 +178,19 @@ public static CloseableHttpResponse httpDelete(String path, String user, String return response; } - public static CloseableHttpResponse httpPost(String path, String body) throws IOException { + public CloseableHttpResponse httpPost(String path, String body) + throws IOException { return httpPost(path, body, StringUtils.EMPTY, StringUtils.EMPTY); } - public static CloseableHttpResponse httpPost(String path, String request, String user, String pwd) + public CloseableHttpResponse httpPost(String path, String request, String user, String pwd) throws IOException { - LOG.info("Connecting to {}", URL + path); + LOG.info("Connecting to {}", getUrlToTest(conf) + path); RequestConfig localConfig = RequestConfig.custom() .setCookieSpec(CookieSpecs.IGNORE_COOKIES) .build(); - HttpPost httpPost = new HttpPost(URL + path); + HttpPost httpPost = new HttpPost(getUrlToTest(conf) + path); httpPost.setConfig(localConfig); httpPost.setEntity(new StringEntity(request, ContentType.APPLICATION_JSON)); if (userAndPasswordAreNotBlank(user, pwd)) { @@ -433,15 +201,16 @@ public static CloseableHttpResponse httpPost(String path, String request, String return response; } - public static CloseableHttpResponse httpPut(String path, String body) throws IOException { + public CloseableHttpResponse 
httpPut(String path, String body) + throws IOException { return httpPut(path, body, StringUtils.EMPTY, StringUtils.EMPTY); } - public static CloseableHttpResponse httpPut(String path, String body, String user, String pwd) + public CloseableHttpResponse httpPut(String path, String body, String user, String pwd) throws IOException { - LOG.info("Connecting to {}", URL + path); - HttpPut httpPut = new HttpPut(URL + path); - httpPut.addHeader("Origin", URL); + LOG.info("Connecting to {}", getUrlToTest(conf) + path); + HttpPut httpPut = new HttpPut(getUrlToTest(conf) + path); + httpPut.addHeader("Origin", getUrlToTest(conf)); httpPut.setEntity(new StringEntity(body, ContentType.TEXT_PLAIN)); if (userAndPasswordAreNotBlank(user, pwd)) { httpPut.setHeader("Cookie", "JSESSIONID=" + getCookie(user, pwd)); @@ -451,9 +220,10 @@ public static CloseableHttpResponse httpPut(String path, String body, String use return response; } - private static String getCookie(String user, String password) throws IOException { - HttpPost httpPost = new HttpPost(URL + "/login"); - httpPost.addHeader("Origin", URL); + private String getCookie(String user, String password) + throws IOException { + HttpPost httpPost = new HttpPost(getUrlToTest(conf) + "/login"); + httpPost.addHeader("Origin", getUrlToTest(conf)); ArrayList postParameters = new ArrayList(); postParameters.add(new BasicNameValuePair("password", password)); postParameters.add(new BasicNameValuePair("userName", user)); diff --git a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ConfigurationsRestApiTest.java b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ConfigurationsRestApiTest.java index 6621e241ca8..afdc9070657 100644 --- a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ConfigurationsRestApiTest.java +++ b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ConfigurationsRestApiTest.java @@ -21,8 +21,10 @@ import org.apache.http.client.methods.CloseableHttpResponse; import 
org.apache.http.util.EntityUtils; +import org.apache.zeppelin.MiniZeppelinServer; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -34,41 +36,54 @@ class ConfigurationsRestApiTest extends AbstractTestRestApi { Gson gson = new Gson(); + private static MiniZeppelinServer zep; + @BeforeAll static void init() throws Exception { - AbstractTestRestApi.startUp(ConfigurationsRestApi.class.getSimpleName()); + zep = new MiniZeppelinServer(ConfigurationsRestApi.class.getSimpleName()); + zep.start(); } @AfterAll static void destroy() throws Exception { - AbstractTestRestApi.shutDown(); + zep.destroy(); + } + + @BeforeEach + void setup() { + conf = zep.getZeppelinConfiguration(); } @Test void testGetAll() throws IOException { - CloseableHttpResponse get = httpGet("/configurations/all"); - Map resp = gson.fromJson(EntityUtils.toString(get.getEntity(), StandardCharsets.UTF_8), - new TypeToken>(){}.getType()); - Map body = (Map) resp.get("body"); - assertTrue(body.size() > 0); - // it shouldn't have key/value pair which key contains "password" - for (String key : body.keySet()) { - assertTrue(!key.contains("password")); + try (CloseableHttpResponse get = httpGet("/configurations/all")) { + Map resp = + gson.fromJson(EntityUtils.toString(get.getEntity(), StandardCharsets.UTF_8), + new TypeToken>() { + }.getType()); + Map body = (Map) resp.get("body"); + assertTrue(body.size() > 0); + // it shouldn't have key/value pair which key contains "password" + for (String key : body.keySet()) { + assertTrue(!key.contains("password")); + } } - get.close(); } @Test void testGetViaPrefix() throws IOException { final String prefix = "zeppelin.server"; - CloseableHttpResponse get = httpGet("/configurations/prefix/" + prefix); - Map resp = gson.fromJson(EntityUtils.toString(get.getEntity(), StandardCharsets.UTF_8), - new 
TypeToken>(){}.getType()); - Map body = (Map) resp.get("body"); - assertTrue(body.size() > 0); - for (String key : body.keySet()) { - assertTrue(!key.contains("password") && key.startsWith(prefix)); + try ( + CloseableHttpResponse get = httpGet("/configurations/prefix/" + prefix)) { + Map resp = + gson.fromJson(EntityUtils.toString(get.getEntity(), StandardCharsets.UTF_8), + new TypeToken>() { + }.getType()); + Map body = (Map) resp.get("body"); + assertTrue(body.size() > 0); + for (String key : body.keySet()) { + assertTrue(!key.contains("password") && key.startsWith(prefix)); + } } - get.close(); } } diff --git a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/HeliumRestApiTest.java b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/HeliumRestApiTest.java index ed74641892c..c28b8d5412d 100644 --- a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/HeliumRestApiTest.java +++ b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/HeliumRestApiTest.java @@ -21,8 +21,8 @@ import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.util.EntityUtils; +import org.apache.zeppelin.MiniZeppelinServer; import org.apache.zeppelin.helium.Helium; -import org.apache.zeppelin.utils.TestUtils; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; @@ -47,17 +47,24 @@ class HeliumRestApiTest extends AbstractTestRestApi { private Gson gson = new Gson(); + private static MiniZeppelinServer zepServer; private static Helium helium; @BeforeAll static void init() throws Exception { - AbstractTestRestApi.startUp(HeliumRestApi.class.getSimpleName()); - helium = TestUtils.getInstance(Helium.class); + zepServer = new MiniZeppelinServer(HeliumRestApi.class.getSimpleName()); + zepServer.start(); + helium = zepServer.getServiceLocator().getService(Helium.class); + } + + @BeforeEach + void setup() { + conf = zepServer.getZeppelinConfiguration(); } @AfterAll static void destroy() throws 
Exception { - AbstractTestRestApi.shutDown(); + zepServer.destroy(); } @BeforeEach diff --git a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/InterpreterRestApiTest.java b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/InterpreterRestApiTest.java index d75700112b6..127e88b5a6f 100644 --- a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/InterpreterRestApiTest.java +++ b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/InterpreterRestApiTest.java @@ -24,6 +24,7 @@ import org.apache.commons.lang3.StringUtils; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.util.EntityUtils; +import org.apache.zeppelin.MiniZeppelinServer; import org.apache.zeppelin.interpreter.InterpreterOption; import org.apache.zeppelin.interpreter.InterpreterSetting; import org.apache.zeppelin.notebook.Notebook; @@ -37,6 +38,8 @@ import org.junit.jupiter.api.MethodOrderer; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.TestMethodOrder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.nio.charset.StandardCharsets; @@ -53,21 +56,27 @@ */ @TestMethodOrder(MethodOrderer.MethodName.class) class InterpreterRestApiTest extends AbstractTestRestApi { + private static final Logger LOG = LoggerFactory.getLogger(InterpreterRestApiTest.class); private Gson gson = new Gson(); private AuthenticationInfo anonymous; + private static MiniZeppelinServer zepServer; @BeforeAll static void init() throws Exception { - AbstractTestRestApi.startUp(InterpreterRestApiTest.class.getSimpleName()); + zepServer = new MiniZeppelinServer(InterpreterRestApiTest.class.getSimpleName()); + zepServer.copyBinDir(); + zepServer.addInterpreter("md"); + zepServer.start(); } @AfterAll static void destroy() throws Exception { - AbstractTestRestApi.shutDown(); + zepServer.destroy(); } @BeforeEach void setUp() { + conf = zepServer.getZeppelinConfiguration(); anonymous = new AuthenticationInfo("anonymous"); } diff 
--git a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/KnoxRestApiTest.java b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/KnoxRestApiTest.java index 174f5f08cb8..3d10da31436 100644 --- a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/KnoxRestApiTest.java +++ b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/KnoxRestApiTest.java @@ -20,8 +20,10 @@ import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.util.EntityUtils; +import org.apache.zeppelin.MiniZeppelinServer; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.slf4j.Logger; @@ -43,15 +45,24 @@ public class KnoxRestApiTest extends AbstractTestRestApi { private static final Logger LOG = LoggerFactory.getLogger(KnoxRestApiTest.class); Gson gson = new Gson(); + private static MiniZeppelinServer zepServer; @BeforeAll public static void init() throws Exception { - AbstractTestRestApi.startUpWithKnoxEnable(KnoxRestApiTest.class.getSimpleName()); + zepServer = new MiniZeppelinServer(KnoxRestApiTest.class.getSimpleName()); + zepServer.addConfigFile("shiro.ini", AbstractTestRestApi.ZEPPELIN_SHIRO_KNOX); + zepServer.addConfigFile("knox-sso.pem", AbstractTestRestApi.KNOW_SSO_PEM_CERTIFICATE); + zepServer.start(); + } + + @BeforeEach + void setup() { + conf = zepServer.getZeppelinConfiguration(); } @AfterAll public static void destroy() throws Exception { - AbstractTestRestApi.shutDown(); + zepServer.destroy(); } diff --git a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/NotebookRepoRestApiTest.java b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/NotebookRepoRestApiTest.java index 0057d027105..a388606c29b 100644 --- a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/NotebookRepoRestApiTest.java +++ b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/NotebookRepoRestApiTest.java @@ 
-39,6 +39,7 @@ import java.util.List; import java.util.Map; +import org.apache.zeppelin.MiniZeppelinServer; import org.apache.zeppelin.user.AuthenticationInfo; /** @@ -49,19 +50,23 @@ public class NotebookRepoRestApiTest extends AbstractTestRestApi { Gson gson = new Gson(); AuthenticationInfo anonymous; + private static MiniZeppelinServer zepServer; + @BeforeAll public static void init() throws Exception { - AbstractTestRestApi.startUp(NotebookRepoRestApiTest.class.getSimpleName()); + zepServer = new MiniZeppelinServer(NotebookRepoRestApiTest.class.getSimpleName()); + zepServer.start(); } @AfterAll public static void destroy() throws Exception { - AbstractTestRestApi.shutDown(); + zepServer.destroy(); } @BeforeEach public void setUp() { anonymous = new AuthenticationInfo("anonymous"); + conf = zepServer.getZeppelinConfiguration(); } private List> getListOfReposotiry() throws IOException { @@ -80,13 +85,13 @@ private void updateNotebookRepoWithNewSetting(String payload) throws IOException } @Test - public void thatCanGetNotebookRepositoiesSettings() throws IOException { + void thatCanGetNotebookRepositoiesSettings() throws IOException { List> listOfRepositories = getListOfReposotiry(); assertThat(listOfRepositories.size(), is(not(0))); } @Test - public void reloadRepositories() throws IOException { + void reloadRepositories() throws IOException { CloseableHttpResponse get = httpGet("/notebook-repositories/reload"); int status = get.getStatusLine().getStatusCode(); get.close(); @@ -94,7 +99,7 @@ public void reloadRepositories() throws IOException { } @Test - public void setNewDirectoryForLocalDirectory() throws IOException { + void setNewDirectoryForLocalDirectory() throws IOException { List> listOfRepositories = getListOfReposotiry(); String localVfs = StringUtils.EMPTY; String className = StringUtils.EMPTY; diff --git a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/NotebookRestApiTest.java 
b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/NotebookRestApiTest.java index 7a7912581cc..2f340070ada 100644 --- a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/NotebookRestApiTest.java +++ b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/NotebookRestApiTest.java @@ -24,7 +24,6 @@ import org.apache.zeppelin.notebook.Notebook; import org.apache.zeppelin.notebook.repo.NotebookRepoWithVersionControl; import org.apache.zeppelin.rest.message.ParametersRequest; -import org.apache.zeppelin.socket.NotebookServer; import org.apache.zeppelin.utils.TestUtils; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; @@ -32,6 +31,8 @@ import org.junit.jupiter.api.MethodOrderer; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.TestMethodOrder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.nio.charset.StandardCharsets; @@ -39,6 +40,7 @@ import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.util.EntityUtils; +import org.apache.zeppelin.MiniZeppelinServer; import org.apache.zeppelin.interpreter.InterpreterResult; import org.apache.zeppelin.notebook.Note; import org.apache.zeppelin.notebook.Paragraph; @@ -58,22 +60,31 @@ */ @TestMethodOrder(MethodOrderer.MethodName.class) class NotebookRestApiTest extends AbstractTestRestApi { + private static final Logger LOG = LoggerFactory.getLogger(NotebookRestApiTest.class); Gson gson = new Gson(); AuthenticationInfo anonymous; + private static MiniZeppelinServer zepServer; + @BeforeAll static void init() throws Exception { - startUp(NotebookRestApiTest.class.getSimpleName()); - TestUtils.getInstance(Notebook.class).setParagraphJobListener(NotebookServer.getInstance()); + zepServer = new MiniZeppelinServer(NotebookRestApiTest.class.getSimpleName()); + zepServer.copyLogProperties(); + zepServer.addInterpreter("md"); + zepServer.addInterpreter("python"); + zepServer.addInterpreter("sh"); + 
zepServer.copyBinDir(); + zepServer.start(); } @AfterAll static void destroy() throws Exception { - AbstractTestRestApi.shutDown(); + zepServer.destroy(); } @BeforeEach void setUp() { + conf = zepServer.getZeppelinConfiguration(); anonymous = new AuthenticationInfo("anonymous"); } @@ -413,7 +424,7 @@ void testRunParagraphJob() throws Exception { assertEquals(Job.Status.FINISHED, p.getStatus()); // run non-blank paragraph - p.setText("test"); + p.setText("%python \n print(\"hello"); post = httpPost("/notebook/job/" + note1Id + "/" + p.getId(), ""); assertThat(post, isAllowed()); resp = gson.fromJson(EntityUtils.toString(post.getEntity(), StandardCharsets.UTF_8), @@ -517,7 +528,7 @@ void testRunParagraphSynchronously() throws IOException { assertEquals("ERROR", stringMap.get("code")); List interpreterResults = (List) stringMap.get("msg"); assertTrue(interpreterResults.get(0).get("data").toString() - .contains("invalid_cmd: command not found"), interpreterResults.get(0).toString()); + .contains("invalid_cmd: "), interpreterResults.get(0).toString()); post.close(); assertNotEquals(Job.Status.READY, p.getStatus()); @@ -1158,8 +1169,8 @@ void testRunWithServerRestart() throws Exception { put.close(); // restart server (while keeping interpreter configuration) - AbstractTestRestApi.shutDown(false); - startUp(NotebookRestApiTest.class.getSimpleName(), false); + zepServer.shutDown(false); + zepServer.start(); CloseableHttpResponse post2 = httpPost("/notebook/job/" + note1Id + "?blocking=true", ""); assertThat(post2, isAllowed()); diff --git a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/NotebookSecurityRestApiTest.java b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/NotebookSecurityRestApiTest.java index 9b000997c8b..564711b01ab 100644 --- a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/NotebookSecurityRestApiTest.java +++ b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/NotebookSecurityRestApiTest.java @@ -26,11 +26,13 @@ import 
org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.util.EntityUtils; +import org.apache.zeppelin.MiniZeppelinServer; import org.apache.zeppelin.notebook.Notebook; import org.apache.zeppelin.utils.TestUtils; import org.hamcrest.Matcher; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import static org.hamcrest.MatcherAssert.assertThat; @@ -41,15 +43,22 @@ public class NotebookSecurityRestApiTest extends AbstractTestRestApi { Gson gson = new Gson(); + private static MiniZeppelinServer zepServer; + @BeforeAll public static void init() throws Exception { - AbstractTestRestApi.startUpWithAuthenticationEnable( - NotebookSecurityRestApiTest.class.getSimpleName()); + zepServer = new MiniZeppelinServer(NotebookSecurityRestApiTest.class.getSimpleName()); + zepServer.addConfigFile("shiro.ini", ZEPPELIN_SHIRO); + zepServer.start(); } - @AfterAll public static void destroy() throws Exception { - AbstractTestRestApi.shutDown(); + zepServer.destroy(); + } + + @BeforeEach + void setup() { + conf = zepServer.getZeppelinConfiguration(); } @Test diff --git a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/SecurityRestApiTest.java b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/SecurityRestApiTest.java index 972a278f7d3..103e5fca8f0 100644 --- a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/SecurityRestApiTest.java +++ b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/SecurityRestApiTest.java @@ -21,9 +21,11 @@ import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.util.EntityUtils; +import org.apache.zeppelin.MiniZeppelinServer; import org.hamcrest.CoreMatchers; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import java.io.IOException; @@ -35,15 +37,23 @@ class SecurityRestApiTest 
extends AbstractTestRestApi { Gson gson = new Gson(); + private static MiniZeppelinServer zepServer; @BeforeAll static void init() throws Exception { - AbstractTestRestApi.startUpWithAuthenticationEnable(SecurityRestApiTest.class.getSimpleName()); + zepServer = new MiniZeppelinServer(SecurityRestApiTest.class.getSimpleName()); + zepServer.addConfigFile("shiro.ini", ZEPPELIN_SHIRO); + zepServer.start(); } @AfterAll static void destroy() throws Exception { - AbstractTestRestApi.shutDown(); + zepServer.destroy(); + } + + @BeforeEach + void setup() { + conf = zepServer.getZeppelinConfiguration(); } @Test diff --git a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/SessionRestApiTest.java b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/SessionRestApiTest.java index dfca0202dd9..11543f8967e 100644 --- a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/SessionRestApiTest.java +++ b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/SessionRestApiTest.java @@ -21,10 +21,12 @@ import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.util.EntityUtils; +import org.apache.zeppelin.MiniZeppelinServer; import org.apache.zeppelin.common.SessionInfo; import org.apache.zeppelin.server.JsonResponse; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -36,14 +38,23 @@ class SessionRestApiTest extends AbstractTestRestApi { Gson gson = new Gson(); + private static MiniZeppelinServer zepServer; + @BeforeAll - static void init() throws Exception { - AbstractTestRestApi.startUp(SessionRestApi.class.getSimpleName()); + public static void init() throws Exception { + zepServer = new MiniZeppelinServer(SessionRestApiTest.class.getSimpleName()); + zepServer.addInterpreter("md"); + zepServer.start(); } @AfterAll - static void destroy() throws Exception { - 
AbstractTestRestApi.shutDown(); + public static void destroy() throws Exception { + zepServer.destroy(); + } + + @BeforeEach + void setup() { + conf = zepServer.getZeppelinConfiguration(); } @Test diff --git a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinRestApiTest.java b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinRestApiTest.java index efcb686f9d2..8abb03ce8fc 100644 --- a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinRestApiTest.java +++ b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinRestApiTest.java @@ -25,6 +25,7 @@ import org.apache.zeppelin.notebook.AuthorizationService; import org.apache.zeppelin.notebook.Notebook; import org.apache.zeppelin.rest.message.NoteJobStatus; +import org.apache.zeppelin.test.DownloadUtils; import org.apache.zeppelin.utils.TestUtils; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; @@ -33,6 +34,8 @@ import org.junit.jupiter.api.MethodOrderer; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.TestMethodOrder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.nio.charset.StandardCharsets; @@ -43,7 +46,9 @@ import java.util.List; import java.util.Map; +import org.apache.zeppelin.MiniZeppelinServer; import org.apache.zeppelin.conf.ZeppelinConfiguration.ConfVars; +import org.apache.zeppelin.integration.TestHelper; import org.apache.zeppelin.notebook.Note; import org.apache.zeppelin.notebook.Paragraph; import org.apache.zeppelin.user.AuthenticationInfo; @@ -60,21 +65,31 @@ */ @TestMethodOrder(MethodOrderer.MethodName.class) class ZeppelinRestApiTest extends AbstractTestRestApi { + private static final Logger LOG = LoggerFactory.getLogger(ZeppelinRestApiTest.class); Gson gson = new Gson(); AuthenticationInfo anonymous; + private static MiniZeppelinServer zepServer; @BeforeAll - static void init() throws Exception { - 
AbstractTestRestApi.startUp(ZeppelinRestApiTest.class.getSimpleName()); + public static void init() throws Exception { + String sparkHome = DownloadUtils.downloadSpark(); + zepServer = new MiniZeppelinServer(ZeppelinRestApiTest.class.getSimpleName()); + zepServer.addInterpreter("md"); + zepServer.addInterpreter("sh"); + zepServer.addInterpreter("spark"); + zepServer.copyBinDir(); + zepServer.start(); + TestHelper.configureSparkInterpreter(zepServer, sparkHome); } @AfterAll - static void destroy() throws Exception { - AbstractTestRestApi.shutDown(); + public static void destroy() throws Exception { + zepServer.destroy(); } @BeforeEach void setUp() { + conf = zepServer.getZeppelinConfiguration(); anonymous = new AuthenticationInfo("anonymous"); } diff --git a/zeppelin-server/src/test/java/org/apache/zeppelin/security/DirAccessTest.java b/zeppelin-server/src/test/java/org/apache/zeppelin/security/DirAccessTest.java index 58d061b4eff..56248bf3d8c 100644 --- a/zeppelin-server/src/test/java/org/apache/zeppelin/security/DirAccessTest.java +++ b/zeppelin-server/src/test/java/org/apache/zeppelin/security/DirAccessTest.java @@ -20,50 +20,61 @@ import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpGet; import org.apache.http.util.EntityUtils; +import org.apache.zeppelin.MiniZeppelinServer; import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.rest.AbstractTestRestApi; import org.junit.jupiter.api.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.junit.jupiter.api.Assertions.assertEquals; import java.nio.charset.StandardCharsets; class DirAccessTest extends AbstractTestRestApi { + private static final Logger LOG = LoggerFactory.getLogger(DirAccessTest.class); + + private MiniZeppelinServer zepServer; + @Test void testDirAccessForbidden() throws Exception { - synchronized (this) { - try { - 
System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_SERVER_DEFAULT_DIR_ALLOWED - .getVarName(), "false"); - AbstractTestRestApi.startUp(DirAccessTest.class.getSimpleName()); - CloseableHttpResponse getMethod = getHttpClient().execute(new HttpGet(getUrlToTest() + "/app/")); - LOG.info("Invoke getMethod - " + EntityUtils.toString(getMethod.getEntity(), StandardCharsets.UTF_8)); - - assertEquals(HttpStatus.SC_FORBIDDEN, getMethod.getStatusLine().getStatusCode()); - } finally { - AbstractTestRestApi.shutDown(); - } + try { + zepServer = new MiniZeppelinServer(DirAccessTest.class.getSimpleName()); + conf = zepServer.getZeppelinConfiguration(); + conf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_SERVER_DEFAULT_DIR_ALLOWED + .getVarName(), "false"); + zepServer.start(); + CloseableHttpResponse getMethod = + getHttpClient().execute(new HttpGet(getUrlToTest() + "/app")); + LOG.info("Invoke getMethod - " + + EntityUtils.toString(getMethod.getEntity(), StandardCharsets.UTF_8)); + LOG.info("server port {}", conf.getServerPort()); + assertEquals(HttpStatus.SC_FORBIDDEN, getMethod.getStatusLine().getStatusCode()); + } finally { + zepServer.destroy(); } } @Test void testDirAccessOk() throws Exception { - synchronized (this) { - try { - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_SERVER_DEFAULT_DIR_ALLOWED + try { + zepServer = new MiniZeppelinServer(DirAccessTest.class.getSimpleName()); + conf = zepServer.getZeppelinConfiguration(); + conf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_SERVER_DEFAULT_DIR_ALLOWED .getVarName(), "true"); - AbstractTestRestApi.startUp(DirAccessTest.class.getSimpleName()); - CloseableHttpResponse getMethod = getHttpClient().execute(new HttpGet(getUrlToTest() + "/app/")); - LOG.info("Invoke getMethod - " + EntityUtils.toString(getMethod.getEntity(), StandardCharsets.UTF_8)); - assertEquals(HttpStatus.SC_OK, getMethod.getStatusLine().getStatusCode()); - } finally { - AbstractTestRestApi.shutDown(); - } + 
zepServer.start(); + CloseableHttpResponse getMethod = + getHttpClient().execute(new HttpGet(getUrlToTest() + "/app")); + LOG.info("Invoke getMethod - " + + EntityUtils.toString(getMethod.getEntity(), StandardCharsets.UTF_8)); + assertEquals(HttpStatus.SC_OK, getMethod.getStatusLine().getStatusCode()); + } finally { + zepServer.destroy(); } } - protected static String getUrlToTest() { - String url = "http://localhost:8080"; + protected String getUrlToTest() { + String url = "http://localhost:" + conf.getServerPort(); if (System.getProperty("url") != null) { url = System.getProperty("url"); } diff --git a/zeppelin-server/src/test/java/org/apache/zeppelin/server/CorsFilterTest.java b/zeppelin-server/src/test/java/org/apache/zeppelin/server/CorsFilterTest.java index 0e8858d269e..15c95904a07 100644 --- a/zeppelin-server/src/test/java/org/apache/zeppelin/server/CorsFilterTest.java +++ b/zeppelin-server/src/test/java/org/apache/zeppelin/server/CorsFilterTest.java @@ -23,6 +23,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; +import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.junit.jupiter.api.Test; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -44,7 +45,7 @@ class CorsFilterTest { @Test @SuppressWarnings("rawtypes") void validCorsFilterTest() throws IOException, ServletException { - CorsFilter filter = new CorsFilter(); + CorsFilter filter = new CorsFilter(ZeppelinConfiguration.load()); HttpServletResponse mockResponse = mock(HttpServletResponse.class); FilterChain mockedFilterChain = mock(FilterChain.class); HttpServletRequest mockRequest = mock(HttpServletRequest.class); @@ -69,7 +70,7 @@ public Object answer(InvocationOnMock invocationOnMock) throws Throwable { @Test @SuppressWarnings("rawtypes") void invalidCorsFilterTest() throws IOException, ServletException { - CorsFilter filter = new CorsFilter(); + CorsFilter filter = new CorsFilter(ZeppelinConfiguration.load()); 
HttpServletResponse mockResponse = mock(HttpServletResponse.class); FilterChain mockedFilterChain = mock(FilterChain.class); HttpServletRequest mockRequest = mock(HttpServletRequest.class); diff --git a/zeppelin-server/src/test/java/org/apache/zeppelin/service/ConfigurationServiceTest.java b/zeppelin-server/src/test/java/org/apache/zeppelin/service/ConfigurationServiceTest.java index 71ca77372ad..a8443e605c8 100644 --- a/zeppelin-server/src/test/java/org/apache/zeppelin/service/ConfigurationServiceTest.java +++ b/zeppelin-server/src/test/java/org/apache/zeppelin/service/ConfigurationServiceTest.java @@ -18,12 +18,14 @@ package org.apache.zeppelin.service; +import org.apache.zeppelin.MiniZeppelinServer; import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.rest.AbstractTestRestApi; import org.apache.zeppelin.user.AuthenticationInfo; import org.apache.zeppelin.utils.TestUtils; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import java.io.IOException; @@ -45,17 +47,26 @@ class ConfigurationServiceTest extends AbstractTestRestApi { private ServiceCallback callback = mock(ServiceCallback.class); + private static MiniZeppelinServer zepServer; + @BeforeAll - static void setUp() throws Exception { - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_HELIUM_REGISTRY.getVarName(), + public static void init() throws Exception { + zepServer = new MiniZeppelinServer(ConfigurationServiceTest.class.getSimpleName()); + zepServer.getZeppelinConfiguration().setProperty( + ZeppelinConfiguration.ConfVars.ZEPPELIN_HELIUM_REGISTRY.getVarName(), "helium"); - AbstractTestRestApi.startUp(ConfigurationServiceTest.class.getSimpleName()); - configurationService = TestUtils.getInstance(ConfigurationService.class); + zepServer.start(); + configurationService = zepServer.getServiceLocator().getService(ConfigurationService.class); } @AfterAll - static 
void destroy() throws Exception { - AbstractTestRestApi.shutDown(); + public static void destroy() throws Exception { + zepServer.destroy(); + } + + @BeforeEach + void setUp() { + conf = zepServer.getZeppelinConfiguration(); } @Test diff --git a/zeppelin-server/src/test/java/org/apache/zeppelin/service/InterpreterServiceTest.java b/zeppelin-server/src/test/java/org/apache/zeppelin/service/InterpreterServiceTest.java index 0f2e76d89fe..8e2804ead3e 100644 --- a/zeppelin-server/src/test/java/org/apache/zeppelin/service/InterpreterServiceTest.java +++ b/zeppelin-server/src/test/java/org/apache/zeppelin/service/InterpreterServiceTest.java @@ -101,7 +101,8 @@ void downloadInterpreter() throws IOException { String artifactName = "junit:junit:4.11"; Path specificInterpreterPath = Files.createDirectory(Paths.get(interpreterDir.toString(), interpreterName)); - DependencyResolver dependencyResolver = new DependencyResolver(localRepoDir.toString()); + DependencyResolver dependencyResolver = + new DependencyResolver(localRepoDir.toString(), ZeppelinConfiguration.load()); doNothing().when(mockInterpreterSettingManager).refreshInterpreterTemplates(); diff --git a/zeppelin-server/src/test/java/org/apache/zeppelin/service/NotebookServiceTest.java b/zeppelin-server/src/test/java/org/apache/zeppelin/service/NotebookServiceTest.java index 01e81c537c5..8ba1087a9f5 100644 --- a/zeppelin-server/src/test/java/org/apache/zeppelin/service/NotebookServiceTest.java +++ b/zeppelin-server/src/test/java/org/apache/zeppelin/service/NotebookServiceTest.java @@ -54,9 +54,11 @@ import org.apache.zeppelin.interpreter.InterpreterSettingManager; import org.apache.zeppelin.interpreter.ManagedInterpreterGroup; import org.apache.zeppelin.notebook.AuthorizationService; +import org.apache.zeppelin.notebook.GsonNoteParser; import org.apache.zeppelin.notebook.Note; import org.apache.zeppelin.notebook.NoteInfo; import org.apache.zeppelin.notebook.NoteManager; +import org.apache.zeppelin.notebook.NoteParser; 
import org.apache.zeppelin.notebook.Notebook; import org.apache.zeppelin.notebook.Paragraph; import org.apache.zeppelin.notebook.exception.NotePathAlreadyExistsException; @@ -65,6 +67,7 @@ import org.apache.zeppelin.notebook.scheduler.QuartzSchedulerService; import org.apache.zeppelin.search.LuceneSearch; import org.apache.zeppelin.search.SearchService; +import org.apache.zeppelin.storage.ConfigStorage; import org.apache.zeppelin.user.AuthenticationInfo; import org.apache.zeppelin.user.Credentials; import org.junit.jupiter.api.AfterEach; @@ -86,17 +89,19 @@ class NotebookServiceTest { private ServiceCallback callback = mock(ServiceCallback.class); - private Gson gson = new Gson(); + private NoteParser noteParser; @BeforeEach void setUp() throws Exception { notebookDir = Files.createTempDirectory("notebookDir").toAbsolutePath().toFile(); - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_DIR.getVarName(), - notebookDir.getAbsolutePath()); - ZeppelinConfiguration zeppelinConfiguration = ZeppelinConfiguration.create(); + ZeppelinConfiguration zConf = ZeppelinConfiguration.load(); + zConf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_DIR.getVarName(), + notebookDir.getAbsolutePath()); + noteParser = new GsonNoteParser(zConf); + ConfigStorage storage = ConfigStorage.createConfigStorage(zConf); NotebookRepo notebookRepo = new VFSNotebookRepo(); - notebookRepo.init(zeppelinConfiguration); + notebookRepo.init(zConf, noteParser); InterpreterSettingManager mockInterpreterSettingManager = mock(InterpreterSettingManager.class); InterpreterFactory mockInterpreterFactory = mock(InterpreterFactory.class); @@ -116,11 +121,12 @@ void setUp() throws Exception { when(mockInterpreterGroup.getInterpreterSetting()).thenReturn(mockInterpreterSetting); when(mockInterpreterSetting.getStatus()).thenReturn(InterpreterSetting.Status.READY); Credentials credentials = new Credentials(); - NoteManager noteManager = new NoteManager(notebookRepo, 
zeppelinConfiguration); - AuthorizationService authorizationService = new AuthorizationService(noteManager, zeppelinConfiguration); + NoteManager noteManager = new NoteManager(notebookRepo, zConf); + AuthorizationService authorizationService = + new AuthorizationService(noteManager, zConf, storage); notebook = new Notebook( - zeppelinConfiguration, + zConf, authorizationService, notebookRepo, noteManager, @@ -128,13 +134,13 @@ void setUp() throws Exception { mockInterpreterSettingManager, credentials, null); - searchService = new LuceneSearch(zeppelinConfiguration, notebook); - QuartzSchedulerService schedulerService = new QuartzSchedulerService(zeppelinConfiguration, notebook); + searchService = new LuceneSearch(zConf, notebook); + QuartzSchedulerService schedulerService = new QuartzSchedulerService(zConf, notebook); notebook.initNotebook(); notebook.waitForFinishInit(1, TimeUnit.MINUTES); notebookService = new NotebookService( - notebook, authorizationService, zeppelinConfiguration, schedulerService); + notebook, authorizationService, zConf, schedulerService); String interpreterName = "test"; when(mockInterpreterSetting.getName()).thenReturn(interpreterName); diff --git a/zeppelin-server/src/test/java/org/apache/zeppelin/socket/ConnectionManagerTest.java b/zeppelin-server/src/test/java/org/apache/zeppelin/socket/ConnectionManagerTest.java index d3779c9bc6a..e574fd4e17c 100644 --- a/zeppelin-server/src/test/java/org/apache/zeppelin/socket/ConnectionManagerTest.java +++ b/zeppelin-server/src/test/java/org/apache/zeppelin/socket/ConnectionManagerTest.java @@ -18,6 +18,8 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.Mockito.mock; + +import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.notebook.AuthorizationService; import org.junit.jupiter.api.Test; @@ -27,7 +29,7 @@ class ConnectionManagerTest { void checkMapGrow() { AuthorizationService authService = mock(AuthorizationService.class); - 
ConnectionManager manager = new ConnectionManager(authService); + ConnectionManager manager = new ConnectionManager(authService, ZeppelinConfiguration.load()); NotebookSocket socket = mock(NotebookSocket.class); manager.addNoteConnection("test", socket); assertEquals(1, manager.noteSocketMap.size()); @@ -48,7 +50,7 @@ void checkMapGrow() { void checkMapGrowRemoveAll() { AuthorizationService authService = mock(AuthorizationService.class); - ConnectionManager manager = new ConnectionManager(authService); + ConnectionManager manager = new ConnectionManager(authService, ZeppelinConfiguration.load()); NotebookSocket socket = mock(NotebookSocket.class); manager.addNoteConnection("test", socket); assertEquals(1, manager.noteSocketMap.size()); diff --git a/zeppelin-server/src/test/java/org/apache/zeppelin/socket/NotebookServerTest.java b/zeppelin-server/src/test/java/org/apache/zeppelin/socket/NotebookServerTest.java index f8a90ebb12f..1a8a032e2a6 100644 --- a/zeppelin-server/src/test/java/org/apache/zeppelin/socket/NotebookServerTest.java +++ b/zeppelin-server/src/test/java/org/apache/zeppelin/socket/NotebookServerTest.java @@ -31,19 +31,20 @@ import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import static org.awaitility.Awaitility.await; import java.io.IOException; import java.net.InetAddress; import java.net.UnknownHostException; import java.nio.charset.StandardCharsets; +import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; - -import javax.servlet.http.HttpServletRequest; +import java.util.concurrent.Callable; import org.apache.commons.io.IOUtils; import org.apache.thrift.TException; @@ -62,6 +63,7 @@ import org.apache.zeppelin.notebook.Paragraph; import org.apache.zeppelin.notebook.Notebook.NoteProcessor; import org.apache.zeppelin.notebook.repo.NotebookRepoWithVersionControl; 
+import org.apache.zeppelin.MiniZeppelinServer; import org.apache.zeppelin.common.Message; import org.apache.zeppelin.common.Message.OP; import org.apache.zeppelin.rest.AbstractTestRestApi; @@ -70,40 +72,48 @@ import org.apache.zeppelin.service.NotebookService; import org.apache.zeppelin.service.ServiceContext; import org.apache.zeppelin.user.AuthenticationInfo; -import org.apache.zeppelin.utils.TestUtils; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** Basic REST API tests for notebookServer. */ class NotebookServerTest extends AbstractTestRestApi { + private static final Logger LOG = LoggerFactory.getLogger(NotebookServerTest.class); private static Notebook notebook; private static NotebookServer notebookServer; private static NotebookService notebookService; private static AuthorizationService authorizationService; - private HttpServletRequest mockRequest; private AuthenticationInfo anonymous; + private static MiniZeppelinServer zepServer; + @BeforeAll - static void init() throws Exception { - AbstractTestRestApi.startUp(NotebookServerTest.class.getSimpleName()); - notebook = TestUtils.getInstance(Notebook.class); - authorizationService = TestUtils.getInstance(AuthorizationService.class); - notebookServer = TestUtils.getInstance(NotebookServer.class); - notebookService = TestUtils.getInstance(NotebookService.class); + public static void init() throws Exception { + zepServer = new MiniZeppelinServer(NotebookServerTest.class.getSimpleName()); + zepServer.addInterpreter("md"); + zepServer.addInterpreter("angular"); + zepServer.addInterpreter("spark"); + zepServer.copyBinDir(); + zepServer.start(); + notebook = zepServer.getServiceLocator().getService(Notebook.class); + authorizationService = zepServer.getServiceLocator().getService(AuthorizationService.class); + 
notebookServer = zepServer.getServiceLocator().getService(NotebookServer.class); + notebookService = zepServer.getServiceLocator().getService(NotebookService.class); } @AfterAll - static void destroy() throws Exception { - AbstractTestRestApi.shutDown(); + public static void destroy() throws Exception { + zepServer.destroy(); } @BeforeEach void setUp() { - mockRequest = mock(HttpServletRequest.class); + conf = zepServer.getZeppelinConfiguration(); anonymous = AuthenticationInfo.ANONYMOUS; } @@ -121,7 +131,7 @@ void checkInvalidOrigin() { @Test void testCollaborativeEditing() throws IOException { - if (!ZeppelinConfiguration.create().isZeppelinNotebookCollaborativeModeEnable()) { + if (!zepServer.getZeppelinConfiguration().isZeppelinNotebookCollaborativeModeEnable()) { return; } NotebookSocket sock1 = createWebSocket(); @@ -406,15 +416,8 @@ void testLoadAngularObjectFromNote() throws IOException, InterruptedException { // wait for paragraph finished - Status status = notebook.processNote(note1Id, note1-> note1.getParagraph(p1Id).getStatus()); - while (true) { - System.out.println("loop"); - if (status == Job.Status.FINISHED) { - break; - } - Thread.sleep(100); - status = notebook.processNote(note1Id, note1-> note1.getParagraph(p1Id).getStatus()); - } + await().atMost(Duration.ofMinutes(5)).pollInterval(Duration.ofMillis(100)) + .until(checkParagraphStatus(note1Id, p1Id, Status.FINISHED)); // sleep for 1 second to make sure job running thread finish to fire event. 
See ZEPPELIN-3277 Thread.sleep(1000); @@ -452,6 +455,14 @@ void testLoadAngularObjectFromNote() throws IOException, InterruptedException { } } + private Callable checkParagraphStatus(String noteId, String paragraphId, Status status) { + return () -> { + return status + .equals(notebook.processNote(noteId, note -> note.getParagraph(paragraphId).getStatus())); + }; + + } + @Test void testImportNotebook() throws IOException { String msg = "{\"op\":\"IMPORT_NOTE\",\"data\":" + @@ -666,7 +677,7 @@ void testCreateNoteWithDefaultInterpreterId() throws IOException { .put("defaultInterpreterId", defaultInterpreterId).toJson()); int sendCount = 2; - if (ZeppelinConfiguration.create().isZeppelinNotebookCollaborativeModeEnable()) { + if (zepServer.getZeppelinConfiguration().isZeppelinNotebookCollaborativeModeEnable()) { sendCount++; } // expect the events are broadcasted properly diff --git a/zeppelin-server/src/test/java/org/apache/zeppelin/utils/CorsUtilsTest.java b/zeppelin-server/src/test/java/org/apache/zeppelin/utils/CorsUtilsTest.java index aca30686895..3d6718f2997 100644 --- a/zeppelin-server/src/test/java/org/apache/zeppelin/utils/CorsUtilsTest.java +++ b/zeppelin-server/src/test/java/org/apache/zeppelin/utils/CorsUtilsTest.java @@ -24,36 +24,31 @@ import java.net.URISyntaxException; import java.net.UnknownHostException; import org.apache.zeppelin.conf.ZeppelinConfiguration; -import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Test; class CorsUtilsTest { - @AfterEach - public void cleanup() { - ZeppelinConfiguration.reset(); - } @Test void isInvalid() throws URISyntaxException, UnknownHostException { - assertFalse(CorsUtils.isValidOrigin("http://127.0.1.1", ZeppelinConfiguration.create())); + assertFalse(CorsUtils.isValidOrigin("http://127.0.1.1", ZeppelinConfiguration.load())); } @Test void isInvalidFromConfig() throws URISyntaxException, UnknownHostException { assertFalse(CorsUtils.isValidOrigin("http://otherinvalidhost.com", - 
ZeppelinConfiguration.create("zeppelin-site.xml"))); + ZeppelinConfiguration.load("zeppelin-site.xml"))); } @Test void isLocalhost() throws URISyntaxException, UnknownHostException { - assertTrue(CorsUtils.isValidOrigin("http://localhost", ZeppelinConfiguration.create())); + assertTrue(CorsUtils.isValidOrigin("http://localhost", ZeppelinConfiguration.load())); } @Test void isLocalMachine() throws URISyntaxException, UnknownHostException { String origin = "http://" + InetAddress.getLocalHost().getHostName(); - assertTrue(CorsUtils.isValidOrigin(origin, ZeppelinConfiguration.create()), + assertTrue(CorsUtils.isValidOrigin(origin, ZeppelinConfiguration.load()), "Origin " + origin + " is not allowed. Please check your hostname."); } @@ -61,41 +56,41 @@ void isLocalMachine() throws URISyntaxException, UnknownHostException { void isValidFromConfig() throws URISyntaxException, UnknownHostException { assertTrue(CorsUtils.isValidOrigin("http://otherhost.com", - ZeppelinConfiguration.create("zeppelin-site.xml"))); + ZeppelinConfiguration.load("zeppelin-site.xml"))); } @Test void isValidFromStar() throws URISyntaxException, UnknownHostException { assertTrue(CorsUtils.isValidOrigin("http://anyhost.com", - ZeppelinConfiguration.create("zeppelin-site-star.xml"))); + ZeppelinConfiguration.load("zeppelin-site-star.xml"))); } @Test void nullOrigin() throws URISyntaxException, UnknownHostException { assertFalse(CorsUtils.isValidOrigin(null, - ZeppelinConfiguration.create("zeppelin-site.xml"))); + ZeppelinConfiguration.load("zeppelin-site.xml"))); } @Test void nullOriginWithStar() throws URISyntaxException, UnknownHostException { assertTrue(CorsUtils.isValidOrigin(null, - ZeppelinConfiguration.create("zeppelin-site-star.xml"))); + ZeppelinConfiguration.load("zeppelin-site-star.xml"))); } @Test void emptyOrigin() throws URISyntaxException, UnknownHostException { assertFalse(CorsUtils.isValidOrigin("", - ZeppelinConfiguration.create("zeppelin-site.xml"))); + 
ZeppelinConfiguration.load("zeppelin-site.xml"))); } @Test void notAURIOrigin() throws URISyntaxException, UnknownHostException { assertFalse(CorsUtils.isValidOrigin("test123", - ZeppelinConfiguration.create("zeppelin-site.xml"))); + ZeppelinConfiguration.load("zeppelin-site.xml"))); } } diff --git a/zeppelin-test/pom.xml b/zeppelin-test/pom.xml new file mode 100644 index 00000000000..d3c8cdb2e15 --- /dev/null +++ b/zeppelin-test/pom.xml @@ -0,0 +1,73 @@ + + + + + 4.0.0 + + org.apache.zeppelin + zeppelin + 0.11.2.3.3.6.0-1 + + zeppelin-test + Zeppelin: Test + Zeppelin test code used in other modules + + 0.9.5 + + jar + + + org.apache.commons + commons-compress + ${commons.compress.version} + + + commons-io + commons-io + + + me.tongfei + progressbar + ${progressbar.version} + + + org.slf4j + slf4j-api + + + + org.junit.jupiter + junit-jupiter-engine + test + + + org.apache.logging.log4j + log4j-slf4j-impl + ${log4j2.version} + test + + + + + + org.apache.maven.plugins + maven-jar-plugin + + + + diff --git a/zeppelin-test/src/main/java/org/apache/zeppelin/test/DownloadRequest.java b/zeppelin-test/src/main/java/org/apache/zeppelin/test/DownloadRequest.java new file mode 100644 index 00000000000..50126acd0b4 --- /dev/null +++ b/zeppelin-test/src/main/java/org/apache/zeppelin/test/DownloadRequest.java @@ -0,0 +1,77 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.zeppelin.test; + +import java.net.MalformedURLException; +import java.net.URL; +import java.util.Optional; + +public class DownloadRequest { + private final URL url; + private final Optional alternativeUrl; + private final int retries; + + public static final int DEFAULT_RETRIES = 3; + + public DownloadRequest(String url, int retries) throws MalformedURLException { + this(url, null, retries); + } + + public DownloadRequest(String url, String alternativeUrl) throws MalformedURLException { + this(url, alternativeUrl, DEFAULT_RETRIES); + } + + public DownloadRequest(String url, String alternativeUrl, int retries) + throws MalformedURLException { + if (alternativeUrl != null) { + this.url = new URL(url); + this.alternativeUrl = Optional.of(new URL(alternativeUrl)); + this.retries = retries; + } else { + this.url = new URL(url); + this.alternativeUrl = Optional.empty(); + this.retries = retries; + } + } + + public DownloadRequest(URL url, int retries) { + this(url, null, retries); + } + + public DownloadRequest(URL url, URL alternativeUrl) { + this(url, alternativeUrl, DEFAULT_RETRIES); + } + + public DownloadRequest(URL url, URL alternativeUrl, int retries) { + this.url = url; + this.alternativeUrl = Optional.of(alternativeUrl); + this.retries = retries; + } + + public URL getUrl() { + return url; + } + + public Optional getAlternativeUrl() { + return alternativeUrl; + } + + public int getRetries() { + return retries; + } +} diff --git a/zeppelin-test/src/main/java/org/apache/zeppelin/test/DownloadUtils.java 
b/zeppelin-test/src/main/java/org/apache/zeppelin/test/DownloadUtils.java new file mode 100644 index 00000000000..37332b6c224 --- /dev/null +++ b/zeppelin-test/src/main/java/org/apache/zeppelin/test/DownloadUtils.java @@ -0,0 +1,629 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.zeppelin.test; + +import org.apache.commons.compress.archivers.ArchiveEntry; +import org.apache.commons.compress.archivers.ArchiveInputStream; +import org.apache.commons.compress.archivers.tar.TarArchiveEntry; +import org.apache.commons.compress.archivers.tar.TarArchiveInputStream; +import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream; +import org.apache.commons.io.FileUtils; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import me.tongfei.progressbar.DelegatingProgressBarConsumer; +import me.tongfei.progressbar.ProgressBar; +import me.tongfei.progressbar.ProgressBarBuilder; +import me.tongfei.progressbar.ProgressBarStyle; + +import java.io.BufferedInputStream; +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.net.HttpURLConnection; +import java.net.MalformedURLException; +import java.net.URL; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.StandardCopyOption; +import java.util.Optional; +import java.util.zip.ZipEntry; +import java.util.zip.ZipInputStream; + +/** + * Utility class for downloading spark/flink/livy. This is used for spark/flink integration test. 
+ */ +public class DownloadUtils { + private static final Logger LOGGER = LoggerFactory.getLogger(DownloadUtils.class); + + public static final String APACHE_MIRROR_ENV_KEY = "APACHE_MIRROR"; + public static final String PROGRESS_BAR_UPDATE_INTERVAL_ENV_KEY = "PROGRESS_BAR_UPDATE_INTERVAL"; + + private static final String MIRROR_URL; + private static final String ARCHIVE_URL = "https://archive.apache.org/dist/"; + private static final int PROGRESS_BAR_UPDATE_INTERVAL; + + private static String downloadFolder = System.getProperty("user.home") + "/.cache"; + public static final String DEFAULT_SPARK_VERSION = "3.4.2"; + public static final String DEFAULT_SPARK_HADOOP_VERSION = "3"; + + + private DownloadUtils() { + throw new IllegalStateException("Utility class"); + } + + static { + try { + FileUtils.forceMkdir(new File(downloadFolder)); + String envUrl = System.getenv(APACHE_MIRROR_ENV_KEY); + if (StringUtils.isNotBlank(envUrl)) { + MIRROR_URL = envUrl; + } else { + MIRROR_URL = + IOUtils.toString(new URL("https://www.apache.org/dyn/closer.lua?preferred=true"), + StandardCharsets.UTF_8); + } + String envProgressUpdateInterval = System.getenv(PROGRESS_BAR_UPDATE_INTERVAL_ENV_KEY); + if (StringUtils.isNotBlank(envProgressUpdateInterval)) { + PROGRESS_BAR_UPDATE_INTERVAL = Integer.valueOf(envProgressUpdateInterval); + } else { + PROGRESS_BAR_UPDATE_INTERVAL = 1000; + } + } catch (IOException e) { + throw new RuntimeException("Fail to create download folder: " + downloadFolder, e); + } + } + + /** + * Download Spark with default versions + * + * @return home of Spark installation + */ + public static String downloadSpark() { + return downloadSpark(DEFAULT_SPARK_VERSION, DEFAULT_SPARK_HADOOP_VERSION, null); + } + + /** + * Download of a Spark distribution with a specific Hadoop version + * + * @param sparkVersion + * @param hadoopVersion + * @return home of Spark installation + */ + public static String downloadSpark(String sparkVersion, String hadoopVersion) { + return 
downloadSpark(sparkVersion, hadoopVersion, null); + } + + /** + * Download of a Spark distribution with a Hadoop and Scala version + * + * @param sparkVersion + * @param hadoopVersion + * @param scalaVersion + * @return home of Spark installation + */ + public static String downloadSpark(String sparkVersion, String hadoopVersion, + String scalaVersion) { + File sparkFolder = new File(downloadFolder, "spark"); + final File targetSparkHomeFolder; + if (StringUtils.isNotBlank(scalaVersion)) { + targetSparkHomeFolder = new File(sparkFolder, + "spark-" + sparkVersion + "-bin-hadoop" + hadoopVersion + "-scala" + scalaVersion); + } else { + targetSparkHomeFolder = new File(sparkFolder, + "spark-" + sparkVersion + "-bin-hadoop" + hadoopVersion); + } + + return downloadSpark(sparkVersion, hadoopVersion, scalaVersion, targetSparkHomeFolder); + } + + /** + * Download of a Spark distribution + * + * @param sparkVersion + * @param hadoopVersion + * @param targetSparkHomeFolder - where should the spark archive be extracted + * @return home of Spark installation + */ + public static String downloadSpark(String sparkVersion, String hadoopVersion, String scalaVersion, + File targetSparkHomeFolder) { + File sparkFolder = new File(downloadFolder, "spark"); + sparkFolder.mkdir(); + final String sparkVersionLog; + if (StringUtils.isBlank(scalaVersion)) { + sparkVersionLog = "Spark " + sparkVersion + "-" + hadoopVersion; + } else { + sparkVersionLog = "Spark " + sparkVersion + "-" + hadoopVersion + "-" + scalaVersion; + } + if (targetSparkHomeFolder.exists()) { + LOGGER.info("Skip to download {} as it is already downloaded.", sparkVersionLog); + return targetSparkHomeFolder.getAbsolutePath(); + } + final File sparkTarGZ; + if (StringUtils.isBlank(scalaVersion)) { + sparkTarGZ = + new File(sparkFolder, "spark-" + sparkVersion + "-bin-hadoop" + hadoopVersion + ".tgz"); + } else { + sparkTarGZ = new File(sparkFolder, "spark-" + sparkVersion + "-bin-hadoop" + hadoopVersion + + "-scala" + 
scalaVersion + ".tgz"); + } + + try { + URL mirrorURL = new URL(MIRROR_URL + + generateSparkDownloadURL(sparkVersion, hadoopVersion, scalaVersion)); + URL archiveURL = new URL(ARCHIVE_URL + + generateSparkDownloadURL(sparkVersion, hadoopVersion, scalaVersion)); + LOGGER.info("Download {}", sparkVersionLog); + download(new DownloadRequest(mirrorURL, archiveURL), sparkTarGZ); + ProgressBarBuilder pbb = new ProgressBarBuilder() + .setTaskName("Unarchive") + .setUnit("MiB", 1048576) // setting the progress bar to use MiB as the unit + .setStyle(ProgressBarStyle.ASCII) + .setUpdateIntervalMillis(PROGRESS_BAR_UPDATE_INTERVAL) + .setConsumer(new DelegatingProgressBarConsumer(LOGGER::info)); + try ( + InputStream fis = Files.newInputStream(sparkTarGZ.toPath()); + InputStream pbis = ProgressBar.wrap(fis, pbb); + InputStream bis = new BufferedInputStream(pbis); + InputStream gzis = new GzipCompressorInputStream(bis); + ArchiveInputStream o = new TarArchiveInputStream(gzis)) { + LOGGER.info("Unarchive {} to {}", sparkVersionLog, targetSparkHomeFolder); + unarchive(o, targetSparkHomeFolder, 1); + LOGGER.info("Unarchive {} done", sparkVersionLog); + } + } catch (IOException e) { + throw new RuntimeException("Unable to download spark", e); + } + return targetSparkHomeFolder.getAbsolutePath(); + } + + + public static void download(String url, int retries, File dst) throws IOException { + download(new URL(url), retries, dst); + } + + public static void download(DownloadRequest downloadRequest, File dst) throws IOException { + if (dst.exists()) { + LOGGER.info("Skip Download of {}, because it exists", dst); + } else { + boolean urlDownload = download(downloadRequest.getUrl(), downloadRequest.getRetries(), dst); + if (urlDownload) { + LOGGER.info("Download successfully"); + return; + } + Optional<URL> alternativeURL = downloadRequest.getAlternativeUrl(); + if (alternativeURL.isPresent()) { + urlDownload = download(alternativeURL.get(), downloadRequest.getRetries(), dst); + if 
(urlDownload) { + LOGGER.info("Download from alternative successfully"); + return; + } + } + throw new IOException("Unable to download from " + downloadRequest.getUrl()); + } + } + + private static boolean download(URL url, int retries, File dst) { + int retry = 0; + while (retry < retries) { + try { + HttpURLConnection httpConnection = (HttpURLConnection) (url.openConnection()); + long completeFileSize = httpConnection.getContentLength(); + ProgressBarBuilder pbb = new ProgressBarBuilder() + .setTaskName("Download " + dst.getName()) + .setUnit("MiB", 1048576) // setting the progress bar to use MiB as the unit + .setStyle(ProgressBarStyle.ASCII) + .setUpdateIntervalMillis(PROGRESS_BAR_UPDATE_INTERVAL) + .setInitialMax(completeFileSize) + .setConsumer(new DelegatingProgressBarConsumer(LOGGER::info)); + try ( + OutputStream fileOS = Files.newOutputStream(dst.toPath()); + InputStream is = url.openStream(); + InputStream pbis = ProgressBar.wrap(is, pbb); + InputStream bis = new BufferedInputStream(pbis)) { + IOUtils.copyLarge(bis, fileOS); + return true; + } + } catch (IOException e) { + LOGGER.info("Unable to download from {}", url, e); + ++retry; + } + } + return false; + } + + /** + * @param livyVersion + * @param targetLivyHomeFolder + * @return livyHome + */ + public static String downloadLivy(String livyVersion, String scalaVersion, + File targetLivyHomeFolder) { + File livyDownloadFolder = new File(downloadFolder, "livy"); + livyDownloadFolder.mkdir(); + final String livyLog = StringUtils.isBlank(scalaVersion) ? "Livy " + livyVersion : "Livy " + + livyVersion + "_" + scalaVersion; + if (targetLivyHomeFolder.exists()) { + LOGGER.info("Skip to download {} as it is already downloaded.", livyLog); + return targetLivyHomeFolder.getAbsolutePath(); + } + final File livyZip; + if (StringUtils.isBlank(scalaVersion)) { + // e.g. 
apache-livy-0.7.1-incubating-bin.zip + livyZip = new File(livyDownloadFolder, "apache-livy-" + livyVersion + "-bin.zip"); + } else { + // e.g apache-livy-0.8.0-incubating_2.12-bin.zip + livyZip = new File(livyDownloadFolder, "apache-livy-" + livyVersion + "_" + scalaVersion + "-bin.zip"); + } + + try { + URL mirrorURL = new URL(MIRROR_URL + generateLivyDownloadUrl(livyVersion, scalaVersion)); + URL archiveURL = new URL(ARCHIVE_URL + generateLivyDownloadUrl(livyVersion, scalaVersion)); + LOGGER.info("Download {}", livyLog); + download(new DownloadRequest(mirrorURL, archiveURL), livyZip); + LOGGER.info("Unzip {} to {}", livyLog, targetLivyHomeFolder); + ProgressBarBuilder pbb = new ProgressBarBuilder() + .setTaskName("Unarchiv Livy") + .setUnit("MiB", 1048576) // setting the progress bar to use MiB as the unit + .setStyle(ProgressBarStyle.ASCII) + .setUpdateIntervalMillis(PROGRESS_BAR_UPDATE_INTERVAL) + .setConsumer(new DelegatingProgressBarConsumer(LOGGER::info)); + try (InputStream fis = Files.newInputStream(livyZip.toPath()); + InputStream pbis = ProgressBar.wrap(fis, pbb); + InputStream bis = new BufferedInputStream(pbis); + ZipInputStream zis = new ZipInputStream(bis)) { + unzip(zis, targetLivyHomeFolder, 1); + } + LOGGER.info("Unzip {} done", livyLog); + // Create logs directory + File logs = new File(targetLivyHomeFolder, "logs"); + logs.mkdir(); + } catch (MalformedURLException e) { + LOGGER.error("invalid URL", e); + } catch (IOException e) { + throw new RuntimeException("Unable to download livy", e); + } + return targetLivyHomeFolder.getAbsolutePath(); + } + + /** + * @param livyVersion + * @return return livyHome + * @throws IOException + */ + public static String downloadLivy(String livyVersion) { + File livyDownloadFolder = new File(downloadFolder, "livy"); + File targetLivyHomeFolder = new File(livyDownloadFolder, "livy-" + livyVersion); + return downloadLivy(livyVersion, null, targetLivyHomeFolder); + } + + public static String downloadLivy(String 
livyVersion, String scalaVersion) { + File livyDownloadFolder = new File(downloadFolder, "livy"); + File targetLivyHomeFolder = + new File(livyDownloadFolder, "livy-" + livyVersion + "_" + scalaVersion); + return downloadLivy(livyVersion, scalaVersion, targetLivyHomeFolder); + } + + private static File newFile(File destinationDir, ZipEntry zipEntry, int strip) + throws IOException { + String filename = zipEntry.getName(); + for (int i = 0; i < strip; ++i) { + if (filename.contains(File.separator)) { + filename = filename.substring(filename.indexOf(File.separator) + 1); + } + } + File destFile = new File(destinationDir, filename); + String destDirPath = destinationDir.getCanonicalPath(); + String destFilePath = destFile.getCanonicalPath(); + + if (!destFilePath.startsWith(destDirPath + File.separator)) { + throw new IOException("Entry is outside of the target dir: " + zipEntry.getName()); + } + + return destFile; + } + + private static File newFile(File destDir, ArchiveEntry archiveEntry, int strip) + throws IOException { + String filename = archiveEntry.getName(); + for (int i = 0; i < strip; ++i) { + if (filename.contains(File.separator)) { + filename = filename.substring(filename.indexOf(File.separator) + 1); + } + } + File destFile = new File(destDir, filename); + String destDirPath = destDir.getCanonicalPath(); + String destFilePath = destFile.getCanonicalPath(); + + if (!destFilePath.startsWith(destDirPath + File.separator)) { + throw new IOException("Entry is outside of the target dir: " + archiveEntry.getName()); + } + + return destFile; + } + + private static void unarchive(ArchiveInputStream ais, File destDir, + int strip) throws IOException { + byte[] buffer = new byte[1024]; + ArchiveEntry archiveEntry = ais.getNextEntry(); + while (archiveEntry != null) { + File newFile; + try { + newFile = newFile(destDir, archiveEntry, strip); + } catch (IOException e) { + LOGGER.info("Skip {}", archiveEntry.getName()); + archiveEntry = ais.getNextEntry(); + continue; 
+ } + if (archiveEntry.isDirectory()) { + if (!newFile.isDirectory() && !newFile.mkdirs()) { + throw new IOException("Failed to create directory " + newFile); + } + } else { + File parent = newFile.getParentFile(); + if (!parent.isDirectory() && !parent.mkdirs()) { + throw new IOException("Failed to create directory " + parent); + } + + // write file content + try (FileOutputStream fos = new FileOutputStream(newFile)) { + int len; + while ((len = ais.read(buffer)) > 0) { + fos.write(buffer, 0, len); + } + } + // Change permissions and metadata + if (newFile.getParentFile().getName().contains("bin") + && !newFile.setExecutable(true, false)) { + LOGGER.info("Setting file {} to executable failed", newFile); + } + if (!newFile.setLastModified(archiveEntry.getLastModifiedDate().getTime())) { + LOGGER.info("Setting last modified date to file {} failed", newFile); + } + } + archiveEntry = ais.getNextEntry(); + } + } + + private static void unzip(ZipInputStream zis, File destDir, int strip) throws IOException { + byte[] buffer = new byte[1024]; + ZipEntry zipEntry = zis.getNextEntry(); + while (zipEntry != null) { + File newFile; + try { + newFile = newFile(destDir, zipEntry, strip); + } catch (IOException e) { + LOGGER.info("Skip {}", zipEntry.getName()); + zipEntry = zis.getNextEntry(); + continue; + } + if (zipEntry.isDirectory()) { + if (!newFile.isDirectory() && !newFile.mkdirs()) { + throw new IOException("Failed to create directory " + newFile); + } + } else { + File parent = newFile.getParentFile(); + if (!parent.isDirectory() && !parent.mkdirs()) { + throw new IOException("Failed to create directory " + parent); + } + + // write file content + try (FileOutputStream fos = new FileOutputStream(newFile)) { + int len; + while ((len = zis.read(buffer)) > 0) { + fos.write(buffer, 0, len); + } + } + // Change permissions and metadata + if (newFile.getParentFile().getName().contains("bin") + && !newFile.setExecutable(true, false)) { + LOGGER.info("Setting file {} to 
executable failed", newFile); + } + if (!newFile.setLastModified(zipEntry.getLastModifiedTime().toMillis())) { + LOGGER.info("Setting last modified date to file {} failed", newFile); + } + } + zipEntry = zis.getNextEntry(); + } + zis.closeEntry(); + } + + public static String downloadFlink(String flinkVersion, String scalaVersion) { + File flinkDownloadFolder = new File(downloadFolder, "flink"); + flinkDownloadFolder.mkdir(); + File targetFlinkHomeFolder = new File(flinkDownloadFolder, "flink-" + flinkVersion); + if (targetFlinkHomeFolder.exists()) { + LOGGER.info("Skip to download Flink {}_{} as it is already downloaded.", flinkVersion, + scalaVersion); + return targetFlinkHomeFolder.getAbsolutePath(); + } + File flinkTGZ = new File(flinkDownloadFolder, + "flink-" + flinkVersion + "-bin-scala_" + scalaVersion + ".tgz"); + try { + URL mirrorURL = new URL(MIRROR_URL + generateDownloadURL( + "flink", flinkVersion, "-bin-scala_" + scalaVersion + ".tgz", "flink")); + URL archiveURL = new URL(ARCHIVE_URL + generateDownloadURL( + "flink", flinkVersion, "-bin-scala_" + scalaVersion + ".tgz", "flink")); + LOGGER.info("Download Flink {}_{}", flinkVersion, scalaVersion); + download(new DownloadRequest(mirrorURL, archiveURL), flinkTGZ); + ProgressBarBuilder pbb = new ProgressBarBuilder() + .setTaskName("Unarchive Flink") + .setUnit("MiB", 1048576) // setting the progress bar to use MiB as the unit + .setStyle(ProgressBarStyle.ASCII) + .setUpdateIntervalMillis(PROGRESS_BAR_UPDATE_INTERVAL) + .setConsumer(new DelegatingProgressBarConsumer(LOGGER::info)); + try ( + InputStream fis = Files.newInputStream(flinkTGZ.toPath()); + InputStream pbis = ProgressBar.wrap(fis, pbb); + InputStream bis = new BufferedInputStream(pbis); + InputStream gzis = new GzipCompressorInputStream(bis); + ArchiveInputStream o = new TarArchiveInputStream(gzis)) { + LOGGER.info("Unarchive Flink {}_{} to {}", flinkVersion, scalaVersion, + targetFlinkHomeFolder); + unarchive(o, targetFlinkHomeFolder, 1); + LOGGER.info("Unarchive 
"Flink done"); + } + } catch (IOException e) { + throw new RuntimeException("Unable to download flink", e); + } + + + // download other dependencies for running flink with yarn and hive + try { + download("https://repo1.maven.org/maven2/org/apache/flink/flink-connector-hive_" + + scalaVersion + "/" + + flinkVersion + "/flink-connector-hive_" + scalaVersion + "-" + flinkVersion + ".jar", + 3, new File(targetFlinkHomeFolder, "lib" + File.separator + "flink-connector-hive_" + + scalaVersion + "-" + flinkVersion + ".jar")); + download("https://repo1.maven.org/maven2/org/apache/flink/flink-hadoop-compatibility_" + scalaVersion + "/" + + flinkVersion + "/flink-hadoop-compatibility_" + scalaVersion + "-" + flinkVersion + ".jar", + 3, new File(targetFlinkHomeFolder, "lib" + File.separator + "flink-hadoop-compatibility_" + + scalaVersion + "-" + flinkVersion + ".jar")); + download("https://repo1.maven.org/maven2/org/apache/hive/hive-exec/2.3.7/hive-exec-2.3.7.jar", + 3, new File(targetFlinkHomeFolder, "lib" + File.separator + "hive-exec-2.3.7.jar")); + download( + "https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-client-api/3.3.6/hadoop-client-api-3.3.6.jar", + 3, new File(targetFlinkHomeFolder, + "lib" + File.separator + "hadoop-client-api-3.3.6.jar")); + download( + "https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-client-runtime/3.3.6/hadoop-client-runtime-3.3.6.jar", + 3, new File(targetFlinkHomeFolder, + "lib" + File.separator + "hadoop-client-runtime-3.3.6.jar")); + download("https://repo1.maven.org/maven2/org/apache/flink/flink-table-api-scala_" + + scalaVersion + "/" + + flinkVersion + "/flink-table-api-scala_" + scalaVersion + "-" + flinkVersion + ".jar", + 3, new File(targetFlinkHomeFolder, "lib" + File.separator + "flink-table-api-scala_" + + scalaVersion + "-" + flinkVersion + ".jar")); + download("https://repo1.maven.org/maven2/org/apache/flink/flink-table-api-scala-bridge_" + + scalaVersion + "/" + + flinkVersion + "/flink-table-api-scala-bridge_" 
+ scalaVersion + "-" + flinkVersion + ".jar", + 3, new File(targetFlinkHomeFolder, "lib" + File.separator + + "flink-table-api-scala-bridge_" + scalaVersion + "-" + flinkVersion + ".jar")); + + String jarName = "flink-table-planner_" + scalaVersion + "-" + flinkVersion + ".jar"; + mvFile( + targetFlinkHomeFolder + File.separator + "opt" + File.separator + jarName, + targetFlinkHomeFolder + File.separator + "lib" + File.separator + jarName); + jarName = "flink-table-planner-loader-" + flinkVersion + ".jar"; + mvFile( + targetFlinkHomeFolder + File.separator + "lib" + File.separator + jarName, + targetFlinkHomeFolder + File.separator + "opt" + File.separator + jarName); + if (SemanticVersion.of(flinkVersion).equalsOrNewerThan(SemanticVersion.of("1.16.0"))) { + jarName = "flink-sql-client-" + flinkVersion + ".jar"; + mvFile(targetFlinkHomeFolder + File.separator + "opt" + File.separator + jarName, + targetFlinkHomeFolder + File.separator + "lib" + File.separator + jarName); + } + } catch (Exception e) { + throw new RuntimeException("Fail to download jar", e); + } + return targetFlinkHomeFolder.getAbsolutePath(); + } + + private static void mvFile(String srcPath, String dstPath) throws IOException { + Path src = Paths.get(srcPath); + Path dst = Paths.get(dstPath); + if (src.toFile().exists()) { + if (dst.toFile().exists()) { + LOGGER.warn("{} does exist - replacing", dstPath); + FileUtils.deleteQuietly(dst.toFile()); + } + LOGGER.info("Move file {} to {}", src, dst); + Files.move(src, dst, StandardCopyOption.REPLACE_EXISTING); + } else { + LOGGER.warn("{} does not exist - skipping", srcPath); + } + } + + public static String downloadHadoop(String version) { + File hadoopDownloadFolder = new File(downloadFolder, "hadoop"); + hadoopDownloadFolder.mkdir(); + File targetHadoopHomeFolder = new File(hadoopDownloadFolder, "hadoop-" + version); + if (targetHadoopHomeFolder.exists()) { + LOGGER.info("Skip to download Hadoop {} as it is already downloaded.", version); + return 
targetHadoopHomeFolder.getAbsolutePath(); + } + File hadoopTGZ = new File(hadoopDownloadFolder, "hadoop-" + version + ".tar.gz"); + try { + URL mirrorURL = new URL(MIRROR_URL + generateDownloadURL( + "hadoop", version, ".tar.gz", "hadoop/core")); + URL archiveURL = new URL(ARCHIVE_URL + generateDownloadURL( + "hadoop", version, ".tar.gz", "hadoop/core")); + LOGGER.info("Download Hadoop {}", version); + download(new DownloadRequest(mirrorURL, archiveURL), hadoopTGZ); + ProgressBarBuilder pbb = new ProgressBarBuilder() + .setTaskName("Unarchive") + .setUnit("MiB", 1048576) // setting the progress bar to use MiB as the unit + .setStyle(ProgressBarStyle.ASCII) + .setUpdateIntervalMillis(PROGRESS_BAR_UPDATE_INTERVAL) + .setConsumer(new DelegatingProgressBarConsumer(LOGGER::info)); + try ( + InputStream fis = Files.newInputStream(hadoopTGZ.toPath()); + InputStream pbis = ProgressBar.wrap(fis, pbb); + InputStream bis = new BufferedInputStream(pbis); + InputStream gzis = new GzipCompressorInputStream(bis); + ArchiveInputStream o = new TarArchiveInputStream(gzis)) { + LOGGER.info("Unarchive Hadoop {} to {}", version, targetHadoopHomeFolder); + unarchive(o, targetHadoopHomeFolder, 1); + LOGGER.info("Unarchive Hadoop {} done", version); + } + } catch (IOException e) { + throw new RuntimeException("Unable to download hadoop", e); + } + return targetHadoopHomeFolder.getAbsolutePath(); + } + + private static String generateDownloadURL(String project, String version, String postFix, + String projectPath) { + return projectPath + "/" + project + "-" + version + "/" + project + "-" + version + + postFix; + } + + private static String generateSparkDownloadURL(String sparkVersion, String hadoopVersion, + String scalaVersion) { + final String url; + String sparkVersionFolder = "spark/spark-" + sparkVersion; + if (StringUtils.isNotBlank(hadoopVersion)) { + if (StringUtils.isNotBlank(scalaVersion)) { + // spark-3.4.0-bin-hadoop3-scala2.13.tgz + url = sparkVersionFolder + "/spark-" + sparkVersion + "-bin-hadoop" + 
hadoopVersion + + "-scala" + scalaVersion + ".tgz"; + } else { + url = + sparkVersionFolder + "/spark-" + sparkVersion + "-bin-hadoop" + hadoopVersion + ".tgz"; + } + } else { + url = sparkVersionFolder + "/spark-" + sparkVersion + "-bin-without-hadoop.tgz"; + } + return url; + } + + private static String generateLivyDownloadUrl(String livyVersion, String scalaVersion) { + SemanticVersion livy = SemanticVersion.of(livyVersion.replace("incubating", "")); + if (livy.equalsOrNewerThan(SemanticVersion.of("0.8.0"))) { + return "incubator/livy/" + livyVersion + "/apache-livy-" + livyVersion + "_" + scalaVersion + + "-bin.zip"; + } + return "incubator/livy/" + livyVersion + "/apache-livy-" + livyVersion + "-bin.zip"; + } +} diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/integration/SemanticVersion.java b/zeppelin-test/src/main/java/org/apache/zeppelin/test/SemanticVersion.java similarity index 98% rename from zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/integration/SemanticVersion.java rename to zeppelin-test/src/main/java/org/apache/zeppelin/test/SemanticVersion.java index f9bd771005d..f25e50353ee 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/integration/SemanticVersion.java +++ b/zeppelin-test/src/main/java/org/apache/zeppelin/test/SemanticVersion.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.zeppelin.interpreter.integration; +package org.apache.zeppelin.test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/zeppelin-test/src/test/java/org/apache/zeppelin/test/DownloadUtilsTest.java b/zeppelin-test/src/test/java/org/apache/zeppelin/test/DownloadUtilsTest.java new file mode 100644 index 00000000000..6bed71683c8 --- /dev/null +++ b/zeppelin-test/src/test/java/org/apache/zeppelin/test/DownloadUtilsTest.java @@ -0,0 +1,80 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.zeppelin.test; + +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.nio.file.Path; +import java.nio.file.Paths; + +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; + + +@Disabled("Takes a long time and depends on external factors.") +class DownloadUtilsTest { + + @Test + void downloadHadoop() { + String hadoopHome = DownloadUtils.downloadHadoop("3.4.0"); + Path hadoopHomePath = Paths.get(hadoopHome); + assertTrue(hadoopHomePath.toFile().exists()); + assertTrue(hadoopHomePath.toFile().isDirectory()); + } + + @Test + void downloadSpark() { + String sparkHome = DownloadUtils.downloadSpark(); + Path sparkHomePath = Paths.get(sparkHome); + assertTrue(sparkHomePath.toFile().exists()); + assertTrue(sparkHomePath.toFile().isDirectory()); + } + + @Test + void downloadSparkWithScala() { + String sparkHome = DownloadUtils.downloadSpark(DownloadUtils.DEFAULT_SPARK_VERSION, DownloadUtils.DEFAULT_SPARK_HADOOP_VERSION, "2.13"); + Path sparkHomePath = Paths.get(sparkHome); + assertTrue(sparkHomePath.toFile().exists()); + assertTrue(sparkHomePath.toFile().isDirectory()); + } + + @Test + void downloadFlink() { + String sparkHome = DownloadUtils.downloadFlink("1.16.3", "2.12"); + Path sparkHomePath = Paths.get(sparkHome); + 
assertTrue(sparkHomePath.toFile().exists()); + assertTrue(sparkHomePath.toFile().isDirectory()); + } + + @Test + void downloadLivy() { + String sparkHome = DownloadUtils.downloadLivy("0.7.1-incubating"); + Path sparkHomePath = Paths.get(sparkHome); + assertTrue(sparkHomePath.toFile().exists()); + assertTrue(sparkHomePath.toFile().isDirectory()); + } + + @Test + void downloadLivy080() { + String sparkHome = DownloadUtils.downloadLivy("0.8.0-incubating", "2.12"); + Path sparkHomePath = Paths.get(sparkHome); + assertTrue(sparkHomePath.toFile().exists()); + assertTrue(sparkHomePath.toFile().isDirectory()); + } + +} diff --git a/zeppelin-test/src/test/resources/log4j2.properties b/zeppelin-test/src/test/resources/log4j2.properties new file mode 100644 index 00000000000..4935e68e004 --- /dev/null +++ b/zeppelin-test/src/test/resources/log4j2.properties @@ -0,0 +1,25 @@ +################################################################################ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+################################################################################ + +rootLogger=debug, STDOUT + +appender.console.type = Console +appender.console.name = STDOUT +appender.console.layout.type = PatternLayout +appender.console.layout.pattern = %5p [%d] ({%t} %F[%M]:%L) - %m%n + diff --git a/zeppelin-web-angular/package-lock.json b/zeppelin-web-angular/package-lock.json index 93c1be865fa..3f82465b828 100644 --- a/zeppelin-web-angular/package-lock.json +++ b/zeppelin-web-angular/package-lock.json @@ -26,7 +26,7 @@ "diff-match-patch": "^1.0.4", "github-markdown-css": "^3.0.1", "highlight.js": "^9.15.8", - "lodash": "^4.17.11", + "lodash": "^4.17.21", "mathjax": "2.7.5", "monaco-editor": "0.15.1", "ng-zorro-antd": "^8.4.0", @@ -7033,17 +7033,17 @@ "dev": true }, "node_modules/express": { - "version": "4.18.2", - "resolved": "https://registry.npmjs.org/express/-/express-4.18.2.tgz", - "integrity": "sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ==", + "version": "4.19.2", + "resolved": "https://registry.npmjs.org/express/-/express-4.19.2.tgz", + "integrity": "sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==", "dev": true, "dependencies": { "accepts": "~1.3.8", "array-flatten": "1.1.1", - "body-parser": "1.20.1", + "body-parser": "1.20.2", "content-disposition": "0.5.4", "content-type": "~1.0.4", - "cookie": "0.5.0", + "cookie": "0.6.0", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "2.0.0", @@ -7080,34 +7080,10 @@ "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==", "dev": true }, - "node_modules/express/node_modules/body-parser": { - "version": "1.20.1", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.1.tgz", - "integrity": "sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw==", - "dev": true, - "dependencies": { - "bytes": 
"3.1.2", - "content-type": "~1.0.4", - "debug": "2.6.9", - "depd": "2.0.0", - "destroy": "1.2.0", - "http-errors": "2.0.0", - "iconv-lite": "0.4.24", - "on-finished": "2.4.1", - "qs": "6.11.0", - "raw-body": "2.5.1", - "type-is": "~1.6.18", - "unpipe": "1.0.0" - }, - "engines": { - "node": ">= 0.8", - "npm": "1.2.8000 || >= 1.4.16" - } - }, "node_modules/express/node_modules/cookie": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz", - "integrity": "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==", + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz", + "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==", "dev": true, "engines": { "node": ">= 0.6" @@ -7146,21 +7122,6 @@ "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", "dev": true }, - "node_modules/express/node_modules/raw-body": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.1.tgz", - "integrity": "sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==", - "dev": true, - "dependencies": { - "bytes": "3.1.2", - "http-errors": "2.0.0", - "iconv-lite": "0.4.24", - "unpipe": "1.0.0" - }, - "engines": { - "node": ">= 0.8" - } - }, "node_modules/express/node_modules/statuses": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", @@ -7636,10 +7597,16 @@ } }, "node_modules/follow-redirects": { - "version": "1.15.3", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.3.tgz", - "integrity": "sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q==", - "dev": true, + "version": "1.15.4", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.4.tgz", + 
"integrity": "sha512-Cr4D/5wlrb0z9dgERpUL3LrmPKVDsETIJhaCMeDfuFYcqa5bldGV6wBsAN6X/vxlXQtFBMrXdXxdL8CbDTGniw==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], "engines": { "node": ">=4.0" }, @@ -24535,17 +24502,17 @@ } }, "express": { - "version": "4.18.2", - "resolved": "https://registry.npmjs.org/express/-/express-4.18.2.tgz", - "integrity": "sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ==", + "version": "4.19.2", + "resolved": "https://registry.npmjs.org/express/-/express-4.19.2.tgz", + "integrity": "sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==", "dev": true, "requires": { "accepts": "~1.3.8", "array-flatten": "1.1.1", - "body-parser": "1.20.1", + "body-parser": "1.20.2", "content-disposition": "0.5.4", "content-type": "~1.0.4", - "cookie": "0.5.0", + "cookie": "0.6.0", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "2.0.0", @@ -24579,30 +24546,10 @@ "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==", "dev": true }, - "body-parser": { - "version": "1.20.1", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.1.tgz", - "integrity": "sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw==", - "dev": true, - "requires": { - "bytes": "3.1.2", - "content-type": "~1.0.4", - "debug": "2.6.9", - "depd": "2.0.0", - "destroy": "1.2.0", - "http-errors": "2.0.0", - "iconv-lite": "0.4.24", - "on-finished": "2.4.1", - "qs": "6.11.0", - "raw-body": "2.5.1", - "type-is": "~1.6.18", - "unpipe": "1.0.0" - } - }, "cookie": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz", - "integrity": "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==", + "version": "0.6.0", + "resolved": 
"https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz", + "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==", "dev": true }, "debug": { @@ -24635,18 +24582,6 @@ "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", "dev": true }, - "raw-body": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.1.tgz", - "integrity": "sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==", - "dev": true, - "requires": { - "bytes": "3.1.2", - "http-errors": "2.0.0", - "iconv-lite": "0.4.24", - "unpipe": "1.0.0" - } - }, "statuses": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", @@ -25042,9 +24977,9 @@ "dev": true }, "follow-redirects": { - "version": "1.15.3", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.3.tgz", - "integrity": "sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q==", + "version": "1.15.4", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.4.tgz", + "integrity": "sha512-Cr4D/5wlrb0z9dgERpUL3LrmPKVDsETIJhaCMeDfuFYcqa5bldGV6wBsAN6X/vxlXQtFBMrXdXxdL8CbDTGniw==", "dev": true }, "for-each": { diff --git a/zeppelin-web-angular/package.json b/zeppelin-web-angular/package.json index f1eda3c8c22..162cc91484e 100644 --- a/zeppelin-web-angular/package.json +++ b/zeppelin-web-angular/package.json @@ -34,7 +34,7 @@ "diff-match-patch": "^1.0.4", "github-markdown-css": "^3.0.1", "highlight.js": "^9.15.8", - "lodash": "^4.17.11", + "lodash": "^4.17.21", "mathjax": "2.7.5", "monaco-editor": "0.15.1", "ng-zorro-antd": "^8.4.0", diff --git a/zeppelin-web-angular/pom.xml b/zeppelin-web-angular/pom.xml index 7c1c96e611c..756452db91f 100644 --- a/zeppelin-web-angular/pom.xml +++ b/zeppelin-web-angular/pom.xml @@ -18,7 +18,7 @@ zeppelin 
org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 zeppelin-web-angular @@ -84,6 +84,17 @@ + + npm build projects + + npm + + + ${web.e2e.enabled} + run build:projects + + + npm build diff --git a/zeppelin-web/karma.conf.js b/zeppelin-web/karma.conf.js index 2a3f4ff352c..f240f74bbdd 100644 --- a/zeppelin-web/karma.conf.js +++ b/zeppelin-web/karma.conf.js @@ -68,7 +68,7 @@ module.exports = function(config) { 'node_modules/ace-builds/src-noconflict/theme-chrome.js', 'node_modules/ace-builds/src-noconflict/mode-javascript.js', 'node_modules/angular-ui-ace/src/ui-ace.js', - 'node_modules/jquery.scrollTo/jquery.scrollTo.js', + 'node_modules/jquery.scrollto/jquery.scrollTo.js', 'node_modules/d3/d3.js', 'node_modules/nvd3/build/nv.d3.js', 'node_modules/jquery-ui/dist/jquery-ui.js', @@ -79,13 +79,12 @@ module.exports = function(config) { 'node_modules/angular-elastic-input/dist/angular-elastic-input.min.js', 'node_modules/angular-xeditable/dist/js/xeditable.js', 'node_modules/highlight.js/lib/highlight.js', - 'node_modules/lodash/index.js', 'node_modules/angular-filter/dist/angular-filter.js', 'node_modules/ng-toast/dist/ngToast.js', 'node_modules/ng-focus-if/focusIf.js', 'node_modules/bootstrap3-dialog/dist/js/bootstrap-dialog.min.js', 'node_modules/select2/dist/js/select2.js', - 'node_modules/mathjax/MathJax.js', + 'node_modules/mathjax/es5/tex-mml-chtml.js', 'node_modules/clipboard/dist/clipboard.js', 'node_modules/ngclipboard/dist/ngclipboard.js', 'node_modules/diff/dist/diff.js', diff --git a/zeppelin-web/package-lock.json b/zeppelin-web/package-lock.json index a992413e837..c550a4acbfe 100644 --- a/zeppelin-web/package-lock.json +++ b/zeppelin-web/package-lock.json @@ -47,8 +47,8 @@ "jquery.scrollto": "~2.1.2", "json3": "~3.3.1", "jszip": "2.6.1", - "lodash": "~3.9.3", - "mathjax": "2.7.0", + "lodash": "4.17.21", + "mathjax": "3.0.0", "moment": "^2.29.4", "moment-duration-format": "^1.3.0", "ng-focus-if": "~1.0.2", @@ -1156,12 +1156,6 @@ "chokidar": "^1.6.1" } 
}, - "node_modules/babel-cli/node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - }, "node_modules/babel-code-frame": { "version": "6.26.0", "resolved": "https://registry.npmjs.org/babel-code-frame/-/babel-code-frame-6.26.0.tgz", @@ -1200,12 +1194,6 @@ "source-map": "^0.5.7" } }, - "node_modules/babel-core/node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - }, "node_modules/babel-generator": { "version": "6.26.1", "resolved": "https://registry.npmjs.org/babel-generator/-/babel-generator-6.26.1.tgz", @@ -1222,12 +1210,6 @@ "trim-right": "^1.0.1" } }, - "node_modules/babel-generator/node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - }, "node_modules/babel-helper-builder-binary-assignment-operator-visitor": { "version": "6.24.1", "resolved": "https://registry.npmjs.org/babel-helper-builder-binary-assignment-operator-visitor/-/babel-helper-builder-binary-assignment-operator-visitor-6.24.1.tgz", @@ -1263,12 +1245,6 @@ "lodash": "^4.17.4" } }, - "node_modules/babel-helper-define-map/node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - }, "node_modules/babel-helper-explode-assignable-expression": { "version": "6.24.1", "resolved": 
"https://registry.npmjs.org/babel-helper-explode-assignable-expression/-/babel-helper-explode-assignable-expression-6.24.1.tgz", @@ -1334,12 +1310,6 @@ "lodash": "^4.17.4" } }, - "node_modules/babel-helper-regex/node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - }, "node_modules/babel-helper-remap-async-to-generator": { "version": "6.24.1", "resolved": "https://registry.npmjs.org/babel-helper-remap-async-to-generator/-/babel-helper-remap-async-to-generator-6.24.1.tgz", @@ -1473,12 +1443,6 @@ "lodash": "^4.17.4" } }, - "node_modules/babel-plugin-transform-es2015-block-scoping/node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - }, "node_modules/babel-plugin-transform-es2015-classes": { "version": "6.24.1", "resolved": "https://registry.npmjs.org/babel-plugin-transform-es2015-classes/-/babel-plugin-transform-es2015-classes-6.24.1.tgz", @@ -1789,12 +1753,6 @@ "source-map-support": "^0.4.15" } }, - "node_modules/babel-register/node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - }, "node_modules/babel-runtime": { "version": "6.26.0", "resolved": "https://registry.npmjs.org/babel-runtime/-/babel-runtime-6.26.0.tgz", @@ -1824,12 +1782,6 @@ "lodash": "^4.17.4" } }, - "node_modules/babel-template/node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": 
"sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - }, "node_modules/babel-traverse": { "version": "6.26.0", "resolved": "https://registry.npmjs.org/babel-traverse/-/babel-traverse-6.26.0.tgz", @@ -1847,12 +1799,6 @@ "lodash": "^4.17.4" } }, - "node_modules/babel-traverse/node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - }, "node_modules/babel-types": { "version": "6.26.0", "resolved": "https://registry.npmjs.org/babel-types/-/babel-types-6.26.0.tgz", @@ -1865,12 +1811,6 @@ "to-fast-properties": "^1.0.3" } }, - "node_modules/babel-types/node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - }, "node_modules/babylon": { "version": "6.18.0", "resolved": "https://registry.npmjs.org/babylon/-/babylon-6.18.0.tgz", @@ -2957,12 +2897,6 @@ "lodash": "^4.5.0" } }, - "node_modules/combine-lists/node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - }, "node_modules/combined-stream": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", @@ -5198,12 +5132,6 @@ "node": ">=0.10.0" } }, - "node_modules/eslint-watch/node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - }, 
"node_modules/eslint-watch/node_modules/micromatch": { "version": "3.1.10", "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", @@ -5290,11 +5218,13 @@ "node": ">=4" } }, - "node_modules/eslint/node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true + "node_modules/esm": { + "version": "3.2.25", + "resolved": "https://registry.npmjs.org/esm/-/esm-3.2.25.tgz", + "integrity": "sha512-U1suiZ2oDVWv4zPO56S0NcR5QriEahGtdN2OR6FiOG4WJvcjBVFB0qI4+eKoWFH483PKGuLuu6V8Z4T5g63UVA==", + "engines": { + "node": ">=6" + } }, "node_modules/espree": { "version": "3.5.4", @@ -5941,10 +5871,16 @@ } }, "node_modules/follow-redirects": { - "version": "1.15.3", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.3.tgz", - "integrity": "sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q==", - "dev": true, + "version": "1.15.4", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.4.tgz", + "integrity": "sha512-Cr4D/5wlrb0z9dgERpUL3LrmPKVDsETIJhaCMeDfuFYcqa5bldGV6wBsAN6X/vxlXQtFBMrXdXxdL8CbDTGniw==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], "engines": { "node": ">=4.0" }, @@ -7054,12 +6990,6 @@ "node": ">=0.10.0" } }, - "node_modules/grunt-replace/node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - }, "node_modules/grunt-svgmin": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/grunt-svgmin/-/grunt-svgmin-0.4.0.tgz", @@ -7954,12 +7884,6 @@ "object-assign": "^4.0.1" } }, - 
"node_modules/html-webpack-plugin/node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - }, "node_modules/html-webpack-plugin/node_modules/param-case": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/param-case/-/param-case-2.1.1.tgz", @@ -8354,12 +8278,6 @@ "node": ">=0.10.0" } }, - "node_modules/http-proxy-middleware/node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - }, "node_modules/http-proxy-middleware/node_modules/micromatch": { "version": "3.1.10", "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", @@ -8702,12 +8620,6 @@ "through": "^2.3.6" } }, - "node_modules/inquirer/node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - }, "node_modules/internal-ip": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/internal-ip/-/internal-ip-4.3.0.tgz", @@ -8763,9 +8675,9 @@ } }, "node_modules/ip": { - "version": "1.1.8", - "resolved": "https://registry.npmjs.org/ip/-/ip-1.1.8.tgz", - "integrity": "sha512-PuExPYUiu6qMBQb4l06ecm6T6ujzhmh+MeJcW9wa89PoAz5pvd4zPgN5WJV104mb6S2T1AwNIAaB70JNrLQWhg==", + "version": "1.1.9", + "resolved": "https://registry.npmjs.org/ip/-/ip-1.1.9.tgz", + "integrity": "sha512-cyRxvOEpNHNtchU3Ln9KC/auJgup87llfQpQ+t5ghoC/UhL16SWzbueiCsdTnWmqAWl7LadfuwhlqmtOaqMHdQ==", "dev": true }, "node_modules/ip-regex": { @@ -9921,12 +9833,6 @@ "node": "*" } }, - "node_modules/karma-coverage/node_modules/lodash": { - "version": 
"4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - }, "node_modules/karma-firefox-launcher": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/karma-firefox-launcher/-/karma-firefox-launcher-2.1.2.tgz", @@ -10271,12 +10177,6 @@ "node": ">=0.10.0" } }, - "node_modules/karma/node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - }, "node_modules/karma/node_modules/micromatch": { "version": "3.1.10", "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", @@ -10476,9 +10376,9 @@ } }, "node_modules/lodash": { - "version": "3.9.3", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-3.9.3.tgz", - "integrity": "sha512-v5SKZhnCUujcTpFpHEIJZDVcBM2OYjROx732HyJ6kzKZtwStTb4LG6noqmK9etHqDNhf6X7itXx5s0hTpAXPpQ==" + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" }, "node_modules/lodash._arraycopy": { "version": "3.0.0", @@ -10765,9 +10665,23 @@ "optional": true }, "node_modules/mathjax": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/mathjax/-/mathjax-2.7.0.tgz", - "integrity": "sha512-DfceiJr14b846D77BYgDGUBEB+XLQc3huSbJG4ZUKGo8drbpOXeVo7ke6JZv163vNn5bOX0VMGxwwaOWwyQEew==" + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/mathjax/-/mathjax-3.0.0.tgz", + "integrity": "sha512-z4uLbDHNbs/aRuR6zCcnzwFQuMixkHCcWqgVaommfK/3cA1Ahq7OXemn+m8JwTYcBApSHgcrSbPr9sm3sZFL+A==", + "dependencies": { + "mathjax-full": "git://github.com/mathjax/MathJax-src.git" + } + }, + "node_modules/mathjax-full": { + "version": 
"3.2.2", + "resolved": "git+ssh://git@github.com/mathjax/MathJax-src.git#8565f9da973238e4c9571a86a4bcb281b1d98d9b", + "license": "Apache-2.0", + "dependencies": { + "esm": "^3.2.25", + "mhchemparser": "^4.1.0", + "mj-context-menu": "^0.6.1", + "speech-rule-engine": "^4.0.6" + } }, "node_modules/maxmin": { "version": "0.1.0", @@ -10907,6 +10821,11 @@ "node": ">= 0.6" } }, + "node_modules/mhchemparser": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/mhchemparser/-/mhchemparser-4.2.1.tgz", + "integrity": "sha512-kYmyrCirqJf3zZ9t/0wGgRZ4/ZJw//VwaRVGA75C4nhE60vtnIzhl9J9ndkX/h6hxSN7pjg/cE0VxbnNM+bnDQ==" + }, "node_modules/micromatch": { "version": "2.3.11", "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-2.3.11.tgz", @@ -11187,6 +11106,11 @@ "node": ">=0.10.0" } }, + "node_modules/mj-context-menu": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/mj-context-menu/-/mj-context-menu-0.6.1.tgz", + "integrity": "sha512-7NO5s6n10TIV96d4g2uDpG7ZDpIhMh0QNfGdJw/W47JswFcosz457wqz/b5sAKvl12sxINGFCn80NZHKwxQEXA==" + }, "node_modules/mkdirp": { "version": "0.5.6", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", @@ -12659,12 +12583,6 @@ "ms": "^2.1.1" } }, - "node_modules/portfinder/node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - }, "node_modules/portfinder/node_modules/ms": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", @@ -13444,12 +13362,6 @@ "renderkid": "^2.0.4" } }, - "node_modules/pretty-error/node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - }, "node_modules/pretty-ms": { 
"version": "0.1.0", "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-0.1.0.tgz", @@ -14393,12 +14305,6 @@ "entities": "^2.0.0" } }, - "node_modules/renderkid/node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - }, "node_modules/repeat-element": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/repeat-element/-/repeat-element-1.1.4.tgz", @@ -15652,6 +15558,27 @@ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", "dev": true }, + "node_modules/speech-rule-engine": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/speech-rule-engine/-/speech-rule-engine-4.0.7.tgz", + "integrity": "sha512-sJrL3/wHzNwJRLBdf6CjJWIlxC04iYKkyXvYSVsWVOiC2DSkHmxsqOhEeMsBA9XK+CHuNcsdkbFDnoUfAsmp9g==", + "dependencies": { + "commander": "9.2.0", + "wicked-good-xpath": "1.3.0", + "xmldom-sre": "0.1.31" + }, + "bin": { + "sre": "bin/sre" + } + }, + "node_modules/speech-rule-engine/node_modules/commander": { + "version": "9.2.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-9.2.0.tgz", + "integrity": "sha512-e2i4wANQiSXgnrBlIatyHtP1odfUp0BbV5Y5nEGbxtIrStkEOAAzCUirvLBNXHLr7kwLvJl6V+4V3XV9x7Wd9w==", + "engines": { + "node": "^12.20.0 || >=14" + } + }, "node_modules/split-string": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/split-string/-/split-string-3.1.0.tgz", @@ -16255,12 +16182,6 @@ "node": ">=4" } }, - "node_modules/table/node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - }, "node_modules/table/node_modules/string-width": { "version": "2.1.1", "resolved": 
"https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", @@ -19683,6 +19604,11 @@ "node": ">= 0.4" } }, + "node_modules/wicked-good-xpath": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/wicked-good-xpath/-/wicked-good-xpath-1.3.0.tgz", + "integrity": "sha512-Gd9+TUn5nXdwj/hFsPVx5cuHHiF5Bwuc30jZ4+ronF1qHK5O7HD0sgmXWSEgwKquT3ClLoKPVbO6qGwVwLzvAw==" + }, "node_modules/window-size": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/window-size/-/window-size-0.1.0.tgz", @@ -19783,6 +19709,14 @@ "node": ">=4.0" } }, + "node_modules/xmldom-sre": { + "version": "0.1.31", + "resolved": "https://registry.npmjs.org/xmldom-sre/-/xmldom-sre-0.1.31.tgz", + "integrity": "sha512-f9s+fUkX04BxQf+7mMWAp5zk61pciie+fFLC9hX9UVvCeJQfNHRHXpeo5MPcR0EUf57PYLdt+ZO4f3Ipk2oZUw==", + "engines": { + "node": ">=0.1" + } + }, "node_modules/xmlhttprequest-ssl": { "version": "1.5.5", "resolved": "https://registry.npmjs.org/xmlhttprequest-ssl/-/xmlhttprequest-ssl-1.5.5.tgz", @@ -20843,14 +20777,6 @@ "slash": "^1.0.0", "source-map": "^0.5.6", "v8flags": "^2.1.1" - }, - "dependencies": { - "lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - } } }, "babel-code-frame": { @@ -20889,14 +20815,6 @@ "private": "^0.1.8", "slash": "^1.0.0", "source-map": "^0.5.7" - }, - "dependencies": { - "lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - } } }, "babel-generator": { @@ -20913,14 +20831,6 @@ "lodash": "^4.17.4", "source-map": "^0.5.7", "trim-right": "^1.0.1" - }, - "dependencies": { - "lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - 
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - } } }, "babel-helper-builder-binary-assignment-operator-visitor": { @@ -20956,14 +20866,6 @@ "babel-runtime": "^6.26.0", "babel-types": "^6.26.0", "lodash": "^4.17.4" - }, - "dependencies": { - "lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - } } }, "babel-helper-explode-assignable-expression": { @@ -21029,14 +20931,6 @@ "babel-runtime": "^6.26.0", "babel-types": "^6.26.0", "lodash": "^4.17.4" - }, - "dependencies": { - "lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - } } }, "babel-helper-remap-async-to-generator": { @@ -21163,14 +21057,6 @@ "babel-traverse": "^6.26.0", "babel-types": "^6.26.0", "lodash": "^4.17.4" - }, - "dependencies": { - "lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - } } }, "babel-plugin-transform-es2015-classes": { @@ -21480,14 +21366,6 @@ "lodash": "^4.17.4", "mkdirp": "^0.5.1", "source-map-support": "^0.4.15" - }, - "dependencies": { - "lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - } } }, "babel-runtime": { @@ -21519,14 +21397,6 @@ "babel-types": "^6.26.0", "babylon": "^6.18.0", "lodash": "^4.17.4" - }, - "dependencies": { - "lodash": { - "version": "4.17.21", - 
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - } } }, "babel-traverse": { @@ -21544,14 +21414,6 @@ "globals": "^9.18.0", "invariant": "^2.2.2", "lodash": "^4.17.4" - }, - "dependencies": { - "lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - } } }, "babel-types": { @@ -21564,14 +21426,6 @@ "esutils": "^2.0.2", "lodash": "^4.17.4", "to-fast-properties": "^1.0.3" - }, - "dependencies": { - "lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - } } }, "babylon": { @@ -22505,14 +22359,6 @@ "dev": true, "requires": { "lodash": "^4.5.0" - }, - "dependencies": { - "lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - } } }, "combined-stream": { @@ -23985,14 +23831,6 @@ "table": "^3.7.8", "text-table": "~0.2.0", "user-home": "^2.0.0" - }, - "dependencies": { - "lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - } } }, "eslint-config-google": { @@ -24436,12 +24274,6 @@ "is-extglob": "^2.1.1" } }, - "lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": 
"sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - }, "micromatch": { "version": "3.1.10", "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", @@ -24517,6 +24349,11 @@ } } }, + "esm": { + "version": "3.2.25", + "resolved": "https://registry.npmjs.org/esm/-/esm-3.2.25.tgz", + "integrity": "sha512-U1suiZ2oDVWv4zPO56S0NcR5QriEahGtdN2OR6FiOG4WJvcjBVFB0qI4+eKoWFH483PKGuLuu6V8Z4T5g63UVA==" + }, "espree": { "version": "3.5.4", "resolved": "https://registry.npmjs.org/espree/-/espree-3.5.4.tgz", @@ -25044,9 +24881,9 @@ } }, "follow-redirects": { - "version": "1.15.3", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.3.tgz", - "integrity": "sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q==", + "version": "1.15.4", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.4.tgz", + "integrity": "sha512-Cr4D/5wlrb0z9dgERpUL3LrmPKVDsETIJhaCMeDfuFYcqa5bldGV6wBsAN6X/vxlXQtFBMrXdXxdL8CbDTGniw==", "dev": true }, "for-each": { @@ -25974,14 +25811,6 @@ "chalk": "^1.1.0", "file-sync-cmp": "^0.1.0", "lodash": "^4.11.0" - }, - "dependencies": { - "lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - } } }, "grunt-svgmin": { @@ -26577,12 +26406,6 @@ "object-assign": "^4.0.1" } }, - "lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - }, "param-case": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/param-case/-/param-case-2.1.1.tgz", @@ -26914,12 +26737,6 @@ "is-extglob": "^2.1.1" } }, - "lodash": { - "version": "4.17.21", 
- "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - }, "micromatch": { "version": "3.1.10", "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", @@ -27213,14 +27030,6 @@ "string-width": "^1.0.1", "strip-ansi": "^3.0.0", "through": "^2.3.6" - }, - "dependencies": { - "lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - } } }, "internal-ip": { @@ -27266,9 +27075,9 @@ "dev": true }, "ip": { - "version": "1.1.8", - "resolved": "https://registry.npmjs.org/ip/-/ip-1.1.8.tgz", - "integrity": "sha512-PuExPYUiu6qMBQb4l06ecm6T6ujzhmh+MeJcW9wa89PoAz5pvd4zPgN5WJV104mb6S2T1AwNIAaB70JNrLQWhg==", + "version": "1.1.9", + "resolved": "https://registry.npmjs.org/ip/-/ip-1.1.9.tgz", + "integrity": "sha512-cyRxvOEpNHNtchU3Ln9KC/auJgup87llfQpQ+t5ghoC/UhL16SWzbueiCsdTnWmqAWl7LadfuwhlqmtOaqMHdQ==", "dev": true }, "ip-regex": { @@ -28401,12 +28210,6 @@ "is-extglob": "^2.1.1" } }, - "lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - }, "micromatch": { "version": "3.1.10", "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", @@ -28479,12 +28282,6 @@ "get-stdin": "^4.0.1", "meow": "^3.3.0" } - }, - "lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true } } }, @@ -28679,9 +28476,9 @@ } }, "lodash": { - "version": "3.9.3", - "resolved": 
"https://registry.npmjs.org/lodash/-/lodash-3.9.3.tgz", - "integrity": "sha512-v5SKZhnCUujcTpFpHEIJZDVcBM2OYjROx732HyJ6kzKZtwStTb4LG6noqmK9etHqDNhf6X7itXx5s0hTpAXPpQ==" + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" }, "lodash._arraycopy": { "version": "3.0.0", @@ -28935,9 +28732,22 @@ "optional": true }, "mathjax": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/mathjax/-/mathjax-2.7.0.tgz", - "integrity": "sha512-DfceiJr14b846D77BYgDGUBEB+XLQc3huSbJG4ZUKGo8drbpOXeVo7ke6JZv163vNn5bOX0VMGxwwaOWwyQEew==" + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/mathjax/-/mathjax-3.0.0.tgz", + "integrity": "sha512-z4uLbDHNbs/aRuR6zCcnzwFQuMixkHCcWqgVaommfK/3cA1Ahq7OXemn+m8JwTYcBApSHgcrSbPr9sm3sZFL+A==", + "requires": { + "mathjax-full": "git://github.com/mathjax/MathJax-src.git" + } + }, + "mathjax-full": { + "version": "git+ssh://git@github.com/mathjax/MathJax-src.git#8565f9da973238e4c9571a86a4bcb281b1d98d9b", + "from": "mathjax-full@git://github.com/mathjax/MathJax-src.git", + "requires": { + "esm": "^3.2.25", + "mhchemparser": "^4.1.0", + "mj-context-menu": "^0.6.1", + "speech-rule-engine": "^4.0.6" + } }, "maxmin": { "version": "0.1.0", @@ -29049,6 +28859,11 @@ "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==", "dev": true }, + "mhchemparser": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/mhchemparser/-/mhchemparser-4.2.1.tgz", + "integrity": "sha512-kYmyrCirqJf3zZ9t/0wGgRZ4/ZJw//VwaRVGA75C4nhE60vtnIzhl9J9ndkX/h6hxSN7pjg/cE0VxbnNM+bnDQ==" + }, "micromatch": { "version": "2.3.11", "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-2.3.11.tgz", @@ -29275,6 +29090,11 @@ } } }, + "mj-context-menu": { + "version": "0.6.1", + "resolved": 
"https://registry.npmjs.org/mj-context-menu/-/mj-context-menu-0.6.1.tgz", + "integrity": "sha512-7NO5s6n10TIV96d4g2uDpG7ZDpIhMh0QNfGdJw/W47JswFcosz457wqz/b5sAKvl12sxINGFCn80NZHKwxQEXA==" + }, "mkdirp": { "version": "0.5.6", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", @@ -30487,12 +30307,6 @@ "ms": "^2.1.1" } }, - "lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - }, "ms": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", @@ -31171,14 +30985,6 @@ "requires": { "lodash": "^4.17.20", "renderkid": "^2.0.4" - }, - "dependencies": { - "lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - } } }, "pretty-ms": { @@ -31981,12 +31787,6 @@ "domutils": "^2.5.2", "entities": "^2.0.0" } - }, - "lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true } } }, @@ -33065,6 +32865,23 @@ } } }, + "speech-rule-engine": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/speech-rule-engine/-/speech-rule-engine-4.0.7.tgz", + "integrity": "sha512-sJrL3/wHzNwJRLBdf6CjJWIlxC04iYKkyXvYSVsWVOiC2DSkHmxsqOhEeMsBA9XK+CHuNcsdkbFDnoUfAsmp9g==", + "requires": { + "commander": "9.2.0", + "wicked-good-xpath": "1.3.0", + "xmldom-sre": "0.1.31" + }, + "dependencies": { + "commander": { + "version": "9.2.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-9.2.0.tgz", + "integrity": "sha512-e2i4wANQiSXgnrBlIatyHtP1odfUp0BbV5Y5nEGbxtIrStkEOAAzCUirvLBNXHLr7kwLvJl6V+4V3XV9x7Wd9w==" + } 
+ } + }, "split-string": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/split-string/-/split-string-3.1.0.tgz", @@ -33563,12 +33380,6 @@ "integrity": "sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w==", "dev": true }, - "lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - }, "string-width": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", @@ -36387,6 +36198,11 @@ "has-tostringtag": "^1.0.0" } }, + "wicked-good-xpath": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/wicked-good-xpath/-/wicked-good-xpath-1.3.0.tgz", + "integrity": "sha512-Gd9+TUn5nXdwj/hFsPVx5cuHHiF5Bwuc30jZ4+ronF1qHK5O7HD0sgmXWSEgwKquT3ClLoKPVbO6qGwVwLzvAw==" + }, "window-size": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/window-size/-/window-size-0.1.0.tgz", @@ -36469,6 +36285,11 @@ "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==", "dev": true }, + "xmldom-sre": { + "version": "0.1.31", + "resolved": "https://registry.npmjs.org/xmldom-sre/-/xmldom-sre-0.1.31.tgz", + "integrity": "sha512-f9s+fUkX04BxQf+7mMWAp5zk61pciie+fFLC9hX9UVvCeJQfNHRHXpeo5MPcR0EUf57PYLdt+ZO4f3Ipk2oZUw==" + }, "xmlhttprequest-ssl": { "version": "1.5.5", "resolved": "https://registry.npmjs.org/xmlhttprequest-ssl/-/xmlhttprequest-ssl-1.5.5.tgz", diff --git a/zeppelin-web/package.json b/zeppelin-web/package.json index 27f769f05b4..25404abb17a 100644 --- a/zeppelin-web/package.json +++ b/zeppelin-web/package.json @@ -24,7 +24,7 @@ "karma-test": "karma start karma.conf.js" }, "dependencies": { - "mathjax": "2.7.0", + "mathjax": "3.0.0", "ace-builds": "1.3.2", "angular": "1.5.7", "angular-animate": "1.5.7", @@ -55,7 +55,7 @@ "diff": "3.3.0", 
"json3": "~3.3.1", "jszip": "2.6.1", - "lodash": "~3.9.3", + "lodash": "4.17.21", "ng-focus-if": "~1.0.2", "ng-sortable": "~1.3.3", "ng-infinite-scroll": "^1.3.0", diff --git a/zeppelin-web/pom.xml b/zeppelin-web/pom.xml index c4ce1e5db84..3b7ba038364 100644 --- a/zeppelin-web/pom.xml +++ b/zeppelin-web/pom.xml @@ -23,7 +23,7 @@ zeppelin org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 zeppelin-web @@ -42,12 +42,6 @@ - - org.apache.zeppelin - spark-interpreter - ${project.version} - test - org.apache.zeppelin zeppelin-shell @@ -66,12 +60,6 @@ ${project.version} test - - org.apache.zeppelin - zeppelin-python - ${project.version} - test - org.apache.zeppelin zeppelin-server diff --git a/zeppelin-web/src/app/helium/helium.controller.js b/zeppelin-web/src/app/helium/helium.controller.js index 8de38a09484..2a887066cbc 100644 --- a/zeppelin-web/src/app/helium/helium.controller.js +++ b/zeppelin-web/src/app/helium/helium.controller.js @@ -13,6 +13,7 @@ */ import {HeliumType} from './helium-type'; +import _ from 'lodash'; export default function HeliumCtrl($scope, $rootScope, $sce, baseUrlSrv, ngToast, heliumService) { @@ -374,7 +375,7 @@ export default function HeliumCtrl($scope, $rootScope, $sce, }; $scope.getDescriptionText = function(pkgSearchResult) { - return $sce.trustAsHtml(pkgSearchResult.pkg.description); + return pkgSearchResult.pkg.description; }; init(); diff --git a/zeppelin-web/src/app/interpreter/interpreter.controller.js b/zeppelin-web/src/app/interpreter/interpreter.controller.js index dddae0022cf..9966ffa39c5 100644 --- a/zeppelin-web/src/app/interpreter/interpreter.controller.js +++ b/zeppelin-web/src/app/interpreter/interpreter.controller.js @@ -13,6 +13,7 @@ */ import {ParagraphStatus} from '../notebook/paragraph/paragraph.status'; +import _ from 'lodash'; angular.module('zeppelinWebApp').controller('InterpreterCtrl', InterpreterCtrl); @@ -443,7 +444,7 @@ function InterpreterCtrl($rootScope, $scope, $http, baseUrlSrv, ngToast, $timeou }; 
$scope.newInterpreterGroupChange = function() { - let el = _.pluck(_.filter($scope.availableInterpreters, {'name': $scope.newInterpreterSetting.group}), + let el = _.map(_.filter($scope.availableInterpreters, {'name': $scope.newInterpreterSetting.group}), 'properties'); let properties = {}; for (let i = 0; i < el.length; i++) { diff --git a/zeppelin-web/src/app/jobmanager/job/job.component.js b/zeppelin-web/src/app/jobmanager/job/job.component.js index 982fa285c69..200506e4a8d 100644 --- a/zeppelin-web/src/app/jobmanager/job/job.component.js +++ b/zeppelin-web/src/app/jobmanager/job/job.component.js @@ -13,6 +13,7 @@ */ import moment from 'moment'; +import _ from 'lodash'; import {ParagraphStatus} from '../../notebook/paragraph/paragraph.status'; import {getJobColorByStatus, getJobIconByStatus} from '../job-status'; diff --git a/zeppelin-web/src/app/notebook-repository/notebook-repository.controller.js b/zeppelin-web/src/app/notebook-repository/notebook-repository.controller.js index d6d13b32c79..049f938c251 100644 --- a/zeppelin-web/src/app/notebook-repository/notebook-repository.controller.js +++ b/zeppelin-web/src/app/notebook-repository/notebook-repository.controller.js @@ -12,6 +12,8 @@ * limitations under the License. 
*/ +import _ from 'lodash'; + angular.module('zeppelinWebApp').controller('NotebookRepositoryCtrl', NotebookRepositoryCtrl); function NotebookRepositoryCtrl($http, baseUrlSrv, ngToast) { diff --git a/zeppelin-web/src/app/notebook/notebook.controller.js b/zeppelin-web/src/app/notebook/notebook.controller.js index 20e6d267b8c..ecf8733f752 100644 --- a/zeppelin-web/src/app/notebook/notebook.controller.js +++ b/zeppelin-web/src/app/notebook/notebook.controller.js @@ -13,6 +13,7 @@ */ import moment from 'moment'; +import _ from 'lodash'; import {isParagraphRunning} from './paragraph/paragraph.status'; diff --git a/zeppelin-web/src/app/notebook/paragraph/paragraph.controller.js b/zeppelin-web/src/app/notebook/paragraph/paragraph.controller.js index 4053cd8af3e..74c3b30c51d 100644 --- a/zeppelin-web/src/app/notebook/paragraph/paragraph.controller.js +++ b/zeppelin-web/src/app/notebook/paragraph/paragraph.controller.js @@ -16,6 +16,7 @@ import {SpellResult} from '../../spell'; import {isParagraphRunning, ParagraphStatus} from './paragraph.status'; import moment from 'moment'; +import _ from 'lodash'; import DiffMatchPatch from 'diff-match-patch'; require('moment-duration-format'); diff --git a/zeppelin-web/src/app/notebook/paragraph/result/result.controller.js b/zeppelin-web/src/app/notebook/paragraph/result/result.controller.js index d55c51e025f..b31c9c7977e 100644 --- a/zeppelin-web/src/app/notebook/paragraph/result/result.controller.js +++ b/zeppelin-web/src/app/notebook/paragraph/result/result.controller.js @@ -13,6 +13,7 @@ */ import moment from 'moment'; +import _ from 'lodash'; import DatasetFactory from '../../../tabledata/datasetfactory'; import TableVisualization from '../../../visualization/builtins/visualization-table'; diff --git a/zeppelin-web/src/app/tabledata/networkdata.js b/zeppelin-web/src/app/tabledata/networkdata.js index 368254d3f22..a0278bebc25 100644 --- a/zeppelin-web/src/app/tabledata/networkdata.js +++ 
b/zeppelin-web/src/app/tabledata/networkdata.js @@ -14,6 +14,7 @@ import TableData from './tabledata'; import {DatasetType} from './dataset'; +import _ from 'lodash'; /** * Create network data object from paragraph graph type result diff --git a/zeppelin-web/src/app/visualization/builtins/visualization-d3network.js b/zeppelin-web/src/app/visualization/builtins/visualization-d3network.js index 749e4344dca..6d5c3bc86e4 100644 --- a/zeppelin-web/src/app/visualization/builtins/visualization-d3network.js +++ b/zeppelin-web/src/app/visualization/builtins/visualization-d3network.js @@ -14,6 +14,7 @@ import Visualization from '../visualization'; import NetworkTransformation from '../../tabledata/network'; +import _ from 'lodash'; /** * Visualize data in network format diff --git a/zeppelin-web/src/components/note-action/note-action.service.js b/zeppelin-web/src/components/note-action/note-action.service.js index 215795ca010..ed8ab0eeda3 100644 --- a/zeppelin-web/src/components/note-action/note-action.service.js +++ b/zeppelin-web/src/components/note-action/note-action.service.js @@ -12,6 +12,8 @@ * limitations under the License. */ +import _ from 'lodash'; + angular.module('zeppelinWebApp').service('noteActionService', noteActionService); function noteActionService(websocketMsgSrv, $location, noteRenameService, noteListFactory) { diff --git a/zeppelin-web/src/components/note-list/note-list.factory.js b/zeppelin-web/src/components/note-list/note-list.factory.js index ecb04bfbcd5..4059ae22b64 100644 --- a/zeppelin-web/src/components/note-list/note-list.factory.js +++ b/zeppelin-web/src/components/note-list/note-list.factory.js @@ -12,6 +12,8 @@ * limitations under the License. 
*/ +import _ from 'lodash'; + angular.module('zeppelinWebApp').factory('noteListFactory', NoteListFactory); function NoteListFactory(arrayOrderingSrv, TRASH_FOLDER_ID) { diff --git a/zeppelin-web/src/components/websocket/websocket-event.factory.js b/zeppelin-web/src/components/websocket/websocket-event.factory.js index 8cbf34cd3d4..5a006b86fdf 100644 --- a/zeppelin-web/src/components/websocket/websocket-event.factory.js +++ b/zeppelin-web/src/components/websocket/websocket-event.factory.js @@ -12,6 +12,8 @@ * limitations under the License. */ +import _ from 'lodash'; + angular.module('zeppelinWebApp').factory('websocketEvents', WebsocketEventFactory); function WebsocketEventFactory($rootScope, $websocket, $location, baseUrlSrv, saveAsService, ngToast) { diff --git a/zeppelin-web/src/index.html b/zeppelin-web/src/index.html index a149af0db64..844cae97530 100644 --- a/zeppelin-web/src/index.html +++ b/zeppelin-web/src/index.html @@ -192,7 +192,7 @@ - + @@ -203,13 +203,12 @@ - - + diff --git a/zeppelin-zengine/pom.xml b/zeppelin-zengine/pom.xml index 01396dd317c..2e0dd38fcb9 100644 --- a/zeppelin-zengine/pom.xml +++ b/zeppelin-zengine/pom.xml @@ -24,7 +24,7 @@ zeppelin org.apache.zeppelin - 0.11.1.3.3.6.0-1 + 0.11.2.3.3.6.0-1 zeppelin-zengine @@ -38,10 +38,22 @@ 0.9.8 1.4.01 2.6.0 + + 2.9.8 4.5.4.201711221230-r 1.6 + + + + com.fasterxml.jackson.core + jackson-annotations + ${jackson.annocations.version} + + + + ${project.groupId} @@ -198,10 +210,7 @@ commons-vfs2 ${commons.vfs2.version} - - org.codehaus.plexus - plexus-utils - + org.apache.hadoop hadoop-hdfs-client @@ -258,6 +267,13 @@ test + + org.apache.zeppelin + zeppelin-test + ${project.version} + test + + org.apache.commons commons-lang3 @@ -308,62 +324,18 @@ - hadoop2 - + hadoop3 true - - - ${hadoop2.7.version} - - - - org.apache.hadoop - hadoop-common - - - log4j - log4j - - - - - - org.apache.hadoop - hadoop-yarn-client - - - - - - hadoop3 - - ${hadoop3.2.version} - hadoop-client-api - hadoop-client-runtime - 
hadoop-client-minicluster - + ${hadoop3.3.version} org.apache.hadoop - ${hadoop-client-runtime.artifact} - - - - org.apache.hadoop - ${hadoop-client-minicluster.artifact} - test - ${hadoop.version} - - - junit - junit - - + hadoop-client-runtime diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/helium/Helium.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/helium/Helium.java index ab022227d04..9422c7bffe8 100644 --- a/zeppelin-zengine/src/main/java/org/apache/zeppelin/helium/Helium.java +++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/helium/Helium.java @@ -65,6 +65,8 @@ public class Helium { private final HeliumApplicationFactory applicationFactory; private final InterpreterSettingManager interpreterSettingManager; + private final ZeppelinConfiguration conf; + @Inject public Helium( ZeppelinConfiguration conf, @@ -77,7 +79,8 @@ public Helium( new File(conf.getAbsoluteDir(ConfVars.ZEPPELIN_DEP_LOCALREPO), "helium-registry-cache"), heliumBundleFactory, heliumApplicationFactory, - interpreterSettingManager); + interpreterSettingManager, + conf); } @VisibleForTesting @@ -87,7 +90,8 @@ public Helium( File registryCacheDir, HeliumBundleFactory bundleFactory, HeliumApplicationFactory applicationFactory, - InterpreterSettingManager interpreterSettingManager) + InterpreterSettingManager interpreterSettingManager, + ZeppelinConfiguration conf) throws IOException { this.heliumConfPath = heliumConfPath; this.registryPaths = registryPaths; @@ -95,6 +99,7 @@ public Helium( this.bundleFactory = bundleFactory; this.applicationFactory = applicationFactory; this.interpreterSettingManager = interpreterSettingManager; + this.conf = conf; heliumConf = loadConf(heliumConfPath); allPackages = getAllPackageInfo(); @@ -134,7 +139,7 @@ private synchronized HeliumConf loadConf(String path) throws IOException { for (String uri : paths) { if (uri.startsWith("http://") || uri.startsWith("https://")) { logger.info("Add helium online registry {}", uri); - 
registry.add(new HeliumOnlineRegistry(uri, uri, registryCacheDir)); + registry.add(new HeliumOnlineRegistry(uri, uri, registryCacheDir, conf)); } else { logger.info("Add helium local registry {}", uri); registry.add(new HeliumLocalRegistry(uri, uri)); diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/helium/HeliumBundleFactory.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/helium/HeliumBundleFactory.java index 9dff0794293..a10d62b309d 100644 --- a/zeppelin-zengine/src/main/java/org/apache/zeppelin/helium/HeliumBundleFactory.java +++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/helium/HeliumBundleFactory.java @@ -70,9 +70,9 @@ */ public class HeliumBundleFactory { private static final Logger LOGGER = LoggerFactory.getLogger(HeliumBundleFactory.class); - private static final String NODE_VERSION = "v6.9.1"; - private static final String NPM_VERSION = "3.10.8"; - private static final String YARN_VERSION = "v0.21.3"; + private static final String NODE_VERSION = "v20.15.0"; + private static final String NPM_VERSION = "10.7.0"; + private static final String YARN_VERSION = "v1.22.22"; private static final String NPM_PACKAGE_NAME = "npm"; protected static final String HELIUM_LOCAL_REPO = "helium-bundle"; private static final String HELIUM_BUNDLES_DIR = "bundles"; @@ -288,15 +288,17 @@ public boolean accept(File pathname) { // if online package String version = nameAndVersion[1]; File tgz = new File(heliumLocalRepoDirectory, pkg.getName() + "-" + version + ".tgz"); - tgz.delete(); + FileUtils.deleteQuietly(tgz); // wget, extract and move dir to `bundles/${pkg.getName()}`, and remove tgz npmCommand(fpf, "pack " + pkg.getArtifact()); File extracted = new File(heliumBundleDirectory, "package"); FileUtils.deleteDirectory(extracted); List entries = unTgz(tgz, heliumBundleDirectory); - for (String entry: entries) LOGGER.debug("Extracted " + entry); - tgz.delete(); + for (String entry : entries) { + LOGGER.debug("Extracted {}", entry); + } + 
FileUtils.deleteQuietly(tgz); FileUtils.copyDirectory(extracted, bundleDir); FileUtils.deleteDirectory(extracted); } diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/helium/HeliumOnlineRegistry.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/helium/HeliumOnlineRegistry.java index 58ddcd8e9f5..aa4597cd264 100644 --- a/zeppelin-zengine/src/main/java/org/apache/zeppelin/helium/HeliumOnlineRegistry.java +++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/helium/HeliumOnlineRegistry.java @@ -62,9 +62,12 @@ public class HeliumOnlineRegistry extends HeliumRegistry { private Logger logger = LoggerFactory.getLogger(HeliumOnlineRegistry.class); private final Gson gson; private final File registryCacheFile; + private final ZeppelinConfiguration zConf; - public HeliumOnlineRegistry(String name, String uri, File registryCacheDir) { + public HeliumOnlineRegistry(String name, String uri, File registryCacheDir, + ZeppelinConfiguration zConf) { super(name, uri); + this.zConf = zConf; registryCacheDir.mkdirs(); UUID registryCacheFileUuid = UUID.nameUUIDFromBytes(uri.getBytes()); this.registryCacheFile = new File(registryCacheDir, registryCacheFileUuid.toString()); @@ -81,10 +84,9 @@ public synchronized List getAll() throws IOException { HttpGet get = new HttpGet(uri()); HttpResponse response; try { - ZeppelinConfiguration cfg = ZeppelinConfiguration.create(); - if ((get.getURI().getHost().equals(cfg.getS3Endpoint()))) { - if (cfg.getS3Timeout() != null) { - int timeout = Integer.valueOf(cfg.getS3Timeout()); + if ((get.getURI().getHost().equals(zConf.getS3Endpoint()))) { + if (zConf.getS3Timeout() != null) { + int timeout = Integer.parseInt(zConf.getS3Timeout()); RequestConfig requestCfg = RequestConfig.custom() .setConnectTimeout(timeout) .setSocketTimeout(timeout) @@ -100,7 +102,9 @@ public synchronized List getAll() throws IOException { if (response.getStatusLine().getStatusCode() != 200) { // try read from cache - logger.error(uri() + " returned " + 
response.getStatusLine().toString()); + if (logger.isErrorEnabled()) { + logger.error("{} returned {}", uri(), response.getStatusLine()); + } return readFromCache(); } else { List packageList = new LinkedList<>(); diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/helium/HeliumRegistry.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/helium/HeliumRegistry.java index 13b4325d13c..4971063ee04 100644 --- a/zeppelin-zengine/src/main/java/org/apache/zeppelin/helium/HeliumRegistry.java +++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/helium/HeliumRegistry.java @@ -26,7 +26,7 @@ public abstract class HeliumRegistry { private final String name; private final String uri; - public HeliumRegistry(String name, String uri) { + protected HeliumRegistry(String name, String uri) { this.name = name; this.uri = uri; } diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterSetting.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterSetting.java index 9ec613cac82..71d390b27ed 100644 --- a/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterSetting.java +++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterSetting.java @@ -45,7 +45,6 @@ import org.apache.zeppelin.interpreter.remote.RemoteInterpreter; import org.apache.zeppelin.interpreter.remote.RemoteInterpreterProcess; import org.apache.zeppelin.interpreter.remote.RemoteInterpreterProcessListener; -import org.apache.zeppelin.notebook.Note; import org.apache.zeppelin.plugin.PluginManager; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -133,7 +132,8 @@ public class InterpreterSetting { private transient ApplicationEventListener appEventListener; private transient DependencyResolver dependencyResolver; - private transient ZeppelinConfiguration conf = ZeppelinConfiguration.create(); + private transient ZeppelinConfiguration conf; + private transient PluginManager pluginManager; private 
transient RecoveryStorage recoveryStorage; private transient RemoteInterpreterEventServer interpreterEventServer; @@ -199,6 +199,12 @@ public Builder setDependencies(List dependencies) { public Builder setConf(ZeppelinConfiguration conf) { interpreterSetting.conf = conf; + interpreterSetting.option.setConf(conf); + return this; + } + + public Builder setPluginManager(PluginManager pluginManager) { + interpreterSetting.pluginManager = pluginManager; return this; } @@ -288,10 +294,11 @@ public InterpreterSetting(InterpreterSetting o) { this.interpreterDir = o.getInterpreterDir(); this.interpreterRunner = o.getInterpreterRunner(); this.conf = o.getConf(); + this.pluginManager = o.getPluginMananger(); } private InterpreterLauncher createLauncher(Properties properties) throws IOException { - return PluginManager.get().loadInterpreterLauncher( + return pluginManager.loadInterpreterLauncher( getLauncherPlugin(properties), recoveryStorage); } @@ -656,6 +663,16 @@ public ZeppelinConfiguration getConf() { public InterpreterSetting setConf(ZeppelinConfiguration conf) { this.conf = conf; + this.option.setConf(conf); + return this; + } + + public PluginManager getPluginMananger() { + return pluginManager; + } + + public InterpreterSetting setPluginMananger(PluginManager pluginManager) { + this.pluginManager = pluginManager; return this; } @@ -823,7 +840,7 @@ List createInterpreters(String user, String interpreterGroupId, Str Properties intpProperties = getJavaProperties(); for (InterpreterInfo info : interpreterInfos) { Interpreter interpreter = new RemoteInterpreter(intpProperties, sessionId, - info.getClassName(), user); + info.getClassName(), user, conf); if (info.isDefaultInterpreter()) { interpreters.add(0, interpreter); } else { @@ -920,7 +937,7 @@ private String getInterpreterClassFromInterpreterSetting(String replName) { private ManagedInterpreterGroup createInterpreterGroup(String groupId) { AngularObjectRegistry angularObjectRegistry; - ManagedInterpreterGroup 
interpreterGroup = new ManagedInterpreterGroup(groupId, this); + ManagedInterpreterGroup interpreterGroup = new ManagedInterpreterGroup(groupId, this, conf); angularObjectRegistry = new RemoteAngularObjectRegistry(groupId, angularObjectRegistryListener, interpreterGroup); interpreterGroup.setAngularObjectRegistry(angularObjectRegistry); diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterSettingManager.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterSettingManager.java index 72d79a348d9..2475ae609a2 100644 --- a/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterSettingManager.java +++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/InterpreterSettingManager.java @@ -61,6 +61,7 @@ import org.apache.zeppelin.notebook.Notebook; import org.apache.zeppelin.notebook.Paragraph; import org.apache.zeppelin.notebook.ParagraphTextParser; +import org.apache.zeppelin.plugin.PluginManager; import org.apache.zeppelin.resource.Resource; import org.apache.zeppelin.resource.ResourcePool; import org.apache.zeppelin.resource.ResourceSet; @@ -116,6 +117,7 @@ public class InterpreterSettingManager implements NoteEventListener, ClusterEven "editOnDblClick", false); private final ZeppelinConfiguration conf; + private final PluginManager pluginManager; private final Path interpreterDirPath; /** @@ -155,13 +157,16 @@ public InterpreterSettingManager(ZeppelinConfiguration zeppelinConfiguration, AngularObjectRegistryListener angularObjectRegistryListener, RemoteInterpreterProcessListener remoteInterpreterProcessListener, - ApplicationEventListener appEventListener) + ApplicationEventListener appEventListener, + ConfigStorage storage, + PluginManager pluginManager) throws IOException { this(zeppelinConfiguration, new InterpreterOption(), angularObjectRegistryListener, remoteInterpreterProcessListener, appEventListener, - ConfigStorage.getInstance(zeppelinConfiguration)); + storage, + 
pluginManager); } public InterpreterSettingManager(ZeppelinConfiguration conf, @@ -169,14 +174,16 @@ public InterpreterSettingManager(ZeppelinConfiguration conf, AngularObjectRegistryListener angularObjectRegistryListener, RemoteInterpreterProcessListener remoteInterpreterProcessListener, ApplicationEventListener appEventListener, - ConfigStorage configStorage) + ConfigStorage configStorage, + PluginManager pluginManager) throws IOException { this.conf = conf; + this.pluginManager = pluginManager; this.defaultOption = defaultOption; this.interpreterDirPath = Paths.get(conf.getInterpreterDir()); LOGGER.debug("InterpreterRootPath: {}", interpreterDirPath); this.dependencyResolver = - new DependencyResolver(conf.getString(ConfVars.ZEPPELIN_INTERPRETER_LOCALREPO)); + new DependencyResolver(conf.getString(ConfVars.ZEPPELIN_INTERPRETER_LOCALREPO), conf); this.interpreterRepositories = dependencyResolver.getRepos(); this.defaultInterpreterGroup = conf.getString(ConfVars.ZEPPELIN_INTERPRETER_GROUP_DEFAULT); this.gson = new GsonBuilder().setPrettyPrinting().create(); @@ -230,6 +237,7 @@ private void initInterpreterSetting(InterpreterSetting interpreterSetting) { .setDependencyResolver(dependencyResolver) .setRecoveryStorage(recoveryStorage) .setInterpreterEventServer(interpreterEventServer) + .setPluginMananger(pluginManager) .postProcessing(); } @@ -532,6 +540,7 @@ private void registerInterpreterSetting(List registeredIn .setInterpreterDir(interpreterDir) .setRunner(runner) .setConf(conf) + .setPluginManager(pluginManager) .setIntepreterSettingManager(this) .create(); diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/ManagedInterpreterGroup.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/ManagedInterpreterGroup.java index 51aeab7b98d..c1b5076167b 100644 --- a/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/ManagedInterpreterGroup.java +++ 
b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/ManagedInterpreterGroup.java @@ -42,14 +42,17 @@ public class ManagedInterpreterGroup extends InterpreterGroup { private InterpreterSetting interpreterSetting; private RemoteInterpreterProcess remoteInterpreterProcess; // attached remote interpreter process private Object interpreterProcessCreationLock = new Object(); + private final ZeppelinConfiguration zConf; /** * Create InterpreterGroup with given id and interpreterSetting, used in ZeppelinServer * @param id * @param interpreterSetting */ - ManagedInterpreterGroup(String id, InterpreterSetting interpreterSetting) { + ManagedInterpreterGroup(String id, InterpreterSetting interpreterSetting, + ZeppelinConfiguration zConf) { super(id); + this.zConf = zConf; this.interpreterSetting = interpreterSetting; } @@ -66,7 +69,7 @@ public RemoteInterpreterProcess getOrCreateInterpreterProcess(String userName, remoteInterpreterProcess = interpreterSetting.createInterpreterProcess(id, userName, properties); remoteInterpreterProcess.start(userName); - remoteInterpreterProcess.init(ZeppelinConfiguration.create()); + remoteInterpreterProcess.init(zConf); getInterpreterSetting().getRecoveryStorage() .onInterpreterClientStart(remoteInterpreterProcess); } @@ -182,6 +185,7 @@ public synchronized List getOrCreateSession(String user, String ses } } + @Override public boolean isEmpty() { return this.sessions.isEmpty(); } diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/YarnAppMonitor.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/YarnAppMonitor.java index 598c69e2aba..56dc7ea912a 100644 --- a/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/YarnAppMonitor.java +++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/YarnAppMonitor.java @@ -44,7 +44,6 @@ public class YarnAppMonitor { private static final Logger LOGGER = LoggerFactory.getLogger(YarnAppMonitor.class); private static YarnAppMonitor 
instance; - private ZeppelinConfiguration conf; private ScheduledExecutorService executor; private YarnClient yarnClient; private ConcurrentHashMap apps; @@ -58,7 +57,6 @@ public static synchronized YarnAppMonitor get() { private YarnAppMonitor() { try { - this.conf = ZeppelinConfiguration.create(); this.yarnClient = YarnClient.createYarnClient(); YarnConfiguration yarnConf = new YarnConfiguration(); // disable timeline service as we only query yarn app here. @@ -94,8 +92,10 @@ private YarnAppMonitor() { LOGGER.warn("Fail to check yarn app status", e); } }, - conf.getInt(ConfVars.ZEPPELIN_INTERPRETER_YARN_MONITOR_INTERVAL_SECS), - conf.getInt(ConfVars.ZEPPELIN_INTERPRETER_YARN_MONITOR_INTERVAL_SECS), + ZeppelinConfiguration + .getStaticInt(ConfVars.ZEPPELIN_INTERPRETER_YARN_MONITOR_INTERVAL_SECS), + ZeppelinConfiguration + .getStaticInt(ConfVars.ZEPPELIN_INTERPRETER_YARN_MONITOR_INTERVAL_SECS), TimeUnit.SECONDS); LOGGER.info("YarnAppMonitor is started"); diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/install/InstallInterpreter.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/install/InstallInterpreter.java index d9e902b92a4..faa0ecd0a62 100644 --- a/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/install/InstallInterpreter.java +++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/install/InstallInterpreter.java @@ -43,6 +43,7 @@ public class InstallInterpreter { private final File interpreterBaseDir; private final List availableInterpreters; private final String localRepoDir; + private final ZeppelinConfiguration zConf; private URL proxyUrl; private String proxyUser; private String proxyPassword; @@ -53,11 +54,13 @@ public class InstallInterpreter { * @param interpreterBaseDir interpreter directory for installing binaries * @throws IOException */ - public InstallInterpreter(File interpreterListFile, File interpreterBaseDir, String localRepoDir) + public InstallInterpreter(File 
interpreterListFile, File interpreterBaseDir, String localRepoDir, + ZeppelinConfiguration zConf) throws IOException { this.interpreterListFile = interpreterListFile; this.interpreterBaseDir = interpreterBaseDir; this.localRepoDir = localRepoDir; + this.zConf = zConf; availableInterpreters = new LinkedList<>(); readAvailableInterpreters(); } @@ -143,7 +146,7 @@ public void install(String name) { throw new RuntimeException("Can't find interpreter '" + name + "'"); } - public void install(String [] names, String [] artifacts) { + public void install(String[] names, String[] artifacts) { if (names.length != artifacts.length) { throw new RuntimeException("Length of given names and artifacts are different"); } @@ -159,7 +162,8 @@ public void install(String name, String artifact) { Authentication auth = new AuthenticationBuilder().addUsername(proxyUser).addPassword(proxyPassword).build(); proxy = new Proxy(proxyUrl.getProtocol(), proxyUrl.getHost(), proxyUrl.getPort(), auth); } - DependencyResolver depResolver = new DependencyResolver(localRepoDir, proxy); + DependencyResolver depResolver = + new DependencyResolver(localRepoDir, proxy, zConf); File installDir = new File(interpreterBaseDir, name); if (installDir.exists()) { @@ -212,11 +216,12 @@ public static void main(String [] args) throws IOException { return; } - ZeppelinConfiguration conf = ZeppelinConfiguration.create(); + ZeppelinConfiguration conf = ZeppelinConfiguration.load(); InstallInterpreter installer = new InstallInterpreter( new File(conf.getInterpreterListPath()), new File(conf.getInterpreterDir()), - conf.getInterpreterLocalRepoPath()); + conf.getInterpreterLocalRepoPath(), + conf); String names = null; String artifacts = null; diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/recovery/StopInterpreter.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/recovery/StopInterpreter.java index ee0ceccd0ef..d7b710a1305 100644 --- 
a/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/recovery/StopInterpreter.java +++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/recovery/StopInterpreter.java @@ -21,6 +21,8 @@ import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.interpreter.InterpreterSettingManager; import org.apache.zeppelin.interpreter.launcher.InterpreterClient; +import org.apache.zeppelin.plugin.PluginManager; +import org.apache.zeppelin.storage.ConfigStorage; import org.apache.zeppelin.util.ReflectionUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -38,14 +40,16 @@ public class StopInterpreter { private static final Logger LOGGER = LoggerFactory.getLogger(StopInterpreter.class); - public static void main(String[] args) throws IOException { - ZeppelinConfiguration zConf = ZeppelinConfiguration.create(); + public StopInterpreter(ZeppelinConfiguration zConf) throws IOException { + ConfigStorage storage = ConfigStorage.createConfigStorage(zConf); + PluginManager pluginManager = new PluginManager(zConf); InterpreterSettingManager interpreterSettingManager = - new InterpreterSettingManager(zConf, null, null, null); + new InterpreterSettingManager(zConf, null, null, null, storage, pluginManager); - RecoveryStorage recoveryStorage = ReflectionUtils.createClazzInstance(zConf.getRecoveryStorageClass(), - new Class[] {ZeppelinConfiguration.class, InterpreterSettingManager.class}, - new Object[] {zConf, interpreterSettingManager}); + RecoveryStorage recoveryStorage = + ReflectionUtils.createClazzInstance(zConf.getRecoveryStorageClass(), + new Class[] { ZeppelinConfiguration.class, InterpreterSettingManager.class }, + new Object[] { zConf, interpreterSettingManager }); LOGGER.info("Using RecoveryStorage: {}", recoveryStorage.getClass().getName()); Map restoredClients = recoveryStorage.restore(); @@ -56,4 +60,9 @@ public static void main(String[] args) throws IOException { } } } + + public static void main(String[] args) 
throws IOException { + ZeppelinConfiguration zConf = ZeppelinConfiguration.load(); + new StopInterpreter(zConf); + } } diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreter.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreter.java index ba3be510482..859e4e2ddcb 100644 --- a/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreter.java +++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreter.java @@ -21,6 +21,7 @@ import com.google.gson.Gson; import com.google.gson.reflect.TypeToken; import org.apache.thrift.TException; +import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.display.AngularObject; import org.apache.zeppelin.display.AngularObjectRegistry; import org.apache.zeppelin.display.GUI; @@ -70,11 +71,13 @@ public class RemoteInterpreter extends Interpreter { public RemoteInterpreter(Properties properties, String sessionId, String className, - String userName) { + String userName, + ZeppelinConfiguration zConf) { super(properties); this.sessionId = sessionId; this.className = className; this.setUserName(userName); + this.setZeppelinConfiguration(zConf); } public boolean isOpened() { diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/AuthorizationService.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/AuthorizationService.java index 77565f3029f..da82a7138dc 100644 --- a/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/AuthorizationService.java +++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/AuthorizationService.java @@ -46,8 +46,8 @@ public class AuthorizationService implements ClusterEventListener { private static final Logger LOGGER = LoggerFactory.getLogger(AuthorizationService.class); private static final Set EMPTY_SET = new HashSet<>(); - private ZeppelinConfiguration conf; - private ConfigStorage 
configStorage; + private final ZeppelinConfiguration conf; + private final ConfigStorage configStorage; // contains roles for each user (username --> roles) private Map> userRoles = new ConcurrentHashMap<>(); @@ -56,11 +56,12 @@ public class AuthorizationService implements ClusterEventListener { private Map notesAuth = new ConcurrentHashMap<>(); @Inject - public AuthorizationService(NoteManager noteManager, ZeppelinConfiguration conf) { + public AuthorizationService(NoteManager noteManager, ZeppelinConfiguration conf, + ConfigStorage storage) { LOGGER.info("Injected AuthorizationService: {}", this); this.conf = conf; + this.configStorage = storage; try { - this.configStorage = ConfigStorage.getInstance(conf); // init notesAuth by reading notebook-authorization.json NotebookAuthorizationInfoSaving authorizationInfoSaving = configStorage.loadNotebookAuthorization(); if (authorizationInfoSaving != null) { diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/FileSystemStorage.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/FileSystemStorage.java index 776041053f8..5fc60e74e55 100644 --- a/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/FileSystemStorage.java +++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/FileSystemStorage.java @@ -57,10 +57,9 @@ public class FileSystemStorage { // your ticket expired. 
static { if (UserGroupInformation.isSecurityEnabled()) { - ZeppelinConfiguration zConf = ZeppelinConfiguration.create(); - String keytab = zConf.getString( + String keytab = ZeppelinConfiguration.getStaticString( ZeppelinConfiguration.ConfVars.ZEPPELIN_SERVER_KERBEROS_KEYTAB); - String principal = zConf.getString( + String principal = ZeppelinConfiguration.getStaticString( ZeppelinConfiguration.ConfVars.ZEPPELIN_SERVER_KERBEROS_PRINCIPAL); if (StringUtils.isBlank(keytab) || StringUtils.isBlank(principal)) { throw new RuntimeException("keytab and principal can not be empty, keytab: " + keytab diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/GsonNoteParser.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/GsonNoteParser.java new file mode 100644 index 00000000000..7087ad97320 --- /dev/null +++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/GsonNoteParser.java @@ -0,0 +1,84 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.zeppelin.notebook; + +import java.util.Date; + +import javax.inject.Inject; + +import org.apache.zeppelin.conf.ZeppelinConfiguration; +import org.apache.zeppelin.display.Input; +import org.apache.zeppelin.notebook.exception.CorruptedNoteException; + +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import com.google.gson.JsonSyntaxException; + +public class GsonNoteParser implements NoteParser { + + private final ZeppelinConfiguration conf; + private final Gson gson; + + @Inject + public GsonNoteParser(ZeppelinConfiguration conf) { + this.conf = conf; + this.gson = new GsonBuilder() + .setPrettyPrinting() + .setDateFormat("yyyy-MM-dd HH:mm:ss.SSS") + .registerTypeAdapter(Date.class, new NotebookImportDeserializer()) + .registerTypeAdapterFactory(Input.TypeAdapterFactory) + .setExclusionStrategies(new NoteJsonExclusionStrategy(conf)) + .create(); + } + + @Override + public Note fromJson(String noteId, String json) throws CorruptedNoteException { + try { + Note note = gson.fromJson(json, Note.class); + note.setZeppelinConfiguration(conf); + note.setNoteParser(this); + convertOldInput(note); + note.getInfo().remove("isRunning"); + note.postProcessParagraphs(); + return note; + } catch (JsonSyntaxException e) { + throw new CorruptedNoteException(noteId, "Fail to parse note json: " + json, e); + } + } + + @Override + public Paragraph fromJson(String json) { + return gson.fromJson(json, Paragraph.class); + } + + @Override + public String toJson(Note note) { + return gson.toJson(note); + } + + @Override + public String toJson(Paragraph paragraph) { + return gson.toJson(paragraph); + } + + private static void convertOldInput(Note note) { + for (Paragraph p : note.getParagraphs()) { + p.settings.convertOldInput(); + } + } +} diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/Note.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/Note.java index c02f90bc452..ecc5fe1ba99 100644 --- 
a/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/Note.java +++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/Note.java @@ -18,10 +18,6 @@ package org.apache.zeppelin.notebook; import com.google.common.annotations.VisibleForTesting; -import com.google.gson.ExclusionStrategy; -import com.google.gson.FieldAttributes; -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; import org.apache.commons.lang3.StringUtils; import org.apache.zeppelin.common.JsonSerializable; import org.apache.zeppelin.conf.ZeppelinConfiguration; @@ -40,7 +36,6 @@ import org.apache.zeppelin.interpreter.remote.RemoteAngularObject; import org.apache.zeppelin.interpreter.remote.RemoteAngularObjectRegistry; import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion; -import org.apache.zeppelin.notebook.exception.CorruptedNoteException; import org.apache.zeppelin.notebook.utility.IdHashes; import org.apache.zeppelin.scheduler.ExecutorFactory; import org.apache.zeppelin.scheduler.Job.Status; @@ -50,12 +45,10 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; import java.time.LocalDateTime; import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.Arrays; -import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; @@ -74,51 +67,6 @@ public class Note implements JsonSerializable { private static final Logger LOGGER = LoggerFactory.getLogger(Note.class); - - // serialize Paragraph#runtimeInfos and Note#path to frontend but not to note file - private static final ExclusionStrategy NOTE_GSON_EXCLUSION_STRATEGY = - new NoteJsonExclusionStrategy(ZeppelinConfiguration.create()); - - private static class NoteJsonExclusionStrategy implements ExclusionStrategy { - private Set noteExcludeFields = new HashSet<>(); - private Set paragraphExcludeFields = new HashSet<>(); - - public NoteJsonExclusionStrategy(ZeppelinConfiguration zConf) { - String[] 
excludeFields = zConf.getNoteFileExcludedFields(); - for (String field : excludeFields) { - if (field.startsWith("Paragraph")) { - paragraphExcludeFields.add(field.substring(10)); - } else { - noteExcludeFields.add(field); - } - } - } - - @Override - public boolean shouldSkipField(FieldAttributes field) { - if(field.getName().equals("path")) { - return true; - } - if (field.getDeclaringClass().equals(Paragraph.class)) { - return paragraphExcludeFields.contains(field.getName()); - } else { - return noteExcludeFields.contains(field.getName()); - } - } - - @Override - public boolean shouldSkipClass(Class aClass) { - return false; - } - } - - private static final Gson GSON = new GsonBuilder() - .setPrettyPrinting() - .setDateFormat("yyyy-MM-dd HH:mm:ss.SSS") - .registerTypeAdapter(Date.class, new NotebookImportDeserializer()) - .registerTypeAdapterFactory(Input.TypeAdapterFactory) - .setExclusionStrategies(NOTE_GSON_EXCLUSION_STRATEGY) - .create(); private static final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd_HH-mm-ss"); @@ -162,7 +110,8 @@ public boolean shouldSkipClass(Class aClass) { private transient ParagraphJobListener paragraphJobListener; private transient List noteEventListeners = new ArrayList<>(); private transient Credentials credentials; - private transient ZeppelinConfiguration zConf = ZeppelinConfiguration.create(); + private transient ZeppelinConfiguration zConf; + private transient NoteParser noteParser; public Note() { generateId(); @@ -170,7 +119,8 @@ public Note() { public Note(String path, String defaultInterpreterGroup, InterpreterFactory factory, InterpreterSettingManager interpreterSettingManager, ParagraphJobListener paragraphJobListener, - Credentials credentials, List noteEventListener) { + Credentials credentials, List noteEventListener, + ZeppelinConfiguration zConf, NoteParser noteParser) { setPath(path); this.defaultInterpreterGroup = defaultInterpreterGroup; this.interpreterFactory = factory; @@ 
-178,6 +128,8 @@ public Note(String path, String defaultInterpreterGroup, InterpreterFactory fact this.paragraphJobListener = paragraphJobListener; this.noteEventListeners = noteEventListener; this.credentials = credentials; + this.zConf = zConf; + this.noteParser = noteParser; this.version = Util.getVersion(); generateId(); @@ -269,8 +221,8 @@ public void setVersion(String version) { public String getDefaultInterpreterGroup() { if (StringUtils.isBlank(defaultInterpreterGroup)) { - defaultInterpreterGroup = ZeppelinConfiguration.create() - .getString(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_GROUP_DEFAULT); + defaultInterpreterGroup = + zConf.getString(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_GROUP_DEFAULT); } return defaultInterpreterGroup; } @@ -479,7 +431,7 @@ void addCloneParagraph(Paragraph srcParagraph, AuthenticationInfo subject) { LOGGER.debug("newParagraph user: {}", newParagraph.getUser()); try { - String resultJson = GSON.toJson(srcParagraph.getReturn()); + String resultJson = srcParagraph.getReturn().toJson(); InterpreterResult result = InterpreterResult.fromJson(resultJson); newParagraph.setReturn(result, null); } catch (Exception e) { @@ -859,7 +811,7 @@ public List completion(String paragraphId, return p.completion(buffer, cursor); } - public CopyOnWriteArrayList getParagraphs() { + public List getParagraphs() { return this.paragraphs; } @@ -986,8 +938,10 @@ public Note getUserNote(String user) { Note newNote = new Note(); newNote.name = getName(); newNote.id = getId(); - newNote.config = getConfig(); + newNote.setConfig(getConfig()); newNote.angularObjects = getAngularObjects(); + newNote.setZeppelinConfiguration(zConf); + newNote.setNoteParser(noteParser); Paragraph newParagraph; for (Paragraph p : paragraphs) { @@ -1082,27 +1036,7 @@ public String toString() { @Override public String toJson() { - return GSON.toJson(this); - } - - /** - * Parse note json from note file. Throw IOException if fail to parse note json. 
- * - * @param json - * @return Note - * @throws IOException if fail to parse note json (note file may be corrupted) - */ - public static Note fromJson(String noteId, String json) throws IOException { - try { - Note note = GSON.fromJson(json, Note.class); - convertOldInput(note); - note.info.remove("isRunning"); - note.postProcessParagraphs(); - return note; - } catch (Exception e) { - LOGGER.error("Fail to parse note json: {}", e.toString()); - throw new CorruptedNoteException(noteId, "Fail to parse note json: " + json, e); - } + return noteParser.toJson(this); } public void postProcessParagraphs() { @@ -1129,12 +1063,6 @@ public void postProcessParagraphs() { } } - private static void convertOldInput(Note note) { - for (Paragraph p : note.paragraphs) { - p.settings.convertOldInput(); - } - } - @Override public boolean equals(Object o) { if (this == o) { @@ -1176,10 +1104,6 @@ public int hashCode() { return result; } - public static Gson getGSON() { - return GSON; - } - public void setNoteEventListeners(List noteEventListeners) { this.noteEventListeners = noteEventListeners; } @@ -1204,4 +1128,16 @@ public ExecutionContext getExecutionContext() { executionContext.setStartTime(getStartTime()); return executionContext; } + + public NoteParser getNoteParser() { + return noteParser; + } + + public void setZeppelinConfiguration(ZeppelinConfiguration zConf) { + this.zConf = zConf; + } + + public void setNoteParser(NoteParser noteParser) { + this.noteParser = noteParser; + } } diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/NoteJsonExclusionStrategy.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/NoteJsonExclusionStrategy.java new file mode 100644 index 00000000000..7d68a70d74b --- /dev/null +++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/NoteJsonExclusionStrategy.java @@ -0,0 +1,59 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.zeppelin.notebook; + +import java.util.HashSet; +import java.util.Set; + +import org.apache.zeppelin.conf.ZeppelinConfiguration; + +import com.google.gson.ExclusionStrategy; +import com.google.gson.FieldAttributes; + +public class NoteJsonExclusionStrategy implements ExclusionStrategy { + private Set noteExcludeFields = new HashSet<>(); + private Set paragraphExcludeFields = new HashSet<>(); + + public NoteJsonExclusionStrategy(ZeppelinConfiguration zConf) { + String[] excludeFields = zConf.getNoteFileExcludedFields(); + for (String field : excludeFields) { + if (field.startsWith("Paragraph")) { + paragraphExcludeFields.add(field.substring(10)); + } else { + noteExcludeFields.add(field); + } + } + } + + @Override + public boolean shouldSkipField(FieldAttributes field) { + if (field.getName().equals("path")) { + return true; + } + if (field.getDeclaringClass().equals(Paragraph.class)) { + return paragraphExcludeFields.contains(field.getName()); + } else { + return noteExcludeFields.contains(field.getName()); + } + } + + @Override + public boolean shouldSkipClass(Class aClass) { + return false; + } +} diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/NoteManager.java 
b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/NoteManager.java index dc8326e6fd3..1147b97b44c 100644 --- a/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/NoteManager.java +++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/NoteManager.java @@ -66,12 +66,14 @@ public class NoteManager { private NoteCache noteCache; // noteId -> notePath private Map notesInfo; + private final ZeppelinConfiguration zConf; @Inject - public NoteManager(NotebookRepo notebookRepo, ZeppelinConfiguration conf) throws IOException { + public NoteManager(NotebookRepo notebookRepo, ZeppelinConfiguration zConf) throws IOException { + this.zConf = zConf; this.notebookRepo = notebookRepo; - this.noteCache = new NoteCache(conf.getNoteCacheThreshold()); - this.root = new Folder("/", notebookRepo, noteCache); + this.noteCache = new NoteCache(zConf.getNoteCacheThreshold()); + this.root = new Folder("/", notebookRepo, noteCache, zConf); this.trash = this.root.getOrCreateFolder(TRASH_FOLDER); init(); } @@ -102,7 +104,7 @@ public Map getNotesInfo() { * @throws IOException */ public void reloadNotes() throws IOException { - this.root = new Folder("/", notebookRepo, noteCache); + this.root = new Folder("/", notebookRepo, noteCache, zConf); this.trash = this.root.getOrCreateFolder(TRASH_FOLDER); init(); } @@ -436,20 +438,24 @@ public static class Folder { private Folder parent; private NotebookRepo notebookRepo; private NoteCache noteCache; + private final ZeppelinConfiguration zConf; // noteName -> NoteNode private Map notes = new ConcurrentHashMap<>(); // folderName -> Folder private Map subFolders = new ConcurrentHashMap<>(); - public Folder(String name, NotebookRepo notebookRepo, NoteCache noteCache) { + public Folder(String name, NotebookRepo notebookRepo, NoteCache noteCache, + ZeppelinConfiguration zConf) { this.name = name; + this.zConf = zConf; this.notebookRepo = notebookRepo; this.noteCache = noteCache; } - public Folder(String name, Folder parent, 
NotebookRepo notebookRepo, NoteCache noteCache) { - this(name, notebookRepo, noteCache); + public Folder(String name, Folder parent, NotebookRepo notebookRepo, NoteCache noteCache, + ZeppelinConfiguration zConf) { + this(name, notebookRepo, noteCache, zConf); this.parent = parent; } @@ -458,7 +464,7 @@ public synchronized Folder getOrCreateFolder(String folderName) { return this; } if (!subFolders.containsKey(folderName)) { - subFolders.put(folderName, new Folder(folderName, this, notebookRepo, noteCache)); + subFolders.put(folderName, new Folder(folderName, this, notebookRepo, noteCache, zConf)); } return subFolders.get(folderName); } @@ -492,7 +498,7 @@ public NoteNode getNote(String noteName) { } public void addNote(String noteName, NoteInfo noteInfo) { - notes.put(noteName, new NoteNode(noteInfo, this, notebookRepo, noteCache)); + notes.put(noteName, new NoteNode(noteInfo, this, notebookRepo, noteCache, zConf)); } /** @@ -589,12 +595,15 @@ public static class NoteNode { private NoteInfo noteInfo; private NotebookRepo notebookRepo; private NoteCache noteCache; + private ZeppelinConfiguration zConf; - public NoteNode(NoteInfo noteInfo, Folder parent, NotebookRepo notebookRepo, NoteCache noteCache) { + public NoteNode(NoteInfo noteInfo, Folder parent, NotebookRepo notebookRepo, + NoteCache noteCache, ZeppelinConfiguration zConf) { this.noteInfo = noteInfo; this.parent = parent; this.notebookRepo = notebookRepo; this.noteCache = noteCache; + this.zConf = zConf; } /** @@ -621,7 +630,7 @@ public T loadAndProcessNote(boolean reload, NoteProcessor noteProcessor) } else { note.setPath(parent.toString() + "/" + note.getName()); } - note.setCronSupported(ZeppelinConfiguration.create()); + note.setCronSupported(zConf); noteCache.putNote(note); } } diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/NoteParser.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/NoteParser.java new file mode 100644 index 00000000000..9868c226b49 --- 
/dev/null +++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/NoteParser.java @@ -0,0 +1,57 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.zeppelin.notebook; + +import org.apache.zeppelin.notebook.exception.CorruptedNoteException; + +public interface NoteParser { + + /** + * This method deserializes the specified Note Json into an Note object + * + * @param noteId - NoteId of the note + * @param json - The Json String of the specified Note + * @return A new Note object + * @throws CorruptedNoteException is thrown if the note cannot be deserialized + */ + Note fromJson(String noteId, String json) throws CorruptedNoteException; + + /** + * This method deserializes the specified Paragraph Json into an Paragraph object + * + * @param json - The Json String of the specified Paragraph + * @return A new Paragraph object + */ + Paragraph fromJson(String json); + + /** + * This method serializes the Note object into its equivalent Json representation + * + * @param note + * @return JsonString of the Note + */ + String toJson(Note note); + + /** + * This method serializes the Paragraph object into its equivalent Json representation + * + * @param paragraph + * @return JsonString of the 
Paragraph + */ + String toJson(Paragraph paragraph); +} diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/Notebook.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/Notebook.java index 3513ec0c823..3607810e80b 100644 --- a/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/Notebook.java +++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/Notebook.java @@ -35,6 +35,7 @@ import java.util.function.Consumer; import java.util.function.Predicate; import java.util.stream.Collectors; + import javax.inject.Inject; import org.apache.commons.lang3.StringUtils; @@ -308,7 +309,8 @@ public String createNote(String notePath, boolean save) throws IOException { Note note = new Note(notePath, defaultInterpreterGroup, replFactory, interpreterSettingManager, - paragraphJobListener, credentials, noteEventListeners); + paragraphJobListener, credentials, noteEventListeners, conf, + notebookRepo.getNoteParser()); noteManager.addNote(note, subject); // init noteMeta authorizationService.createNoteAuth(note.getId(), subject); @@ -351,7 +353,7 @@ public String exportNote(String noteId) throws IOException { */ public String importNote(String sourceJson, String notePath, AuthenticationInfo subject) throws IOException { - Note oldNote = Note.fromJson(null, sourceJson); + Note oldNote = notebookRepo.getNoteParser().fromJson(null, sourceJson); if (notePath == null) { notePath = oldNote.getName(); } diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/Paragraph.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/Paragraph.java index 1faa9d42cea..db195ae8f1e 100644 --- a/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/Paragraph.java +++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/Paragraph.java @@ -785,11 +785,11 @@ public int hashCode() { @Override public String toJson() { - return Note.getGSON().toJson(this); + return note.getNoteParser().toJson(this); } - public static 
Paragraph fromJson(String json) { - return Note.getGSON().fromJson(json, Paragraph.class); + public Paragraph fromJson(String json) { + return note.getNoteParser().fromJson(json); } public void updateOutputBuffer(int index, InterpreterResult.Type type, String output) { diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/exception/CorruptedNoteException.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/exception/CorruptedNoteException.java index 53076330a16..e50c45afa2a 100644 --- a/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/exception/CorruptedNoteException.java +++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/exception/CorruptedNoteException.java @@ -20,7 +20,10 @@ import java.io.IOException; public class CorruptedNoteException extends IOException { - public CorruptedNoteException(final String noteId, final String message, Exception e) { - super(String.format("noteId: %s - %s", noteId, message), e); - } + + private static final long serialVersionUID = 1743308058186542714L; + + public CorruptedNoteException(final String noteId, final String message, Exception e) { + super(String.format("noteId: %s - %s", noteId, message), e); + } } diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/repo/AbstractNotebookRepo.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/repo/AbstractNotebookRepo.java new file mode 100644 index 00000000000..d7cbe6984c9 --- /dev/null +++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/repo/AbstractNotebookRepo.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.zeppelin.notebook.repo; + +import java.io.IOException; + +import org.apache.zeppelin.conf.ZeppelinConfiguration; +import org.apache.zeppelin.notebook.NoteParser; + +public abstract class AbstractNotebookRepo implements NotebookRepo { + + protected ZeppelinConfiguration conf; + protected NoteParser noteParser; + + @Override + public void init(ZeppelinConfiguration conf, NoteParser parser) throws IOException { + this.noteParser = parser; + this.conf = conf; + } + + @Override + public NoteParser getNoteParser() { + return noteParser; + } +} diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/repo/GitNotebookRepo.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/repo/GitNotebookRepo.java index 9fb9c7b1cb3..27401dc885e 100644 --- a/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/repo/GitNotebookRepo.java +++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/repo/GitNotebookRepo.java @@ -17,9 +17,9 @@ package org.apache.zeppelin.notebook.repo; -import com.google.common.annotations.VisibleForTesting; import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.notebook.Note; +import org.apache.zeppelin.notebook.NoteParser; import org.apache.zeppelin.user.AuthenticationInfo; import org.eclipse.jgit.api.Git; import org.eclipse.jgit.api.errors.GitAPIException; @@ -60,17 +60,9 @@ public GitNotebookRepo() { super(); } - @VisibleForTesting - public GitNotebookRepo(ZeppelinConfiguration conf) throws IOException { - this(); - init(conf); - } - @Override - 
public void init(ZeppelinConfiguration conf) throws IOException { - //TODO(zjffdu), it is weird that I can not call super.init directly here, as it would cause - //AbstractMethodError - this.conf = conf; + public void init(ZeppelinConfiguration conf, NoteParser noteParser) throws IOException { + super.init(conf, noteParser); setNotebookDirectory(conf.getNotebookDir()); LOGGER.info("Opening a git repo at '{}'", this.rootNotebookFolder); diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/repo/InMemoryNotebookRepo.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/repo/InMemoryNotebookRepo.java index 1db4fb863ad..c6df1490228 100644 --- a/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/repo/InMemoryNotebookRepo.java +++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/repo/InMemoryNotebookRepo.java @@ -21,6 +21,7 @@ import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.notebook.Note; import org.apache.zeppelin.notebook.NoteInfo; +import org.apache.zeppelin.notebook.NoteParser; import org.apache.zeppelin.user.AuthenticationInfo; import java.io.IOException; @@ -29,13 +30,13 @@ import java.util.List; import java.util.Map; -public class InMemoryNotebookRepo implements NotebookRepo { +public class InMemoryNotebookRepo extends AbstractNotebookRepo { private Map notes = new HashMap<>(); @Override - public void init(ZeppelinConfiguration zConf) throws IOException { - + public void init(ZeppelinConfiguration zConf, NoteParser parser) throws IOException { + super.init(zConf, parser); } @Override diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/repo/NotebookRepo.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/repo/NotebookRepo.java index bae5cba8dd3..b55067c7848 100644 --- a/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/repo/NotebookRepo.java +++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/repo/NotebookRepo.java @@ 
-20,9 +20,11 @@ import org.apache.zeppelin.annotation.ZeppelinApi; import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.notebook.Note; +import org.apache.zeppelin.notebook.NoteParser; import org.apache.zeppelin.notebook.NoteInfo; import org.apache.zeppelin.user.AuthenticationInfo; +import java.io.Closeable; import java.io.IOException; import java.util.List; import java.util.Map; @@ -30,9 +32,9 @@ /** * Notebook repository (persistence layer) abstraction. */ -public interface NotebookRepo { +public interface NotebookRepo extends Closeable { - void init(ZeppelinConfiguration zConf) throws IOException; + void init(ZeppelinConfiguration zConf, NoteParser parser) throws IOException; /** * Lists notebook information about all notebooks in storage. This method should only read @@ -115,6 +117,7 @@ void move(String folderPath, String newFolderPath, /** * Release any underlying resources */ + @Override @ZeppelinApi void close(); @@ -137,6 +140,8 @@ void move(String folderPath, String newFolderPath, @ZeppelinApi void updateSettings(Map settings, AuthenticationInfo subject); + NoteParser getNoteParser(); + default String buildNoteFileName(String noteId, String notePath) throws IOException { if (!notePath.startsWith("/")) { throw new IOException("Invalid notePath: " + notePath); diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/repo/NotebookRepoSync.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/repo/NotebookRepoSync.java index 186aa61321a..2c363d3982b 100644 --- a/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/repo/NotebookRepoSync.java +++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/repo/NotebookRepoSync.java @@ -21,15 +21,15 @@ import org.apache.zeppelin.conf.ZeppelinConfiguration.ConfVars; import org.apache.zeppelin.notebook.Note; import org.apache.zeppelin.notebook.NoteInfo; -import org.apache.zeppelin.notebook.OldNoteInfo; +import org.apache.zeppelin.notebook.NoteParser; 
import org.apache.zeppelin.notebook.Paragraph; import org.apache.zeppelin.plugin.PluginManager; import org.apache.zeppelin.user.AuthenticationInfo; -import org.apache.zeppelin.util.Util; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.inject.Inject; +import com.google.gson.Gson; + import java.io.IOException; import java.util.ArrayList; import java.util.Collections; @@ -38,9 +38,13 @@ import java.util.List; import java.util.Map; +import javax.inject.Inject; +import javax.inject.Singleton; + /** * Notebook repository sync with remote storage */ +@Singleton public class NotebookRepoSync implements NotebookRepoWithVersionControl { private static final Logger LOGGER = LoggerFactory.getLogger(NotebookRepoSync.class); private static final int MAX_REPO_NUM = 2; @@ -52,18 +56,15 @@ public class NotebookRepoSync implements NotebookRepoWithVersionControl { private List repos = new ArrayList<>(); private boolean oneWaySync; + private final PluginManager pluginManager; - /** - * @param conf - */ - @SuppressWarnings("static-access") @Inject - public NotebookRepoSync(ZeppelinConfiguration conf) throws IOException { - init(conf); + public NotebookRepoSync(PluginManager pluginManager) { + this.pluginManager = pluginManager; } @Override - public void init(ZeppelinConfiguration conf) throws IOException { + public void init(ZeppelinConfiguration conf, NoteParser noteParser) throws IOException { oneWaySync = conf.getBoolean(ConfVars.ZEPPELIN_NOTEBOOK_ONE_WAY_SYNC); String allStorageClassNames = conf.getNotebookStorageClass().trim(); if (allStorageClassNames.isEmpty()) { @@ -79,19 +80,19 @@ public void init(ZeppelinConfiguration conf) throws IOException { // init the underlying NotebookRepo for (int i = 0; i < Math.min(storageClassNames.length, getMaxRepoNum()); i++) { - NotebookRepo notebookRepo = PluginManager.get().loadNotebookRepo(storageClassNames[i].trim()); - notebookRepo.init(conf); + NotebookRepo notebookRepo = + 
pluginManager.loadNotebookRepo(storageClassNames[i].trim()); + notebookRepo.init(conf, noteParser); repos.add(notebookRepo); } // couldn't initialize any storage, use default if (getRepoCount() == 0) { LOGGER.info("No storage could be initialized, using default {} storage", DEFAULT_STORAGE); - NotebookRepo defaultNotebookRepo = PluginManager.get().loadNotebookRepo(DEFAULT_STORAGE); - defaultNotebookRepo.init(conf); + NotebookRepo defaultNotebookRepo = pluginManager.loadNotebookRepo(DEFAULT_STORAGE); + defaultNotebookRepo.init(conf, noteParser); repos.add(defaultNotebookRepo); } - // sync for anonymous mode on start if (getRepoCount() > 1 && conf.isAnonymousAllowed()) { try { @@ -104,7 +105,6 @@ public void init(ZeppelinConfiguration conf) throws IOException { public List getNotebookRepos(AuthenticationInfo subject) { List reposSetting = new ArrayList<>(); - NotebookRepoWithSettings repoWithSettings; for (NotebookRepo repo : repos) { repoWithSettings = NotebookRepoWithSettings @@ -152,12 +152,14 @@ List list(int repoIndex, AuthenticationInfo subject) throws IOExceptio * Get Note from the first repository */ @Override - public Note get(String noteId, String notePath, AuthenticationInfo subject) throws IOException { + public Note get(String noteId, String notePath, AuthenticationInfo subject) + throws IOException { return getRepo(0).get(noteId, notePath, subject); } /* Get Note from specific repo (for tests) */ - Note get(int repoIndex, String noteId, String noteName, AuthenticationInfo subject) throws IOException { + Note get(int repoIndex, String noteId, String noteName, AuthenticationInfo subject) + throws IOException { return getRepo(repoIndex).get(noteId, noteName, subject); } @@ -228,7 +230,8 @@ void remove(int repoIndex, String noteId, String noteName, AuthenticationInfo su * * @throws IOException */ - void sync(int sourceRepoIndex, int destRepoIndex, AuthenticationInfo subject) throws IOException { + void sync(int sourceRepoIndex, int destRepoIndex, 
AuthenticationInfo subject) + throws IOException { LOGGER.info("Sync started"); NotebookRepo srcRepo = getRepo(sourceRepoIndex); NotebookRepo dstRepo = getRepo(destRepoIndex); @@ -282,7 +285,8 @@ private void pushNotes(AuthenticationInfo subject, List notesInfo, Not NotebookRepo remoteRepo) { for (NoteInfo noteInfo : notesInfo) { try { - remoteRepo.save(localRepo.get(noteInfo.getId(), noteInfo.getPath(), subject), subject); + remoteRepo.save(localRepo.get(noteInfo.getId(), noteInfo.getPath(), subject), + subject); } catch (IOException e) { LOGGER.error("Failed to push note to storage, moving onto next one", e); } @@ -546,4 +550,14 @@ public Note setNoteRevision(String noteId, String notePath, String revId, Authen return revisionNote; } + @Override + public NoteParser getNoteParser() { + try { + return getRepo(0).getNoteParser(); + } catch (IOException e) { + LOGGER.error("Error getting first repo", e); + } + return null; + } + } diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/repo/VFSNotebookRepo.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/repo/VFSNotebookRepo.java index 7ed73f4942b..b0ec501aced 100644 --- a/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/repo/VFSNotebookRepo.java +++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/repo/VFSNotebookRepo.java @@ -38,6 +38,7 @@ import org.apache.zeppelin.conf.ZeppelinConfiguration.ConfVars; import org.apache.zeppelin.notebook.Note; import org.apache.zeppelin.notebook.NoteInfo; +import org.apache.zeppelin.notebook.NoteParser; import org.apache.zeppelin.user.AuthenticationInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -45,10 +46,9 @@ /** * NotebookRepo implementation based on apache vfs */ -public class VFSNotebookRepo implements NotebookRepo { +public class VFSNotebookRepo extends AbstractNotebookRepo { private static final Logger LOGGER = LoggerFactory.getLogger(VFSNotebookRepo.class); - protected ZeppelinConfiguration conf; 
protected FileSystemManager fsManager; protected FileObject rootNotebookFileObject; protected String rootNotebookFolder; @@ -58,8 +58,8 @@ public VFSNotebookRepo() { } @Override - public void init(ZeppelinConfiguration conf) throws IOException { - this.conf = conf; + public void init(ZeppelinConfiguration conf, NoteParser noteParser) throws IOException { + super.init(conf, noteParser); setNotebookDirectory(conf.getNotebookDir()); } @@ -128,12 +128,13 @@ private Map listFolder(FileObject fileObject) throws IOExcepti } @Override - public Note get(String noteId, String notePath, AuthenticationInfo subject) throws IOException { + public Note get(String noteId, String notePath, AuthenticationInfo subject) + throws IOException { FileObject noteFile = rootNotebookFileObject.resolveFile(buildNoteFileName(noteId, notePath), NameScope.DESCENDENT); String json = IOUtils.toString(noteFile.getContent().getInputStream(), conf.getString(ConfVars.ZEPPELIN_ENCODING)); - Note note = Note.fromJson(noteId, json); + Note note = noteParser.fromJson(noteId, json); // setPath here just for testing, because actually NoteManager will setPath note.setPath(notePath); return note; diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/plugin/PluginManager.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/plugin/PluginManager.java index 7cdc915f18c..eb162bb82a9 100644 --- a/zeppelin-zengine/src/main/java/org/apache/zeppelin/plugin/PluginManager.java +++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/plugin/PluginManager.java @@ -17,7 +17,6 @@ package org.apache.zeppelin.plugin; -import com.google.common.annotations.VisibleForTesting; import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.interpreter.launcher.InterpreterLauncher; import org.apache.zeppelin.interpreter.launcher.SparkInterpreterLauncher; @@ -40,6 +39,8 @@ import java.util.List; import java.util.Map; +import javax.inject.Inject; + /** * Class for loading Plugins. 
It is singleton and factory class. * @@ -47,10 +48,8 @@ public class PluginManager { private static final Logger LOGGER = LoggerFactory.getLogger(PluginManager.class); - private static PluginManager instance; - - private ZeppelinConfiguration zConf = ZeppelinConfiguration.create(); - private String pluginsDir = zConf.getPluginsDir(); + private final String pluginsDir; + private final ZeppelinConfiguration zConf; private Map cachedLaunchers = new HashMap<>(); @@ -61,11 +60,10 @@ public class PluginManager { VFSNotebookRepo.class.getName(), GitNotebookRepo.class.getName()); - public static synchronized PluginManager get() { - if (instance == null) { - instance = new PluginManager(); - } - return instance; + @Inject + public PluginManager(ZeppelinConfiguration zConf) { + pluginsDir = zConf.getPluginsDir(); + this.zConf = zConf; } public NotebookRepo loadNotebookRepo(String notebookRepoClassName) throws IOException { @@ -161,9 +159,4 @@ private URLClassLoader getPluginClassLoader(String pluginsDir, } return new URLClassLoader(urls.toArray(new URL[0])); } - - @VisibleForTesting - public static void reset() { - instance = null; - } } diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/search/SearchService.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/search/SearchService.java index b4bd56e8bb8..d36e4b1693b 100644 --- a/zeppelin-zengine/src/main/java/org/apache/zeppelin/search/SearchService.java +++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/search/SearchService.java @@ -31,7 +31,7 @@ */ public abstract class SearchService extends NoteEventAsyncListener { - public SearchService(String name) { + protected SearchService(String name) { super(name); } diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/storage/ConfigStorage.java b/zeppelin-zengine/src/main/java/org/apache/zeppelin/storage/ConfigStorage.java index c380e5a8b17..8925c42d5c4 100644 --- a/zeppelin-zengine/src/main/java/org/apache/zeppelin/storage/ConfigStorage.java +++ 
b/zeppelin-zengine/src/main/java/org/apache/zeppelin/storage/ConfigStorage.java @@ -18,11 +18,9 @@ package org.apache.zeppelin.storage; -import com.google.common.annotations.VisibleForTesting; import com.google.gson.JsonObject; import com.google.gson.JsonParser; import org.apache.zeppelin.conf.ZeppelinConfiguration; -import org.apache.zeppelin.healthcheck.HealthChecks; import org.apache.zeppelin.interpreter.InterpreterInfoSaving; import org.apache.zeppelin.interpreter.InterpreterSetting; import org.apache.zeppelin.notebook.NotebookAuthorizationInfoSaving; @@ -43,19 +41,9 @@ public abstract class ConfigStorage { protected static final String STORAGE_HEALTHCHECK_NAME = "ConfigStorage"; - private static ConfigStorage instance; - protected ZeppelinConfiguration zConf; - public static synchronized ConfigStorage getInstance(ZeppelinConfiguration zConf) - throws IOException { - if (instance == null) { - instance = createConfigStorage(zConf); - } - return instance; - } - - private static ConfigStorage createConfigStorage(ZeppelinConfiguration zConf) throws IOException { + public static ConfigStorage createConfigStorage(ZeppelinConfiguration zConf) throws IOException { String configStorageClass = zConf.getString(ZeppelinConfiguration.ConfVars.ZEPPELIN_CONFIG_STORAGE_CLASS); return ReflectionUtils.createClazzInstance(configStorageClass, @@ -63,7 +51,7 @@ private static ConfigStorage createConfigStorage(ZeppelinConfiguration zConf) th } - public ConfigStorage(ZeppelinConfiguration zConf) { + protected ConfigStorage(ZeppelinConfiguration zConf) { this.zConf = zConf; } @@ -96,10 +84,4 @@ protected InterpreterInfoSaving buildInterpreterInfoSaving(String json) { } return infoSaving; } - - @VisibleForTesting - public static void reset() { - HealthChecks.getHealthCheckLivenessRegistry().unregister(STORAGE_HEALTHCHECK_NAME); - instance = null; - } } diff --git a/zeppelin-zengine/src/main/java/org/apache/zeppelin/user/Credentials.java 
b/zeppelin-zengine/src/main/java/org/apache/zeppelin/user/Credentials.java index df69853a2f0..f4e966382e2 100644 --- a/zeppelin-zengine/src/main/java/org/apache/zeppelin/user/Credentials.java +++ b/zeppelin-zengine/src/main/java/org/apache/zeppelin/user/Credentials.java @@ -22,6 +22,8 @@ import java.util.HashMap; import java.util.Map; +import javax.inject.Inject; + import org.apache.commons.lang3.StringUtils; import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.storage.ConfigStorage; @@ -38,7 +40,7 @@ public class Credentials { private static final Logger LOGGER = LoggerFactory.getLogger(Credentials.class); - private ConfigStorage storage; + private final ConfigStorage storage; private Map credentialsMap; private Gson gson; private Encryptor encryptor; @@ -50,15 +52,16 @@ public class Credentials { * @param conf * @throws IOException */ - public Credentials(ZeppelinConfiguration conf) { + @Inject + public Credentials(ZeppelinConfiguration conf, ConfigStorage storage) { credentialsMap = new HashMap<>(); if (conf.credentialsPersist()) { String encryptKey = conf.getCredentialsEncryptKey(); if (StringUtils.isNotBlank(encryptKey)) { this.encryptor = new Encryptor(encryptKey); } + this.storage = storage; try { - storage = ConfigStorage.getInstance(conf); GsonBuilder builder = new GsonBuilder(); builder.setPrettyPrinting(); gson = builder.create(); @@ -66,12 +69,11 @@ public Credentials(ZeppelinConfiguration conf) { } catch (IOException e) { LOGGER.error("Fail to create ConfigStorage for Credentials. 
Persistenz will be disabled", e); encryptor = null; - storage = null; gson = null; } } else { encryptor = null; - storage = null; + this.storage = null; gson = null; } } @@ -137,7 +139,7 @@ private void loadCredentials() throws IOException { private void loadFromFile() throws IOException { try { String json = storage.loadCredentials(); - if (json != null && encryptor != null) { + if (encryptor != null && StringUtils.isNotBlank(json)) { json = encryptor.decrypt(json); } diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/conf/ZeppelinConfigurationTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/conf/ZeppelinConfigurationTest.java index ba5e609ca2d..06943b138d6 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/conf/ZeppelinConfigurationTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/conf/ZeppelinConfigurationTest.java @@ -18,8 +18,6 @@ import org.apache.zeppelin.conf.ZeppelinConfiguration.ConfVars; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import java.net.MalformedURLException; @@ -29,21 +27,12 @@ import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; -public class ZeppelinConfigurationTest { - @BeforeAll - public static void clearSystemVariables() { - ZeppelinConfiguration.reset(); - } - - @AfterEach - public void cleanup() { - ZeppelinConfiguration.reset(); - } +class ZeppelinConfigurationTest { @Test - public void getAllowedOrigins2Test() throws MalformedURLException { + void getAllowedOrigins2Test() throws MalformedURLException { - ZeppelinConfiguration conf = ZeppelinConfiguration.create("test-zeppelin-site2.xml"); + ZeppelinConfiguration conf = ZeppelinConfiguration.load("test-zeppelin-site2.xml"); List origins = conf.getAllowedOrigins(); assertEquals(2, origins.size()); assertEquals("http://onehost:8080", origins.get(0)); @@ -51,88 +40,88 @@ public void 
getAllowedOrigins2Test() throws MalformedURLException { } @Test - public void getAllowedOrigins1Test() throws MalformedURLException { + void getAllowedOrigins1Test() throws MalformedURLException { - ZeppelinConfiguration conf = ZeppelinConfiguration.create("test-zeppelin-site1.xml"); + ZeppelinConfiguration conf = ZeppelinConfiguration.load("test-zeppelin-site1.xml"); List origins = conf.getAllowedOrigins(); assertEquals(1, origins.size()); assertEquals("http://onehost:8080", origins.get(0)); } @Test - public void getAllowedOriginsNoneTest() throws MalformedURLException { + void getAllowedOriginsNoneTest() throws MalformedURLException { - ZeppelinConfiguration conf = ZeppelinConfiguration.create("zeppelin-test-site.xml"); + ZeppelinConfiguration conf = ZeppelinConfiguration.load("zeppelin-test-site.xml"); List origins = conf.getAllowedOrigins(); assertEquals(1, origins.size()); } @Test - public void isWindowsPathTestTrue() { + void isWindowsPathTestTrue() { - ZeppelinConfiguration conf = ZeppelinConfiguration.create("zeppelin-test-site.xml"); + ZeppelinConfiguration conf = ZeppelinConfiguration.load("zeppelin-test-site.xml"); Boolean isIt = conf.isWindowsPath("c:\\test\\file.txt"); assertTrue(isIt); } @Test - public void isWindowsPathTestFalse() { + void isWindowsPathTestFalse() { - ZeppelinConfiguration conf = ZeppelinConfiguration.create("zeppelin-test-site.xml"); + ZeppelinConfiguration conf = ZeppelinConfiguration.load("zeppelin-test-site.xml"); Boolean isIt = conf.isWindowsPath("~/test/file.xml"); assertFalse(isIt); } @Test - public void isPathWithSchemeTestTrue() { + void isPathWithSchemeTestTrue() { - ZeppelinConfiguration conf = ZeppelinConfiguration.create("zeppelin-test-site.xml"); + ZeppelinConfiguration conf = ZeppelinConfiguration.load("zeppelin-test-site.xml"); Boolean isIt = conf.isPathWithScheme("hdfs://hadoop.example.com/zeppelin/notebook"); assertTrue(isIt); } @Test - public void isPathWithSchemeTestFalse() { + void isPathWithSchemeTestFalse() { - 
ZeppelinConfiguration conf = ZeppelinConfiguration.create("zeppelin-test-site.xml"); + ZeppelinConfiguration conf = ZeppelinConfiguration.load("zeppelin-test-site.xml"); Boolean isIt = conf.isPathWithScheme("~/test/file.xml"); assertFalse(isIt); } @Test - public void isPathWithInvalidSchemeTest() { + void isPathWithInvalidSchemeTest() { - ZeppelinConfiguration conf = ZeppelinConfiguration.create("zeppelin-test-site.xml"); + ZeppelinConfiguration conf = ZeppelinConfiguration.load("zeppelin-test-site.xml"); Boolean isIt = conf.isPathWithScheme("c:\\test\\file.txt"); assertFalse(isIt); } @Test - public void getNotebookDirTest() { - ZeppelinConfiguration conf = ZeppelinConfiguration.create("zeppelin-test-site.xml"); + void getNotebookDirTest() { + ZeppelinConfiguration conf = ZeppelinConfiguration.load("zeppelin-test-site.xml"); String notebookLocation = conf.getNotebookDir(); assertTrue(notebookLocation.endsWith("notebook")); } @Test - public void isNotebookPublicTest() { + void isNotebookPublicTest() { - ZeppelinConfiguration conf = ZeppelinConfiguration.create("zeppelin-test-site.xml"); + ZeppelinConfiguration conf = ZeppelinConfiguration.load("zeppelin-test-site.xml"); boolean isIt = conf.isNotebookPublic(); assertTrue(isIt); } @Test - public void getPathTest() { - ZeppelinConfiguration conf = ZeppelinConfiguration.create("zeppelin-test-site.xml"); + void getPathTest() { + ZeppelinConfiguration conf = ZeppelinConfiguration.load("zeppelin-test-site.xml"); conf.setProperty(ConfVars.ZEPPELIN_HOME.getVarName(), "/usr/lib/zeppelin"); assertEquals("/usr/lib/zeppelin", conf.getZeppelinHome()); assertEquals("/usr/lib/zeppelin/conf", conf.getConfDir()); } @Test - public void getConfigFSPath() { - ZeppelinConfiguration conf = ZeppelinConfiguration.create("zeppelin-test-site.xml"); + void getConfigFSPath() { + ZeppelinConfiguration conf = ZeppelinConfiguration.load("zeppelin-test-site.xml"); conf.setProperty(ConfVars.ZEPPELIN_HOME.getVarName(), "/usr/lib/zeppelin"); 
conf.setProperty(ConfVars.ZEPPELIN_CONFIG_FS_DIR.getVarName(), "conf"); assertEquals("/usr/lib/zeppelin/conf", conf.getConfigFSDir(true)); @@ -143,8 +132,8 @@ public void getConfigFSPath() { } @Test - public void checkParseException() { - ZeppelinConfiguration conf = ZeppelinConfiguration.create("zeppelin-test-site.xml"); + void checkParseException() { + ZeppelinConfiguration conf = ZeppelinConfiguration.load("zeppelin-test-site.xml"); // when conf.setProperty(ConfVars.ZEPPELIN_PORT.getVarName(), "luke skywalker"); // then diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/helium/HeliumApplicationFactoryTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/helium/HeliumApplicationFactoryTest.java index 574b4e22556..7188da0ff1d 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/helium/HeliumApplicationFactoryTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/helium/HeliumApplicationFactoryTest.java @@ -43,7 +43,7 @@ import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; -public class HeliumApplicationFactoryTest extends AbstractInterpreterTest { +class HeliumApplicationFactoryTest extends AbstractInterpreterTest { private NotebookRepo notebookRepo; private Notebook notebook; @@ -67,7 +67,7 @@ public void setUp() throws Exception { conf, authorizationService, notebookRepo, - new NoteManager(notebookRepo, ZeppelinConfiguration.create()), + new NoteManager(notebookRepo, ZeppelinConfiguration.load()), interpreterFactory, interpreterSettingManager, new Credentials()); @@ -88,7 +88,7 @@ public void tearDown() throws Exception { @Test @Disabled - public void testLoadRunUnloadApplication() + void testLoadRunUnloadApplication() throws IOException, ApplicationException, InterruptedException { // given HeliumPackage pkg1 = newHeliumPackage(HeliumType.APPLICATION, @@ -138,7 +138,7 @@ public void testLoadRunUnloadApplication() @Test @Disabled - public void testUnloadOnParagraphRemove() throws IOException { + 
void testUnloadOnParagraphRemove() throws IOException { // given HeliumPackage pkg1 = newHeliumPackage(HeliumType.APPLICATION, "name1", @@ -181,7 +181,7 @@ public void testUnloadOnParagraphRemove() throws IOException { @Test @Disabled - public void testUnloadOnInterpreterUnbind() throws IOException { + void testUnloadOnInterpreterUnbind() throws IOException { // given HeliumPackage pkg1 = newHeliumPackage(HeliumType.APPLICATION, "name1", @@ -221,7 +221,7 @@ public void testUnloadOnInterpreterUnbind() throws IOException { @Test @Disabled - public void testInterpreterUnbindOfNullReplParagraph() throws IOException { + void testInterpreterUnbindOfNullReplParagraph() throws IOException { // create note String note1Id = notebook.createNote("note1", anonymous); Note note1 = notebook.processNote(note1Id, @@ -248,7 +248,7 @@ public void testInterpreterUnbindOfNullReplParagraph() throws IOException { @Test @Disabled - public void testUnloadOnInterpreterRestart() throws IOException, InterpreterException { + void testUnloadOnInterpreterRestart() throws IOException, InterpreterException { // given HeliumPackage pkg1 = newHeliumPackage(HeliumType.APPLICATION, "name1", diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/helium/HeliumBundleFactoryTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/helium/HeliumBundleFactoryTest.java index 00f5bb0058b..2f1d57a68ae 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/helium/HeliumBundleFactoryTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/helium/HeliumBundleFactoryTest.java @@ -33,21 +33,18 @@ import java.util.List; import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.conf.ZeppelinConfiguration.ConfVars; -import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -public class HeliumBundleFactoryTest { +class HeliumBundleFactoryTest { private HeliumBundleFactory hbf; private File 
nodeInstallationDir; - private String zeppelinHomePath; @BeforeEach public void setUp() throws InstallationException, TaskRunnerException, IOException { - zeppelinHomePath = System.getProperty(ConfVars.ZEPPELIN_HOME.getVarName()); - System.setProperty(ConfVars.ZEPPELIN_HOME.getVarName(), "../"); - - ZeppelinConfiguration conf = ZeppelinConfiguration.create(); + ZeppelinConfiguration conf = ZeppelinConfiguration.load(); + conf.setProperty(ConfVars.ZEPPELIN_HOME.getVarName(), + new File("../").getAbsolutePath().toString()); nodeInstallationDir = new File(conf.getAbsoluteDir(ConfVars.ZEPPELIN_DEP_LOCALREPO), HELIUM_LOCAL_REPO); @@ -56,22 +53,17 @@ public void setUp() throws InstallationException, TaskRunnerException, IOExcepti hbf.copyFrameworkModulesToInstallPath(true); } - @AfterEach - public void tearDown() throws IOException { - if (null != zeppelinHomePath) { - System.setProperty(ConfVars.ZEPPELIN_HOME.getVarName(), zeppelinHomePath); - } - } - @Test - public void testInstallNpm() throws InstallationException { + + //@Test + void testInstallNpm() throws InstallationException { assertTrue(new File(nodeInstallationDir, "/node/npm").isFile()); assertTrue(new File(nodeInstallationDir, "/node/node").isFile()); assertTrue(new File(nodeInstallationDir, "/node/yarn/dist/bin/yarn").isFile()); } - @Test - public void downloadPackage() throws TaskRunnerException { + //@Test + void downloadPackage() throws TaskRunnerException { HeliumPackage pkg = newHeliumPackage( HeliumType.VISUALIZATION, @@ -87,8 +79,8 @@ public void downloadPackage() throws TaskRunnerException { assertTrue(new File(nodeInstallationDir, "/node_modules/lodash").isDirectory()); } - @Test - public void bundlePackage() throws IOException, TaskRunnerException { + //@Test + void bundlePackage() throws IOException, TaskRunnerException { HeliumPackage pkg = newHeliumPackage( HeliumType.VISUALIZATION, @@ -108,8 +100,8 @@ public void bundlePackage() throws IOException, TaskRunnerException { 
assertEquals(lastModified, bundle.lastModified()); } - @Test - public void bundleLocalPackage() throws IOException, TaskRunnerException { + //@Test + void bundleLocalPackage() throws IOException, TaskRunnerException { URL res = Resources.getResource("helium/webpack.config.js"); String resDir = new File(res.getFile()).getParent(); String localPkg = resDir + "/../../../src/test/resources/helium/vis1"; @@ -156,8 +148,8 @@ public void bundleErrorPropagation() throws IOException, TaskRunnerException { assertNull(bundle); } - @Test - public void switchVersion() throws IOException, TaskRunnerException { + //@Test + void switchVersion() throws IOException, TaskRunnerException { URL res = Resources.getResource("helium/webpack.config.js"); String resDir = new File(res.getFile()).getParent(); diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/helium/HeliumLocalRegistryTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/helium/HeliumLocalRegistryTest.java index 328656a58de..d36727fbd18 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/helium/HeliumLocalRegistryTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/helium/HeliumLocalRegistryTest.java @@ -24,17 +24,18 @@ import java.io.File; import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; import static org.apache.zeppelin.helium.HeliumPackage.newHeliumPackage; import static org.junit.jupiter.api.Assertions.assertEquals; -public class HeliumLocalRegistryTest { +class HeliumLocalRegistryTest { private File tmpDir; @BeforeEach public void setUp() throws Exception { - tmpDir = new File(System.getProperty("java.io.tmpdir") + "/ZeppelinLTest_" + System.currentTimeMillis()); - tmpDir.mkdirs(); + tmpDir = Files.createTempDirectory("ZeppelinLTest_").toFile(); } @AfterEach @@ -43,7 +44,7 @@ public void tearDown() throws IOException { } @Test - public void testGetAllPackage() throws IOException { + void testGetAllPackage() throws 
IOException { // given File r1Path = new File(tmpDir, "r1"); HeliumLocalRegistry r1 = new HeliumLocalRegistry("r1", r1Path.getAbsolutePath()); @@ -59,7 +60,8 @@ public void testGetAllPackage() throws IOException { new String[][]{}, "license", ""); - FileUtils.writeStringToFile(new File(r1Path, "pkg1.json"), gson.toJson(pkg1)); + FileUtils.writeStringToFile(new File(r1Path, "pkg1.json"), gson.toJson(pkg1), + StandardCharsets.UTF_8); // then assertEquals(1, r1.getAll().size()); diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/helium/HeliumOnlineRegistryTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/helium/HeliumOnlineRegistryTest.java index 9fb4f0a1dee..94c40a4e346 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/helium/HeliumOnlineRegistryTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/helium/HeliumOnlineRegistryTest.java @@ -24,10 +24,12 @@ import org.junit.jupiter.api.Test; import java.io.File; import java.io.IOException; +import java.nio.file.Files; + import org.apache.zeppelin.conf.ZeppelinConfiguration; -public class HeliumOnlineRegistryTest { +class HeliumOnlineRegistryTest { // ip 192.168.65.17 belongs to private network // request will be ended with connection time out error private static final String IP = "192.168.65.17"; @@ -37,11 +39,7 @@ public class HeliumOnlineRegistryTest { @BeforeEach public void setUp() throws Exception { - tmpDir = new File( - System.getProperty("java.io.tmpdir") - + "/ZeppelinLTest_" - + System.currentTimeMillis() - ); + tmpDir = Files.createTempDirectory("ZeppelinLTest").toFile(); } @AfterEach @@ -50,19 +48,14 @@ public void tearDown() throws IOException { } @Test - public void zeppelinNotebookS3TimeoutPropertyTest() throws IOException { - System.setProperty( - ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_S3_TIMEOUT.getVarName(), - TIMEOUT - ); - System.setProperty( - ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_S3_ENDPOINT.getVarName(), - IP - ); + void 
zeppelinNotebookS3TimeoutPropertyTest() throws IOException { + ZeppelinConfiguration zConf = ZeppelinConfiguration.load(); + zConf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_S3_TIMEOUT.getVarName(), TIMEOUT); + zConf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_S3_ENDPOINT.getVarName(), IP); HeliumOnlineRegistry heliumOnlineRegistry = new HeliumOnlineRegistry( "https://" + IP, "https://" + IP, - tmpDir + tmpDir, zConf ); long start = System.currentTimeMillis(); diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/helium/HeliumTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/helium/HeliumTest.java index 384103c2627..199da95d7ce 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/helium/HeliumTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/helium/HeliumTest.java @@ -18,6 +18,7 @@ import com.github.eirslett.maven.plugins.frontend.lib.TaskRunnerException; import org.apache.commons.io.FileUtils; +import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -25,19 +26,22 @@ import java.io.File; import java.io.IOException; import java.net.URISyntaxException; +import java.nio.file.Files; import static org.apache.zeppelin.helium.HeliumPackage.newHeliumPackage; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; -public class HeliumTest { +class HeliumTest { private File tmpDir; private File localRegistryPath; + private ZeppelinConfiguration zConf; @BeforeEach public void setUp() throws Exception { - tmpDir = new File(System.getProperty("java.io.tmpdir") + "/ZeppelinLTest_" + System.currentTimeMillis()); + tmpDir = Files.createTempDirectory("ZeppelinLTest").toFile(); + zConf = ZeppelinConfiguration.load(); tmpDir.mkdirs(); localRegistryPath = new 
File(tmpDir, "helium"); localRegistryPath.mkdirs(); @@ -49,11 +53,11 @@ public void tearDown() throws IOException { } @Test - public void testSaveLoadConf() throws IOException, URISyntaxException, TaskRunnerException { + void testSaveLoadConf() throws IOException, URISyntaxException, TaskRunnerException { // given File heliumConf = new File(tmpDir, "helium.conf"); Helium helium = new Helium(heliumConf.getAbsolutePath(), localRegistryPath.getAbsolutePath(), - null, null, null, null); + null, null, null, null, zConf); assertFalse(heliumConf.exists()); // when @@ -63,15 +67,16 @@ public void testSaveLoadConf() throws IOException, URISyntaxException, TaskRunne assertTrue(heliumConf.exists()); // then load without exception - Helium heliumRestored = new Helium( - heliumConf.getAbsolutePath(), localRegistryPath.getAbsolutePath(), null, null, null, null); + new Helium(heliumConf.getAbsolutePath(), localRegistryPath.getAbsolutePath(), null, null, null, + null, zConf); } @Test - public void testRestoreRegistryInstances() throws IOException, URISyntaxException, TaskRunnerException { + void testRestoreRegistryInstances() throws IOException, URISyntaxException, TaskRunnerException { File heliumConf = new File(tmpDir, "helium.conf"); Helium helium = new Helium( - heliumConf.getAbsolutePath(), localRegistryPath.getAbsolutePath(), null, null, null, null); + heliumConf.getAbsolutePath(), localRegistryPath.getAbsolutePath(), null, null, null, null, + zConf); HeliumTestRegistry registry1 = new HeliumTestRegistry("r1", "r1"); HeliumTestRegistry registry2 = new HeliumTestRegistry("r2", "r2"); helium.addRegistry(registry1); @@ -103,10 +108,11 @@ public void testRestoreRegistryInstances() throws IOException, URISyntaxExceptio } @Test - public void testRefresh() throws IOException, URISyntaxException, TaskRunnerException { + void testRefresh() throws IOException, URISyntaxException, TaskRunnerException { File heliumConf = new File(tmpDir, "helium.conf"); Helium helium = new Helium( - 
heliumConf.getAbsolutePath(), localRegistryPath.getAbsolutePath(), null, null, null, null); + heliumConf.getAbsolutePath(), localRegistryPath.getAbsolutePath(), null, null, null, null, + zConf); HeliumTestRegistry registry1 = new HeliumTestRegistry("r1", "r1"); helium.addRegistry(registry1); diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/helium/HeliumTestRegistry.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/helium/HeliumTestRegistry.java index a7b153816c6..e8ab9c1baab 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/helium/HeliumTestRegistry.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/helium/HeliumTestRegistry.java @@ -17,7 +17,6 @@ package org.apache.zeppelin.helium; import java.io.IOException; -import java.net.URI; import java.util.LinkedList; import java.util.List; diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/AbstractInterpreterTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/AbstractInterpreterTest.java index bf63964be14..cb93d2f30b0 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/AbstractInterpreterTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/AbstractInterpreterTest.java @@ -24,11 +24,15 @@ import org.apache.zeppelin.helium.ApplicationEventListener; import org.apache.zeppelin.interpreter.remote.RemoteInterpreterProcessListener; import org.apache.zeppelin.notebook.AuthorizationService; +import org.apache.zeppelin.notebook.GsonNoteParser; import org.apache.zeppelin.notebook.Note; import org.apache.zeppelin.notebook.NoteManager; +import org.apache.zeppelin.notebook.NoteParser; import org.apache.zeppelin.notebook.Notebook; import org.apache.zeppelin.notebook.repo.InMemoryNotebookRepo; import org.apache.zeppelin.notebook.repo.NotebookRepo; +import org.apache.zeppelin.plugin.PluginManager; +import org.apache.zeppelin.storage.ConfigStorage; import org.apache.zeppelin.user.Credentials; 
import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; @@ -52,12 +56,15 @@ public abstract class AbstractInterpreterTest { protected InterpreterSettingManager interpreterSettingManager; protected InterpreterFactory interpreterFactory; + protected NoteParser noteParser; protected Notebook notebook; protected File zeppelinHome; protected File interpreterDir; protected File confDir; protected File notebookDir; protected ZeppelinConfiguration conf; + protected ConfigStorage storage; + protected PluginManager pluginManager; @BeforeEach public void setUp() throws Exception { @@ -76,20 +83,32 @@ public void setUp() throws Exception { FileUtils.copyDirectory(new File("src/test/resources/interpreter"), interpreterDir); FileUtils.copyDirectory(new File("src/test/resources/conf"), confDir); - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_HOME.getVarName(), zeppelinHome.getAbsolutePath()); - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_CONF_DIR.getVarName(), confDir.getAbsolutePath()); - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_DIR.getVarName(), interpreterDir.getAbsolutePath()); - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_DIR.getVarName(), notebookDir.getAbsolutePath()); - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_GROUP_DEFAULT.getVarName(), "test"); + conf = ZeppelinConfiguration.load(); + conf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_HOME.getVarName(), + zeppelinHome.getAbsolutePath()); + conf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_CONF_DIR.getVarName(), + confDir.getAbsolutePath()); + conf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_DIR.getVarName(), + interpreterDir.getAbsolutePath()); + conf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_DIR.getVarName(), + notebookDir.getAbsolutePath()); + 
conf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_GROUP_DEFAULT.getVarName(), + "test"); + - conf = ZeppelinConfiguration.create(); NotebookRepo notebookRepo = new InMemoryNotebookRepo(); NoteManager noteManager = new NoteManager(notebookRepo, conf); - AuthorizationService authorizationService = new AuthorizationService(noteManager, conf); + noteParser = new GsonNoteParser(conf); + storage = ConfigStorage.createConfigStorage(conf); + pluginManager = new PluginManager(conf); + AuthorizationService authorizationService = + new AuthorizationService(noteManager, conf, storage); + interpreterSettingManager = new InterpreterSettingManager(conf, - mock(AngularObjectRegistryListener.class), mock(RemoteInterpreterProcessListener.class), mock(ApplicationEventListener.class)); + mock(AngularObjectRegistryListener.class), mock(RemoteInterpreterProcessListener.class), + mock(ApplicationEventListener.class), storage, pluginManager); interpreterFactory = new InterpreterFactory(interpreterSettingManager); - Credentials credentials = new Credentials(conf); + Credentials credentials = new Credentials(conf, storage); notebook = new Notebook(conf, authorizationService, notebookRepo, noteManager, interpreterFactory, interpreterSettingManager, credentials); interpreterSettingManager.setNotebook(notebook); } @@ -114,7 +133,8 @@ public void tearDown() throws Exception { } protected Note createNote() { - return new Note("test", "test", interpreterFactory, interpreterSettingManager, null, null, null); + return new Note("test", "test", interpreterFactory, interpreterSettingManager, null, null, null, + conf, noteParser); } protected InterpreterContext createDummyInterpreterContext() { diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/ConfInterpreterTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/ConfInterpreterTest.java index 18fec666fd6..785c30122f1 100644 --- 
a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/ConfInterpreterTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/ConfInterpreterTest.java @@ -24,13 +24,13 @@ import static org.junit.jupiter.api.Assertions.*; -public class ConfInterpreterTest extends AbstractInterpreterTest { +class ConfInterpreterTest extends AbstractInterpreterTest { private ExecutionContext executionContext = new ExecutionContext("user1", "note1", "test"); @Test - public void testCorrectConf() throws InterpreterException { + void testCorrectConf() throws InterpreterException { assertTrue(interpreterFactory.getInterpreter("test.conf", executionContext) instanceof ConfInterpreter); ConfInterpreter confInterpreter = (ConfInterpreter) interpreterFactory.getInterpreter("test.conf", executionContext); @@ -60,7 +60,7 @@ public void testCorrectConf() throws InterpreterException { } @Test - public void testPropertyTrim() throws InterpreterException { + void testPropertyTrim() throws InterpreterException { assertTrue(interpreterFactory.getInterpreter("test.conf", executionContext) instanceof ConfInterpreter); ConfInterpreter confInterpreter = (ConfInterpreter) interpreterFactory.getInterpreter("test.conf", executionContext); @@ -90,7 +90,7 @@ public void testPropertyTrim() throws InterpreterException { } @Test - public void testEmptyValue() throws InterpreterException { + void testEmptyValue() throws InterpreterException { ConfInterpreter confInterpreter = (ConfInterpreter) interpreterFactory.getInterpreter("test.conf", executionContext); InterpreterContext context = InterpreterContext.builder() @@ -111,7 +111,7 @@ public void testEmptyValue() throws InterpreterException { } @Test - public void testEmptyConf() throws InterpreterException { + void testEmptyConf() throws InterpreterException { assertTrue(interpreterFactory.getInterpreter("test.conf", executionContext) instanceof ConfInterpreter); ConfInterpreter confInterpreter = (ConfInterpreter) 
interpreterFactory.getInterpreter("test.conf", executionContext); @@ -131,7 +131,7 @@ public void testEmptyConf() throws InterpreterException { @Test - public void testRunningAfterOtherInterpreter() throws InterpreterException { + void testRunningAfterOtherInterpreter() throws InterpreterException { assertTrue(interpreterFactory.getInterpreter("test.conf", executionContext) instanceof ConfInterpreter); ConfInterpreter confInterpreter = (ConfInterpreter) interpreterFactory.getInterpreter("test.conf", executionContext); diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/InterpreterFactoryTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/InterpreterFactoryTest.java index 54c6ad94ca9..d972eff735d 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/InterpreterFactoryTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/InterpreterFactoryTest.java @@ -24,10 +24,10 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; -public class InterpreterFactoryTest extends AbstractInterpreterTest { +class InterpreterFactoryTest extends AbstractInterpreterTest { @Test - public void testGetFactory() throws InterpreterException { + void testGetFactory() throws InterpreterException { assertTrue(interpreterFactory.getInterpreter("", new ExecutionContext("user1", "note1", "test")) instanceof RemoteInterpreter); RemoteInterpreter remoteInterpreter = (RemoteInterpreter) interpreterFactory.getInterpreter("", new ExecutionContext("user1", "note1", "test")); @@ -52,7 +52,7 @@ public void testGetFactory() throws InterpreterException { } @Test - public void testUnknownRepl1() { + void testUnknownRepl1() { try { interpreterFactory.getInterpreter("test.unknown_repl", new ExecutionContext("user1", "note1", "test")); fail("should fail due to no such interpreter"); @@ -62,7 +62,7 @@ public void testUnknownRepl1() { } @Test - public void 
testUnknownRepl2() { + void testUnknownRepl2() { try { interpreterFactory.getInterpreter("unknown_repl", new ExecutionContext("user1", "note1", "test")); fail("should fail due to no such interpreter"); diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/InterpreterSettingManagerTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/InterpreterSettingManagerTest.java index 365546eee68..5600b50d25d 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/InterpreterSettingManagerTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/InterpreterSettingManagerTest.java @@ -42,7 +42,7 @@ import static org.mockito.Mockito.mock; -public class InterpreterSettingManagerTest extends AbstractInterpreterTest { +class InterpreterSettingManagerTest extends AbstractInterpreterTest { private String note1Id; private String note2Id; @@ -58,7 +58,7 @@ public void setUp() throws Exception { } @Test - public void testInitInterpreterSettingManager() throws IOException, RepositoryException { + void testInitInterpreterSettingManager() throws IOException, RepositoryException { assertEquals(6, interpreterSettingManager.get().size()); InterpreterSetting interpreterSetting = interpreterSettingManager.getByName("test"); assertEquals("test", interpreterSetting.getName()); @@ -87,7 +87,8 @@ public void testInitInterpreterSettingManager() throws IOException, RepositoryEx // Load it again InterpreterSettingManager interpreterSettingManager2 = new InterpreterSettingManager(conf, - mock(AngularObjectRegistryListener.class), mock(RemoteInterpreterProcessListener.class), mock(ApplicationEventListener.class)); + mock(AngularObjectRegistryListener.class), mock(RemoteInterpreterProcessListener.class), + mock(ApplicationEventListener.class), storage, pluginManager); assertEquals(6, interpreterSettingManager2.get().size()); interpreterSetting = interpreterSettingManager2.getByName("test"); assertEquals("test", 
interpreterSetting.getName()); @@ -108,7 +109,7 @@ public void testInitInterpreterSettingManager() throws IOException, RepositoryEx } @Test - public void testCreateUpdateRemoveSetting() throws IOException, InterpreterException { + void testCreateUpdateRemoveSetting() throws IOException, InterpreterException { // create new interpreter setting InterpreterOption option = new InterpreterOption(); option.setPerNote("scoped"); @@ -145,7 +146,8 @@ public void testCreateUpdateRemoveSetting() throws IOException, InterpreterExcep // load it again, it should be saved in interpreter-setting.json. So we can restore it properly InterpreterSettingManager interpreterSettingManager2 = new InterpreterSettingManager(conf, - mock(AngularObjectRegistryListener.class), mock(RemoteInterpreterProcessListener.class), mock(ApplicationEventListener.class)); + mock(AngularObjectRegistryListener.class), mock(RemoteInterpreterProcessListener.class), + mock(ApplicationEventListener.class), storage, pluginManager); assertEquals(7, interpreterSettingManager2.get().size()); interpreterSetting = interpreterSettingManager2.getByName("test3"); assertEquals("test3", interpreterSetting.getName()); @@ -196,14 +198,15 @@ public void testCreateUpdateRemoveSetting() throws IOException, InterpreterExcep assertEquals(6, interpreterSettingManager.get().size()); // load it again - InterpreterSettingManager interpreterSettingManager3 = new InterpreterSettingManager(ZeppelinConfiguration.create(), - mock(AngularObjectRegistryListener.class), mock(RemoteInterpreterProcessListener.class), mock(ApplicationEventListener.class)); + InterpreterSettingManager interpreterSettingManager3 = new InterpreterSettingManager(conf, + mock(AngularObjectRegistryListener.class), mock(RemoteInterpreterProcessListener.class), + mock(ApplicationEventListener.class), storage, pluginManager); assertEquals(6, interpreterSettingManager3.get().size()); } @Test - public void testGetEditor() { + void testGetEditor() { // get editor setting 
from interpreter-setting.json Map editor = interpreterSettingManager.getEditorSetting("%test.echo", note1Id); assertEquals("java", editor.get("language")); @@ -213,7 +216,7 @@ public void testGetEditor() { } @Test - public void testRestartShared() throws InterpreterException { + void testRestartShared() throws InterpreterException { InterpreterSetting interpreterSetting = interpreterSettingManager.getByName("test"); interpreterSetting.getOption().setPerUser("shared"); interpreterSetting.getOption().setPerNote("shared"); @@ -227,7 +230,7 @@ public void testRestartShared() throws InterpreterException { } @Test - public void testRestartPerUserIsolated() throws InterpreterException { + void testRestartPerUserIsolated() throws InterpreterException { InterpreterSetting interpreterSetting = interpreterSettingManager.getByName("test"); interpreterSetting.getOption().setPerUser("isolated"); interpreterSetting.getOption().setPerNote("shared"); @@ -241,7 +244,7 @@ public void testRestartPerUserIsolated() throws InterpreterException { } @Test - public void testRestartPerNoteIsolated() throws InterpreterException { + void testRestartPerNoteIsolated() throws InterpreterException { InterpreterSetting interpreterSetting = interpreterSettingManager.getByName("test"); interpreterSetting.getOption().setPerUser("shared"); interpreterSetting.getOption().setPerNote("isolated"); @@ -255,7 +258,7 @@ public void testRestartPerNoteIsolated() throws InterpreterException { } @Test - public void testRestartPerUserScoped() throws InterpreterException { + void testRestartPerUserScoped() throws InterpreterException { InterpreterSetting interpreterSetting = interpreterSettingManager.getByName("test"); interpreterSetting.getOption().setPerUser("scoped"); interpreterSetting.getOption().setPerNote("shared"); @@ -271,7 +274,7 @@ public void testRestartPerUserScoped() throws InterpreterException { } @Test - public void testRestartPerNoteScoped() throws InterpreterException { + void 
testRestartPerNoteScoped() throws InterpreterException { InterpreterSetting interpreterSetting = interpreterSettingManager.getByName("test"); interpreterSetting.getOption().setPerUser("shared"); interpreterSetting.getOption().setPerNote("scoped"); @@ -287,7 +290,7 @@ public void testRestartPerNoteScoped() throws InterpreterException { } @Test - public void testInterpreterInclude() throws Exception { + void testInterpreterInclude() throws Exception { try { System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_INCLUDES.getVarName(), "mock1"); setUp(); @@ -300,7 +303,7 @@ public void testInterpreterInclude() throws Exception { } @Test - public void testInterpreterExclude() throws Exception { + void testInterpreterExclude() throws Exception { try { System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_EXCLUDES.getVarName(), "test,config_test,mock_resource_pool"); @@ -315,7 +318,7 @@ public void testInterpreterExclude() throws Exception { } @Test - public void testInterpreterIncludeExcludeTogether() throws Exception { + void testInterpreterIncludeExcludeTogether() throws Exception { try { System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_INCLUDES.getVarName(), "test,"); diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/InterpreterSettingTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/InterpreterSettingTest.java index 82df374b949..9784b7132b7 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/InterpreterSettingTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/InterpreterSettingTest.java @@ -38,7 +38,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -public class InterpreterSettingTest extends AbstractInterpreterTest{ +class InterpreterSettingTest extends AbstractInterpreterTest{ private static final Logger LOGGER = LoggerFactory.getLogger(InterpreterSettingTest.class); @@ -59,7 
+59,7 @@ public void setUp() throws Exception { } @Test - public void testCreateInterpreters() { + void testCreateInterpreters() { InterpreterOption interpreterOption = new InterpreterOption(); interpreterOption.setPerUser(InterpreterOption.SHARED); InterpreterInfo interpreterInfo1 = new InterpreterInfo(EchoInterpreter.class.getName(), @@ -78,6 +78,7 @@ public void testCreateInterpreters() { .setInterpreterInfos(interpreterInfos) .setOption(interpreterOption) .setIntepreterSettingManager(interpreterSettingManager) + .setConf(conf) .create(); // create default interpreter for user1 and note1 @@ -95,7 +96,7 @@ public void testCreateInterpreters() { } @Test - public void testSharedMode() { + void testSharedMode() { InterpreterOption interpreterOption = new InterpreterOption(); interpreterOption.setPerUser(InterpreterOption.SHARED); InterpreterInfo interpreterInfo1 = new InterpreterInfo(EchoInterpreter.class.getName(), @@ -112,6 +113,7 @@ public void testSharedMode() { .setInterpreterInfos(interpreterInfos) .setOption(interpreterOption) .setIntepreterSettingManager(interpreterSettingManager) + .setConf(conf) .create(); // create default interpreter for user1 and note1 @@ -135,7 +137,7 @@ public void testSharedMode() { } @Test - public void testPerUserScopedMode() { + void testPerUserScopedMode() { InterpreterOption interpreterOption = new InterpreterOption(); interpreterOption.setPerUser(InterpreterOption.SCOPED); InterpreterInfo interpreterInfo1 = new InterpreterInfo(EchoInterpreter.class.getName(), @@ -152,6 +154,7 @@ public void testPerUserScopedMode() { .setInterpreterInfos(interpreterInfos) .setOption(interpreterOption) .setIntepreterSettingManager(interpreterSettingManager) + .setConf(conf) .create(); // create interpreter for user1 and note1 @@ -175,7 +178,7 @@ public void testPerUserScopedMode() { } @Test - public void testPerNoteScopedMode() { + void testPerNoteScopedMode() { InterpreterOption interpreterOption = new InterpreterOption(); 
interpreterOption.setPerNote(InterpreterOption.SCOPED); InterpreterInfo interpreterInfo1 = new InterpreterInfo(EchoInterpreter.class.getName(), @@ -192,6 +195,7 @@ public void testPerNoteScopedMode() { .setInterpreterInfos(interpreterInfos) .setOption(interpreterOption) .setIntepreterSettingManager(interpreterSettingManager) + .setConf(conf) .create(); // create interpreter for user1 and note1 @@ -215,7 +219,7 @@ public void testPerNoteScopedMode() { } @Test - public void testPerUserIsolatedMode() { + void testPerUserIsolatedMode() { InterpreterOption interpreterOption = new InterpreterOption(); interpreterOption.setPerUser(InterpreterOption.ISOLATED); InterpreterInfo interpreterInfo1 = new InterpreterInfo(EchoInterpreter.class.getName(), @@ -232,6 +236,7 @@ public void testPerUserIsolatedMode() { .setInterpreterInfos(interpreterInfos) .setOption(interpreterOption) .setIntepreterSettingManager(interpreterSettingManager) + .setConf(conf) .create(); // create interpreter for user1 and note1 @@ -256,7 +261,7 @@ public void testPerUserIsolatedMode() { } @Test - public void testPerNoteIsolatedMode() { + void testPerNoteIsolatedMode() { InterpreterOption interpreterOption = new InterpreterOption(); interpreterOption.setPerNote(InterpreterOption.ISOLATED); InterpreterInfo interpreterInfo1 = new InterpreterInfo(EchoInterpreter.class.getName(), @@ -273,6 +278,7 @@ public void testPerNoteIsolatedMode() { .setInterpreterInfos(interpreterInfos) .setOption(interpreterOption) .setIntepreterSettingManager(interpreterSettingManager) + .setConf(conf) .create(); // create interpreter for user1 and note1 @@ -297,7 +303,7 @@ public void testPerNoteIsolatedMode() { } @Test - public void testPerUserIsolatedPerNoteScopedMode() { + void testPerUserIsolatedPerNoteScopedMode() { InterpreterOption interpreterOption = new InterpreterOption(); interpreterOption.setPerUser(InterpreterOption.ISOLATED); interpreterOption.setPerNote(InterpreterOption.SCOPED); @@ -315,6 +321,7 @@ public void 
testPerUserIsolatedPerNoteScopedMode() { .setInterpreterInfos(interpreterInfos) .setOption(interpreterOption) .setIntepreterSettingManager(interpreterSettingManager) + .setConf(conf) .create(); // create interpreter for user1 and note1 @@ -353,7 +360,7 @@ public void testPerUserIsolatedPerNoteScopedMode() { } @Test - public void testPerUserIsolatedPerNoteIsolatedMode() { + void testPerUserIsolatedPerNoteIsolatedMode() { InterpreterOption interpreterOption = new InterpreterOption(); interpreterOption.setPerUser(InterpreterOption.ISOLATED); interpreterOption.setPerNote(InterpreterOption.ISOLATED); @@ -371,6 +378,7 @@ public void testPerUserIsolatedPerNoteIsolatedMode() { .setInterpreterInfos(interpreterInfos) .setOption(interpreterOption) .setIntepreterSettingManager(interpreterSettingManager) + .setConf(conf) .create(); // create interpreter for user1 and note1 @@ -415,7 +423,7 @@ public void testPerUserIsolatedPerNoteIsolatedMode() { } @Test - public void testPerUserScopedPerNoteScopedMode() { + void testPerUserScopedPerNoteScopedMode() { InterpreterOption interpreterOption = new InterpreterOption(); interpreterOption.setPerUser(InterpreterOption.SCOPED); interpreterOption.setPerNote(InterpreterOption.SCOPED); @@ -433,6 +441,7 @@ public void testPerUserScopedPerNoteScopedMode() { .setInterpreterInfos(interpreterInfos) .setOption(interpreterOption) .setIntepreterSettingManager(interpreterSettingManager) + .setConf(conf) .create(); // create interpreter for user1 and note1 @@ -474,7 +483,7 @@ public void testPerUserScopedPerNoteScopedMode() { } @Test - public void testInterpreterJsonSerializable() { + void testInterpreterJsonSerializable() { InterpreterOption interpreterOption = new InterpreterOption(); interpreterOption.setPerUser(InterpreterOption.SHARED); InterpreterInfo interpreterInfo1 = new InterpreterInfo(EchoInterpreter.class.getName(), @@ -496,16 +505,16 @@ public void testInterpreterJsonSerializable() { String json = 
InterpreterSetting.toJson(interpreterSetting); InterpreterSetting checkIntpSetting = InterpreterSetting.fromJson(json); - assertEquals(checkIntpSetting.getId(), "id"); - assertEquals(checkIntpSetting.getName(), "id"); - assertEquals(checkIntpSetting.getGroup(), "group"); + assertEquals("id", checkIntpSetting.getId()); + assertEquals("id", checkIntpSetting.getName()); + assertEquals("group", checkIntpSetting.getGroup()); assertTrue(checkIntpSetting.getOption().perUserShared()); assertNotNull(checkIntpSetting.getInterpreterInfo("echo")); assertNotNull(checkIntpSetting.getInterpreterInfo("double_echo")); } @Test - public void testIsUserAuthorized() { + void testIsUserAuthorized() { List userAndRoles = new ArrayList<>(); userAndRoles.add("User1"); userAndRoles.add("Role1"); @@ -525,6 +534,7 @@ public void testIsUserAuthorized() { .setName("id") .setGroup("group") .setOption(interpreterOption) + .setConf(conf) .create(); assertTrue(interpreterSetting.isUserAuthorized(userAndRoles)); @@ -539,6 +549,7 @@ public void testIsUserAuthorized() { .setName("id") .setGroup("group") .setOption(interpreterOption) + .setConf(conf) .create(); assertFalse(interpreterSetting.isUserAuthorized(userAndRoles)); @@ -557,7 +568,7 @@ public void testIsUserAuthorized() { } @Test - public void testLoadDependency() throws InterruptedException { + void testLoadDependency() throws InterruptedException { InterpreterOption interpreterOption = new InterpreterOption(); interpreterOption.setUserPermission(true); InterpreterSetting interpreterSetting = new InterpreterSetting.Builder() @@ -566,7 +577,8 @@ public void testLoadDependency() throws InterruptedException { .setGroup("group") .setOption(interpreterOption) .setIntepreterSettingManager(interpreterSettingManager) - .setDependencyResolver(new DependencyResolver("/tmp")) + .setDependencyResolver(new DependencyResolver("/tmp", conf)) + .setConf(conf) .create(); // set invalid dependency diff --git 
a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/ManagedInterpreterGroupTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/ManagedInterpreterGroupTest.java index af9bb782106..09a8974672c 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/ManagedInterpreterGroupTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/ManagedInterpreterGroupTest.java @@ -18,6 +18,7 @@ package org.apache.zeppelin.interpreter; import org.junit.jupiter.api.BeforeEach; +import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.eclipse.aether.RepositoryException; import org.junit.jupiter.api.Test; @@ -29,12 +30,14 @@ import static org.junit.jupiter.api.Assertions.assertEquals; -public class ManagedInterpreterGroupTest { +class ManagedInterpreterGroupTest { private InterpreterSetting interpreterSetting; + private ZeppelinConfiguration zConf; @BeforeEach public void setUp() throws IOException, RepositoryException { + zConf = ZeppelinConfiguration.load(); InterpreterOption interpreterOption = new InterpreterOption(); interpreterOption.setPerUser(InterpreterOption.SCOPED); InterpreterInfo interpreterInfo1 = new InterpreterInfo(EchoInterpreter.class.getName(), @@ -50,12 +53,14 @@ public void setUp() throws IOException, RepositoryException { .setGroup("test") .setInterpreterInfos(interpreterInfos) .setOption(interpreterOption) + .setConf(zConf) .create(); } @Test - public void testInterpreterGroup() { - ManagedInterpreterGroup interpreterGroup = new ManagedInterpreterGroup("group_1", interpreterSetting); + void testInterpreterGroup() { + ManagedInterpreterGroup interpreterGroup = + new ManagedInterpreterGroup("group_1", interpreterSetting, zConf); assertEquals(0, interpreterGroup.getSessionNum()); // create session_1 diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/SessionConfInterpreterTest.java 
b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/SessionConfInterpreterTest.java index ab04d569b10..44701cf5491 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/SessionConfInterpreterTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/SessionConfInterpreterTest.java @@ -18,6 +18,7 @@ package org.apache.zeppelin.interpreter; +import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.interpreter.remote.RemoteInterpreter; import org.junit.jupiter.api.Test; @@ -29,10 +30,10 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -public class SessionConfInterpreterTest { +class SessionConfInterpreterTest { @Test - public void testUserSessionConfInterpreter() throws InterpreterException { + void testUserSessionConfInterpreter() throws InterpreterException { InterpreterSetting mockInterpreterSetting = mock(InterpreterSetting.class); ManagedInterpreterGroup mockInterpreterGroup = mock(ManagedInterpreterGroup.class); @@ -45,7 +46,8 @@ public void testUserSessionConfInterpreter() throws InterpreterException { properties, "session_1", "group_1", mockInterpreterSetting); RemoteInterpreter remoteInterpreter = - new RemoteInterpreter(properties, "session_1", "clasName", "user1"); + new RemoteInterpreter(properties, "session_1", "clasName", "user1", + ZeppelinConfiguration.load()); List interpreters = new ArrayList<>(); interpreters.add(confInterpreter); interpreters.add(remoteInterpreter); diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/install/InstallInterpreterTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/install/InstallInterpreterTest.java index 800dc5b6277..32b3a525692 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/install/InstallInterpreterTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/install/InstallInterpreterTest.java @@ -9,6 +9,7 @@ 
import java.io.File; import java.io.IOException; import java.nio.charset.StandardCharsets; +import java.nio.file.Files; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -29,15 +30,16 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -public class InstallInterpreterTest { +class InstallInterpreterTest { private File tmpDir; private InstallInterpreter installer; private File interpreterBaseDir; + private ZeppelinConfiguration zConf; @BeforeEach public void setUp() throws IOException { - tmpDir = new File(System.getProperty("java.io.tmpdir")+"/ZeppelinLTest_"+System.currentTimeMillis()); - new File(tmpDir, "conf").mkdirs(); + tmpDir = Files.createTempDirectory("InstallInterpreterTest").toFile(); + zConf = ZeppelinConfiguration.load(); interpreterBaseDir = new File(tmpDir, "interpreter"); File localRepoDir = new File(tmpDir, "local-repo"); interpreterBaseDir.mkdir(); @@ -55,8 +57,8 @@ public void setUp() throws IOException { FileUtils.writeStringToFile(new File(tmpDir, "conf/interpreter-list"), interpreterList, StandardCharsets.UTF_8); - installer = new InstallInterpreter(interpreterListFile, interpreterBaseDir, localRepoDir - .getAbsolutePath()); + installer = new InstallInterpreter(interpreterListFile, interpreterBaseDir, + localRepoDir.getAbsolutePath(), zConf); } @AfterEach @@ -66,12 +68,12 @@ public void tearDown() throws IOException { @Test - public void testList() { + void testList() { assertEquals(2, installer.list().size()); } @Test - public void install() { + void install() { assertEquals(0, interpreterBaseDir.listFiles().length); installer.install("intp1"); @@ -79,7 +81,7 @@ public void install() { } @Test - public void installAll() { + void installAll() { installer.installAll(); assertTrue(new File(interpreterBaseDir, "intp1").isDirectory()); assertTrue(new File(interpreterBaseDir, "intp2").isDirectory()); diff --git 
a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/integration/DownloadUtils.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/integration/DownloadUtils.java deleted file mode 100644 index 8cbbdacd279..00000000000 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/integration/DownloadUtils.java +++ /dev/null @@ -1,194 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.zeppelin.interpreter.integration; - -import org.apache.commons.io.FileUtils; -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.BufferedReader; -import java.io.File; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.net.URL; -import java.nio.charset.StandardCharsets; - -/** - * Utility class for downloading spark/flink. This is used for spark/flink integration test. 
- */ -public class DownloadUtils { - private static Logger LOGGER = LoggerFactory.getLogger(DownloadUtils.class); - - private static String downloadFolder = System.getProperty("user.home") + "/.cache"; - - static { - try { - FileUtils.forceMkdir(new File(downloadFolder)); - } catch (IOException e) { - throw new RuntimeException("Fail to create download folder: " + downloadFolder, e); - } - } - - public static String downloadSpark(String sparkVersion, String hadoopVersion) { - String sparkDownloadFolder = downloadFolder + "/spark"; - File targetSparkHomeFolder = - new File(sparkDownloadFolder + "/spark-" + sparkVersion + "-bin-hadoop" + hadoopVersion); - if (targetSparkHomeFolder.exists()) { - LOGGER.info("Skip to download spark as it is already downloaded."); - return targetSparkHomeFolder.getAbsolutePath(); - } - download("spark", sparkVersion, "-bin-hadoop" + hadoopVersion + ".tgz"); - return targetSparkHomeFolder.getAbsolutePath(); - } - - public static String downloadFlink(String flinkVersion, String scalaVersion) { - String flinkDownloadFolder = downloadFolder + "/flink"; - File targetFlinkHomeFolder = new File(flinkDownloadFolder + "/flink-" + flinkVersion); - if (targetFlinkHomeFolder.exists()) { - LOGGER.info("Skip to download flink as it is already downloaded."); - return targetFlinkHomeFolder.getAbsolutePath(); - } - download("flink", flinkVersion, "-bin-scala_" + scalaVersion + ".tgz"); - // download other dependencies for running flink with yarn and hive - try { - runShellCommand(new String[]{"wget", - "https://repo1.maven.org/maven2/org/apache/flink/flink-connector-hive_" + scalaVersion + "/" - + flinkVersion + "/flink-connector-hive_" + scalaVersion + "-" + flinkVersion + ".jar", - "-P", targetFlinkHomeFolder + "/lib"}); - runShellCommand(new String[]{"wget", - "https://repo1.maven.org/maven2/org/apache/flink/flink-hadoop-compatibility_" + scalaVersion + "/" - + flinkVersion + "/flink-hadoop-compatibility_" + scalaVersion + "-" + flinkVersion + 
".jar", - "-P", targetFlinkHomeFolder + "/lib"}); - runShellCommand(new String[]{"wget", - "https://repo1.maven.org/maven2/org/apache/hive/hive-exec/2.3.4/hive-exec-2.3.4.jar", - "-P", targetFlinkHomeFolder + "/lib"}); - runShellCommand(new String[]{"wget", - "https://repo1.maven.org/maven2/org/apache/flink/flink-shaded-hadoop2-uber/2.7.5-1.8.1/flink-shaded-hadoop2-uber-2.7.5-1.8.1.jar", - "-P", targetFlinkHomeFolder + "/lib"}); - runShellCommand(new String[]{"wget", - "https://repo1.maven.org/maven2/org/apache/flink/flink-table-api-scala_" + scalaVersion + "/" - + flinkVersion + "/flink-table-api-scala_" + scalaVersion + "-" + flinkVersion + ".jar", - "-P", targetFlinkHomeFolder + "/lib"}); - runShellCommand(new String[]{"wget", - "https://repo1.maven.org/maven2/org/apache/flink/flink-table-api-scala-bridge_" + scalaVersion + "/" - + flinkVersion + "/flink-table-api-scala-bridge_" + scalaVersion + "-" + flinkVersion + ".jar", - "-P", targetFlinkHomeFolder + "/lib"}); - runShellCommand(new String[]{"mv", - targetFlinkHomeFolder + "/opt/" + "flink-table-planner_" + scalaVersion + "-" + flinkVersion + ".jar", - targetFlinkHomeFolder + "/lib"}); - runShellCommand(new String[]{"mv", - targetFlinkHomeFolder + "/lib/" + "flink-table-planner-loader-" + flinkVersion + ".jar", - targetFlinkHomeFolder + "/opt"}); - if (SemanticVersion.of(flinkVersion).equalsOrNewerThan(SemanticVersion.of("1.16.0"))) { - runShellCommand(new String[]{"mv", - targetFlinkHomeFolder + "/opt/" + "flink-sql-client-" + flinkVersion + ".jar", - targetFlinkHomeFolder + "/lib"}); - } - } catch (Exception e) { - throw new RuntimeException("Fail to download jar", e); - } - return targetFlinkHomeFolder.getAbsolutePath(); - } - - public static String downloadHadoop(String version) { - String hadoopDownloadFolder = downloadFolder + "/hadoop"; - File targetHadoopHomeFolder = new File(hadoopDownloadFolder + "/hadoop-" + version); - if (targetHadoopHomeFolder.exists()) { - LOGGER.info("Skip to download hadoop 
as it is already downloaded."); - return targetHadoopHomeFolder.getAbsolutePath(); - } - download("hadoop", version, ".tar.gz", "hadoop/core"); - return targetHadoopHomeFolder.getAbsolutePath(); - } - - // Try mirrors first, if fails fallback to apache archive - private static void download(String project, String version, String postFix, String projectPath) { - String projectDownloadFolder = downloadFolder + "/" + project; - try { - String preferredMirror = IOUtils.toString(new URL("https://www.apache.org/dyn/closer.lua?preferred=true"), StandardCharsets.UTF_8); - File downloadFile = new File(projectDownloadFolder + "/" + project + "-" + version + postFix); - String downloadURL = preferredMirror + "/" + projectPath + "/" + project + "-" + version + "/" + project + "-" + version + postFix; - runShellCommand(new String[]{"wget", downloadURL, "-P", projectDownloadFolder}); - runShellCommand(new String[]{"tar", "-xvf", downloadFile.getAbsolutePath(), "-C", projectDownloadFolder}); - } catch (Exception e) { - LOGGER.warn("Failed to download " + project + " from mirror site, fallback to use apache archive", e); - File downloadFile = new File(projectDownloadFolder + "/" + project + "-" + version + postFix); - String downloadURL = - "https://archive.apache.org/dist/" + projectPath + "/" + project +"-" - + version - + "/" + project + "-" - + version - + postFix; - try { - runShellCommand(new String[]{"wget", downloadURL, "-P", projectDownloadFolder}); - runShellCommand( - new String[]{"tar", "-xvf", downloadFile.getAbsolutePath(), "-C", projectDownloadFolder}); - } catch (Exception ex) { - throw new RuntimeException("Fail to download " + project + " " + version, ex); - } - } - } - - private static void download(String project, String version, String postFix) { - download(project, version, postFix, project); - } - - private static void runShellCommand(String[] commands) throws IOException, InterruptedException { - LOGGER.info("Starting shell commands: " + 
StringUtils.join(commands, " ")); - Process process = Runtime.getRuntime().exec(commands); - StreamGobbler errorGobbler = new StreamGobbler(process.getErrorStream()); - StreamGobbler outputGobbler = new StreamGobbler(process.getInputStream()); - errorGobbler.start(); - outputGobbler.start(); - if (process.waitFor() != 0) { - throw new IOException("Fail to run shell commands: " + StringUtils.join(commands, " ")); - } - LOGGER.info("Complete shell commands: " + StringUtils.join(commands, " ")); - } - - private static class StreamGobbler extends Thread { - InputStream is; - - // reads everything from is until empty. - StreamGobbler(InputStream is) { - this.is = is; - } - - @Override - public void run() { - try { - InputStreamReader isr = new InputStreamReader(is); - BufferedReader br = new BufferedReader(isr); - String line = null; - long startTime = System.currentTimeMillis(); - while ((line = br.readLine()) != null) { - // logging per 5 seconds - if ((System.currentTimeMillis() - startTime) > 5000) { - LOGGER.info(line); - startTime = System.currentTimeMillis(); - } - } - } catch (IOException ioe) { - LOGGER.warn("Fail to print shell output", ioe); - } - } - } -} diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/launcher/SparkInterpreterLauncherTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/launcher/SparkInterpreterLauncherTest.java index 15b07a245b0..c1dc975b3c5 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/launcher/SparkInterpreterLauncherTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/launcher/SparkInterpreterLauncherTest.java @@ -19,9 +19,9 @@ import org.apache.commons.collections.CollectionUtils; import org.apache.commons.io.FileUtils; +import org.apache.zeppelin.test.DownloadUtils; import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.interpreter.InterpreterOption; -import 
org.apache.zeppelin.interpreter.integration.DownloadUtils; import org.apache.zeppelin.interpreter.remote.ExecRemoteInterpreterProcess; import org.apache.zeppelin.util.Util; import org.junit.jupiter.api.BeforeEach; @@ -41,30 +41,26 @@ import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; -public class SparkInterpreterLauncherTest { +class SparkInterpreterLauncherTest { private static final Logger LOGGER = LoggerFactory.getLogger(SparkInterpreterLauncher.class); private String sparkHome; private String zeppelinHome; + private ZeppelinConfiguration zConf; @BeforeEach public void setUp() { - for (final ZeppelinConfiguration.ConfVars confVar : ZeppelinConfiguration.ConfVars.values()) { - System.clearProperty(confVar.getVarName()); - } - - sparkHome = DownloadUtils.downloadSpark("3.4.1", "3"); - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_HOME.getVarName(), - new File("..").getAbsolutePath()); - - zeppelinHome = ZeppelinConfiguration.create().getZeppelinHome(); + sparkHome = DownloadUtils.downloadSpark(); + zConf = ZeppelinConfiguration.load(); + zConf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_HOME.getVarName(), + new File("..").getAbsolutePath()); + zeppelinHome = zConf.getZeppelinHome(); LOGGER.info("ZEPPELIN_HOME: " + zeppelinHome); } @Test - public void testConnectTimeOut() throws IOException { - ZeppelinConfiguration zConf = ZeppelinConfiguration.create(); + void testConnectTimeOut() throws IOException { SparkInterpreterLauncher launcher = new SparkInterpreterLauncher(zConf, null); Properties properties = new Properties(); properties.setProperty("SPARK_HOME", sparkHome); @@ -87,8 +83,7 @@ public void testConnectTimeOut() throws IOException { } @Test - public void testLocalMode() throws IOException { - ZeppelinConfiguration zConf = ZeppelinConfiguration.create(); + void testLocalMode() throws IOException { SparkInterpreterLauncher launcher = new SparkInterpreterLauncher(zConf, 
null); Properties properties = new Properties(); properties.setProperty("SPARK_HOME", sparkHome); @@ -118,8 +113,7 @@ public void testLocalMode() throws IOException { } @Test - public void testYarnClientMode_1() throws IOException { - ZeppelinConfiguration zConf = ZeppelinConfiguration.create(); + void testYarnClientMode_1() throws IOException { SparkInterpreterLauncher launcher = new SparkInterpreterLauncher(zConf, null); Properties properties = new Properties(); properties.setProperty("SPARK_HOME", sparkHome); @@ -152,8 +146,7 @@ public void testYarnClientMode_1() throws IOException { } @Test - public void testYarnClientMode_2() throws IOException { - ZeppelinConfiguration zConf = ZeppelinConfiguration.create(); + void testYarnClientMode_2() throws IOException { SparkInterpreterLauncher launcher = new SparkInterpreterLauncher(zConf, null); Properties properties = new Properties(); properties.setProperty("SPARK_HOME", sparkHome); @@ -188,8 +181,7 @@ public void testYarnClientMode_2() throws IOException { } @Test - public void testYarnClusterMode_1() throws IOException { - ZeppelinConfiguration zConf = ZeppelinConfiguration.create(); + void testYarnClusterMode_1() throws IOException { SparkInterpreterLauncher launcher = new SparkInterpreterLauncher(zConf, null); Properties properties = new Properties(); properties.setProperty("SPARK_HOME", sparkHome); @@ -231,8 +223,7 @@ public void testYarnClusterMode_1() throws IOException { } @Test - public void testYarnClusterMode_2() throws IOException { - ZeppelinConfiguration zConf = ZeppelinConfiguration.create(); + void testYarnClusterMode_2() throws IOException { SparkInterpreterLauncher launcher = new SparkInterpreterLauncher(zConf, null); Properties properties = new Properties(); properties.setProperty("SPARK_HOME", sparkHome); @@ -283,8 +274,7 @@ public void testYarnClusterMode_2() throws IOException { } @Test - public void testYarnClusterMode_3() throws IOException { - ZeppelinConfiguration zConf = 
ZeppelinConfiguration.create(); + void testYarnClusterMode_3() throws IOException { SparkInterpreterLauncher launcher = new SparkInterpreterLauncher(zConf, null); Properties properties = new Properties(); properties.setProperty("SPARK_HOME", sparkHome); diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/launcher/StandardInterpreterLauncherTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/launcher/StandardInterpreterLauncherTest.java index 6912a26712d..a9db3ee05ac 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/launcher/StandardInterpreterLauncherTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/launcher/StandardInterpreterLauncherTest.java @@ -29,17 +29,17 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; -public class StandardInterpreterLauncherTest { +class StandardInterpreterLauncherTest { + + private ZeppelinConfiguration zConf; + @BeforeEach public void setUp() { - for (final ZeppelinConfiguration.ConfVars confVar : ZeppelinConfiguration.ConfVars.values()) { - System.clearProperty(confVar.getVarName()); - } + zConf = ZeppelinConfiguration.load(); } @Test - public void testLauncher() throws IOException { - ZeppelinConfiguration zConf = ZeppelinConfiguration.create(); + void testLauncher() throws IOException { StandardInterpreterLauncher launcher = new StandardInterpreterLauncher(zConf, null); Properties properties = new Properties(); properties.setProperty("ENV_1", "VALUE_1"); @@ -60,11 +60,11 @@ public void testLauncher() throws IOException { assertEquals("VALUE_1", interpreterProcess.getEnv().get("ENV_1")); assertTrue(interpreterProcess.getEnv().containsKey("INTERPRETER_GROUP_ID")); assertEquals(true, interpreterProcess.isUserImpersonated()); + interpreterProcess.close(); } @Test - public void testConnectTimeOut() throws IOException { - ZeppelinConfiguration zConf = 
ZeppelinConfiguration.create(); + void testConnectTimeOut() throws IOException { StandardInterpreterLauncher launcher = new StandardInterpreterLauncher(zConf, null); Properties properties = new Properties(); properties.setProperty( @@ -83,6 +83,7 @@ public void testConnectTimeOut() throws IOException { assertTrue(interpreterProcess.getEnv().size() >= 1); assertTrue(interpreterProcess.getEnv().containsKey("INTERPRETER_GROUP_ID")); assertEquals(true, interpreterProcess.isUserImpersonated()); + interpreterProcess.close(); } } diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/lifecycle/TimeoutLifecycleManagerTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/lifecycle/TimeoutLifecycleManagerTest.java index 46bfeb5d7fc..9e492d766df 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/lifecycle/TimeoutLifecycleManagerTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/lifecycle/TimeoutLifecycleManagerTest.java @@ -44,13 +44,11 @@ class TimeoutLifecycleManagerTest extends AbstractInterpreterTest { @Override @BeforeEach public void setUp() throws Exception { - ZeppelinConfiguration zConf = ZeppelinConfiguration.create(); - zConf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_LIFECYCLE_MANAGER_CLASS.getVarName(), - TimeoutLifecycleManager.class.getName()); - zConf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_LIFECYCLE_MANAGER_TIMEOUT_CHECK_INTERVAL.getVarName(), "1000"); - zConf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_LIFECYCLE_MANAGER_TIMEOUT_THRESHOLD.getVarName(), "10s"); - super.setUp(); + conf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_LIFECYCLE_MANAGER_CLASS.getVarName(), + TimeoutLifecycleManager.class.getName()); + conf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_LIFECYCLE_MANAGER_TIMEOUT_CHECK_INTERVAL.getVarName(), "1000"); + 
conf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_LIFECYCLE_MANAGER_TIMEOUT_THRESHOLD.getVarName(), "10s"); } @Override diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/mock/MockInterpreter1.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/mock/MockInterpreter1.java index 0c7e5df6c62..8fbc108d44e 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/mock/MockInterpreter1.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/mock/MockInterpreter1.java @@ -33,83 +33,83 @@ public class MockInterpreter1 extends Interpreter { - private static AtomicInteger IdGenerator = new AtomicInteger(); - - private int object_id; - private String pid; - Map vars = new HashMap<>(); - - public MockInterpreter1(Properties property) { - super(property); - this.object_id = IdGenerator.getAndIncrement(); - this.pid = ManagementFactory.getRuntimeMXBean().getName().split("@")[0]; - } - - boolean open; - - - @Override - public void open() { - open = true; - } - - @Override - public void close() { - open = false; - } - - - public boolean isOpen() { - return open; - } - - @Override - public InterpreterResult interpret(String st, InterpreterContext context) { - InterpreterResult result; - st = st.trim(); - if ("getId".equals(st)) { - // get unique id of this interpreter instance - result = new InterpreterResult(InterpreterResult.Code.SUCCESS, "" + this.object_id + "-" + this.pid); - } else if (st.startsWith("sleep")) { - try { - Thread.sleep(Integer.parseInt(st.split(" ")[1])); - } catch (InterruptedException e) { - // nothing to do - } - result = new InterpreterResult(InterpreterResult.Code.SUCCESS, "repl1: " + st); - } else { - result = new InterpreterResult(InterpreterResult.Code.SUCCESS, "repl1: " + st); - } - - if (context.getResourcePool() != null) { - context.getResourcePool().put(context.getNoteId(), context.getParagraphId(), "result", result); - } - - return result; - } - - 
@Override - public void cancel(InterpreterContext context) { - } - - @Override - public FormType getFormType() { - return FormType.SIMPLE; - } - - @Override - public int getProgress(InterpreterContext context) { - return 0; - } - - @Override - public Scheduler getScheduler() { - return SchedulerFactory.singleton().createOrGetFIFOScheduler("test_"+this.hashCode()); - } - - @Override - public List completion(String buf, int cursor, - InterpreterContext interpreterContext) { - return null; - } + private static AtomicInteger IdGenerator = new AtomicInteger(); + + private int object_id; + private String pid; + Map vars = new HashMap<>(); + + public MockInterpreter1(Properties property) { + super(property); + this.object_id = IdGenerator.getAndIncrement(); + this.pid = ManagementFactory.getRuntimeMXBean().getName().split("@")[0]; + } + + boolean open; + + + @Override + public void open() { + open = true; + } + + @Override + public void close() { + open = false; + } + + + public boolean isOpen() { + return open; + } + + @Override + public InterpreterResult interpret(String st, InterpreterContext context) { + InterpreterResult result; + st = st.trim(); + if ("getId".equals(st)) { + // get unique id of this interpreter instance + result = new InterpreterResult(InterpreterResult.Code.SUCCESS, "" + this.object_id + "-" + this.pid); + } else if (st.startsWith("sleep")) { + try { + Thread.sleep(Integer.parseInt(st.split(" ")[1])); + } catch (InterruptedException e) { + // nothing to do + } + result = new InterpreterResult(InterpreterResult.Code.SUCCESS, "repl1: " + st); + } else { + result = new InterpreterResult(InterpreterResult.Code.SUCCESS, "repl1: " + st); + } + + if (context.getResourcePool() != null) { + context.getResourcePool().put(context.getNoteId(), context.getParagraphId(), "result", result); + } + + return result; + } + + @Override + public void cancel(InterpreterContext context) { + } + + @Override + public FormType getFormType() { + return FormType.SIMPLE; + } + 
+ @Override + public int getProgress(InterpreterContext context) { + return 0; + } + + @Override + public Scheduler getScheduler() { + return SchedulerFactory.singleton().createOrGetFIFOScheduler("test_"+this.hashCode()); + } + + @Override + public List completion(String buf, int cursor, + InterpreterContext interpreterContext) { + return null; + } } diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/mock/MockInterpreter2.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/mock/MockInterpreter2.java index 43f9f517771..dd04a03b31b 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/mock/MockInterpreter2.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/mock/MockInterpreter2.java @@ -32,73 +32,73 @@ public class MockInterpreter2 extends Interpreter{ Map vars = new HashMap<>(); - public MockInterpreter2(Properties property) { - super(property); - } - - boolean open; - - @Override - public void open() { - open = true; - } - - @Override - public void close() { - open = false; - } - - public boolean isOpen() { - return open; - } - - - @Override - public InterpreterResult interpret(String st, InterpreterContext context) { - InterpreterResult result; - st = st.trim(); - if ("getId".equals(st)) { - // get unique id of this interpreter instance - result = new InterpreterResult(InterpreterResult.Code.SUCCESS, "" + this.hashCode()); - } else if (st.startsWith("sleep")) { - try { - Thread.sleep(Integer.parseInt(st.split(" ")[1])); - } catch (InterruptedException e) { - // nothing to do - } - result = new InterpreterResult(InterpreterResult.Code.SUCCESS, "repl2: " + st); - } else { - result = new InterpreterResult(InterpreterResult.Code.SUCCESS, "repl2: " + st); - } - - if (context.getResourcePool() != null) { - context.getResourcePool().put(context.getNoteId(), context.getParagraphId(), "result", result); - } - return result; - } - - @Override - public void cancel(InterpreterContext 
context) { - } - - @Override - public FormType getFormType() { - return FormType.SIMPLE; - } - - @Override - public int getProgress(InterpreterContext context) { - return 0; - } - - @Override - public Scheduler getScheduler() { - return SchedulerFactory.singleton().createOrGetFIFOScheduler("test_"+this.hashCode()); - } - - @Override - public List completion(String buf, int cursor, - InterpreterContext interpreterContext) { - return null; - } + public MockInterpreter2(Properties property) { + super(property); + } + + boolean open; + + @Override + public void open() { + open = true; + } + + @Override + public void close() { + open = false; + } + + public boolean isOpen() { + return open; + } + + + @Override + public InterpreterResult interpret(String st, InterpreterContext context) { + InterpreterResult result; + st = st.trim(); + if ("getId".equals(st)) { + // get unique id of this interpreter instance + result = new InterpreterResult(InterpreterResult.Code.SUCCESS, "" + this.hashCode()); + } else if (st.startsWith("sleep")) { + try { + Thread.sleep(Integer.parseInt(st.split(" ")[1])); + } catch (InterruptedException e) { + // nothing to do + } + result = new InterpreterResult(InterpreterResult.Code.SUCCESS, "repl2: " + st); + } else { + result = new InterpreterResult(InterpreterResult.Code.SUCCESS, "repl2: " + st); + } + + if (context.getResourcePool() != null) { + context.getResourcePool().put(context.getNoteId(), context.getParagraphId(), "result", result); + } + return result; + } + + @Override + public void cancel(InterpreterContext context) { + } + + @Override + public FormType getFormType() { + return FormType.SIMPLE; + } + + @Override + public int getProgress(InterpreterContext context) { + return 0; + } + + @Override + public Scheduler getScheduler() { + return SchedulerFactory.singleton().createOrGetFIFOScheduler("test_"+this.hashCode()); + } + + @Override + public List completion(String buf, int cursor, + InterpreterContext interpreterContext) { + return 
null; + } } diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/recovery/FileSystemRecoveryStorageTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/recovery/FileSystemRecoveryStorageTest.java index d099bc1c5d4..2b321908c53 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/recovery/FileSystemRecoveryStorageTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/recovery/FileSystemRecoveryStorageTest.java @@ -38,7 +38,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals; -public class FileSystemRecoveryStorageTest extends AbstractInterpreterTest { +class FileSystemRecoveryStorageTest extends AbstractInterpreterTest { private File recoveryDir = null; private String note1Id; @@ -66,7 +66,7 @@ public void tearDown() throws Exception { } @Test - public void testSingleInterpreterProcess() throws InterpreterException, IOException { + void testSingleInterpreterProcess() throws InterpreterException, IOException { InterpreterSetting interpreterSetting = interpreterSettingManager.getByName("test"); interpreterSetting.getOption().setPerUser(InterpreterOption.SHARED); @@ -85,7 +85,7 @@ public void testSingleInterpreterProcess() throws InterpreterException, IOExcept } @Test - public void testMultipleInterpreterProcess() throws InterpreterException, IOException { + void testMultipleInterpreterProcess() throws InterpreterException, IOException { InterpreterSetting interpreterSetting = interpreterSettingManager.getByName("test"); interpreterSetting.getOption().setPerUser(InterpreterOption.ISOLATED); diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/recovery/LocalRecoveryStorageTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/recovery/LocalRecoveryStorageTest.java index cce4a48a5e5..52c8cdea3d1 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/recovery/LocalRecoveryStorageTest.java +++ 
b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/recovery/LocalRecoveryStorageTest.java @@ -38,7 +38,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals; -public class LocalRecoveryStorageTest extends AbstractInterpreterTest { +class LocalRecoveryStorageTest extends AbstractInterpreterTest { private File recoveryDir = null; private String note1Id; private String note2Id; @@ -66,7 +66,7 @@ public void tearDown() throws Exception { } @Test - public void testSingleInterpreterProcess() throws InterpreterException, IOException { + void testSingleInterpreterProcess() throws InterpreterException, IOException { InterpreterSetting interpreterSetting = interpreterSettingManager.getByName("test"); interpreterSetting.getOption().setPerUser(InterpreterOption.SHARED); @@ -85,7 +85,7 @@ public void testSingleInterpreterProcess() throws InterpreterException, IOExcept } @Test - public void testMultipleInterpreterProcess() throws InterpreterException, IOException { + void testMultipleInterpreterProcess() throws InterpreterException, IOException { InterpreterSetting interpreterSetting = interpreterSettingManager.getByName("test"); interpreterSetting.getOption().setPerUser(InterpreterOption.ISOLATED); diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/remote/AppendOutputRunnerTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/remote/AppendOutputRunnerTest.java index 992ebddb08d..1d8d273cb73 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/remote/AppendOutputRunnerTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/remote/AppendOutputRunnerTest.java @@ -43,7 +43,7 @@ import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; -public class AppendOutputRunnerTest { +class AppendOutputRunnerTest { private static final int NUM_EVENTS = 10000; private static final int NUM_CLUBBED_EVENTS = 100; @@ -63,7 +63,7 @@ public void afterEach() { 
} @Test - public void testSingleEvent() throws InterruptedException { + void testSingleEvent() throws InterruptedException { RemoteInterpreterProcessListener listener = mock(RemoteInterpreterProcessListener.class); String[][] buffer = {{"note", "para", "data\n"}}; @@ -89,7 +89,7 @@ public void testMultipleEventsOfSameParagraph() throws InterruptedException { } @Test - public void testMultipleEventsOfDifferentParagraphs() throws InterruptedException { + void testMultipleEventsOfDifferentParagraphs() throws InterruptedException { RemoteInterpreterProcessListener listener = mock(RemoteInterpreterProcessListener.class); String note1 = "note1"; String note2 = "note2"; @@ -111,7 +111,7 @@ public void testMultipleEventsOfDifferentParagraphs() throws InterruptedExceptio } @Test - public void testClubbedData() throws InterruptedException { + void testClubbedData() throws InterruptedException { RemoteInterpreterProcessListener listener = mock(RemoteInterpreterProcessListener.class); AppendOutputRunner runner = new AppendOutputRunner(listener); future = service.scheduleWithFixedDelay(runner, 0, @@ -130,7 +130,7 @@ public void testClubbedData() throws InterruptedException { } @Test - public void testWarnLoggerForLargeData() throws InterruptedException { + void testWarnLoggerForLargeData() throws InterruptedException { RemoteInterpreterProcessListener listener = mock(RemoteInterpreterProcessListener.class); AppendOutputRunner runner = new AppendOutputRunner(listener); String data = "data\n"; diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/remote/RemoteAngularObjectTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/remote/RemoteAngularObjectTest.java index 04849761d79..68db648aa30 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/remote/RemoteAngularObjectTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/remote/RemoteAngularObjectTest.java @@ -34,7 +34,7 @@ import static 
org.junit.jupiter.api.Assertions.assertEquals; -public class RemoteAngularObjectTest extends AbstractInterpreterTest +class RemoteAngularObjectTest extends AbstractInterpreterTest implements AngularObjectRegistryListener { private RemoteInterpreter intp; @@ -74,7 +74,7 @@ public void setUp() throws Exception { } @Test - public void testAngularObjectInterpreterSideCRUD() throws InterruptedException, InterpreterException { + void testAngularObjectInterpreterSideCRUD() throws InterruptedException, InterpreterException { InterpreterResult ret = intp.interpret("get", context); Thread.sleep(500); // waitFor eventpoller pool event String[] result = ret.message().get(0).getData().split(" "); @@ -107,7 +107,7 @@ public void testAngularObjectInterpreterSideCRUD() throws InterruptedException, } @Test - public void testAngularObjectRemovalOnZeppelinServerSide() throws InterruptedException, InterpreterException { + void testAngularObjectRemovalOnZeppelinServerSide() throws InterruptedException, InterpreterException { // test if angularobject removal from server side propagate to interpreter process's registry. // will happen when notebook is removed. @@ -132,7 +132,7 @@ public void testAngularObjectRemovalOnZeppelinServerSide() throws InterruptedExc } @Test - public void testAngularObjectAddOnZeppelinServerSide() throws InterruptedException, InterpreterException { + void testAngularObjectAddOnZeppelinServerSide() throws InterruptedException, InterpreterException { // test if angularobject add from server side propagate to interpreter process's registry. 
// will happen when zeppelin server loads notebook and restore the object into registry diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterOutputTestStream.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterOutputTestStreamTest.java similarity index 70% rename from zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterOutputTestStream.java rename to zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterOutputTestStreamTest.java index 1e36eaa7b42..189c37ec7df 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterOutputTestStream.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterOutputTestStreamTest.java @@ -17,19 +17,19 @@ package org.apache.zeppelin.interpreter.remote; +import org.apache.commons.lang3.RandomStringUtils; import org.apache.zeppelin.interpreter.AbstractInterpreterTest; import org.apache.zeppelin.interpreter.InterpreterContext; import org.apache.zeppelin.interpreter.InterpreterException; import org.apache.zeppelin.interpreter.InterpreterResult; import org.apache.zeppelin.interpreter.InterpreterSetting; -import org.apache.zeppelin.interpreter.thrift.ParagraphInfo; +import org.apache.zeppelin.user.AuthenticationInfo; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import java.io.File; import java.io.IOException; -import java.util.List; -import java.util.Map; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -37,21 +37,27 @@ /** * Test for remote interpreter output stream */ -public class RemoteInterpreterOutputTestStream extends AbstractInterpreterTest - implements RemoteInterpreterProcessListener { +class RemoteInterpreterOutputTestStreamTest extends AbstractInterpreterTest { private InterpreterSetting interpreterSetting; + 
private String noteId; + @Override @BeforeEach public void setUp() throws Exception { super.setUp(); interpreterSetting = interpreterSettingManager.get("test"); + noteId = notebook.createNote(File.separator + RandomStringUtils.randomAlphabetic(6), + AuthenticationInfo.ANONYMOUS); } + @Override @AfterEach public void tearDown() throws Exception { interpreterSetting.close(); + notebook.removeNote(noteId, AuthenticationInfo.ANONYMOUS); + super.tearDown(); } private InterpreterContext createInterpreterContext() { @@ -62,8 +68,8 @@ private InterpreterContext createInterpreterContext() { } @Test - public void testInterpreterResultOnly() throws InterpreterException { - RemoteInterpreter intp = (RemoteInterpreter) interpreterSetting.getInterpreter("user1", "note1", "mock_stream"); + void testInterpreterResultOnly() throws InterpreterException, IOException { + RemoteInterpreter intp = (RemoteInterpreter) interpreterSetting.getInterpreter("user1", noteId, "mock_stream"); InterpreterResult ret = intp.interpret("SUCCESS::staticresult", createInterpreterContext()); assertEquals(InterpreterResult.Code.SUCCESS, ret.code()); assertEquals("staticresult", ret.message().get(0).getData()); @@ -78,8 +84,9 @@ public void testInterpreterResultOnly() throws InterpreterException { } @Test - public void testInterpreterOutputStreamOnly() throws InterpreterException { - RemoteInterpreter intp = (RemoteInterpreter) interpreterSetting.getInterpreter("user1", "note1", "mock_stream"); + void testInterpreterOutputStreamOnly() throws InterpreterException { + RemoteInterpreter intp = + (RemoteInterpreter) interpreterSetting.getInterpreter("user1", noteId, "mock_stream"); InterpreterResult ret = intp.interpret("SUCCESS:streamresult:", createInterpreterContext()); assertEquals(InterpreterResult.Code.SUCCESS, ret.code()); assertEquals("streamresult", ret.message().get(0).getData()); @@ -90,8 +97,9 @@ public void testInterpreterOutputStreamOnly() throws InterpreterException { } @Test - public void 
testInterpreterResultOutputStreamMixed() throws InterpreterException { - RemoteInterpreter intp = (RemoteInterpreter) interpreterSetting.getInterpreter("user1", "note1", "mock_stream"); + void testInterpreterResultOutputStreamMixed() throws InterpreterException { + RemoteInterpreter intp = + (RemoteInterpreter) interpreterSetting.getInterpreter("user1", noteId, "mock_stream"); InterpreterResult ret = intp.interpret("SUCCESS:stream:static", createInterpreterContext()); assertEquals(InterpreterResult.Code.SUCCESS, ret.code()); assertEquals("stream", ret.message().get(0).getData()); @@ -99,8 +107,9 @@ public void testInterpreterResultOutputStreamMixed() throws InterpreterException } @Test - public void testOutputType() throws InterpreterException { - RemoteInterpreter intp = (RemoteInterpreter) interpreterSetting.getInterpreter("user1", "note1", "mock_stream"); + void testOutputType() throws InterpreterException { + RemoteInterpreter intp = + (RemoteInterpreter) interpreterSetting.getInterpreter("user1", noteId, "mock_stream"); InterpreterResult ret = intp.interpret("SUCCESS:%html hello:", createInterpreterContext()); assertEquals(InterpreterResult.Type.HTML, ret.message().get(0).getType()); @@ -117,38 +126,4 @@ public void testOutputType() throws InterpreterException { assertEquals("world", ret.message().get(1).getData()); } - @Override - public void onOutputAppend(String noteId, String paragraphId, int index, String output) { - - } - - @Override - public void onOutputUpdated(String noteId, String paragraphId, int index, InterpreterResult.Type type, String output) { - - } - - @Override - public void onOutputClear(String noteId, String paragraphId) { - - } - - @Override - public void runParagraphs(String noteId, List paragraphIndices, List paragraphIds, String curParagraphId) throws IOException { - - } - - @Override - public void onParaInfosReceived(String noteId, String paragraphId, - String interpreterSettingId, Map metaInfos) { - } - - @Override - public List 
getParagraphList(String user, String noteId) { - return null; - } - - @Override - public void checkpointOutput(String noteId, String paragraphId) { - - } } diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterTest.java index a899dbea3a2..cdeea28dcb4 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterTest.java @@ -51,7 +51,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; -public class RemoteInterpreterTest extends AbstractInterpreterTest { +class RemoteInterpreterTest extends AbstractInterpreterTest { private InterpreterSetting interpreterSetting; private String note1Id; @@ -70,7 +70,7 @@ public void tearDown() throws Exception { } @Test - public void testSharedMode() throws InterpreterException, IOException { + void testSharedMode() throws InterpreterException, IOException { interpreterSetting.getOption().setPerUser(InterpreterOption.SHARED); Interpreter interpreter1 = interpreterSetting.getDefaultInterpreter("user1", note1Id); @@ -104,7 +104,7 @@ public void testSharedMode() throws InterpreterException, IOException { } @Test - public void testScopedMode() throws InterpreterException, IOException { + void testScopedMode() throws InterpreterException, IOException { interpreterSetting.getOption().setPerUser(InterpreterOption.SCOPED); Interpreter interpreter1 = interpreterSetting.getDefaultInterpreter("user1", note1Id); @@ -148,7 +148,7 @@ public void testScopedMode() throws InterpreterException, IOException { } @Test - public void testIsolatedMode() throws InterpreterException, IOException { + void testIsolatedMode() throws InterpreterException, IOException { 
interpreterSetting.getOption().setPerUser(InterpreterOption.ISOLATED); Interpreter interpreter1 = interpreterSetting.getDefaultInterpreter("user1", note1Id); @@ -192,7 +192,7 @@ public void testIsolatedMode() throws InterpreterException, IOException { } @Test - public void testExecuteIncorrectPrecode() throws TTransportException, IOException, InterpreterException { + void testExecuteIncorrectPrecode() throws TTransportException, IOException, InterpreterException { interpreterSetting.getOption().setPerUser(InterpreterOption.SHARED); interpreterSetting.setProperty("zeppelin.SleepInterpreter.precode", "fail test"); Interpreter interpreter1 = interpreterSetting.getInterpreter("user1", note1Id, "sleep"); @@ -201,7 +201,7 @@ public void testExecuteIncorrectPrecode() throws TTransportException, IOExceptio } @Test - public void testExecuteCorrectPrecode() throws TTransportException, IOException, InterpreterException { + void testExecuteCorrectPrecode() throws TTransportException, IOException, InterpreterException { interpreterSetting.getOption().setPerUser(InterpreterOption.SHARED); interpreterSetting.setProperty("zeppelin.SleepInterpreter.precode", "1"); Interpreter interpreter1 = interpreterSetting.getInterpreter("user1", note1Id, "sleep"); @@ -210,7 +210,8 @@ public void testExecuteCorrectPrecode() throws TTransportException, IOException, } @Test - public void testRemoteInterperterErrorStatus() throws TTransportException, IOException, InterpreterException { + void testRemoteInterperterErrorStatus() + throws TTransportException, IOException, InterpreterException { interpreterSetting.setProperty("zeppelin.interpreter.echo.fail", "true"); interpreterSetting.getOption().setPerUser(InterpreterOption.SHARED); @@ -223,7 +224,7 @@ public void testRemoteInterperterErrorStatus() throws TTransportException, IOExc } @Test - public void testFIFOScheduler() throws InterruptedException, InterpreterException { + void testFIFOScheduler() throws InterruptedException, InterpreterException 
{ interpreterSetting.getOption().setPerUser(InterpreterOption.SHARED); // by default SleepInterpreter would use FIFOScheduler @@ -306,7 +307,7 @@ public void run() { } @Test - public void testRemoteInterpreterSharesTheSameSchedulerInstanceInTheSameGroup() { + void testRemoteInterpreterSharesTheSameSchedulerInstanceInTheSameGroup() { interpreterSetting.getOption().setPerUser(InterpreterOption.SHARED); Interpreter interpreter1 = interpreterSetting.getInterpreter("user1", note1Id, "sleep"); Interpreter interpreter2 = interpreterSetting.getInterpreter("user1", note1Id, "echo"); @@ -315,7 +316,7 @@ public void testRemoteInterpreterSharesTheSameSchedulerInstanceInTheSameGroup() } @Test - public void testMultiInterpreterSession() { + void testMultiInterpreterSession() { interpreterSetting.getOption().setPerUser(InterpreterOption.SCOPED); Interpreter interpreter1_user1 = interpreterSetting.getInterpreter("user1", note1Id, "sleep"); Interpreter interpreter2_user1 = interpreterSetting.getInterpreter("user1", note1Id, "echo"); @@ -332,7 +333,7 @@ public void testMultiInterpreterSession() { } @Test - public void should_push_local_angular_repo_to_remote() throws Exception { + void should_push_local_angular_repo_to_remote() throws Exception { final AngularObjectRegistry registry = new AngularObjectRegistry("spark", null); registry.add("name_1", "value_1", "note_1", "paragraphId_1"); @@ -347,7 +348,7 @@ public void should_push_local_angular_repo_to_remote() throws Exception { } @Test - public void testEnvStringPattern() { + void testEnvStringPattern() { assertFalse(RemoteInterpreterUtils.isEnvString(null)); assertFalse(RemoteInterpreterUtils.isEnvString("")); assertFalse(RemoteInterpreterUtils.isEnvString("abcDEF")); @@ -358,7 +359,7 @@ public void testEnvStringPattern() { } @Test - public void testEnvironmentAndProperty() throws InterpreterException { + void testEnvironmentAndProperty() throws InterpreterException { 
interpreterSetting.getOption().setPerUser(InterpreterOption.SHARED); interpreterSetting.setProperty("ENV_1", "VALUE_1"); interpreterSetting.setProperty("property_1", "value_1"); @@ -374,7 +375,7 @@ public void testEnvironmentAndProperty() throws InterpreterException { } @Test - public void testConvertDynamicForms() throws InterpreterException { + void testConvertDynamicForms() throws InterpreterException { GUI gui = new GUI(); OptionInput.ParamOption[] paramOptions = { new OptionInput.ParamOption("value1", "param1"), @@ -395,7 +396,7 @@ public void testConvertDynamicForms() throws InterpreterException { } @Test - public void testFailToLaunchInterpreterProcess_InvalidRunner() { + void testFailToLaunchInterpreterProcess_InvalidRunner() { try { System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_REMOTE_RUNNER.getVarName(), "invalid_runner"); final Interpreter interpreter1 = interpreterSetting.getInterpreter("user1", note1Id, "sleep"); @@ -414,7 +415,7 @@ public void testFailToLaunchInterpreterProcess_InvalidRunner() { } @Test - public void testFailToLaunchInterpreterProcess_ErrorInRunner() { + void testFailToLaunchInterpreterProcess_ErrorInRunner() { try { System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_REMOTE_RUNNER.getVarName(), zeppelinHome.getAbsolutePath() + "/zeppelin-zengine/src/test/resources/bin/interpreter_invalid.sh"); @@ -434,7 +435,7 @@ public void testFailToLaunchInterpreterProcess_ErrorInRunner() { } @Test - public void testFailToLaunchInterpreterProcess_Timeout() { + void testFailToLaunchInterpreterProcess_Timeout() { try { System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETER_REMOTE_RUNNER.getVarName(), zeppelinHome.getAbsolutePath() + "/zeppelin-zengine/src/test/resources/bin/interpreter_timeout.sh"); diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/CredentialInjectorTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/CredentialInjectorTest.java 
index 9a17969238f..645f46b8502 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/CredentialInjectorTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/CredentialInjectorTest.java @@ -28,7 +28,7 @@ import org.apache.zeppelin.user.UsernamePassword; import org.junit.jupiter.api.Test; -public class CredentialInjectorTest { +class CredentialInjectorTest { private static final String TEMPLATE = "val jdbcUrl = \"jdbc:mysql://localhost/emp?user={mysql.user}&password={mysql.password}\""; @@ -41,7 +41,7 @@ public class CredentialInjectorTest { "jdbcUrl: String = jdbc:mysql://localhost/employees?user=username&password=###"; @Test - public void replaceCredentials() { + void replaceCredentials() { UserCredentials userCredentials = mock(UserCredentials.class); UsernamePassword usernamePassword = new UsernamePassword("username", "pwd"); when(userCredentials.getUsernamePassword("mysql")).thenReturn(usernamePassword); @@ -56,7 +56,7 @@ public void replaceCredentials() { } @Test - public void replaceCredentialNoTexts() { + void replaceCredentialNoTexts() { UserCredentials userCredentials = mock(UserCredentials.class); CredentialInjector testee = new CredentialInjector(userCredentials); String actual = testee.replaceCredentials(null); @@ -64,7 +64,7 @@ public void replaceCredentialNoTexts() { } @Test - public void replaceCredentialsNotExisting() { + void replaceCredentialsNotExisting() { UserCredentials userCredentials = mock(UserCredentials.class); CredentialInjector testee = new CredentialInjector(userCredentials); String actual = testee.replaceCredentials(TEMPLATE); @@ -75,9 +75,9 @@ public void replaceCredentialsNotExisting() { assertEquals(1, hiddenResult.message().size()); assertEquals(ANSWER, hiddenResult.message().get(0).getData()); } - + @Test - public void hidePasswordsNoResult() { + void hidePasswordsNoResult() { UserCredentials userCredentials = mock(UserCredentials.class); CredentialInjector testee = new 
CredentialInjector(userCredentials); assertNull(testee.hidePasswords(null)); diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/NoteAuthTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/NoteAuthTest.java index b8dff580670..149a15fd0a8 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/NoteAuthTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/NoteAuthTest.java @@ -31,11 +31,11 @@ import java.util.Map; import java.util.Set; -public class NoteAuthTest { +class NoteAuthTest { private ZeppelinConfiguration conf = mock(ZeppelinConfiguration.class); @Test - public void testAnonymous() { + void testAnonymous() { NoteAuth auth = new NoteAuth("note1", conf); assertEquals(0, auth.getOwners().size()); assertEquals(0, auth.getReaders().size()); @@ -44,7 +44,7 @@ public void testAnonymous() { } @Test - public void testPublicNotes() { + void testPublicNotes() { when(conf.isNotebookPublic()).thenReturn(true); @@ -68,7 +68,7 @@ public void testPublicNotes() { } @Test - public void testNoPublicNotes() { + void testNoPublicNotes() { when(conf.isNotebookPublic()).thenReturn(false); @@ -96,7 +96,7 @@ public void testNoPublicNotes() { } @Test - public void testFoceLowerCaseUsers() { + void testFoceLowerCaseUsers() { when(conf.isNotebookPublic()).thenReturn(false); when(conf.isUsernameForceLowerCase()).thenReturn(true); @@ -116,7 +116,7 @@ public void testFoceLowerCaseUsers() { } @Test - public void testMapConstructor() { + void testMapConstructor() { when(conf.isNotebookPublic()).thenReturn(false); NoteAuth auth = new NoteAuth("note1", getTestMap("TestUser", "TestGroup"), conf); diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/NoteManagerTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/NoteManagerTest.java index c0044c926b7..b3e25b4ec39 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/NoteManagerTest.java +++ 
b/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/NoteManagerTest.java @@ -21,7 +21,6 @@ import org.apache.zeppelin.notebook.exception.NotePathAlreadyExistsException; import org.apache.zeppelin.notebook.repo.InMemoryNotebookRepo; import org.apache.zeppelin.user.AuthenticationInfo; - import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -38,20 +37,21 @@ import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; -public class NoteManagerTest { +class NoteManagerTest { private NoteManager noteManager; private ZeppelinConfiguration conf; - + private NoteParser noteParser; @BeforeEach public void setUp() throws IOException { - conf = ZeppelinConfiguration.create(); + conf = ZeppelinConfiguration.load(); this.noteManager = new NoteManager(new InMemoryNotebookRepo(), conf); + this.noteParser = new GsonNoteParser(conf); } @Test - public void testNoteOperations() throws IOException { + void testNoteOperations() throws IOException { assertEquals(0, this.noteManager.getNotesInfo().size()); Note note1 = createNote("/prod/my_note1"); @@ -94,7 +94,7 @@ public void testNoteOperations() throws IOException { } @Test - public void testAddNoteRejectsDuplicatePath() throws IOException { + void testAddNoteRejectsDuplicatePath() throws IOException { assertThrows(NotePathAlreadyExistsException.class, () -> { @@ -108,7 +108,7 @@ public void testAddNoteRejectsDuplicatePath() throws IOException { } @Test - public void testMoveNoteRejectsDuplicatePath() throws IOException { + void testMoveNoteRejectsDuplicatePath() throws IOException { assertThrows(NotePathAlreadyExistsException.class, () -> { Note note1 = createNote("/prod/note-1"); @@ -123,11 +123,11 @@ public void testMoveNoteRejectsDuplicatePath() throws IOException { } private Note createNote(String notePath) { - return new Note(notePath, "test", null, null, null, null, null); + return new Note(notePath, "test", null, null, null, null, 
null, conf, noteParser); } @Test - public void testLruCache() throws IOException { + void testLruCache() throws IOException { int cacheThreshold = conf.getNoteCacheThreshold(); @@ -175,7 +175,7 @@ public void testLruCache() throws IOException { } @Test - public void testConcurrentOperation() throws Exception { + void testConcurrentOperation() throws Exception { int threshold = 10, noteNum = 150; Map notes = new ConcurrentHashMap<>(); ExecutorService threadPool = Executors.newFixedThreadPool(threshold); diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/NoteTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/NoteTest.java index 4ed2a2cd1ce..b7d1cb9ad78 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/NoteTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/NoteTest.java @@ -17,6 +17,7 @@ package org.apache.zeppelin.notebook; +import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.display.AngularObject; import org.apache.zeppelin.display.ui.TextBox; import org.apache.zeppelin.interpreter.Interpreter; @@ -30,6 +31,7 @@ import org.apache.zeppelin.scheduler.Scheduler; import org.apache.zeppelin.user.AuthenticationInfo; import org.apache.zeppelin.user.Credentials; +import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.mockito.ArgumentCaptor; @@ -68,6 +70,15 @@ class NoteTest { private AuthenticationInfo anonymous = new AuthenticationInfo("anonymous"); + private static ZeppelinConfiguration zConf; + private static NoteParser noteParser; + + @BeforeAll + public static void before() { + zConf = ZeppelinConfiguration.load(); + noteParser = new GsonNoteParser(zConf); + } + @BeforeEach public void setUp() { repo = mock(NotebookRepo.class); @@ -89,7 +100,7 @@ void runNormalTest() throws InterpreterNotFoundException { 
when(interpreterGroup.getInterpreterSetting()).thenReturn(interpreterSetting); String pText = "%spark sc.version"; - Note note = new Note("test", "test", interpreterFactory, interpreterSettingManager, paragraphJobListener, credentials, noteEventListener); + Note note = new Note("test", "test", interpreterFactory, interpreterSettingManager, paragraphJobListener, credentials, noteEventListener, zConf, noteParser); Paragraph p = note.addNewParagraph(AuthenticationInfo.ANONYMOUS); p.setText(pText); @@ -105,7 +116,8 @@ void runNormalTest() throws InterpreterNotFoundException { @Test void addParagraphWithEmptyReplNameTest() { - Note note = new Note("test", "", interpreterFactory, interpreterSettingManager, paragraphJobListener, credentials, noteEventListener); + Note note = new Note("test", "", interpreterFactory, interpreterSettingManager, + paragraphJobListener, credentials, noteEventListener, zConf, noteParser); Paragraph p = note.addNewParagraph(AuthenticationInfo.ANONYMOUS); assertNull(p.getText()); } @@ -113,7 +125,7 @@ void addParagraphWithEmptyReplNameTest() { @Test void addParagraphWithLastReplNameTest() throws InterpreterNotFoundException { when(interpreterFactory.getInterpreter(eq("spark"), any())).thenReturn(interpreter); - Note note = new Note("test", "", interpreterFactory, interpreterSettingManager, paragraphJobListener, credentials, noteEventListener); + Note note = new Note("test", "", interpreterFactory, interpreterSettingManager, paragraphJobListener, credentials, noteEventListener, zConf, noteParser); Paragraph p1 = note.addNewParagraph(AuthenticationInfo.ANONYMOUS); p1.setText("%spark "); Paragraph p2 = note.addNewParagraph(AuthenticationInfo.ANONYMOUS); @@ -124,7 +136,7 @@ void addParagraphWithLastReplNameTest() throws InterpreterNotFoundException { @Test void insertParagraphWithLastReplNameTest() throws InterpreterNotFoundException { when(interpreterFactory.getInterpreter(eq("spark"), any())).thenReturn(interpreter); - Note note = new Note("test", 
"", interpreterFactory, interpreterSettingManager, paragraphJobListener, credentials, noteEventListener); + Note note = new Note("test", "", interpreterFactory, interpreterSettingManager, paragraphJobListener, credentials, noteEventListener, zConf, noteParser); Paragraph p1 = note.addNewParagraph(AuthenticationInfo.ANONYMOUS); p1.setText("%spark "); Paragraph p2 = note.insertNewParagraph(note.getParagraphs().size(), AuthenticationInfo.ANONYMOUS); @@ -135,7 +147,7 @@ void insertParagraphWithLastReplNameTest() throws InterpreterNotFoundException { @Test void insertParagraphWithInvalidReplNameTest() throws InterpreterNotFoundException { when(interpreterFactory.getInterpreter(eq("invalid"), any())).thenReturn(null); - Note note = new Note("test", "", interpreterFactory, interpreterSettingManager, paragraphJobListener, credentials, noteEventListener); + Note note = new Note("test", "", interpreterFactory, interpreterSettingManager, paragraphJobListener, credentials, noteEventListener, zConf, noteParser); Paragraph p1 = note.addNewParagraph(AuthenticationInfo.ANONYMOUS); p1.setText("%invalid "); Paragraph p2 = note.insertNewParagraph(note.getParagraphs().size(), AuthenticationInfo.ANONYMOUS); @@ -145,7 +157,8 @@ void insertParagraphWithInvalidReplNameTest() throws InterpreterNotFoundExceptio @Test void insertParagraphwithUser() { - Note note = new Note("test", "", interpreterFactory, interpreterSettingManager, paragraphJobListener, credentials, noteEventListener); + Note note = new Note("test", "", interpreterFactory, interpreterSettingManager, + paragraphJobListener, credentials, noteEventListener, zConf, noteParser); Paragraph p = note.insertNewParagraph(note.getParagraphs().size(), AuthenticationInfo.ANONYMOUS); assertEquals("anonymous", p.getUser()); } @@ -153,7 +166,8 @@ void insertParagraphwithUser() { @Test void clearAllParagraphOutputTest() throws InterpreterNotFoundException { - Note note = new Note("test", "", interpreterFactory, interpreterSettingManager, 
paragraphJobListener, credentials, noteEventListener); + Note note = new Note("test", "", interpreterFactory, interpreterSettingManager, + paragraphJobListener, credentials, noteEventListener, zConf, noteParser); Paragraph p1 = note.addNewParagraph(AuthenticationInfo.ANONYMOUS); InterpreterResult result = new InterpreterResult(InterpreterResult.Code.SUCCESS, InterpreterResult.Type.TEXT, "result"); p1.setResult(result); @@ -170,7 +184,8 @@ void clearAllParagraphOutputTest() throws InterpreterNotFoundException { @Test void personalizedModeReturnDifferentParagraphInstancePerUser() { - Note note = new Note("test", "", interpreterFactory, interpreterSettingManager, paragraphJobListener, credentials, noteEventListener); + Note note = new Note("test", "", interpreterFactory, interpreterSettingManager, + paragraphJobListener, credentials, noteEventListener, zConf, noteParser); String user1 = "user1"; String user2 = "user2"; note.setPersonalizedMode(true); @@ -184,7 +199,8 @@ void personalizedModeReturnDifferentParagraphInstancePerUser() { } public void testNoteJson() throws IOException { - Note note = new Note("test", "", interpreterFactory, interpreterSettingManager, paragraphJobListener, credentials, noteEventListener); + Note note = new Note("test", "", interpreterFactory, interpreterSettingManager, + paragraphJobListener, credentials, noteEventListener, zConf, noteParser); note.setName("/test_note"); note.getConfig().put("config_1", "value_1"); note.getInfo().put("info_1", "value_1"); @@ -197,12 +213,12 @@ public void testNoteJson() throws IOException { note.getAngularObjects().put("ao_1", Arrays.asList(new AngularObject("name_1", "value_1", note.getId(), p.getId(), null))); // test Paragraph Json - Paragraph p2 = Paragraph.fromJson(p.toJson()); + Paragraph p2 = p.fromJson(p.toJson()); assertEquals(p2.settings, p.settings); assertEquals(p2, p); // test Note Json - Note note2 = Note.fromJson(null, note.toJson()); + Note note2 = noteParser.fromJson(null, note.toJson()); 
assertEquals(note2, note); } } diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/NotebookTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/NotebookTest.java index e6927dddb0c..1c01878539b 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/NotebookTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/NotebookTest.java @@ -50,6 +50,7 @@ import org.quartz.SchedulerException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; + import org.eclipse.aether.RepositoryException; import java.io.File; @@ -82,7 +83,7 @@ import static org.mockito.Mockito.mock; -public class NotebookTest extends AbstractInterpreterTest implements ParagraphJobListener { +class NotebookTest extends AbstractInterpreterTest implements ParagraphJobListener { private static final Logger logger = LoggerFactory.getLogger(NotebookTest.class); private Notebook notebook; @@ -97,16 +98,17 @@ public class NotebookTest extends AbstractInterpreterTest implements ParagraphJo @Override @BeforeEach public void setUp() throws Exception { - System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_PUBLIC.getVarName(), "true"); - System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_CRON_ENABLE.getVarName(), "true"); super.setUp(); + conf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_PUBLIC.getVarName(), "true"); + conf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_CRON_ENABLE.getVarName(), "true"); notebookRepo = new VFSNotebookRepo(); - notebookRepo.init(conf); + notebookRepo.init(conf, noteParser); noteManager = new NoteManager(notebookRepo, conf); - authorizationService = new AuthorizationService(noteManager, conf); - credentials = new Credentials(conf); + authorizationService = new AuthorizationService(noteManager, conf, storage); + + credentials = new Credentials(conf, storage); notebook = new Notebook(conf, authorizationService, notebookRepo, noteManager, interpreterFactory, interpreterSettingManager, credentials, null); 
notebook.setParagraphJobListener(this); schedulerService = new QuartzSchedulerService(conf, notebook); @@ -126,7 +128,7 @@ public void tearDown() throws Exception { } @Test - public void testRevisionSupported() throws IOException { + void testRevisionSupported() throws IOException { NotebookRepo notebookRepo; Notebook notebook; @@ -144,7 +146,7 @@ public void testRevisionSupported() throws IOException { public static class DummyNotebookRepo implements NotebookRepo { @Override - public void init(ZeppelinConfiguration zConf) throws IOException { + public void init(ZeppelinConfiguration zConf, NoteParser noteParser) throws IOException { } @@ -197,6 +199,11 @@ public List getSettings(AuthenticationInfo subject) { public void updateSettings(Map settings, AuthenticationInfo subject) { } + + @Override + public NoteParser getNoteParser() { + return null; + } } public static class DummyNotebookRepoWithVersionControl implements @@ -225,7 +232,7 @@ public Note setNoteRevision(String noteId, String notePath, String revId, Authen } @Override - public void init(ZeppelinConfiguration zConf) throws IOException { + public void init(ZeppelinConfiguration zConf, NoteParser noteParser) throws IOException { } @@ -278,10 +285,15 @@ public List getSettings(AuthenticationInfo subject) { public void updateSettings(Map settings, AuthenticationInfo subject) { } + + @Override + public NoteParser getNoteParser() { + return null; + } } @Test - public void testSelectingReplImplementation() throws IOException { + void testSelectingReplImplementation() throws IOException { String noteId = notebook.createNote("note1", anonymous); notebook.processNote(noteId, note -> { @@ -310,7 +322,7 @@ public void testSelectingReplImplementation() throws IOException { } @Test - public void testReloadAndSetInterpreter() throws IOException { + void testReloadAndSetInterpreter() throws IOException { String noteId = notebook.createNote("note1", AuthenticationInfo.ANONYMOUS); notebook.processNote(noteId, note -> { @@ 
-339,7 +351,7 @@ public void testReloadAndSetInterpreter() throws IOException { } @Test - public void testReloadAllNotes() throws IOException { + void testReloadAllNotes() throws IOException { String note1Id = notebook.createNote("note1", AuthenticationInfo.ANONYMOUS); notebook.processNote(note1Id, note1 -> { @@ -387,7 +399,7 @@ public void testReloadAllNotes() throws IOException { } @Test - public void testLoadAllNotes() { + void testLoadAllNotes() { try { assertEquals(0, notebook.getNotesInfo().size()); String noteId = notebook.createNote("note1", anonymous); @@ -409,7 +421,7 @@ public void testLoadAllNotes() { } @Test - public void testPersist() throws IOException, SchedulerException { + void testPersist() throws IOException, SchedulerException { String noteId = notebook.createNote("note1", anonymous); notebook.processNote(noteId, note -> { @@ -427,7 +439,7 @@ public void testPersist() throws IOException, SchedulerException { } @Test - public void testCreateNoteWithSubject() throws IOException, SchedulerException, RepositoryException { + void testCreateNoteWithSubject() throws IOException, SchedulerException, RepositoryException { AuthenticationInfo subject = new AuthenticationInfo("user1"); String noteId = notebook.createNote("note1", subject); assertNotNull(authorizationService.getOwners(noteId)); @@ -439,7 +451,7 @@ public void testCreateNoteWithSubject() throws IOException, SchedulerException, } @Test - public void testClearParagraphOutput() throws IOException, SchedulerException { + void testClearParagraphOutput() throws IOException, SchedulerException { String noteId = notebook.createNote("note1", anonymous); notebook.processNote(noteId, note -> { @@ -463,7 +475,7 @@ public void testClearParagraphOutput() throws IOException, SchedulerException { } @Test - public void testRunBlankParagraph() throws IOException, SchedulerException, InterruptedException { + void testRunBlankParagraph() throws IOException, SchedulerException, InterruptedException { String 
noteId = notebook.createNote("note1", anonymous); notebook.processNote(noteId, note -> { @@ -485,7 +497,7 @@ public void testRunBlankParagraph() throws IOException, SchedulerException, Inte } @Test - public void testRemoveNote() throws IOException, InterruptedException { + void testRemoveNote() throws IOException, InterruptedException { try { LOGGER.info("--------------- Test testRemoveNote ---------------"); // create a note and a paragraph @@ -521,28 +533,28 @@ public void testRemoveNote() throws IOException, InterruptedException { } @Test - public void testRemoveCorruptedNote() throws IOException{ - LOGGER.info("--------------- Test testRemoveCorruptedNote ---------------"); - // create a note and a paragraph - String corruptedNoteId = notebook.createNote("note1", anonymous); - String corruptedNotePath = notebook.processNote(corruptedNoteId, - corruptedNote -> { - return notebookDir.getAbsolutePath() + corruptedNote.getPath() + "_" + corruptedNote.getId() + ".zpln"; - }); + void testRemoveCorruptedNote() throws IOException { + LOGGER.info("--------------- Test testRemoveCorruptedNote ---------------"); + // create a note and a paragraph + String corruptedNoteId = notebook.createNote("note1", anonymous); + String corruptedNotePath = notebook.processNote(corruptedNoteId, + corruptedNote -> { + return notebookDir.getAbsolutePath() + corruptedNote.getPath() + "_" + corruptedNote.getId() + ".zpln"; + }); - // corrupt note - FileWriter myWriter = new FileWriter(corruptedNotePath); - myWriter.write("{{{I'm corrupted;;;"); - myWriter.close(); - LOGGER.info("--------------- Finish Test testRemoveCorruptedNote ---------------"); - int numberOfNotes = notebook.getNotesInfo().size(); - notebook.removeNote(corruptedNoteId, anonymous); - assertEquals(numberOfNotes - 1, notebook.getNotesInfo().size()); - LOGGER.info("--------------- Finish Test testRemoveCorruptedNote ---------------"); + // corrupt note + FileWriter myWriter = new FileWriter(corruptedNotePath); + 
myWriter.write("{{{I'm corrupted;;;"); + myWriter.close(); + LOGGER.info("--------------- Finish Test testRemoveCorruptedNote ---------------"); + int numberOfNotes = notebook.getNotesInfo().size(); + notebook.removeNote(corruptedNoteId, anonymous); + assertEquals(numberOfNotes - 1, notebook.getNotesInfo().size()); + LOGGER.info("--------------- Finish Test testRemoveCorruptedNote ---------------"); } @Test - public void testInvalidInterpreter() throws IOException, InterruptedException { + void testInvalidInterpreter() throws IOException, InterruptedException { String noteId = notebook.createNote("note1", anonymous); notebook.processNote(noteId, note -> { @@ -567,7 +579,7 @@ public void testInvalidInterpreter() throws IOException, InterruptedException { } @Test - public void testRunAll() throws Exception { + void testRunAll() throws Exception { String noteId = notebook.createNote("note1", anonymous); notebook.processNote(noteId, note -> { @@ -605,7 +617,7 @@ public void testRunAll() throws Exception { } @Test - public void testSchedule() throws InterruptedException, IOException { + void testSchedule() throws InterruptedException, IOException { // create a note and a paragraph String noteId = notebook.createNote("note1", anonymous); // use write lock, because note configuration is overwritten @@ -639,17 +651,17 @@ public void testSchedule() throws InterruptedException, IOException { }); schedulerService.refreshCron(noteId); // wait a little until running processes are finished - Thread.sleep(3 * 1000); + Thread.sleep(5*1000); Date dateFinished = paragraph.getDateFinished(); assertNotNull(dateFinished); // wait a little bit to check that no other tasks are being executed - Thread.sleep(2 * 1000); + Thread.sleep(3 * 1000); assertEquals(dateFinished, paragraph.getDateFinished()); notebook.removeNote(noteId, anonymous); } @Test - public void testScheduleAgainstRunningAndPendingParagraph() throws InterruptedException, IOException { + void 
testScheduleAgainstRunningAndPendingParagraph() throws InterruptedException, IOException { // create a note String noteId = notebook.createNote("note1", anonymous); // append running and pending paragraphs to the note @@ -703,8 +715,8 @@ public void testScheduleAgainstRunningAndPendingParagraph() throws InterruptedEx } @Test - public void testSchedulePoolUsage() throws InterruptedException, IOException { - final int timeout = 30; + void testSchedulePoolUsage() throws InterruptedException, IOException { + final int timeout = 60; final String everySecondCron = "* * * * * ?"; // each run starts a new JVM and the job takes about ~5 seconds final CountDownLatch jobsToExecuteCount = new CountDownLatch(5); @@ -741,83 +753,74 @@ private void executeNewParagraphByCron(String noteId, String cron) throws IOExce } @Test - public void testScheduleDisabled() throws InterruptedException, IOException { + void testScheduleDisabled() throws InterruptedException, IOException { + conf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_CRON_ENABLE.getVarName(), "false"); + final int timeout = 10; + final String everySecondCron = "* * * * * ?"; + final CountDownLatch jobsToExecuteCount = new CountDownLatch(5); + final String noteId = notebook.createNote("note1", anonymous); - System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_CRON_ENABLE.getVarName(), "false"); - try { - final int timeout = 10; - final String everySecondCron = "* * * * * ?"; - final CountDownLatch jobsToExecuteCount = new CountDownLatch(5); - final String noteId = notebook.createNote("note1", anonymous); - - executeNewParagraphByCron(noteId, everySecondCron); - afterStatusChangedListener = new StatusChangedListener() { - @Override - public void onStatusChanged(Job job, Status before, Status after) { - if (after == Status.FINISHED) { - jobsToExecuteCount.countDown(); - } + executeNewParagraphByCron(noteId, everySecondCron); + afterStatusChangedListener = new StatusChangedListener() { + @Override + public void onStatusChanged(Job job, 
Status before, Status after) { + if (after == Status.FINISHED) { + jobsToExecuteCount.countDown(); } - }; + } + }; - //This job should not run because "ZEPPELIN_NOTEBOOK_CRON_ENABLE" is set to false - assertFalse(jobsToExecuteCount.await(timeout, TimeUnit.SECONDS)); + // This job should not run because "ZEPPELIN_NOTEBOOK_CRON_ENABLE" is set to false + assertFalse(jobsToExecuteCount.await(timeout, TimeUnit.SECONDS)); - terminateScheduledNote(noteId); - afterStatusChangedListener = null; - } finally { - System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_CRON_ENABLE.getVarName(), "true"); - } + terminateScheduledNote(noteId); + afterStatusChangedListener = null; } @Test - public void testScheduleDisabledWithName() throws InterruptedException, IOException { + void testScheduleDisabledWithName() throws InterruptedException, IOException { - System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_CRON_FOLDERS.getVarName(), "/System"); - try { - final int timeout = 30; - final String everySecondCron = "* * * * * ?"; - // each run starts a new JVM and the job takes about ~5 seconds - final CountDownLatch jobsToExecuteCount = new CountDownLatch(5); - final String noteId = notebook.createNote("note1", anonymous); - - executeNewParagraphByCron(noteId, everySecondCron); - afterStatusChangedListener = new StatusChangedListener() { - @Override - public void onStatusChanged(Job job, Status before, Status after) { - if (after == Status.FINISHED) { - jobsToExecuteCount.countDown(); - } + conf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_CRON_FOLDERS.getVarName(), "/System"); + final int timeout = 60; + final String everySecondCron = "* * * * * ?"; + // each run starts a new JVM and the job takes about ~5 seconds + final CountDownLatch jobsToExecuteCount = new CountDownLatch(5); + final String noteId = notebook.createNote("note1", anonymous); + + executeNewParagraphByCron(noteId, everySecondCron); + afterStatusChangedListener = new StatusChangedListener() { + @Override + public void onStatusChanged(Job 
job, Status before, Status after) { + if (after == Status.FINISHED) { + jobsToExecuteCount.countDown(); } - }; + } + }; - //This job should not run because it's path does not matches "ZEPPELIN_NOTEBOOK_CRON_FOLDERS" - assertFalse(jobsToExecuteCount.await(timeout, TimeUnit.SECONDS)); + // This job should not run because it's path does not matches "ZEPPELIN_NOTEBOOK_CRON_FOLDERS" + assertFalse(jobsToExecuteCount.await(timeout, TimeUnit.SECONDS)); - terminateScheduledNote(noteId); - afterStatusChangedListener = null; + terminateScheduledNote(noteId); + afterStatusChangedListener = null; - final String noteNameSystemId = notebook.createNote("/System/test1", anonymous); - final CountDownLatch jobsToExecuteCountNameSystem = new CountDownLatch(5); + final String noteNameSystemId = notebook.createNote("/System/test1", anonymous); + final CountDownLatch jobsToExecuteCountNameSystem = new CountDownLatch(5); - executeNewParagraphByCron(noteNameSystemId, everySecondCron); - afterStatusChangedListener = new StatusChangedListener() { - @Override - public void onStatusChanged(Job job, Status before, Status after) { - if (after == Status.FINISHED) { - jobsToExecuteCountNameSystem.countDown(); - } + executeNewParagraphByCron(noteNameSystemId, everySecondCron); + afterStatusChangedListener = new StatusChangedListener() { + @Override + public void onStatusChanged(Job job, Status before, Status after) { + if (after == Status.FINISHED) { + jobsToExecuteCountNameSystem.countDown(); } - }; + } + }; - //This job should run because it's path contains "System/" - assertTrue(jobsToExecuteCountNameSystem.await(timeout, TimeUnit.SECONDS)); + // This job should run because it's path contains "System/" + assertTrue(jobsToExecuteCountNameSystem.await(timeout, TimeUnit.SECONDS)); - terminateScheduledNote(noteNameSystemId); - afterStatusChangedListener = null; - } finally { - System.clearProperty(ConfVars.ZEPPELIN_NOTEBOOK_CRON_FOLDERS.getVarName()); - } + terminateScheduledNote(noteNameSystemId); 
+ afterStatusChangedListener = null; } private void terminateScheduledNote(String noteId) throws IOException { @@ -989,7 +992,7 @@ public void testCronWithReleaseResourceClosesOnlySpecificInterpreters() } @Test - public void testCronNoteInTrash() throws InterruptedException, IOException, SchedulerException { + void testCronNoteInTrash() throws InterruptedException, IOException, SchedulerException { String noteId = notebook.createNote("~Trash/NotCron", anonymous); // use write lock because we overwrite the note config notebook.processNote(noteId, @@ -1017,7 +1020,7 @@ public void testCronNoteInTrash() throws InterruptedException, IOException, Sche } @Test - public void testExportAndImportNote() throws Exception { + void testExportAndImportNote() throws Exception { String noteId = notebook.createNote("note1", anonymous); notebook.processNote(noteId, @@ -1068,7 +1071,7 @@ public void testExportAndImportNote() throws Exception { } @Test - public void testCloneNote() throws Exception { + void testCloneNote() throws Exception { String noteId = notebook.createNote("note1", anonymous); notebook.processNote(noteId, note -> { @@ -1116,7 +1119,7 @@ public void testCloneNote() throws Exception { } @Test - public void testResourceRemovealOnParagraphNoteRemove() throws Exception { + void testResourceRemovealOnParagraphNoteRemove() throws Exception { String noteId = notebook.createNote("note1", anonymous); notebook.processNote(noteId, @@ -1150,7 +1153,7 @@ public void testResourceRemovealOnParagraphNoteRemove() throws Exception { } @Test - public void testAngularObjectRemovalOnNotebookRemove() throws InterruptedException, + void testAngularObjectRemovalOnNotebookRemove() throws InterruptedException, IOException { // create a note and a paragraph String noteId = notebook.createNote("note1", anonymous); @@ -1184,7 +1187,7 @@ public void testAngularObjectRemovalOnNotebookRemove() throws InterruptedExcepti } @Test - public void testAngularObjectRemovalOnParagraphRemove() throws 
InterruptedException, + void testAngularObjectRemovalOnParagraphRemove() throws InterruptedException, IOException { // create a note and a paragraph String noteId = notebook.createNote("note1", anonymous); @@ -1227,7 +1230,7 @@ public void testAngularObjectRemovalOnParagraphRemove() throws InterruptedExcept } @Test - public void testAngularObjectRemovalOnInterpreterRestart() throws InterruptedException, + void testAngularObjectRemovalOnInterpreterRestart() throws InterruptedException, IOException, InterpreterException { // create a note and a paragraph String noteId = notebook.createNote("note1", anonymous); @@ -1260,7 +1263,7 @@ public void testAngularObjectRemovalOnInterpreterRestart() throws InterruptedExc } @Test - public void testPermissions() throws IOException { + void testPermissions() throws IOException { // create a note and a paragraph String noteId = notebook.createNote("note1", anonymous); // empty owners, readers or writers means note is public @@ -1313,7 +1316,7 @@ public void testPermissions() throws IOException { } @Test - public void testAuthorizationRoles() throws IOException { + void testAuthorizationRoles() throws IOException { String user1 = "user1"; String user2 = "user2"; Set roles = new HashSet<>(Arrays.asList("admin")); @@ -1359,10 +1362,11 @@ public void testAuthorizationRoles() throws IOException { } @Test - public void testInterpreterSettingConfig() { + void testInterpreterSettingConfig() { LOGGER.info("testInterpreterSettingConfig >>> "); Note note = new Note("testInterpreterSettingConfig", "config_test", - interpreterFactory, interpreterSettingManager, this, credentials, new ArrayList<>()); + interpreterFactory, interpreterSettingManager, this, credentials, new ArrayList<>(), conf, + noteParser); // create paragraphs Paragraph p1 = note.addNewParagraph(anonymous); @@ -1414,7 +1418,7 @@ public void testInterpreterSettingConfig() { } @Test - public void testAbortParagraphStatusOnInterpreterRestart() throws Exception { + void 
testAbortParagraphStatusOnInterpreterRestart() throws Exception { String noteId = notebook.createNote("note1", anonymous); notebook.processNote(noteId, @@ -1458,7 +1462,7 @@ public void testAbortParagraphStatusOnInterpreterRestart() throws Exception { } @Test - public void testPerSessionInterpreterCloseOnNoteRemoval() throws IOException, InterpreterException { + void testPerSessionInterpreterCloseOnNoteRemoval() throws IOException, InterpreterException { // create a notes String note1Id = notebook.createNote("note1", anonymous); InterpreterResult result = notebook.processNote(note1Id, @@ -1502,7 +1506,7 @@ public void testPerSessionInterpreterCloseOnNoteRemoval() throws IOException, In } @Test - public void testPerSessionInterpreter() throws IOException, InterpreterException { + void testPerSessionInterpreter() throws IOException, InterpreterException { // create two notes String note1Id = notebook.createNote("note1", anonymous); notebook.processNote(note1Id, @@ -1559,7 +1563,7 @@ public void testPerSessionInterpreter() throws IOException, InterpreterException @Test - public void testPerNoteSessionInterpreter() throws IOException, InterpreterException { + void testPerNoteSessionInterpreter() throws IOException, InterpreterException { // create two notes String note1Id = notebook.createNote("note1", anonymous); notebook.processNote(note1Id, @@ -1698,7 +1702,7 @@ public void onParagraphStatusChange(Paragraph p, Status status) { } @Test - public void testGetAllNotes() throws Exception { + void testGetAllNotes() throws Exception { String note1Id = notebook.createNote("note1", anonymous); String note2Id = notebook.createNote("note2", anonymous); assertEquals(2, notebook.getNotesInfo(noteId -> authorizationService.isReader(noteId, new HashSet<>(Arrays.asList("anonymous")))).size()); @@ -1722,7 +1726,7 @@ public void testGetAllNotes() throws Exception { } @Test - public void testCreateDuplicateNote() throws Exception { + void testCreateDuplicateNote() throws Exception { 
String note1Id = notebook.createNote("note1", anonymous); try { notebook.createNote("note1", anonymous); @@ -1735,7 +1739,7 @@ public void testCreateDuplicateNote() throws Exception { } @Test - public void testGetAllNotesWithDifferentPermissions() throws IOException { + void testGetAllNotesWithDifferentPermissions() throws IOException { List notes1 = notebook.getNotesInfo(noteId -> authorizationService.isReader(noteId, new HashSet<>(Arrays.asList("user1")))); List notes2 = notebook.getNotesInfo(noteId -> authorizationService.isReader(noteId, new HashSet<>(Arrays.asList("user2")))); assertEquals(0, notes1.size()); @@ -1771,7 +1775,7 @@ public void testGetAllNotesWithDifferentPermissions() throws IOException { } @Test - public void testPublicPrivateNewNote() throws IOException { + void testPublicPrivateNewNote() throws IOException { // case of public note assertTrue(conf.isNotebookPublic()); assertTrue(authorizationService.isPublic()); @@ -1799,10 +1803,13 @@ public void testPublicPrivateNewNote() throws IOException { assertEquals(0, authorizationService.getWriters(notePublicId).size()); // case of private note - System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_PUBLIC.getVarName(), "false"); - ZeppelinConfiguration conf2 = ZeppelinConfiguration.create(); + + ZeppelinConfiguration conf2 = ZeppelinConfiguration.load(); + conf2.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_PUBLIC.getVarName(), "false"); assertFalse(conf2.isNotebookPublic()); - // notebook authorization reads from conf, so no need to re-initilize + authorizationService = new AuthorizationService(noteManager, conf2, storage); + notebook = new Notebook(conf2, authorizationService, notebookRepo, noteManager, + interpreterFactory, interpreterSettingManager, credentials, null); assertFalse(authorizationService.isPublic()); // check that still 1 note per user @@ -1833,14 +1840,10 @@ public void testPublicPrivateNewNote() throws IOException { assertEquals(1, authorizationService.getReaders(notePrivateId).size()); 
assertEquals(1, authorizationService.getRunners(notePrivateId).size()); assertEquals(1, authorizationService.getWriters(notePrivateId).size()); - - //set back public to true - System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_PUBLIC.getVarName(), "true"); - ZeppelinConfiguration.create(); } @Test - public void testCloneImportCheck() throws IOException { + void testCloneImportCheck() throws IOException { String sourceNoteId = notebook.createNote("note1", new AuthenticationInfo("user")); notebook.processNote(sourceNoteId, sourceNote -> { @@ -1869,7 +1872,7 @@ public void testCloneImportCheck() throws IOException { @ParameterizedTest @MethodSource("provideMoveTestParameters") - public void testMoveNote(String oldName, String newPath) throws InterruptedException, IOException { + void testMoveNote(String oldName, String newPath) throws InterruptedException, IOException { String noteId = null; String newName = FilenameUtils.getBaseName(newPath); try { diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/ParagraphTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/ParagraphTest.java index 612ee2f728e..869d5622fcb 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/ParagraphTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/ParagraphTest.java @@ -66,10 +66,10 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -public class ParagraphTest extends AbstractInterpreterTest { +class ParagraphTest extends AbstractInterpreterTest { @Test - public void scriptBodyWithReplName() { + void scriptBodyWithReplName() { Note note = createNote(); Paragraph paragraph = new Paragraph(note, null); paragraph.setText("%test (1234567"); @@ -82,7 +82,7 @@ public void scriptBodyWithReplName() { } @Test - public void scriptBodyWithoutReplName() { + void scriptBodyWithoutReplName() { Note note = createNote(); Paragraph paragraph = new Paragraph(note, null); paragraph.setText("1234567"); 
@@ -91,7 +91,7 @@ public void scriptBodyWithoutReplName() { } @Test - public void replNameAndNoBody() { + void replNameAndNoBody() { Note note = createNote(); Paragraph paragraph = new Paragraph(note, null); paragraph.setText("%test"); @@ -100,7 +100,7 @@ public void replNameAndNoBody() { } @Test - public void replSingleCharName() { + void replSingleCharName() { Note note = createNote(); Paragraph paragraph = new Paragraph(note, null); paragraph.setText("%r a"); @@ -109,7 +109,7 @@ public void replSingleCharName() { } @Test - public void testParagraphProperties() { + void testParagraphProperties() { Note note = createNote(); Paragraph paragraph = new Paragraph(note, null); paragraph.setText("%test(p1=v1,p2=v2) a"); @@ -135,18 +135,18 @@ public void testParagraphProperties() { } @Test - public void testInvalidProperties() { + void testInvalidProperties() { + Note note = createNote(); + Paragraph paragraph = new Paragraph(note, null); assertThrows(RuntimeException.class, () -> { - Note note = createNote(); - Paragraph paragraph = new Paragraph(note, null); paragraph.setText("%test(p1=v1=v2) a"); }, "Invalid paragraph properties format"); } @Test - public void replInvalid() { + void replInvalid() { Note note = createNote(); Paragraph paragraph = new Paragraph(note, null); paragraph.setText("foo %r"); @@ -163,7 +163,7 @@ public void replInvalid() { } @Test - public void replNameEndsWithWhitespace() { + void replNameEndsWithWhitespace() { Note note = createNote(); Paragraph paragraph = new Paragraph(note, null); paragraph.setText("%test\r\n###Hello"); @@ -204,7 +204,7 @@ public void replNameEndsWithWhitespace() { } @Test - public void should_extract_variable_from_angular_object_registry() throws Exception { + void should_extract_variable_from_angular_object_registry() throws Exception { //Given final String noteId = "noteId"; @@ -243,7 +243,7 @@ public void should_extract_variable_from_angular_object_registry() throws Except } @Test - public void 
returnDefaultParagraphWithNewUser() { + void returnDefaultParagraphWithNewUser() { Paragraph p = new Paragraph("para_1", null, null); String defaultValue = "Default Value"; p.setResult(new InterpreterResult(Code.SUCCESS, defaultValue)); @@ -253,7 +253,8 @@ public void returnDefaultParagraphWithNewUser() { } @Disabled - public void returnUnchangedResultsWithDifferentUser() throws Throwable { + @Test + void returnUnchangedResultsWithDifferentUser() throws Throwable { Note mockNote = mock(Note.class); when(mockNote.getCredentials()).thenReturn(mock(Credentials.class)); Paragraph spyParagraph = spy(new Paragraph("para_1", mockNote, null)); @@ -319,7 +320,7 @@ public void returnUnchangedResultsWithDifferentUser() throws Throwable { } @Test - public void testCursorPosition() { + void testCursorPosition() { Paragraph paragraph = spy(new Paragraph()); // left = buffer, middle = cursor position into source code, right = cursor position after parse List> dataSet = Arrays.asList( @@ -350,7 +351,7 @@ public void testCursorPosition() { } @Test - public void credentialReplacement() throws Throwable { + void credentialReplacement() throws Throwable { Note mockNote = mock(Note.class); Credentials creds = mock(Credentials.class); when(mockNote.getCredentials()).thenReturn(creds); diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/ParagraphTextParserTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/ParagraphTextParserTest.java index c1f508a3512..ef62f0cf3dd 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/ParagraphTextParserTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/ParagraphTextParserTest.java @@ -17,16 +17,15 @@ package org.apache.zeppelin.notebook; -import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; -public class ParagraphTextParserTest 
{ +class ParagraphTextParserTest { @Test - public void testJupyter() { + void testJupyter() { ParagraphTextParser.ParseResult parseResult = ParagraphTextParser.parse("%jupyter(kernel=ir)"); assertEquals("jupyter", parseResult.getIntpText()); assertEquals(1, parseResult.getLocalProperties().size()); @@ -36,7 +35,7 @@ public void testJupyter() { @Test - public void testCassandra() { + void testCassandra() { ParagraphTextParser.ParseResult parseResult = ParagraphTextParser.parse( "%cassandra(locale=ru_RU, timeFormat=\"E, d MMM yy\", floatPrecision = 5, output=cql)\n" + "select * from system_auth.roles;"); @@ -47,7 +46,7 @@ public void testCassandra() { } @Test - public void testSparkSubmit() { + void testSparkSubmit() { ParagraphTextParser.ParseResult parseResult = ParagraphTextParser.parse( "%spark-submit --class A a.jar"); assertEquals("spark-submit", parseResult.getIntpText()); @@ -55,7 +54,7 @@ public void testSparkSubmit() { } @Test - public void testParagraphTextLocalPropertiesAndText() { + void testParagraphTextLocalPropertiesAndText() { ParagraphTextParser.ParseResult parseResult = ParagraphTextParser.parse("%spark.pyspark(pool=pool_1) sc.version"); assertEquals("spark.pyspark", parseResult.getIntpText()); assertEquals(1, parseResult.getLocalProperties().size()); @@ -64,7 +63,7 @@ public void testParagraphTextLocalPropertiesAndText() { } @Test - public void testParagraphTextLocalPropertiesNoText() { + void testParagraphTextLocalPropertiesNoText() { ParagraphTextParser.ParseResult parseResult = ParagraphTextParser.parse("%spark.pyspark(pool=pool_1)"); assertEquals("spark.pyspark", parseResult.getIntpText()); assertEquals(1, parseResult.getLocalProperties().size()); @@ -73,7 +72,7 @@ public void testParagraphTextLocalPropertiesNoText() { } @Test - public void testParagraphTextLocalPropertyNoValueNoText() { + void testParagraphTextLocalPropertyNoValueNoText() { ParagraphTextParser.ParseResult parseResult = ParagraphTextParser.parse("%spark.pyspark(pool)"); 
assertEquals("spark.pyspark", parseResult.getIntpText()); assertEquals(1, parseResult.getLocalProperties().size()); @@ -82,7 +81,7 @@ public void testParagraphTextLocalPropertyNoValueNoText() { } @Test - public void testParagraphTextNoLocalProperties() { + void testParagraphTextNoLocalProperties() { ParagraphTextParser.ParseResult parseResult = ParagraphTextParser.parse("%spark.pyspark\nsc.version"); assertEquals("spark.pyspark", parseResult.getIntpText()); assertEquals(0, parseResult.getLocalProperties().size()); @@ -90,7 +89,7 @@ public void testParagraphTextNoLocalProperties() { } @Test - public void testParagraphNoInterpreter() { + void testParagraphNoInterpreter() { ParagraphTextParser.ParseResult parseResult = ParagraphTextParser.parse("sc.version"); assertEquals("", parseResult.getIntpText()); assertEquals(0, parseResult.getLocalProperties().size()); @@ -98,7 +97,7 @@ public void testParagraphNoInterpreter() { } @Test - public void testParagraphInterpreterWithoutProperties() { + void testParagraphInterpreterWithoutProperties() { ParagraphTextParser.ParseResult parseResult = ParagraphTextParser.parse("%spark() sc.version"); assertEquals("spark", parseResult.getIntpText()); assertEquals(0, parseResult.getLocalProperties().size()); @@ -106,7 +105,7 @@ public void testParagraphInterpreterWithoutProperties() { } @Test - public void testParagraphTextQuotedPropertyValue1() { + void testParagraphTextQuotedPropertyValue1() { ParagraphTextParser.ParseResult parseResult = ParagraphTextParser.parse( "%spark.pyspark(pool=\"value with = inside\")"); assertEquals("spark.pyspark", parseResult.getIntpText()); @@ -116,7 +115,7 @@ public void testParagraphTextQuotedPropertyValue1() { } @Test - public void testParagraphTextQuotedPropertyValue2() { + void testParagraphTextQuotedPropertyValue2() { ParagraphTextParser.ParseResult parseResult = ParagraphTextParser.parse( "%spark.pyspark(pool=\"value with \\\" inside\", p=\"eol\\ninside\" )"); assertEquals("spark.pyspark", 
parseResult.getIntpText()); @@ -127,7 +126,7 @@ public void testParagraphTextQuotedPropertyValue2() { } @Test - public void testParagraphTextQuotedPropertyKeyAndValue() { + void testParagraphTextQuotedPropertyKeyAndValue() { ParagraphTextParser.ParseResult parseResult = ParagraphTextParser.parse( "%spark.pyspark(\"po ol\"=\"value with \\\" inside\")"); assertEquals("spark.pyspark", parseResult.getIntpText()); @@ -138,28 +137,28 @@ public void testParagraphTextQuotedPropertyKeyAndValue() { @Test - public void testParagraphTextUnfinishedConfig() { + void testParagraphTextUnfinishedConfig() { assertThrows(RuntimeException.class, () -> ParagraphTextParser.parse("%spark.pyspark(pool="), "Problems by parsing paragraph. Not finished interpreter configuration"); } @Test - public void testParagraphTextUnfinishedQuote() { + void testParagraphTextUnfinishedQuote() { assertThrows(RuntimeException.class, () -> ParagraphTextParser.parse("%spark.pyspark(pool=\"2314234) sc.version"), "Problems by parsing paragraph. Not finished interpreter configuration"); } @Test - public void testParagraphTextUnclosedBackslash() { + void testParagraphTextUnclosedBackslash() { assertThrows(RuntimeException.class, () -> ParagraphTextParser.parse("%spark.pyspark(pool=\\"), "Problems by parsing paragraph. Unfinished escape sequence"); } @Test - public void testParagraphTextEmptyKey() { + void testParagraphTextEmptyKey() { assertThrows((RuntimeException.class), () -> ParagraphTextParser.parse("%spark.pyspark(pool=123, ,)"), "Problems by parsing paragraph. 
Local property key is empty"); diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/repo/GitNotebookRepoTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/repo/GitNotebookRepoTest.java index 054b90c21c7..0c8f4742b2a 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/repo/GitNotebookRepoTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/repo/GitNotebookRepoTest.java @@ -27,6 +27,7 @@ import java.io.File; import java.io.IOException; +import java.nio.file.Files; import java.util.List; import java.util.Map; @@ -35,8 +36,10 @@ import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.conf.ZeppelinConfiguration.ConfVars; import org.apache.zeppelin.interpreter.InterpreterFactory; +import org.apache.zeppelin.notebook.GsonNoteParser; import org.apache.zeppelin.notebook.Note; import org.apache.zeppelin.notebook.NoteInfo; +import org.apache.zeppelin.notebook.NoteParser; import org.apache.zeppelin.notebook.Paragraph; import org.apache.zeppelin.notebook.repo.NotebookRepoWithVersionControl.Revision; import org.apache.zeppelin.user.AuthenticationInfo; @@ -53,7 +56,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class GitNotebookRepoTest { +class GitNotebookRepoTest { private static final Logger LOG = LoggerFactory.getLogger(GitNotebookRepoTest.class); private static final String TEST_NOTE_ID = "2A94M5J1Z"; @@ -62,47 +65,46 @@ public class GitNotebookRepoTest { private static final String TEST_NOTE_PATH2 = "/my_project/my_note2"; private File zeppelinDir; - private String notebooksDir; + private File notebooksDir; private ZeppelinConfiguration conf; + private NoteParser noteParser; private GitNotebookRepo notebookRepo; @BeforeEach public void setUp() throws Exception { - String zpath = System.getProperty("java.io.tmpdir") + "/ZeppelinTest_" + System.currentTimeMillis(); - zeppelinDir = new File(zpath); - zeppelinDir.mkdirs(); - new File(zeppelinDir, 
"conf").mkdirs(); + conf = ZeppelinConfiguration.load(); + noteParser = new GsonNoteParser(conf); + zeppelinDir = Files.createTempDirectory(this.getClass().getName()).toFile(); + File confDir = new File(zeppelinDir, "conf"); + confDir.mkdirs(); - notebooksDir = String.join(File.separator, zpath, "notebook"); - File notebookDir = new File(notebooksDir); - notebookDir.mkdirs(); + notebooksDir = new File(zeppelinDir, "notebook"); + notebooksDir.mkdirs(); FileUtils.copyDirectory( new File(GitNotebookRepoTest.class.getResource("/notebook").getFile()), - new File(notebooksDir)); + notebooksDir); - System.setProperty(ConfVars.ZEPPELIN_HOME.getVarName(), zeppelinDir.getAbsolutePath()); - System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_DIR.getVarName(), notebookDir.getAbsolutePath()); - System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_STORAGE.getVarName(), "org.apache.zeppelin.notebook.repo.GitNotebookRepo"); - - conf = ZeppelinConfiguration.create(); + conf.setProperty(ConfVars.ZEPPELIN_HOME.getVarName(), zeppelinDir.getAbsolutePath()); + conf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_DIR.getVarName(), notebooksDir.getAbsolutePath()); + conf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_STORAGE.getVarName(), + "org.apache.zeppelin.notebook.repo.GitNotebookRepo"); } @AfterEach public void tearDown() throws Exception { - if (!FileUtils.deleteQuietly(zeppelinDir)) { - LOG.error("Failed to delete {} ", zeppelinDir.getName()); - } + FileUtils.deleteDirectory(zeppelinDir); } @Test - public void initNonemptyNotebookDir() throws IOException, GitAPIException { + void initNonemptyNotebookDir() throws IOException, GitAPIException { //given - .git does not exit - File dotGit = new File(String.join(File.separator, notebooksDir, ".git")); + File dotGit = new File(notebooksDir, ".git"); assertFalse(dotGit.exists()); //when - notebookRepo = new GitNotebookRepo(conf); + notebookRepo = new GitNotebookRepo(); + notebookRepo.init(conf, noteParser); //then Git git = notebookRepo.getGit(); @@ -117,9 +119,10 
@@ public void initNonemptyNotebookDir() throws IOException, GitAPIException { } @Test - public void showNotebookHistoryEmptyTest() throws GitAPIException, IOException { + void showNotebookHistoryEmptyTest() throws GitAPIException, IOException { //given - notebookRepo = new GitNotebookRepo(conf); + notebookRepo = new GitNotebookRepo(); + notebookRepo.init(conf, noteParser); assertFalse(notebookRepo.list(null).isEmpty()); //when @@ -131,9 +134,10 @@ public void showNotebookHistoryEmptyTest() throws GitAPIException, IOException { } @Test - public void showNotebookHistoryMultipleNotesTest() throws IOException { + void showNotebookHistoryMultipleNotesTest() throws IOException { //initial checks - notebookRepo = new GitNotebookRepo(conf); + notebookRepo = new GitNotebookRepo(); + notebookRepo.init(conf, noteParser); assertFalse(notebookRepo.list(null).isEmpty()); assertTrue(containsNote(notebookRepo.list(null), TEST_NOTE_ID)); assertTrue(containsNote(notebookRepo.list(null), TEST_NOTE_ID2)); @@ -176,9 +180,10 @@ public void showNotebookHistoryMultipleNotesTest() throws IOException { } @Test - public void addCheckpointTest() throws IOException, GitAPIException { + void addCheckpointTest() throws IOException, GitAPIException { // initial checks - notebookRepo = new GitNotebookRepo(conf); + notebookRepo = new GitNotebookRepo(); + notebookRepo.init(conf, noteParser); assertFalse(notebookRepo.list(null).isEmpty()); assertTrue(containsNote(notebookRepo.list(null), TEST_NOTE_ID)); assertTrue(notebookRepo.revisionHistory(TEST_NOTE_ID, TEST_NOTE_PATH, null).isEmpty()); @@ -243,9 +248,10 @@ private boolean containsNote(Map notes, String noteId) { } @Test - public void getRevisionTest() throws IOException { + void getRevisionTest() throws IOException { // initial checks - notebookRepo = new GitNotebookRepo(conf); + notebookRepo = new GitNotebookRepo(); + notebookRepo.init(conf, noteParser); assertFalse(notebookRepo.list(null).isEmpty()); 
assertTrue(containsNote(notebookRepo.list(null), TEST_NOTE_ID)); assertTrue(notebookRepo.revisionHistory(TEST_NOTE_ID, TEST_NOTE_PATH, null).isEmpty()); @@ -301,9 +307,10 @@ public void getRevisionTest() throws IOException { } @Test - public void getRevisionFailTest() throws IOException { + void getRevisionFailTest() throws IOException { // initial checks - notebookRepo = new GitNotebookRepo(conf); + notebookRepo = new GitNotebookRepo(); + notebookRepo.init(conf, noteParser); assertFalse(notebookRepo.list(null).isEmpty()); assertTrue(containsNote(notebookRepo.list(null), TEST_NOTE_ID)); assertTrue(notebookRepo.revisionHistory(TEST_NOTE_ID, TEST_NOTE_PATH, null).isEmpty()); @@ -343,9 +350,10 @@ public void getRevisionFailTest() throws IOException { } @Test - public void setRevisionTest() throws IOException { + void setRevisionTest() throws IOException { //create repo and check that note doesn't contain revisions - notebookRepo = new GitNotebookRepo(conf); + notebookRepo = new GitNotebookRepo(); + notebookRepo.init(conf, noteParser); assertFalse(notebookRepo.list(null).isEmpty()); assertTrue(containsNote(notebookRepo.list(null), TEST_NOTE_ID)); assertTrue(notebookRepo.revisionHistory(TEST_NOTE_ID, TEST_NOTE_PATH, null).isEmpty()); @@ -405,9 +413,10 @@ public void setRevisionTest() throws IOException { } @Test - public void moveNoteTest() throws IOException, GitAPIException { + void moveNoteTest() throws IOException, GitAPIException { //given - notebookRepo = new GitNotebookRepo(conf); + notebookRepo = new GitNotebookRepo(); + notebookRepo.init(conf, noteParser); notebookRepo.checkpoint(TEST_NOTE_ID, TEST_NOTE_PATH, "first commit, note1", null); //when @@ -422,9 +431,10 @@ public void moveNoteTest() throws IOException, GitAPIException { } @Test - public void moveFolderTest() throws IOException, GitAPIException { + void moveFolderTest() throws IOException, GitAPIException { //given - notebookRepo = new GitNotebookRepo(conf); + notebookRepo = new GitNotebookRepo(); + 
notebookRepo.init(conf, noteParser); notebookRepo.checkpoint(TEST_NOTE_ID, TEST_NOTE_PATH, "first commit, note1", null); notebookRepo.checkpoint(TEST_NOTE_ID2, TEST_NOTE_PATH2, "second commit, note2", null); @@ -439,9 +449,10 @@ public void moveFolderTest() throws IOException, GitAPIException { } @Test - public void removeNoteTest() throws IOException, GitAPIException { + void removeNoteTest() throws IOException, GitAPIException { //given - notebookRepo = new GitNotebookRepo(conf); + notebookRepo = new GitNotebookRepo(); + notebookRepo.init(conf, noteParser); notebookRepo.checkpoint(TEST_NOTE_ID, TEST_NOTE_PATH, "first commit, note1", null); //when @@ -452,9 +463,10 @@ public void removeNoteTest() throws IOException, GitAPIException { } @Test - public void removeFolderTest() throws IOException, GitAPIException { + void removeFolderTest() throws IOException, GitAPIException { //given - notebookRepo = new GitNotebookRepo(conf); + notebookRepo = new GitNotebookRepo(); + notebookRepo.init(conf, noteParser); notebookRepo.checkpoint(TEST_NOTE_ID, TEST_NOTE_PATH, "first commit, note1", null); //when diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/repo/NotebookRepoSyncInitializationTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/repo/NotebookRepoSyncInitializationTest.java index bd940017ae8..6bcbe977dd1 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/repo/NotebookRepoSyncInitializationTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/repo/NotebookRepoSyncInitializationTest.java @@ -17,25 +17,26 @@ package org.apache.zeppelin.notebook.repo; +import org.apache.commons.io.FileUtils; import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.conf.ZeppelinConfiguration.ConfVars; +import org.apache.zeppelin.notebook.GsonNoteParser; +import org.apache.zeppelin.notebook.NoteParser; import org.apache.zeppelin.notebook.repo.mock.VFSNotebookRepoMock; -import 
org.junit.jupiter.api.AfterEach; +import org.apache.zeppelin.plugin.PluginManager; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import java.io.File; import java.io.IOException; +import java.nio.file.Files; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; -import static org.junit.jupiter.api.Assertions.fail; -//TODO(zjffdu) move it to zeppelin-zengine -public class NotebookRepoSyncInitializationTest { - private static final Logger LOGGER = LoggerFactory.getLogger(NotebookRepoSyncInitializationTest.class); +class NotebookRepoSyncInitializationTest { + private String validFirstStorageClass = "org.apache.zeppelin.notebook.repo.VFSNotebookRepo"; private String validSecondStorageClass = "org.apache.zeppelin.notebook.repo.mock.VFSNotebookRepoMock"; private String invalidStorageClass = "org.apache.zeppelin.notebook.repo.DummyNotebookRepo"; @@ -45,122 +46,126 @@ public class NotebookRepoSyncInitializationTest { private String unsupportedStorageConf = validFirstStorageClass + "," + validSecondStorageClass + "," + validSecondStorageClass; private String emptyStorageConf = ""; + private ZeppelinConfiguration zConf; + private NoteParser noteParser; + private PluginManager pluginManager; + @BeforeEach public void setUp(){ - System.setProperty(ConfVars.ZEPPELIN_PLUGINS_DIR.getVarName(), new File("../../../plugins").getAbsolutePath()); + zConf = ZeppelinConfiguration.load(); + noteParser = new GsonNoteParser(zConf); System.setProperty("zeppelin.isTest", "true"); - } - - @AfterEach - public void tearDown() { - System.clearProperty("zeppelin.isTest"); + zConf.setProperty(ConfVars.ZEPPELIN_PLUGINS_DIR.getVarName(), + new File("../../../plugins").getAbsolutePath()); + pluginManager = new PluginManager(zConf); } @Test - public void validInitOneStorageTest() throws 
IOException { + void validInitOneStorageTest() throws IOException { // no need to initialize folder due to one storage // set confs - System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_STORAGE.getVarName(), validOneStorageConf); - ZeppelinConfiguration conf = ZeppelinConfiguration.create(); + zConf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_STORAGE.getVarName(), validOneStorageConf); // create repo - NotebookRepoSync notebookRepoSync = new NotebookRepoSync(conf); - // check proper initialization of one storage - assertEquals(notebookRepoSync.getRepoCount(), 1); - assertTrue(notebookRepoSync.getRepo(0) instanceof VFSNotebookRepo); + try (NotebookRepoSync notebookRepoSync = new NotebookRepoSync(pluginManager)) { + notebookRepoSync.init(zConf, noteParser); + + // check proper initialization of one storage + assertEquals(1, notebookRepoSync.getRepoCount()); + assertTrue(notebookRepoSync.getRepo(0) instanceof VFSNotebookRepo); + } } @Test - public void validInitTwoStorageTest() throws IOException { - // initialize folders for each storage - String zpath = System.getProperty("java.io.tmpdir") + "/ZeppelinLTest_" + System.currentTimeMillis(); - File mainZepDir = new File(zpath); - mainZepDir.mkdirs(); - new File(mainZepDir, "conf").mkdirs(); - String mainNotePath = zpath+"/notebook"; - String secNotePath = mainNotePath + "_secondary"; - File mainNotebookDir = new File(mainNotePath); - File secNotebookDir = new File(secNotePath); + void validInitTwoStorageTest() throws IOException { + + File mainZepDir = Files.createTempDirectory(this.getClass().getSimpleName()).toFile(); + File conf = new File(mainZepDir, "conf"); + conf.mkdirs(); + File mainNotebookDir = new File(mainZepDir, "notebook"); + File secNotebookDir = new File(mainZepDir, "notebook_secondary"); mainNotebookDir.mkdirs(); secNotebookDir.mkdirs(); // set confs - System.setProperty(ConfVars.ZEPPELIN_HOME.getVarName(), mainZepDir.getAbsolutePath()); - System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_DIR.getVarName(), 
mainNotebookDir.getAbsolutePath()); - System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_STORAGE.getVarName(), validTwoStorageConf); - ZeppelinConfiguration conf = ZeppelinConfiguration.create(); + zConf.setProperty(ConfVars.ZEPPELIN_HOME.getVarName(), mainZepDir.getAbsolutePath()); + zConf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_DIR.getVarName(), + mainNotebookDir.getAbsolutePath()); + zConf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_STORAGE.getVarName(), validTwoStorageConf); // create repo - NotebookRepoSync notebookRepoSync = new NotebookRepoSync(conf); - // check that both initialized - assertEquals(notebookRepoSync.getRepoCount(), 2); - assertTrue(notebookRepoSync.getRepo(0) instanceof VFSNotebookRepo); - assertTrue(notebookRepoSync.getRepo(1) instanceof VFSNotebookRepoMock); + try (NotebookRepoSync notebookRepoSync = new NotebookRepoSync(pluginManager)) { + notebookRepoSync.init(zConf, noteParser); + // check that both initialized + assertEquals(2, notebookRepoSync.getRepoCount()); + assertTrue(notebookRepoSync.getRepo(0) instanceof VFSNotebookRepo); + assertTrue(notebookRepoSync.getRepo(1) instanceof VFSNotebookRepoMock); + } + FileUtils.deleteDirectory(mainZepDir); + } @Test - public void invalidInitTwoStorageTest() throws IOException { + void invalidInitTwoStorageTest() throws IOException { // set confs - System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_STORAGE.getVarName(), invalidTwoStorageConf); - ZeppelinConfiguration conf = ZeppelinConfiguration.create(); + zConf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_STORAGE.getVarName(), invalidTwoStorageConf); // create repo - try { - NotebookRepoSync notebookRepoSync = new NotebookRepoSync(conf); - fail("Should throw exception due to invalid NotebookRepo"); - } catch (IOException e) { - LOGGER.error(e.getMessage()); - assertTrue(e.getMessage().contains("Fail to instantiate notebookrepo from classpath directly")); - } + IOException exception = assertThrows(IOException.class, () -> { + try (NotebookRepoSync notebookRepoSync = 
new NotebookRepoSync(pluginManager)) { + notebookRepoSync.init(zConf, noteParser); + } + }); + assertTrue(exception.getMessage() + .contains("Fail to instantiate notebookrepo from classpath directly")); } @Test - public void initUnsupportedNumberStoragesTest() throws IOException { + void initUnsupportedNumberStoragesTest() throws IOException { // initialize folders for each storage, currently for 2 only - String zpath = System.getProperty("java.io.tmpdir") + "/ZeppelinLTest_" + System.currentTimeMillis(); - File mainZepDir = new File(zpath); + File mainZepDir = Files.createTempDirectory(this.getClass().getSimpleName()).toFile(); mainZepDir.mkdirs(); new File(mainZepDir, "conf").mkdirs(); - String mainNotePath = zpath+"/notebook"; - String secNotePath = mainNotePath + "_secondary"; - File mainNotebookDir = new File(mainNotePath); - File secNotebookDir = new File(secNotePath); + File mainNotebookDir = new File(mainZepDir, "notebook"); + File secNotebookDir = new File(mainZepDir, "notebook_secondary"); mainNotebookDir.mkdirs(); secNotebookDir.mkdirs(); // set confs - System.setProperty(ConfVars.ZEPPELIN_HOME.getVarName(), mainZepDir.getAbsolutePath()); - System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_DIR.getVarName(), mainNotebookDir.getAbsolutePath()); - System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_STORAGE.getVarName(), unsupportedStorageConf); - ZeppelinConfiguration conf = ZeppelinConfiguration.create(); + zConf.setProperty(ConfVars.ZEPPELIN_HOME.getVarName(), mainZepDir.getAbsolutePath()); + zConf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_DIR.getVarName(), + mainNotebookDir.getAbsolutePath()); + zConf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_STORAGE.getVarName(), unsupportedStorageConf); // create repo - NotebookRepoSync notebookRepoSync = new NotebookRepoSync(conf); - // check that first two storages initialized instead of three - assertEquals(notebookRepoSync.getRepoCount(), 2); - assertTrue(notebookRepoSync.getRepo(0) instanceof VFSNotebookRepo); - 
assertTrue(notebookRepoSync.getRepo(1) instanceof VFSNotebookRepoMock); + try (NotebookRepoSync notebookRepoSync = new NotebookRepoSync(pluginManager)) { + notebookRepoSync.init(zConf, noteParser); + // check that first two storages initialized instead of three + assertEquals(2, notebookRepoSync.getRepoCount()); + assertTrue(notebookRepoSync.getRepo(0) instanceof VFSNotebookRepo); + assertTrue(notebookRepoSync.getRepo(1) instanceof VFSNotebookRepoMock); + } + FileUtils.deleteDirectory(mainZepDir); } @Test - public void initEmptyStorageTest() throws IOException { + void initEmptyStorageTest() throws IOException { // set confs - System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_STORAGE.getVarName(), emptyStorageConf); - ZeppelinConfiguration conf = ZeppelinConfiguration.create(); + zConf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_STORAGE.getVarName(), emptyStorageConf); // create repo - NotebookRepoSync notebookRepoSync = new NotebookRepoSync(conf); - // check initialization of one default storage - assertEquals(notebookRepoSync.getRepoCount(), 1); - assertTrue(notebookRepoSync.getRepo(0) instanceof NotebookRepoWithVersionControl); + try (NotebookRepoSync notebookRepoSync = new NotebookRepoSync(pluginManager)) { + notebookRepoSync.init(zConf, noteParser); + // check initialization of one default storage + assertEquals(1, notebookRepoSync.getRepoCount()); + assertTrue(notebookRepoSync.getRepo(0) instanceof NotebookRepoWithVersionControl); + } } @Test - public void initOneDummyStorageTest() { - System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_STORAGE.getVarName(), invalidStorageClass); - ZeppelinConfiguration conf = ZeppelinConfiguration.create(); + void initOneDummyStorageTest() { + zConf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_STORAGE.getVarName(), invalidStorageClass); // create repo - try { - NotebookRepoSync notebookRepoSync = new NotebookRepoSync(conf); - fail("Should throw exception due to invalid NotebookRepo"); - } catch (IOException e) { - LOGGER.error(e.getMessage()); - 
assertTrue(e.getMessage().contains("Fail to instantiate notebookrepo from classpath directly")); - } + IOException e = assertThrows(IOException.class, () -> { + try (NotebookRepoSync notebookRepoSync = new NotebookRepoSync(pluginManager)) { + notebookRepoSync.init(zConf, noteParser); + } + }); + assertTrue(e.getMessage().contains("Fail to instantiate notebookrepo from classpath directly")); } } \ No newline at end of file diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/repo/NotebookRepoSyncTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/repo/NotebookRepoSyncTest.java index 08ff441cf04..db21bf9655a 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/repo/NotebookRepoSyncTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/repo/NotebookRepoSyncTest.java @@ -21,9 +21,9 @@ import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.mock; -import com.google.common.io.Files; import java.io.File; import java.io.IOException; +import java.nio.file.Files; import java.util.HashSet; import java.util.Map; import java.util.Set; @@ -36,11 +36,14 @@ import org.apache.zeppelin.interpreter.InterpreterSettingManager; import org.apache.zeppelin.interpreter.remote.RemoteInterpreterProcessListener; import org.apache.zeppelin.notebook.AuthorizationService; +import org.apache.zeppelin.notebook.GsonNoteParser; import org.apache.zeppelin.notebook.Note; import org.apache.zeppelin.notebook.NoteInfo; import org.apache.zeppelin.notebook.NoteManager; +import org.apache.zeppelin.notebook.NoteParser; import org.apache.zeppelin.notebook.Notebook; import org.apache.zeppelin.notebook.Paragraph; +import org.apache.zeppelin.plugin.PluginManager; import org.apache.zeppelin.storage.ConfigStorage; import org.apache.zeppelin.user.AuthenticationInfo; import org.apache.zeppelin.user.Credentials; @@ -51,10 +54,13 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class 
NotebookRepoSyncTest { +class NotebookRepoSyncTest { - private File ZEPPELIN_HOME; + private File zeppelinHome; private ZeppelinConfiguration conf; + private NoteParser noteParser; + private ConfigStorage storage; + private PluginManager pluginManager; private File mainNotebookDir; private File secNotebookDir; private Notebook notebook; @@ -70,60 +76,62 @@ public class NotebookRepoSyncTest { @BeforeEach public void setUp() throws Exception { System.setProperty("zeppelin.isTest", "true"); - ZEPPELIN_HOME = Files.createTempDir(); - new File(ZEPPELIN_HOME, "conf").mkdirs(); - String mainNotePath = ZEPPELIN_HOME.getAbsolutePath() + "/notebook"; - String secNotePath = ZEPPELIN_HOME.getAbsolutePath() + "/notebook_secondary"; - mainNotebookDir = new File(mainNotePath); - secNotebookDir = new File(secNotePath); + zeppelinHome = Files.createTempDirectory(this.getClass().getSimpleName()).toFile(); + File confDir = new File(zeppelinHome, "conf"); + confDir.mkdirs(); + String mainNotePath = zeppelinHome.getAbsolutePath() + "/notebook"; + String secNotePath = zeppelinHome.getAbsolutePath() + "/notebook_secondary"; + mainNotebookDir = new File(zeppelinHome, "notebook"); + secNotebookDir = new File(zeppelinHome, "notebook_secondary"); mainNotebookDir.mkdirs(); secNotebookDir.mkdirs(); - - System.setProperty(ConfVars.ZEPPELIN_HOME.getVarName(), ZEPPELIN_HOME.getAbsolutePath()); - System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_DIR.getVarName(), mainNotebookDir.getAbsolutePath()); - System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_STORAGE.getVarName(), "org.apache.zeppelin.notebook.repo.VFSNotebookRepo,org.apache.zeppelin.notebook.repo.mock.VFSNotebookRepoMock"); - System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_ONE_WAY_SYNC.getVarName(), "false"); - System.setProperty(ConfVars.ZEPPELIN_CONFIG_FS_DIR.getVarName(), ZEPPELIN_HOME.getAbsolutePath() + "/conf"); - System.setProperty(ConfVars.ZEPPELIN_PLUGINS_DIR.getVarName(), new File("../../../plugins").getAbsolutePath()); + conf = 
ZeppelinConfiguration.load(); + noteParser = new GsonNoteParser(conf); + storage = ConfigStorage.createConfigStorage(conf); + conf.setProperty(ConfVars.ZEPPELIN_HOME.getVarName(), zeppelinHome.getAbsolutePath()); + conf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_DIR.getVarName(), mainNotebookDir.getAbsolutePath()); + conf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_STORAGE.getVarName(), "org.apache.zeppelin.notebook.repo.VFSNotebookRepo,org.apache.zeppelin.notebook.repo.mock.VFSNotebookRepoMock"); + conf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_ONE_WAY_SYNC.getVarName(), "false"); + conf.setProperty(ConfVars.ZEPPELIN_CONFIG_FS_DIR.getVarName(), zeppelinHome.getAbsolutePath() + "/conf"); + conf.setProperty(ConfVars.ZEPPELIN_PLUGINS_DIR.getVarName(), new File("../../../plugins").getAbsolutePath()); LOG.info("main Note dir : " + mainNotePath); LOG.info("secondary note dir : " + secNotePath); - conf = ZeppelinConfiguration.create(); - - ConfigStorage.reset(); - + pluginManager = new PluginManager(conf); interpreterSettingManager = new InterpreterSettingManager(conf, - mock(AngularObjectRegistryListener.class), mock(RemoteInterpreterProcessListener.class), mock(ApplicationEventListener.class)); + mock(AngularObjectRegistryListener.class), mock(RemoteInterpreterProcessListener.class), + mock(ApplicationEventListener.class), storage, pluginManager); factory = new InterpreterFactory(interpreterSettingManager); - notebookRepoSync = new NotebookRepoSync(conf); + notebookRepoSync = new NotebookRepoSync(pluginManager); + notebookRepoSync.init(conf, noteParser); noteManager = new NoteManager(notebookRepoSync, conf); - authorizationService = new AuthorizationService(noteManager, conf); - credentials = new Credentials(conf); + authorizationService = new AuthorizationService(noteManager, conf, storage); + credentials = new Credentials(conf, storage); notebook = new Notebook(conf, authorizationService, notebookRepoSync, noteManager, factory, interpreterSettingManager, credentials, null); 
anonymous = new AuthenticationInfo("anonymous"); } @AfterEach public void tearDown() throws Exception { - delete(ZEPPELIN_HOME); + FileUtils.deleteDirectory(zeppelinHome); System.clearProperty("zeppelin.isTest"); } @Test - public void testRepoCount() throws IOException { + void testRepoCount() throws IOException { assertTrue(notebookRepoSync.getMaxRepoNum() >= notebookRepoSync.getRepoCount()); } @Test - public void testSyncOnCreate() throws IOException { + void testSyncOnCreate() throws IOException { /* check that both storage systems are empty */ assertTrue(notebookRepoSync.getRepoCount() > 1); assertEquals(0, notebookRepoSync.list(0, anonymous).size()); assertEquals(0, notebookRepoSync.list(1, anonymous).size()); /* create note */ - String noteId = notebook.createNote("test", "", anonymous); + notebook.createNote("test", "", anonymous); // check that automatically saved on both storages assertEquals(1, notebookRepoSync.list(0, anonymous).size()); @@ -135,13 +143,13 @@ public void testSyncOnCreate() throws IOException { } @Test - public void testSyncOnDelete() throws IOException { + void testSyncOnDelete() throws IOException { /* create note */ assertTrue(notebookRepoSync.getRepoCount() > 1); assertEquals(0, notebookRepoSync.list(0, anonymous).size()); assertEquals(0, notebookRepoSync.list(1, anonymous).size()); - String noteId = notebook.createNote("test", "", anonymous); + notebook.createNote("test", "", anonymous); /* check that created in both storage systems */ assertEquals(1, notebookRepoSync.list(0, anonymous).size()); @@ -159,7 +167,7 @@ public void testSyncOnDelete() throws IOException { } @Test - public void testSyncUpdateMain() throws IOException { + void testSyncUpdateMain() throws IOException { /* create note */ String noteId = notebook.createNote("/test", "test", anonymous); @@ -167,7 +175,7 @@ public void testSyncUpdateMain() throws IOException { note -> { note.setInterpreterFactory(mock(InterpreterFactory.class)); Paragraph p1 = 
note.addNewParagraph(AuthenticationInfo.ANONYMOUS); - Map config = p1.getConfig(); + Map config = p1.getConfig(); config.put("enabled", true); p1.setConfig(config); p1.setText("hello world"); @@ -225,7 +233,7 @@ public void testSyncUpdateMain() throws IOException { } @Test - public void testSyncOnReloadedList() throws Exception { + void testSyncOnReloadedList() throws Exception { /* check that both storage repos are empty */ assertTrue(notebookRepoSync.getRepoCount() > 1); assertEquals(0, notebookRepoSync.list(0, anonymous).size()); @@ -251,11 +259,13 @@ public void testSyncOnReloadedList() throws Exception { } @Test - public void testOneWaySyncOnReloadedList() throws IOException, SchedulerException { - System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_DIR.getVarName(), mainNotebookDir.getAbsolutePath()); - System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_ONE_WAY_SYNC.getVarName(), "true"); - conf = ZeppelinConfiguration.create(); - notebookRepoSync = new NotebookRepoSync(conf); + void testOneWaySyncOnReloadedList() throws IOException, SchedulerException { + conf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_DIR.getVarName(), + mainNotebookDir.getAbsolutePath()); + conf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_ONE_WAY_SYNC.getVarName(), "true"); + + notebookRepoSync = new NotebookRepoSync(pluginManager); + notebookRepoSync.init(conf, noteParser); notebook = new Notebook(conf, mock(AuthorizationService.class), notebookRepoSync, new NoteManager(notebookRepoSync, conf), factory, interpreterSettingManager, credentials, null); // check that both storage repos are empty @@ -298,12 +308,14 @@ public void testOneWaySyncOnReloadedList() throws IOException, SchedulerExceptio } @Test - public void testCheckpointOneStorage() throws IOException, SchedulerException { - System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_STORAGE.getVarName(), "org.apache.zeppelin.notebook.repo.GitNotebookRepo"); - ZeppelinConfiguration vConf = ZeppelinConfiguration.create(); + void testCheckpointOneStorage() throws 
IOException, SchedulerException { + conf.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_STORAGE.getVarName(), + "org.apache.zeppelin.notebook.repo.GitNotebookRepo"); - NotebookRepoSync vRepoSync = new NotebookRepoSync(vConf); - Notebook vNotebookSync = new Notebook(vConf, mock(AuthorizationService.class), vRepoSync, new NoteManager(vRepoSync, conf), factory, interpreterSettingManager, credentials, null); + NotebookRepoSync vRepoSync = new NotebookRepoSync(pluginManager); + vRepoSync.init(conf, noteParser); + Notebook vNotebookSync = new Notebook(conf, mock(AuthorizationService.class), vRepoSync, + new NoteManager(vRepoSync, conf), factory, interpreterSettingManager, credentials, null); // one git versioned storage initialized assertEquals(1, vRepoSync.getRepoCount()); @@ -346,7 +358,7 @@ public void testCheckpointOneStorage() throws IOException, SchedulerException { } @Test - public void testSyncWithAcl() throws IOException { + void testSyncWithAcl() throws IOException { /* scenario 1 - note exists with acl on main storage */ AuthenticationInfo user1 = new AuthenticationInfo("user1"); String noteId = notebook.createNote("/test", "test", user1); @@ -422,18 +434,4 @@ public void testSyncWithAcl() throws IOException { assertEquals(0, authorizationService.getRunners(noteId).size()); assertEquals(0, authorizationService.getWriters(noteId).size()); } - - static void delete(File file) { - if (file.isFile()) { - file.delete(); - } else if (file.isDirectory()) { - File[] files = file.listFiles(); - if (files != null && files.length > 0) { - for (File f : files) { - delete(f); - } - } - file.delete(); - } - } } diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/repo/VFSNotebookRepoTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/repo/VFSNotebookRepoTest.java index 6aef8c8359e..c69d1dbed51 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/repo/VFSNotebookRepoTest.java +++ 
b/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/repo/VFSNotebookRepoTest.java @@ -18,10 +18,13 @@ package org.apache.zeppelin.notebook.repo; import com.google.common.collect.ImmutableMap; + import org.apache.commons.io.FileUtils; import org.apache.zeppelin.conf.ZeppelinConfiguration; +import org.apache.zeppelin.notebook.GsonNoteParser; import org.apache.zeppelin.notebook.Note; import org.apache.zeppelin.notebook.NoteInfo; +import org.apache.zeppelin.notebook.NoteParser; import org.apache.zeppelin.notebook.Paragraph; import org.apache.zeppelin.user.AuthenticationInfo; import org.junit.jupiter.api.AfterEach; @@ -37,20 +40,22 @@ import static org.junit.jupiter.api.Assertions.assertEquals; -public class VFSNotebookRepoTest { +class VFSNotebookRepoTest { private ZeppelinConfiguration zConf; + private NoteParser noteParser; private VFSNotebookRepo notebookRepo; private File notebookDir; @BeforeEach public void setUp() throws IOException { - notebookDir = Files.createTempDirectory("notebookDir").toFile(); - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_DIR.getVarName(), + zConf = ZeppelinConfiguration.load(); + noteParser = new GsonNoteParser(zConf); + notebookDir = Files.createTempDirectory(this.getClass().getSimpleName()).toFile(); + zConf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_NOTEBOOK_DIR.getVarName(), notebookDir.getAbsolutePath()); notebookRepo = new VFSNotebookRepo(); - zConf = ZeppelinConfiguration.create(); - notebookRepo.init(zConf); + notebookRepo.init(zConf, noteParser); } @AfterEach @@ -59,12 +64,13 @@ public void tearDown() throws IOException { } @Test - public void testBasics() throws IOException { + void testBasics() throws IOException { assertEquals(0, notebookRepo.list(AuthenticationInfo.ANONYMOUS).size()); // create note1 Note note1 = new Note(); note1.setPath("/my_project/my_note1"); + note1.setNoteParser(noteParser); Paragraph p1 = note1.insertNewParagraph(0, AuthenticationInfo.ANONYMOUS); 
p1.setText("%md hello world"); p1.setTitle("my title"); @@ -78,6 +84,7 @@ public void testBasics() throws IOException { // create note2 Note note2 = new Note(); note2.setPath("/my_note2"); + note2.setNoteParser(noteParser); Paragraph p2 = note2.insertNewParagraph(0, AuthenticationInfo.ANONYMOUS); p2.setText("%md hello world2"); p2.setTitle("my title2"); @@ -91,6 +98,7 @@ public void testBasics() throws IOException { notebookRepo.move(note2.getId(), note2.getPath(), "/my_project2/my_note2", AuthenticationInfo.ANONYMOUS); Note note3 = notebookRepo.get(note2.getId(), newPath, AuthenticationInfo.ANONYMOUS); + note3.setNoteParser(noteParser); assertEquals(note2, note3); // move folder @@ -99,6 +107,7 @@ public void testBasics() throws IOException { assertEquals(2, noteInfos.size()); Note note4 = notebookRepo.get(note3.getId(), "/my_project3/my_project2/my_note2", AuthenticationInfo.ANONYMOUS); + note4.setNoteParser(noteParser); assertEquals(note3, note4); // remote note1 @@ -107,12 +116,13 @@ public void testBasics() throws IOException { } @Test - public void testNoteNameWithColon() throws IOException { + void testNoteNameWithColon() throws IOException { assertEquals(0, notebookRepo.list(AuthenticationInfo.ANONYMOUS).size()); // create note with colon in name Note note1 = new Note(); note1.setPath("/my_project/my:note1"); + note1.setNoteParser(noteParser); Paragraph p1 = note1.insertNewParagraph(0, AuthenticationInfo.ANONYMOUS); p1.setText("%md hello world"); p1.setTitle("my title"); @@ -122,7 +132,7 @@ public void testNoteNameWithColon() throws IOException { } @Test - public void testUpdateSettings() throws IOException { + void testUpdateSettings() throws IOException { List repoSettings = notebookRepo.getSettings(AuthenticationInfo.ANONYMOUS); assertEquals(1, repoSettings.size()); NotebookRepoSettingsInfo settingInfo = repoSettings.get(0); diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/repo/mock/VFSNotebookRepoMock.java 
b/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/repo/mock/VFSNotebookRepoMock.java index 7f450dfd72e..6c75972d208 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/repo/mock/VFSNotebookRepoMock.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/notebook/repo/mock/VFSNotebookRepoMock.java @@ -17,7 +17,7 @@ package org.apache.zeppelin.notebook.repo.mock; import org.apache.zeppelin.conf.ZeppelinConfiguration; -import org.apache.zeppelin.conf.ZeppelinConfiguration.ConfVars; +import org.apache.zeppelin.notebook.NoteParser; import org.apache.zeppelin.notebook.repo.VFSNotebookRepo; import java.io.IOException; @@ -25,12 +25,13 @@ public class VFSNotebookRepoMock extends VFSNotebookRepo { public VFSNotebookRepoMock() { - String secNotebookDir = ZeppelinConfiguration.create().getNotebookDir() + "_secondary"; - System.setProperty(ConfVars.ZEPPELIN_NOTEBOOK_DIR.getVarName(), secNotebookDir); } - public void init(ZeppelinConfiguration conf) throws IOException { - super.init(conf); + @Override + public void init(ZeppelinConfiguration conf, NoteParser noteParser) throws IOException { + this.conf = conf; + this.noteParser = noteParser; + setNotebookDirectory(conf.getNotebookDir() + "_secondary"); } } diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/plugin/PluginManagerTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/plugin/PluginManagerTest.java index 16c14a9113b..f104c8d6559 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/plugin/PluginManagerTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/plugin/PluginManagerTest.java @@ -17,6 +17,7 @@ package org.apache.zeppelin.plugin; +import org.apache.zeppelin.conf.ZeppelinConfiguration; import org.apache.zeppelin.notebook.repo.GitNotebookRepo; import org.apache.zeppelin.notebook.repo.NotebookRepo; import org.junit.jupiter.api.Test; @@ -26,11 +27,11 @@ import static org.junit.jupiter.api.Assertions.assertTrue; -public class 
PluginManagerTest { +class PluginManagerTest { @Test - public void testLoadGitNotebookRepo() throws IOException { - NotebookRepo notebookRepo = PluginManager.get() + void testLoadGitNotebookRepo() throws IOException { + NotebookRepo notebookRepo = new PluginManager(ZeppelinConfiguration.load()) .loadNotebookRepo("org.apache.zeppelin.notebook.repo.GitNotebookRepo"); assertTrue(notebookRepo instanceof GitNotebookRepo); } diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/resource/DistributedResourcePoolTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/resource/DistributedResourcePoolTest.java index aea076b8258..664d2a4d97e 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/resource/DistributedResourcePoolTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/resource/DistributedResourcePoolTest.java @@ -35,7 +35,7 @@ /** * Unittest for DistributedResourcePool */ -public class DistributedResourcePoolTest extends AbstractInterpreterTest { +class DistributedResourcePoolTest extends AbstractInterpreterTest { private RemoteInterpreter intp1; private RemoteInterpreter intp2; @@ -73,7 +73,7 @@ public void tearDown() throws Exception { } @Test - public void testRemoteDistributedResourcePool() throws InterpreterException { + void testRemoteDistributedResourcePool() throws InterpreterException { Gson gson = new Gson(); InterpreterResult ret; intp1.interpret("put key1 value1", context); @@ -93,7 +93,7 @@ public void testRemoteDistributedResourcePool() throws InterpreterException { } @Test - public void testDistributedResourcePool() { + void testDistributedResourcePool() { final LocalResourcePool pool2 = new LocalResourcePool("pool2"); final LocalResourcePool pool3 = new LocalResourcePool("pool3"); @@ -155,7 +155,7 @@ public Resource invokeMethod(ResourceId id, String methodName, Class[] paramType } @Test - public void testResourcePoolUtils() throws InterpreterException { + void testResourcePoolUtils() throws 
InterpreterException { Gson gson = new Gson(); // when create some resources @@ -193,7 +193,7 @@ public void testResourcePoolUtils() throws InterpreterException { } @Test - public void testResourceInvokeMethod() throws InterpreterException { + void testResourceInvokeMethod() throws InterpreterException { Gson gson = new Gson(); InterpreterResult ret; intp1.interpret("put key1 hey", context); diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/scheduler/RemoteSchedulerTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/scheduler/RemoteSchedulerTest.java index de9484a38b0..8bb9deafd70 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/scheduler/RemoteSchedulerTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/scheduler/RemoteSchedulerTest.java @@ -20,11 +20,8 @@ import org.apache.zeppelin.interpreter.AbstractInterpreterTest; import org.apache.zeppelin.interpreter.InterpreterContext; import org.apache.zeppelin.interpreter.InterpreterException; -import org.apache.zeppelin.interpreter.InterpreterResult; import org.apache.zeppelin.interpreter.InterpreterSetting; import org.apache.zeppelin.interpreter.remote.RemoteInterpreter; -import org.apache.zeppelin.interpreter.remote.RemoteInterpreterProcessListener; -import org.apache.zeppelin.interpreter.thrift.ParagraphInfo; import org.apache.zeppelin.resource.LocalResourcePool; import org.apache.zeppelin.scheduler.Job.Status; import org.apache.zeppelin.user.AuthenticationInfo; @@ -32,8 +29,6 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import java.io.IOException; -import java.util.List; import java.util.Map; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -41,8 +36,8 @@ import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; -public class RemoteSchedulerTest extends AbstractInterpreterTest - implements RemoteInterpreterProcessListener { +class RemoteSchedulerTest 
extends AbstractInterpreterTest +{ private InterpreterSetting interpreterSetting; private SchedulerFactory schedulerSvc; @@ -66,7 +61,7 @@ public void tearDown() { } @Test - public void test() throws Exception { + void test() throws Exception { final RemoteInterpreter intpA = (RemoteInterpreter) interpreterSetting.getInterpreter("user1", note1Id, "mock"); intpA.open(); @@ -136,7 +131,7 @@ public void setResult(Object results) { } @Test - public void testAbortOnPending() throws Exception { + void testAbortOnPending() throws Exception { final RemoteInterpreter intpA = (RemoteInterpreter) interpreterSetting.getInterpreter("user1", note1Id, "mock"); intpA.open(); @@ -269,38 +264,4 @@ public void setResult(Object results) { schedulerSvc.removeScheduler("test"); } - @Override - public void onOutputAppend(String noteId, String paragraphId, int index, String output) { - - } - - @Override - public void onOutputUpdated(String noteId, String paragraphId, int index, InterpreterResult.Type type, String output) { - - } - - @Override - public void onOutputClear(String noteId, String paragraphId) { - - } - - @Override - public void runParagraphs(String noteId, List paragraphIndices, List paragraphIds, String curParagraphId) throws IOException { - - } - - @Override - public void onParaInfosReceived(String noteId, String paragraphId, - String interpreterSettingId, Map metaInfos) { - } - - @Override - public List getParagraphList(String user, String noteId) { - return null; - } - - @Override - public void checkpointOutput(String noteId, String paragraphId) { - - } } diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/search/LuceneSearchTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/search/LuceneSearchTest.java index 0e554fe218f..ecc7e9f9306 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/search/LuceneSearchTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/search/LuceneSearchTest.java @@ -49,7 +49,7 @@ import 
org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -public class LuceneSearchTest { +class LuceneSearchTest { private Notebook notebook; private InterpreterSettingManager interpreterSettingManager; @@ -59,10 +59,12 @@ public class LuceneSearchTest { @BeforeEach public void startUp() throws IOException { - indexDir = Files.createTempDirectory("lucene").toFile(); - System.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_SEARCH_INDEX_PATH.getVarName(), indexDir.getAbsolutePath()); + indexDir = Files.createTempDirectory(this.getClass().getSimpleName()).toFile(); + ZeppelinConfiguration conf = ZeppelinConfiguration.load(); + conf.setProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_SEARCH_INDEX_PATH.getVarName(), + indexDir.getAbsolutePath()); + - ZeppelinConfiguration conf = ZeppelinConfiguration.create(); noteManager = new NoteManager(new InMemoryNotebookRepo(), conf); interpreterSettingManager = mock(InterpreterSettingManager.class); InterpreterSetting defaultInterpreterSetting = mock(InterpreterSetting.class); @@ -72,7 +74,7 @@ public void startUp() throws IOException { mock(NotebookRepo.class), noteManager, mock(InterpreterFactory.class), interpreterSettingManager, mock(Credentials.class), null); - noteSearchService = new LuceneSearch(ZeppelinConfiguration.create(), notebook); + noteSearchService = new LuceneSearch(conf, notebook); } @AfterEach @@ -89,9 +91,9 @@ private void drainSearchEvents() throws InterruptedException { } @Test - public void canIndexAndQuery() throws IOException, InterruptedException { + void canIndexAndQuery() throws IOException, InterruptedException { // given - String note1Id = newNoteWithParagraph("Notebook1", "test"); + newNoteWithParagraph("Notebook1", "test"); String note2Id = newNoteWithParagraphs("Notebook2", "not test", "not test at all"); drainSearchEvents(); @@ -109,10 +111,10 @@ public void canIndexAndQuery() throws IOException, InterruptedException { } @Test - public void canIndexAndQueryByNotebookName() throws 
IOException, InterruptedException { + void canIndexAndQueryByNotebookName() throws IOException, InterruptedException { // given String note1Id = newNoteWithParagraph("Notebook1", "test"); - String note2Id = newNoteWithParagraphs("Notebook2", "not test", "not test at all"); + newNoteWithParagraphs("Notebook2", "not test", "not test at all"); drainSearchEvents(); // when @@ -125,10 +127,10 @@ public void canIndexAndQueryByNotebookName() throws IOException, InterruptedExce } @Test - public void canIndexAndQueryByParagraphTitle() throws IOException, InterruptedException { + void canIndexAndQueryByParagraphTitle() throws IOException, InterruptedException { // given - String note1Id = newNoteWithParagraph("Notebook1", "test", "testingTitleSearch"); - String note2Id = newNoteWithParagraph("Notebook2", "not test", "notTestingTitleSearch"); + newNoteWithParagraph("Notebook1", "test", "testingTitleSearch"); + newNoteWithParagraph("Notebook2", "not test", "notTestingTitleSearch"); drainSearchEvents(); // when @@ -147,7 +149,7 @@ public void canIndexAndQueryByParagraphTitle() throws IOException, InterruptedEx } @Test - public void indexKeyContract() throws IOException, InterruptedException { + void indexKeyContract() throws IOException, InterruptedException { // given String note1Id = newNoteWithParagraph("Notebook1", "test"); drainSearchEvents(); @@ -163,7 +165,7 @@ public void indexKeyContract() throws IOException, InterruptedException { } @Test // (expected=IllegalStateException.class) - public void canNotSearchBeforeIndexing() { + void canNotSearchBeforeIndexing() { // given NO noteSearchService.index() was called // when List> result = noteSearchService.query("anything"); @@ -174,9 +176,9 @@ public void canNotSearchBeforeIndexing() { } @Test - public void canIndexAndReIndex() throws IOException, InterruptedException { + void canIndexAndReIndex() throws IOException, InterruptedException { // given - String note1Id = newNoteWithParagraph("Notebook1", "test"); + 
newNoteWithParagraph("Notebook1", "test"); String note2Id = newNoteWithParagraphs("Notebook2", "not test", "not test at all"); drainSearchEvents(); @@ -199,7 +201,7 @@ public void canIndexAndReIndex() throws IOException, InterruptedException { } @Test - public void canDeleteNull() throws IOException { + void canDeleteNull() { // give // looks like a bug in web UI: it tries to delete a note twice (after it has just been deleted) // when @@ -207,9 +209,9 @@ public void canDeleteNull() throws IOException { } @Test - public void canDeleteFromIndex() throws IOException, InterruptedException { + void canDeleteFromIndex() throws IOException, InterruptedException { // given - String note1Id = newNoteWithParagraph("Notebook1", "test"); + newNoteWithParagraph("Notebook1", "test"); String note2Id = newNoteWithParagraphs("Notebook2", "not test", "not test at all"); drainSearchEvents(); @@ -228,10 +230,10 @@ public void canDeleteFromIndex() throws IOException, InterruptedException { } @Test - public void indexParagraphUpdatedOnNoteSave() throws IOException, InterruptedException { + void indexParagraphUpdatedOnNoteSave() throws IOException, InterruptedException { // given: total 2 notebooks, 3 paragraphs String note1Id = newNoteWithParagraph("Notebook1", "test"); - String note2Id = newNoteWithParagraphs("Notebook2", "not test", "not test at all"); + newNoteWithParagraphs("Notebook2", "not test", "not test at all"); drainSearchEvents(); assertEquals(3, resultForQuery("test").size()); @@ -261,10 +263,10 @@ public void indexParagraphUpdatedOnNoteSave() throws IOException, InterruptedExc } @Test - public void indexNoteNameUpdatedOnNoteSave() throws IOException, InterruptedException { + void indexNoteNameUpdatedOnNoteSave() throws IOException, InterruptedException { // given: total 2 notebooks, 3 paragraphs String note1Id = newNoteWithParagraph("Notebook1", "test"); - String note2Id = newNoteWithParagraphs("Notebook2", "not test", "not test at all"); + 
newNoteWithParagraphs("Notebook2", "not test", "not test at all"); drainSearchEvents(); assertEquals(3, resultForQuery("test").size()); @@ -277,7 +279,7 @@ public void indexNoteNameUpdatedOnNoteSave() throws IOException, InterruptedExce return null; }); drainSearchEvents(); - Thread.sleep(1000); + Thread.sleep(3000); // then assertTrue(resultForQuery("Notebook1").isEmpty()); assertFalse(resultForQuery("NotebookN").isEmpty()); diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/storage/LocalConfigStorageTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/storage/LocalConfigStorageTest.java index 64a675b6a0f..8c9decab53c 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/storage/LocalConfigStorageTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/storage/LocalConfigStorageTest.java @@ -33,11 +33,11 @@ import static org.junit.jupiter.api.Assertions.*; -public class LocalConfigStorageTest { +class LocalConfigStorageTest { public static final String TEST_STRING = "this is a test!"; @Test - public void testWritingAtomically() throws IOException { + void testWritingAtomically() throws IOException { final Path destination = Files.createTempFile("test-", "file"); final File destinationFile = destination.toFile(); try { @@ -52,7 +52,7 @@ public void testWritingAtomically() throws IOException { } @Test - public void testWritingAtomicallyNonExistingDir() throws IOException { + void testWritingAtomicallyNonExistingDir() throws IOException { Random rnd = new Random(); final Path destDir = Paths.get(System.getProperty("java.io.tmpdir"), "non-existing-" + rnd.nextLong()); final Path destination = Paths.get(destDir.toString(),"test-" + rnd.nextLong() + "-file"); @@ -70,7 +70,7 @@ public void testWritingAtomicallyNonExistingDir() throws IOException { } @Test - public void testReading() throws IOException { + void testReading() throws IOException { final Path destination = Files.createTempFile("test-", "file"); final File 
destinationFile = destination.toFile(); diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/user/CredentialsTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/user/CredentialsTest.java index 34aeab56a05..494716dea5f 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/user/CredentialsTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/user/CredentialsTest.java @@ -23,10 +23,10 @@ import static org.junit.jupiter.api.Assertions.assertEquals; -public class CredentialsTest { +class CredentialsTest { @Test - public void testDefaultProperty() throws IOException { + void testDefaultProperty() throws IOException { Credentials credentials = new Credentials(); UserCredentials userCredentials = new UserCredentials(); UsernamePassword up1 = new UsernamePassword("user2", "password"); diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/user/EncryptorTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/user/EncryptorTest.java index 1e57beeaa6c..73613e538a4 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/user/EncryptorTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/user/EncryptorTest.java @@ -24,10 +24,10 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotEquals; -public class EncryptorTest { +class EncryptorTest { @Test - public void testEncryption() throws IOException { + void testEncryption() throws IOException { Encryptor encryptor = new Encryptor("foobar1234567890"); String input = "test"; diff --git a/zeppelin-zengine/src/test/java/org/apache/zeppelin/util/UtilTest.java b/zeppelin-zengine/src/test/java/org/apache/zeppelin/util/UtilTest.java index 57727f7edc6..3dbd9ee499c 100644 --- a/zeppelin-zengine/src/test/java/org/apache/zeppelin/util/UtilTest.java +++ b/zeppelin-zengine/src/test/java/org/apache/zeppelin/util/UtilTest.java @@ -21,15 +21,15 @@ import static 
org.junit.jupiter.api.Assertions.assertNotNull; -public class UtilTest { +class UtilTest { @Test - public void getVersionTest() { + void getVersionTest() { assertNotNull(Util.getVersion()); } @Test - public void getGitInfoTest() { + void getGitInfoTest() { assertNotNull(Util.getGitCommitId()); assertNotNull(Util.getGitTimestamp()); }