// Jenkins release pipeline for h2o-3: builds, tags, and publishes a release
// (S3, PyPI, Conda, Nexus) using helpers from the shared library below.
@Library('test-shared-library') _

// Label of the Jenkins agents this release build may run on.
def NODE_LABEL = 'h2o-3'

// Shared pipeline context; created in the Checkout stage from
// scripts/jenkins/groovy/pipelineContext.groovy and used by every stage.
def pipelineContext = null
// Overall outcome; presumably set to 'SUCCESS' near the end of the (unseen)
// try block — TODO confirm against the rest of the file.
def result = 'FAILURE'

try {
    ansiColor('xterm') {
        timestamps {

            node(NODE_LABEL) {
                def insideDocker = null // helper closure; loaded from the repo in the Checkout stage

                // Each build checks out into a directory named after its build number
                // (used by the Checkout stage's dir(...) step and all later cd's).
                env.BUILD_NUMBER_DIR = currentBuild.number
                // Shared smalldata/bigdata location symlinked into the workspace by the
                // Build stage instead of running ./gradlew syncSmalldata.
                env.DATA_DIR = "/mnt/h2o-shared-data"

                // -XX:MaxPermSize was removed in Java 8 (the build runs on
                // java-8-oracle, see the Build stage) and only produced an
                // "ignoring option" JVM warning, so it is dropped here.
                env.GRADLE_OPTS = "-Xmx4g"

                // Toolchain versions activated inside the release Docker image.
                env.PYTHON_VERSION = '3.7'
                env.R_VERSION = '3.4.1'

                // Dump the full environment for debugging.
                sh 'printenv | sort'

                final String CHECKOUT_STAGE_NAME = 'Checkout'
                stage(CHECKOUT_STAGE_NAME) {

                    def scmEnv = null
                    // Fresh checkout into the per-build directory so leftovers from
                    // previous builds on this node cannot interfere.
                    dir(env.BUILD_NUMBER_DIR) {
                        deleteDir()
                        // Shared-library helper; presumably timeout/retry values — TODO confirm semantics.
                        retryWithTimeout(60, 5) {
                            scmEnv = checkout scm
                        }

                        // Record the checked-out revision; written into gradle/git.properties
                        // by the Build stage.
                        env.THIS_BUILD_GIT_HASH_LONG = sh(script: 'git rev-parse --verify HEAD', returnStdout: true).trim()
                        env.THIS_BUILD_GIT_HASH_SHORT = sh(script: 'git describe --always', returnStdout: true).trim()
                    }

                    // Load shared pipeline helpers from the freshly checked-out repo.
                    insideDocker = load("${env.BUILD_NUMBER_DIR}/scripts/jenkins/groovy/insideDocker.groovy")

                    def pipelineContextFactory = load("${env.BUILD_NUMBER_DIR}/scripts/jenkins/groovy/pipelineContext.groovy")
                    pipelineContext = pipelineContextFactory(env.BUILD_NUMBER_DIR, 'MODE_RELEASE', scmEnv, true)
                    // Branch names may contain '/' (e.g. "rel/3.46"); flatten so the name
                    // is usable in S3 paths and git tag names below.
                    env.BRANCH_NAME = env.BRANCH_NAME.replaceAll('/', '_')

                    pipelineContext.getBuildSummary().addStageSummary(this, CHECKOUT_STAGE_NAME, env.BUILD_NUMBER_DIR)

                    setReleaseJobProperties(pipelineContext)

                    pipelineContext.getBuildSummary().addDetailsSection(this)
                    // Extract the bare numeric version (e.g. "3.46.0") from gradle.properties.
                    final String version = sh(script: "cd ${env.BUILD_NUMBER_DIR} && cat gradle.properties | grep -Eo '^version=[0-9\\.]+' | grep -Eo '[0-9\\.]+'", returnStdout: true).trim()
                    // Human-readable summary of the release parameters shown on the build page.
                    String releaseContent = """
                        <ul>
                            <li><strong>Version:</strong> ${version}.${currentBuild.number}</li>
                            <li><strong>Node:</strong> ${env.NODE_NAME}</li>
                            <li><strong>Test Release:</strong> ${params.TEST_RELEASE}</li>
                            <li><strong>Test Credentials:</strong> ${params.TEST_CREDENTIALS}</li>
                            <li><strong>Nexus Upload:</strong> ${params.UPLOAD_NEXUS}</li>
                            <li><strong>Build Hadoop:</strong> ${params.BUILD_HADOOP}</li>
                            <li><strong>Build PyPI:</strong> ${params.BUILD_PYPI}</li>
                            <li><strong>PyPI Upload:</strong> ${params.UPLOAD_TO_PYPI}</li>
                            <li><strong>Build Conda:</strong> ${params.BUILD_CONDA}</li>
                            <li><strong>Anaconda Upload:</strong> ${params.UPLOAD_TO_ANACONDA}</li>
                            <li><strong>Update Top-level Latest Links</strong> ${params.UPDATE_LATEST}</li>
                            <li><strong>Update Branch Latest Links</strong> ${params.UPDATE_LATEST_BRANCH}</li>
                            <li><strong>Publish H2O K8S Docker image:</strong> ${params.BUILD_H2O_DOCKER}</li>
                            <li><strong>S3 Root:</strong> ${env.S3_ROOT}</li>
                        </ul>
                    """
                    pipelineContext.getBuildSummary().addSection(this, 'release', 'Release', releaseContent)

                    pipelineContext.getBuildSummary().setStageDetails(this, CHECKOUT_STAGE_NAME, env.NODE_NAME, env.WORKSPACE)
                    pipelineContext.getBuildSummary().markStageSuccessful(this, CHECKOUT_STAGE_NAME)
                }

                // Optional pre-flight stage: verify every external credential used later
                // in the release (PyPI, Anaconda, Docker Hub, AWS) before doing real work.
                if (params.TEST_CREDENTIALS) {
                    final String TEST_CREDENTIALS_STAGE_NAME = 'Test Credentials'
                    stage(TEST_CREDENTIALS_STAGE_NAME) {
                        try {
                            pipelineContext.getBuildSummary().addStageSummary(this, TEST_CREDENTIALS_STAGE_NAME, env.BUILD_NUMBER_DIR)
                            pipelineContext.getBuildSummary().setStageDetails(this, TEST_CREDENTIALS_STAGE_NAME, env.NODE_NAME, env.WORKSPACE)

                            echo "Testing credentials for PyPI, Anaconda, Docker, and S3..."

                            // Test PyPI credentials
                            // NOTE(review): a plain GET to https://upload.pypi.org/ may return
                            // the same status regardless of whether the auth is valid, so this
                            // likely does not actually verify the credentials — confirm.
                            withCredentials([usernamePassword(credentialsId: 'pypi-credentials', usernameVariable: 'TWINE_USERNAME', passwordVariable: 'TWINE_PASSWORD')]) {
                                insideDocker([], pipelineContext.getBuildConfig().getReleaseImage(), pipelineContext.getBuildConfig().DOCKER_REGISTRY, pipelineContext.getBuildConfig(), 2, 'HOURS') {
                                    sh """
                                        echo "=== Testing PyPI Credentials ==="
                                        echo "Activating Python ${env.PYTHON_VERSION}"
                                        . /envs/h2o_env_python${env.PYTHON_VERSION}/bin/activate

                                        # Test PyPI login by checking if credentials work
                                        python -c "import requests; auth=('\$TWINE_USERNAME', '\$TWINE_PASSWORD'); r=requests.get('https://upload.pypi.org/', auth=auth); print('PyPI credentials: OK' if r.status_code in [200, 405] else 'PyPI credentials: FAILED')"
                                    """
                                }
                            }

                            // Test Anaconda credentials: a full login/whoami/logout round trip.
                            withCredentials([usernamePassword(credentialsId: 'anaconda-credentials', usernameVariable: 'ANACONDA_USERNAME', passwordVariable: 'ANACONDA_PASSWORD')]) {
                                insideDocker([], pipelineContext.getBuildConfig().getReleaseImage(), pipelineContext.getBuildConfig().DOCKER_REGISTRY, pipelineContext.getBuildConfig(), 2, 'HOURS') {
                                    sh '''
                                        echo "=== Testing Anaconda Credentials ==="
                                        anaconda login --username "$ANACONDA_USERNAME" --password "$ANACONDA_PASSWORD"
                                        anaconda whoami
                                        anaconda logout
                                        echo "Anaconda credentials: OK"
                                    '''
                                }
                            }

                            // Test Docker credentials (runs on the host, not inside a container).
                            withCredentials([usernamePassword(credentialsId: 'dockerhub', passwordVariable: 'DOCKERHUB_PASSWORD', usernameVariable: 'DOCKERHUB_USERNAME')]) {
                                sh '''
                                    echo "=== Testing Docker Credentials ==="
                                    echo "$DOCKERHUB_PASSWORD" | docker login --username "$DOCKERHUB_USERNAME" --password-stdin
                                    echo "Docker credentials: OK"
                                    docker logout
                                '''
                            }

                            // Test S3/AWS credentials; the awscli image presumably inherits
                            // AWS credentials from the agent/container setup — TODO confirm.
                            insideDocker([], pipelineContext.getBuildConfig().AWSCLI_IMAGE, pipelineContext.getBuildConfig().DOCKER_REGISTRY, pipelineContext.getBuildConfig(), 2, 'HOURS') {
                                sh """
                                    echo "=== Testing S3/AWS Credentials ==="
                                    aws sts get-caller-identity
                                    echo "S3/AWS credentials: OK"
                                """
                            }

                            echo "All credentials tested successfully!"
                            pipelineContext.getBuildSummary().markStageSuccessful(this, TEST_CREDENTIALS_STAGE_NAME)
                        } catch (Exception e) {
                            // Any credential failure aborts the release before it starts.
                            pipelineContext.getBuildSummary().markStageFailed(this, TEST_CREDENTIALS_STAGE_NAME)
                            throw e
                        }
                    }
                }

                // Main build: compiles H2O, builds the distribution zip, and optionally
                // pushes Maven artifacts toward Maven Central via the OSSRH staging API.
                final String BUILD_STAGE_NAME = 'Build'
                stage(BUILD_STAGE_NAME) {
                    try {
                        pipelineContext.getBuildSummary().addStageSummary(this, BUILD_STAGE_NAME, env.BUILD_NUMBER_DIR)
                        pipelineContext.getBuildSummary().setStageDetails(this, BUILD_STAGE_NAME, env.NODE_NAME, env.WORKSPACE)
                        // Signing config (gradle.properties + secret key ring) comes in as
                        // secret files and is bind-mounted into the build container below.
                        withCredentials([file(credentialsId: 'release-gradle.properties', variable: 'GRADLE_PROPERTIES_PATH'), file(credentialsId: 'release-secret-key-ring-file', variable: 'RING_FILE_PATH'), string(credentialsId: "central-portal-bearer-token", variable: "BEARER_TOKEN")]) {
                            insideDocker([], pipelineContext.getBuildConfig().getReleaseImage(), pipelineContext.getBuildConfig().DOCKER_REGISTRY, pipelineContext.getBuildConfig(), 2, 'HOURS', "-v ${GRADLE_PROPERTIES_PATH}:${GRADLE_PROPERTIES_PATH} -v ${RING_FILE_PATH}:${RING_FILE_PATH}") {
                                printReleaseConfiguration(pipelineContext)
                                // NOTE(review): the script reads $DO_RELEASE, $TEST_RELEASE and
                                // $UPLOAD_NEXUS from the container environment; they are not set
                                // in this chunk — confirm insideDocker/job parameters export them.
                                sh """
                                # Log commands.
                                set -x
                                # Stop on error.
                                set -e

                                export BUILD_HADOOP=${params.BUILD_HADOOP}
                                export JAVA_HOME=/usr/lib/jvm/java-8-oracle
                                echo "Activating Python ${env.PYTHON_VERSION}"
                                . /envs/h2o_env_python${env.PYTHON_VERSION}/bin/activate

                                # FIXME: Version of docutils should be installed in install_python_version to fix PUBDEV-8327 issue
                                pip install docutils==0.16
                                pip install "jinja2<3.1"
                                pip install sphinx-sitemap==2.7.2

                                mkdir -p ${env.BUILD_NUMBER_DIR}
                                cd ${env.BUILD_NUMBER_DIR}

                                # Log some stuff for debug purposes.
                                date
                                pwd
                                env
                                echo \$PATH
                                which java
                                java -version
                                du -h
                                pip freeze

                                # Fix git
                                # TODO github Find better solution
                                git config --global --add safe.directory '*'

                                # Update the build number.
                                mkdir -p ci
                                echo "BUILD_NUMBER=${currentBuild.number}" > gradle/buildnumber.properties
                                echo "BUILD_BRANCH_NAME=${env.BRANCH_NAME}" > gradle/git.properties
                                echo "BUILD_HASH=${env.THIS_BUILD_GIT_HASH_LONG}" >> gradle/git.properties
                                echo "BUILD_HASH_SHORT=${env.THIS_BUILD_GIT_HASH_SHORT}" >> gradle/git.properties

                                # Log some git stuff for debug purposes.
                                echo
                                echo GIT INFO
                                echo
                                git branch | grep '*' | sed 's/* //'
                                git log -1 --format="%H"
                                git describe --always --dirty
                                git status

                                # Do the build.
                                if [ -n "${env.DATA_DIR}" ]; then
                                    rm -f -r smalldata
                                    ln -s "${env.DATA_DIR}/smalldata"
                                    rm -f -r bigdata
                                    ln -s "${env.DATA_DIR}/bigdata"
                                else
                                    ./gradlew syncSmalldata
                                fi
                                if [ \$DO_RELEASE ]; then
                                    echo 'Copy gradle properties and modify gradle.properties'
                                    cp ${GRADLE_PROPERTIES_PATH} \$GRADLE_USER_HOME/gradle.properties
                                    chmod +w \$GRADLE_USER_HOME/gradle.properties
                                    echo "signing.secretKeyRingFile=${RING_FILE_PATH}" >> \$GRADLE_USER_HOME/gradle.properties
                                    chmod -w \$GRADLE_USER_HOME/gradle.properties
                                fi
                                ./gradlew build -x test
                                if [ "\$TEST_RELEASE" = true ]; then
                                    ./gradlew buildH2oDevDist
                                else
                                    ./gradlew -PvalidateRelease buildH2oDevDist
                                fi

                                # Run OSSRH commands if UPLOAD_NEXUS is enabled
                                if [ "\$UPLOAD_NEXUS" = true ]; then
                                    # Debug output - see if gradle published to OSSRH Staging API
                                    curl -H "Authorization: Bearer ${BEARER_TOKEN}" \
                                     'https://ossrh-staging-api.central.sonatype.com/manual/search/repositories?profile_id=ai.h2o'

                                    # Close and send to central portal to be able to manually publish
                                    curl -X 'POST' \
                                        'https://ossrh-staging-api.central.sonatype.com/manual/upload/defaultRepository/ai.h2o?publishing_type=user_managed' \
                                        -H 'accept: */*' \
                                        -H "Authorization: Bearer ${BEARER_TOKEN}" \
                                        -d '' \
                                        --connect-timeout 120 \
                                        --max-time 1200 \
                                        --retry 5 \
                                        --retry-delay 5 \
                                        --retry-max-time 2400
                                fi
                            """
                            }
                        }
                        pipelineContext.getBuildSummary().markStageSuccessful(this, BUILD_STAGE_NAME)
                    } catch (Exception e) {
                        // A build failure is fatal for the whole release.
                        pipelineContext.getBuildSummary().markStageFailed(this, BUILD_STAGE_NAME)
                        throw e
                    }
                }

                // Capture the version string written by the build (target/project_version)
                // and the SHA-256 of the distribution zip; both are reused by later stages
                // (git tagging, S3/PyPI/Conda paths).
                env.PROJECT_VERSION = sh(returnStdout: true, script: "cd ${env.BUILD_NUMBER_DIR} && cat target/project_version").trim()
                env.SHA256_HASH = sh(returnStdout: true, script: "cd ${env.BUILD_NUMBER_DIR} && sha256sum target/h2o-*${currentBuild.number}.zip").trim()

                // Tag the release in git (skipped for test releases). For stable
                // (non-nightly) releases, also move the jenkins-rel-latest-stable tag.
                if (!params.TEST_RELEASE) {
                    final String TAG_STAGE_NAME = 'Create Git Tag'
                    stage(TAG_STAGE_NAME) {
                        try {
                            pipelineContext.getBuildSummary().addStageSummary(this, TAG_STAGE_NAME, env.BUILD_NUMBER_DIR)
                            pipelineContext.getBuildSummary().setStageDetails(this, TAG_STAGE_NAME, env.NODE_NAME, env.WORKSPACE)
                            withCredentials([
                                usernamePassword(credentialsId: 'H2O-OPS-GH-TOKEN', usernameVariable: 'GH_USERNAME', passwordVariable: 'GH_TOKEN'),
                                string(credentialsId: "H2O_OPS_GH_EMAIL", variable: "GH_EMAIL"),
                            ]) {
                                // Single-quoted script: the secrets are expanded by the shell
                                // from environment variables instead of being interpolated into
                                // the Groovy string, which would bypass Jenkins' credential
                                // masking and leak the token into the build log.
                                sh '''
                                    git config --global user.name "$GH_USERNAME"
                                    git config --global user.email "$GH_EMAIL"
                                    cd "$BUILD_NUMBER_DIR"
                                    git remote set-url origin "https://${GH_TOKEN}@github.com/h2oai/h2o-3.git"
                                    git fetch --tags
                                '''
                            }
                            // Per-build tag, e.g. jenkins-rel_3_46-1234.
                            sh """
                                cd ${env.BUILD_NUMBER_DIR}
                                git tag -a jenkins-${env.BRANCH_NAME}-${currentBuild.number} -m "Jenkins build branch_name ${env.BRANCH_NAME} build_number ${env.PROJECT_VERSION}"
                            """
                            if ((env.NIGHTLY_BUILD == null || env.NIGHTLY_BUILD.toLowerCase() == 'false')) {
                                // NOTE(review): `git tag -d` fails (and fails the stage) when
                                // jenkins-rel-latest-stable does not exist locally — presumably
                                // guaranteed by the `git fetch --tags` above; confirm.
                                sh """
                                    cd ${env.BUILD_NUMBER_DIR}
                                    echo "Process release tags"
                                    git tag -d jenkins-rel-latest-stable
                                    git push origin :refs/tags/jenkins-rel-latest-stable
                                    git tag -a jenkins-rel-latest-stable -f -m "Jenkins build branch_name ${env.BRANCH_NAME} build_number ${env.PROJECT_VERSION}"
                                    git tag -a jenkins-${env.PROJECT_VERSION} -m "Jenkins build branch_name ${env.BRANCH_NAME} build_number ${env.PROJECT_VERSION}"
                                """
                            }
                            sh """
                                cd ${env.BUILD_NUMBER_DIR}
                                git push --tags
                            """
                            pipelineContext.getBuildSummary().markStageSuccessful(this, TAG_STAGE_NAME)
                        } catch (Exception e) {
                            pipelineContext.getBuildSummary().markStageFailed(this, TAG_STAGE_NAME)
                            throw e
                        }
                    }
                }
                
                // Publish the build output to S3 and prepare the "latest" pointer files.
                def PUBLISH_STAGE_NAME = 'Publish to S3'
                stage(PUBLISH_STAGE_NAME) {
                    try {
                        pipelineContext.getBuildSummary().addStageSummary(this, PUBLISH_STAGE_NAME, env.BUILD_NUMBER_DIR)
                        pipelineContext.getBuildSummary().setStageDetails(this, PUBLISH_STAGE_NAME, env.NODE_NAME, env.WORKSPACE)
                        insideDocker([], pipelineContext.getBuildConfig().AWSCLI_IMAGE, pipelineContext.getBuildConfig().DOCKER_REGISTRY, pipelineContext.getBuildConfig(), 3, 'HOURS') {
                            sh """
                                cd ${env.BUILD_NUMBER_DIR}
                                # Publish the output to S3.
                                aws s3 sync target/ ${env.S3_ROOT}/${env.BRANCH_NAME}/${currentBuild.number}/ --acl public-read
                            """
                            if (params.UPDATE_LATEST) {
                                // Also refresh the top-level latest-stable R/Python trees.
                                sh """
                                    cd ${env.BUILD_NUMBER_DIR}
                                    aws s3 sync target/R/ ${env.S3_ROOT}/latest_stable_R/ --acl public-read
                                    aws s3 sync target/Python/ ${env.S3_ROOT}/latest_stable_Py/ --acl public-read
                                """
                            }
                            // Write the "latest" pointer files into ./buildh2odev.tmp; they are
                            // uploaded to S3 by the later "Update ... Latest Links" stages.
                            sh """
                                cd ${env.BUILD_NUMBER_DIR}
                                echo UPDATE LATEST POINTER
                                tmpdir=./buildh2odev.tmp
                                mkdir -p \${tmpdir}
                                echo ${currentBuild.number} > \${tmpdir}/latest
                                echo "<head>" > \${tmpdir}/latest.html
                                echo "<meta http-equiv=\\"refresh\\" content=\\"0; url=${currentBuild.number}/index.html\\" />" >> \${tmpdir}/latest.html
                                echo "</head>" >> \${tmpdir}/latest.html
                            """
                        }
                        pipelineContext.getBuildSummary().markStageSuccessful(this, PUBLISH_STAGE_NAME)
                    } catch (Exception e) {
                        pipelineContext.getBuildSummary().markStageFailed(this, PUBLISH_STAGE_NAME)
                        throw e
                    }
                }

                // Upload the per-branch "latest" pointer files created by the
                // Publish stage (./buildh2odev.tmp in the same workspace).
                def UPDATE_BRANCH_LATEST_STAGE_NAME = 'Update Branch Latest Links'
                if (params.UPDATE_LATEST_BRANCH) {
                    stage (UPDATE_BRANCH_LATEST_STAGE_NAME) {
                        try {
                            pipelineContext.getBuildSummary().addStageSummary(this, UPDATE_BRANCH_LATEST_STAGE_NAME, env.BUILD_NUMBER_DIR)
                            pipelineContext.getBuildSummary().setStageDetails(this, UPDATE_BRANCH_LATEST_STAGE_NAME, env.NODE_NAME, env.WORKSPACE)
                            insideDocker([], pipelineContext.getBuildConfig().AWSCLI_IMAGE, pipelineContext.getBuildConfig().DOCKER_REGISTRY, pipelineContext.getBuildConfig(), 2, 'HOURS') {
                                sh """
                                    cd ${env.BUILD_NUMBER_DIR}
                                    tmpdir=./buildh2odev.tmp

                                    aws s3 cp \${tmpdir}/latest ${env.S3_ROOT}/${env.BRANCH_NAME}/latest --acl public-read
                                    aws s3 cp \${tmpdir}/latest.html ${env.S3_ROOT}/${env.BRANCH_NAME}/latest.html --acl public-read
                                    aws s3 cp \${tmpdir}/latest.html ${env.S3_ROOT}/${env.BRANCH_NAME}/index.html --acl public-read
                                """
                            }
                            pipelineContext.getBuildSummary().markStageSuccessful(this, UPDATE_BRANCH_LATEST_STAGE_NAME)
                        } catch (Exception e) {
                            pipelineContext.getBuildSummary().markStageFailed(this, UPDATE_BRANCH_LATEST_STAGE_NAME)
                            throw e
                        }
                    }
                }

                // Refresh the top-level latest_stable* redirect pages on S3 so
                // h2o-release links point at this build.
                def UPDATE_TOP_LEVEL_LATEST_STAGE_NAME = 'Update Top-level Latest Links'
                if (params.UPDATE_LATEST) {
                    stage(UPDATE_TOP_LEVEL_LATEST_STAGE_NAME) {
                        try {
                            pipelineContext.getBuildSummary().addStageSummary(this, UPDATE_TOP_LEVEL_LATEST_STAGE_NAME, env.BUILD_NUMBER_DIR)
                            pipelineContext.getBuildSummary().setStageDetails(this, UPDATE_TOP_LEVEL_LATEST_STAGE_NAME, env.NODE_NAME, env.WORKSPACE)
                            insideDocker([], pipelineContext.getBuildConfig().AWSCLI_IMAGE, pipelineContext.getBuildConfig().DOCKER_REGISTRY, pipelineContext.getBuildConfig(), 2, 'HOURS') {
                                // NOTE(review): the first three aws cp commands repeat the
                                // uploads of the 'Update Branch Latest Links' stage — possibly
                                // intentional so this stage works when that one is disabled; confirm.
                                sh """
                                    cd ${env.BUILD_NUMBER_DIR}
                                    tmpdir=./buildh2odev.tmp

                                    aws s3 cp \${tmpdir}/latest ${env.S3_ROOT}/${env.BRANCH_NAME}/latest --acl public-read
                                    aws s3 cp \${tmpdir}/latest.html ${env.S3_ROOT}/${env.BRANCH_NAME}/latest.html --acl public-read
                                    aws s3 cp \${tmpdir}/latest.html ${env.S3_ROOT}/${env.BRANCH_NAME}/index.html --acl public-read

                                    LINKS="latest_stable:index.html
latest_stable_doc:docs-website/h2o-docs/index.html
latest_stable_Py:Python/h2o-${env.PROJECT_VERSION}-py2.py3-none-any.whl
latest_stable_R:R/src/contrib/h2o_${env.PROJECT_VERSION}.tar.gz
latest_stable_Rdoc:docs-website/h2o-r/h2o_package.pdf
latest_stable_Pydoc:docs-website/h2o-py/docs/index.html
latest_stable_GLM_booklet:docs-website/h2o-docs/booklets/GLM_Vignette.pdf
latest_stable_GBM_booklet:docs-website/h2o-docs/booklets/GBM_Vignette.pdf
latest_stable_DeepLearning_booklet:docs-website/h2o-docs/booklets/DeepLearning_Vignette.pdf
latest_stable_R_booklet:docs-website/h2o-docs/booklets/R_Vignette.pdf
latest_stable_Python_booklet:docs-website/h2o-docs/booklets/Python_booklet.pdf
latest_stable_SparklingWater_booklet:docs-website/h2o-docs/booklets/SparklingWaterVignette.pdf
latest_stable_javadoc_genmodel:docs-website/h2o-genmodel/javadoc/index.html
latest_stable_javadoc_core:docs-website/h2o-core/javadoc/index.html
latest_stable_javadoc_algos:docs-website/h2o-algos/javadoc/index.html
latest_stable_restdoc_route_reference:docs-website/h2o-docs/index.html#route-reference
latest_stable_restdoc_schema_reference:docs-website/h2o-docs/index.html#schema-reference"

                                    # Output stable link
                                    echo "https://h2o-release.s3.amazonaws.com/h2o/${env.BRANCH_NAME}/${currentBuild.number}/h2o-${env.PROJECT_VERSION}.zip" > \${tmpdir}/latest_stable

                                    # Output all other links
                                    echo "\$LINKS" | while IFS=":" read link dest; do
                                    cat <<EOF > \${tmpdir}/\${link}.html
<head>
<meta http-equiv="refresh" content="0; url=/h2o/${env.BRANCH_NAME}/${currentBuild.number}/\${dest}" />
</head>
EOF
                                    done

                                    # Upload
                                    # Upload first stable link
                                    aws s3 cp \${tmpdir}/latest_stable ${env.S3_ROOT}/latest_stable --acl public-read
                                    echo "\$LINKS" | while IFS=":" read link dest; do
                                    aws s3 cp \${tmpdir}/\${link}.html ${env.S3_ROOT}/\${link}.html --acl public-read
                                    done
                                """
                            }
                            pipelineContext.getBuildSummary().markStageSuccessful(this, UPDATE_TOP_LEVEL_LATEST_STAGE_NAME)
                        } catch (Exception e) {
                            pipelineContext.getBuildSummary().markStageFailed(this, UPDATE_TOP_LEVEL_LATEST_STAGE_NAME)
                            throw e
                        }
                    }
                }

                if (params.TEST_RELEASE || env.BRANCH_NAME.startsWith(pipelineContext.getBuildConfig().RELEASE_BRANCH_PREFIX)) {
                    // Generate the helper script used to verify the published H2O version;
                    // defined by the shared library / later in the file (not visible in this chunk).
                    prepareH2OVersionCheckScript(env.PROJECT_VERSION)

                    if (params.BUILD_PYPI) {
                        // Build the h2o sdist, publish it to S3, and optionally upload
                        // the h2o and h2o_client packages to PyPI.
                        def BUILD_PYPI_STAGE_NAME = 'Build PyPI Package'
                        stage(BUILD_PYPI_STAGE_NAME) {
                            try {
                                pipelineContext.getBuildSummary().addStageSummary(this, BUILD_PYPI_STAGE_NAME, env.BUILD_NUMBER_DIR)
                                pipelineContext.getBuildSummary().setStageDetails(this, BUILD_PYPI_STAGE_NAME, env.NODE_NAME, env.WORKSPACE)
                                // TWINE_USERNAME/TWINE_PASSWORD are read by twine from the environment.
                                withCredentials([usernamePassword(credentialsId: 'pypi-credentials', usernameVariable: 'TWINE_USERNAME', passwordVariable: 'TWINE_PASSWORD')]) {
                                    insideDocker([], pipelineContext.getBuildConfig().getReleaseImage(), pipelineContext.getBuildConfig().DOCKER_REGISTRY, pipelineContext.getBuildConfig(), 2, 'HOURS') {
                                        sh """
                                            echo "Activating Python ${env.PYTHON_VERSION}"
                                            . /envs/h2o_env_python${env.PYTHON_VERSION}/bin/activate

                                            cd ${env.BUILD_NUMBER_DIR}/h2o-py/build/main
                                            python setup.py sdist

                                            s3cmd put --acl-public dist/h2o-${env.PROJECT_VERSION}.tar.gz ${env.S3_ROOT}/${env.BRANCH_NAME}/${currentBuild.number}/Python/

                                            if [ "\$UPLOAD_TO_PYPI" = true ]; then
                                                if [ "\$TEST_RELEASE" = true ]; then
                                                    echo '****** WARNING! Upload to PyPI suppressed ******'
                                                else
                                                    echo '****** Upload to PyPI ******'
                                                    twine upload dist/h2o-${env.PROJECT_VERSION}.tar.gz
                                                    echo '****** Upload h2o_client to PyPI ******'
                                                    cd ../client/dist
                                                    twine upload h2o_client-${env.PROJECT_VERSION}-py2.py3-none-any.whl
                                                fi
                                            else
                                                echo '****** WARNING! Upload to PyPI suppressed ******'
                                            fi
                                        """
                                    }
                                }
                                pipelineContext.getBuildSummary().markStageSuccessful(this, BUILD_PYPI_STAGE_NAME)
                            } catch (Exception e) {
                                // Unlike the other stages, a PyPI failure is non-fatal: it is
                                // recorded in the build summary and logged, but not rethrown,
                                // so the rest of the release pipeline continues.
                                pipelineContext.getBuildSummary().markStageFailed(this, BUILD_PYPI_STAGE_NAME)
                                e.printStackTrace()
                            }
                        }
                    }

                    if (params.BUILD_CONDA) {

                        final def condaS3Dir = "${env.S3_ROOT}/${env.BRANCH_NAME}/${currentBuild.number}/Python/Conda"
                        final def artifacts = ['main', 'client']
                        final def pyVersions = pipelineContext.getBuildConfig().PYTHON_VERSIONS

                        // build for all Python versions
                        for (artifact in artifacts) {
                            for (pyVersion in pyVersions) {
                                // Build the per-Python-version Conda package for this artifact,
                                // convert it for all platforms, publish to S3, and (for real
                                // releases) upload to anaconda.org.
                                def uploadToCondaStageName = "Build Py${pyVersion} Conda Packages: $artifact"
                                stage(uploadToCondaStageName) {
                                    withCredentials([usernamePassword(credentialsId: 'anaconda-credentials', usernameVariable: 'ANACONDA_USERNAME', passwordVariable: 'ANACONDA_PASSWORD')]) {
                                        pipelineContext.getBuildSummary().addStageSummary(this, uploadToCondaStageName, env.BUILD_NUMBER_DIR)
                                        pipelineContext.getBuildSummary().setStageDetails(this, uploadToCondaStageName, env.NODE_NAME, env.WORKSPACE)
                                        try {
                                            insideDocker([], pipelineContext.getBuildConfig().getReleaseImage(), pipelineContext.getBuildConfig().DOCKER_REGISTRY, pipelineContext.getBuildConfig(), 2, 'HOURS') {
                                                sh """
                                                    export CONDA_PKGS_DIRS=${env.BUILD_NUMBER_DIR}/h2o-py/build/conda_cache
                                                    mkdir -p \$CONDA_PKGS_DIRS
                                                    cp -r ${env.BUILD_NUMBER_DIR}/h2o-py/conda ${env.BUILD_NUMBER_DIR}/h2o-py/build/${artifact}
                                                    cd ${env.BUILD_NUMBER_DIR}/h2o-py/build/${artifact}/conda
                                                    # Create conda package for current platform
                                                    conda build h2o-${artifact} --output-folder "." --no-anaconda-upload --py ${pyVersion} --channel anaconda
                                                    # Get name of the package
                                                    CONDA_PKG_CURRENT_ARCH_PATH=\$(conda build h2o-${artifact} --py ${pyVersion} --output-folder "." --output | tail -1)
                                                    PKG_NAME=\$(basename \$CONDA_PKG_CURRENT_ARCH_PATH)
                                                    # Convert conda package for all other platforms
                                                    conda convert \$CONDA_PKG_CURRENT_ARCH_PATH -p all

                                                    s3cmd --acl-public put osx-64/\${PKG_NAME} ${condaS3Dir}/osx-64/\${PKG_NAME}
                                                    s3cmd --acl-public put linux-64/\${PKG_NAME} ${condaS3Dir}/linux-64/\${PKG_NAME}
                                                    s3cmd --acl-public put win-64/\${PKG_NAME} ${condaS3Dir}/win-64/\${PKG_NAME}

                                                    if [ "\$UPLOAD_TO_ANACONDA" = true ]; then
                                                        if [ "\$TEST_RELEASE" = true ]; then
                                                            echo 'Upload to conda ignored, because this is a TEST_RELEASE'
                                                        else
                                                            echo '****** Upload to Conda ******'
                                                            # Right now packages for all platforms are in the current directory
                                                            # upload all distribution packages
                                                            # SECURITY FIX: let the shell expand the credentials (escaped \$)
                                                            # instead of Groovy-interpolating them into the script text,
                                                            # which Jenkins logs/persists; quoting also survives special chars.
                                                            anaconda login --username "\$ANACONDA_USERNAME" --password "\$ANACONDA_PASSWORD"
                                                            anaconda upload osx-64/\${PKG_NAME}
                                                            anaconda upload linux-64/\${PKG_NAME}
                                                            anaconda upload win-64/\${PKG_NAME}
                                                        fi
                                                    else
                                                        echo '****** WARNING! Upload to Conda suppressed ******'
                                                    fi
                                                """
                                            }
                                            pipelineContext.getBuildSummary().markStageSuccessful(this, uploadToCondaStageName)
                                        } catch (Exception e) {
                                            // Failure is recorded in the build summary but does not abort
                                            // the release pipeline (matches the other packaging stages).
                                            pipelineContext.getBuildSummary().markStageFailed(this, uploadToCondaStageName)
                                            e.printStackTrace()
                                        }
                                    }
                                }
                            }
                        }

                        // Smoke-test the just-built Conda packages: download each platform
                        // package back from S3 and install it with `--offline` (no channel
                        // resolution), then verify the reported H2O version.
                        for (pyVersion in pyVersions) {
                            def checkCondaOfflineStageName = "Check Py${pyVersion} Conda Package Offline"
                            // Conda package filenames use the dotless Python version, e.g. py37.
                            def condaPkgPyVersion = pyVersion.replaceAll('\\.','')
                            def pkgName = "h2o-${env.PROJECT_VERSION}-py${condaPkgPyVersion}*.tar.bz2"
                            stage(checkCondaOfflineStageName) {
                                pipelineContext.getBuildSummary().addStageSummary(this, checkCondaOfflineStageName, env.BUILD_NUMBER_DIR)
                                pipelineContext.getBuildSummary().setStageDetails(this, checkCondaOfflineStageName, env.NODE_NAME, env.WORKSPACE)
                                try {
                                    insideDocker([], pipelineContext.getBuildConfig().getReleaseImage(), pipelineContext.getBuildConfig().DOCKER_REGISTRY, pipelineContext.getBuildConfig(), 2, 'HOURS') {
                                        echo "Check Conda package for Python ${pyVersion}"
                                        // Fresh env with only the runtime deps h2o needs for the check.
                                        sh "conda create -y -n py${pyVersion} python=${pyVersion} tabulate requests"
                                        sh "s3cmd get ${condaS3Dir}/linux-64/${pkgName}"
                                        // h2o_test.py asserts the module version — presumably written
                                        // earlier via prepareH2OVersionCheckScript (verify ordering).
                                        sh """
                                                bash -c \"\"\"
                                                    source activate py${pyVersion}

                                                    conda install ${pkgName} --offline
                                                    python --version
                                                    python h2o_test.py
                                                \"\"\"
                                            """
                                    }
                                    pipelineContext.getBuildSummary().markStageSuccessful(this, checkCondaOfflineStageName)
                                } catch (Exception e) {
                                    // Recorded as failed in the summary; the pipeline continues.
                                    pipelineContext.getBuildSummary().markStageFailed(this, checkCondaOfflineStageName)
                                    e.printStackTrace()
                                }
                            }
                        }

                        // Check that the Conda package served by anaconda.org (channel h2oai)
                        // reports the correct H2O version for each Python version. Only
                        // meaningful when this run actually uploaded to Anaconda.
                        if (params.UPLOAD_TO_ANACONDA && !params.TEST_RELEASE) {
                            for (pyVersion in pyVersions) {
                                def checkCondaStageName = "Check Py${pyVersion} Conda Package"
                                stage(checkCondaStageName) {
                                    pipelineContext.getBuildSummary().addStageSummary(this, checkCondaStageName, env.BUILD_NUMBER_DIR)
                                    pipelineContext.getBuildSummary().setStageDetails(this, checkCondaStageName, env.NODE_NAME, env.WORKSPACE)
                                    try {
                                        insideDocker([], pipelineContext.getBuildConfig().getReleaseImage(), pipelineContext.getBuildConfig().DOCKER_REGISTRY, pipelineContext.getBuildConfig(), 2, 'HOURS') {
                                            echo "Check Conda package for Python ${pyVersion}"
                                            sh "conda create -y -n py${pyVersion} python=${pyVersion}"
                                            sh """
                                                bash -c \"\"\"
                                                    source activate py${pyVersion}
                                                    conda config --append channels conda-forge

                                                    conda install -y -c h2oai h2o
                                                    python --version
                                                    python h2o_test.py
                                                \"\"\"
                                            """
                                        }
                                        pipelineContext.getBuildSummary().markStageSuccessful(this, checkCondaStageName)
                                    } catch (Exception e) {
                                        pipelineContext.getBuildSummary().markStageFailed(this, checkCondaStageName)
                                        e.printStackTrace()
                                    }
                                }
                            }
                        } else {
                            // FIX: the old message blamed TEST_RELEASE only, but this branch is
                            // also taken whenever UPLOAD_TO_ANACONDA is disabled.
                            echo 'UPLOAD_TO_ANACONDA disabled or TEST_RELEASE set, don\'t check Conda packages from anaconda.org'
                        }
                    }

                    // Package the h2o-helm chart and publish it, plus a merged index.yaml,
                    // to the appropriate S3 Helm repository bucket.
                    def HELM_STAGE_NAME = 'Publish HELM to S3'
                    stage(HELM_STAGE_NAME) {
                        try {
                            pipelineContext.getBuildSummary().addStageSummary(this, HELM_STAGE_NAME, env.BUILD_NUMBER_DIR)
                            pipelineContext.getBuildSummary().setStageDetails(this, HELM_STAGE_NAME, env.NODE_NAME, env.WORKSPACE)

                            // Helm automatically installs latest chart if version is unspecified by the user, therefore there is no need for a latest tag
                            // FIX: declared as locals with `def` — previously these leaked into the
                            // script binding as globals.
                            // The sed drops the leading major component (e.g. 3.36.0.2 -> 36.0.2)
                            // to get a semver-like x.y.z — TODO confirm for multi-digit builds.
                            def THREE_DIGITS_VERSION = sh (
                                script: "echo ${env.PROJECT_VERSION} | sed \"s/[0-9]*\\.\\([0-9]*\\.[0-9]*\\.[0-9]\\)*/\\1/g\"",
                                returnStdout: true
                            ).trim()

                            // Chart name and target bucket depend on the release type; the chart
                            // version is the same in every case, so it is set once here.
                            def CHART_NAME
                            def BUCKET
                            def VERSION = THREE_DIGITS_VERSION
                            if (params.TEST_RELEASE) {
                                CHART_NAME = "h2o-test-3"
                                BUCKET = "h2oai-test-helm-charts"
                            } else if (env.NIGHTLY_BUILD) {
                                CHART_NAME = "h2o-3-nightly"
                                BUCKET = "h2oai-test-helm-charts"
                            } else {
                                CHART_NAME = "h2o-3"
                                BUCKET = "h2oai-charts"
                            }

                            // Fetch the current repo index so the new chart can be merged into it.
                            insideDocker([], pipelineContext.getBuildConfig().AWSCLI_IMAGE, pipelineContext.getBuildConfig().DOCKER_REGISTRY, pipelineContext.getBuildConfig(), 3, 'HOURS') {
                                sh """
                                    cd ${env.BUILD_NUMBER_DIR}/h2o-helm/
                                    aws s3 cp s3://${BUCKET}/index.yaml index.yaml
                                """
                            }

                            withCredentials([file(credentialsId: 'release-gradle.properties', variable: 'GRADLE_PROPERTIES_PATH'), file(credentialsId: 'release-secret-key-ring-file', variable: 'RING_FILE_PATH')]) {
                                insideDocker([], pipelineContext.getBuildConfig().getReleaseImage(), pipelineContext.getBuildConfig().DOCKER_REGISTRY, pipelineContext.getBuildConfig(), 2, 'HOURS', "-v ${GRADLE_PROPERTIES_PATH}:${GRADLE_PROPERTIES_PATH} -v ${RING_FILE_PATH}:${RING_FILE_PATH}") {
                                    sh """
                                        cd ${env.BUILD_NUMBER_DIR}/h2o-helm/
                                        sed -i "s/name: h2o-3/name: ${CHART_NAME}/" Chart.yaml
                                        helm package . --app-version ${VERSION} --version ${VERSION}
                                        mkdir h2o-3
                                        mv ${CHART_NAME}-${VERSION}.tgz h2o-3/
                                        helm repo index . --merge index.yaml
                                    """
                                }
                            }

                            // Upload the packaged chart and the regenerated index, both public-read.
                            insideDocker([], pipelineContext.getBuildConfig().AWSCLI_IMAGE, pipelineContext.getBuildConfig().DOCKER_REGISTRY, pipelineContext.getBuildConfig(), 3, 'HOURS') {
                                sh """
                                    cd ${env.BUILD_NUMBER_DIR}/h2o-helm/
                                    aws s3 cp h2o-3/${CHART_NAME}-${VERSION}.tgz s3://${BUCKET}/h2o-3/${CHART_NAME}-${VERSION}.tgz --acl public-read
                                    aws s3 cp index.yaml s3://${BUCKET}/index.yaml --acl public-read
                                """
                            }

                            pipelineContext.getBuildSummary().markStageSuccessful(this, HELM_STAGE_NAME)
                        } catch (Exception e) {
                            pipelineContext.getBuildSummary().markStageFailed(this, HELM_STAGE_NAME)
                            e.printStackTrace()
                        }
                    }
                }

                // Build and push the H2O Kubernetes Docker images (full and minimal
                // variants). Image name/tag selection depends on TEST_RELEASE,
                // NIGHTLY_BUILD and UPDATE_LATEST; the actual build/push commands
                // live in scripts/jenkins/Makefile.jenkins.
                if (params.BUILD_H2O_DOCKER) {
                    def assemblies = [
                        main: "h2o-open-source-k8s",
                        steam: "h2o-open-source-k8s-minimal"
                    ]
                    assemblies.each { assemblyName, imageType ->
                        def BUILD_H2O_DOCKER_STAGE_NAME = "Build H2O Docker container (${imageType})"
                        stage(BUILD_H2O_DOCKER_STAGE_NAME) {
                            try {
                                pipelineContext.getBuildSummary().addStageSummary(this, BUILD_H2O_DOCKER_STAGE_NAME, env.BUILD_NUMBER_DIR)
                                pipelineContext.getBuildSummary().setStageDetails(this, BUILD_H2O_DOCKER_STAGE_NAME, env.NODE_NAME, env.WORKSPACE)
                                withCredentials([
                                      usernamePassword(credentialsId: 'dockerhub', passwordVariable: 'DOCKERHUB_PASSWORD', usernameVariable: 'DOCKERHUB_USERNAME')
                                      ]) {
                                    // Consumed by the Makefile targets below.
                                    env["H2O_ASSEMBLY"] = assemblyName

                                    // Copy resources outside of the build task because of the permissions
                                    insideDocker([], pipelineContext.getBuildConfig().getReleaseImage(), pipelineContext.getBuildConfig().DOCKER_REGISTRY, pipelineContext.getBuildConfig(), 2, 'HOURS', "") {
                                        sh """
                                            id
                                            printenv | sort
                                            ls -la
                                            cd ${env.BUILD_NUMBER_DIR}
                                            ls -la

                                            make -f scripts/jenkins/Makefile.jenkins h2o-k8s-docker-copy-resources
                                        """
                                    }

                                    if (params.TEST_RELEASE) {
                                        env["DOCKER_IMAGE_NAME"] = "opsh2oai/${imageType}-test"
                                        // Test release goes to opsh2oai namespace
                                        env["DOCKER_IMAGE_LATEST_TAG"] = "latest"
                                        // the most recent test build is always latest
                                        env["DOCKER_IMAGE_TAG"] = env["BRANCH_NAME"] + "-" + currentBuild.number
    
                                        sh """
                                          cd ${env.BUILD_NUMBER_DIR}
                                          make -f scripts/jenkins/Makefile.jenkins h2o-k8s-docker-build-latest h2o-k8s-docker-push-latest
                                        """
                                    } else {
                                        env["DOCKER_IMAGE_NAME"] = "h2oai/${imageType}"
                                        // Normal releases go to h2oai/ namespace
                                        if (env.NIGHTLY_BUILD) {
                                            // If the build is not a test build and is nightly, then re-write the latest nightly
                                            env["DOCKER_IMAGE_TAG"] = "nightly"
                                            sh """
                                              cd ${env.BUILD_NUMBER_DIR}
                                              make -f scripts/jenkins/Makefile.jenkins h2o-k8s-docker-build h2o-k8s-docker-push
                                            """
                                        } else {
                                            env["DOCKER_IMAGE_TAG"] = env["PROJECT_VERSION"]
                                            if (params.UPDATE_LATEST) {
                                                // If the build is not a test build and is an ordinary release, update the version tag.
                                                // The "latest" tag means stable. If UPDATE_LATEST is set to true, also update the latest docker tag.
                                                env["DOCKER_IMAGE_LATEST_TAG"] = "latest"
                                                sh """
                                                  cd ${env.BUILD_NUMBER_DIR}
                                                  make -f scripts/jenkins/Makefile.jenkins h2o-k8s-docker-build-latest h2o-k8s-docker-push-latest
                                                """
                                            } else {
                                                // If the build is not a test build and is an ordinary release, update the latest tag.
                                                // The "latest" tag means stable. If UPDATE_LATEST is set to false do not update the latest tag and push only the version tag.
                                                // Push to Red Hat either way
                                                sh """
                                                  cd ${env.BUILD_NUMBER_DIR}
                                                  make -f scripts/jenkins/Makefile.jenkins h2o-k8s-docker-build h2o-k8s-docker-push
                                                """
                                            }
                                        }
                                    }
                                }
                                pipelineContext.getBuildSummary().markStageSuccessful(this, BUILD_H2O_DOCKER_STAGE_NAME)
                            } catch (Exception e) {
                                // Recorded as failed; the pipeline continues to Cleanup.
                                pipelineContext.getBuildSummary().markStageFailed(this, BUILD_H2O_DOCKER_STAGE_NAME)
                                e.printStackTrace()
                            }
                        }
                    }
                }

                // Remove bulky build artifacts from the workspace. Runs as root in a
                // throwaway alpine container because earlier dockerized steps may have
                // left root-owned files that the jenkins user cannot delete.
                final String CLEANUP_STAGE_NAME = 'Cleanup'
                stage(CLEANUP_STAGE_NAME) {
                    try {
                        pipelineContext.getBuildSummary().addStageSummary(this, CLEANUP_STAGE_NAME, env.BUILD_NUMBER_DIR)
                        pipelineContext.getBuildSummary().setStageDetails(this, CLEANUP_STAGE_NAME, env.NODE_NAME, env.WORKSPACE)
                        docker.image("alpine:latest").inside("--user=root:root") {
                            sh """
                                id
                                printenv | sort
                                ls -la
                                addgroup -g 1002 jenkins
                                adduser -D -u 1002 -G jenkins -s /bin/sh jenkins
                                chown -R jenkins:jenkins ${env.BUILD_NUMBER_DIR}
                                cd ${env.BUILD_NUMBER_DIR}
                                rm -rf target/*.zip
                                rm -rf h2o-dist
                                find . -name 'h2odriver-3.*.0.jar' -delete -print
                            """
                        }
                        pipelineContext.getBuildSummary().markStageSuccessful(this, CLEANUP_STAGE_NAME)
                    } catch (Exception e) {
                        pipelineContext.getBuildSummary().markStageFailed(this, CLEANUP_STAGE_NAME)
                        // Unlike the packaging stages, a cleanup failure aborts the build.
                        throw e
                    }
                }
            }
        }
    }
    result = 'SUCCESS'
} finally {
    if (pipelineContext != null) {
        pipelineContext.getEmailer().sendEmail(this, result, pipelineContext.getBuildSummary().getSummaryHTML(this), getRelevantRecipients(pipelineContext, result))
    }
}

/**
 * Defines the job's build parameters, triggers and log rotation, then derives
 * release-wide environment variables (S3_ROOT, NIGHTLY_BUILD, DO_RELEASE)
 * from the chosen parameters and the current branch.
 *
 * Note: params used in defaultValue expressions reflect the PREVIOUS run's
 * values until properties() has been applied once.
 */
private setReleaseJobProperties(final pipelineContext) {

    def TEST_RELEASE_BUCKET = 's3://test.0xdata.com/test-release/h2o'
    def RELEASE_BUCKET = 's3://h2o-release/h2o'

    final boolean isReleaseBranch = env.BRANCH_NAME.startsWith(pipelineContext.getBuildConfig().RELEASE_BRANCH_PREFIX)

    def jobProperties = [
        disableConcurrentBuilds(),
        parameters([
            booleanParam(defaultValue: !isReleaseBranch && env.BRANCH_NAME != 'master', description: "If set don't upload to PyPI and Conda, just build the packages if required; also push to ${TEST_RELEASE_BUCKET} instead of ${RELEASE_BUCKET}", name: 'TEST_RELEASE'),
            booleanParam(defaultValue: env.BRANCH_NAME == 'master', description: 'If set, test all credentials (PyPI, Anaconda, Docker, S3) before building.', name: 'TEST_CREDENTIALS'),
            booleanParam(defaultValue: isReleaseBranch && !params.TEST_RELEASE, description: 'If set, update top-level latest links and latest DOCKER tag', name: 'UPDATE_LATEST'),
            booleanParam(defaultValue: true, description: 'If set, update latest links for this branch', name: 'UPDATE_LATEST_BRANCH'),
            booleanParam(defaultValue: isReleaseBranch && !params.TEST_RELEASE, description: 'If set, publish to Nexus', name: 'UPLOAD_NEXUS'),
            booleanParam(defaultValue: true, description: 'If set, build with Hadoop support. Should be unchecked only for test releases when you do not want to wait for the full build.', name: 'BUILD_HADOOP'),
            booleanParam(defaultValue: true, description: 'If set, build PyPI package and upload it to S3', name: 'BUILD_PYPI'),
            booleanParam(defaultValue: isReleaseBranch && !params.TEST_RELEASE, description: 'If set and building rel- branch, publish to PyPI', name: 'UPLOAD_TO_PYPI'),
            booleanParam(defaultValue: true, description: 'If set, build conda packages and upload them to S3', name: 'BUILD_CONDA'),
            booleanParam(defaultValue: isReleaseBranch && !params.TEST_RELEASE, description: 'If set and building rel- branch, publish to Anaconda', name: 'UPLOAD_TO_ANACONDA'),
            booleanParam(defaultValue: isReleaseBranch && !params.TEST_RELEASE, description: 'If set, build H2O Docker image and push to H2O official docker hub.', name: 'BUILD_H2O_DOCKER'),
        ])
    ]
    if (env.BRANCH_NAME == 'master') {
        // in case of master branch enable the periodical builds and buildDiscarder
        jobProperties += pipelineTriggers(
            [cron('30 23 * * *')]
        )
        jobProperties += buildDiscarder(
            logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '25')
        )

    }
    properties(jobProperties)

    // Real releases (master nightlies and rel- branches) push to the release
    // bucket; everything else goes to the test bucket.
    if (!params.TEST_RELEASE && (env.BRANCH_NAME == 'master' || isReleaseBranch)) {
        env.S3_ROOT = RELEASE_BUCKET
    } else {
        env.S3_ROOT = TEST_RELEASE_BUCKET
    }
    echo "Release will be pushed to ${env.S3_ROOT}"

    if (env.BRANCH_NAME == 'master') {
        // we are building nightly build
        env.NIGHTLY_BUILD = true
    }
    // FIX: collapsed the previous duplicated logic — DO_RELEASE was set both
    // inside a branch-specific else-if and again unconditionally; the net
    // effect was always "DO_RELEASE iff UPLOAD_NEXUS", expressed once here.
    if (params.UPLOAD_NEXUS) {
        env.DO_RELEASE = true
    }
    sh "printenv | sort"
}

/**
 * Echoes a human-readable summary of the release configuration (branch, SHA,
 * docker image, target bucket and the publish flags) to the build log.
 */
private printReleaseConfiguration(final pipelineContext) {
    echo """
=======================================
Configuration:
---------------------------------------
    SHA:                    | ${env.GIT_SHA}
    Branch:                 | ${env.BRANCH_NAME}
    Docker Image:           | ${pipelineContext.getBuildConfig().getReleaseImage()}
    Test Release:           | ${params.TEST_RELEASE}
    Test Credentials:       | ${params.TEST_CREDENTIALS}
    S3 Bucket:              | ${env.S3_ROOT}
    Publish to Nexus:       | ${params.UPLOAD_NEXUS}
    Build Hadoop:           | ${params.BUILD_HADOOP}
    Publish to PyPI:        | ${params.UPLOAD_TO_PYPI}
    Publish to Conda:       | ${params.UPLOAD_TO_ANACONDA}
    Publish to Docker Hub:  | ${params.BUILD_H2O_DOCKER}
=======================================
"""
}

/**
 * Creates Python script which checks if h2o module is of expected version. Script is saved in $(pwd)/h2o_test.py
 * The generated script compares h2o.__version__ against the expected version,
 * then calls h2o.init(). Note the file content starts with a blank line (the
 * echo'd string opens with a newline).
 * @param projectVersion expected h2o module version, like 3.16.0.2
 */
private prepareH2OVersionCheckScript(final String projectVersion) {
    sh """
echo '
import h2o
actual_version = h2o.__version__
expected_version = "${projectVersion}"
assert actual_version == expected_version, "Version should be %s but was %s" % (expected_version, actual_version)
h2o.init()
' > h2o_test.py
"""
}

/**
 * Chooses the e-mail recipient list for the final build notification, based
 * on whether this is a test release, a nightly build, and whether the build
 * succeeded.
 */
private getRelevantRecipients(final pipelineContext, final String result) {
    // Nightly builds notify a smaller core group; failures widen the list.
    final def RELEASE_NIGHTLY_ALWAYS_RECIPIENTS = ['michalr@h2o.ai']
    final def RELEASE_NIGHTLY_FAILURE_RECIPIENTS = ['michalk@h2o.ai', 'anmol@h2o.ai'] + RELEASE_NIGHTLY_ALWAYS_RECIPIENTS
    final def RELEASE_ALWAYS_RECIPIENTS = ['michalk@h2o.ai', 'anmol@h2o.ai', 'michalr@h2o.ai']
    final def RELEASE_FAILURE_RECIPIENTS = [] + RELEASE_ALWAYS_RECIPIENTS

    // Test releases only ever notify a single maintainer.
    if (params.TEST_RELEASE) {
        return ['michalr@h2o.ai']
    }

    final boolean isNightly = env.NIGHTLY_BUILD != null && env.NIGHTLY_BUILD.toLowerCase() == 'true'
    final boolean isSuccess = result.toLowerCase() == pipelineContext.getBuildSummary().RESULT_SUCCESS

    if (isSuccess) {
        return isNightly ? RELEASE_NIGHTLY_ALWAYS_RECIPIENTS : RELEASE_ALWAYS_RECIPIENTS
    }
    return isNightly ? RELEASE_NIGHTLY_FAILURE_RECIPIENTS : RELEASE_FAILURE_RECIPIENTS
}
