Add Spark-Connect Tests - CI & Test Suite Update #23
Workflow file for this run:
name: Unit tests

on:
  push:
    branches:
      - main
      - planning-1.0-release
  pull_request:
    branches:
      - main
      - planning-1.0-release
  workflow_dispatch:

jobs:
  test:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        include:
          - pyspark-version: 3.3.4
            pip-packages: "pyspark==3.3.4"
          - pyspark-version: 3.4.3
            pip-packages: "pyspark==3.4.3"
          - pyspark-version: 3.5.1
            pip-packages: "pyspark==3.5.1"
    steps:
      - uses: actions/checkout@v1
        with:
          fetch-depth: 1
      - name: Setup Java
        uses: actions/setup-java@v3
        with:
          distribution: 'zulu'
          java-version: '17' # Spark 4.0 will drop Java 11
      - name: Set up Python 3.10
        uses: actions/setup-python@v2
        with:
          python-version: '3.10'
      - name: Install Poetry
        uses: snok/install-poetry@v1
        with:
          version: '1.6.1'
      - name: Cache Poetry virtualenv
        uses: actions/cache@v1
        id: cache
        with:
          path: ~/.virtualenvs
          key: poetry-${{ hashFiles('**/poetry.lock') }}
          restore-keys: |
            poetry-${{ hashFiles('**/poetry.lock') }}
      - name: Install dependencies
        run: make install_test
        if: steps.cache.outputs.cache-hit != 'true'
      - name: Change PySpark to version ${{ matrix.pyspark-version }}
        env:
          PIP_PACKAGES: ${{ matrix.pip-packages }}
        run: poetry run pip install $PIP_PACKAGES # Using pip shouldn't mess up poetry cache
      - name: Run tests with pytest against PySpark ${{ matrix.pyspark-version }}
        run: make test
      - name: Run tests using Spark-Connect against PySpark ${{ matrix.pyspark-version }}
        env:
          HADOOP_VERSION: 3
          SPARK_VERSION: ${{ matrix.pyspark-version }}
          SPARK_CONNECT_MODE_ENABLE: 1
        run: |
          if [[ "${SPARK_VERSION}" > "3.4" ]]; then
            sh scripts/run_spark_connect_server.sh
            make test_spark_connect
          else
            echo "Skipping Spark-Connect tests for Spark version <= 3.4"
          fi

  check-license-headers:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
        with:
          fetch-depth: 0
      - name: Check License Header
        uses: apache/skywalking-eyes/dependency@main
        with:
          log: debug
          config: .licenserc.yaml
          mode: check
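
To illustrate what the new Spark-Connect step exercises, below is a minimal sketch of a Spark-Connect-aware pytest fixture. It is not the repository's actual test code: the fixture and test names, the sc://localhost:15002 endpoint (Spark Connect's default port, which scripts/run_spark_connect_server.sh is assumed to expose), and the availability of the Spark Connect client dependencies (e.g. grpcio via pyspark[connect], PySpark >= 3.4) are assumptions; only the SPARK_CONNECT_MODE_ENABLE variable comes from the workflow above.

    # Illustrative sketch only -- not the repo's actual conftest.py.
    # Assumes the Spark Connect server started by scripts/run_spark_connect_server.sh
    # is listening on the default endpoint sc://localhost:15002.
    import os

    import pytest
    from pyspark.sql import SparkSession


    @pytest.fixture(scope="session")
    def spark():
        if os.environ.get("SPARK_CONNECT_MODE_ENABLE") == "1":
            # Spark Connect client (PySpark >= 3.4): talk to the standalone
            # server instead of launching a local JVM in-process.
            return SparkSession.builder.remote("sc://localhost:15002").getOrCreate()
        # Classic in-process local session for the regular `make test` run.
        return SparkSession.builder.master("local[*]").appName("unit-tests").getOrCreate()


    def test_smoke(spark):
        # The same test body runs against either kind of session.
        assert spark.range(3).count() == 3

Gating on the same environment variable the workflow sets keeps a single test suite usable for both the classic `make test` run and the `make test_spark_connect` run.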