Workflow file for this run

# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.
#
# See https://github.com/r-lib/actions/tree/master/examples#readme for
# additional example workflows available for the R community.
name: ci-tests
on:
  push:
    branches: [ "feature/google_bigquery_connector" ]
  pull_request:
    branches: [ "feature/google_bigquery_connector" ]
jobs:
  build:
    runs-on: ${{ matrix.config.os-name }}-${{ matrix.config.os-version }}
    name: >-
      ${{ matrix.config.os-name }}-${{ matrix.config.os-version }}
      R ${{ matrix.config.r-version }} - Java ${{ matrix.config.java }}
      Spark ${{ matrix.config.spark }}
    strategy:
      fail-fast: false
      matrix:
        config:
          # - os-name: ubuntu
          #   os-version: latest
          #   java: 17
          #   spark: "3.5"
          #   r-version: release
          # - os-name: ubuntu
          #   os-version: latest
          #   java: 8
          #   spark: "2.4"
          #   r-version: devel
          # - os-name: ubuntu
          #   os-version: "22.04"
          #   java: 11
          #   spark: "3.0"
          #   r-version: oldrel
          # - os-name: macos
          #   os-version: latest
          #   java: 8
          #   spark: "3.2"
          #   r-version: release
          # - os-name: macos
          #   os-version: latest
          #   java: 17
          #   spark: "3.4"
          #   r-version: devel
          # - os-name: windows
          #   os-version: latest
          #   java: 8
          #   spark: "3.1"
          #   r-version: oldrel
          - os-name: windows
            os-version: "2022"
            java: 17
            spark: "3.3"
            r-version: release
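    # Shared environment for all steps; BIGQUERY_APPLICATION_CREDENTIALS points at the
    # adc.json file written by the "Set Google application default credentials" step below.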
    env:
      SPARK_VERSION: ${{ matrix.config.spark }}
      BIGQUERY_PROJECT_ID: mirai-sbb
      BIGQUERY_MATERIALIZATION_DATASET: test
      BIGQUERY_APPLICATION_CREDENTIALS: ${{ github.workspace }}/adc.json
      R_DEFAULT_INTERNET_TIMEOUT: 1800
      # Override where sparklyr gets its Spark version information from
      R_SPARKINSTALL_INSTALL_INFO_PATH: ${{ github.workspace }}/spark_versions.json
    steps:
      - uses: actions/checkout@v4
      - name: Setup Java
        uses: actions/setup-java@v4.4.0
        with:
          distribution: 'zulu'
          java-version: ${{ matrix.config.java }}
          java-package: jdk
          architecture: x64
      - name: Print effective Java version
        run: java -version
      - name: Set up R ${{ matrix.config.r-version }}
        uses: r-lib/actions/setup-r@v2
        with:
          r-version: ${{ matrix.config.r-version }}
          use-public-rspm: true
      - name: Print effective R version
        run: version
        shell: Rscript {0}
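      # Point R's Java configuration at the JDK selected above so Java-linked packages
      # build against it; this step is skipped on Windows.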
      - name: R CMD javareconf
        if: runner.os != 'Windows'
        run: |
          java -version
          echo java_home:$JAVA_HOME
          echo library paths: $LD_LIBRARY_PATH
          sudo R CMD javareconf JAVA_HOME=$JAVA_HOME
      - name: Install and cache dependencies
        uses: r-lib/actions/setup-r-dependencies@v2
        with:
          cache-version: ${{ matrix.config.java }}
          extra-packages: |
            any::rcmdcheck
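      # Assumption: the Hadoop winutils binaries installed below link against the
      # VC++ 2010 runtime (msvcr100.dll), hence this redistributable.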
      - name: Install Microsoft Visual C++ 2010 SP1 Redistributable Package via Chocolatey
        if: runner.os == 'Windows'
        run: |
          # Install the package using Chocolatey
          choco install vcredist2010 -y --allow-empty-checksums
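      # Install Spark via sparklyr; on Windows, also create a Hadoop home under the Spark
      # install and export HADOOP_VERSION, HADOOP_HOME and PATH through $GITHUB_ENV
      # so the Winutils step below can use them.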
      - name: Install Spark
        run: |
          info <- sparklyr::spark_install(version = "${{ matrix.config.spark }}")
          if (.Platform$OS.type == "windows") {
            hadoop_path <- file.path(info$sparkVersionDir, "tmp", "hadoop")
            hadoop_bin_path <- file.path(hadoop_path, "bin")
            dir.create(hadoop_bin_path, recursive = TRUE)
            cat(
              c(
                paste0("HADOOP_VERSION=", info$hadoopVersion),
                paste0("HADOOP_HOME=", hadoop_path),
                paste0("PATH=", Sys.getenv("PATH"), ":", hadoop_bin_path)
              ),
              file = Sys.getenv("GITHUB_ENV"),
              sep = "\n",
              append = TRUE
            )
          }
        shell: Rscript {0}
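      # Copy the winutils build matching HADOOP_VERSION into HADOOP_HOME/bin so that
      # Hadoop file-system operations work on Windows.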
      - name: Install Winutils
        if: runner.os == 'Windows'
        run: |
          git clone https://github.com/cdarlint/winutils.git C:/tmp/winutils
          LATEST_HADOOP_DIR=$(ls -d C:/tmp/winutils/hadoop-${HADOOP_VERSION}* | sort -V | tail -n 1)
          echo "Latest Hadoop Directory: ${LATEST_HADOOP_DIR}"
          cp -R ${LATEST_HADOOP_DIR}/bin ${HADOOP_HOME}
        shell: bash
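      # Write the service-account JSON from the repository secret to the adc.json path
      # configured in BIGQUERY_APPLICATION_CREDENTIALS above.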
      - name: Set Google application default credentials
        env:
          ADC: ${{ secrets.GCLOUD_APPLICATION_CREDENTIALS }}
        shell: bash
        run: |
          echo "$ADC" > $BIGQUERY_APPLICATION_CREDENTIALS
      - name: Run R CMD check
        uses: r-lib/actions/check-r-package@v2
        with:
          error-on: '"error"'
          check-dir: '"check"'
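      # Both upload steps exclude adc.json so the credentials never end up in an artifact.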
      - name: Upload check results
        if: failure()
        uses: actions/upload-artifact@main
        with:
          name: ${{ matrix.config.os-name }}${{ matrix.config.os-version }}-Java${{ matrix.config.java }}-R${{ matrix.config.r-version }}-Spark${{ matrix.config.spark }}-results
          path: |
            check/sparkbq.Rcheck/
            !check/sparkbq.Rcheck/00_pkg_src/
            !adc.json
      - name: Upload successfully built package
        if: success()
        uses: actions/upload-artifact@main
        with:
          name: ${{ matrix.config.os-name }}${{ matrix.config.os-version }}-Java${{ matrix.config.java }}-R${{ matrix.config.r-version }}-Spark${{ matrix.config.spark }}-results
          path: |
            check/sparkbq_*.tar.gz
            !adc.json