
Commit

tests should be faster now
lukasmartinelli committed Jun 3, 2018
1 parent 548bf15 commit 2788fc8
Showing 2 changed files with 47 additions and 56 deletions.
32 changes: 15 additions & 17 deletions download_samples.sh
@@ -5,30 +5,28 @@ SAMPLES_DIR="$CWD/samples"
function download_json_samples() {
mkdir -p $SAMPLES_DIR
cd $SAMPLES_DIR
wget -nc http://data.githubarchive.org/2015-01-01-15.json.gz && gunzip -f 2015-01-01-15.json.gz
wget -nc https://github.com/lukasmartinelli/pgfutter/releases/download/v0.1-alpha/json_sample_2015-01-01-15.json
cd $CWD
}

function download_csv_samples() {
mkdir -p $SAMPLES_DIR
cd $SAMPLES_DIR
wget -nc -O local_severe_wheather_warning_systems.csv https://data.mo.gov/api/views/n59h-ggai/rows.csv
wget -nc -O montgomery_crime.csv https://data.montgomerycountymd.gov/api/views/icn6-v9z3/rows.csv
wget -nc -O employee_salaries.csv https://data.montgomerycountymd.gov/api/views/54rh-89p8/rows.csv
wget -nc -O residential_permits.csv https://data.montgomerycountymd.gov/api/views/m88u-pqki/rows.csv
wget -nc -O customer_complaints.csv https://data.consumerfinance.gov/api/views/x94z-ydhh/rows.csv
wget -nc -O traffic_violations.csv https://data.montgomerycountymd.gov/api/views/4mse-ku6q/rows.csv
wget -nc -O distribution_of_wealth_switzerland.csv http://bar-opendata-ch.s3.amazonaws.com/Kanton-ZH/Statistik/Distribution_of_wealth.csv
wget -nc http://bar-opendata-ch.s3.amazonaws.com/Kanton-ZH/Statistik/Wealth_groups.csv
wget -nc http://bar-opendata-ch.s3.amazonaws.com/Kanton-ZH/Statistik/Vermoegensklassen.csv
wget -nc http://bar-opendata-ch.s3.amazonaws.com/Kanton-ZH/Statistik/Steuertarife.csv
wget -nc http://bar-opendata-ch.s3.amazonaws.com/Kanton-ZH/Statistik/Tax_rates.csv
wget -nc -O whitehouse_visits_2014.zip https://www.whitehouse.gov/sites/default/files/disclosures/whitehouse_waves-2014_12.csv_.zip && unzip -o whitehouse_visits_2014.zip && rm -f whitehouse_visits_2014.csv && mv whitehouse_waves-2014_12.csv.csv whitehouse_visits_2014.csv
wget -nc http://bar-opendata-ch.s3.amazonaws.com/ch.bag/Spitalstatistikdateien/qip/2012/qip12_tabdaten.csv
wget -nc http://bar-opendata-ch.s3.amazonaws.com/ch.bar.bar-02/Metadatenbank-Vernehmlassungen-OGD-V1-3.csv
wget -nc https://www.data.gov/app/uploads/2015/08/opendatasites.csv
wget -nc https://github.com/lukasmartinelli/pgfutter/releases/download/v0.1-alpha/csv_sample_distribution_of_wealth_switzerland.csv
wget -nc https://github.com/lukasmartinelli/pgfutter/releases/download/v0.1-alpha/csv_sample_employee_salaries.csv
wget -nc https://github.com/lukasmartinelli/pgfutter/releases/download/v0.1-alpha/csv_sample_local_severe_wheather_warning_systems.csv
wget -nc https://github.com/lukasmartinelli/pgfutter/releases/download/v0.1-alpha/csv_sample_montgomery_crime.csv
wget -nc https://github.com/lukasmartinelli/pgfutter/releases/download/v0.1-alpha/csv_sample_qip12_tabdaten.csv
wget -nc https://github.com/lukasmartinelli/pgfutter/releases/download/v0.1-alpha/csv_sample_residential_permits.csv
wget -nc https://github.com/lukasmartinelli/pgfutter/releases/download/v0.1-alpha/csv_sample_sacramentocrime_jan_2006.csv
wget -nc https://github.com/lukasmartinelli/pgfutter/releases/download/v0.1-alpha/csv_sample_sacramento_realestate_transactions.csv
wget -nc https://github.com/lukasmartinelli/pgfutter/releases/download/v0.1-alpha/csv_sample_sales_jan_2009.csv
wget -nc https://github.com/lukasmartinelli/pgfutter/releases/download/v0.1-alpha/csv_sample_steuertarife.csv
wget -nc https://github.com/lukasmartinelli/pgfutter/releases/download/v0.1-alpha/csv_sample_techcrunch_continental_usa.csv
wget -nc https://github.com/lukasmartinelli/pgfutter/releases/download/v0.1-alpha/csv_sample_vermoegensklassen.csv
wget -nc https://github.com/lukasmartinelli/pgfutter/releases/download/v0.1-alpha/csv_sample_metadatenbank.csv
cd $CWD
}

download_csv_samples
download_json_samples
download_csv_samples
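
For a local run the script is invoked directly; wget -nc skips files that are already present, and all samples land in a samples/ directory under the current working directory (a minimal usage sketch):

./download_samples.sh
ls samples/  # e.g. csv_sample_employee_salaries.csv, json_sample_2015-01-01-15.json
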
71 changes: 32 additions & 39 deletions test.sh
@@ -1,36 +1,31 @@
#!/bin/bash
readonly CWD=$(pwd)
readonly SAMPLES_DIR="$CWD/samples"
readonly DB_USER=${DB_USER:-postgres}
readonly DB_USER="${DB_USER:-postgres}"
readonly DB_NAME="integration_test"
readonly DB_SCHEMA="import" # Use public schema instead of import because of permissions

function recreate_db() {
psql -U ${DB_USER} -c "drop database if exists ${DB_NAME};"
psql -U ${DB_USER} -c "create database ${DB_NAME};"
psql -U "${DB_USER}" -c "drop database if exists ${DB_NAME};"
psql -U "${DB_USER}" -c "create database ${DB_NAME};"
}

function query_counts() {
local table=$1
local counts=$(psql -U ${DB_USER} -d ${DB_NAME} -t -c "select count(*) from ${DB_SCHEMA}.${table}")
local table="$1"
local counts=$(psql -U "${DB_USER}" -d "${DB_NAME}" -t -c "select count(*) from ${DB_SCHEMA}.${table}")
echo "$counts"
}

function query_field_type() {
local table=$1
local data_type=$(psql -U ${DB_USER} -d ${DB_NAME} -t -c "SELECT data_type FROM information_schema.columns WHERE table_schema='${DB_SCHEMA}' AND table_name='${table}'")
local table="$1"
local data_type=$(psql -U "${DB_USER}" -d "${DB_NAME}" -t -c "SELECT data_type FROM information_schema.columns WHERE table_schema='${DB_SCHEMA}' AND table_name='${table}'")
echo "$data_type"
}

function test_readme_csv_sample() {
# test whether readme docs still work
echo "test"
}
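
The stub above only echoes a placeholder. A fleshed-out version might look like the sketch below (hypothetical function name; it reuses the samples, variables and flags already present in this script): import one of the downloaded CSV samples the way the README describes and fail on a non-zero exit code.

function test_readme_csv_sample_sketch() {
local filename="$SAMPLES_DIR/csv_sample_employee_salaries.csv"
pgfutter --schema "$DB_SCHEMA" --db "$DB_NAME" --user "$DB_USER" csv "$filename"
if [ $? -ne 0 ]; then
echo "pgfutter could not import $filename"
exit 300
fi
}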

function import_csv_with_special_delimiter_and_trailing() {
local table="qip12_tabdaten"
local filename="$SAMPLES_DIR/qip12_tabdaten.csv"
pgfutter --schema $DB_SCHEMA --db $DB_NAME --user $DB_USER csv "$filename" --delimiter=";"
local table="csv_sample_qip12_tabdaten"
local filename="$SAMPLES_DIR/csv_sample_qip12_tabdaten.csv"
pgfutter --schema "$DB_SCHEMA" --db "$DB_NAME" --user "$DB_USER" csv "$filename" --delimiter=";"
if [ $? -ne 0 ]; then
echo "pgfutter could not import $filename"
exit 300
@@ -41,18 +36,17 @@ function import_csv_with_special_delimiter_and_trailing() {
}

function import_csv_and_skip_header_row_with_custom_fields() {
local table="qip12_tabdaten"
local filename="$SAMPLES_DIR/qip12_tabdaten.csv"
pgfutter --schema $DB_SCHEMA --db $DB_NAME --user $DB_USER csv "$filename"
local table="csv_sample_qip12_tabdaten"
local filename="$SAMPLES_DIR/csv_sample_qip12_tabdaten.csv"
pgfutter --schema "$DB_SCHEMA" --db "$DB_NAME" --user "$DB_USER" csv "$filename"
if [ $? -eq 0 ]; then
echo "pgfutter should not be able to import $filename"
exit 300
fi
}

function csv_with_wrong_delimiter_should_fail() {
local table="metadatenbank_vernehmlassungen_ogd_v1_3"
local filename="$SAMPLES_DIR/Metadatenbank-Vernehmlassungen-OGD-V1-3.csv"
local filename="$SAMPLES_DIR/csv_sample_metadatenbank.csv"
pgfutter --schema $DB_SCHEMA --db $DB_NAME --user $DB_USER csv "$filename" --delimiter ";" --skip-header --fields "nr;typ_vernehmlassungsgegenstandes;titel_vernehmlassungsverfahrens;federfuhrendes_departement;fundort;adressaten;archivunterlagen;dokumententypen"
if [ $? -eq 0 ]; then
echo "pgfutter should not be able to import $filename"
@@ -63,7 +57,7 @@ function csv_with_wrong_delimiter_should_fail() {
function import_and_test_json() {
local table=$1
local filename=$2
pgfutter --schema $DB_SCHEMA --db $DB_NAME --user $DB_USER json "$filename"
pgfutter --schema "$DB_SCHEMA" --db "$DB_NAME" --user "$DB_USER" json "$filename"
if [ $? -ne 0 ]; then
echo "pgfutter could not import $filename"
exit 300
@@ -77,13 +71,13 @@ function import_and_test_json() {
function import_and_test_json_as_jsonb() {
local table=$1
local filename=$2
pgfutter --schema $DB_SCHEMA --db $DB_NAME --user $DB_USER --jsonb json "$filename"
pgfutter --schema "$DB_SCHEMA" --db "$DB_NAME" --user "$DB_USER" --jsonb json "$filename"
if [ $? -ne 0 ]; then
echo "pgfutter could not import $filename"
exit 300
else
local db_count=$(query_counts $table)
local data_type=$(query_field_type $table)
local db_count=$(query_counts "$table")
local data_type=$(query_field_type "$table")
echo "Imported $(expr $db_count) records into $table as $data_type"
fi
}
@@ -94,7 +88,7 @@ function import_and_test_csv() {
local delimiter=${3:-,}
local general_args=${4:-}

pgfutter $general_args --schema $DB_SCHEMA --db $DB_NAME --user $DB_USER csv "$filename" --delimiter "$delimiter"
pgfutter $general_args --table $table --schema $DB_SCHEMA --db $DB_NAME --user $DB_USER csv "$filename" --delimiter "$delimiter"
if [ $? -ne 0 ]; then
echo "pgfutter could not import $filename"
exit 300
@@ -110,24 +104,23 @@ csv_with_wrong_delimiter_should_fail
import_csv_and_skip_header_row_with_custom_fields
import_csv_with_special_delimiter_and_trailing

import_and_test_json "_2015_01_01_15" "$SAMPLES_DIR/2015-01-01-15.json"
import_and_test_json "json_sample_2015_01_01_15" "$SAMPLES_DIR/json_sample_2015-01-01-15.json"

# We change the type of the data column for this test, so we have to recreate the database
recreate_db
import_and_test_json_as_jsonb "_2015_01_01_15" "$SAMPLES_DIR/2015-01-01-15.json"
import_and_test_json_as_jsonb "json_sample_2015_01_01_15" "$SAMPLES_DIR/json_sample_2015-01-01-15.json"
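
The type switch mentioned in the comment above can be verified with the same query that query_field_type wraps; a sketch assuming the script's default schema and user (the data column created by pgfutter should be reported as json after the plain import and jsonb after the --jsonb import):

psql -U "${DB_USER:-postgres}" -d integration_test -t -c "SELECT data_type FROM information_schema.columns WHERE table_schema='import' AND table_name='json_sample_2015_01_01_15'"
# expected: json for the plain import, jsonb for the --jsonb import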

# File can no longer be downloaded
#import_and_test_csv "local_severe_wheather_warning_systems" "$SAMPLES_DIR/local_severe_wheather_warning_systems.csv"
# CSV file broke and has now invalid number of columns
# import_and_test_csv "montgomery_crime" "$SAMPLES_DIR/montgomery_crime.csv"
#import_and_test_csv "employee_salaries" "$SAMPLES_DIR/employee_salaries.csv"
import_and_test_csv "residential_permits" "$SAMPLES_DIR/residential_permits.csv"
import_and_test_csv "steuertarife" "$SAMPLES_DIR/Steuertarife.csv"
import_and_test_csv "vermoegensklassen" "$SAMPLES_DIR/Vermoegensklassen.csv"
import_and_test_csv "distribution_of_wealth_switzerland" "$SAMPLES_DIR/distribution_of_wealth_switzerland.csv"
# Customer complaints no longer available
# import_and_test_csv "customer_complaints" "$SAMPLES_DIR/customer_complaints.csv"
import_and_test_csv "whitehouse_visits_2014" "$SAMPLES_DIR/whitehouse_visits_2014.csv"
import_and_test_csv "traffic_violations" "$SAMPLES_DIR/traffic_violations.csv"
import_and_test_csv "distribution_of_wealth_switzerland" "$SAMPLES_DIR/csv_sample_distribution_of_wealth_switzerland.csv"
import_and_test_csv "employee_salaries" "$SAMPLES_DIR/csv_sample_employee_salaries.csv"
import_and_test_csv "local_severe_wheather_warning_systems" "$SAMPLES_DIR/csv_sample_local_severe_wheather_warning_systems.csv"
import_and_test_csv "montgomery_crime" "$SAMPLES_DIR/csv_sample_montgomery_crime.csv"
import_and_test_csv "residential_permits" "$SAMPLES_DIR/csv_residential_permits.csv"
import_and_test_csv "sacramentocrime_jan_2006" "$SAMPLES_DIR/csv_sample_sacramentocrime_jan_2006.csv"
import_and_test_csv "sacramento_realestate_transactions" "$SAMPLES_DIR/csv_sample_sacramento_realestate_transactions.csv"
import_and_test_csv "sales_jan_2009" "$SAMPLES_DIR/csv_sample_sales_jan_2009.csv"
import_and_test_csv "steuertarife" "$SAMPLES_DIR/csv_sample_steuertarife.csv"
import_and_test_csv "techcrunch_continental_usa" "$SAMPLES_DIR/csv_sample_techcrunch_continental_usa.csv"
import_and_test_csv "vermoegensklassen" "$SAMPLES_DIR/csv_sample_vermoegensklassen.csv"

recreate_db
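
Put together, the suite expects a reachable local PostgreSQL server and a role matching DB_USER; the integration_test database itself is dropped and re-created by recreate_db. A sketch of the expected invocation, with the samples downloaded first:

./download_samples.sh
DB_USER=postgres ./test.sh  # DB_USER defaults to postgres if unset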
