Skip to content

Commit

Permalink
Add test harness (#57)
Browse files Browse the repository at this point in the history
The `GhcidNg` type copies a given directory to a tempdir and then
launches `ghcid-ng` there. It also spawns an async task to read JSON log
events from the log file.

`GhcidNg` uses thread-local storage to determine which version of `ghc`
to use when launching tests, and will error appropriately if the
thread-local storage is not set. This takes advantage of the fact that
async tests run in the `tokio` "current-thread" runtime by default,
which schedules all tasks in the test on the same thread.

The second part is a proc macro attribute exported as
`#[test_harness::test]`, which rewrites tests so that

1. The tests are async functions which run under the default
`#[tokio::test]` current-thread runtime.
2. Tracing is set up in the tests so that log messages from the
`test-harness` library are visible in the test output.
3. The appropriate thread-local variables are set for each test (this
includes the current GHC version).
4. One test is generated for each GHC version.
5. If tests fail, the relevant `ghcid-ng` logs are saved to a directory
under `target/` and the path is printed at the end of the tests.

Here's a sample test from #44 using this test harness:

```rust
/// Test that `ghcid-ng` can start up and then reload on changes.
#[test]
async fn can_reload() {
    let mut session = GhcidNg::new("tests/data/simple")
        .await
        .expect("ghcid-ng starts");
    session
        .wait_until_ready()
        .await
        .expect("ghcid-ng loads ghci");
    fs::append(session.path("src/MyLib.hs"), "\n\nhello = 1\n")
        .await
        .unwrap();
    session
        .wait_until_reload()
        .await
        .expect("ghcid-ng reloads on changes");
    session
        .get_log(
            Matcher::span_close()
                .in_module("ghcid_ng::ghci")
                .in_spans(["on_action", "reload"]),
        )
        .await
        .expect("ghcid-ng finishes reloading");
}
```

---------

Co-authored-by: Gabriella Gonzalez <gabriella@mercury.com>
  • Loading branch information
9999years and Gabriella439 authored Sep 1, 2023
1 parent 55e616b commit 5c1f1b5
Show file tree
Hide file tree
Showing 13 changed files with 776 additions and 26 deletions.
25 changes: 25 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
members = [
"ghcid-ng",
"test-harness",
"test-harness-macro",
]

resolver = "2"
27 changes: 26 additions & 1 deletion flake.nix
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,22 @@
};
inherit (pkgs) lib;

# GHC versions to include in the environment for integration tests.
ghcVersions = [
"ghc90"
"ghc92"
"ghc94"
"ghc96"
];

ghcPackages = builtins.map (ghcVersion: pkgs.haskell.compiler.${ghcVersion}) ghcVersions;

ghcBuildInputs =
[pkgs.haskellPackages.cabal-install]
++ ghcPackages;

GHC_VERSIONS = builtins.map (drv: drv.version) ghcPackages;

craneLib = crane.lib.${system};

src = lib.cleanSourceWith {
Expand All @@ -64,6 +80,9 @@
pkgs.darwin.apple_sdk.frameworks.CoreServices
];

# Provide GHC versions to use to the integration test suite.
inherit GHC_VERSIONS;

cargoBuildCommand = "cargoWithProfile build --all";
cargoCheckExtraArgs = "--all";
cargoTestExtraArgs = "--all";
Expand All @@ -90,7 +109,10 @@
});
in {
checks = {
ghcid-ng-tests = craneLib.cargoTest commonArgs;
ghcid-ng-tests = craneLib.cargoTest (commonArgs
// {
buildInputs = (commonArgs.buildInputs or []) ++ ghcBuildInputs;
});
ghcid-ng-clippy = craneLib.cargoClippy (commonArgs
// {
cargoClippyExtraArgs = "--all-targets -- --deny warnings";
Expand All @@ -112,6 +134,9 @@
# Make rust-analyzer work
RUST_SRC_PATH = pkgs.rustPlatform.rustLibSrc;

# Provide GHC versions to use to the integration test suite.
inherit GHC_VERSIONS;

# Any dev tools you use in excess of the rust ones
nativeBuildInputs = [
pkgs.rust-analyzer
Expand Down
15 changes: 15 additions & 0 deletions test-harness-macro/Cargo.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
[package]
name = "test-harness-macro"
version = "0.1.0"
edition = "2021"

description = "Test attribute for ghcid-ng"

# Internal test support crate; never published to crates.io.
publish = false

[lib]
# This crate exports procedural macros (`#[test_harness::test]`), so it must
# be compiled as a proc-macro library.
proc-macro = true

[dependencies]
quote = "1.0.33"
# "full" is needed to parse entire function items (`syn::ItemFn`).
syn = { version = "2.0.29", features = ["full"] }
92 changes: 92 additions & 0 deletions test-harness-macro/src/lib.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,92 @@
use proc_macro::TokenStream;

use quote::quote;
use quote::ToTokens;
use syn::parse;
use syn::parse::Parse;
use syn::parse::ParseStream;
use syn::Attribute;
use syn::Block;
use syn::Ident;
use syn::ItemFn;

/// Runs a test asynchronously in the `tokio` current-thread runtime with `tracing` enabled.
///
/// One test is generated for each GHC version listed in the `$GHC_VERSIONS` environment variable
/// at compile-time.
///
/// NOTE(review): `option_env!` is expanded when *this proc-macro crate* is compiled, so
/// `$GHC_VERSIONS` must be set when building `test-harness-macro`, not (only) when building the
/// crates that use the attribute.
#[proc_macro_attribute]
pub fn test(_attr: TokenStream, item: TokenStream) -> TokenStream {
    // Parse annotated function
    let mut function: ItemFn = parse(item).expect("Could not parse item as function");

    // Add attributes to run the test in the `tokio` current-thread runtime and enable tracing.
    // `non_snake_case` is allowed because generated test names embed a GHC version suffix
    // like `_902` (see `make_test_fn`).
    function.attrs.extend(
        parse::<Attributes>(
            quote! {
                #[tokio::test]
                #[tracing_test::traced_test]
                #[allow(non_snake_case)]
            }
            .into(),
        )
        .expect("Could not parse quoted attributes")
        .0,
    );

    // Panicking here surfaces as a compile error at the macro use site, which is the
    // intended failure mode when the environment is misconfigured.
    let ghc_versions = match option_env!("GHC_VERSIONS") {
        None => {
            panic!("`$GHC_VERSIONS` should be set to a list of GHC versions to run tests under, separated by spaces, like `9.0.2 9.2.8 9.4.6 9.6.2`.");
        }
        Some(versions) => versions.split_ascii_whitespace().collect::<Vec<_>>(),
    };

    // Generate functions for each GHC version we want to test.
    let mut ret = TokenStream::new();
    for ghc_version in ghc_versions {
        ret.extend::<TokenStream>(
            make_test_fn(function.clone(), ghc_version)
                .to_token_stream()
                .into(),
        );
    }
    ret
}

/// A run of outer attributes (`#[...]`), newtype-wrapped so we can parse a
/// `quote!`-generated token stream with `syn::parse`.
struct Attributes(Vec<Attribute>);

impl Parse for Attributes {
    fn parse(input: ParseStream) -> syn::Result<Self> {
        let attrs = Attribute::parse_outer(input)?;
        Ok(Attributes(attrs))
    }
}

/// Rewrites a test function for a single GHC version.
///
/// Renames the function by appending the GHC version (with dots stripped, e.g. `9.0.2` -> `_902`)
/// and wraps its body in `::test_harness::internal::wrap_test`, which performs per-test
/// startup/cleanup and receives the GHC version, test name, and Cargo's test tempdir.
fn make_test_fn(mut function: ItemFn, ghc_version: &str) -> ItemFn {
    let ghc_version_ident = ghc_version.replace('.', "");
    // Moves `stmts` out of the boxed block; the block is fully re-assigned below,
    // so the partial move is fine.
    let stmts = function.block.stmts;
    let test_name_base = function.sig.ident.to_string();
    let test_name = format!("{test_name_base}_{ghc_version_ident}");
    function.sig.ident = Ident::new(&test_name, function.sig.ident.span());

    // Wrap the test code in startup/cleanup code.
    // Note: `env!("CARGO_TARGET_TMPDIR")` is expanded in the *calling* crate, where Cargo
    // sets it for integration tests.
    let new_body = parse::<Block>(
        quote! {
            {
                ::test_harness::internal::wrap_test(
                    async {
                        #(#stmts);*
                    },
                    #ghc_version,
                    #test_name,
                    env!("CARGO_TARGET_TMPDIR"),
                ).await;
            }
        }
        .into(),
    )
    .expect("Could not parse function body");

    // Replace function body
    *function.block = new_body;

    function
}
5 changes: 5 additions & 0 deletions test-harness/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,15 @@ publish = false

[dependencies]
backoff = { version = "0.4.0", default-features = false }
fs_extra = "1.3.0"
itertools = "0.11.0"
miette = { version = "5.9.0", features = ["fancy"] }
nix = { version = "0.26.2", default_features = false, features = ["process"] }
regex = "1.9.4"
serde = { version = "1.0.186", features = ["derive"] }
serde_json = "1.0.105"
tempfile = "3.8.0"
test-harness-macro = { path = "../test-harness-macro" }
test_bin = "0.4.0"
tokio = { version = "1.28.2", features = ["full", "tracing"] }
tracing = "0.1.37"
102 changes: 94 additions & 8 deletions test-harness/src/fs.rs
Original file line number Diff line number Diff line change
@@ -1,18 +1,25 @@
use std::fmt::Display;
//! Filesystem utilities for writing integration tests for `ghcid-ng`.

use std::fmt::Debug;
use std::path::Path;
use std::time::Duration;

use backoff::backoff::Backoff;
use backoff::ExponentialBackoff;
use miette::miette;
use miette::Context;
use miette::IntoDiagnostic;
use tokio::fs::File;
use tokio::fs::OpenOptions;
use tokio::io::AsyncWriteExt;

/// Touch a path.
pub async fn touch(path: impl AsRef<Path>) -> miette::Result<()> {
#[tracing::instrument]
pub async fn touch(path: impl AsRef<Path> + Debug) -> miette::Result<()> {
let path = path.as_ref();
if let Some(parent) = path.parent() {
create_dir(parent).await?;
}
OpenOptions::new()
.create(true)
.write(true)
Expand All @@ -23,24 +30,36 @@ pub async fn touch(path: impl AsRef<Path>) -> miette::Result<()> {
.map(|_| ())
}

/// Write some data to a path, replacing its previous contents.
///
/// Any missing parent directories are created first.
#[tracing::instrument(skip(data))]
pub async fn write(path: impl AsRef<Path> + Debug, data: impl AsRef<[u8]>) -> miette::Result<()> {
    let path = path.as_ref();
    match path.parent() {
        Some(parent) => create_dir(parent).await?,
        None => {}
    }
    let result = tokio::fs::write(path, data).await;
    result
        .into_diagnostic()
        .wrap_err_with(|| format!("Failed to write {path:?}"))
}

/// Append some data to a path.
pub async fn append(path: impl AsRef<Path>, data: impl Display) -> miette::Result<()> {
#[tracing::instrument(skip(data))]
pub async fn append(path: impl AsRef<Path> + Debug, data: impl AsRef<[u8]>) -> miette::Result<()> {
let path = path.as_ref();
let mut file = OpenOptions::new()
.append(true)
.open(path)
.await
.into_diagnostic()
.wrap_err_with(|| format!("Failed to open {path:?}"))?;
file.write_all(data.to_string().as_bytes())
.await
.into_diagnostic()?;
Ok(())
file.write_all(data.as_ref()).await.into_diagnostic()
}

/// Wait for a path to be created.
///
/// This should generaly be run under a [`tokio::time::timeout`].
/// This should generally be run under a [`tokio::time::timeout`].
#[tracing::instrument]
pub async fn wait_for_path(path: &Path) {
let mut backoff = ExponentialBackoff {
max_interval: Duration::from_secs(1),
Expand All @@ -54,3 +73,70 @@ pub async fn wait_for_path(path: &Path) {
tokio::time::sleep(duration).await;
}
}

/// Read a path into a string.
#[tracing::instrument]
pub async fn read(path: impl AsRef<Path> + Debug) -> miette::Result<String> {
    let path = path.as_ref();
    let contents = tokio::fs::read_to_string(path).await;
    contents
        .into_diagnostic()
        .wrap_err_with(|| format!("Failed to read {path:?}"))
}

/// Read from a path, run a string replacement on its contents, and then write the result.
///
/// Errors if the replacement leaves the contents unchanged, which in a test almost always
/// means the fixture file and the test have drifted apart.
#[tracing::instrument(skip(from, to))]
pub async fn replace(
    path: impl AsRef<Path> + Debug,
    from: impl AsRef<str>,
    to: impl AsRef<str>,
) -> miette::Result<()> {
    let path = path.as_ref();
    let before = read(path).await?;
    let after = before.replace(from.as_ref(), to.as_ref());
    if after == before {
        return Err(miette!(
            "Replacing substring in file didn't make any changes"
        ));
    }
    write(path, after).await
}

/// Creates a directory and all of its parent components.
#[tracing::instrument]
pub async fn create_dir(path: impl AsRef<Path> + Debug) -> miette::Result<()> {
    let path = path.as_ref();
    let result = tokio::fs::create_dir_all(path).await;
    result
        .into_diagnostic()
        .wrap_err_with(|| format!("Failed to create directory {path:?}"))
}

/// Remove the file or directory at the given path.
///
/// Directories are removed recursively; be careful.
///
/// Symlinks are removed as symlinks (the target is untouched).
#[tracing::instrument]
pub async fn remove(path: impl AsRef<Path> + Debug) -> miette::Result<()> {
    let path = path.as_ref();
    // Use async, non-following `symlink_metadata` rather than `Path::is_dir`:
    // `is_dir` is a blocking call inside an async fn and follows symlinks, so a
    // symlink-to-directory would be routed to `remove_dir_all`, which fails on a
    // symlink instead of unlinking it.
    let metadata = tokio::fs::symlink_metadata(path)
        .await
        .into_diagnostic()
        .wrap_err_with(|| format!("Failed to read metadata for {path:?}"))?;
    if metadata.is_dir() {
        tokio::fs::remove_dir_all(path).await
    } else {
        tokio::fs::remove_file(path).await
    }
    .into_diagnostic()
    .wrap_err_with(|| format!("Failed to remove {path:?}"))
}

/// Move the path at `from` to the path at `to`.
#[tracing::instrument]
pub async fn rename(
    from: impl AsRef<Path> + Debug,
    to: impl AsRef<Path> + Debug,
) -> miette::Result<()> {
    let (from, to) = (from.as_ref(), to.as_ref());
    let result = tokio::fs::rename(from, to).await;
    result
        .into_diagnostic()
        .wrap_err_with(|| format!("Failed to move {from:?} to {to:?}"))
}
Loading

0 comments on commit 5c1f1b5

Please sign in to comment.