Skip to content

Commit

Permalink
flake.lock: Update (#1545)
Browse files Browse the repository at this point in the history
* flake.lock: Update

Flake lock file updates:

• Updated input 'crane':
    'github:ipetkov/crane/7809d369710abb17767b624f9e72b500373580bc' (2023-08-12)
  → 'github:ipetkov/crane/ef5d11e3c2e5b3924eb0309dba2e1fea2d9062ae' (2023-08-23)
• Updated input 'flake-utils':
    'github:numtide/flake-utils/919d646de7be200f3bf08cb76ae1f09402b6f9b4' (2023-07-11)
  → 'github:numtide/flake-utils/f9e7cf818399d17d347f847525c5a5a8032e4e44' (2023-08-23)
• Updated input 'nixpkgs':
    'github:NixOS/nixpkgs/d680ded26da5cf104dd2735a51e88d2d8f487b4d' (2023-08-19)
  → 'github:NixOS/nixpkgs/5690c4271f2998c304a45c91a0aeb8fb69feaea7' (2023-08-25)
• Updated input 'rust-overlay':
    'github:oxalica/rust-overlay/598b2f04ed252eb5808b108d7a10084c0c548753' (2023-08-19)
  → 'github:oxalica/rust-overlay/e90223633068a44f0fb62374e0fa360ccc987292' (2023-08-26)
• Updated input 'topiary':
    'github:tweag/topiary/e30432a29b6d3bb606cbb09b022a5ad0598810dc' (2023-07-31)
  → 'github:tweag/topiary/577fe940aa0b9dae478b463bddd1238e20f86e3a' (2023-08-24)

* Run `cargo fmt`

* Fix new clippy lints

---------

Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
Co-authored-by: Viktor Kleen <viktor.kleen@tweag.io>
  • Loading branch information
3 people authored Aug 28, 2023
1 parent 7299264 commit 182568a
Show file tree
Hide file tree
Showing 17 changed files with 123 additions and 82 deletions.
4 changes: 2 additions & 2 deletions cli/tests/integration/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,8 @@ fn test_creates_output_files(command: &[&str]) {
.spawn()
.expect("Nickel should be runnable");
let Some(mut stdin) = nickel.stdin.take() else {
panic!("couldn't retrieve stdin handle to Nickel")
};
panic!("couldn't retrieve stdin handle to Nickel")
};
stdin
.write_all(b"{foo=1}")
.expect("writing into Nickel stdin should work");
Expand Down
2 changes: 1 addition & 1 deletion core/src/cache.rs
Original file line number Diff line number Diff line change
Expand Up @@ -962,7 +962,7 @@ impl Cache {
/// Returns true if a particular file id represents a Nickel standard library file, false otherwise.
pub fn is_stdlib_module(&self, file: FileId) -> bool {
let Some(table) = &self.stdlib_ids else {
return false
return false;
};
table.values().any(|stdlib_file| *stdlib_file == file)
}
Expand Down
8 changes: 2 additions & 6 deletions core/src/eval/merge.rs
Original file line number Diff line number Diff line change
Expand Up @@ -405,11 +405,7 @@ fn merge_fields<'a, C: Cache, I: DoubleEndedIterator<Item = &'a LocIdent> + Clon
};

let mut pending_contracts = pending_contracts1.revert_closurize(cache, env_final, env1.clone());
pending_contracts.extend(
pending_contracts2
.revert_closurize(cache, env_final, env2.clone())
.into_iter(),
);
pending_contracts.extend(pending_contracts2.revert_closurize(cache, env_final, env2.clone()));

// Annotations aren't used anymore at runtime. We still accumulate them to answer metadata
// queries, but we don't need to e.g. closurize or revert them.
Expand All @@ -429,7 +425,7 @@ fn merge_fields<'a, C: Cache, I: DoubleEndedIterator<Item = &'a LocIdent> + Clon
let contracts: Vec<_> = annot1
.contracts
.into_iter()
.chain(annot2.contracts.into_iter())
.chain(annot2.contracts)
.collect();

let metadata = FieldMetadata {
Expand Down
2 changes: 1 addition & 1 deletion core/src/eval/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -237,7 +237,7 @@ impl<R: ImportResolver, C: Cache> VirtualMachine<R, C> {
let mut path = path.0.into_iter().peekable();

let Some(mut prev_id) = path.peek().cloned() else {
return Ok(field)
return Ok(field);
};

for id in path {
Expand Down
76 changes: 62 additions & 14 deletions core/src/eval/operation.rs
Original file line number Diff line number Diff line change
Expand Up @@ -563,7 +563,7 @@ impl<R: ImportResolver, C: Cache> VirtualMachine<R, C> {
.ok_or_else(|| EvalError::NotEnoughArgs(2, String::from("generate"), pos_op))?;

let Term::Num(ref n) = *t else {
return Err(mk_type_error!("generate", "Number"))
return Err(mk_type_error!("generate", "Number"));
};

if n < &Number::ZERO {
Expand All @@ -576,12 +576,12 @@ impl<R: ImportResolver, C: Cache> VirtualMachine<R, C> {
}

let Ok(n_int) = u32::try_from(n) else {
return Err(EvalError::Other(
return Err(EvalError::Other(
format!(
"generate expects its first argument to be an integer smaller than {}, got {n}", u32::MAX,
),
pos_op,
))
));
};

let mut shared_env = Environment::new();
Expand Down Expand Up @@ -1268,11 +1268,11 @@ impl<R: ImportResolver, C: Cache> VirtualMachine<R, C> {
}
BinaryOp::Modulo() => {
let Term::Num(ref n1) = *t1 else {
return Err(mk_type_error!("(%)", "Number", 1, t1, pos1))
return Err(mk_type_error!("(%)", "Number", 1, t1, pos1));
};

let Term::Num(ref n2) = *t2 else {
return Err(mk_type_error!("(%)", "Number", 2, t2, pos2))
return Err(mk_type_error!("(%)", "Number", 2, t2, pos2));
};

if n2 == &Number::ZERO {
Expand Down Expand Up @@ -1779,7 +1779,7 @@ impl<R: ImportResolver, C: Cache> VirtualMachine<R, C> {
BinaryOp::ArrayElemAt() => match (&*t1, &*t2) {
(Term::Array(ts, attrs), Term::Num(n)) => {
let Ok(n_as_usize) = usize::try_from(n) else {
return Err(EvalError::Other(format!("elem_at expects its second argument to be a positive integer smaller than {}, got {n}", usize::MAX), pos_op))
return Err(EvalError::Other(format!("elem_at expects its second argument to be a positive integer smaller than {}, got {n}", usize::MAX), pos_op));
};

if n_as_usize >= ts.len() {
Expand Down Expand Up @@ -2078,11 +2078,23 @@ impl<R: ImportResolver, C: Cache> VirtualMachine<R, C> {
let t2 = t2.into_owned();

let Term::Str(message) = t1 else {
return Err(mk_type_error!("label_with_message", "String", 1, t1.into(), pos1))
return Err(mk_type_error!(
"label_with_message",
"String",
1,
t1.into(),
pos1
));
};

let Term::Lbl(label) = t2 else {
return Err(mk_type_error!("label_with_message", "String", 2, t2.into(), pos2))
return Err(mk_type_error!(
"label_with_message",
"String",
2,
t2.into(),
pos2
));
};

Ok(Closure::atomic_closure(RichTerm::new(
Expand All @@ -2108,7 +2120,13 @@ impl<R: ImportResolver, C: Cache> VirtualMachine<R, C> {
let t1 = t1_subst.term.into_owned();

let Term::Array(array, _) = t1 else {
return Err(mk_type_error!("label_with_notes", "Array String", 1, t1.into(), pos1));
return Err(mk_type_error!(
"label_with_notes",
"Array String",
1,
t1.into(),
pos1
));
};

let notes = array
Expand All @@ -2131,7 +2149,13 @@ impl<R: ImportResolver, C: Cache> VirtualMachine<R, C> {
.collect::<Result<Vec<_>, _>>()?;

let Term::Lbl(label) = t2 else {
return Err(mk_type_error!("label_with_notes", "Label", 2, t2.into(), pos2))
return Err(mk_type_error!(
"label_with_notes",
"Label",
2,
t2.into(),
pos2
));
};

Ok(Closure::atomic_closure(RichTerm::new(
Expand All @@ -2144,11 +2168,23 @@ impl<R: ImportResolver, C: Cache> VirtualMachine<R, C> {
let t2 = t2.into_owned();

let Term::Str(note) = t1 else {
return Err(mk_type_error!("label_append_note", "String", 1, t1.into(), pos1));
return Err(mk_type_error!(
"label_append_note",
"String",
1,
t1.into(),
pos1
));
};

let Term::Lbl(label) = t2 else {
return Err(mk_type_error!("label_append_note", "Label", 2, t2.into(), pos2));
return Err(mk_type_error!(
"label_append_note",
"Label",
2,
t2.into(),
pos2
));
};

Ok(Closure::atomic_closure(RichTerm::new(
Expand All @@ -2161,11 +2197,23 @@ impl<R: ImportResolver, C: Cache> VirtualMachine<R, C> {
let t2 = t2.into_owned();

let Term::SealingKey(key) = t1 else {
return Err(mk_type_error!("lookup_type_variable", "SealingKey", 1, t1.into(), pos1));
return Err(mk_type_error!(
"lookup_type_variable",
"SealingKey",
1,
t1.into(),
pos1
));
};

let Term::Lbl(label) = t2 else {
return Err(mk_type_error!("lookup_type_variable", "Label", 2, t2.into(), pos2));
return Err(mk_type_error!(
"lookup_type_variable",
"Label",
2,
t2.into(),
pos2
));
};

Ok(Closure::atomic_closure(RichTerm::new(
Expand Down
2 changes: 1 addition & 1 deletion core/src/parser/tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -513,7 +513,7 @@ fn ascii_escape() {
#[test]
fn multiline_str_escape() {
assert_eq!(
parse_without_pos(r##"m%"%Hel%%lo%%%"%"##),
parse_without_pos(r#"m%"%Hel%%lo%%%"%"#),
mk_single_chunk("%Hel%%lo%%%"),
);
}
Expand Down
21 changes: 3 additions & 18 deletions core/src/parser/utils.rs
Original file line number Diff line number Diff line change
Expand Up @@ -345,8 +345,8 @@ impl Combine for TypeAnnotation {
let contracts = left
.contracts
.into_iter()
.chain(leftover.into_iter())
.chain(right.contracts.into_iter())
.chain(leftover)
.chain(right.contracts)
.collect();

TypeAnnotation { typ, contracts }
Expand Down Expand Up @@ -451,22 +451,7 @@ impl From<FieldMetadata> for FieldExtAnnot {

/// Turn dynamic accesses using literal chunks only into static accesses
pub fn mk_access(access: RichTerm, root: RichTerm) -> RichTerm {
let label = match *access.term {
Term::StrChunks(ref chunks) => {
chunks
.iter()
.fold(Some(String::new()), |acc, next| match (acc, next) {
(Some(mut acc), StrChunk::Literal(lit)) => {
acc.push_str(lit);
Some(acc)
}
_ => None,
})
}
_ => None,
};

if let Some(label) = label {
if let Some(label) = access.as_ref().try_str_chunk_as_static_str() {
mk_term::op1(
UnaryOp::StaticAccess(LocIdent::new_with_pos(label, access.pos)),
root,
Expand Down
4 changes: 2 additions & 2 deletions core/src/serialize.rs
Original file line number Diff line number Diff line change
Expand Up @@ -341,7 +341,7 @@ mod tests {
assert_json_eq("null", null);

assert_json_eq("if true then false else true", false);
assert_json_eq(r##""Hello, %{"world"}!""##, "Hello, world!");
assert_json_eq(r#""Hello, %{"world"}!""#, "Hello, world!");
assert_json_eq("'foo", "foo");
}

Expand All @@ -350,7 +350,7 @@ mod tests {
assert_json_eq("[]", json!([]));
assert_json_eq("[null, (1+1), (2+2), (3+3)]", json!([null, 2, 4, 6]));
assert_json_eq(
r##"['a, ("b" ++ "c"), "d%{"e"}f", "g"]"##,
r#"['a, ("b" ++ "c"), "d%{"e"}f", "g"]"#,
json!(["a", "bc", "def", "g"]),
);
assert_json_eq(
Expand Down
4 changes: 2 additions & 2 deletions core/src/term/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -818,8 +818,8 @@ impl Term {
Term::StrChunks(chunks) => {
chunks
.iter()
.fold(Some(String::new()), |acc, next| match (acc, next) {
(Some(mut acc), StrChunk::Literal(lit)) => {
.try_fold(String::new(), |mut acc, next| match next {
StrChunk::Literal(lit) => {
acc.push_str(lit);
Some(acc)
}
Expand Down
2 changes: 1 addition & 1 deletion core/src/term/string.rs
Original file line number Diff line number Diff line change
Expand Up @@ -127,7 +127,7 @@ impl NickelString {
}
});

Array::from_iter(result.into_iter())
Array::from_iter(result)
}
}

Expand Down
24 changes: 18 additions & 6 deletions core/src/typecheck/destructuring.rs
Original file line number Diff line number Diff line change
Expand Up @@ -170,10 +170,16 @@ pub fn inject_pattern_variables(
// ```
//
// As such, we don't need to add it to the environment.
let UnifType::Concrete { typ: TypeF::Record(rs), .. } = ty else {
unreachable!("since this is a destructured record, \
let UnifType::Concrete {
typ: TypeF::Record(rs),
..
} = ty
else {
unreachable!(
"since this is a destructured record, \
its type was constructed by build_pattern_ty, \
which means it must be a concrete record type")
which means it must be a concrete record type"
)
};
inject_pattern_variables(state, env, pat, rs)
}
Expand All @@ -182,10 +188,16 @@ pub fn inject_pattern_variables(

env.insert(alias.ident(), ty.clone());

let UnifType::Concrete{ typ: TypeF::Record(rs), .. } = ty else {
unreachable!("since this is a destructured record, \
let UnifType::Concrete {
typ: TypeF::Record(rs),
..
} = ty
else {
unreachable!(
"since this is a destructured record, \
its type was constructed by build_pattern_ty, \
which means it must be a concrete record type")
which means it must be a concrete record type"
)
};
inject_pattern_variables(state, env, pattern, rs)
}
Expand Down
2 changes: 1 addition & 1 deletion core/src/typecheck/unif.rs
Original file line number Diff line number Diff line change
Expand Up @@ -930,7 +930,7 @@ pub fn constr_unify_rrows(
} => constr_unify_rrows(constr, var_id, tail),
UnifRecordRows::UnifVar { id, .. } if *id != var_id => {
if let Some(u_constr) = constr.get_mut(id) {
u_constr.extend(p_constr.into_iter());
u_constr.extend(p_constr);
} else {
constr.insert(*id, p_constr);
}
Expand Down
Loading

0 comments on commit 182568a

Please sign in to comment.