Skip to content

Commit

Permalink
download_sysext: fix issues of reading into buffer
Browse files Browse the repository at this point in the history
Fix bugs when reading from a File into a buffer. We first create a
BufReader for reading from the file, and pass that into the parsing
functions. That makes the code much easier to maintain than passing
the File itself, and lets us read data without having to reopen the
file and track read positions.

We need to get the length of the header and the data, reading from the
beginning of the stream, covering the delta update header as well as
the manifest. We then pass that length to hash_on_disk to calculate the
hash without having to read the whole data into memory. With that,
signature verification works correctly.

Also introduce get_data_blobs() to read only the data, without the
header and the manifest.
  • Loading branch information
dongsupark committed Nov 14, 2023
1 parent b64f479 commit 3a986a7
Show file tree
Hide file tree
Showing 8 changed files with 250 additions and 47 deletions.
44 changes: 34 additions & 10 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 2 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,8 @@ log = "0.4"
argh = "0.1"
globset = "0.4"
protobuf = "3.2.0"
bzip2 = "0.4.4"
tempfile = "3.8.1"

[dependencies.hard-xml]
path = "vendor/hard-xml"
Expand Down
8 changes: 8 additions & 0 deletions omaha/src/hash_types.rs
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,14 @@ impl<T: HashAlgo> str::FromStr for Hash<T> {
}
}

/// Conversion into the raw digest bytes, so callers can treat a
/// `Hash<T>` as an opaque byte vector (e.g. for signature checks).
///
/// Implemented as `From` (not a hand-written `Into`): the standard
/// blanket impl then provides `Hash<T>: Into<Vec<u8>>` for free,
/// so existing `.into()` call sites keep working.
impl<T: HashAlgo> From<Hash<T>> for Vec<u8> {
    fn from(hash: Hash<T>) -> Self {
        // The digest is already a contiguous byte slice; one copy suffices.
        hash.0.as_ref().to_vec()
    }
}

impl<T: HashAlgo> Hash<T> {
#[inline]
fn decode<D: Decoder>(hash: &str) -> Result<Self, CodecError> {
Expand Down
104 changes: 87 additions & 17 deletions src/bin/download_sysext.rs
Original file line number Diff line number Diff line change
@@ -1,9 +1,11 @@
use std::error::Error;
use std::borrow::Cow;
use std::path::Path;
use std::path::{Path, PathBuf};
use std::fs::File;
use std::fs;
use std::io;
use std::io::{Read, Write};
use std::io::BufReader;

#[macro_use]
extern crate log;
Expand Down Expand Up @@ -36,13 +38,49 @@ struct Package<'a> {

impl<'a> Package<'a> {
#[rustfmt::skip]
fn hash_on_disk(&mut self, path: &Path) -> Result<omaha::Hash<omaha::Sha256>, Box<dyn Error>> {
// Return Sha256 hash of data in the given path.
// If maxlen is 0, a simple read to the end of the file.
// If maxlen > 0, read only until the given length.
fn hash_on_disk(&mut self, path: &Path, maxlen: usize) -> Result<omaha::Hash<omaha::Sha256>, Box<dyn Error>> {
use sha2::{Sha256, Digest};

let mut file = File::open(path)?;
let file = File::open(path)?;
let mut hasher = Sha256::new();

io::copy(&mut file, &mut hasher)?;
let filelen = file.metadata().unwrap().len() as usize;

let mut maxlen_to_read: usize;
if filelen < maxlen || maxlen == 0{
maxlen_to_read = filelen;
} else {
maxlen_to_read = maxlen;
}

const CHUNKLEN: usize = 104857600; // 100M

let mut chunklen: usize;
if maxlen_to_read < CHUNKLEN {
chunklen = maxlen_to_read;
} else {
chunklen = CHUNKLEN;
}

let mut databuf = vec![0u8; chunklen];
let mut freader = BufReader::new(file);

while maxlen_to_read > 0 {
freader.read_exact(&mut databuf)?;

if maxlen_to_read < CHUNKLEN {
chunklen = maxlen_to_read;
} else {
chunklen = CHUNKLEN;
}

maxlen_to_read -= chunklen;

hasher.write_all(&databuf)?;
}

Ok(omaha::Hash::from_bytes(
hasher.finalize().into()
Expand Down Expand Up @@ -75,7 +113,7 @@ impl<'a> Package<'a> {

if size_on_disk == expected_size {
info!("{}: download complete, checking hash...", path.display());
let hash = self.hash_on_disk(&path)?;
let hash = self.hash_on_disk(&path, 0)?;
if self.verify_checksum(hash) {
info!("{}: good hash, will continue without re-download", path.display());
} else {
Expand Down Expand Up @@ -120,20 +158,47 @@ impl<'a> Package<'a> {
}
}

fn verify_signature_on_disk(&mut self, from_path: &Path, pubkey_path: &str) -> Result<(), Box<dyn Error>> {
fn verify_signature_on_disk(&mut self, from_path: &Path, pubkey_path: &str) -> Result<PathBuf, Box<dyn Error>> {
let upfile = File::open(from_path)?;

// create a BufReader to pass down to parsing functions.
let upfreader = &mut BufReader::new(upfile);

// Read update payload from file, read delta update header from the payload.
let res_data = fs::read_to_string(from_path);
let header = delta_update::read_delta_update_header(upfreader)?;

let header = delta_update::read_delta_update_header(&upfile)?;
let mut delta_archive_manifest = delta_update::get_manifest_bytes(upfreader, &header)?;

// Extract signature from header.
let sigbytes = delta_update::get_signatures_bytes(&upfile, &header)?;
let sigbytes = delta_update::get_signatures_bytes(upfreader, &header, &mut delta_archive_manifest)?;

// tmp dir == "/var/tmp/outdir/.tmp"
let tmpdirpathbuf = from_path.parent().unwrap().parent().unwrap().join(".tmp");
let tmpdir = tmpdirpathbuf.as_path();
let datablobspath = tmpdir.join("ue_data_blobs");

// Get length of header and data
let header_data_length = delta_update::get_header_data_length(&header, &delta_archive_manifest);
let hdhash = self.hash_on_disk(from_path, header_data_length)?;
let hdhashvec: Vec<u8> = hdhash.into();

// Extract data blobs into file path.
delta_update::get_data_blobs(upfreader, &header, &delta_archive_manifest, datablobspath.as_path())?;

// Parse signature data from the signature containing data, version, special fields.
let _sigdata = match delta_update::parse_signature_data(res_data.unwrap().as_bytes(), &sigbytes, pubkey_path) {
Some(data) => data,
// Check for hash of data blobs with new_partition_info hash.
let pinfo_hash = match &delta_archive_manifest.new_partition_info.hash {
Some(hash) => hash,
None => return Err("unable to parse signature data".into()),
};

let datahash = self.hash_on_disk(datablobspath.as_path(), 0)?;
if datahash != omaha::Hash::from_bytes(pinfo_hash.as_slice()[..].try_into().unwrap_or_default()) {
return Err("data hash mismatch with new_partition_info hash".into());
}

// Parse signature data from sig blobs, data blobs, public key.
match delta_update::parse_signature_data(&sigbytes, hdhashvec.as_slice(), pubkey_path) {
Some(_) => (),
_ => {
self.status = PackageStatus::BadSignature;
return Err("unable to parse signature data".into());
Expand All @@ -143,7 +208,7 @@ impl<'a> Package<'a> {
println!("Parsed and verified signature data from file {:?}", from_path);

self.status = PackageStatus::Verified;
Ok(())
Ok(datablobspath)
}
}

Expand Down Expand Up @@ -249,7 +314,9 @@ async fn main() -> Result<(), Box<dyn Error>> {
}

let unverified_dir = output_dir.join(".unverified");
let temp_dir = output_dir.join(".tmp");
fs::create_dir_all(&unverified_dir)?;
fs::create_dir_all(&temp_dir)?;

////
// parse response
Expand All @@ -272,16 +339,19 @@ async fn main() -> Result<(), Box<dyn Error>> {
pkg.download(&unverified_dir, &client).await?;

let pkg_unverified = unverified_dir.join(&*pkg.name);
let pkg_verified = output_dir.join(&*pkg.name);
let pkg_verified = output_dir.join(pkg_unverified.with_extension("raw").file_name().unwrap_or_default());

match pkg.verify_signature_on_disk(&pkg_unverified, &args.pubkey_file) {
Ok(_) => {
// move the verified file back from unverified_dir to output_dir
fs::rename(&pkg_unverified, &pkg_verified)?;
Ok(datablobspath) => {
// write extracted data into the final data.
fs::rename(datablobspath, pkg_verified.clone())?;
debug!("data blobs written into file {:?}", pkg_verified);
}
_ => return Err(format!("unable to verify signature \"{}\"", pkg.name).into()),
};
}

fs::remove_dir_all(temp_dir)?;

Ok(())
}
44 changes: 38 additions & 6 deletions test/crau_verify.rs
Original file line number Diff line number Diff line change
@@ -1,8 +1,12 @@
use std::io::Write;
use std::io;
use std::io::{BufReader, Write};
use std::error::Error;
use std::fs;
use std::fs::File;
use std::path::Path;
use tempfile;

use update_format_crau::delta_update;
use update_format_crau::{delta_update, proto};

use argh::FromArgs;

Expand All @@ -20,6 +24,17 @@ struct Args {
sig_path: String,
}

// Compute the Sha256 hash of the entire file at `path`.
fn hash_on_disk(path: &Path) -> Result<omaha::Hash<omaha::Sha256>, Box<dyn Error>> {
    use sha2::{Digest, Sha256};

    // Stream the file straight into the hasher; `Sha256` implements
    // `io::Write`, so `io::copy` feeds it without buffering the whole
    // file in memory.
    let mut digest = Sha256::new();
    io::copy(&mut File::open(path)?, &mut digest)?;

    Ok(omaha::Hash::from_bytes(digest.finalize().into()))
}

fn main() -> Result<(), Box<dyn Error>> {
let args: Args = argh::from_env();

Expand All @@ -28,15 +43,32 @@ fn main() -> Result<(), Box<dyn Error>> {

// Read update payload from srcpath, read delta update header from the payload.
let upfile = fs::File::open(srcpath.clone())?;
let header = delta_update::read_delta_update_header(&upfile)?;

let freader = &mut BufReader::new(upfile);
let header = delta_update::read_delta_update_header(freader)?;

let mut delta_archive_manifest: proto::DeltaArchiveManifest = Default::default();

// Extract signature from header.
let sigbytes = delta_update::get_signatures_bytes(&upfile, &header)?;
let sigbytes = delta_update::get_signatures_bytes(freader, &header, &mut delta_archive_manifest)?;

// Parse signature data from the signature containing data, version, special fields.
let tmpdir = tempfile::tempdir()?.into_path();
fs::create_dir_all(tmpdir.clone())?;

let headerdatapath = tmpdir.join("ue_header_data");

let hdhash = hash_on_disk(headerdatapath.as_path())?;
let hdhashvec: Vec<u8> = hdhash.into();

// Get length of header and data
let datablobspath = tmpdir.join("ue_data_blobs");

const TESTDATA: &str = "test data for verifying signature";
// Extract data blobs into file path.
delta_update::get_data_blobs(freader, &header, &delta_archive_manifest, datablobspath.as_path())?;

// Parse signature data from the signature containing data, version, special fields.
let sigdata = match delta_update::parse_signature_data(TESTDATA.as_bytes(), &sigbytes, PUBKEY_FILE) {
let sigdata = match delta_update::parse_signature_data(&sigbytes, hdhashvec.as_slice(), PUBKEY_FILE) {
Some(data) => Box::leak(data),
_ => return Err("unable to parse signature data".into()),
};
Expand Down
1 change: 1 addition & 0 deletions update-format-crau/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
bzip2 = "0.4.4"
log = "0.4.19"
protobuf = "3"
rsa = { version = "0.9.2", features = ["sha2"] }
Loading

0 comments on commit 3a986a7

Please sign in to comment.