Use split_first_chunk as get_split impl
nickbabcock committed Apr 2, 2024
1 parent c9e9a87 commit 3e0d32c
Showing 3 changed files with 17 additions and 17 deletions.
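For context: this commit swaps the crate's hand-rolled get_split helper over to the standard library's slice::split_first_chunk (stable since Rust 1.77), which returns a borrowed &[u8; N] head instead of an owned array. That borrow is why the call sites below each gain a one-character dereference. A minimal sketch of the new helper's behavior, using illustrative byte values:

fn main() {
    let data: &[u8] = &[0x2d, 0x28, 0xff, 0x01];

    // With at least N bytes available, split_first_chunk returns
    // Some((&[u8; N], &[u8])): a borrowed head plus the remaining slice.
    let (head, rest) = data.split_first_chunk::<2>().unwrap();

    // head is a &[u8; 2] borrowed from `data`; from_le_bytes wants an owned
    // array, so callers copy it out with a dereference (byte arrays are Copy).
    assert_eq!(u16::from_le_bytes(*head), 0x282d);
    assert_eq!(rest, &[0xff, 0x01]);

    // A slice shorter than N yields None, the same end-of-input behavior the
    // old data.get(N..) check provided.
    assert!(data.split_first_chunk::<8>().is_none());
}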
24 changes: 12 additions & 12 deletions src/binary/lexer.rs
@@ -85,13 +85,13 @@ impl LexemeId {
#[inline]
pub(crate) fn read_id(data: &[u8]) -> Result<(LexemeId, &[u8]), LexError> {
let (head, rest) = get_split::<2>(data).ok_or(LexError::Eof)?;
-Ok((LexemeId::new(u16::from_le_bytes(head)), rest))
+Ok((LexemeId::new(u16::from_le_bytes(*head)), rest))
}

#[inline]
pub(crate) fn read_string(data: &[u8]) -> Result<(Scalar, &[u8]), LexError> {
let (head, rest) = get_split::<2>(data).ok_or(LexError::Eof)?;
-let text_len = usize::from(u16::from_le_bytes(head));
+let text_len = usize::from(u16::from_le_bytes(*head));
if text_len <= rest.len() {
let (text, rest) = rest.split_at(text_len);
Ok((Scalar::new(text), rest))
@@ -109,34 +109,34 @@ pub(crate) fn read_bool(data: &[u8]) -> Result<(bool, &[u8]), LexError> {
#[inline]
pub(crate) fn read_u32(data: &[u8]) -> Result<(u32, &[u8]), LexError> {
let (head, rest) = get_split::<4>(data).ok_or(LexError::Eof)?;
-Ok((u32::from_le_bytes(head), rest))
+Ok((u32::from_le_bytes(*head), rest))
}

#[inline]
pub(crate) fn read_u64(data: &[u8]) -> Result<(u64, &[u8]), LexError> {
let (head, rest) = get_split::<8>(data).ok_or(LexError::Eof)?;
-Ok((u64::from_le_bytes(head), rest))
+Ok((u64::from_le_bytes(*head), rest))
}

#[inline]
pub(crate) fn read_i64(data: &[u8]) -> Result<(i64, &[u8]), LexError> {
let (head, rest) = get_split::<8>(data).ok_or(LexError::Eof)?;
-Ok((i64::from_le_bytes(head), rest))
+Ok((i64::from_le_bytes(*head), rest))
}

#[inline]
pub(crate) fn read_i32(data: &[u8]) -> Result<(i32, &[u8]), LexError> {
let (head, rest) = get_split::<4>(data).ok_or(LexError::Eof)?;
-Ok((i32::from_le_bytes(head), rest))
+Ok((i32::from_le_bytes(*head), rest))
}

#[inline]
-pub(crate) fn read_f32(data: &[u8]) -> Result<([u8; 4], &[u8]), LexError> {
+pub(crate) fn read_f32(data: &[u8]) -> Result<(&[u8; 4], &[u8]), LexError> {
get_split::<4>(data).ok_or(LexError::Eof)
}

#[inline]
-pub(crate) fn read_f64(data: &[u8]) -> Result<([u8; 8], &[u8]), LexError> {
+pub(crate) fn read_f64(data: &[u8]) -> Result<(&[u8; 8], &[u8]), LexError> {
get_split::<8>(data).ok_or(LexError::Eof)
}

@@ -307,8 +307,8 @@ pub(crate) fn read_token(data: &[u8]) -> Result<(Token, &[u8]), LexError> {
LexemeId::BOOL => read_bool(data).map(|(x, d)| (Token::Bool(x), d)),
LexemeId::QUOTED => read_string(data).map(|(x, d)| (Token::Quoted(x), d)),
LexemeId::UNQUOTED => read_string(data).map(|(x, d)| (Token::Unquoted(x), d)),
-LexemeId::F32 => read_f32(data).map(|(x, d)| (Token::F32(x), d)),
-LexemeId::F64 => read_f64(data).map(|(x, d)| (Token::F64(x), d)),
+LexemeId::F32 => read_f32(data).map(|(x, d)| (Token::F32(*x), d)),
+LexemeId::F64 => read_f64(data).map(|(x, d)| (Token::F64(*x), d)),
LexemeId::RGB => read_rgb(data).map(|(x, d)| (Token::Rgb(x), d)),
LexemeId::I64 => read_i64(data).map(|(x, d)| (Token::I64(x), d)),
LexemeId(id) => Ok((Token::Id(id), data)),
@@ -714,7 +714,7 @@ impl<'a> Lexer<'a> {
pub fn read_f32(&mut self) -> Result<[u8; 4], LexerError> {
let (result, rest) = read_f32(self.data).map_err(|e| self.err_position(e))?;
self.data = rest;
-Ok(result)
+Ok(*result)
}

/// Advance the lexer through 64 bits of floating point data and return the bytes
@@ -730,7 +730,7 @@ impl<'a> Lexer<'a> {
pub fn read_f64(&mut self) -> Result<[u8; 8], LexerError> {
let (result, rest) = read_f64(self.data).map_err(|e| self.err_position(e))?;
self.data = rest;
-Ok(result)
+Ok(*result)
}

/// Advance the lexer through an rgb value (with optional alpha channel)
6 changes: 3 additions & 3 deletions src/binary/tape.rs
@@ -153,7 +153,7 @@ impl<'a, 'b> ParserState<'a, 'b> {

#[inline]
fn parse_next_id_opt(&mut self, data: &'a [u8]) -> Option<(&'a [u8], u16)> {
-get_split::<2>(data).map(|(head, rest)| (rest, u16::from_le_bytes(head)))
+get_split::<2>(data).map(|(head, rest)| (rest, u16::from_le_bytes(*head)))
}

#[inline]
@@ -194,14 +194,14 @@ impl<'a, 'b> ParserState<'a, 'b> {
#[inline]
fn parse_f32(&mut self, data: &'a [u8]) -> Result<&'a [u8], Error> {
let (result, rest) = read_f32(data).map_err(|e| self.err_position(e, data))?;
-self.token_tape.alloc().init(BinaryToken::F32(result));
+self.token_tape.alloc().init(BinaryToken::F32(*result));
Ok(rest)
}

#[inline]
fn parse_f64(&mut self, data: &'a [u8]) -> Result<&'a [u8], Error> {
let (result, rest) = read_f64(data).map_err(|e| self.err_position(e, data))?;
-self.token_tape.alloc().init(BinaryToken::F64(result));
+self.token_tape.alloc().init(BinaryToken::F64(*result));
Ok(rest)
}

4 changes: 2 additions & 2 deletions src/util.rs
@@ -6,8 +6,8 @@ fn take<const N: usize>(data: &[u8]) -> [u8; N] {
}

#[inline]
-pub(crate) fn get_split<const N: usize>(data: &[u8]) -> Option<([u8; N], &[u8])> {
-data.get(N..).map(|d| (take::<N>(data), d))
+pub(crate) fn get_split<const N: usize>(data: &[u8]) -> Option<(&[u8; N], &[u8])> {
+data.split_first_chunk::<N>()
}

/// https://youtu.be/wlvKAT7SZIQ?si=EndNPTY6f8oEBS--&t=2426
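The new body is a direct delegation, so the helper keeps its old contract: None when fewer than N bytes remain, otherwise the head plus the tail of the input, only now the head is borrowed rather than copied. A rough equivalence sketch follows; the names are illustrative, and the old take helper's body is not shown in this hunk, so it is reconstructed here on the assumption that it copied the first N bytes:

// Old shape (reconstructed; assumes take copied data[..N] into an owned array).
fn get_split_old<const N: usize>(data: &[u8]) -> Option<([u8; N], &[u8])> {
    let head: [u8; N] = data.get(..N)?.try_into().ok()?;
    Some((head, &data[N..]))
}

// New shape, as in this commit: a single length check, no copy, and the head
// borrows from the input instead of being returned by value.
fn get_split_new<const N: usize>(data: &[u8]) -> Option<(&[u8; N], &[u8])> {
    data.split_first_chunk::<N>()
}

fn main() {
    let bytes = [1u8, 2, 3, 4, 5];
    assert_eq!(
        get_split_old::<2>(&bytes),
        get_split_new::<2>(&bytes).map(|(head, rest)| (*head, rest)),
    );
    // Both refuse a request for more bytes than the slice holds.
    assert!(get_split_old::<8>(&bytes).is_none());
    assert!(get_split_new::<8>(&bytes).is_none());
}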
