Merge pull request #292 from KisaragiEffective/refactor/cargo-clippy
KisaragiEffective authored Oct 19, 2023
2 parents 4a559e2 + afdf5d1 commit 2968fde
Showing 12 changed files with 80 additions and 71 deletions.
10 changes: 5 additions & 5 deletions package/origlang-ast/src/lib.rs
@@ -23,9 +23,9 @@ pub enum AtomicPattern {
 impl Display for AtomicPattern {
     fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
         match self {
-            AtomicPattern::Discard => f.write_str("_"),
-            AtomicPattern::Bind(i) => f.write_str(i.as_name()),
-            AtomicPattern::Tuple(v) => {
+            Self::Discard => f.write_str("_"),
+            Self::Bind(i) => f.write_str(i.as_name()),
+            Self::Tuple(v) => {
                 f.write_str("(")?;
                 for i in v {
                     Display::fmt(i, f)?;
@@ -112,8 +112,8 @@ impl From<Identifier> for TypeSignature {
 impl Display for TypeSignature {
     fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
         match self {
-            TypeSignature::Simple(x) => <Identifier as Display>::fmt(x, f),
-            TypeSignature::Tuple(v) => {
+            Self::Simple(x) => <Identifier as Display>::fmt(x, f),
+            Self::Tuple(v) => {
                 for x in v {
                     <Self as Display>::fmt(x, f)?;
                 }
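Both hunks above are the clippy::use_self fix: inside an impl block, repeating the type's own name in patterns is replaced by Self. A minimal sketch of the same pattern, built around an invented Shape enum rather than anything from this repository:

    use std::fmt::{self, Display, Formatter};

    enum Shape {
        Dot,
        Line(u32),
    }

    impl Display for Shape {
        fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
            match self {
                // `Self::Dot` instead of `Shape::Dot`: same meaning, and it keeps
                // compiling unchanged if the type is ever renamed.
                Self::Dot => f.write_str("."),
                Self::Line(len) => write!(f, "line({len})"),
            }
        }
    }

    fn main() {
        println!("{} {}", Shape::Dot, Shape::Line(3));
    }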
4 changes: 2 additions & 2 deletions package/origlang-cli/src/task/repl.rs
@@ -54,9 +54,9 @@ impl Repl {
         let ir = tra.into_ir();
         let trans = TheTranspiler::new(&NoOptimization);
         let ir = trans.lower(ir);
-        let ir = trans.lower(ir);
+

-        ir
+        trans.lower(ir)
     }
 }

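The repl.rs change is the clippy::let_and_return fix: a value bound only to be returned on the next line is now returned as an expression directly. An illustrative sketch with invented names:

    fn double_then_add_one(x: i32) -> i32 {
        let doubled = x * 2;
        // clippy::let_and_return: return the final expression directly instead of
        // binding it to a temporary (`let result = doubled + 1; result`).
        doubled + 1
    }

    fn main() {
        assert_eq!(double_then_add_one(3), 7);
    }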
18 changes: 12 additions & 6 deletions package/origlang-compiler-entrypoint/src/lib.rs
@@ -2,7 +2,7 @@
 #![warn(clippy::pedantic, clippy::nursery)]

 use std::any::type_name;
-use std::ops::Deref;
+
 use log::debug;
 use thiserror::Error;
 use origlang_ast::Statement;
@@ -24,7 +24,7 @@ pub struct TheCompiler {

 impl TheCompiler {
     /// Creates new instance without any optimization, scanner, nor diagnostic receiver.
-    pub fn new() -> Self {
+    #[must_use] pub fn new() -> Self {
         Self {
             scanner: ScannerRegistry::default(),
             optimization_preset: OptimizationPresetCollection::none(),
@@ -38,7 +38,7 @@ impl TheCompiler {
         self
     }

-    pub fn register_diagnostic_receiver<DS: DiagnosticSink + 'static>(mut self, receiver: Box<DS>) -> Self {
+    #[must_use] pub fn register_diagnostic_receiver<DS: DiagnosticSink + 'static>(mut self, receiver: Box<DS>) -> Self {
         debug!("registered {ds}", ds = type_name::<DS>());

         self.diagnostic_receivers.push(receiver as _);
@@ -71,7 +71,7 @@ impl TheCompiler {
         let diagnostics = diags.iter();
         for diag in diagnostics {
             for receiver in &self.diagnostic_receivers {
-                receiver.handle_diagnostic(diag.deref())
+                receiver.handle_diagnostic(&**diag);
             }
         }
     }
@@ -83,7 +83,7 @@ impl TheCompiler {
         let diagnostics = diags.iter();
         for diag in diagnostics {
             for receiver in &self.diagnostic_receivers {
-                receiver.handle_diagnostic(diag.deref())
+                receiver.handle_diagnostic(&**diag);
             }
         }
     }
@@ -93,6 +93,12 @@ impl TheCompiler {
     }
 }

+impl Default for TheCompiler {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
 #[derive(Debug, Eq, PartialEq, Error)]
 pub enum PartialCompilation {
     #[error("syntax error: {0}")]
@@ -145,7 +151,7 @@ pub struct OptimizationPresetCollection {

 impl OptimizationPresetCollection {
     fn none() -> Self {
-        OptimizationPresetCollection {
+        Self {
             ir0: Box::new(NoOptimization) as Box<_>,
             ir1: Box::new(NoOptimization) as Box<_>,
             ir2: Box::new(NoOptimization) as Box<_>,
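Two pedantic lints drive most of this file's diff: clippy::must_use_candidate (flag methods whose returned value should not be silently dropped) and clippy::new_without_default (a type with an argument-less new() should also implement Default). A small self-contained sketch of both, using a made-up Config type rather than the crate's real API:

    #[derive(Debug)]
    pub struct Config {
        verbose: bool,
    }

    impl Config {
        /// The caller is expected to use the returned value, hence #[must_use].
        #[must_use]
        pub fn new() -> Self {
            Self { verbose: false }
        }

        /// Builder-style method: consumes self and returns the updated value.
        #[must_use]
        pub fn verbose(mut self, on: bool) -> Self {
            self.verbose = on;
            self
        }
    }

    /// clippy::new_without_default: if `new()` takes no arguments, also provide Default.
    impl Default for Config {
        fn default() -> Self {
            Self::new()
        }
    }

    fn main() {
        let cfg = Config::new().verbose(true);
        println!("{cfg:?}");
    }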
2 changes: 1 addition & 1 deletion package/origlang-compiler/src/chars/boundary.rs
@@ -14,7 +14,7 @@ impl Utf8CharBoundaryStartByte {
         self.0
     }

-    pub const fn stride(self, stride: Utf8CharStride) -> Utf8CharBoundaryStartByte {
+    pub const fn stride(self, stride: Utf8CharStride) -> Self {
         Self::new(self.as_usize() + stride.as_usize())
     }
 }
32 changes: 16 additions & 16 deletions package/origlang-compiler/src/lexer.rs
@@ -3,7 +3,7 @@ pub(crate) mod error;
 mod tests;
 pub mod token;

-use std::borrow::Cow;
+
 use std::cell::Cell;
 use std::convert::Infallible;

@@ -73,7 +73,7 @@ impl Lexer<'_> {
         if let Some(b) = self.source.get((start.as_usize())..end_exclusive) {
             if s == b {
                 match self.set_current_index(Utf8CharBoundaryStartByte::new(end_exclusive)) {
-                    Ok(_) => Ok(Some(s)),
+                    Ok(()) => Ok(Some(s)),
                     Err(OutOfRangeError { .. }) => Ok(None),
                 }
             } else {
@@ -134,14 +134,14 @@ impl Lexer<'_> {
             .or_else(|| self.try_and_eat_str(r#"("#).expect("huh?").map(|_| Token::SymLeftPar))
             .or_else(|| self.try_and_eat_str(r#")"#).expect("huh?").map(|_| Token::SymRightPar))
             .or_else(|| {
-                if let Some(_) = self.try_and_eat_str(r#"<"#).expect("huh?") {
-                    if let Some(_) = self.try_and_eat_str(r#"="#).expect("huh?") {
-                        if let Some(_) = self.try_and_eat_str(r#">"#).expect("huh?") {
+                if self.try_and_eat_str(r#"<"#).expect("huh?").is_some() {
+                    if self.try_and_eat_str(r#"="#).expect("huh?").is_some() {
+                        if self.try_and_eat_str(r#">"#).expect("huh?").is_some() {
                             Some(Token::PartLessEqMore)
                         } else {
                             Some(Token::PartLessEq)
                         }
-                    } else if let Some(_) = self.try_and_eat_str(r#"<"#).expect("huh?") {
+                    } else if self.try_and_eat_str(r#"<"#).expect("huh?").is_some() {
                         Some(Token::PartLessLess)
                     } else {
                         Some(Token::SymLess)
@@ -151,10 +151,10 @@ impl Lexer<'_> {
                 }
             })
             .or_else(|| {
-                if let Some(_) = self.try_and_eat_str(r#">"#).expect("huh?") {
-                    if let Some(_) = self.try_and_eat_str(r#"="#).expect("huh?") {
+                if self.try_and_eat_str(r#">"#).expect("huh?").is_some() {
+                    if self.try_and_eat_str(r#"="#).expect("huh?").is_some() {
                         Some(Token::PartMoreEq)
-                    } else if let Some(_) = self.try_and_eat_str(r#">"#).expect("huh?") {
+                    } else if self.try_and_eat_str(r#">"#).expect("huh?").is_some() {
                         Some(Token::PartMoreMore)
                     } else {
                         Some(Token::SymMore)
@@ -300,7 +300,7 @@ impl Lexer<'_> {
             let r = self.byte_skip_n(plus);

             if let Ok(b) = r {
-                if (b'0'..=b'9').contains(&b) {
+                if b.is_ascii_digit() {
                     plus += 1;
                 } else {
                     break
@@ -366,13 +366,13 @@ impl Lexer<'_> {
                 new - (old + old_relative)
             } else {
                 let mut c = self.column.get().get();
-                c += (new - old);
+                c += new - old;

                 c
             };

             self.line.set(NonZeroUsize::new(new_line).expect("overflow"));
-            self.column.set(NonZeroUsize::new(new_col).expect("overflow"))
+            self.column.set(NonZeroUsize::new(new_col).expect("overflow"));
         } else {
             // back
             let new_line = current_line - src[new..old].bytes().filter(|x| *x == b'\n').count();
@@ -400,7 +400,7 @@ impl Lexer<'_> {
             };

             self.line.set(NonZeroUsize::new(new_line).expect("overflow"));
-            self.column.set(NonZeroUsize::new(new_col).expect("overflow"))
+            self.column.set(NonZeroUsize::new(new_col).expect("overflow"));
         }

         debug!("index: requested = {future_index:?}");
@@ -411,7 +411,7 @@

     fn scan_line_comment(&self) -> Result<Token, LexerError> {
         let start = self.source_bytes_nth.get().as_usize();
-        let rel_pos = self.source[start..].find("\n").unwrap_or(self.source.len());
+        let rel_pos = self.source[start..].find('\n').unwrap_or(self.source.len());
         self.advance_bytes(rel_pos)?;

         let content = self.source[start..(start + rel_pos)].to_string();
@@ -536,11 +536,11 @@ impl Lexer<'_> {
     }

     fn current_byte(&self) -> Result<u8, LexerError> {
-        self.source.bytes().nth(self.source_bytes_nth.get().as_usize()).ok_or_else(|| self.report_out_of_range_error())
+        self.source.as_bytes().get(self.source_bytes_nth.get().as_usize()).copied().ok_or_else(|| self.report_out_of_range_error())
     }

     fn byte_skip_n(&self, skip: usize) -> Result<u8, LexerError> {
-        self.source.bytes().nth(self.source_bytes_nth.get().as_usize() + skip).ok_or_else(|| self.report_out_of_range_error())
+        self.source.as_bytes().get(self.source_bytes_nth.get().as_usize() + skip).copied().ok_or_else(|| self.report_out_of_range_error())
     }

     fn report_out_of_range_error(&self) -> LexerError {
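The lexer changes bundle several lints: redundant_pattern_matching (.is_some() instead of if let Some(_)), a manual byte-range check replaced by u8::is_ascii_digit(), a char pattern instead of a one-character &str in find, and str::bytes().nth(i) replaced by as_bytes().get(i).copied(). A compact sketch of these idioms on a plain &str; none of this code is from the lexer itself:

    fn demo(src: &str, i: usize) {
        // redundant_pattern_matching: prefer .is_some() over `if let Some(_) = ...`.
        if src.get(i..).is_some() {
            println!("offset {i} is on a char boundary");
        }

        // as_bytes().get(i).copied() indexes directly instead of walking the
        // iterator the way bytes().nth(i) does.
        if let Some(b) = src.as_bytes().get(i).copied() {
            // is_ascii_digit() instead of (b'0'..=b'9').contains(&b).
            println!("byte {b} is a digit: {}", b.is_ascii_digit());
        }

        // A char pattern ('\n') is cheaper than a one-character &str pattern ("\n").
        let line_len = src.find('\n').unwrap_or(src.len());
        println!("first line is {line_len} bytes long");
    }

    fn main() {
        demo("42\nrest", 0);
    }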
34 changes: 17 additions & 17 deletions package/origlang-compiler/src/lexer/tests.rs
@@ -15,82 +15,82 @@ fn test(str_lit: &str) {

 #[test]
 fn parse_string_literal_ascii() {
-    test("123456")
+    test("123456");
 }

 #[test]
 fn parse_string_literal_empty() {
-    test("")
+    test("");
 }

 #[test]
 fn parse_string_literal_two_bytes() {
-    test("\u{80}")
+    test("\u{80}");
 }

 #[test]
 fn parse_string_literal_three_bytes() {
-    test("\u{800}")
+    test("\u{800}");
 }

 #[test]
 fn parse_string_literal_mixed_1_2() {
-    test("1\u{80}")
+    test("1\u{80}");
 }

 #[test]
 fn parse_string_literal_mixed_1_3() {
-    test("1あ")
+    test("1あ");
 }

 #[test]
 fn parse_string_literal_mixed_1_4() {
-    test("1\u{10000}")
+    test("1\u{10000}");
 }

 #[test]
 fn parse_string_literal_mixed_2_1() {
-    test("\u{80}1")
+    test("\u{80}1");
 }

 #[test]
 fn parse_string_literal_mixed_2_3() {
-    test("\u{80}あ")
+    test("\u{80}あ");
 }

 #[test]
 fn parse_string_literal_mixed_2_4() {
-    test("\u{80}\u{10000}")
+    test("\u{80}\u{10000}");
 }

 #[test]
 fn parse_string_literal_mixed_3_1() {
-    test("あ1")
+    test("あ1");
 }

 #[test]
 fn parse_string_literal_mixed_3_2() {
-    test("あ\u{80}")
+    test("あ\u{80}");
 }

 #[test]
 fn parse_string_literal_mixed_3_4() {
-    test("あ\u{10000}")
+    test("あ\u{10000}");
 }

 #[test]
 fn parse_string_literal_mixed_4_1() {
-    test("\u{10000}1")
+    test("\u{10000}1");
 }

 #[test]
 fn parse_string_literal_mixed_4_2() {
-    test("\u{10000}\u{80}")
+    test("\u{10000}\u{80}");
 }

 #[test]
 fn parse_string_literal_mixed_4_3() {
-    test("\u{10000}あ")
+    test("\u{10000}あ");
 }

 use std::num::NonZeroUsize;
@@ -231,5 +231,5 @@ fn off_by_one_range_regression() {
             line: NonZeroUsize::new(1).unwrap(),
             column: NonZeroUsize::new(1).unwrap(),
         }
-    })
+    });
 }
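Every change in this test module is the same clippy::semicolon_if_nothing_returned fix: a unit-returning call in tail position gets an explicit semicolon so it reads as a statement rather than an implicit return value. A tiny illustration with invented functions:

    fn log(msg: &str) {
        println!("{msg}");
    }

    fn run() {
        // The call returns (), so end it with a semicolon instead of leaving it
        // as the block's tail expression.
        log("done");
    }

    fn main() {
        run();
    }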
7 changes: 5 additions & 2 deletions package/origlang-compiler/src/lexer/token.rs
@@ -8,6 +8,7 @@ pub struct TemporalLexerUnwindToken {
 }

 impl TemporalLexerUnwindToken {
+    #[must_use]
     pub fn new(reset_to: Utf8CharBoundaryStartByte) -> Self {
         Self {
             unwind_index: reset_to
@@ -159,12 +160,14 @@ impl Token {
         }
     }

+    #[must_use]
     pub const fn is_error(&self) -> bool {
-        matches!(self, Token::UnexpectedChar { .. })
+        matches!(self, Self::UnexpectedChar { .. })
     }

+    #[must_use]
     pub const fn is_end(&self) -> bool {
-        matches!(self, Token::EndOfFile)
+        matches!(self, Self::EndOfFile)
     }
 }

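The token.rs hunks combine #[must_use] on pure predicate methods with matches! over Self:: variants. An illustrative standalone version built around an invented Tok enum, not the crate's real token type:

    #[derive(Debug, PartialEq, Eq)]
    enum Tok {
        Word(&'static str),
        EndOfInput,
        Bad { what: char },
    }

    impl Tok {
        // A pure predicate: ignoring the result is almost certainly a bug, so
        // #[must_use] turns an accidental drop into a compiler warning.
        #[must_use]
        const fn is_end(&self) -> bool {
            matches!(self, Self::EndOfInput)
        }

        #[must_use]
        const fn is_error(&self) -> bool {
            matches!(self, Self::Bad { .. })
        }
    }

    fn main() {
        assert!(Tok::EndOfInput.is_end());
        assert!(!Tok::Word("hi").is_error());
        assert!(Tok::Bad { what: '?' }.is_error());
    }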
4 changes: 2 additions & 2 deletions package/origlang-compiler/src/parser.rs
@@ -206,7 +206,7 @@ impl Parser<'_> {
         if let Token::Identifier { inner: aliased } = aliased.data {
             let except_eq = self.lexer.next();

-            if let Token::SymEq = except_eq.data {
+            if except_eq.data == Token::SymEq {

                 let Ok(replace_with) = self.lexer.parse_fallible(|| self.parse_type()) else {
                     return Err(SimpleErrorWithPos {
@@ -729,7 +729,7 @@ impl Parser<'_> {
                 }
             })
         }
-            other_token => return Err(SimpleErrorWithPos {
+            other_token => Err(SimpleErrorWithPos {
                 kind: ParserError::UnexpectedToken {
                     pat: TokenKind::StartOfTypeSignature,
                     unmatch: other_token,
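The parser hunks show two final lints: clippy::equatable_if_let (an if let whose pattern binds nothing is clearer as an == comparison) and clippy::needless_return (a match arm's trailing expression is already its value, so return is redundant). A self-contained sketch of both, with invented types:

    #[derive(Debug, PartialEq, Eq)]
    enum Sym {
        Eq,
        Comma,
    }

    fn describe(s: &Sym) -> Result<&'static str, String> {
        match s {
            // The arm's final expression is returned implicitly; no `return` keyword needed.
            Sym::Eq => Ok("equals sign"),
            other => Err(format!("unexpected symbol: {other:?}")),
        }
    }

    fn main() {
        let next = Sym::Eq;
        // `==` instead of `if let Sym::Eq = next { ... }` when the pattern binds nothing.
        if next == Sym::Eq {
            println!("{:?}", describe(&next));
        }
    }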