Skip to content

Commit

Permalink
Command-line error reporting now also supports UTF-8
Browse files Browse the repository at this point in the history
  • Loading branch information
VonTum committed Aug 26, 2023
1 parent 9b0be1b commit 5f7e8a9
Show file tree
Hide file tree
Showing 3 changed files with 36 additions and 11 deletions.
8 changes: 8 additions & 0 deletions src/ast.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,14 @@ use core::ops::Range;
#[derive(Clone,Copy,Debug,PartialEq,Eq)]
pub struct Span(pub usize, pub usize);

impl Span {
pub fn to_range<T : Clone>(&self, tokens : &[Range<T>]) -> Range<T> {
let min = tokens[self.0].start.clone();
let max = tokens[self.1].end.clone();
min..max
}
}

#[derive(Debug,Clone,Copy,PartialEq,Eq)]
pub enum IdentifierType {
Input,
Expand Down
26 changes: 25 additions & 1 deletion src/dev_aid/syntax_highlighting.rs
Original file line number Diff line number Diff line change
Expand Up @@ -150,6 +150,28 @@ pub fn create_token_ide_info<'a>(parsed: &FullParseResult) -> Vec<IDEToken> {
result
}

/// Builds, for every token, its range in *characters* (not bytes) within
/// `file_text`, so that diagnostics point at the correct columns in UTF-8
/// source where a character may span multiple bytes.
///
/// Returns one `Range<usize>` of character offsets per entry in `tokens`,
/// in the same order.
///
/// NOTE(review): this assumes tokens are sorted by byte position and
/// non-overlapping — the slice `file_text[whitespace_start..tok_range.start]`
/// would panic otherwise. TODO confirm against the tokenizer.
fn generate_character_offsets(file_text : &str, tokens : &[Token]) -> Vec<Range<usize>> {
    // with_capacity replaces the former Vec::new() + reserve() pair.
    let mut character_offsets : Vec<Range<usize>> = Vec::with_capacity(tokens.len());

    let mut cur_char = 0;
    let mut whitespace_start = 0;
    for tok in tokens {
        let tok_range = tok.get_range();

        // Advance over the whitespace gap preceding this token, counting
        // characters rather than bytes.
        cur_char += file_text[whitespace_start..tok_range.start].chars().count();
        let token_start_char = cur_char;

        // Count the characters of the token text itself.
        cur_char += file_text[tok_range.clone()].chars().count();
        character_offsets.push(token_start_char..cur_char);
        whitespace_start = tok_range.end;
    }

    character_offsets
}

pub fn syntax_highlight_file(file_path : &str) {
let file_text = match std::fs::read_to_string(file_path) {
Ok(file_text) => file_text,
Expand All @@ -158,8 +180,10 @@ pub fn syntax_highlight_file(file_path : &str) {

let (full_parse, errors) = perform_full_semantic_parse(&file_text);

let token_offsets = generate_character_offsets(&file_text, &full_parse.tokens);

for err in errors {
err.pretty_print_error(&file_path, &file_text, &full_parse.tokens);
err.pretty_print_error(&file_path, &file_text, &token_offsets);
}

print_tokens(&file_text, &full_parse.tokens);
Expand Down
13 changes: 3 additions & 10 deletions src/errors.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@
use std::ops::Range;

use crate::ast::Span;
use crate::tokenizer::Token;
use ariadne::*;

use crate::tokenizer::{TokenTypeIdx, get_token_type_name};
Expand All @@ -18,19 +17,13 @@ pub struct ParsingError<T> {
pub infos : Vec<ErrorInfo<T>>
}

fn cvt_span_to_char_range(span : Span, tokens : &[Token]) -> Range<usize> {
let min = tokens[span.0].get_range().start;
let max = tokens[span.1].get_range().end;
min..max
}

impl<'a> ParsingError<Span> {
pub fn pretty_print_error(&self, file_name : &str, file_text : &str, tokens : &[Token]) {
pub fn pretty_print_error(&self, file_name : &str, file_text : &str, character_ranges : &[Range<usize>]) {
// Generate & choose some colours for each of our elements
let err_color = Color::Red;
let info_color = Color::Blue;

let error_span = cvt_span_to_char_range(self.error.position, tokens);
let error_span = self.error.position.to_range(character_ranges);
let mut report = Report::build(ReportKind::Error, file_name, error_span.start)
.with_message(&self.error.reason)
.with_label(
Expand All @@ -40,7 +33,7 @@ impl<'a> ParsingError<Span> {
);

for info in &self.infos {
let info_span = cvt_span_to_char_range(info.position, tokens);
let info_span = info.position.to_range(character_ranges);
report = report.with_label(
Label::new((file_name, info_span))
.with_message(&info.reason)
Expand Down

0 comments on commit 5f7e8a9

Please sign in to comment.