Compilation: point caret in error message at the main token

This commit is contained in:
Veikka Tuominen 2022-07-15 11:38:16 +03:00
parent b5a838247b
commit 1463144fc8
5 changed files with 73 additions and 57 deletions

View file

@@ -427,11 +427,15 @@ pub const AllErrors = struct {
else => try stderr.writeByte(b),
};
try stderr.writeByte('\n');
try stderr.writeByteNTimes(' ', src.column);
ttyconf.setColor(stderr, .Green);
try stderr.writeByte('^');
// TODO basic unicode code point monospace width
try stderr.writeByteNTimes('~', src.span.end - src.span.start - 1);
const before_caret = src.span.main - src.span.start;
// -1 since span.main includes the caret
const after_caret = src.span.end - src.span.main -| 1;
try stderr.writeByteNTimes(' ', src.column - before_caret);
ttyconf.setColor(stderr, .Green);
try stderr.writeByteNTimes('~', before_caret);
try stderr.writeByte('^');
try stderr.writeByteNTimes('~', after_caret);
try stderr.writeByte('\n');
ttyconf.setColor(stderr, .Reset);
}
@@ -472,8 +476,7 @@ pub const AllErrors = struct {
hasher.update(src.src_path);
std.hash.autoHash(&hasher, src.line);
std.hash.autoHash(&hasher, src.column);
std.hash.autoHash(&hasher, src.span.start);
std.hash.autoHash(&hasher, src.span.end);
std.hash.autoHash(&hasher, src.span.main);
},
.plain => |plain| {
hasher.update(plain.msg);
@@ -492,8 +495,7 @@ pub const AllErrors = struct {
mem.eql(u8, a_src.src_path, b_src.src_path) and
a_src.line == b_src.line and
a_src.column == b_src.column and
a_src.span.start == b_src.span.start and
a_src.span.end == b_src.span.end;
a_src.span.main == b_src.span.main;
},
.plain => return false,
},
@@ -533,12 +535,12 @@ pub const AllErrors = struct {
).init(allocator);
const err_source = try module_err_msg.src_loc.file_scope.getSource(module.gpa);
const err_span = try module_err_msg.src_loc.span(module.gpa);
const err_loc = std.zig.findLineColumn(err_source.bytes, err_span.start);
const err_loc = std.zig.findLineColumn(err_source.bytes, err_span.main);
for (module_err_msg.notes) |module_note| {
const source = try module_note.src_loc.file_scope.getSource(module.gpa);
const span = try module_note.src_loc.span(module.gpa);
const loc = std.zig.findLineColumn(source.bytes, span.start);
const loc = std.zig.findLineColumn(source.bytes, span.main);
const file_path = try module_note.src_loc.file_scope.fullPath(allocator);
const note = &notes_buf[note_i];
note.* = .{
@@ -604,9 +606,10 @@ pub const AllErrors = struct {
}
const token_starts = file.tree.tokens.items(.start);
const start = token_starts[item.data.token] + item.data.byte_offset;
break :blk Module.SrcLoc.Span{ .start = start, .end = start + 1 };
const end = start + @intCast(u32, file.tree.tokenSlice(item.data.token).len);
break :blk Module.SrcLoc.Span{ .start = start, .end = end, .main = start };
};
const err_loc = std.zig.findLineColumn(file.source, err_span.start);
const err_loc = std.zig.findLineColumn(file.source, err_span.main);
var notes: []Message = &[0]Message{};
if (item.data.notes != 0) {
@@ -622,9 +625,10 @@ pub const AllErrors = struct {
}
const token_starts = file.tree.tokens.items(.start);
const start = token_starts[note_item.data.token] + note_item.data.byte_offset;
break :blk Module.SrcLoc.Span{ .start = start, .end = start + 1 };
const end = start + @intCast(u32, file.tree.tokenSlice(note_item.data.token).len);
break :blk Module.SrcLoc.Span{ .start = start, .end = end, .main = start };
};
const loc = std.zig.findLineColumn(file.source, span.start);
const loc = std.zig.findLineColumn(file.source, span.main);
note.* = .{
.src = .{
@@ -2665,7 +2669,7 @@ pub fn getAllErrorsAlloc(self: *Compilation) !AllErrors {
.msg = try std.fmt.allocPrint(arena_allocator, "unable to build C object: {s}", .{
err_msg.msg,
}),
.span = .{ .start = 0, .end = 1 },
.span = .{ .start = 0, .end = 1, .main = 0 },
.line = err_msg.line,
.column = err_msg.column,
.source_line = null, // TODO

View file

@@ -2085,20 +2085,21 @@ pub const SrcLoc = struct {
pub const Span = struct {
start: u32,
end: u32,
main: u32,
};
pub fn span(src_loc: SrcLoc, gpa: Allocator) !Span {
switch (src_loc.lazy) {
.unneeded => unreachable,
.entire_file => return Span{ .start = 0, .end = 1 },
.entire_file => return Span{ .start = 0, .end = 1, .main = 0 },
.byte_abs => |byte_index| return Span{ .start = byte_index, .end = byte_index + 1 },
.byte_abs => |byte_index| return Span{ .start = byte_index, .end = byte_index + 1, .main = byte_index },
.token_abs => |tok_index| {
const tree = try src_loc.file_scope.getTree(gpa);
const start = tree.tokens.items(.start)[tok_index];
const end = start + @intCast(u32, tree.tokenSlice(tok_index).len);
return Span{ .start = start, .end = end };
return Span{ .start = start, .end = end, .main = start };
},
.node_abs => |node| {
const tree = try src_loc.file_scope.getTree(gpa);
@@ -2109,14 +2110,14 @@ pub const SrcLoc = struct {
const tok_index = src_loc.declSrcToken();
const start = tree.tokens.items(.start)[tok_index] + byte_off;
const end = start + @intCast(u32, tree.tokenSlice(tok_index).len);
return Span{ .start = start, .end = end };
return Span{ .start = start, .end = end, .main = start };
},
.token_offset => |tok_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const tok_index = src_loc.declSrcToken() + tok_off;
const start = tree.tokens.items(.start)[tok_index];
const end = start + @intCast(u32, tree.tokenSlice(tok_index).len);
return Span{ .start = start, .end = end };
return Span{ .start = start, .end = end, .main = start };
},
.node_offset => |traced_off| {
const node_off = traced_off.x;
@@ -2137,7 +2138,7 @@ pub const SrcLoc = struct {
const tok_index = tree.firstToken(node) - 2;
const start = tree.tokens.items(.start)[tok_index];
const end = start + @intCast(u32, tree.tokenSlice(tok_index).len);
return Span{ .start = start, .end = end };
return Span{ .start = start, .end = end, .main = start };
},
.node_offset_var_decl_ty => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
@@ -2158,7 +2159,7 @@ pub const SrcLoc = struct {
};
const start = tree.tokens.items(.start)[tok_index];
const end = start + @intCast(u32, tree.tokenSlice(tok_index).len);
return Span{ .start = start, .end = end };
return Span{ .start = start, .end = end, .main = start };
},
.node_offset_builtin_call_arg0 => |n| return src_loc.byteOffsetBuiltinCallArg(gpa, n, 0),
.node_offset_builtin_call_arg1 => |n| return src_loc.byteOffsetBuiltinCallArg(gpa, n, 1),
@@ -2186,16 +2187,13 @@ pub const SrcLoc = struct {
.slice_sentinel => tree.sliceSentinel(node),
else => unreachable,
};
const main_tokens = tree.nodes.items(.main_token);
const part_node = main_tokens[
switch (src_loc.lazy) {
.node_offset_slice_ptr => full.ast.sliced,
.node_offset_slice_start => full.ast.start,
.node_offset_slice_end => full.ast.end,
.node_offset_slice_sentinel => full.ast.sentinel,
else => unreachable,
}
];
const part_node = switch (src_loc.lazy) {
.node_offset_slice_ptr => full.ast.sliced,
.node_offset_slice_start => full.ast.start,
.node_offset_slice_end => full.ast.end,
.node_offset_slice_sentinel => full.ast.sentinel,
else => unreachable,
};
return nodeToSpan(tree, part_node);
},
.node_offset_call_func => |node_off| {
@@ -2231,7 +2229,7 @@ pub const SrcLoc = struct {
};
const start = tree.tokens.items(.start)[tok_index];
const end = start + @intCast(u32, tree.tokenSlice(tok_index).len);
return Span{ .start = start, .end = end };
return Span{ .start = start, .end = end, .main = start };
},
.node_offset_deref_ptr => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
@@ -2422,7 +2420,7 @@ pub const SrcLoc = struct {
const tok_index = full.lib_name.?;
const start = tree.tokens.items(.start)[tok_index];
const end = start + @intCast(u32, tree.tokenSlice(tok_index).len);
return Span{ .start = start, .end = end };
return Span{ .start = start, .end = end, .main = start };
},
.node_offset_array_type_len => |node_off| {
@@ -2495,28 +2493,25 @@ pub const SrcLoc = struct {
pub fn nodeToSpan(tree: *const Ast, node: u32) Span {
const token_starts = tree.tokens.items(.start);
const main_token = tree.nodes.items(.main_token)[node];
const start = tree.firstToken(node);
const end = tree.lastToken(node);
if (tree.tokensOnSameLine(start, end)) {
const start_off = token_starts[start];
const end_off = token_starts[end] + @intCast(u32, tree.tokenSlice(end).len);
return Span{ .start = start_off, .end = end_off };
}
var start_tok = start;
var end_tok = end;
const main_token = tree.nodes.items(.main_token)[node];
if (tree.tokensOnSameLine(start, main_token)) {
const start_off = token_starts[start];
const end_off = token_starts[main_token] + @intCast(u32, tree.tokenSlice(main_token).len);
return Span{ .start = start_off, .end = end_off };
if (tree.tokensOnSameLine(start, end)) {
// do nothing
} else if (tree.tokensOnSameLine(start, main_token)) {
end_tok = main_token;
} else if (tree.tokensOnSameLine(main_token, end)) {
start_tok = main_token;
} else {
start_tok = main_token;
end_tok = main_token;
}
if (tree.tokensOnSameLine(main_token, end)) {
const start_off = token_starts[main_token];
const end_off = token_starts[end] + @intCast(u32, tree.tokenSlice(end).len);
return Span{ .start = start_off, .end = end_off };
}
const start_off = token_starts[main_token];
const end_off = token_starts[main_token] + @intCast(u32, tree.tokenSlice(main_token).len);
return Span{ .start = start_off, .end = end_off };
const start_off = token_starts[start_tok];
const end_off = token_starts[end_tok] + @intCast(u32, tree.tokenSlice(end_tok).len);
return Span{ .start = start_off, .end = end_off, .main = token_starts[main_token] };
}
};
@@ -3283,7 +3278,7 @@ pub fn astGenFile(mod: *Module, file: *File) !void {
.lazy = if (extra_offset == 0) .{
.token_abs = parse_err.token,
} else .{
.byte_abs = token_starts[parse_err.token],
.byte_abs = token_starts[parse_err.token] + extra_offset,
},
},
.msg = msg.toOwnedSlice(),

View file

@@ -4381,7 +4381,7 @@ fn printErrsMsgToStdErr(
.msg = try std.fmt.allocPrint(arena, "invalid byte: '{'}'", .{
std.zig.fmtEscapes(tree.source[byte_offset..][0..1]),
}),
.span = .{ .start = byte_offset, .end = byte_offset + 1 },
.span = .{ .start = byte_offset, .end = byte_offset + 1, .main = byte_offset },
.line = @intCast(u32, start_loc.line),
.column = @intCast(u32, start_loc.column) + bad_off,
.source_line = source_line,
@@ -4401,7 +4401,11 @@ fn printErrsMsgToStdErr(
.src = .{
.src_path = path,
.msg = try arena.dupe(u8, text_buf.items),
.span = .{ .start = byte_offset, .end = byte_offset + @intCast(u32, tree.tokenSlice(note.token).len) },
.span = .{
.start = byte_offset,
.end = byte_offset + @intCast(u32, tree.tokenSlice(note.token).len),
.main = byte_offset,
},
.line = @intCast(u32, note_loc.line),
.column = @intCast(u32, note_loc.column),
.source_line = tree.source[note_loc.line_start..note_loc.line_end],
@@ -4417,7 +4421,11 @@ fn printErrsMsgToStdErr(
.src = .{
.src_path = path,
.msg = text,
.span = .{ .start = byte_offset, .end = byte_offset + @intCast(u32, tree.tokenSlice(lok_token).len) },
.span = .{
.start = byte_offset,
.end = byte_offset + @intCast(u32, tree.tokenSlice(lok_token).len),
.main = byte_offset,
},
.line = @intCast(u32, start_loc.line),
.column = @intCast(u32, start_loc.column) + extra_offset,
.source_line = source_line,

View file

@@ -2386,7 +2386,7 @@ const Writer = struct {
const end = std.zig.findLineColumn(tree.source, src_span.end);
try stream.print("{s}:{d}:{d} to :{d}:{d}", .{
@tagName(src), start.line + 1, start.column + 1,
end.line + 1, end.column + 1,
end.line + 1, end.column + 1,
});
}
}

View file

@@ -174,6 +183,15 @@ pub fn addCases(ctx: *TestContext) !void {
});
}
{
const case = ctx.obj("missing semicolon at EOF", .{});
case.addError(
\\const foo = 1
, &[_][]const u8{
\\:1:14: error: expected ';' after declaration
});
}
// TODO test this in stage2, but we won't even try in stage1
//ctx.objErrStage1("inline fn calls itself indirectly",
// \\export fn foo() void {