Merge pull request #22902 from ianprime0509/autodoc-error-reporting

Autodoc: improve error reporting
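In brief, as reflected in the diff below: syntax errors in indexed source files are now reported individually with file, line, and column instead of a single "can't index" message; a missing trailing newline is logged as an error rather than tripping an assertion, and a file that fails to parse is indexed as if it were empty. On the browser side, the wasm log hook now carries a log level, errors are surfaced in a new #errors panel as well as the console, and the bespoke fatal/js.panic helpers are replaced with std.debug.panic.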
Commit 0dcba03b67 by Andrew Kelley, 2025-02-22 17:03:52 -05:00 (committed by GitHub)
4 changed files with 100 additions and 36 deletions

View file

@@ -6,6 +6,9 @@
<title>Zig Documentation</title>
<link rel="icon" href="data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCAxNTMgMTQwIj48ZyBmaWxsPSIjRjdBNDFEIj48Zz48cG9seWdvbiBwb2ludHM9IjQ2LDIyIDI4LDQ0IDE5LDMwIi8+PHBvbHlnb24gcG9pbnRzPSI0NiwyMiAzMywzMyAyOCw0NCAyMiw0NCAyMiw5NSAzMSw5NSAyMCwxMDAgMTIsMTE3IDAsMTE3IDAsMjIiIHNoYXBlLXJlbmRlcmluZz0iY3Jpc3BFZGdlcyIvPjxwb2x5Z29uIHBvaW50cz0iMzEsOTUgMTIsMTE3IDQsMTA2Ii8+PC9nPjxnPjxwb2x5Z29uIHBvaW50cz0iNTYsMjIgNjIsMzYgMzcsNDQiLz48cG9seWdvbiBwb2ludHM9IjU2LDIyIDExMSwyMiAxMTEsNDQgMzcsNDQgNTYsMzIiIHNoYXBlLXJlbmRlcmluZz0iY3Jpc3BFZGdlcyIvPjxwb2x5Z29uIHBvaW50cz0iMTE2LDk1IDk3LDExNyA5MCwxMDQiLz48cG9seWdvbiBwb2ludHM9IjExNiw5NSAxMDAsMTA0IDk3LDExNyA0MiwxMTcgNDIsOTUiIHNoYXBlLXJlbmRlcmluZz0iY3Jpc3BFZGdlcyIvPjxwb2x5Z29uIHBvaW50cz0iMTUwLDAgNTIsMTE3IDMsMTQwIDEwMSwyMiIvPjwvZz48Zz48cG9seWdvbiBwb2ludHM9IjE0MSwyMiAxNDAsNDAgMTIyLDQ1Ii8+PHBvbHlnb24gcG9pbnRzPSIxNTMsMjIgMTUzLDExNyAxMDYsMTE3IDEyMCwxMDUgMTI1LDk1IDEzMSw5NSAxMzEsNDUgMTIyLDQ1IDEzMiwzNiAxNDEsMjIiIHNoYXBlLXJlbmRlcmluZz0iY3Jpc3BFZGdlcyIvPjxwb2x5Z29uIHBvaW50cz0iMTI1LDk1IDEzMCwxMTAgMTA2LDExNyIvPjwvZz48L2c+PC9zdmc+">
<style type="text/css">
*, *::before, *::after {
box-sizing: border-box;
}
body {
font-family: system-ui, -apple-system, Roboto, "Segoe UI", sans-serif;
color: #000000;
@@ -157,6 +160,23 @@
cursor: default;
}
#errors {
background-color: #faa;
position: fixed;
left: 0;
bottom: 0;
width: 100%;
max-height: min(20em, 50vh);
padding: 0.5em;
overflow: auto;
}
#errors h1 {
font-size: 1.5em;
}
#errors pre {
background-color: #fcc;
}
#listSearchResults li.selected {
background-color: #93e196;
}
@@ -252,6 +272,14 @@
#listSearchResults li.selected a {
color: #fff;
}
#errors {
background-color: #800;
color: #fff;
}
#errors pre {
background-color: #a00;
color: #fff;
}
dl > div {
border-color: #373737;
}
@@ -414,6 +442,10 @@
<dl><dt><kbd>↓</kbd></dt><dd>Move down in search results</dd></dl>
<dl><dt><kbd>⏎</kbd></dt><dd>Go to active search result</dd></dl>
</div>
<div id="errors" class="hidden">
<h1>Errors</h1>
<pre id="errorsText"></pre>
</div>
<script src="main.js"></script>
</body>
</html>

View file

@@ -11,6 +11,11 @@
const CAT_type_type = 9;
const CAT_type_function = 10;
const LOG_err = 0;
const LOG_warn = 1;
const LOG_info = 2;
const LOG_debug = 3;
const domDocTestsCode = document.getElementById("docTestsCode");
const domFnErrorsAnyError = document.getElementById("fnErrorsAnyError");
const domFnProto = document.getElementById("fnProto");
@@ -48,6 +53,8 @@
const domStatus = document.getElementById("status");
const domTableFnErrors = document.getElementById("tableFnErrors");
const domTldDocs = document.getElementById("tldDocs");
const domErrors = document.getElementById("errors");
const domErrorsText = document.getElementById("errorsText");
var searchTimer = null;
@@ -84,13 +91,24 @@
WebAssembly.instantiateStreaming(wasm_promise, {
js: {
log: function(ptr, len) {
log: function(level, ptr, len) {
const msg = decodeString(ptr, len);
console.log(msg);
},
panic: function (ptr, len) {
const msg = decodeString(ptr, len);
throw new Error("panic: " + msg);
switch (level) {
case LOG_err:
console.error(msg);
domErrorsText.textContent += msg + "\n";
domErrors.classList.remove("hidden");
break;
case LOG_warn:
console.warn(msg);
break;
case LOG_info:
console.info(msg);
break;
case LOG_debug:
console.debug(msg);
break;
}
},
},
}).then(function(obj) {

View file

@@ -406,15 +406,11 @@ pub const ModuleIndex = enum(u32) {
};
pub fn add_file(file_name: []const u8, bytes: []u8) !File.Index {
const ast = try parse(bytes);
const ast = try parse(file_name, bytes);
assert(ast.errors.len == 0);
const file_index: File.Index = @enumFromInt(files.entries.len);
try files.put(gpa, file_name, .{ .ast = ast });
if (ast.errors.len > 0) {
log.err("can't index '{s}' because it has syntax errors", .{file_index.path()});
return file_index;
}
var w: Walk = .{
.file = file_index,
};
@@ -434,20 +430,41 @@ pub fn add_file(file_name: []const u8, bytes: []u8) !File.Index {
return file_index;
}
fn parse(source: []u8) Oom!Ast {
/// Parses a file and returns its `Ast`. If the file cannot be parsed, returns
/// the `Ast` of an empty file, so that the rest of the Autodoc logic does not
/// need to handle parse errors.
fn parse(file_name: []const u8, source: []u8) Oom!Ast {
// Require every source file to end with a newline so that Zig's tokenizer
// can continue to require null termination and Autodoc implementation can
// avoid copying source bytes from the decompressed tar file buffer.
const adjusted_source: [:0]const u8 = s: {
if (source.len == 0)
break :s "";
assert(source[source.len - 1] == '\n');
if (source[source.len - 1] != '\n') {
log.err("{s}: expected newline at end of file", .{file_name});
break :s "";
}
source[source.len - 1] = 0;
break :s source[0 .. source.len - 1 :0];
};
return Ast.parse(gpa, adjusted_source, .zig);
var ast = try Ast.parse(gpa, adjusted_source, .zig);
if (ast.errors.len > 0) {
defer ast.deinit(gpa);
const token_offsets = ast.tokens.items(.start);
var rendered_err: std.ArrayListUnmanaged(u8) = .{};
defer rendered_err.deinit(gpa);
for (ast.errors) |err| {
const err_offset = token_offsets[err.token] + ast.errorOffset(err);
const err_loc = std.zig.findLineColumn(ast.source, err_offset);
rendered_err.clearRetainingCapacity();
try ast.renderError(err, rendered_err.writer(gpa));
log.err("{s}:{}:{}: {s}", .{ file_name, err_loc.line + 1, err_loc.column + 1, rendered_err.items });
}
return Ast.parse(gpa, "", .zig);
}
return ast;
}
pub const Scope = struct {

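The new error-rendering loop in parse uses std.zig.Ast directly. Below is a minimal standalone sketch of the same approach, assuming a deliberately malformed source; the bad.zig file name is a placeholder invented for the example and is not part of this commit.

const std = @import("std");

test "render parse errors with line and column information" {
    const gpa = std.testing.allocator;
    // Deliberately malformed source so Ast.parse produces at least one error.
    const bad_source: [:0]const u8 = "const = 1;\n";

    var ast = try std.zig.Ast.parse(gpa, bad_source, .zig);
    defer ast.deinit(gpa);
    try std.testing.expect(ast.errors.len > 0);

    const token_starts = ast.tokens.items(.start);
    var rendered: std.ArrayListUnmanaged(u8) = .{};
    defer rendered.deinit(gpa);

    for (ast.errors) |parse_err| {
        const byte_offset = token_starts[parse_err.token] + ast.errorOffset(parse_err);
        const loc = std.zig.findLineColumn(ast.source, byte_offset);
        rendered.clearRetainingCapacity();
        try ast.renderError(parse_err, rendered.writer(gpa));
        // "bad.zig" stands in for the file_name argument used above.
        std.debug.print("bad.zig:{d}:{d}: {s}\n", .{ loc.line + 1, loc.column + 1, rendered.items });
    }
}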
View file

@@ -14,8 +14,15 @@ const missing_feature_url_escape = @import("html_render.zig").missing_feature_url_escape;
const gpa = std.heap.wasm_allocator;
const js = struct {
extern "js" fn log(ptr: [*]const u8, len: usize) void;
extern "js" fn panic(ptr: [*]const u8, len: usize) noreturn;
/// Keep in sync with the `LOG_` constants in `main.js`.
const LogLevel = enum(u8) {
err,
warn,
info,
debug,
};
extern "js" fn log(level: LogLevel, ptr: [*]const u8, len: usize) void;
};
pub const std_options: std.Options = .{
@@ -36,14 +43,13 @@ fn logFn(
comptime format: []const u8,
args: anytype,
) void {
const level_txt = comptime message_level.asText();
const prefix2 = if (scope == .default) ": " else "(" ++ @tagName(scope) ++ "): ";
const prefix = if (scope == .default) "" else @tagName(scope) ++ ": ";
var buf: [500]u8 = undefined;
const line = std.fmt.bufPrint(&buf, level_txt ++ prefix2 ++ format, args) catch l: {
const line = std.fmt.bufPrint(&buf, prefix ++ format, args) catch l: {
buf[buf.len - 3 ..][0..3].* = "...".*;
break :l &buf;
};
js.log(line.ptr, line.len);
js.log(@field(js.LogLevel, @tagName(message_level)), line.ptr, line.len);
}
export fn alloc(n: usize) [*]u8 {
@@ -56,7 +62,7 @@ export fn unpack(tar_ptr: [*]u8, tar_len: usize) void {
//log.debug("received {d} bytes of tar file", .{tar_bytes.len});
unpackInner(tar_bytes) catch |err| {
fatal("unable to unpack tar: {s}", .{@errorName(err)});
std.debug.panic("unable to unpack tar: {s}", .{@errorName(err)});
};
}
@@ -514,7 +520,7 @@ export fn decl_fn_proto_html(decl_index: Decl.Index, linkify_fn_name: bool) String {
.collapse_whitespace = true,
.fn_link = if (linkify_fn_name) decl_index else .none,
}) catch |err| {
fatal("unable to render source: {s}", .{@errorName(err)});
std.debug.panic("unable to render source: {s}", .{@errorName(err)});
};
return String.init(string_result.items);
}
@@ -524,7 +530,7 @@ export fn decl_source_html(decl_index: Decl.Index) String {
string_result.clearRetainingCapacity();
fileSourceHtml(decl.file, &string_result, decl.ast_node, .{}) catch |err| {
fatal("unable to render source: {s}", .{@errorName(err)});
std.debug.panic("unable to render source: {s}", .{@errorName(err)});
};
return String.init(string_result.items);
}
@@ -536,7 +542,7 @@ export fn decl_doctest_html(decl_index: Decl.Index) String {
string_result.clearRetainingCapacity();
fileSourceHtml(decl.file, &string_result, doctest_ast_node, .{}) catch |err| {
fatal("unable to render source: {s}", .{@errorName(err)});
std.debug.panic("unable to render source: {s}", .{@errorName(err)});
};
return String.init(string_result.items);
}
@@ -740,7 +746,7 @@ export fn decl_type_html(decl_index: Decl.Index) String {
.skip_comments = true,
.collapse_whitespace = true,
}) catch |e| {
fatal("unable to render html: {s}", .{@errorName(e)});
std.debug.panic("unable to render html: {s}", .{@errorName(e)});
};
string_result.appendSlice(gpa, "</code>") catch @panic("OOM");
break :t;
@@ -791,15 +797,6 @@ fn unpackInner(tar_bytes: []u8) !void {
}
}
fn fatal(comptime format: []const u8, args: anytype) noreturn {
var buf: [500]u8 = undefined;
const line = std.fmt.bufPrint(&buf, format, args) catch l: {
buf[buf.len - 3 ..][0..3].* = "...".*;
break :l &buf;
};
js.panic(line.ptr, line.len);
}
fn ascii_lower(bytes: []u8) void {
for (bytes) |*b| b.* = std.ascii.toLower(b.*);
}