diff --git a/src/resinator/ani.zig b/lib/compiler/resinator/ani.zig similarity index 100% rename from src/resinator/ani.zig rename to lib/compiler/resinator/ani.zig diff --git a/src/resinator/ast.zig b/lib/compiler/resinator/ast.zig similarity index 100% rename from src/resinator/ast.zig rename to lib/compiler/resinator/ast.zig diff --git a/src/resinator/bmp.zig b/lib/compiler/resinator/bmp.zig similarity index 100% rename from src/resinator/bmp.zig rename to lib/compiler/resinator/bmp.zig diff --git a/src/resinator/cli.zig b/lib/compiler/resinator/cli.zig similarity index 91% rename from src/resinator/cli.zig rename to lib/compiler/resinator/cli.zig index 59bd878adfd0..deee1ed54a3b 100644 --- a/src/resinator/cli.zig +++ b/lib/compiler/resinator/cli.zig @@ -50,8 +50,14 @@ pub const usage_string_after_command_name = \\ /:auto-includes Set the automatic include path detection behavior. \\ any (default) Use MSVC if available, fall back to MinGW \\ msvc Use MSVC include paths (must be present on the system) - \\ gnu Use MinGW include paths (requires Zig as the preprocessor) + \\ gnu Use MinGW include paths \\ none Do not use any autodetected include paths + \\ /:depfile Output a file containing a list of all the files that + \\ the .rc includes or otherwise depends on. + \\ /:depfile-fmt Output format of the depfile, if /:depfile is set. + \\ json (default) A top-level JSON array of paths + \\ /:mingw-includes Path to a directory containing MinGW include files. If + \\ not specified, bundled MinGW include files will be used. \\ \\Note: For compatibility reasons, all custom options start with : \\ @@ -140,8 +146,12 @@ pub const Options = struct { debug: bool = false, print_help_and_exit: bool = false, auto_includes: AutoIncludes = .any, + depfile_path: ?[]const u8 = null, + depfile_fmt: DepfileFormat = .json, + mingw_includes_dir: ?[]const u8 = null, pub const AutoIncludes = enum { any, msvc, gnu, none }; + pub const DepfileFormat = enum { json }; pub const Preprocess = enum { no, yes, only }; pub const SymbolAction = enum { define, undefine }; pub const SymbolValue = union(SymbolAction) { @@ -207,7 +217,7 @@ pub const Options = struct { cwd.access(options.input_filename, .{}) catch |err| switch (err) { error.FileNotFound => { var filename_bytes = try options.allocator.alloc(u8, options.input_filename.len + 3); - @memcpy(filename_bytes[0 .. 
filename_bytes.len - 3], options.input_filename); + @memcpy(filename_bytes[0..options.input_filename.len], options.input_filename); @memcpy(filename_bytes[filename_bytes.len - 3 ..], ".rc"); options.allocator.free(options.input_filename); options.input_filename = filename_bytes; @@ -230,6 +240,12 @@ pub const Options = struct { entry.value_ptr.deinit(self.allocator); } self.symbols.deinit(self.allocator); + if (self.depfile_path) |depfile_path| { + self.allocator.free(depfile_path); + } + if (self.mingw_includes_dir) |mingw_includes_dir| { + self.allocator.free(mingw_includes_dir); + } } pub fn dumpVerbose(self: *const Options, writer: anytype) !void { @@ -394,7 +410,7 @@ pub fn parse(allocator: Allocator, args: []const []const u8, diagnostics: *Diagn var output_filename: ?[]const u8 = null; var output_filename_context: Arg.Context = undefined; - var arg_i: usize = 1; // start at 1 to skip past the exe name + var arg_i: usize = 0; next_arg: while (arg_i < args.len) { var arg = Arg.fromString(args[arg_i]) orelse break; if (arg.name().len == 0) { @@ -424,6 +440,24 @@ pub fn parse(allocator: Allocator, args: []const []const u8, diagnostics: *Diagn if (std.ascii.startsWithIgnoreCase(arg_name, ":no-preprocess")) { options.preprocess = .no; arg.name_offset += ":no-preprocess".len; + } else if (std.ascii.startsWithIgnoreCase(arg_name, ":mingw-includes")) { + const value = arg.value(":mingw-includes".len, arg_i, args) catch { + var err_details = Diagnostics.ErrorDetails{ .arg_index = arg_i, .arg_span = arg.missingSpan() }; + var msg_writer = err_details.msg.writer(allocator); + try msg_writer.print("missing value after {s}{s} option", .{ arg.prefixSlice(), arg.optionWithoutPrefix(":mingw-includes".len) }); + try diagnostics.append(err_details); + arg_i += 1; + break :next_arg; + }; + if (options.mingw_includes_dir) |overwritten_path| { + allocator.free(overwritten_path); + options.mingw_includes_dir = null; + } + const path = try allocator.dupe(u8, value.slice); + errdefer allocator.free(path); + options.mingw_includes_dir = path; + arg_i += value.index_increment; + continue :next_arg; } else if (std.ascii.startsWithIgnoreCase(arg_name, ":auto-includes")) { const value = arg.value(":auto-includes".len, arg_i, args) catch { var err_details = Diagnostics.ErrorDetails{ .arg_index = arg_i, .arg_span = arg.missingSpan() }; @@ -442,6 +476,42 @@ pub fn parse(allocator: Allocator, args: []const []const u8, diagnostics: *Diagn }; arg_i += value.index_increment; continue :next_arg; + } else if (std.ascii.startsWithIgnoreCase(arg_name, ":depfile-fmt")) { + const value = arg.value(":depfile-fmt".len, arg_i, args) catch { + var err_details = Diagnostics.ErrorDetails{ .arg_index = arg_i, .arg_span = arg.missingSpan() }; + var msg_writer = err_details.msg.writer(allocator); + try msg_writer.print("missing value after {s}{s} option", .{ arg.prefixSlice(), arg.optionWithoutPrefix(":depfile-fmt".len) }); + try diagnostics.append(err_details); + arg_i += 1; + break :next_arg; + }; + options.depfile_fmt = std.meta.stringToEnum(Options.DepfileFormat, value.slice) orelse blk: { + var err_details = Diagnostics.ErrorDetails{ .arg_index = arg_i, .arg_span = value.argSpan(arg) }; + var msg_writer = err_details.msg.writer(allocator); + try msg_writer.print("invalid depfile format setting: {s} ", .{value.slice}); + try diagnostics.append(err_details); + break :blk options.depfile_fmt; + }; + arg_i += value.index_increment; + continue :next_arg; + } else if (std.ascii.startsWithIgnoreCase(arg_name, ":depfile")) { + const 
value = arg.value(":depfile".len, arg_i, args) catch { + var err_details = Diagnostics.ErrorDetails{ .arg_index = arg_i, .arg_span = arg.missingSpan() }; + var msg_writer = err_details.msg.writer(allocator); + try msg_writer.print("missing value after {s}{s} option", .{ arg.prefixSlice(), arg.optionWithoutPrefix(":depfile".len) }); + try diagnostics.append(err_details); + arg_i += 1; + break :next_arg; + }; + if (options.depfile_path) |overwritten_path| { + allocator.free(overwritten_path); + options.depfile_path = null; + } + const path = try allocator.dupe(u8, value.slice); + errdefer allocator.free(path); + options.depfile_path = path; + arg_i += value.index_increment; + continue :next_arg; } else if (std.ascii.startsWithIgnoreCase(arg_name, "nologo")) { // No-op, we don't display any 'logo' to suppress arg.name_offset += "nologo".len; @@ -837,7 +907,7 @@ pub fn parse(allocator: Allocator, args: []const []const u8, diagnostics: *Diagn try diagnostics.append(err_details); const last_arg = args[args.len - 1]; - if (arg_i > 1 and last_arg.len > 0 and last_arg[0] == '/' and std.ascii.endsWithIgnoreCase(last_arg, ".rc")) { + if (arg_i > 0 and last_arg.len > 0 and last_arg[0] == '/' and std.ascii.endsWithIgnoreCase(last_arg, ".rc")) { var note_details = Diagnostics.ErrorDetails{ .type = .note, .print_args = true, .arg_index = arg_i - 1 }; var note_writer = note_details.msg.writer(allocator); try note_writer.writeAll("if this argument was intended to be the input filename, then -- should be specified in front of it to exclude it from option parsing"); @@ -1116,7 +1186,7 @@ fn testParseOutput(args: []const []const u8, expected_output: []const u8) !?Opti } test "parse errors: basic" { - try testParseError(&.{ "foo.exe", "/" }, + try testParseError(&.{"/"}, \\: error: invalid option: / \\ ... / \\ ^ @@ -1124,7 +1194,7 @@ test "parse errors: basic" { \\ \\ ); - try testParseError(&.{ "foo.exe", "/ln" }, + try testParseError(&.{"/ln"}, \\: error: missing language tag after /ln option \\ ... /ln \\ ~~~~^ @@ -1132,7 +1202,7 @@ test "parse errors: basic" { \\ \\ ); - try testParseError(&.{ "foo.exe", "-vln" }, + try testParseError(&.{"-vln"}, \\: error: missing language tag after -ln option \\ ... -vln \\ ~ ~~~^ @@ -1140,7 +1210,7 @@ test "parse errors: basic" { \\ \\ ); - try testParseError(&.{ "foo.exe", "/_not-an-option" }, + try testParseError(&.{"/_not-an-option"}, \\: error: invalid option: /_not-an-option \\ ... /_not-an-option \\ ~^~~~~~~~~~~~~~ @@ -1148,7 +1218,7 @@ test "parse errors: basic" { \\ \\ ); - try testParseError(&.{ "foo.exe", "-_not-an-option" }, + try testParseError(&.{"-_not-an-option"}, \\: error: invalid option: -_not-an-option \\ ... -_not-an-option \\ ~^~~~~~~~~~~~~~ @@ -1156,7 +1226,7 @@ test "parse errors: basic" { \\ \\ ); - try testParseError(&.{ "foo.exe", "--_not-an-option" }, + try testParseError(&.{"--_not-an-option"}, \\: error: invalid option: --_not-an-option \\ ... --_not-an-option \\ ~~^~~~~~~~~~~~~~ @@ -1164,7 +1234,7 @@ test "parse errors: basic" { \\ \\ ); - try testParseError(&.{ "foo.exe", "/v_not-an-option" }, + try testParseError(&.{"/v_not-an-option"}, \\: error: invalid option: /_not-an-option \\ ... /v_not-an-option \\ ~ ^~~~~~~~~~~~~~ @@ -1172,7 +1242,7 @@ test "parse errors: basic" { \\ \\ ); - try testParseError(&.{ "foo.exe", "-v_not-an-option" }, + try testParseError(&.{"-v_not-an-option"}, \\: error: invalid option: -_not-an-option \\ ... 
-v_not-an-option \\ ~ ^~~~~~~~~~~~~~ @@ -1180,7 +1250,7 @@ test "parse errors: basic" { \\ \\ ); - try testParseError(&.{ "foo.exe", "--v_not-an-option" }, + try testParseError(&.{"--v_not-an-option"}, \\: error: invalid option: --_not-an-option \\ ... --v_not-an-option \\ ~~ ^~~~~~~~~~~~~~ @@ -1188,7 +1258,7 @@ test "parse errors: basic" { \\ \\ ); - try testParseError(&.{ "foo.exe", "/some/absolute/path/parsed/as/an/option.rc" }, + try testParseError(&.{"/some/absolute/path/parsed/as/an/option.rc"}, \\: error: the /s option is unsupported \\ ... /some/absolute/path/parsed/as/an/option.rc \\ ~^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -1202,13 +1272,13 @@ test "parse errors: basic" { } test "parse errors: /ln" { - try testParseError(&.{ "foo.exe", "/ln", "invalid", "foo.rc" }, + try testParseError(&.{ "/ln", "invalid", "foo.rc" }, \\: error: invalid language tag: invalid \\ ... /ln invalid ... \\ ~~~~^~~~~~~ \\ ); - try testParseError(&.{ "foo.exe", "/lninvalid", "foo.rc" }, + try testParseError(&.{ "/lninvalid", "foo.rc" }, \\: error: invalid language tag: invalid \\ ... /lninvalid ... \\ ~~~^~~~~~~ @@ -1218,7 +1288,7 @@ test "parse errors: /ln" { test "parse: options" { { - var options = try testParse(&.{ "foo.exe", "/v", "foo.rc" }); + var options = try testParse(&.{ "/v", "foo.rc" }); defer options.deinit(); try std.testing.expectEqual(true, options.verbose); @@ -1226,7 +1296,7 @@ test "parse: options" { try std.testing.expectEqualStrings("foo.res", options.output_filename); } { - var options = try testParse(&.{ "foo.exe", "/vx", "foo.rc" }); + var options = try testParse(&.{ "/vx", "foo.rc" }); defer options.deinit(); try std.testing.expectEqual(true, options.verbose); @@ -1235,7 +1305,7 @@ test "parse: options" { try std.testing.expectEqualStrings("foo.res", options.output_filename); } { - var options = try testParse(&.{ "foo.exe", "/xv", "foo.rc" }); + var options = try testParse(&.{ "/xv", "foo.rc" }); defer options.deinit(); try std.testing.expectEqual(true, options.verbose); @@ -1244,7 +1314,7 @@ test "parse: options" { try std.testing.expectEqualStrings("foo.res", options.output_filename); } { - var options = try testParse(&.{ "foo.exe", "/xvFObar.res", "foo.rc" }); + var options = try testParse(&.{ "/xvFObar.res", "foo.rc" }); defer options.deinit(); try std.testing.expectEqual(true, options.verbose); @@ -1256,23 +1326,21 @@ test "parse: options" { test "parse: define and undefine" { { - var options = try testParse(&.{ "foo.exe", "/dfoo", "foo.rc" }); + var options = try testParse(&.{ "/dfoo", "foo.rc" }); defer options.deinit(); const action = options.symbols.get("foo").?; - try std.testing.expectEqual(Options.SymbolAction.define, action); try std.testing.expectEqualStrings("1", action.define); } { - var options = try testParse(&.{ "foo.exe", "/dfoo=bar", "/dfoo=baz", "foo.rc" }); + var options = try testParse(&.{ "/dfoo=bar", "/dfoo=baz", "foo.rc" }); defer options.deinit(); const action = options.symbols.get("foo").?; - try std.testing.expectEqual(Options.SymbolAction.define, action); try std.testing.expectEqualStrings("baz", action.define); } { - var options = try testParse(&.{ "foo.exe", "/ufoo", "foo.rc" }); + var options = try testParse(&.{ "/ufoo", "foo.rc" }); defer options.deinit(); const action = options.symbols.get("foo").?; @@ -1280,7 +1348,7 @@ test "parse: define and undefine" { } { // Once undefined, future defines are ignored - var options = try testParse(&.{ "foo.exe", "/ufoo", "/dfoo", "foo.rc" }); + var options = try testParse(&.{ "/ufoo", "/dfoo", 
"foo.rc" }); defer options.deinit(); const action = options.symbols.get("foo").?; @@ -1288,7 +1356,7 @@ test "parse: define and undefine" { } { // Undefined always takes precedence - var options = try testParse(&.{ "foo.exe", "/dfoo", "/ufoo", "/dfoo", "foo.rc" }); + var options = try testParse(&.{ "/dfoo", "/ufoo", "/dfoo", "foo.rc" }); defer options.deinit(); const action = options.symbols.get("foo").?; @@ -1297,7 +1365,7 @@ test "parse: define and undefine" { { // Warn + ignore invalid identifiers var options = try testParseWarning( - &.{ "foo.exe", "/dfoo bar", "/u", "0leadingdigit", "foo.rc" }, + &.{ "/dfoo bar", "/u", "0leadingdigit", "foo.rc" }, \\: warning: symbol "foo bar" is not a valid identifier and therefore cannot be defined \\ ... /dfoo bar ... \\ ~~^~~~~~~ @@ -1314,7 +1382,7 @@ test "parse: define and undefine" { } test "parse: /sl" { - try testParseError(&.{ "foo.exe", "/sl", "0", "foo.rc" }, + try testParseError(&.{ "/sl", "0", "foo.rc" }, \\: error: percent out of range: 0 (parsed from '0') \\ ... /sl 0 ... \\ ~~~~^ @@ -1322,7 +1390,7 @@ test "parse: /sl" { \\ \\ ); - try testParseError(&.{ "foo.exe", "/sl", "abcd", "foo.rc" }, + try testParseError(&.{ "/sl", "abcd", "foo.rc" }, \\: error: invalid percent format 'abcd' \\ ... /sl abcd ... \\ ~~~~^~~~ @@ -1331,25 +1399,25 @@ test "parse: /sl" { \\ ); { - var options = try testParse(&.{ "foo.exe", "foo.rc" }); + var options = try testParse(&.{"foo.rc"}); defer options.deinit(); try std.testing.expectEqual(@as(u15, lex.default_max_string_literal_codepoints), options.max_string_literal_codepoints); } { - var options = try testParse(&.{ "foo.exe", "/sl100", "foo.rc" }); + var options = try testParse(&.{ "/sl100", "foo.rc" }); defer options.deinit(); try std.testing.expectEqual(@as(u15, max_string_literal_length_100_percent), options.max_string_literal_codepoints); } { - var options = try testParse(&.{ "foo.exe", "-SL33", "foo.rc" }); + var options = try testParse(&.{ "-SL33", "foo.rc" }); defer options.deinit(); try std.testing.expectEqual(@as(u15, 2703), options.max_string_literal_codepoints); } { - var options = try testParse(&.{ "foo.exe", "/sl15", "foo.rc" }); + var options = try testParse(&.{ "/sl15", "foo.rc" }); defer options.deinit(); try std.testing.expectEqual(@as(u15, 1228), options.max_string_literal_codepoints); @@ -1357,7 +1425,7 @@ test "parse: /sl" { } test "parse: unsupported MUI-related options" { - try testParseError(&.{ "foo.exe", "/q", "blah", "/g1", "-G2", "blah", "/fm", "blah", "/g", "blah", "foo.rc" }, + try testParseError(&.{ "/q", "blah", "/g1", "-G2", "blah", "/fm", "blah", "/g", "blah", "foo.rc" }, \\: error: the /q option is unsupported \\ ... /q ... \\ ~^ @@ -1378,7 +1446,7 @@ test "parse: unsupported MUI-related options" { } test "parse: unsupported LCX/LCE-related options" { - try testParseError(&.{ "foo.exe", "/t", "/tp:", "/tp:blah", "/tm", "/tc", "/tw", "-TEti", "/ta", "/tn", "blah", "foo.rc" }, + try testParseError(&.{ "/t", "/tp:", "/tp:blah", "/tm", "/tc", "/tw", "-TEti", "/ta", "/tn", "blah", "foo.rc" }, \\: error: the /t option is unsupported \\ ... /t ... 
\\ ~^ @@ -1420,7 +1488,7 @@ test "maybeAppendRC" { var tmp = std.testing.tmpDir(.{}); defer tmp.cleanup(); - var options = try testParse(&.{ "foo.exe", "foo" }); + var options = try testParse(&.{"foo"}); defer options.deinit(); try std.testing.expectEqualStrings("foo", options.input_filename); diff --git a/src/resinator/code_pages.zig b/lib/compiler/resinator/code_pages.zig similarity index 97% rename from src/resinator/code_pages.zig rename to lib/compiler/resinator/code_pages.zig index be89ed3f02c7..cacd5acded3a 100644 --- a/src/resinator/code_pages.zig +++ b/lib/compiler/resinator/code_pages.zig @@ -279,6 +279,9 @@ pub const CodePage = enum(u16) { pub const Utf8 = struct { /// Implements decoding with rejection of ill-formed UTF-8 sequences based on section /// D92 of Chapter 3 of the Unicode standard (Table 3-7 specifically). + /// + /// Note: This does not match "U+FFFD Substitution of Maximal Subparts", but instead + /// matches the behavior of the Windows RC compiler. pub const WellFormedDecoder = struct { /// Like std.unicode.utf8ByteSequenceLength, but: /// - Rejects non-well-formed first bytes, i.e. C0-C1, F5-FF @@ -347,9 +350,6 @@ pub const Utf8 = struct { // Only include the byte in the invalid sequence if it's in the range // of a continuation byte. All other values should not be included in the // invalid sequence. - // - // Note: This is how the Windows RC compiler handles this, this may not - // be the correct-as-according-to-the-Unicode-standard way to do it. if (isContinuationByte(byte)) len += 1; return .{ .value = Codepoint.invalid, .byte_len = len }; } @@ -437,6 +437,19 @@ test "codepointAt invalid utf8" { }, CodePage.utf8.codepointAt(1, invalid_utf8).?); try std.testing.expectEqual(@as(?Codepoint, null), CodePage.windows1252.codepointAt(2, invalid_utf8)); } + + { + // encoded high surrogate + const invalid_utf8 = "\xED\xA0\xBD"; + try std.testing.expectEqual(Codepoint{ + .value = Codepoint.invalid, + .byte_len = 2, + }, CodePage.utf8.codepointAt(0, invalid_utf8).?); + try std.testing.expectEqual(Codepoint{ + .value = Codepoint.invalid, + .byte_len = 1, + }, CodePage.utf8.codepointAt(2, invalid_utf8).?); + } } test "codepointAt utf8 encoded" { diff --git a/src/resinator/comments.zig b/lib/compiler/resinator/comments.zig similarity index 86% rename from src/resinator/comments.zig rename to lib/compiler/resinator/comments.zig index a631b8bb9384..67504bbbeb28 100644 --- a/src/resinator/comments.zig +++ b/lib/compiler/resinator/comments.zig @@ -22,7 +22,7 @@ const formsLineEndingPair = @import("source_mapping.zig").formsLineEndingPair; /// `buf` must be at least as long as `source` /// In-place transformation is supported (i.e. 
`source` and `buf` can be the same slice) -pub fn removeComments(source: []const u8, buf: []u8, source_mappings: ?*SourceMappings) []u8 { +pub fn removeComments(source: []const u8, buf: []u8, source_mappings: ?*SourceMappings) ![]u8 { std.debug.assert(buf.len >= source.len); var result = UncheckedSliceWriter{ .slice = buf }; const State = enum { @@ -85,7 +85,7 @@ pub fn removeComments(source: []const u8, buf: []u8, source_mappings: ?*SourceMa else => {}, }, .multiline_comment => switch (c) { - '\r' => handleMultilineCarriageReturn(source, &line_handler, index, &result, source_mappings), + '\r' => try handleMultilineCarriageReturn(source, &line_handler, index, &result, source_mappings), '\n' => { _ = line_handler.incrementLineNumber(index); result.write(c); @@ -95,7 +95,7 @@ pub fn removeComments(source: []const u8, buf: []u8, source_mappings: ?*SourceMa }, .multiline_comment_end => switch (c) { '\r' => { - handleMultilineCarriageReturn(source, &line_handler, index, &result, source_mappings); + try handleMultilineCarriageReturn(source, &line_handler, index, &result, source_mappings); // We only want to treat this as a newline if it's part of a CRLF pair. If it's // not, then we still want to stay in .multiline_comment_end, so that e.g. `*<\r>/` still // functions as a `*/` comment ending. Kinda crazy, but that's how the Win32 implementation works. @@ -184,13 +184,21 @@ inline fn handleMultilineCarriageReturn( index: usize, result: *UncheckedSliceWriter, source_mappings: ?*SourceMappings, -) void { +) !void { + // This is a dumb way to go about this, but basically we want to determine + // if this is part of a distinct CRLF or LFCR pair. This function call will detect + // LFCR pairs correctly since the function we're in will only be called on CR, + // but will not detect CRLF pairs since it only looks at the line ending before the + // CR. So, we do a second (forward) check if the first fails to detect CRLF that is + // not part of another pair. 
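// Editor's sketch (not part of the patch): the pair check described above boils down to
// looking at the byte adjacent to the '\r'. A self-contained, hypothetical helper
// (`isPairedLineEnding` is an assumed name, not the real formsLineEndingPair):
fn isPairedLineEnding(source: []const u8, this_ending: u8, other_idx: usize) bool {
    if (other_idx >= source.len) return false;
    const other = source[other_idx];
    return (this_ending == '\r' and other == '\n') or (this_ending == '\n' and other == '\r');
}
// e.g. isPairedLineEnding("a\r\n", '\r', 2) == true (CRLF pair, the '\r' is kept),
//      isPairedLineEnding("a\r*", '\r', 2) == false (bare CR, dropped from the output).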
+ const is_lfcr_pair = line_handler.currentIndexFormsLineEndingPair(index); + const is_crlf_pair = !is_lfcr_pair and formsLineEndingPair(source, '\r', index + 1); // Note: Bare \r within a multiline comment should *not* be treated as a line ending for the // purposes of removing comments, but *should* be treated as a line ending for the // purposes of line counting/source mapping _ = line_handler.incrementLineNumber(index); - // So only write the \r if it's part of a CRLF pair - if (formsLineEndingPair(source, '\r', index + 1)) { + // So only write the \r if it's part of a CRLF/LFCR pair + if (is_lfcr_pair or is_crlf_pair) { result.write('\r'); } // And otherwise, we want to collapse the source mapping so that we can still know which @@ -200,7 +208,7 @@ inline fn handleMultilineCarriageReturn( // the next collapse acts on the first of the collapsed line numbers line_handler.line_number -= 1; if (source_mappings) |mappings| { - mappings.collapse(line_handler.line_number, 1); + try mappings.collapse(line_handler.line_number, 1); } } } @@ -208,7 +216,7 @@ inline fn handleMultilineCarriageReturn( pub fn removeCommentsAlloc(allocator: Allocator, source: []const u8, source_mappings: ?*SourceMappings) ![]u8 { const buf = try allocator.alloc(u8, source.len); errdefer allocator.free(buf); - const result = removeComments(source, buf, source_mappings); + const result = try removeComments(source, buf, source_mappings); return allocator.realloc(buf, result.len); } @@ -252,6 +260,16 @@ test "line comments retain newlines" { try testRemoveComments("\r\n", "//comment\r\n"); } +test "unfinished multiline comment" { + try testRemoveComments( + \\unfinished + \\ + , + \\unfinished/* + \\ + ); +} + test "crazy" { try testRemoveComments( \\blah"/*som*/\""BLAH @@ -321,20 +339,20 @@ test "remove comments with mappings" { var mut_source = "blah/*\rcommented line*\r/blah".*; var mappings = SourceMappings{}; _ = try mappings.files.put(allocator, "test.rc"); - try mappings.set(allocator, 1, .{ .start_line = 1, .end_line = 1, .filename_offset = 0 }); - try mappings.set(allocator, 2, .{ .start_line = 2, .end_line = 2, .filename_offset = 0 }); - try mappings.set(allocator, 3, .{ .start_line = 3, .end_line = 3, .filename_offset = 0 }); + try mappings.set(1, 1, 0); + try mappings.set(2, 2, 0); + try mappings.set(3, 3, 0); defer mappings.deinit(allocator); - const result = removeComments(&mut_source, &mut_source, &mappings); + const result = try removeComments(&mut_source, &mut_source, &mappings); try std.testing.expectEqualStrings("blahblah", result); - try std.testing.expectEqual(@as(usize, 1), mappings.mapping.items.len); - try std.testing.expectEqual(@as(usize, 3), mappings.mapping.items[0].end_line); + try std.testing.expectEqual(@as(usize, 1), mappings.end_line); + try std.testing.expectEqual(@as(usize, 3), mappings.getCorrespondingSpan(1).?.end_line); } test "in place" { var mut_source = "blah /* comment */ blah".*; - const result = removeComments(&mut_source, &mut_source, null); + const result = try removeComments(&mut_source, &mut_source, null); try std.testing.expectEqualStrings("blah blah", result); } diff --git a/src/resinator/compile.zig b/lib/compiler/resinator/compile.zig similarity index 97% rename from src/resinator/compile.zig rename to lib/compiler/resinator/compile.zig index 0c71b02027c7..ebe741e79b0c 100644 --- a/src/resinator/compile.zig +++ b/lib/compiler/resinator/compile.zig @@ -321,10 +321,7 @@ pub const Compiler = struct { return buf.toOwnedSlice(); }, - else => { - std.debug.print("unexpected 
filename token type: {}\n", .{literal_node.token}); - unreachable; // no other token types should be in a filename literal node - }, + else => unreachable, // no other token types should be in a filename literal node } }, .binary_expression => { @@ -404,6 +401,72 @@ pub const Compiler = struct { return first_error orelse error.FileNotFound; } + pub fn parseDlgIncludeString(self: *Compiler, token: Token) ![]u8 { + // For the purposes of parsing, we want to strip the L prefix + // if it exists since we want escaped integers to be limited to + // their ascii string range. + // + // We keep track of whether or not there was an L prefix, though, + // since there's more weirdness to come. + var bytes = self.sourceBytesForToken(token); + var was_wide_string = false; + if (bytes.slice[0] == 'L' or bytes.slice[0] == 'l') { + was_wide_string = true; + bytes.slice = bytes.slice[1..]; + } + + var buf = try std.ArrayList(u8).initCapacity(self.allocator, bytes.slice.len); + errdefer buf.deinit(); + + var iterative_parser = literals.IterativeStringParser.init(bytes, .{ + .start_column = token.calculateColumn(self.source, 8, null), + .diagnostics = .{ .diagnostics = self.diagnostics, .token = token }, + }); + + // No real idea what's going on here, but this matches the rc.exe behavior + while (try iterative_parser.next()) |parsed| { + const c = parsed.codepoint; + switch (was_wide_string) { + true => { + switch (c) { + 0...0x7F, 0xA0...0xFF => try buf.append(@intCast(c)), + 0x80...0x9F => { + if (windows1252.bestFitFromCodepoint(c)) |_| { + try buf.append(@intCast(c)); + } else { + try buf.append('?'); + } + }, + else => { + if (windows1252.bestFitFromCodepoint(c)) |best_fit| { + try buf.append(best_fit); + } else if (c < 0x10000 or c == code_pages.Codepoint.invalid) { + try buf.append('?'); + } else { + try buf.appendSlice("??"); + } + }, + } + }, + false => { + if (parsed.from_escaped_integer) { + try buf.append(@truncate(c)); + } else { + if (windows1252.bestFitFromCodepoint(c)) |best_fit| { + try buf.append(best_fit); + } else if (c < 0x10000 or c == code_pages.Codepoint.invalid) { + try buf.append('?'); + } else { + try buf.appendSlice("??"); + } + } + }, + } + } + + return buf.toOwnedSlice(); + } + pub fn writeResourceExternal(self: *Compiler, node: *Node.ResourceExternal, writer: anytype) !void { // Init header with data size zero for now, will need to fill it in later var header = try self.resourceHeader(node.id, node.type, .{}); @@ -414,13 +477,16 @@ pub const Compiler = struct { // DLGINCLUDE has special handling that doesn't actually need the file to exist if (maybe_predefined_type != null and maybe_predefined_type.? 
== .DLGINCLUDE) { const filename_token = node.filename.cast(.literal).?.token; - const parsed_filename = try self.parseQuotedStringAsAsciiString(filename_token); + const parsed_filename = try self.parseDlgIncludeString(filename_token); defer self.allocator.free(parsed_filename); + // NUL within the parsed string acts as a terminator + const parsed_filename_terminated = std.mem.sliceTo(parsed_filename, 0); + header.applyMemoryFlags(node.common_resource_attributes, self.source); - header.data_size = @intCast(parsed_filename.len + 1); + header.data_size = @intCast(parsed_filename_terminated.len + 1); try header.write(writer, .{ .diagnostics = self.diagnostics, .token = node.id }); - try writer.writeAll(parsed_filename); + try writer.writeAll(parsed_filename_terminated); try writer.writeByte(0); try writeDataPadding(writer, header.data_size); return; @@ -1141,10 +1207,7 @@ pub const Compiler = struct { errdefer self.allocator.free(parsed_string); return .{ .wide_string = parsed_string }; }, - else => { - std.debug.print("unexpected token in literal node: {}\n", .{literal_node.token}); - unreachable; // no other token types should be in a data literal node - }, + else => unreachable, // no other token types should be in a data literal node } }, .binary_expression, .grouped_expression => { @@ -1152,10 +1215,7 @@ pub const Compiler = struct { return .{ .number = result }; }, .not_expression => unreachable, - else => { - std.debug.print("{}\n", .{expression_node.id}); - @panic("TODO: evaluateDataExpression"); - }, + else => unreachable, } } @@ -1669,6 +1729,7 @@ pub const Compiler = struct { }; } + // We know the data_buffer len is limited to u32 max. const data_size: u32 = @intCast(data_buffer.items.len); var header = try self.resourceHeader(node.id, node.type, .{ .data_size = data_size, @@ -1966,6 +2027,7 @@ pub const Compiler = struct { try data_writer.writeInt(u16, 1, .little); try data_writer.writeInt(u16, button_width.asWord(), .little); try data_writer.writeInt(u16, button_height.asWord(), .little); + // Number of buttons is guaranteed by the parser to be within maxInt(u16). try data_writer.writeInt(u16, @as(u16, @intCast(node.buttons.len)), .little); for (node.buttons) |button_or_sep| { @@ -2806,19 +2868,6 @@ pub const Compiler = struct { ); } - /// Helper that calls parseQuotedStringAsAsciiString with the relevant context - /// Resulting slice is allocated by `self.allocator`. 
- pub fn parseQuotedStringAsAsciiString(self: *Compiler, token: Token) ![]u8 { - return literals.parseQuotedStringAsAsciiString( - self.allocator, - self.sourceBytesForToken(token), - .{ - .start_column = token.calculateColumn(self.source, 8, null), - .diagnostics = .{ .diagnostics = self.diagnostics, .token = token }, - }, - ); - } - fn addErrorDetails(self: *Compiler, details: ErrorDetails) Allocator.Error!void { try self.diagnostics.append(details); } @@ -3356,7 +3405,7 @@ test "StringTable" { } break :ids buf; }; - var prng = std.Random.DefaultPrng.init(0); + var prng = std.rand.DefaultPrng.init(0); var random = prng.random(); random.shuffle(u16, &ids); diff --git a/src/resinator/errors.zig b/lib/compiler/resinator/errors.zig similarity index 97% rename from src/resinator/errors.zig rename to lib/compiler/resinator/errors.zig index 62c67ab90b81..44a9d4616374 100644 --- a/src/resinator/errors.zig +++ b/lib/compiler/resinator/errors.zig @@ -316,7 +316,7 @@ pub const ErrorDetails = struct { rc_would_miscompile_version_value_byte_count, code_page_pragma_in_included_file, nested_resource_level_exceeds_max, - too_many_dialog_controls, + too_many_dialog_controls_or_toolbar_buttons, nested_expression_level_exceeds_max, close_paren_expression, unary_plus_expression, @@ -543,9 +543,15 @@ pub const ErrorDetails = struct { .note => return writer.print("max {s} nesting level exceeded here", .{self.extra.resource.nameForErrorDisplay()}), .hint => return, }, - .too_many_dialog_controls => switch (self.type) { - .err, .warning => return writer.print("{s} contains too many controls (max is {})", .{ self.extra.resource.nameForErrorDisplay(), std.math.maxInt(u16) }), - .note => return writer.writeAll("maximum number of controls exceeded here"), + .too_many_dialog_controls_or_toolbar_buttons => switch (self.type) { + .err, .warning => return writer.print("{s} contains too many {s} (max is {})", .{ self.extra.resource.nameForErrorDisplay(), switch (self.extra.resource) { + .toolbar => "buttons", + else => "controls", + }, std.math.maxInt(u16) }), + .note => return writer.print("maximum number of {s} exceeded here", .{switch (self.extra.resource) { + .toolbar => "buttons", + else => "controls", + }}), .hint => return, }, .nested_expression_level_exceeds_max => switch (self.type) { @@ -825,13 +831,13 @@ pub const ErrorDetails = struct { pub fn renderErrorMessage(allocator: std.mem.Allocator, writer: anytype, tty_config: std.io.tty.Config, cwd: std.fs.Dir, err_details: ErrorDetails, source: []const u8, strings: []const []const u8, source_mappings: ?SourceMappings) !void { if (err_details.type == .hint) return; - const source_line_start = err_details.token.getLineStart(source); + const source_line_start = err_details.token.getLineStartForErrorDisplay(source); // Treat tab stops as 1 column wide for error display purposes, // and add one to get a 1-based column const column = err_details.token.calculateColumn(source, 1, source_line_start) + 1; - const corresponding_span: ?SourceMappings.SourceSpan = if (source_mappings != null and source_mappings.?.has(err_details.token.line_number)) - source_mappings.?.get(err_details.token.line_number) + const corresponding_span: ?SourceMappings.CorrespondingSpan = if (source_mappings) |mappings| + mappings.getCorrespondingSpan(err_details.token.line_number) else null; const corresponding_file: ?[]const u8 = if (source_mappings != null and corresponding_span != null) @@ -877,7 +883,7 @@ pub fn renderErrorMessage(allocator: std.mem.Allocator, writer: anytype, tty_con return; } 
- const source_line = err_details.token.getLine(source, source_line_start); + const source_line = err_details.token.getLineForErrorDisplay(source, source_line_start); const visual_info = err_details.visualTokenInfo(source_line_start, source_line_start + source_line.len); // Need this to determine if the 'line originated from' note is worth printing @@ -965,7 +971,7 @@ const CorrespondingLines = struct { lines: std.ArrayListUnmanaged(u8) = .{}, lines_is_error_message: bool = false, - pub fn init(allocator: std.mem.Allocator, cwd: std.fs.Dir, err_details: ErrorDetails, lines_for_comparison: []const u8, corresponding_span: SourceMappings.SourceSpan, corresponding_file: []const u8) !CorrespondingLines { + pub fn init(allocator: std.mem.Allocator, cwd: std.fs.Dir, err_details: ErrorDetails, lines_for_comparison: []const u8, corresponding_span: SourceMappings.CorrespondingSpan, corresponding_file: []const u8) !CorrespondingLines { var corresponding_lines = CorrespondingLines{}; // We don't do line comparison for this error, so don't print the note if the line @@ -1035,17 +1041,27 @@ inline fn writeSourceByte(writer: anytype, byte: u8) !void { pub fn writeLinesFromStream(writer: anytype, input: anytype, start_line: usize, end_line: usize) !void { var line_num: usize = 1; + var last_byte: u8 = 0; while (try readByteOrEof(input)) |byte| { switch (byte) { - '\n' => { - if (line_num == end_line) return; - if (line_num >= start_line) try writeSourceByte(writer, byte); - line_num += 1; + '\n', '\r' => { + if (!utils.isLineEndingPair(last_byte, byte)) { + if (line_num == end_line) return; + if (line_num >= start_line) try writeSourceByte(writer, byte); + line_num += 1; + } else { + // reset last_byte to a non-line ending so that + // consecutive CRLF pairs don't get treated as one + // long line ending 'pair' + last_byte = 0; + continue; + } }, else => { if (line_num >= start_line) try writeSourceByte(writer, byte); }, } + last_byte = byte; } if (line_num != end_line) { return error.LinesNotFound; diff --git a/src/resinator/ico.zig b/lib/compiler/resinator/ico.zig similarity index 100% rename from src/resinator/ico.zig rename to lib/compiler/resinator/ico.zig diff --git a/src/resinator/lang.zig b/lib/compiler/resinator/lang.zig similarity index 99% rename from src/resinator/lang.zig rename to lib/compiler/resinator/lang.zig index 7b2b98ca9ad6..51843cff8716 100644 --- a/src/resinator/lang.zig +++ b/lib/compiler/resinator/lang.zig @@ -140,7 +140,7 @@ test "exhaustive tagToId" { writer.writeAll(parsed_sort.suffix.?) 
catch unreachable; const expected_field_name = comptime field: { var name_buf: [5]u8 = undefined; - @memcpy(&name_buf[0..parsed_sort.language_code.len], parsed_sort.language_code); + @memcpy(name_buf[0..parsed_sort.language_code.len], parsed_sort.language_code); name_buf[2] = '_'; @memcpy(name_buf[3..], parsed_sort.country_code.?); break :field name_buf; diff --git a/src/resinator/lex.zig b/lib/compiler/resinator/lex.zig similarity index 97% rename from src/resinator/lex.zig rename to lib/compiler/resinator/lex.zig index 89a620c415f3..91ebba467dc0 100644 --- a/src/resinator/lex.zig +++ b/lib/compiler/resinator/lex.zig @@ -71,7 +71,7 @@ pub const Token = struct { /// Returns 0-based column pub fn calculateColumn(token: Token, source: []const u8, tab_columns: usize, maybe_line_start: ?usize) usize { - const line_start = maybe_line_start orelse token.getLineStart(source); + const line_start = maybe_line_start orelse token.getLineStartForColumnCalc(source); var i: usize = line_start; var column: usize = 0; @@ -81,13 +81,9 @@ pub const Token = struct { return column; } - // TODO: This doesn't necessarily match up with how we count line numbers, but where a line starts - // has a knock-on effect on calculateColumn. More testing is needed to determine what needs - // to be changed to make this both (1) match how line numbers are counted and (2) match how - // the Win32 RC compiler counts tab columns. - // + // TODO: More testing is needed to determine if this can be merged with getLineStartForErrorDisplay // (the TODO in currentIndexFormsLineEndingPair should be taken into account as well) - pub fn getLineStart(token: Token, source: []const u8) usize { + pub fn getLineStartForColumnCalc(token: Token, source: []const u8) usize { const line_start = line_start: { if (token.start != 0) { // start checking at the byte before the token @@ -102,14 +98,26 @@ pub const Token = struct { return line_start; } - pub fn getLine(token: Token, source: []const u8, maybe_line_start: ?usize) []const u8 { - const line_start = maybe_line_start orelse token.getLineStart(source); + pub fn getLineStartForErrorDisplay(token: Token, source: []const u8) usize { + const line_start = line_start: { + if (token.start != 0) { + // start checking at the byte before the token + var index = token.start - 1; + while (true) { + if (source[index] == '\r' or source[index] == '\n') break :line_start @min(source.len - 1, index + 1); + if (index != 0) index -= 1 else break; + } + } + break :line_start 0; + }; + return line_start; + } - var line_end = line_start + 1; - if (line_end >= source.len or source[line_end] == '\n') return source[line_start..line_start]; - while (line_end < source.len and source[line_end] != '\n') : (line_end += 1) {} - while (line_end > 0 and source[line_end - 1] == '\r') : (line_end -= 1) {} + pub fn getLineForErrorDisplay(token: Token, source: []const u8, maybe_line_start: ?usize) []const u8 { + const line_start = maybe_line_start orelse token.getLineStartForErrorDisplay(source); + var line_end = line_start; + while (line_end < source.len and source[line_end] != '\r' and source[line_end] != '\n') : (line_end += 1) {} return source[line_start..line_end]; } diff --git a/src/resinator/literals.zig b/lib/compiler/resinator/literals.zig similarity index 92% rename from src/resinator/literals.zig rename to lib/compiler/resinator/literals.zig index 2d69e83cdc05..b653e08bd818 100644 --- a/src/resinator/literals.zig +++ b/lib/compiler/resinator/literals.zig @@ -98,6 +98,11 @@ pub const IterativeStringParser = struct { 
pub const ParsedCodepoint = struct { codepoint: u21, + /// Note: If this is true, `codepoint` will be a value with a max of maxInt(u16). + /// This is enforced by using saturating arithmetic, so in e.g. a wide string literal the + /// octal escape sequence \7777777 (2,097,151) will be parsed into the value 0xFFFF (65,535). + /// If the value needs to be truncated to a smaller integer (for ASCII string literals), then that + /// must be done by the caller. from_escaped_integer: bool = false, }; @@ -156,13 +161,14 @@ pub const IterativeStringParser = struct { .wide => 4, }; - while (self.code_page.codepointAt(self.index, self.source)) |codepoint| : (self.index += codepoint.byte_len) { + var backtrack: bool = undefined; + while (self.code_page.codepointAt(self.index, self.source)) |codepoint| : ({ + if (!backtrack) self.index += codepoint.byte_len; + }) { + backtrack = false; const c = codepoint.value; - var backtrack = false; defer { - if (backtrack) { - self.index -= codepoint.byte_len; - } else { + if (!backtrack) { if (c == '\t') { self.column += columnsUntilTabStop(self.column, 8); } else { @@ -213,10 +219,12 @@ pub const IterativeStringParser = struct { .newline => switch (c) { '\r', ' ', '\t', '\n', '\x0b', '\x0c', '\xa0' => {}, else => { - // backtrack so that we handle the current char properly + // we intentionally avoid incrementing self.index + // to handle the current char in the next call, + // and we set backtrack so column count is handled correctly backtrack = true; + // - self.index += codepoint.byte_len; self.pending_codepoint = '\n'; return .{ .codepoint = ' ' }; }, @@ -263,9 +271,10 @@ pub const IterativeStringParser = struct { else => switch (self.declared_string_type) { .wide => {}, // invalid escape sequences are skipped in wide strings .ascii => { - // backtrack so that we handle the current char properly + // we intentionally avoid incrementing self.index + // to handle the current char in the next call, + // and we set backtrack so column count is handled correctly backtrack = true; - self.index += codepoint.byte_len; return .{ .codepoint = '\\' }; }, }, @@ -277,9 +286,10 @@ pub const IterativeStringParser = struct { '\r' => {}, '\n' => state = .escaped_newlines, else => { - // backtrack so that we handle the current char properly + // we intentionally avoid incrementing self.index + // to handle the current char in the next call, + // and we set backtrack so column count is handled correctly backtrack = true; - self.index += codepoint.byte_len; return .{ .codepoint = '\\' }; }, }, @@ -297,24 +307,18 @@ pub const IterativeStringParser = struct { string_escape_n +%= std.fmt.charToDigit(@intCast(c), 8) catch unreachable; string_escape_i += 1; if (string_escape_i == max_octal_escape_digits) { - const escaped_value = switch (self.declared_string_type) { - .ascii => @as(u8, @truncate(string_escape_n)), - .wide => string_escape_n, - }; self.index += codepoint.byte_len; - return .{ .codepoint = escaped_value, .from_escaped_integer = true }; + return .{ .codepoint = string_escape_n, .from_escaped_integer = true }; } }, else => { - // backtrack so that we handle the current char properly + // we intentionally avoid incrementing self.index + // to handle the current char in the next call, + // and we set backtrack so column count is handled correctly backtrack = true; + // write out whatever byte we have parsed so far - const escaped_value = switch (self.declared_string_type) { - .ascii => @as(u8, @truncate(string_escape_n)), - .wide => string_escape_n, - }; - self.index += 
codepoint.byte_len; - return .{ .codepoint = escaped_value, .from_escaped_integer = true }; + return .{ .codepoint = string_escape_n, .from_escaped_integer = true }; }, }, .escaped_hex => switch (c) { @@ -323,24 +327,19 @@ pub const IterativeStringParser = struct { string_escape_n += std.fmt.charToDigit(@intCast(c), 16) catch unreachable; string_escape_i += 1; if (string_escape_i == max_hex_escape_digits) { - const escaped_value = switch (self.declared_string_type) { - .ascii => @as(u8, @truncate(string_escape_n)), - .wide => string_escape_n, - }; self.index += codepoint.byte_len; - return .{ .codepoint = escaped_value, .from_escaped_integer = true }; + return .{ .codepoint = string_escape_n, .from_escaped_integer = true }; } }, else => { - // backtrack so that we handle the current char properly + // we intentionally avoid incrementing self.index + // to handle the current char in the next call, + // and we set backtrack so column count is handled correctly backtrack = true; + // write out whatever byte we have parsed so far // (even with 0 actual digits, \x alone parses to 0) - const escaped_value = switch (self.declared_string_type) { - .ascii => @as(u8, @truncate(string_escape_n)), - .wide => string_escape_n, - }; - self.index += codepoint.byte_len; + const escaped_value = string_escape_n; return .{ .codepoint = escaped_value, .from_escaped_integer = true }; }, }, @@ -356,11 +355,7 @@ pub const IterativeStringParser = struct { }, .escaped, .escaped_cr => return .{ .codepoint = '\\' }, .escaped_octal, .escaped_hex => { - const escaped_value = switch (self.declared_string_type) { - .ascii => @as(u8, @truncate(string_escape_n)), - .wide => string_escape_n, - }; - return .{ .codepoint = escaped_value, .from_escaped_integer = true }; + return .{ .codepoint = string_escape_n, .from_escaped_integer = true }; }, .quote => unreachable, // this is a bug in the lexer } @@ -395,7 +390,8 @@ pub fn parseQuotedString( while (try iterative_parser.next()) |parsed| { const c = parsed.codepoint; if (parsed.from_escaped_integer) { - try buf.append(std.mem.nativeToLittle(T, @intCast(c))); + // We truncate here to get the correct behavior for ascii strings + try buf.append(std.mem.nativeToLittle(T, @truncate(c))); } else { switch (literal_type) { .ascii => switch (options.output_code_page) { @@ -458,11 +454,6 @@ pub fn parseQuotedStringAsWideString(allocator: std.mem.Allocator, bytes: Source return parseQuotedString(.wide, allocator, bytes, options); } -pub fn parseQuotedStringAsAsciiString(allocator: std.mem.Allocator, bytes: SourceBytes, options: StringParseOptions) ![]u8 { - std.debug.assert(bytes.slice.len >= 2); // "" - return parseQuotedString(.ascii, allocator, bytes, options); -} - test "parse quoted ascii string" { var arena_allocator = std.heap.ArenaAllocator.init(std.testing.allocator); defer arena_allocator.deinit(); @@ -651,6 +642,14 @@ test "parse quoted ascii string with utf8 code page" { .{ .slice = "\"\xF2\xAF\xBA\xB4\"", .code_page = .utf8 }, .{ .output_code_page = .utf8 }, )); + + // This used to cause integer overflow when reconsuming the 4-byte long codepoint + // after the escaped CRLF pair. 
+ try std.testing.expectEqualSlices(u8, "\u{10348}", try parseQuotedAsciiString( + arena, + .{ .slice = "\"\\\r\n\u{10348}\"", .code_page = .utf8 }, + .{ .output_code_page = .utf8 }, + )); } test "parse quoted wide string" { diff --git a/lib/compiler/resinator/main.zig b/lib/compiler/resinator/main.zig new file mode 100644 index 000000000000..e598a4512230 --- /dev/null +++ b/lib/compiler/resinator/main.zig @@ -0,0 +1,298 @@ +const std = @import("std"); +const builtin = @import("builtin"); +const removeComments = @import("comments.zig").removeComments; +const parseAndRemoveLineCommands = @import("source_mapping.zig").parseAndRemoveLineCommands; +const compile = @import("compile.zig").compile; +const Diagnostics = @import("errors.zig").Diagnostics; +const cli = @import("cli.zig"); +const preprocess = @import("preprocess.zig"); +const renderErrorMessage = @import("utils.zig").renderErrorMessage; +const aro = @import("aro"); + +pub fn main() !void { + var gpa = std.heap.GeneralPurposeAllocator(.{}){}; + defer std.debug.assert(gpa.deinit() == .ok); + const allocator = gpa.allocator(); + + const stderr = std.io.getStdErr(); + const stderr_config = std.io.tty.detectConfig(stderr); + + const args = try std.process.argsAlloc(allocator); + defer std.process.argsFree(allocator, args); + + if (args.len < 2) { + try renderErrorMessage(stderr.writer(), stderr_config, .err, "expected zig lib dir as first argument", .{}); + std.os.exit(1); + } + const zig_lib_dir = args[1]; + + var options = options: { + var cli_diagnostics = cli.Diagnostics.init(allocator); + defer cli_diagnostics.deinit(); + var options = cli.parse(allocator, args[2..], &cli_diagnostics) catch |err| switch (err) { + error.ParseError => { + cli_diagnostics.renderToStdErr(args, stderr_config); + std.os.exit(1); + }, + else => |e| return e, + }; + try options.maybeAppendRC(std.fs.cwd()); + + // print any warnings/notes + cli_diagnostics.renderToStdErr(args, stderr_config); + // If there was something printed, then add an extra newline separator + // so that there is a clear separation between the cli diagnostics and whatever + // gets printed after + if (cli_diagnostics.errors.items.len > 0) { + try stderr.writeAll("\n"); + } + break :options options; + }; + defer options.deinit(); + + if (options.print_help_and_exit) { + try cli.writeUsage(stderr.writer(), "zig rc"); + return; + } + + const stdout_writer = std.io.getStdOut().writer(); + if (options.verbose) { + try options.dumpVerbose(stdout_writer); + try stdout_writer.writeByte('\n'); + } + + var dependencies_list = std.ArrayList([]const u8).init(allocator); + defer { + for (dependencies_list.items) |item| { + allocator.free(item); + } + dependencies_list.deinit(); + } + const maybe_dependencies_list: ?*std.ArrayList([]const u8) = if (options.depfile_path != null) &dependencies_list else null; + + const full_input = full_input: { + if (options.preprocess != .no) { + var preprocessed_buf = std.ArrayList(u8).init(allocator); + errdefer preprocessed_buf.deinit(); + + // We're going to throw away everything except the final preprocessed output anyway, + // so we can use a scoped arena for everything else. 
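// Editor's sketch (not part of the patch): the "scoped arena" pattern referred to
// above — a single deinit() frees every allocation made through the arena, so none
// of the preprocessor's intermediate allocations need individual cleanup:
test "scoped arena pattern (illustrative)" {
    var arena_state = std.heap.ArenaAllocator.init(std.testing.allocator);
    defer arena_state.deinit(); // releases everything allocated below at once
    const arena = arena_state.allocator();
    const scratch = try arena.alloc(u8, 256); // no matching free() required
    _ = scratch;
}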
+ var aro_arena_state = std.heap.ArenaAllocator.init(allocator); + defer aro_arena_state.deinit(); + const aro_arena = aro_arena_state.allocator(); + + const include_paths = getIncludePaths(aro_arena, options.auto_includes, zig_lib_dir) catch |err| switch (err) { + error.OutOfMemory => |e| return e, + else => |e| { + switch (e) { + error.MsvcIncludesNotFound => { + try renderErrorMessage(stderr.writer(), stderr_config, .err, "MSVC include paths could not be automatically detected", .{}); + }, + error.MingwIncludesNotFound => { + try renderErrorMessage(stderr.writer(), stderr_config, .err, "MinGW include paths could not be automatically detected", .{}); + }, + } + try renderErrorMessage(stderr.writer(), stderr_config, .note, "to disable auto includes, use the option /:auto-includes none", .{}); + std.os.exit(1); + }, + }; + + var comp = aro.Compilation.init(aro_arena); + defer comp.deinit(); + + var argv = std.ArrayList([]const u8).init(comp.gpa); + defer argv.deinit(); + + try argv.append("arocc"); // dummy command name + try preprocess.appendAroArgs(aro_arena, &argv, options, include_paths); + try argv.append(options.input_filename); + + if (options.verbose) { + try stdout_writer.writeAll("Preprocessor: arocc (built-in)\n"); + for (argv.items[0 .. argv.items.len - 1]) |arg| { + try stdout_writer.print("{s} ", .{arg}); + } + try stdout_writer.print("{s}\n\n", .{argv.items[argv.items.len - 1]}); + } + + preprocess.preprocess(&comp, preprocessed_buf.writer(), argv.items, maybe_dependencies_list) catch |err| switch (err) { + error.GeneratedSourceError => { + // extra newline to separate this line from the aro errors + try renderErrorMessage(stderr.writer(), stderr_config, .err, "failed during preprocessor setup (this is always a bug):\n", .{}); + aro.Diagnostics.render(&comp, stderr_config); + std.os.exit(1); + }, + // ArgError can occur if e.g. the .rc file is not found + error.ArgError, error.PreprocessError => { + // extra newline to separate this line from the aro errors + try renderErrorMessage(stderr.writer(), stderr_config, .err, "failed during preprocessing:\n", .{}); + aro.Diagnostics.render(&comp, stderr_config); + std.os.exit(1); + }, + error.StreamTooLong => { + try renderErrorMessage(stderr.writer(), stderr_config, .err, "failed during preprocessing: maximum file size exceeded", .{}); + std.os.exit(1); + }, + error.OutOfMemory => |e| return e, + }; + + break :full_input try preprocessed_buf.toOwnedSlice(); + } else { + break :full_input std.fs.cwd().readFileAlloc(allocator, options.input_filename, std.math.maxInt(usize)) catch |err| { + try renderErrorMessage(stderr.writer(), stderr_config, .err, "unable to read input file path '{s}': {s}", .{ options.input_filename, @errorName(err) }); + std.os.exit(1); + }; + } + }; + defer allocator.free(full_input); + + if (options.preprocess == .only) { + try std.fs.cwd().writeFile(options.output_filename, full_input); + return; + } + + // Note: We still want to run this when no-preprocess is set because: + // 1. We want to print accurate line numbers after removing multiline comments + // 2. 
We want to be able to handle an already-preprocessed input with #line commands in it + var mapping_results = try parseAndRemoveLineCommands(allocator, full_input, full_input, .{ .initial_filename = options.input_filename }); + defer mapping_results.mappings.deinit(allocator); + + const final_input = removeComments(mapping_results.result, mapping_results.result, &mapping_results.mappings) catch |err| switch (err) { + error.InvalidSourceMappingCollapse => { + try renderErrorMessage(stderr.writer(), stderr_config, .err, "failed during comment removal; this is a known bug", .{}); + std.os.exit(1); + }, + else => |e| return e, + }; + + var output_file = std.fs.cwd().createFile(options.output_filename, .{}) catch |err| { + try renderErrorMessage(stderr.writer(), stderr_config, .err, "unable to create output file '{s}': {s}", .{ options.output_filename, @errorName(err) }); + std.os.exit(1); + }; + var output_file_closed = false; + defer if (!output_file_closed) output_file.close(); + + var diagnostics = Diagnostics.init(allocator); + defer diagnostics.deinit(); + + var output_buffered_stream = std.io.bufferedWriter(output_file.writer()); + + compile(allocator, final_input, output_buffered_stream.writer(), .{ + .cwd = std.fs.cwd(), + .diagnostics = &diagnostics, + .source_mappings = &mapping_results.mappings, + .dependencies_list = maybe_dependencies_list, + .ignore_include_env_var = options.ignore_include_env_var, + .extra_include_paths = options.extra_include_paths.items, + .default_language_id = options.default_language_id, + .default_code_page = options.default_code_page orelse .windows1252, + .verbose = options.verbose, + .null_terminate_string_table_strings = options.null_terminate_string_table_strings, + .max_string_literal_codepoints = options.max_string_literal_codepoints, + .silent_duplicate_control_ids = options.silent_duplicate_control_ids, + .warn_instead_of_error_on_invalid_code_page = options.warn_instead_of_error_on_invalid_code_page, + }) catch |err| switch (err) { + error.ParseError, error.CompileError => { + diagnostics.renderToStdErr(std.fs.cwd(), final_input, stderr_config, mapping_results.mappings); + // Delete the output file on error + output_file.close(); + output_file_closed = true; + // Failing to delete is not really a big deal, so swallow any errors + std.fs.cwd().deleteFile(options.output_filename) catch {}; + std.os.exit(1); + }, + else => |e| return e, + }; + + try output_buffered_stream.flush(); + + // print any warnings/notes + diagnostics.renderToStdErr(std.fs.cwd(), final_input, stderr_config, mapping_results.mappings); + + // write the depfile + if (options.depfile_path) |depfile_path| { + var depfile = std.fs.cwd().createFile(depfile_path, .{}) catch |err| { + try renderErrorMessage(stderr.writer(), stderr_config, .err, "unable to create depfile '{s}': {s}", .{ depfile_path, @errorName(err) }); + std.os.exit(1); + }; + defer depfile.close(); + + const depfile_writer = depfile.writer(); + var depfile_buffered_writer = std.io.bufferedWriter(depfile_writer); + switch (options.depfile_fmt) { + .json => { + var write_stream = std.json.writeStream(depfile_buffered_writer.writer(), .{ .whitespace = .indent_2 }); + defer write_stream.deinit(); + + try write_stream.beginArray(); + for (dependencies_list.items) |dep_path| { + try write_stream.write(dep_path); + } + try write_stream.endArray(); + }, + } + try depfile_buffered_writer.flush(); + } +} + +fn getIncludePaths(arena: std.mem.Allocator, auto_includes_option: cli.Options.AutoIncludes, zig_lib_dir: []const u8) 
![]const []const u8 { + var includes = auto_includes_option; + if (builtin.target.os.tag != .windows) { + switch (includes) { + // MSVC can't be found when the host isn't Windows, so short-circuit. + .msvc => return error.MsvcIncludesNotFound, + // Skip straight to gnu since we won't be able to detect MSVC on non-Windows hosts. + .any => includes = .gnu, + .none, .gnu => {}, + } + } + + while (true) { + switch (includes) { + .none => return &[_][]const u8{}, + .any, .msvc => { + // MSVC is only detectable on Windows targets. This unreachable is to signify + // that .any and .msvc should be dealt with on non-Windows targets before this point, + // since getting MSVC include paths uses Windows-only APIs. + if (builtin.target.os.tag != .windows) unreachable; + + const target_query: std.Target.Query = .{ + .os_tag = .windows, + .abi = .msvc, + }; + const target = std.zig.resolveTargetQueryOrFatal(target_query); + const is_native_abi = target_query.isNativeAbi(); + const detected_libc = std.zig.LibCDirs.detect(arena, zig_lib_dir, target, is_native_abi, true, null) catch { + if (includes == .any) { + // fall back to mingw + includes = .gnu; + continue; + } + return error.MsvcIncludesNotFound; + }; + if (detected_libc.libc_include_dir_list.len == 0) { + if (includes == .any) { + // fall back to mingw + includes = .gnu; + continue; + } + return error.MsvcIncludesNotFound; + } + return detected_libc.libc_include_dir_list; + }, + .gnu => { + const target_query: std.Target.Query = .{ + .os_tag = .windows, + .abi = .gnu, + }; + const target = std.zig.resolveTargetQueryOrFatal(target_query); + const is_native_abi = target_query.isNativeAbi(); + const detected_libc = std.zig.LibCDirs.detect(arena, zig_lib_dir, target, is_native_abi, true, null) catch |err| switch (err) { + error.OutOfMemory => |e| return e, + else => return error.MingwIncludesNotFound, + }; + return detected_libc.libc_include_dir_list; + }, + } + } +} diff --git a/src/resinator/parse.zig b/lib/compiler/resinator/parse.zig similarity index 98% rename from src/resinator/parse.zig rename to lib/compiler/resinator/parse.zig index 7d56b92134e7..3426c389b162 100644 --- a/src/resinator/parse.zig +++ b/lib/compiler/resinator/parse.zig @@ -174,8 +174,6 @@ pub const Parser = struct { } }, }); } - // TODO: Wrapping this in a Node.Literal is superfluous but necessary - // to put it in a SimpleStatement const value_node = try self.state.arena.create(Node.Literal); value_node.* = .{ .token = value, @@ -203,8 +201,6 @@ pub const Parser = struct { const identifier = self.state.token; try self.nextToken(.whitespace_delimiter_only); try self.check(.literal); - // TODO: Wrapping this in a Node.Literal is superfluous but necessary - // to put it in a SimpleStatement const value_node = try self.state.arena.create(Node.Literal); value_node.* = .{ .token = self.state.token, @@ -539,12 +535,12 @@ pub const Parser = struct { // be able to be written into the relevant field in the .res data. 
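// Editor's sketch (not part of the patch): why the guard below exists — the .res
// data stores the control/button count in a 16-bit field, so the count must be
// validated before it is ever cast to u16. Values here are hypothetical:
test "guard a count before casting to u16 (illustrative)" {
    const too_many: usize = 70_000; // above the 65,535 limit, reported as an error
    try std.testing.expect(too_many >= std.math.maxInt(u16));
    const ok_count: usize = 12;
    const encoded: u16 = @intCast(ok_count); // safe once the guard has passed
    try std.testing.expectEqual(@as(u16, 12), encoded);
}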
if (controls.items.len >= std.math.maxInt(u16)) { try self.addErrorDetails(.{ - .err = .too_many_dialog_controls, + .err = .too_many_dialog_controls_or_toolbar_buttons, .token = id_token, .extra = .{ .resource = resource }, }); return self.addErrorDetailsAndFail(.{ - .err = .too_many_dialog_controls, + .err = .too_many_dialog_controls_or_toolbar_buttons, .type = .note, .token = control_node.getFirstToken(), .token_span_end = control_node.getLastToken(), @@ -592,8 +588,26 @@ pub const Parser = struct { try self.check(.begin); var buttons = std.ArrayListUnmanaged(*Node){}; + defer buttons.deinit(self.state.allocator); while (try self.parseToolbarButtonStatement()) |button_node| { - try buttons.append(self.state.arena, button_node); + // The number of buttons must fit in a u16 in order for it to + // be able to be written into the relevant field in the .res data. + if (buttons.items.len >= std.math.maxInt(u16)) { + try self.addErrorDetails(.{ + .err = .too_many_dialog_controls_or_toolbar_buttons, + .token = id_token, + .extra = .{ .resource = resource }, + }); + return self.addErrorDetailsAndFail(.{ + .err = .too_many_dialog_controls_or_toolbar_buttons, + .type = .note, + .token = button_node.getFirstToken(), + .token_span_end = button_node.getLastToken(), + .extra = .{ .resource = resource }, + }); + } + + try buttons.append(self.state.allocator, button_node); } try self.nextToken(.normal); @@ -608,7 +622,7 @@ pub const Parser = struct { .button_width = button_width, .button_height = button_height, .begin_token = begin_token, - .buttons = try buttons.toOwnedSlice(self.state.arena), + .buttons = try self.state.arena.dupe(*Node, buttons.items), .end_token = end_token, }; return &node.base; diff --git a/lib/compiler/resinator/preprocess.zig b/lib/compiler/resinator/preprocess.zig new file mode 100644 index 000000000000..3d6912a7c095 --- /dev/null +++ b/lib/compiler/resinator/preprocess.zig @@ -0,0 +1,140 @@ +const std = @import("std"); +const builtin = @import("builtin"); +const Allocator = std.mem.Allocator; +const cli = @import("cli.zig"); +const aro = @import("aro"); + +const PreprocessError = error{ ArgError, GeneratedSourceError, PreprocessError, StreamTooLong, OutOfMemory }; + +pub fn preprocess( + comp: *aro.Compilation, + writer: anytype, + /// Expects argv[0] to be the command name + argv: []const []const u8, + maybe_dependencies_list: ?*std.ArrayList([]const u8), +) PreprocessError!void { + try comp.addDefaultPragmaHandlers(); + + var driver: aro.Driver = .{ .comp = comp, .aro_name = "arocc" }; + defer driver.deinit(); + + var macro_buf = std.ArrayList(u8).init(comp.gpa); + defer macro_buf.deinit(); + + _ = driver.parseArgs(std.io.null_writer, macro_buf.writer(), argv) catch |err| switch (err) { + error.FatalError => return error.ArgError, + error.OutOfMemory => |e| return e, + }; + + if (hasAnyErrors(comp)) return error.ArgError; + + // .include_system_defines gives us things like _WIN32 + const builtin_macros = comp.generateBuiltinMacros(.include_system_defines) catch |err| switch (err) { + error.FatalError => return error.GeneratedSourceError, + else => |e| return e, + }; + const user_macros = comp.addSourceFromBuffer("", macro_buf.items) catch |err| switch (err) { + error.FatalError => return error.GeneratedSourceError, + else => |e| return e, + }; + const source = driver.inputs.items[0]; + + if (hasAnyErrors(comp)) return error.GeneratedSourceError; + + comp.generated_buf.items.len = 0; + var pp = try aro.Preprocessor.initDefault(comp); + defer pp.deinit(); + + if 
(comp.langopts.ms_extensions) { + comp.ms_cwd_source_id = source.id; + } + + pp.preserve_whitespace = true; + pp.linemarkers = .line_directives; + + pp.preprocessSources(&.{ source, builtin_macros, user_macros }) catch |err| switch (err) { + error.FatalError => return error.PreprocessError, + else => |e| return e, + }; + + if (hasAnyErrors(comp)) return error.PreprocessError; + + try pp.prettyPrintTokens(writer); + + if (maybe_dependencies_list) |dependencies_list| { + for (comp.sources.values()) |comp_source| { + if (comp_source.id == builtin_macros.id or comp_source.id == user_macros.id) continue; + if (comp_source.id == .unused or comp_source.id == .generated) continue; + const duped_path = try dependencies_list.allocator.dupe(u8, comp_source.path); + errdefer dependencies_list.allocator.free(duped_path); + try dependencies_list.append(duped_path); + } + } +} + +fn hasAnyErrors(comp: *aro.Compilation) bool { + // In theory we could just check Diagnostics.errors != 0, but that only + // gets set during rendering of the error messages, see: + // https://github.com/Vexu/arocc/issues/603 + for (comp.diagnostics.list.items) |msg| { + switch (msg.kind) { + .@"fatal error", .@"error" => return true, + else => {}, + } + } + return false; +} + +/// `arena` is used for temporary -D argument strings and the INCLUDE environment variable. +/// The arena should be kept alive at least as long as `argv`. +pub fn appendAroArgs(arena: Allocator, argv: *std.ArrayList([]const u8), options: cli.Options, system_include_paths: []const []const u8) !void { + try argv.appendSlice(&.{ + "-E", + "--comments", + "-fuse-line-directives", + "--target=x86_64-windows-msvc", + "--emulate=msvc", + "-nostdinc", + "-DRC_INVOKED", + }); + for (options.extra_include_paths.items) |extra_include_path| { + try argv.append("-I"); + try argv.append(extra_include_path); + } + + for (system_include_paths) |include_path| { + try argv.append("-isystem"); + try argv.append(include_path); + } + + if (!options.ignore_include_env_var) { + const INCLUDE = std.process.getEnvVarOwned(arena, "INCLUDE") catch ""; + + // The only precedence here is llvm-rc which also uses the platform-specific + // delimiter. There's no precedence set by `rc.exe` since it's Windows-only. 
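The delimiter choice described in the comment above is easy to check in isolation. A small sketch with made-up paths (the real values come from the INCLUDE environment variable), assuming the `std` import of the surrounding file:

    test "INCLUDE is split on the platform delimiter" {
        // Windows convention: entries are separated by ';' (POSIX hosts use ':').
        var it = std.mem.tokenizeScalar(u8, "C:\\kits\\include;C:\\msvc\\include", ';');
        try std.testing.expectEqualStrings("C:\\kits\\include", it.next().?);
        try std.testing.expectEqualStrings("C:\\msvc\\include", it.next().?);
        try std.testing.expect(it.next() == null);
    }
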
+ const delimiter = switch (builtin.os.tag) { + .windows => ';', + else => ':', + }; + var it = std.mem.tokenizeScalar(u8, INCLUDE, delimiter); + while (it.next()) |include_path| { + try argv.append("-isystem"); + try argv.append(include_path); + } + } + + var symbol_it = options.symbols.iterator(); + while (symbol_it.next()) |entry| { + switch (entry.value_ptr.*) { + .define => |value| { + try argv.append("-D"); + const define_arg = try std.fmt.allocPrint(arena, "{s}={s}", .{ entry.key_ptr.*, value }); + try argv.append(define_arg); + }, + .undefine => { + try argv.append("-U"); + try argv.append(entry.key_ptr.*); + }, + } + } +} diff --git a/src/resinator/rc.zig b/lib/compiler/resinator/rc.zig similarity index 100% rename from src/resinator/rc.zig rename to lib/compiler/resinator/rc.zig diff --git a/src/resinator/res.zig b/lib/compiler/resinator/res.zig similarity index 99% rename from src/resinator/res.zig rename to lib/compiler/resinator/res.zig index 86102b4e772c..991e0b8fb8a1 100644 --- a/src/resinator/res.zig +++ b/lib/compiler/resinator/res.zig @@ -608,7 +608,7 @@ const AcceleratorKeyCodepointTranslator = struct { const parsed = maybe_parsed orelse return null; if (parsed.codepoint == Codepoint.invalid) return 0xFFFD; if (parsed.from_escaped_integer and self.string_type == .ascii) { - return windows1252.toCodepoint(@intCast(parsed.codepoint)); + return windows1252.toCodepoint(@truncate(parsed.codepoint)); } return parsed.codepoint; } diff --git a/src/resinator/source_mapping.zig b/lib/compiler/resinator/source_mapping.zig similarity index 72% rename from src/resinator/source_mapping.zig rename to lib/compiler/resinator/source_mapping.zig index b6c81cafdc05..37f0b479b305 100644 --- a/src/resinator/source_mapping.zig +++ b/lib/compiler/resinator/source_mapping.zig @@ -1,8 +1,7 @@ const std = @import("std"); const Allocator = std.mem.Allocator; -const UncheckedSliceWriter = @import("utils.zig").UncheckedSliceWriter; -const parseQuotedAsciiString = @import("literals.zig").parseQuotedAsciiString; -const lex = @import("lex.zig"); +const utils = @import("utils.zig"); +const UncheckedSliceWriter = utils.UncheckedSliceWriter; pub const ParseLineCommandsResult = struct { result: []u8, @@ -79,8 +78,9 @@ pub fn parseAndRemoveLineCommands(allocator: Allocator, source: []const u8, buf: }, '\r', '\n' => { const is_crlf = formsLineEndingPair(source, c, index + 1); - try handleLineEnd(allocator, line_number, &parse_result.mappings, &current_mapping); if (!current_mapping.ignore_contents) { + try handleLineEnd(allocator, line_number, &parse_result.mappings, &current_mapping); + result.write(c); if (is_crlf) result.write(source[index + 1]); line_number += 1; @@ -115,8 +115,9 @@ pub fn parseAndRemoveLineCommands(allocator: Allocator, source: []const u8, buf: if (std.mem.startsWith(u8, preprocessor_str, "#line")) { try handleLineCommand(allocator, preprocessor_str, &current_mapping); } else { - try handleLineEnd(allocator, line_number, &parse_result.mappings, &current_mapping); if (!current_mapping.ignore_contents) { + try handleLineEnd(allocator, line_number, &parse_result.mappings, &current_mapping); + const line_ending_len: usize = if (is_crlf) 2 else 1; result.writeSlice(source[pending_start.? ..
index + line_ending_len]); line_number += 1; @@ -131,8 +132,9 @@ pub fn parseAndRemoveLineCommands(allocator: Allocator, source: []const u8, buf: .non_preprocessor => switch (c) { '\r', '\n' => { const is_crlf = formsLineEndingPair(source, c, index + 1); - try handleLineEnd(allocator, line_number, &parse_result.mappings, &current_mapping); if (!current_mapping.ignore_contents) { + try handleLineEnd(allocator, line_number, &parse_result.mappings, &current_mapping); + result.write(c); if (is_crlf) result.write(source[index + 1]); line_number += 1; @@ -185,7 +187,7 @@ pub fn parseAndRemoveLineCommands(allocator: Allocator, source: []const u8, buf: // If there have been no line mappings at all, then we're dealing with an empty file. // In this case, we want to fake a line mapping just so that we return something // that is useable in the same way that a non-empty mapping would be. - if (parse_result.mappings.mapping.items.len == 0) { + if (parse_result.mappings.sources.root == null) { try handleLineEnd(allocator, line_number, &parse_result.mappings, &current_mapping); } @@ -197,22 +199,13 @@ pub fn formsLineEndingPair(source: []const u8, line_ending: u8, next_index: usiz if (next_index >= source.len) return false; const next_ending = source[next_index]; - if (next_ending != '\r' and next_ending != '\n') return false; - - // can't be \n\n or \r\r - if (line_ending == next_ending) return false; - - return true; + return utils.isLineEndingPair(line_ending, next_ending); } pub fn handleLineEnd(allocator: Allocator, post_processed_line_number: usize, mapping: *SourceMappings, current_mapping: *CurrentMapping) !void { const filename_offset = try mapping.files.put(allocator, current_mapping.filename.items); - try mapping.set(allocator, post_processed_line_number, .{ - .start_line = current_mapping.line_num, - .end_line = current_mapping.line_num, - .filename_offset = filename_offset, - }); + try mapping.set(post_processed_line_number, current_mapping.line_num, filename_offset); current_mapping.line_num += 1; current_mapping.pending = false; @@ -421,72 +414,192 @@ test parseFilename { } pub const SourceMappings = struct { - /// line number -> span where the index is (line number - 1) - mapping: std.ArrayListUnmanaged(SourceSpan) = .{}, + sources: Sources = .{}, files: StringTable = .{}, /// The default assumes that the first filename added is the root file. /// The value should be set to the correct offset if that assumption does not hold. root_filename_offset: u32 = 0, + source_node_pool: std.heap.MemoryPool(Sources.Node) = std.heap.MemoryPool(Sources.Node).init(std.heap.page_allocator), + end_line: usize = 0, + + const sourceCompare = struct { + fn compare(a: Source, b: Source) std.math.Order { + return std.math.order(a.start_line, b.start_line); + } + }.compare; + const Sources = std.Treap(Source, sourceCompare); - pub const SourceSpan = struct { + pub const Source = struct { start_line: usize, - end_line: usize, + span: usize = 0, + corresponding_start_line: usize, filename_offset: u32, }; pub fn deinit(self: *SourceMappings, allocator: Allocator) void { self.files.deinit(allocator); - self.mapping.deinit(allocator); + self.source_node_pool.deinit(); } - pub fn set(self: *SourceMappings, allocator: Allocator, line_num: usize, span: SourceSpan) !void { - const ptr = try self.expandAndGet(allocator, line_num); - ptr.* = span; - } + /// Find the node that 'contains' the `line`, i.e.
the node's start_line is + /// >= `line` + fn findNode(self: SourceMappings, line: usize) ?*Sources.Node { + var node = self.sources.root; + var last_gt: ?*Sources.Node = null; + + var search_key: Source = undefined; + search_key.start_line = line; + while (node) |current| { + const order = sourceCompare(search_key, current.key); + if (order == .eq) break; + if (order == .gt) last_gt = current; + + node = current.children[@intFromBool(order == .gt)] orelse { + // Regardless of the current order, last_gt will contain the + // the node we want to return. + // + // If search key is > current node's key, then last_gt will be + // current which we now know is the closest node that is <= + // the search key. + // + // + // If the key is < current node's key, we want to jump back to the + // node that the search key was most recently greater than. + // This is necessary for scenarios like (where the search key is 2): + // + // 1 + // \ + // 6 + // / + // 3 + // + // In this example, we'll get down to the '3' node but ultimately want + // to return the '1' node. + // + // Note: If we've never seen a key that the search key is greater than, + // then we know that there's no valid node, so last_gt will be null. + return last_gt; + }; + } - pub fn has(self: SourceMappings, line_num: usize) bool { - return self.mapping.items.len >= line_num; + return node; } - /// Note: `line_num` is 1-indexed - pub fn get(self: SourceMappings, line_num: usize) SourceSpan { - return self.mapping.items[line_num - 1]; - } + /// Note: `line_num` and `corresponding_line_num` start at 1 + pub fn set(self: *SourceMappings, line_num: usize, corresponding_line_num: usize, filename_offset: u32) !void { + const maybe_node = self.findNode(line_num); - pub fn getPtr(self: SourceMappings, line_num: usize) *SourceSpan { - return &self.mapping.items[line_num - 1]; + const need_new_node = need_new_node: { + if (maybe_node) |node| { + if (node.key.filename_offset != filename_offset) { + break :need_new_node true; + } + const exist_delta = @as(i64, @intCast(node.key.corresponding_start_line)) - @as(i64, @intCast(node.key.start_line)); + const cur_delta = @as(i64, @intCast(corresponding_line_num)) - @as(i64, @intCast(line_num)); + if (exist_delta != cur_delta) { + break :need_new_node true; + } + break :need_new_node false; + } + break :need_new_node true; + }; + if (need_new_node) { + // spans must not overlap + if (maybe_node) |node| { + std.debug.assert(node.key.start_line != line_num); + } + + const key = Source{ + .start_line = line_num, + .corresponding_start_line = corresponding_line_num, + .filename_offset = filename_offset, + }; + var entry = self.sources.getEntryFor(key); + var new_node = try self.source_node_pool.create(); + new_node.key = key; + entry.set(new_node); + } + if (line_num > self.end_line) { + self.end_line = line_num; + } } - /// Expands the number of lines in the mapping to include the requested - /// line number (if necessary) and returns a pointer to the value at that - /// line number. 
- /// - /// Note: `line_num` is 1-indexed - pub fn expandAndGet(self: *SourceMappings, allocator: Allocator, line_num: usize) !*SourceSpan { - try self.mapping.resize(allocator, line_num); - return &self.mapping.items[line_num - 1]; + /// Note: `line_num` starts at 1 + pub fn get(self: SourceMappings, line_num: usize) ?Source { + const node = self.findNode(line_num) orelse return null; + return node.key; } - pub fn collapse(self: *SourceMappings, line_num: usize, num_following_lines_to_collapse: usize) void { - std.debug.assert(num_following_lines_to_collapse > 0); + pub const CorrespondingSpan = struct { + start_line: usize, + end_line: usize, + filename_offset: u32, + }; - var span_to_collapse_into = self.getPtr(line_num); - const last_collapsed_span = self.get(line_num + num_following_lines_to_collapse); - span_to_collapse_into.end_line = last_collapsed_span.end_line; + pub fn getCorrespondingSpan(self: SourceMappings, line_num: usize) ?CorrespondingSpan { + const source = self.get(line_num) orelse return null; + const diff = line_num - source.start_line; + const start_line = source.corresponding_start_line + (if (line_num == source.start_line) 0 else source.span + diff); + const end_line = start_line + (if (line_num == source.start_line) source.span else 0); + return CorrespondingSpan{ + .start_line = start_line, + .end_line = end_line, + .filename_offset = source.filename_offset, + }; + } - const after_collapsed_start = line_num + num_following_lines_to_collapse; - const new_num_lines = self.mapping.items.len - num_following_lines_to_collapse; - std.mem.copyForwards(SourceSpan, self.mapping.items[line_num..new_num_lines], self.mapping.items[after_collapsed_start..]); + pub fn collapse(self: *SourceMappings, line_num: usize, num_following_lines_to_collapse: usize) !void { + std.debug.assert(num_following_lines_to_collapse > 0); + var node = self.findNode(line_num).?; + const span_diff = num_following_lines_to_collapse; + if (node.key.start_line != line_num) { + const offset = line_num - node.key.start_line; + const key = Source{ + .start_line = line_num, + .span = num_following_lines_to_collapse, + .corresponding_start_line = node.key.corresponding_start_line + node.key.span + offset, + .filename_offset = node.key.filename_offset, + }; + var entry = self.sources.getEntryFor(key); + var new_node = try self.source_node_pool.create(); + new_node.key = key; + entry.set(new_node); + node = new_node; + } else { + node.key.span += span_diff; + } - self.mapping.items.len = new_num_lines; + // now subtract the span diff from the start line number of all of + // the following nodes in order + var it = Sources.InorderIterator{ + .current = node, + .previous = node.children[0], + }; + // skip past current, but store it + var prev = it.next().?; + while (it.next()) |inorder_node| { + inorder_node.key.start_line -= span_diff; + + // This can only really happen if there are #line commands within + // a multiline comment, which in theory should be skipped over. + // However, currently, parseAndRemoveLineCommands is not aware of + // comments at all. + // + // TODO: Make parseAndRemoveLineCommands aware of comments/strings + // and turn this into an assertion + if (prev.key.start_line > inorder_node.key.start_line) { + return error.InvalidSourceMappingCollapse; + } + prev = inorder_node; + } + self.end_line -= span_diff; } /// Returns true if the line is from the main/root file (i.e. not a file that has been /// `#include`d). 
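To make the new representation concrete: set() only creates a Treap node when the file or the line-number delta changes, so a long run of lines pulled in from one #include collapses to a single node, and the original line is recovered arithmetically by getCorrespondingSpan(). A sketch with made-up file names, written as if it sat alongside the existing tests in source_mapping.zig:

    test "one node per run of lines from the same file" {
        const allocator = std.testing.allocator;
        var mappings = SourceMappings{};
        defer mappings.deinit(allocator);
        const root = try mappings.files.put(allocator, "root.rc");
        const header = try mappings.files.put(allocator, "big_header.h");

        // Post-preprocessed lines 1..3 come from root.rc lines 1..3 ...
        try mappings.set(1, 1, root);
        try mappings.set(2, 2, root);
        try mappings.set(3, 3, root);
        // ... and lines 4, 5, ... come from big_header.h lines 1, 2, ...
        try mappings.set(4, 1, header);
        try mappings.set(5, 2, header);

        // Only two nodes exist; line 5 maps to header line 1 + (5 - 4) = 2.
        const span = mappings.getCorrespondingSpan(5).?;
        try std.testing.expectEqual(@as(usize, 2), span.start_line);
        try std.testing.expectEqual(header, span.filename_offset);
    }
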
pub fn isRootFile(self: *SourceMappings, line_num: usize) bool { - const line_mapping = self.get(line_num); - if (line_mapping.filename_offset == self.root_filename_offset) return true; - return false; + const source = self.get(line_num) orelse return false; + return source.filename_offset == self.root_filename_offset; } }; @@ -497,16 +610,21 @@ test "SourceMappings collapse" { defer mappings.deinit(allocator); const filename_offset = try mappings.files.put(allocator, "test.rc"); - try mappings.set(allocator, 1, .{ .start_line = 1, .end_line = 1, .filename_offset = filename_offset }); - try mappings.set(allocator, 2, .{ .start_line = 2, .end_line = 3, .filename_offset = filename_offset }); - try mappings.set(allocator, 3, .{ .start_line = 4, .end_line = 4, .filename_offset = filename_offset }); - try mappings.set(allocator, 4, .{ .start_line = 5, .end_line = 5, .filename_offset = filename_offset }); - - mappings.collapse(1, 2); - - try std.testing.expectEqual(@as(usize, 2), mappings.mapping.items.len); - try std.testing.expectEqual(@as(usize, 4), mappings.mapping.items[0].end_line); - try std.testing.expectEqual(@as(usize, 5), mappings.mapping.items[1].end_line); + try mappings.set(1, 1, filename_offset); + try mappings.set(5, 5, filename_offset); + + try mappings.collapse(2, 2); + + try std.testing.expectEqual(@as(usize, 3), mappings.end_line); + const span_1 = mappings.getCorrespondingSpan(1).?; + try std.testing.expectEqual(@as(usize, 1), span_1.start_line); + try std.testing.expectEqual(@as(usize, 1), span_1.end_line); + const span_2 = mappings.getCorrespondingSpan(2).?; + try std.testing.expectEqual(@as(usize, 2), span_2.start_line); + try std.testing.expectEqual(@as(usize, 4), span_2.end_line); + const span_3 = mappings.getCorrespondingSpan(3).?; + try std.testing.expectEqual(@as(usize, 5), span_3.start_line); + try std.testing.expectEqual(@as(usize, 5), span_3.end_line); } /// Same thing as StringTable in Zig's src/Wasm.zig @@ -579,10 +697,11 @@ fn testParseAndRemoveLineCommands( std.debug.print("{}: {s}:{}-{}\n", .{ line_num, span.filename, span.start_line, span.end_line }); } std.debug.print("\nactual mappings:\n", .{}); - for (results.mappings.mapping.items, 0..) |span, i| { - const line_num = i + 1; + var i: usize = 1; + while (i <= results.mappings.end_line) : (i += 1) { + const span = results.mappings.getCorrespondingSpan(i).?; const filename = results.mappings.files.get(span.filename_offset); - std.debug.print("{}: {s}:{}-{}\n", .{ line_num, filename, span.start_line, span.end_line }); + std.debug.print("{}: {s}:{}-{}\n", .{ i, filename, span.start_line, span.end_line }); } std.debug.print("\n", .{}); return err; @@ -590,10 +709,10 @@ fn testParseAndRemoveLineCommands( } fn expectEqualMappings(expected_spans: []const ExpectedSourceSpan, mappings: SourceMappings) !void { - try std.testing.expectEqual(expected_spans.len, mappings.mapping.items.len); + try std.testing.expectEqual(expected_spans.len, mappings.end_line); for (expected_spans, 0..) 
|expected_span, i| { const line_num = i + 1; - const span = mappings.get(line_num); + const span = mappings.getCorrespondingSpan(line_num) orelse return error.MissingLineNum; const filename = mappings.files.get(span.filename_offset); try std.testing.expectEqual(expected_span.start_line, span.start_line); try std.testing.expectEqual(expected_span.end_line, span.end_line); @@ -685,3 +804,28 @@ test "in place" { defer result.mappings.deinit(std.testing.allocator); try std.testing.expectEqualStrings("", result.result); } + +test "line command within a multiline comment" { + // TODO: Enable once parseAndRemoveLineCommands is comment-aware + if (true) return error.SkipZigTest; + + try testParseAndRemoveLineCommands( + \\/* + \\#line 1 "irrelevant.rc" + \\ + \\ + \\*/ + , &[_]ExpectedSourceSpan{ + .{ .start_line = 1, .end_line = 1, .filename = "blah.rc" }, + .{ .start_line = 2, .end_line = 2, .filename = "blah.rc" }, + .{ .start_line = 3, .end_line = 3, .filename = "blah.rc" }, + .{ .start_line = 4, .end_line = 4, .filename = "blah.rc" }, + .{ .start_line = 5, .end_line = 5, .filename = "blah.rc" }, + }, + \\/* + \\#line 1 "irrelevant.rc" + \\ + \\ + \\*/ + , .{ .initial_filename = "blah.rc" }); +} diff --git a/src/resinator/utils.zig b/lib/compiler/resinator/utils.zig similarity index 93% rename from src/resinator/utils.zig rename to lib/compiler/resinator/utils.zig index 41f504867be0..d216f07838fd 100644 --- a/src/resinator/utils.zig +++ b/lib/compiler/resinator/utils.zig @@ -110,3 +110,13 @@ pub fn renderErrorMessage(writer: anytype, config: std.io.tty.Config, msg_type: try writer.writeByte('\n'); try config.setColor(writer, .reset); } + +pub fn isLineEndingPair(first: u8, second: u8) bool { + if (first != '\r' and first != '\n') return false; + if (second != '\r' and second != '\n') return false; + + // can't be \n\n or \r\r + if (first == second) return false; + + return true; +} diff --git a/src/resinator/windows1252.zig b/lib/compiler/resinator/windows1252.zig similarity index 100% rename from src/resinator/windows1252.zig rename to lib/compiler/resinator/windows1252.zig diff --git a/src/Compilation.zig b/src/Compilation.zig index 0fdfb6038f53..b84272e4568a 100644 --- a/src/Compilation.zig +++ b/src/Compilation.zig @@ -36,7 +36,6 @@ const Cache = std.Build.Cache; const c_codegen = @import("codegen/c.zig"); const libtsan = @import("libtsan.zig"); const Zir = std.zig.Zir; -const resinator = @import("resinator.zig"); const Builtin = @import("Builtin.zig"); const LlvmObject = @import("codegen/llvm.zig").Object; @@ -174,7 +173,7 @@ local_cache_directory: Directory, global_cache_directory: Directory, libc_include_dir_list: []const []const u8, libc_framework_dir_list: []const []const u8, -rc_include_dir_list: []const []const u8, +rc_includes: RcIncludes, thread_pool: *ThreadPool, /// Populated when we build the libc++ static library. A Job to build this is placed in the queue @@ -1243,68 +1242,6 @@ pub fn create(gpa: Allocator, arena: Allocator, options: CreateOptions) !*Compil options.libc_installation, ); - // The include directories used when preprocessing .rc files are separate from the - // target. Which include directories are used is determined by `options.rc_includes`. - // - // Note: It should be okay that the include directories used when compiling .rc - // files differ from the include directories used when compiling the main - // binary, since the .res format is not dependent on anything ABI-related. 
The - // only relevant differences would be things like `#define` constants being - // different in the MinGW headers vs the MSVC headers, but any such - // differences would likely be a MinGW bug. - const rc_dirs: std.zig.LibCDirs = b: { - // Set the includes to .none here when there are no rc files to compile - var includes = if (options.rc_source_files.len > 0) options.rc_includes else .none; - const target = options.root_mod.resolved_target.result; - if (!options.root_mod.resolved_target.is_native_os or target.os.tag != .windows) { - switch (includes) { - // MSVC can't be found when the host isn't Windows, so short-circuit. - .msvc => return error.WindowsSdkNotFound, - // Skip straight to gnu since we won't be able to detect - // MSVC on non-Windows hosts. - .any => includes = .gnu, - .none, .gnu => {}, - } - } - while (true) switch (includes) { - .any, .msvc => break :b std.zig.LibCDirs.detect( - arena, - options.zig_lib_directory.path.?, - .{ - .cpu = target.cpu, - .os = target.os, - .abi = .msvc, - .ofmt = target.ofmt, - }, - options.root_mod.resolved_target.is_native_abi, - // The .rc preprocessor will need to know the libc include dirs even if we - // are not linking libc, so force 'link_libc' to true - true, - options.libc_installation, - ) catch |err| { - if (includes == .any) { - // fall back to mingw - includes = .gnu; - continue; - } - return err; - }, - .gnu => break :b try std.zig.LibCDirs.detectFromBuilding(arena, options.zig_lib_directory.path.?, .{ - .cpu = target.cpu, - .os = target.os, - .abi = .gnu, - .ofmt = target.ofmt, - }), - .none => break :b .{ - .libc_include_dir_list = &[0][]u8{}, - .libc_installation = null, - .libc_framework_dir_list = &.{}, - .sysroot = null, - .darwin_sdk_layout = null, - }, - }; - }; - const sysroot = options.sysroot orelse libc_dirs.sysroot; const include_compiler_rt = options.want_compiler_rt orelse @@ -1492,7 +1429,7 @@ pub fn create(gpa: Allocator, arena: Allocator, options: CreateOptions) !*Compil .self_exe_path = options.self_exe_path, .libc_include_dir_list = libc_dirs.libc_include_dir_list, .libc_framework_dir_list = libc_dirs.libc_framework_dir_list, - .rc_include_dir_list = rc_dirs.libc_include_dir_list, + .rc_includes = options.rc_includes, .thread_pool = options.thread_pool, .clang_passthrough_mode = options.clang_passthrough_mode, .clang_preprocessor_mode = options.clang_preprocessor_mode, @@ -2506,7 +2443,7 @@ fn addNonIncrementalStuffToCacheManifest( man.hash.add(comp.link_eh_frame_hdr); man.hash.add(comp.skip_linker_dependencies); man.hash.add(comp.include_compiler_rt); - man.hash.addListOfBytes(comp.rc_include_dir_list); + man.hash.add(comp.rc_includes); man.hash.addListOfBytes(comp.force_undefined_symbols.keys()); man.hash.addListOfBytes(comp.framework_dirs); try link.hashAddSystemLibs(man, comp.system_libs); @@ -4172,7 +4109,7 @@ pub fn obtainCObjectCacheManifest( pub fn obtainWin32ResourceCacheManifest(comp: *const Compilation) Cache.Manifest { var man = comp.cache_parent.obtain(); - man.hash.addListOfBytes(comp.rc_include_dir_list); + man.hash.add(comp.rc_includes); return man; } @@ -4812,11 +4749,12 @@ fn updateCObject(comp: *Compilation, c_object: *CObject, c_obj_prog_node: *std.P } fn updateWin32Resource(comp: *Compilation, win32_resource: *Win32Resource, win32_resource_prog_node: *std.Progress.Node) !void { - if (!build_options.have_llvm) { - return comp.failWin32Resource(win32_resource, "clang not available: compiler built without LLVM extensions", .{}); + if (!std.process.can_spawn) { + return 
comp.failWin32Resource(win32_resource, "{s} does not support spawning a child process", .{@tagName(builtin.os.tag)}); } + const self_exe_path = comp.self_exe_path orelse - return comp.failWin32Resource(win32_resource, "clang compilation disabled", .{}); + return comp.failWin32Resource(win32_resource, "unable to find self exe path", .{}); const tracy_trace = trace(@src()); defer tracy_trace.end(); @@ -4856,6 +4794,7 @@ fn updateWin32Resource(comp: *Compilation, win32_resource: *Win32Resource, win32 if (win32_resource.src == .manifest) { _ = try man.addFile(src_path, null); + const rc_basename = try std.fmt.allocPrint(arena, "{s}.rc", .{src_basename}); const res_basename = try std.fmt.allocPrint(arena, "{s}.res", .{src_basename}); const digest = if (try man.hit()) man.final() else blk: { @@ -4867,17 +4806,12 @@ fn updateWin32Resource(comp: *Compilation, win32_resource: *Win32Resource, win32 var o_dir = try comp.local_cache_directory.handle.makeOpenPath(o_sub_path, .{}); defer o_dir.close(); - var output_file = o_dir.createFile(res_basename, .{}) catch |err| { - const output_file_path = try comp.local_cache_directory.join(arena, &.{ o_sub_path, res_basename }); - return comp.failWin32Resource(win32_resource, "failed to create output file '{s}': {s}", .{ output_file_path, @errorName(err) }); - }; - var output_file_closed = false; - defer if (!output_file_closed) output_file.close(); - - var diagnostics = resinator.errors.Diagnostics.init(arena); - defer diagnostics.deinit(); - - var output_buffered_stream = std.io.bufferedWriter(output_file.writer()); + const in_rc_path = try comp.local_cache_directory.join(comp.gpa, &.{ + o_sub_path, rc_basename, + }); + const out_res_path = try comp.local_cache_directory.join(comp.gpa, &.{ + o_sub_path, res_basename, + }); // In .rc files, a " within a quoted string is escaped as "" const fmtRcEscape = struct { @@ -4899,28 +4833,47 @@ fn updateWin32Resource(comp: *Compilation, win32_resource: *Win32Resource, win32 // 1 is CREATEPROCESS_MANIFEST_RESOURCE_ID which is the default ID used for RT_MANIFEST resources // 24 is RT_MANIFEST const input = try std.fmt.allocPrint(arena, "1 24 \"{s}\"", .{fmtRcEscape(src_path)}); + try o_dir.writeFile(rc_basename, input); + + var argv = std.ArrayList([]const u8).init(comp.gpa); + defer argv.deinit(); + + try argv.appendSlice(&.{ + self_exe_path, + "rc", + "/:no-preprocess", + "/x", // ignore INCLUDE environment variable + "/c65001", // UTF-8 codepage + "/:auto-includes", + "none", + }); + try argv.appendSlice(&.{ "--", in_rc_path, out_res_path }); - resinator.compile.compile(arena, input, output_buffered_stream.writer(), .{ - .cwd = std.fs.cwd(), - .diagnostics = &diagnostics, - .ignore_include_env_var = true, - .default_code_page = .utf8, - }) catch |err| switch (err) { - error.ParseError, error.CompileError => { - // Delete the output file on error - output_file.close(); - output_file_closed = true; - // Failing to delete is not really a big deal, so swallow any errors - o_dir.deleteFile(res_basename) catch { - const output_file_path = try comp.local_cache_directory.join(arena, &.{ o_sub_path, res_basename }); - log.warn("failed to delete '{s}': {s}", .{ output_file_path, @errorName(err) }); - }; - return comp.failWin32ResourceCompile(win32_resource, input, &diagnostics, null); - }, - else => |e| return e, + var child = std.ChildProcess.init(argv.items, arena); + child.stdin_behavior = .Ignore; + child.stdout_behavior = .Ignore; + child.stderr_behavior = .Pipe; + + try child.spawn(); + + const stderr_reader = 
child.stderr.?.reader(); + const stderr = try stderr_reader.readAllAlloc(arena, 10 * 1024 * 1024); + const term = child.wait() catch |err| { + return comp.failWin32Resource(win32_resource, "unable to spawn {s}: {s}", .{ argv.items[0], @errorName(err) }); }; - try output_buffered_stream.flush(); + switch (term) { + .Exited => |code| { + if (code != 0) { + log.err("zig rc failed with stderr:\n{s}", .{stderr}); + return comp.failWin32Resource(win32_resource, "zig rc exited with code {d}", .{code}); + } + }, + else => { + log.err("zig rc terminated with stderr:\n{s}", .{stderr}); + return comp.failWin32Resource(win32_resource, "zig rc terminated unexpectedly", .{}); + }, + } break :blk digest; }; @@ -4951,9 +4904,6 @@ fn updateWin32Resource(comp: *Compilation, win32_resource: *Win32Resource, win32 const rc_basename_noext = src_basename[0 .. src_basename.len - std.fs.path.extension(src_basename).len]; const digest = if (try man.hit()) man.final() else blk: { - const rcpp_filename = try std.fmt.allocPrint(arena, "{s}.rcpp", .{rc_basename_noext}); - - const out_rcpp_path = try comp.tmpFilePath(arena, rcpp_filename); var zig_cache_tmp_dir = try comp.local_cache_directory.handle.makeOpenPath("tmp", .{}); defer zig_cache_tmp_dir.close(); @@ -4963,193 +4913,89 @@ fn updateWin32Resource(comp: *Compilation, win32_resource: *Win32Resource, win32 // so we need a temporary filename. const out_res_path = try comp.tmpFilePath(arena, res_filename); - var options = options: { - var resinator_args = try std.ArrayListUnmanaged([]const u8).initCapacity(comp.gpa, rc_src.extra_flags.len + 4); - defer resinator_args.deinit(comp.gpa); - - resinator_args.appendAssumeCapacity(""); // dummy 'process name' arg - resinator_args.appendSliceAssumeCapacity(rc_src.extra_flags); - resinator_args.appendSliceAssumeCapacity(&.{ "--", out_rcpp_path, out_res_path }); - - var cli_diagnostics = resinator.cli.Diagnostics.init(comp.gpa); - defer cli_diagnostics.deinit(); - const options = resinator.cli.parse(comp.gpa, resinator_args.items, &cli_diagnostics) catch |err| switch (err) { - error.ParseError => { - return comp.failWin32ResourceCli(win32_resource, &cli_diagnostics); - }, - else => |e| return e, - }; - break :options options; - }; - defer options.deinit(); - - // We never want to read the INCLUDE environment variable, so - // unconditionally set `ignore_include_env_var` to true - options.ignore_include_env_var = true; - - if (options.preprocess != .yes) { - return comp.failWin32Resource(win32_resource, "the '{s}' option is not supported in this context", .{switch (options.preprocess) { - .no => "/:no-preprocess", - .only => "/p", - .yes => unreachable, - }}); - } - var argv = std.ArrayList([]const u8).init(comp.gpa); defer argv.deinit(); - try argv.appendSlice(&[_][]const u8{ self_exe_path, "clang" }); - - try resinator.preprocess.appendClangArgs(arena, &argv, options, .{ - .clang_target = null, // handled by addCCArgs - .system_include_paths = &.{}, // handled by addCCArgs - .needs_gnu_workaround = comp.getTarget().isGnu(), - .nostdinc = false, // handled by addCCArgs + const depfile_filename = try std.fmt.allocPrint(arena, "{s}.d.json", .{rc_basename_noext}); + const out_dep_path = try comp.tmpFilePath(arena, depfile_filename); + try argv.appendSlice(&.{ + self_exe_path, + "rc", + "/:depfile", + out_dep_path, + "/:depfile-fmt", + "json", + "/x", // ignore INCLUDE environment variable + "/:auto-includes", + @tagName(comp.rc_includes), }); - - try argv.append(rc_src.src_path); - try argv.appendSlice(&[_][]const u8{ - "-o", - 
out_rcpp_path, - }); - - const out_dep_path = try std.fmt.allocPrint(arena, "{s}.d", .{out_rcpp_path}); - // Note: addCCArgs will implicitly add _DEBUG/NDEBUG depending on the optimization - // mode. While these defines are not normally present when calling rc.exe directly, + // While these defines are not normally present when calling rc.exe directly, // them being defined matches the behavior of how MSVC calls rc.exe which is the more // relevant behavior in this case. - try comp.addCCArgs(arena, &argv, .rc, out_dep_path, rc_src.owner); - - if (comp.verbose_cc) { - dump_argv(argv.items); + switch (rc_src.owner.optimize_mode) { + .Debug => try argv.append("-D_DEBUG"), + .ReleaseSafe => {}, + .ReleaseFast, .ReleaseSmall => try argv.append("-DNDEBUG"), } + try argv.appendSlice(rc_src.extra_flags); + try argv.appendSlice(&.{ "--", rc_src.src_path, out_res_path }); - if (std.process.can_spawn) { - var child = std.ChildProcess.init(argv.items, arena); - child.stdin_behavior = .Ignore; - child.stdout_behavior = .Ignore; - child.stderr_behavior = .Pipe; - - try child.spawn(); + var child = std.ChildProcess.init(argv.items, arena); + child.stdin_behavior = .Ignore; + child.stdout_behavior = .Ignore; + child.stderr_behavior = .Pipe; - const stderr_reader = child.stderr.?.reader(); - - const stderr = try stderr_reader.readAllAlloc(arena, 10 * 1024 * 1024); - - const term = child.wait() catch |err| { - return comp.failWin32Resource(win32_resource, "unable to spawn {s}: {s}", .{ argv.items[0], @errorName(err) }); - }; - - switch (term) { - .Exited => |code| { - if (code != 0) { - // TODO parse clang stderr and turn it into an error message - // and then call failCObjWithOwnedErrorMsg - log.err("clang preprocessor failed with stderr:\n{s}", .{stderr}); - return comp.failWin32Resource(win32_resource, "clang preprocessor exited with code {d}", .{code}); - } - }, - else => { - log.err("clang preprocessor terminated with stderr:\n{s}", .{stderr}); - return comp.failWin32Resource(win32_resource, "clang preprocessor terminated unexpectedly", .{}); - }, - } - } else { - const exit_code = try clangMain(arena, argv.items); - if (exit_code != 0) { - return comp.failWin32Resource(win32_resource, "clang preprocessor exited with code {d}", .{exit_code}); - } - } + try child.spawn(); - const dep_basename = std.fs.path.basename(out_dep_path); - // Add the files depended on to the cache system. - try man.addDepFilePost(zig_cache_tmp_dir, dep_basename); - switch (comp.cache_use) { - .whole => |whole| if (whole.cache_manifest) |whole_cache_manifest| { - whole.cache_manifest_mutex.lock(); - defer whole.cache_manifest_mutex.unlock(); - try whole_cache_manifest.addDepFilePost(zig_cache_tmp_dir, dep_basename); - }, - .incremental => {}, - } - // Just to save disk space, we delete the file because it is never needed again. 
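For the manifest branch earlier in updateWin32Resource, the generated wrapper is a one-line .rc file of the form `1 24 "<path>"` with any `"` in the path doubled. A contrived illustration (the path is made up, and std.mem.replaceOwned stands in for the fmtRcEscape formatter used in the patch):

    test "manifest wrapper line" {
        const gpa = std.testing.allocator;
        // 1 = CREATEPROCESS_MANIFEST_RESOURCE_ID, 24 = RT_MANIFEST.
        const src_path = "C:\\proj\\my \"app\".manifest";
        const escaped = try std.mem.replaceOwned(u8, gpa, src_path, "\"", "\"\"");
        defer gpa.free(escaped);
        const line = try std.fmt.allocPrint(gpa, "1 24 \"{s}\"", .{escaped});
        defer gpa.free(line);
        try std.testing.expectEqualStrings("1 24 \"C:\\proj\\my \"\"app\"\".manifest\"", line);
    }
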
- zig_cache_tmp_dir.deleteFile(dep_basename) catch |err| { - log.warn("failed to delete '{s}': {s}", .{ out_dep_path, @errorName(err) }); + const stderr_reader = child.stderr.?.reader(); + const stderr = try stderr_reader.readAllAlloc(arena, 10 * 1024 * 1024); + const term = child.wait() catch |err| { + return comp.failWin32Resource(win32_resource, "unable to spawn {s}: {s}", .{ argv.items[0], @errorName(err) }); }; - const full_input = std.fs.cwd().readFileAlloc(arena, out_rcpp_path, std.math.maxInt(usize)) catch |err| switch (err) { - error.OutOfMemory => return error.OutOfMemory, - else => |e| { - return comp.failWin32Resource(win32_resource, "failed to read preprocessed file '{s}': {s}", .{ out_rcpp_path, @errorName(e) }); + switch (term) { + .Exited => |code| { + if (code != 0) { + log.err("zig rc failed with stderr:\n{s}", .{stderr}); + return comp.failWin32Resource(win32_resource, "zig rc exited with code {d}", .{code}); + } + }, + else => { + log.err("zig rc terminated with stderr:\n{s}", .{stderr}); + return comp.failWin32Resource(win32_resource, "zig rc terminated unexpectedly", .{}); }, - }; - - var mapping_results = try resinator.source_mapping.parseAndRemoveLineCommands(arena, full_input, full_input, .{ .initial_filename = rc_src.src_path }); - defer mapping_results.mappings.deinit(arena); - - const final_input = resinator.comments.removeComments(mapping_results.result, mapping_results.result, &mapping_results.mappings); - - var output_file = zig_cache_tmp_dir.createFile(out_res_path, .{}) catch |err| { - return comp.failWin32Resource(win32_resource, "failed to create output file '{s}': {s}", .{ out_res_path, @errorName(err) }); - }; - var output_file_closed = false; - defer if (!output_file_closed) output_file.close(); - - var diagnostics = resinator.errors.Diagnostics.init(arena); - defer diagnostics.deinit(); - - var dependencies_list = std.ArrayList([]const u8).init(comp.gpa); - defer { - for (dependencies_list.items) |item| { - comp.gpa.free(item); - } - dependencies_list.deinit(); } - var output_buffered_stream = std.io.bufferedWriter(output_file.writer()); - - resinator.compile.compile(arena, final_input, output_buffered_stream.writer(), .{ - .cwd = std.fs.cwd(), - .diagnostics = &diagnostics, - .source_mappings = &mapping_results.mappings, - .dependencies_list = &dependencies_list, - .system_include_paths = comp.rc_include_dir_list, - .ignore_include_env_var = true, - // options - .extra_include_paths = options.extra_include_paths.items, - .default_language_id = options.default_language_id, - .default_code_page = options.default_code_page orelse .windows1252, - .verbose = options.verbose, - .null_terminate_string_table_strings = options.null_terminate_string_table_strings, - .max_string_literal_codepoints = options.max_string_literal_codepoints, - .silent_duplicate_control_ids = options.silent_duplicate_control_ids, - .warn_instead_of_error_on_invalid_code_page = options.warn_instead_of_error_on_invalid_code_page, - }) catch |err| switch (err) { - error.ParseError, error.CompileError => { - // Delete the output file on error - output_file.close(); - output_file_closed = true; - // Failing to delete is not really a big deal, so swallow any errors - zig_cache_tmp_dir.deleteFile(out_res_path) catch { - log.warn("failed to delete '{s}': {s}", .{ out_res_path, @errorName(err) }); - }; - return comp.failWin32ResourceCompile(win32_resource, final_input, &diagnostics, mapping_results.mappings); - }, - else => |e| return e, - }; + // Read depfile and update cache manifest + { + 
const dep_basename = std.fs.path.basename(out_dep_path); + const dep_file_contents = try zig_cache_tmp_dir.readFileAlloc(arena, dep_basename, 50 * 1024 * 1024); + defer arena.free(dep_file_contents); - try output_buffered_stream.flush(); + const value = try std.json.parseFromSliceLeaky(std.json.Value, arena, dep_file_contents, .{}); + if (value != .array) { + return comp.failWin32Resource(win32_resource, "depfile from zig rc has unexpected format", .{}); + } - for (dependencies_list.items) |dep_file_path| { - try man.addFilePost(dep_file_path); - switch (comp.cache_use) { - .whole => |whole| if (whole.cache_manifest) |whole_cache_manifest| { - whole.cache_manifest_mutex.lock(); - defer whole.cache_manifest_mutex.unlock(); - try whole_cache_manifest.addFilePost(dep_file_path); - }, - .incremental => {}, + for (value.array.items) |element| { + if (element != .string) { + return comp.failWin32Resource(win32_resource, "depfile from zig rc has unexpected format", .{}); + } + const dep_file_path = element.string; + try man.addFilePost(dep_file_path); + switch (comp.cache_use) { + .whole => |whole| if (whole.cache_manifest) |whole_cache_manifest| { + whole.cache_manifest_mutex.lock(); + defer whole.cache_manifest_mutex.unlock(); + try whole_cache_manifest.addFilePost(dep_file_path); + }, + .incremental => {}, + } } + // Just to save disk space, we delete the file because it is never needed again. + zig_cache_tmp_dir.deleteFile(dep_basename) catch |err| { + log.warn("failed to delete '{s}': {s}", .{ out_dep_path, @errorName(err) }); + }; } // Rename into place. @@ -5159,8 +5005,6 @@ fn updateWin32Resource(comp: *Compilation, win32_resource: *Win32Resource, win32 defer o_dir.close(); const tmp_basename = std.fs.path.basename(out_res_path); try std.fs.rename(zig_cache_tmp_dir, tmp_basename, o_dir, res_filename); - const tmp_rcpp_basename = std.fs.path.basename(out_rcpp_path); - try std.fs.rename(zig_cache_tmp_dir, tmp_rcpp_basename, o_dir, rcpp_filename); break :blk digest; }; @@ -5352,16 +5196,9 @@ pub fn addCCArgs( try argv.append("-isystem"); try argv.append(c_headers_dir); - if (ext == .rc) { - for (comp.rc_include_dir_list) |include_dir| { - try argv.append("-isystem"); - try argv.append(include_dir); - } - } else { - for (comp.libc_include_dir_list) |include_dir| { - try argv.append("-isystem"); - try argv.append(include_dir); - } + for (comp.libc_include_dir_list) |include_dir| { + try argv.append("-isystem"); + try argv.append(include_dir); } if (target.cpu.model.llvm_name) |llvm_name| { @@ -5726,167 +5563,6 @@ fn failWin32ResourceWithOwnedBundle( return error.AnalysisFail; } -fn failWin32ResourceCli( - comp: *Compilation, - win32_resource: *Win32Resource, - diagnostics: *resinator.cli.Diagnostics, -) SemaError { - @setCold(true); - - var bundle: ErrorBundle.Wip = undefined; - try bundle.init(comp.gpa); - errdefer bundle.deinit(); - - try bundle.addRootErrorMessage(.{ - .msg = try bundle.addString("invalid command line option(s)"), - .src_loc = try bundle.addSourceLocation(.{ - .src_path = try bundle.addString(switch (win32_resource.src) { - .rc => |rc_src| rc_src.src_path, - .manifest => |manifest_src| manifest_src, - }), - .line = 0, - .column = 0, - .span_start = 0, - .span_main = 0, - .span_end = 0, - }), - }); - - var cur_err: ?ErrorBundle.ErrorMessage = null; - var cur_notes: std.ArrayListUnmanaged(ErrorBundle.ErrorMessage) = .{}; - defer cur_notes.deinit(comp.gpa); - for (diagnostics.errors.items) |err_details| { - switch (err_details.type) { - .err => { - if (cur_err) |err| { - try 
win32ResourceFlushErrorMessage(&bundle, err, cur_notes.items); - } - cur_err = .{ - .msg = try bundle.addString(err_details.msg.items), - }; - cur_notes.clearRetainingCapacity(); - }, - .warning => cur_err = null, - .note => { - if (cur_err == null) continue; - cur_err.?.notes_len += 1; - try cur_notes.append(comp.gpa, .{ - .msg = try bundle.addString(err_details.msg.items), - }); - }, - } - } - if (cur_err) |err| { - try win32ResourceFlushErrorMessage(&bundle, err, cur_notes.items); - } - - const finished_bundle = try bundle.toOwnedBundle(""); - return comp.failWin32ResourceWithOwnedBundle(win32_resource, finished_bundle); -} - -fn failWin32ResourceCompile( - comp: *Compilation, - win32_resource: *Win32Resource, - source: []const u8, - diagnostics: *resinator.errors.Diagnostics, - opt_mappings: ?resinator.source_mapping.SourceMappings, -) SemaError { - @setCold(true); - - var bundle: ErrorBundle.Wip = undefined; - try bundle.init(comp.gpa); - errdefer bundle.deinit(); - - var msg_buf: std.ArrayListUnmanaged(u8) = .{}; - defer msg_buf.deinit(comp.gpa); - var cur_err: ?ErrorBundle.ErrorMessage = null; - var cur_notes: std.ArrayListUnmanaged(ErrorBundle.ErrorMessage) = .{}; - defer cur_notes.deinit(comp.gpa); - for (diagnostics.errors.items) |err_details| { - switch (err_details.type) { - .hint => continue, - // Clear the current error so that notes don't bleed into unassociated errors - .warning => { - cur_err = null; - continue; - }, - .note => if (cur_err == null) continue, - .err => {}, - } - const err_line, const err_filename = blk: { - if (opt_mappings) |mappings| { - const corresponding_span = mappings.get(err_details.token.line_number); - const corresponding_file = mappings.files.get(corresponding_span.filename_offset); - const err_line = corresponding_span.start_line; - break :blk .{ err_line, corresponding_file }; - } else { - break :blk .{ err_details.token.line_number, "" }; - } - }; - - const source_line_start = err_details.token.getLineStart(source); - const column = err_details.token.calculateColumn(source, 1, source_line_start); - - msg_buf.clearRetainingCapacity(); - try err_details.render(msg_buf.writer(comp.gpa), source, diagnostics.strings.items); - - const src_loc = src_loc: { - var src_loc: ErrorBundle.SourceLocation = .{ - .src_path = try bundle.addString(err_filename), - .line = @intCast(err_line - 1), // 1-based -> 0-based - .column = @intCast(column), - .span_start = 0, - .span_main = 0, - .span_end = 0, - }; - if (err_details.print_source_line) { - const source_line = err_details.token.getLine(source, source_line_start); - const visual_info = err_details.visualTokenInfo(source_line_start, source_line_start + source_line.len); - src_loc.span_start = @intCast(visual_info.point_offset - visual_info.before_len); - src_loc.span_main = @intCast(visual_info.point_offset); - src_loc.span_end = @intCast(visual_info.point_offset + 1 + visual_info.after_len); - src_loc.source_line = try bundle.addString(source_line); - } - break :src_loc try bundle.addSourceLocation(src_loc); - }; - - switch (err_details.type) { - .err => { - if (cur_err) |err| { - try win32ResourceFlushErrorMessage(&bundle, err, cur_notes.items); - } - cur_err = .{ - .msg = try bundle.addString(msg_buf.items), - .src_loc = src_loc, - }; - cur_notes.clearRetainingCapacity(); - }, - .note => { - cur_err.?.notes_len += 1; - try cur_notes.append(comp.gpa, .{ - .msg = try bundle.addString(msg_buf.items), - .src_loc = src_loc, - }); - }, - .warning, .hint => unreachable, - } - } - if (cur_err) |err| { - try 
win32ResourceFlushErrorMessage(&bundle, err, cur_notes.items); - } - - const finished_bundle = try bundle.toOwnedBundle(""); - return comp.failWin32ResourceWithOwnedBundle(win32_resource, finished_bundle); -} - -fn win32ResourceFlushErrorMessage(wip: *ErrorBundle.Wip, msg: ErrorBundle.ErrorMessage, notes: []const ErrorBundle.ErrorMessage) !void { - try wip.addRootErrorMessage(msg); - const notes_start = try wip.reserveNotes(@intCast(notes.len)); - for (notes_start.., notes) |i, note| { - wip.extra.items[i] = @intFromEnum(wip.addErrorMessageAssumeCapacity(note)); - } -} - pub const FileExt = enum { c, cpp, diff --git a/src/main.zig b/src/main.zig index 74c78bf08961..c6e91f4e1746 100644 --- a/src/main.zig +++ b/src/main.zig @@ -291,7 +291,12 @@ fn mainArgs(gpa: Allocator, arena: Allocator, args: []const []const u8) !void { } else if (mem.eql(u8, cmd, "translate-c")) { return buildOutputType(gpa, arena, args, .translate_c); } else if (mem.eql(u8, cmd, "rc")) { - return cmdRc(gpa, arena, args[1..]); + return jitCmd(gpa, arena, cmd_args, .{ + .cmd_name = "resinator", + .root_src_path = "resinator/main.zig", + .depend_on_aro = true, + .prepend_zig_lib_dir_path = true, + }); } else if (mem.eql(u8, cmd, "fmt")) { return jitCmd(gpa, arena, cmd_args, .{ .cmd_name = "fmt", @@ -4625,276 +4630,6 @@ fn cmdTranslateC(comp: *Compilation, arena: Allocator, fancy_output: ?*Compilati } } -fn cmdRc(gpa: Allocator, arena: Allocator, args: []const []const u8) !void { - const resinator = @import("resinator.zig"); - - const stderr = std.io.getStdErr(); - const stderr_config = std.io.tty.detectConfig(stderr); - - var options = options: { - var cli_diagnostics = resinator.cli.Diagnostics.init(gpa); - defer cli_diagnostics.deinit(); - var options = resinator.cli.parse(gpa, args, &cli_diagnostics) catch |err| switch (err) { - error.ParseError => { - cli_diagnostics.renderToStdErr(args, stderr_config); - process.exit(1); - }, - else => |e| return e, - }; - try options.maybeAppendRC(std.fs.cwd()); - - // print any warnings/notes - cli_diagnostics.renderToStdErr(args, stderr_config); - // If there was something printed, then add an extra newline separator - // so that there is a clear separation between the cli diagnostics and whatever - // gets printed after - if (cli_diagnostics.errors.items.len > 0) { - std.debug.print("\n", .{}); - } - break :options options; - }; - defer options.deinit(); - - if (options.print_help_and_exit) { - try resinator.cli.writeUsage(stderr.writer(), "zig rc"); - return; - } - - const stdout_writer = std.io.getStdOut().writer(); - if (options.verbose) { - try options.dumpVerbose(stdout_writer); - try stdout_writer.writeByte('\n'); - } - - const full_input = full_input: { - if (options.preprocess != .no) { - if (!build_options.have_llvm) { - fatal("clang not available: compiler built without LLVM extensions", .{}); - } - - var argv = std.ArrayList([]const u8).init(gpa); - defer argv.deinit(); - - const self_exe_path = try introspect.findZigExePath(arena); - var zig_lib_directory = introspect.findZigLibDirFromSelfExe(arena, self_exe_path) catch |err| { - try resinator.utils.renderErrorMessage(stderr.writer(), stderr_config, .err, "unable to find zig installation directory: {s}", .{@errorName(err)}); - process.exit(1); - }; - defer zig_lib_directory.handle.close(); - - const include_args = detectRcIncludeDirs(arena, zig_lib_directory.path.?, options.auto_includes) catch |err| { - try resinator.utils.renderErrorMessage(stderr.writer(), stderr_config, .err, "unable to detect system include 
directories: {s}", .{@errorName(err)}); - process.exit(1); - }; - - try argv.appendSlice(&[_][]const u8{ self_exe_path, "clang" }); - - const clang_target = clang_target: { - if (include_args.target_abi) |abi| { - break :clang_target try std.fmt.allocPrint(arena, "x86_64-unknown-windows-{s}", .{abi}); - } - break :clang_target "x86_64-unknown-windows"; - }; - try resinator.preprocess.appendClangArgs(arena, &argv, options, .{ - .clang_target = clang_target, - .system_include_paths = include_args.include_paths, - .needs_gnu_workaround = if (include_args.target_abi) |abi| std.mem.eql(u8, abi, "gnu") else false, - .nostdinc = true, - }); - - try argv.append(options.input_filename); - - if (options.verbose) { - try stdout_writer.writeAll("Preprocessor: zig clang\n"); - for (argv.items[0 .. argv.items.len - 1]) |arg| { - try stdout_writer.print("{s} ", .{arg}); - } - try stdout_writer.print("{s}\n\n", .{argv.items[argv.items.len - 1]}); - } - - if (process.can_spawn) { - const result = std.ChildProcess.run(.{ - .allocator = gpa, - .argv = argv.items, - .max_output_bytes = std.math.maxInt(u32), - }) catch |err| { - try resinator.utils.renderErrorMessage(stderr.writer(), stderr_config, .err, "unable to spawn preprocessor child process: {s}", .{@errorName(err)}); - process.exit(1); - }; - errdefer gpa.free(result.stdout); - defer gpa.free(result.stderr); - - switch (result.term) { - .Exited => |code| { - if (code != 0) { - try resinator.utils.renderErrorMessage(stderr.writer(), stderr_config, .err, "the preprocessor failed with exit code {}:", .{code}); - try stderr.writeAll(result.stderr); - try stderr.writeAll("\n"); - process.exit(1); - } - }, - .Signal, .Stopped, .Unknown => { - try resinator.utils.renderErrorMessage(stderr.writer(), stderr_config, .err, "the preprocessor terminated unexpectedly ({s}):", .{@tagName(result.term)}); - try stderr.writeAll(result.stderr); - try stderr.writeAll("\n"); - process.exit(1); - }, - } - - break :full_input result.stdout; - } else { - // need to use an intermediate file - const rand_int = std.crypto.random.int(u64); - const preprocessed_path = try std.fmt.allocPrint(gpa, "resinator{x}.rcpp", .{rand_int}); - defer gpa.free(preprocessed_path); - defer std.fs.cwd().deleteFile(preprocessed_path) catch {}; - - try argv.appendSlice(&.{ "-o", preprocessed_path }); - const exit_code = try clangMain(arena, argv.items); - if (exit_code != 0) { - try resinator.utils.renderErrorMessage(stderr.writer(), stderr_config, .err, "the preprocessor failed with exit code {}:", .{exit_code}); - process.exit(1); - } - break :full_input std.fs.cwd().readFileAlloc(gpa, preprocessed_path, std.math.maxInt(usize)) catch |err| { - try resinator.utils.renderErrorMessage(stderr.writer(), stderr_config, .err, "unable to read preprocessed file path '{s}': {s}", .{ preprocessed_path, @errorName(err) }); - process.exit(1); - }; - } - } else { - break :full_input std.fs.cwd().readFileAlloc(gpa, options.input_filename, std.math.maxInt(usize)) catch |err| { - try resinator.utils.renderErrorMessage(stderr.writer(), stderr_config, .err, "unable to read input file path '{s}': {s}", .{ options.input_filename, @errorName(err) }); - process.exit(1); - }; - } - }; - defer gpa.free(full_input); - - if (options.preprocess == .only) { - std.fs.cwd().writeFile(options.output_filename, full_input) catch |err| { - try resinator.utils.renderErrorMessage(stderr.writer(), stderr_config, .err, "unable to write output file '{s}': {s}", .{ options.output_filename, @errorName(err) }); - process.exit(1); - }; - 
return cleanExit(); - } - - var mapping_results = try resinator.source_mapping.parseAndRemoveLineCommands(gpa, full_input, full_input, .{ .initial_filename = options.input_filename }); - defer mapping_results.mappings.deinit(gpa); - - const final_input = resinator.comments.removeComments(mapping_results.result, mapping_results.result, &mapping_results.mappings); - - var output_file = std.fs.cwd().createFile(options.output_filename, .{}) catch |err| { - try resinator.utils.renderErrorMessage(stderr.writer(), stderr_config, .err, "unable to create output file '{s}': {s}", .{ options.output_filename, @errorName(err) }); - process.exit(1); - }; - var output_file_closed = false; - defer if (!output_file_closed) output_file.close(); - - var diagnostics = resinator.errors.Diagnostics.init(gpa); - defer diagnostics.deinit(); - - var output_buffered_stream = std.io.bufferedWriter(output_file.writer()); - - resinator.compile.compile(gpa, final_input, output_buffered_stream.writer(), .{ - .cwd = std.fs.cwd(), - .diagnostics = &diagnostics, - .source_mappings = &mapping_results.mappings, - .dependencies_list = null, - .ignore_include_env_var = options.ignore_include_env_var, - .extra_include_paths = options.extra_include_paths.items, - .default_language_id = options.default_language_id, - .default_code_page = options.default_code_page orelse .windows1252, - .verbose = options.verbose, - .null_terminate_string_table_strings = options.null_terminate_string_table_strings, - .max_string_literal_codepoints = options.max_string_literal_codepoints, - .silent_duplicate_control_ids = options.silent_duplicate_control_ids, - .warn_instead_of_error_on_invalid_code_page = options.warn_instead_of_error_on_invalid_code_page, - }) catch |err| switch (err) { - error.ParseError, error.CompileError => { - diagnostics.renderToStdErr(std.fs.cwd(), final_input, stderr_config, mapping_results.mappings); - // Delete the output file on error - output_file.close(); - output_file_closed = true; - // Failing to delete is not really a big deal, so swallow any errors - std.fs.cwd().deleteFile(options.output_filename) catch {}; - process.exit(1); - }, - else => |e| return e, - }; - - try output_buffered_stream.flush(); - - // print any warnings/notes - diagnostics.renderToStdErr(std.fs.cwd(), final_input, stderr_config, mapping_results.mappings); - - return cleanExit(); -} - -const RcIncludeArgs = struct { - include_paths: []const []const u8 = &.{}, - target_abi: ?[]const u8 = null, -}; - -fn detectRcIncludeDirs(arena: Allocator, zig_lib_dir: []const u8, auto_includes: @import("resinator.zig").cli.Options.AutoIncludes) !RcIncludeArgs { - if (auto_includes == .none) return .{}; - var cur_includes = auto_includes; - if (builtin.target.os.tag != .windows) { - switch (cur_includes) { - // MSVC can't be found when the host isn't Windows, so short-circuit. - .msvc => return error.WindowsSdkNotFound, - // Skip straight to gnu since we won't be able to detect MSVC on non-Windows hosts. 
- .any => cur_includes = .gnu, - .gnu => {}, - .none => unreachable, - } - } - while (true) { - switch (cur_includes) { - .any, .msvc => { - const target_query: std.Target.Query = .{ - .os_tag = .windows, - .abi = .msvc, - }; - const target = std.zig.resolveTargetQueryOrFatal(target_query); - const is_native_abi = target_query.isNativeAbi(); - const detected_libc = std.zig.LibCDirs.detect(arena, zig_lib_dir, target, is_native_abi, true, null) catch |err| { - if (cur_includes == .any) { - // fall back to mingw - cur_includes = .gnu; - continue; - } - return err; - }; - if (detected_libc.libc_include_dir_list.len == 0) { - if (cur_includes == .any) { - // fall back to mingw - cur_includes = .gnu; - continue; - } - return error.WindowsSdkNotFound; - } - return .{ - .include_paths = detected_libc.libc_include_dir_list, - .target_abi = "msvc", - }; - }, - .gnu => { - const target_query: std.Target.Query = .{ - .os_tag = .windows, - .abi = .gnu, - }; - const target = std.zig.resolveTargetQueryOrFatal(target_query); - const is_native_abi = target_query.isNativeAbi(); - const detected_libc = try std.zig.LibCDirs.detect(arena, zig_lib_dir, target, is_native_abi, true, null); - return .{ - .include_paths = detected_libc.libc_include_dir_list, - .target_abi = "gnu", - }; - }, - .none => unreachable, - } - } -} - const usage_init = \\Usage: zig init \\ diff --git a/src/resinator.zig b/src/resinator.zig deleted file mode 100644 index 1d7e75fec026..000000000000 --- a/src/resinator.zig +++ /dev/null @@ -1,25 +0,0 @@ -comptime { - if (@import("build_options").only_core_functionality) { - @compileError("resinator included in only_core_functionality build"); - } -} - -pub const ani = @import("resinator/ani.zig"); -pub const ast = @import("resinator/ast.zig"); -pub const bmp = @import("resinator/bmp.zig"); -pub const cli = @import("resinator/cli.zig"); -pub const code_pages = @import("resinator/code_pages.zig"); -pub const comments = @import("resinator/comments.zig"); -pub const compile = @import("resinator/compile.zig"); -pub const errors = @import("resinator/errors.zig"); -pub const ico = @import("resinator/ico.zig"); -pub const lang = @import("resinator/lang.zig"); -pub const lex = @import("resinator/lex.zig"); -pub const literals = @import("resinator/literals.zig"); -pub const parse = @import("resinator/parse.zig"); -pub const preprocess = @import("resinator/preprocess.zig"); -pub const rc = @import("resinator/rc.zig"); -pub const res = @import("resinator/res.zig"); -pub const source_mapping = @import("resinator/source_mapping.zig"); -pub const utils = @import("resinator/utils.zig"); -pub const windows1252 = @import("resinator/windows1252.zig"); diff --git a/src/resinator/preprocess.zig b/src/resinator/preprocess.zig deleted file mode 100644 index 981ef5bffcbb..000000000000 --- a/src/resinator/preprocess.zig +++ /dev/null @@ -1,100 +0,0 @@ -const std = @import("std"); -const builtin = @import("builtin"); -const Allocator = std.mem.Allocator; -const cli = @import("cli.zig"); - -pub const IncludeArgs = struct { - clang_target: ?[]const u8 = null, - system_include_paths: []const []const u8, - /// Should be set to `true` when -target has the GNU abi - /// (either because `clang_target` has `-gnu` or `-target` - /// is appended via other means and it has `-gnu`) - needs_gnu_workaround: bool = false, - nostdinc: bool = false, - - pub const IncludeAbi = enum { - msvc, - gnu, - }; -}; - -/// `arena` is used for temporary -D argument strings and the INCLUDE environment variable. 
-/// The arena should be kept alive at least as long as `argv`. -pub fn appendClangArgs(arena: Allocator, argv: *std.ArrayList([]const u8), options: cli.Options, include_args: IncludeArgs) !void { - try argv.appendSlice(&[_][]const u8{ - "-E", // preprocessor only - "--comments", - "-fuse-line-directives", // #line instead of # - // TODO: could use --trace-includes to give info about what's included from where - "-xc", // output c - // TODO: Turn this off, check the warnings, and convert the spaces back to NUL - "-Werror=null-character", // error on null characters instead of converting them to spaces - // TODO: could remove -Werror=null-character and instead parse warnings looking for 'warning: null character ignored' - // since the only real problem is when clang doesn't preserve null characters - //"-Werror=invalid-pp-token", // will error on unfinished string literals - // TODO: could use -Werror instead - "-fms-compatibility", // Allow things like "header.h" to be resolved relative to the 'root' .rc file, among other things - // https://learn.microsoft.com/en-us/windows/win32/menurc/predefined-macros - "-DRC_INVOKED", - }); - for (options.extra_include_paths.items) |extra_include_path| { - try argv.append("-I"); - try argv.append(extra_include_path); - } - - if (include_args.nostdinc) { - try argv.append("-nostdinc"); - } - for (include_args.system_include_paths) |include_path| { - try argv.append("-isystem"); - try argv.append(include_path); - } - if (include_args.clang_target) |target| { - try argv.append("-target"); - try argv.append(target); - } - // Using -fms-compatibility and targeting the GNU abi interact in a strange way: - // - Targeting the GNU abi stops _MSC_VER from being defined - // - Passing -fms-compatibility stops __GNUC__ from being defined - // Neither being defined is a problem for things like MinGW's vadefs.h, - // which will fail during preprocessing if neither are defined. - // So, when targeting the GNU abi, we need to force __GNUC__ to be defined. - // - // TODO: This is a workaround that should be removed if possible. - if (include_args.needs_gnu_workaround) { - // This is the same default gnuc version that Clang uses: - // https://github.com/llvm/llvm-project/blob/4b5366c9512aa273a5272af1d833961e1ed156e7/clang/lib/Driver/ToolChains/Clang.cpp#L6738 - try argv.append("-fgnuc-version=4.2.1"); - } - - if (!options.ignore_include_env_var) { - const INCLUDE = std.process.getEnvVarOwned(arena, "INCLUDE") catch ""; - - // The only precedence here is llvm-rc which also uses the platform-specific - // delimiter. There's no precedence set by `rc.exe` since it's Windows-only. - const delimiter = switch (builtin.os.tag) { - .windows => ';', - else => ':', - }; - var it = std.mem.tokenizeScalar(u8, INCLUDE, delimiter); - while (it.next()) |include_path| { - try argv.append("-isystem"); - try argv.append(include_path); - } - } - - var symbol_it = options.symbols.iterator(); - while (symbol_it.next()) |entry| { - switch (entry.value_ptr.*) { - .define => |value| { - try argv.append("-D"); - const define_arg = try std.fmt.allocPrint(arena, "{s}={s}", .{ entry.key_ptr.*, value }); - try argv.append(define_arg); - }, - .undefine => { - try argv.append("-U"); - try argv.append(entry.key_ptr.*); - }, - } - } -}
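For orientation while reviewing this move: the deleted src/resinator/preprocess.zig above is the
helper that assembles the clang argument list, and the removed main.zig code earlier in this diff
is its caller. The sketch below is illustrative only and not part of the patch; `buildPreprocessorArgv`
is a hypothetical name, and the include-path/ABI parameters stand in for the result of
detectRcIncludeDirs. It condenses how those pieces combine into the `zig clang` preprocessor
command line:

    const std = @import("std");
    const resinator = @import("resinator.zig"); // import path prior to this change

    // Hypothetical helper, not present in the codebase: a condensed sketch of the
    // argv construction shown in the removed main.zig code above.
    fn buildPreprocessorArgv(
        gpa: std.mem.Allocator,
        arena: std.mem.Allocator,
        self_exe_path: []const u8,
        options: resinator.cli.Options,
        include_paths: []const []const u8, // from detectRcIncludeDirs()
        target_abi: ?[]const u8, // "msvc", "gnu", or null
    ) !std.ArrayList([]const u8) {
        var argv = std.ArrayList([]const u8).init(gpa);
        errdefer argv.deinit();

        // The preprocessor is Zig's bundled clang, invoked as `zig clang`.
        try argv.appendSlice(&.{ self_exe_path, "clang" });

        // The detected ABI picks the clang target triple and whether the
        // __GNUC__ workaround (-fgnuc-version) is needed.
        const clang_target = if (target_abi) |abi|
            try std.fmt.allocPrint(arena, "x86_64-unknown-windows-{s}", .{abi})
        else
            "x86_64-unknown-windows";

        try resinator.preprocess.appendClangArgs(arena, &argv, options, .{
            .clang_target = clang_target,
            .system_include_paths = include_paths,
            .needs_gnu_workaround = if (target_abi) |abi| std.mem.eql(u8, abi, "gnu") else false,
            .nostdinc = true,
        });

        // The .rc file itself is the last argument.
        try argv.append(options.input_filename);
        return argv;
    }

The resulting argv is then either spawned as a child process (reading the preprocessed output from
stdout) or, when spawning is unavailable, run via clangMain with "-o" pointing at a temporary
.rcpp file, as the removed main.zig code shows.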