diff --git a/CMakeLists.txt b/CMakeLists.txt
index a0c3ae84fa9a..6e89d87ca9bb 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -370,7 +370,6 @@ set(ZIG_STAGE2_SOURCES
"${CMAKE_SOURCE_DIR}/lib/std/heap.zig"
"${CMAKE_SOURCE_DIR}/lib/std/heap/arena_allocator.zig"
"${CMAKE_SOURCE_DIR}/lib/std/io.zig"
- "${CMAKE_SOURCE_DIR}/lib/std/io/auto_indenting_stream.zig"
"${CMAKE_SOURCE_DIR}/lib/std/io/buffered_atomic_file.zig"
"${CMAKE_SOURCE_DIR}/lib/std/io/buffered_writer.zig"
"${CMAKE_SOURCE_DIR}/lib/std/io/change_detection_stream.zig"
@@ -408,6 +407,7 @@ set(ZIG_STAGE2_SOURCES
"${CMAKE_SOURCE_DIR}/lib/std/meta.zig"
"${CMAKE_SOURCE_DIR}/lib/std/meta/trailer_flags.zig"
"${CMAKE_SOURCE_DIR}/lib/std/meta/trait.zig"
+ "${CMAKE_SOURCE_DIR}/lib/std/multi_array_list.zig"
"${CMAKE_SOURCE_DIR}/lib/std/os.zig"
"${CMAKE_SOURCE_DIR}/lib/std/os/bits.zig"
"${CMAKE_SOURCE_DIR}/lib/std/os/bits/linux.zig"
@@ -573,6 +573,7 @@ set(ZIG_STAGE2_SOURCES
"${CMAKE_SOURCE_DIR}/src/target.zig"
"${CMAKE_SOURCE_DIR}/src/tracy.zig"
"${CMAKE_SOURCE_DIR}/src/translate_c.zig"
+ "${CMAKE_SOURCE_DIR}/src/translate_c/ast.zig"
"${CMAKE_SOURCE_DIR}/src/type.zig"
"${CMAKE_SOURCE_DIR}/src/value.zig"
"${CMAKE_SOURCE_DIR}/src/windows_sdk.zig"
diff --git a/README.md b/README.md
index 5eedcdab8bfc..e98eebf29fdb 100644
--- a/README.md
+++ b/README.md
@@ -5,7 +5,7 @@ A general-purpose programming language and toolchain for maintaining
## Resources
- * [Introduction](https://ziglang.org/#Introduction)
+ * [Introduction](https://ziglang.org/learn/#introduction)
* [Download & Documentation](https://ziglang.org/download)
* [Chapter 0 - Getting Started | ZigLearn.org](https://ziglearn.org/)
* [Community](https://github.com/ziglang/zig/wiki/Community)
diff --git a/build.zig b/build.zig
index f53fab73bf6c..92e03603c5d4 100644
--- a/build.zig
+++ b/build.zig
@@ -77,10 +77,12 @@ pub fn build(b: *Builder) !void {
const tracy = b.option([]const u8, "tracy", "Enable Tracy integration. Supply path to Tracy source");
const link_libc = b.option(bool, "force-link-libc", "Force self-hosted compiler to link libc") orelse enable_llvm;
+ const strip = b.option(bool, "strip", "Omit debug information") orelse false;
const main_file = if (is_stage1) "src/stage1.zig" else "src/main.zig";
var exe = b.addExecutable("zig", main_file);
+ exe.strip = strip;
exe.install();
exe.setBuildMode(mode);
exe.setTarget(target);
diff --git a/doc/docgen.zig b/doc/docgen.zig
index 90e3e3220184..4f06b63c2ca1 100644
--- a/doc/docgen.zig
+++ b/doc/docgen.zig
@@ -781,106 +781,119 @@ fn tokenizeAndPrintRaw(docgen_tokenizer: *Tokenizer, out: anytype, source_token:
next_tok_is_fn = false;
const token = tokenizer.next();
+ if (mem.indexOf(u8, src[index..token.loc.start], "//")) |comment_start_off| {
+ // render one comment
+ const comment_start = index + comment_start_off;
+ const comment_end_off = mem.indexOf(u8, src[comment_start .. token.loc.start], "\n");
+ const comment_end = if (comment_end_off) |o| comment_start + o else token.loc.start;
+
+ try writeEscaped(out, src[index..comment_start]);
+ try out.writeAll("<span class=\"tok-comment\">"); try writeEscaped(out, src[comment_start..comment_end]); try out.writeAll("</span>");
+ index = comment_end;
+ tokenizer.index = index;
+ continue;
+ }
+
try writeEscaped(out, src[index..token.loc.start]);
- switch (token.id) {
- .Eof => break,
-
- .Keyword_align,
- .Keyword_and,
- .Keyword_asm,
- .Keyword_async,
- .Keyword_await,
- .Keyword_break,
- .Keyword_catch,
- .Keyword_comptime,
- .Keyword_const,
- .Keyword_continue,
- .Keyword_defer,
- .Keyword_else,
- .Keyword_enum,
- .Keyword_errdefer,
- .Keyword_error,
- .Keyword_export,
- .Keyword_extern,
- .Keyword_for,
- .Keyword_if,
- .Keyword_inline,
- .Keyword_noalias,
- .Keyword_noinline,
- .Keyword_nosuspend,
- .Keyword_opaque,
- .Keyword_or,
- .Keyword_orelse,
- .Keyword_packed,
- .Keyword_anyframe,
- .Keyword_pub,
- .Keyword_resume,
- .Keyword_return,
- .Keyword_linksection,
- .Keyword_callconv,
- .Keyword_struct,
- .Keyword_suspend,
- .Keyword_switch,
- .Keyword_test,
- .Keyword_threadlocal,
- .Keyword_try,
- .Keyword_union,
- .Keyword_unreachable,
- .Keyword_usingnamespace,
- .Keyword_var,
- .Keyword_volatile,
- .Keyword_allowzero,
- .Keyword_while,
- .Keyword_anytype,
+ switch (token.tag) {
+ .eof => break,
+
+ .keyword_align,
+ .keyword_and,
+ .keyword_asm,
+ .keyword_async,
+ .keyword_await,
+ .keyword_break,
+ .keyword_catch,
+ .keyword_comptime,
+ .keyword_const,
+ .keyword_continue,
+ .keyword_defer,
+ .keyword_else,
+ .keyword_enum,
+ .keyword_errdefer,
+ .keyword_error,
+ .keyword_export,
+ .keyword_extern,
+ .keyword_for,
+ .keyword_if,
+ .keyword_inline,
+ .keyword_noalias,
+ .keyword_noinline,
+ .keyword_nosuspend,
+ .keyword_opaque,
+ .keyword_or,
+ .keyword_orelse,
+ .keyword_packed,
+ .keyword_anyframe,
+ .keyword_pub,
+ .keyword_resume,
+ .keyword_return,
+ .keyword_linksection,
+ .keyword_callconv,
+ .keyword_struct,
+ .keyword_suspend,
+ .keyword_switch,
+ .keyword_test,
+ .keyword_threadlocal,
+ .keyword_try,
+ .keyword_union,
+ .keyword_unreachable,
+ .keyword_usingnamespace,
+ .keyword_var,
+ .keyword_volatile,
+ .keyword_allowzero,
+ .keyword_while,
+ .keyword_anytype,
=> {
try out.writeAll("");
try writeEscaped(out, src[token.loc.start..token.loc.end]);
try out.writeAll("");
},
- .Keyword_fn => {
+ .keyword_fn => {
try out.writeAll("");
try writeEscaped(out, src[token.loc.start..token.loc.end]);
try out.writeAll("");
next_tok_is_fn = true;
},
- .Keyword_undefined,
- .Keyword_null,
- .Keyword_true,
- .Keyword_false,
+ .keyword_undefined,
+ .keyword_null,
+ .keyword_true,
+ .keyword_false,
=> {
try out.writeAll("");
try writeEscaped(out, src[token.loc.start..token.loc.end]);
try out.writeAll("");
},
- .StringLiteral,
- .MultilineStringLiteralLine,
- .CharLiteral,
+ .string_literal,
+ .multiline_string_literal_line,
+ .char_literal,
=> {
try out.writeAll("");
try writeEscaped(out, src[token.loc.start..token.loc.end]);
try out.writeAll("");
},
- .Builtin => {
+ .builtin => {
try out.writeAll("");
try writeEscaped(out, src[token.loc.start..token.loc.end]);
try out.writeAll("");
},
- .LineComment,
- .DocComment,
- .ContainerDocComment,
- .ShebangLine,
+ .doc_comment,
+ .container_doc_comment,
=> {
try out.writeAll("");
},
- .Identifier => {
+ .identifier => {
if (prev_tok_was_fn) {
try out.writeAll("");
try writeEscaped(out, src[token.loc.start..token.loc.end]);
@@ -908,71 +921,71 @@ fn tokenizeAndPrintRaw(docgen_tokenizer: *Tokenizer, out: anytype, source_token:
}
},
- .IntegerLiteral,
- .FloatLiteral,
+ .integer_literal,
+ .float_literal,
=> {
try out.writeAll("");
try writeEscaped(out, src[token.loc.start..token.loc.end]);
try out.writeAll("");
},
- .Bang,
- .Pipe,
- .PipePipe,
- .PipeEqual,
- .Equal,
- .EqualEqual,
- .EqualAngleBracketRight,
- .BangEqual,
- .LParen,
- .RParen,
- .Semicolon,
- .Percent,
- .PercentEqual,
- .LBrace,
- .RBrace,
- .LBracket,
- .RBracket,
- .Period,
- .PeriodAsterisk,
- .Ellipsis2,
- .Ellipsis3,
- .Caret,
- .CaretEqual,
- .Plus,
- .PlusPlus,
- .PlusEqual,
- .PlusPercent,
- .PlusPercentEqual,
- .Minus,
- .MinusEqual,
- .MinusPercent,
- .MinusPercentEqual,
- .Asterisk,
- .AsteriskEqual,
- .AsteriskAsterisk,
- .AsteriskPercent,
- .AsteriskPercentEqual,
- .Arrow,
- .Colon,
- .Slash,
- .SlashEqual,
- .Comma,
- .Ampersand,
- .AmpersandEqual,
- .QuestionMark,
- .AngleBracketLeft,
- .AngleBracketLeftEqual,
- .AngleBracketAngleBracketLeft,
- .AngleBracketAngleBracketLeftEqual,
- .AngleBracketRight,
- .AngleBracketRightEqual,
- .AngleBracketAngleBracketRight,
- .AngleBracketAngleBracketRightEqual,
- .Tilde,
+ .bang,
+ .pipe,
+ .pipe_pipe,
+ .pipe_equal,
+ .equal,
+ .equal_equal,
+ .equal_angle_bracket_right,
+ .bang_equal,
+ .l_paren,
+ .r_paren,
+ .semicolon,
+ .percent,
+ .percent_equal,
+ .l_brace,
+ .r_brace,
+ .l_bracket,
+ .r_bracket,
+ .period,
+ .period_asterisk,
+ .ellipsis2,
+ .ellipsis3,
+ .caret,
+ .caret_equal,
+ .plus,
+ .plus_plus,
+ .plus_equal,
+ .plus_percent,
+ .plus_percent_equal,
+ .minus,
+ .minus_equal,
+ .minus_percent,
+ .minus_percent_equal,
+ .asterisk,
+ .asterisk_equal,
+ .asterisk_asterisk,
+ .asterisk_percent,
+ .asterisk_percent_equal,
+ .arrow,
+ .colon,
+ .slash,
+ .slash_equal,
+ .comma,
+ .ampersand,
+ .ampersand_equal,
+ .question_mark,
+ .angle_bracket_left,
+ .angle_bracket_left_equal,
+ .angle_bracket_angle_bracket_left,
+ .angle_bracket_angle_bracket_left_equal,
+ .angle_bracket_right,
+ .angle_bracket_right_equal,
+ .angle_bracket_angle_bracket_right,
+ .angle_bracket_angle_bracket_right_equal,
+ .tilde,
=> try writeEscaped(out, src[token.loc.start..token.loc.end]),
- .Invalid, .Invalid_ampersands, .Invalid_periodasterisks => return parseError(
+ .invalid, .invalid_ampersands, .invalid_periodasterisks => return parseError(
docgen_tokenizer,
source_token,
"syntax error",
diff --git a/lib/std/heap/general_purpose_allocator.zig b/lib/std/heap/general_purpose_allocator.zig
index fb340edfd3fa..c731f22d66e2 100644
--- a/lib/std/heap/general_purpose_allocator.zig
+++ b/lib/std/heap/general_purpose_allocator.zig
@@ -98,7 +98,7 @@
//! in a `std.HashMap` using the backing allocator.
const std = @import("std");
-const log = std.log.scoped(.std);
+const log = std.log.scoped(.gpa);
const math = std.math;
const assert = std.debug.assert;
const mem = std.mem;
@@ -162,6 +162,9 @@ pub const Config = struct {
/// logged error messages with stack trace details. The downside is that every allocation
/// will be leaked!
never_unmap: bool = false,
+
+ /// Enables emitting info messages with the size and address of every allocation.
+ verbose_log: bool = false,
};
pub fn GeneralPurposeAllocator(comptime config: Config) type {
@@ -454,10 +457,19 @@ pub fn GeneralPurposeAllocator(comptime config: Config) type {
const result_len = try self.backing_allocator.resizeFn(self.backing_allocator, old_mem, old_align, new_size, len_align, ret_addr);
if (result_len == 0) {
+ if (config.verbose_log) {
+ log.info("large free {d} bytes at {*}", .{ old_mem.len, old_mem.ptr });
+ }
+
self.large_allocations.removeAssertDiscard(@ptrToInt(old_mem.ptr));
return 0;
}
+ if (config.verbose_log) {
+ log.info("large resize {d} bytes at {*} to {d}", .{
+ old_mem.len, old_mem.ptr, new_size,
+ });
+ }
entry.value.bytes = old_mem.ptr[0..result_len];
collectStackTrace(ret_addr, &entry.value.stack_addresses);
return result_len;
@@ -568,6 +580,9 @@ pub fn GeneralPurposeAllocator(comptime config: Config) type {
} else {
@memset(old_mem.ptr, undefined, old_mem.len);
}
+ if (config.verbose_log) {
+ log.info("small free {d} bytes at {*}", .{ old_mem.len, old_mem.ptr });
+ }
return @as(usize, 0);
}
const new_aligned_size = math.max(new_size, old_align);
@@ -576,6 +591,11 @@ pub fn GeneralPurposeAllocator(comptime config: Config) type {
if (old_mem.len > new_size) {
@memset(old_mem.ptr + new_size, undefined, old_mem.len - new_size);
}
+ if (config.verbose_log) {
+ log.info("small resize {d} bytes at {*} to {d}", .{
+ old_mem.len, old_mem.ptr, new_size,
+ });
+ }
return new_size;
}
return error.OutOfMemory;
@@ -623,6 +643,9 @@ pub fn GeneralPurposeAllocator(comptime config: Config) type {
gop.entry.value.bytes = slice;
collectStackTrace(ret_addr, &gop.entry.value.stack_addresses);
+ if (config.verbose_log) {
+ log.info("large alloc {d} bytes at {*}", .{ slice.len, slice.ptr });
+ }
return slice;
}
@@ -632,6 +655,9 @@ pub fn GeneralPurposeAllocator(comptime config: Config) type {
const new_size_class = math.ceilPowerOfTwoAssert(usize, new_aligned_size);
const ptr = try self.allocSlot(new_size_class, ret_addr);
+ if (config.verbose_log) {
+ log.info("small alloc {d} bytes at {*}", .{ len, ptr });
+ }
return ptr[0..len];
}
diff --git a/lib/std/io.zig b/lib/std/io.zig
index 240faaa452c4..b529c57866ef 100644
--- a/lib/std/io.zig
+++ b/lib/std/io.zig
@@ -142,9 +142,6 @@ pub const bitReader = @import("io/bit_reader.zig").bitReader;
pub const BitWriter = @import("io/bit_writer.zig").BitWriter;
pub const bitWriter = @import("io/bit_writer.zig").bitWriter;
-pub const AutoIndentingStream = @import("io/auto_indenting_stream.zig").AutoIndentingStream;
-pub const autoIndentingStream = @import("io/auto_indenting_stream.zig").autoIndentingStream;
-
pub const ChangeDetectionStream = @import("io/change_detection_stream.zig").ChangeDetectionStream;
pub const changeDetectionStream = @import("io/change_detection_stream.zig").changeDetectionStream;
diff --git a/lib/std/io/auto_indenting_stream.zig b/lib/std/io/auto_indenting_stream.zig
deleted file mode 100644
index 8f8b981b9b4c..000000000000
--- a/lib/std/io/auto_indenting_stream.zig
+++ /dev/null
@@ -1,154 +0,0 @@
-// SPDX-License-Identifier: MIT
-// Copyright (c) 2015-2021 Zig Contributors
-// This file is part of [zig](https://ziglang.org/), which is MIT licensed.
-// The MIT license requires this copyright notice to be included in all copies
-// and substantial portions of the software.
-
-const std = @import("../std.zig");
-const io = std.io;
-const mem = std.mem;
-const assert = std.debug.assert;
-
-/// Automatically inserts indentation of written data by keeping
-/// track of the current indentation level
-pub fn AutoIndentingStream(comptime UnderlyingWriter: type) type {
- return struct {
- const Self = @This();
- pub const Error = UnderlyingWriter.Error;
- pub const Writer = io.Writer(*Self, Error, write);
-
- underlying_writer: UnderlyingWriter,
-
- indent_count: usize = 0,
- indent_delta: usize,
- current_line_empty: bool = true,
- indent_one_shot_count: usize = 0, // automatically popped when applied
- applied_indent: usize = 0, // the most recently applied indent
- indent_next_line: usize = 0, // not used until the next line
-
- pub fn writer(self: *Self) Writer {
- return .{ .context = self };
- }
-
- pub fn write(self: *Self, bytes: []const u8) Error!usize {
- if (bytes.len == 0)
- return @as(usize, 0);
-
- try self.applyIndent();
- return self.writeNoIndent(bytes);
- }
-
- // Change the indent delta without changing the final indentation level
- pub fn setIndentDelta(self: *Self, indent_delta: usize) void {
- if (self.indent_delta == indent_delta) {
- return;
- } else if (self.indent_delta > indent_delta) {
- assert(self.indent_delta % indent_delta == 0);
- self.indent_count = self.indent_count * (self.indent_delta / indent_delta);
- } else {
- // assert that the current indentation (in spaces) in a multiple of the new delta
- assert((self.indent_count * self.indent_delta) % indent_delta == 0);
- self.indent_count = self.indent_count / (indent_delta / self.indent_delta);
- }
- self.indent_delta = indent_delta;
- }
-
- fn writeNoIndent(self: *Self, bytes: []const u8) Error!usize {
- if (bytes.len == 0)
- return @as(usize, 0);
-
- try self.underlying_writer.writeAll(bytes);
- if (bytes[bytes.len - 1] == '\n')
- self.resetLine();
- return bytes.len;
- }
-
- pub fn insertNewline(self: *Self) Error!void {
- _ = try self.writeNoIndent("\n");
- }
-
- fn resetLine(self: *Self) void {
- self.current_line_empty = true;
- self.indent_next_line = 0;
- }
-
- /// Insert a newline unless the current line is blank
- pub fn maybeInsertNewline(self: *Self) Error!void {
- if (!self.current_line_empty)
- try self.insertNewline();
- }
-
- /// Push default indentation
- pub fn pushIndent(self: *Self) void {
- // Doesn't actually write any indentation.
- // Just primes the stream to be able to write the correct indentation if it needs to.
- self.indent_count += 1;
- }
-
- /// Push an indent that is automatically popped after being applied
- pub fn pushIndentOneShot(self: *Self) void {
- self.indent_one_shot_count += 1;
- self.pushIndent();
- }
-
- /// Turns all one-shot indents into regular indents
- /// Returns number of indents that must now be manually popped
- pub fn lockOneShotIndent(self: *Self) usize {
- var locked_count = self.indent_one_shot_count;
- self.indent_one_shot_count = 0;
- return locked_count;
- }
-
- /// Push an indent that should not take effect until the next line
- pub fn pushIndentNextLine(self: *Self) void {
- self.indent_next_line += 1;
- self.pushIndent();
- }
-
- pub fn popIndent(self: *Self) void {
- assert(self.indent_count != 0);
- self.indent_count -= 1;
-
- if (self.indent_next_line > 0)
- self.indent_next_line -= 1;
- }
-
- /// Writes ' ' bytes if the current line is empty
- fn applyIndent(self: *Self) Error!void {
- const current_indent = self.currentIndent();
- if (self.current_line_empty and current_indent > 0) {
- try self.underlying_writer.writeByteNTimes(' ', current_indent);
- self.applied_indent = current_indent;
- }
-
- self.indent_count -= self.indent_one_shot_count;
- self.indent_one_shot_count = 0;
- self.current_line_empty = false;
- }
-
- /// Checks to see if the most recent indentation exceeds the currently pushed indents
- pub fn isLineOverIndented(self: *Self) bool {
- if (self.current_line_empty) return false;
- return self.applied_indent > self.currentIndent();
- }
-
- fn currentIndent(self: *Self) usize {
- var indent_current: usize = 0;
- if (self.indent_count > 0) {
- const indent_count = self.indent_count - self.indent_next_line;
- indent_current = indent_count * self.indent_delta;
- }
- return indent_current;
- }
- };
-}
-
-pub fn autoIndentingStream(
- indent_delta: usize,
- underlying_writer: anytype,
-) AutoIndentingStream(@TypeOf(underlying_writer)) {
- return AutoIndentingStream(@TypeOf(underlying_writer)){
- .underlying_writer = underlying_writer,
- .indent_delta = indent_delta,
- };
-}
diff --git a/lib/std/multi_array_list.zig b/lib/std/multi_array_list.zig
new file mode 100644
index 000000000000..3306fd3ef0c2
--- /dev/null
+++ b/lib/std/multi_array_list.zig
@@ -0,0 +1,446 @@
+// SPDX-License-Identifier: MIT
+// Copyright (c) 2015-2021 Zig Contributors
+// This file is part of [zig](https://ziglang.org/), which is MIT licensed.
+// The MIT license requires this copyright notice to be included in all copies
+// and substantial portions of the software.
+const std = @import("std.zig");
+const assert = std.debug.assert;
+const meta = std.meta;
+const mem = std.mem;
+const Allocator = mem.Allocator;
+
+pub fn MultiArrayList(comptime S: type) type {
+ return struct {
+ bytes: [*]align(@alignOf(S)) u8 = undefined,
+ len: usize = 0,
+ capacity: usize = 0,
+
+ pub const Elem = S;
+
+ pub const Field = meta.FieldEnum(S);
+
+ pub const Slice = struct {
+ /// This array is indexed by the field index which can be obtained
+ /// by using @enumToInt() on the Field enum
+ ptrs: [fields.len][*]u8,
+ len: usize,
+ capacity: usize,
+
+ pub fn items(self: Slice, comptime field: Field) []FieldType(field) {
+ const byte_ptr = self.ptrs[@enumToInt(field)];
+ const F = FieldType(field);
+ const casted_ptr = @ptrCast([*]F, @alignCast(@alignOf(F), byte_ptr));
+ return casted_ptr[0..self.len];
+ }
+
+ pub fn toMultiArrayList(self: Slice) Self {
+ if (self.ptrs.len == 0) {
+ return .{};
+ }
+ const unaligned_ptr = self.ptrs[sizes.fields[0]];
+ const aligned_ptr = @alignCast(@alignOf(S), unaligned_ptr);
+ const casted_ptr = @ptrCast([*]align(@alignOf(S)) u8, aligned_ptr);
+ return .{
+ .bytes = casted_ptr,
+ .len = self.len,
+ .capacity = self.capacity,
+ };
+ }
+
+ pub fn deinit(self: *Slice, gpa: *Allocator) void {
+ var other = self.toMultiArrayList();
+ other.deinit(gpa);
+ self.* = undefined;
+ }
+ };
+
+ const Self = @This();
+
+ const fields = meta.fields(S);
+ /// `sizes.bytes` is an array of @sizeOf each S field. Sorted by alignment, descending.
+ /// `sizes.fields` is an array mapping from `sizes.bytes` array index to field index.
+ const sizes = blk: {
+ const Data = struct {
+ size: usize,
+ size_index: usize,
+ alignment: usize,
+ };
+ var data: [fields.len]Data = undefined;
+ for (fields) |field_info, i| {
+ data[i] = .{
+ .size = @sizeOf(field_info.field_type),
+ .size_index = i,
+ .alignment = field_info.alignment,
+ };
+ }
+ const Sort = struct {
+ fn lessThan(trash: *i32, lhs: Data, rhs: Data) bool {
+ return lhs.alignment > rhs.alignment; // must be strict: `>=` violates std.sort's strict-weak-ordering contract for equal alignments
+ }
+ };
+ var trash: i32 = undefined; // workaround for stage1 compiler bug
+ std.sort.sort(Data, &data, &trash, Sort.lessThan);
+ var sizes_bytes: [fields.len]usize = undefined;
+ var field_indexes: [fields.len]usize = undefined;
+ for (data) |elem, i| {
+ sizes_bytes[i] = elem.size;
+ field_indexes[i] = elem.size_index;
+ }
+ break :blk .{
+ .bytes = sizes_bytes,
+ .fields = field_indexes,
+ };
+ };
+
+ /// Release all allocated memory.
+ pub fn deinit(self: *Self, gpa: *Allocator) void {
+ gpa.free(self.allocatedBytes());
+ self.* = undefined;
+ }
+
+ /// The caller owns the returned memory. Empties this MultiArrayList.
+ pub fn toOwnedSlice(self: *Self) Slice {
+ const result = self.slice();
+ self.* = .{};
+ return result;
+ }
+
+ pub fn slice(self: Self) Slice {
+ var result: Slice = .{
+ .ptrs = undefined,
+ .len = self.len,
+ .capacity = self.capacity,
+ };
+ var ptr: [*]u8 = self.bytes;
+ for (sizes.bytes) |field_size, i| {
+ result.ptrs[sizes.fields[i]] = ptr;
+ ptr += field_size * self.capacity;
+ }
+ return result;
+ }
+
+ pub fn items(self: Self, comptime field: Field) []FieldType(field) {
+ return self.slice().items(field);
+ }
+
+ /// Overwrite one array element with new data.
+ pub fn set(self: *Self, index: usize, elem: S) void {
+ const slices = self.slice();
+ inline for (fields) |field_info, i| {
+ slices.items(@intToEnum(Field, i))[index] = @field(elem, field_info.name);
+ }
+ }
+
+ /// Obtain all the data for one array element.
+ pub fn get(self: *Self, index: usize) S {
+ const slices = self.slice();
+ var result: S = undefined;
+ inline for (fields) |field_info, i| {
+ @field(result, field_info.name) = slices.items(@intToEnum(Field, i))[index]; // was `elem` (undeclared): must fill `result`, the value we return
+ }
+ return result;
+ }
+
+ /// Extend the list by 1 element. Allocates more memory as necessary.
+ pub fn append(self: *Self, gpa: *Allocator, elem: S) !void {
+ try self.ensureCapacity(gpa, self.len + 1);
+ self.appendAssumeCapacity(elem);
+ }
+
+ /// Extend the list by 1 element, but asserting `self.capacity`
+ /// is sufficient to hold an additional item.
+ pub fn appendAssumeCapacity(self: *Self, elem: S) void {
+ assert(self.len < self.capacity);
+ self.len += 1;
+ self.set(self.len - 1, elem);
+ }
+
+ /// Adjust the list's length to `new_len`.
+ /// Does not initialize added items, if any.
+ pub fn resize(self: *Self, gpa: *Allocator, new_len: usize) !void {
+ try self.ensureCapacity(gpa, new_len);
+ self.len = new_len;
+ }
+
+ /// Attempt to reduce allocated capacity to `new_len`.
+ /// If `new_len` is greater than zero, this may fail to reduce the capacity,
+ /// but the data remains intact and the length is updated to new_len.
+ pub fn shrinkAndFree(self: *Self, gpa: *Allocator, new_len: usize) void {
+ if (new_len == 0) {
+ gpa.free(self.allocatedBytes());
+ self.* = .{};
+ return;
+ }
+ assert(new_len <= self.capacity);
+ assert(new_len <= self.len);
+
+ const other_bytes = gpa.allocAdvanced(
+ u8,
+ @alignOf(S),
+ capacityInBytes(new_len),
+ .exact,
+ ) catch {
+ const self_slice = self.slice();
+ inline for (fields) |field_info, i| {
+ const field = @intToEnum(Field, i);
+ const dest_slice = self_slice.items(field)[new_len..];
+ const byte_count = dest_slice.len * @sizeOf(field_info.field_type);
+ // We use memset here for more efficient codegen in safety-checked,
+ // valgrind-enabled builds. Otherwise the valgrind client request
+ // will be repeated for every element.
+ @memset(@ptrCast([*]u8, dest_slice.ptr), undefined, byte_count);
+ }
+ self.len = new_len;
+ return;
+ };
+ var other = Self{
+ .bytes = other_bytes.ptr,
+ .capacity = new_len,
+ .len = new_len,
+ };
+ self.len = new_len;
+ const self_slice = self.slice();
+ const other_slice = other.slice();
+ inline for (fields) |field_info, i| {
+ const field = @intToEnum(Field, i);
+ // TODO we should be able to use std.mem.copy here but it causes a
+ // test failure on aarch64 with -OReleaseFast
+ const src_slice = mem.sliceAsBytes(self_slice.items(field));
+ const dst_slice = mem.sliceAsBytes(other_slice.items(field));
+ @memcpy(dst_slice.ptr, src_slice.ptr, src_slice.len);
+ }
+ gpa.free(self.allocatedBytes());
+ self.* = other;
+ }
+
+ /// Reduce length to `new_len`.
+ /// Invalidates pointers to elements `items[new_len..]`.
+ /// Keeps capacity the same.
+ pub fn shrinkRetainingCapacity(self: *Self, new_len: usize) void {
+ self.len = new_len;
+ }
+
+ /// Modify the array so that it can hold at least `new_capacity` items.
+ /// Implements super-linear growth to achieve amortized O(1) append operations.
+ /// Invalidates pointers if additional memory is needed.
+ pub fn ensureCapacity(self: *Self, gpa: *Allocator, new_capacity: usize) !void {
+ var better_capacity = self.capacity;
+ if (better_capacity >= new_capacity) return;
+
+ while (true) {
+ better_capacity += better_capacity / 2 + 8;
+ if (better_capacity >= new_capacity) break;
+ }
+
+ return self.setCapacity(gpa, better_capacity);
+ }
+
+ /// Modify the array so that it can hold exactly `new_capacity` items.
+ /// Invalidates pointers if additional memory is needed.
+ /// `new_capacity` must be greater or equal to `len`.
+ pub fn setCapacity(self: *Self, gpa: *Allocator, new_capacity: usize) !void {
+ assert(new_capacity >= self.len);
+ const new_bytes = try gpa.allocAdvanced(
+ u8,
+ @alignOf(S),
+ capacityInBytes(new_capacity),
+ .exact,
+ );
+ if (self.len == 0) {
+ self.bytes = new_bytes.ptr;
+ self.capacity = new_capacity;
+ return;
+ }
+ var other = Self{
+ .bytes = new_bytes.ptr,
+ .capacity = new_capacity,
+ .len = self.len,
+ };
+ const self_slice = self.slice();
+ const other_slice = other.slice();
+ inline for (fields) |field_info, i| {
+ const field = @intToEnum(Field, i);
+ // TODO we should be able to use std.mem.copy here but it causes a
+ // test failure on aarch64 with -OReleaseFast
+ const src_slice = mem.sliceAsBytes(self_slice.items(field));
+ const dst_slice = mem.sliceAsBytes(other_slice.items(field));
+ @memcpy(dst_slice.ptr, src_slice.ptr, src_slice.len);
+ }
+ gpa.free(self.allocatedBytes());
+ self.* = other;
+ }
+
+ fn capacityInBytes(capacity: usize) usize {
+ const sizes_vector: std.meta.Vector(sizes.bytes.len, usize) = sizes.bytes;
+ const capacity_vector = @splat(sizes.bytes.len, capacity);
+ return @reduce(.Add, capacity_vector * sizes_vector);
+ }
+
+ fn allocatedBytes(self: Self) []align(@alignOf(S)) u8 {
+ return self.bytes[0..capacityInBytes(self.capacity)];
+ }
+
+ fn FieldType(field: Field) type {
+ return meta.fieldInfo(S, field).field_type;
+ }
+ };
+}
+
+test "basic usage" {
+ const testing = std.testing;
+ const ally = testing.allocator;
+
+ const Foo = struct {
+ a: u32,
+ b: []const u8,
+ c: u8,
+ };
+
+ var list = MultiArrayList(Foo){};
+ defer list.deinit(ally);
+
+ try list.ensureCapacity(ally, 2);
+
+ list.appendAssumeCapacity(.{
+ .a = 1,
+ .b = "foobar",
+ .c = 'a',
+ });
+
+ list.appendAssumeCapacity(.{
+ .a = 2,
+ .b = "zigzag",
+ .c = 'b',
+ });
+
+ testing.expectEqualSlices(u32, list.items(.a), &[_]u32{ 1, 2 });
+ testing.expectEqualSlices(u8, list.items(.c), &[_]u8{ 'a', 'b' });
+
+ testing.expectEqual(@as(usize, 2), list.items(.b).len);
+ testing.expectEqualStrings("foobar", list.items(.b)[0]);
+ testing.expectEqualStrings("zigzag", list.items(.b)[1]);
+
+ try list.append(ally, .{
+ .a = 3,
+ .b = "fizzbuzz",
+ .c = 'c',
+ });
+
+ testing.expectEqualSlices(u32, list.items(.a), &[_]u32{ 1, 2, 3 });
+ testing.expectEqualSlices(u8, list.items(.c), &[_]u8{ 'a', 'b', 'c' });
+
+ testing.expectEqual(@as(usize, 3), list.items(.b).len);
+ testing.expectEqualStrings("foobar", list.items(.b)[0]);
+ testing.expectEqualStrings("zigzag", list.items(.b)[1]);
+ testing.expectEqualStrings("fizzbuzz", list.items(.b)[2]);
+
+ // Add 6 more things to force a capacity increase.
+ var i: usize = 0;
+ while (i < 6) : (i += 1) {
+ try list.append(ally, .{
+ .a = @intCast(u32, 4 + i),
+ .b = "whatever",
+ .c = @intCast(u8, 'd' + i),
+ });
+ }
+
+ testing.expectEqualSlices(
+ u32,
+ &[_]u32{ 1, 2, 3, 4, 5, 6, 7, 8, 9 },
+ list.items(.a),
+ );
+ testing.expectEqualSlices(
+ u8,
+ &[_]u8{ 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i' },
+ list.items(.c),
+ );
+
+ list.shrinkAndFree(ally, 3);
+
+ testing.expectEqualSlices(u32, list.items(.a), &[_]u32{ 1, 2, 3 });
+ testing.expectEqualSlices(u8, list.items(.c), &[_]u8{ 'a', 'b', 'c' });
+
+ testing.expectEqual(@as(usize, 3), list.items(.b).len);
+ testing.expectEqualStrings("foobar", list.items(.b)[0]);
+ testing.expectEqualStrings("zigzag", list.items(.b)[1]);
+ testing.expectEqualStrings("fizzbuzz", list.items(.b)[2]);
+}
+
+// This was observed to fail on aarch64 with LLVM 11, when the capacityInBytes
+// function used the @reduce code path.
+test "regression test for @reduce bug" {
+ const ally = std.testing.allocator;
+ var list = MultiArrayList(struct {
+ tag: std.zig.Token.Tag,
+ start: u32,
+ }){};
+ defer list.deinit(ally);
+
+ try list.ensureCapacity(ally, 20);
+
+ try list.append(ally, .{ .tag = .keyword_const, .start = 0 });
+ try list.append(ally, .{ .tag = .identifier, .start = 6 });
+ try list.append(ally, .{ .tag = .equal, .start = 10 });
+ try list.append(ally, .{ .tag = .builtin, .start = 12 });
+ try list.append(ally, .{ .tag = .l_paren, .start = 19 });
+ try list.append(ally, .{ .tag = .string_literal, .start = 20 });
+ try list.append(ally, .{ .tag = .r_paren, .start = 25 });
+ try list.append(ally, .{ .tag = .semicolon, .start = 26 });
+ try list.append(ally, .{ .tag = .keyword_pub, .start = 29 });
+ try list.append(ally, .{ .tag = .keyword_fn, .start = 33 });
+ try list.append(ally, .{ .tag = .identifier, .start = 36 });
+ try list.append(ally, .{ .tag = .l_paren, .start = 40 });
+ try list.append(ally, .{ .tag = .r_paren, .start = 41 });
+ try list.append(ally, .{ .tag = .identifier, .start = 43 });
+ try list.append(ally, .{ .tag = .bang, .start = 51 });
+ try list.append(ally, .{ .tag = .identifier, .start = 52 });
+ try list.append(ally, .{ .tag = .l_brace, .start = 57 });
+ try list.append(ally, .{ .tag = .identifier, .start = 63 });
+ try list.append(ally, .{ .tag = .period, .start = 66 });
+ try list.append(ally, .{ .tag = .identifier, .start = 67 });
+ try list.append(ally, .{ .tag = .period, .start = 70 });
+ try list.append(ally, .{ .tag = .identifier, .start = 71 });
+ try list.append(ally, .{ .tag = .l_paren, .start = 75 });
+ try list.append(ally, .{ .tag = .string_literal, .start = 76 });
+ try list.append(ally, .{ .tag = .comma, .start = 113 });
+ try list.append(ally, .{ .tag = .period, .start = 115 });
+ try list.append(ally, .{ .tag = .l_brace, .start = 116 });
+ try list.append(ally, .{ .tag = .r_brace, .start = 117 });
+ try list.append(ally, .{ .tag = .r_paren, .start = 118 });
+ try list.append(ally, .{ .tag = .semicolon, .start = 119 });
+ try list.append(ally, .{ .tag = .r_brace, .start = 121 });
+ try list.append(ally, .{ .tag = .eof, .start = 123 });
+
+ const tags = list.items(.tag);
+ std.testing.expectEqual(tags[1], .identifier);
+ std.testing.expectEqual(tags[2], .equal);
+ std.testing.expectEqual(tags[3], .builtin);
+ std.testing.expectEqual(tags[4], .l_paren);
+ std.testing.expectEqual(tags[5], .string_literal);
+ std.testing.expectEqual(tags[6], .r_paren);
+ std.testing.expectEqual(tags[7], .semicolon);
+ std.testing.expectEqual(tags[8], .keyword_pub);
+ std.testing.expectEqual(tags[9], .keyword_fn);
+ std.testing.expectEqual(tags[10], .identifier);
+ std.testing.expectEqual(tags[11], .l_paren);
+ std.testing.expectEqual(tags[12], .r_paren);
+ std.testing.expectEqual(tags[13], .identifier);
+ std.testing.expectEqual(tags[14], .bang);
+ std.testing.expectEqual(tags[15], .identifier);
+ std.testing.expectEqual(tags[16], .l_brace);
+ std.testing.expectEqual(tags[17], .identifier);
+ std.testing.expectEqual(tags[18], .period);
+ std.testing.expectEqual(tags[19], .identifier);
+ std.testing.expectEqual(tags[20], .period);
+ std.testing.expectEqual(tags[21], .identifier);
+ std.testing.expectEqual(tags[22], .l_paren);
+ std.testing.expectEqual(tags[23], .string_literal);
+ std.testing.expectEqual(tags[24], .comma);
+ std.testing.expectEqual(tags[25], .period);
+ std.testing.expectEqual(tags[26], .l_brace);
+ std.testing.expectEqual(tags[27], .r_brace);
+ std.testing.expectEqual(tags[28], .r_paren);
+ std.testing.expectEqual(tags[29], .semicolon);
+ std.testing.expectEqual(tags[30], .r_brace);
+ std.testing.expectEqual(tags[31], .eof);
+}
diff --git a/lib/std/std.zig b/lib/std/std.zig
index ca73adc36e76..c0d97a9d9c2e 100644
--- a/lib/std/std.zig
+++ b/lib/std/std.zig
@@ -20,6 +20,7 @@ pub const ComptimeStringMap = @import("comptime_string_map.zig").ComptimeStringM
pub const DynLib = @import("dynamic_library.zig").DynLib;
pub const HashMap = hash_map.HashMap;
pub const HashMapUnmanaged = hash_map.HashMapUnmanaged;
+pub const MultiArrayList = @import("multi_array_list.zig").MultiArrayList;
pub const PackedIntArray = @import("packed_int_array.zig").PackedIntArray;
pub const PackedIntArrayEndian = @import("packed_int_array.zig").PackedIntArrayEndian;
pub const PackedIntSlice = @import("packed_int_array.zig").PackedIntSlice;
diff --git a/lib/std/zig.zig b/lib/std/zig.zig
index 5119a4c8251e..197d7c2c5985 100644
--- a/lib/std/zig.zig
+++ b/lib/std/zig.zig
@@ -12,7 +12,6 @@ pub const fmtId = @import("zig/fmt.zig").fmtId;
pub const fmtEscapes = @import("zig/fmt.zig").fmtEscapes;
pub const parse = @import("zig/parse.zig").parse;
pub const parseStringLiteral = @import("zig/string_literal.zig").parse;
-pub const render = @import("zig/render.zig").render;
pub const ast = @import("zig/ast.zig");
pub const system = @import("zig/system.zig");
pub const CrossTarget = @import("zig/cross_target.zig").CrossTarget;
diff --git a/lib/std/zig/ast.zig b/lib/std/zig/ast.zig
index 436d9c37ee8f..46b58e9465ac 100644
--- a/lib/std/zig/ast.zig
+++ b/lib/std/zig/ast.zig
@@ -9,71 +9,78 @@ const testing = std.testing;
const mem = std.mem;
const Token = std.zig.Token;
-pub const TokenIndex = usize;
-pub const NodeIndex = usize;
+pub const TokenIndex = u32;
+pub const ByteOffset = u32;
+
+pub const TokenList = std.MultiArrayList(struct {
+ tag: Token.Tag,
+ start: ByteOffset,
+});
+pub const NodeList = std.MultiArrayList(Node);
pub const Tree = struct {
/// Reference to externally-owned data.
source: []const u8,
- token_ids: []const Token.Id,
- token_locs: []const Token.Loc,
- errors: []const Error,
- root_node: *Node.Root,
- arena: std.heap.ArenaAllocator.State,
- gpa: *mem.Allocator,
+ tokens: TokenList.Slice,
+ /// The root AST node is assumed to be index 0. Since there can be no
+ /// references to the root node, this means 0 is available to indicate null.
+ nodes: NodeList.Slice,
+ extra_data: []Node.Index,
- /// translate-c uses this to avoid having to emit correct newlines
- /// TODO get rid of this hack
- generated: bool = false,
+ errors: []const Error,
- pub fn deinit(self: *Tree) void {
- self.gpa.free(self.token_ids);
- self.gpa.free(self.token_locs);
- self.gpa.free(self.errors);
- self.arena.promote(self.gpa).deinit();
- }
+ pub const Location = struct {
+ line: usize,
+ column: usize,
+ line_start: usize,
+ line_end: usize,
+ };
- pub fn renderError(self: *Tree, parse_error: *const Error, stream: anytype) !void {
- return parse_error.render(self.token_ids, stream);
+ pub fn deinit(tree: *Tree, gpa: *mem.Allocator) void {
+ tree.tokens.deinit(gpa);
+ tree.nodes.deinit(gpa);
+ gpa.free(tree.extra_data);
+ gpa.free(tree.errors);
+ tree.* = undefined;
}
- pub fn tokenSlice(self: *Tree, token_index: TokenIndex) []const u8 {
- return self.tokenSliceLoc(self.token_locs[token_index]);
- }
+ pub const RenderError = error{
+ /// Ran out of memory allocating call stack frames to complete rendering, or
+ /// ran out of memory allocating space in the output buffer.
+ OutOfMemory,
+ };
- pub fn tokenSliceLoc(self: *Tree, token: Token.Loc) []const u8 {
- return self.source[token.start..token.end];
- }
+ /// `gpa` is used for allocating the resulting formatted source code, as well as
+ /// for allocating extra stack memory if needed, because this function utilizes recursion.
+ /// Note: that's not actually true yet, see https://github.com/ziglang/zig/issues/1006.
+ /// Caller owns the returned slice of bytes, allocated with `gpa`.
+ pub fn render(tree: Tree, gpa: *mem.Allocator) RenderError![]u8 {
+ var buffer = std.ArrayList(u8).init(gpa);
+ defer buffer.deinit();
- pub fn getNodeSource(self: *const Tree, node: *const Node) []const u8 {
- const first_token = self.token_locs[node.firstToken()];
- const last_token = self.token_locs[node.lastToken()];
- return self.source[first_token.start..last_token.end];
+ try tree.renderToArrayList(&buffer);
+ return buffer.toOwnedSlice();
}
- pub const Location = struct {
- line: usize,
- column: usize,
- line_start: usize,
- line_end: usize,
- };
+ pub fn renderToArrayList(tree: Tree, buffer: *std.ArrayList(u8)) RenderError!void {
+ return @import("./render.zig").renderTree(buffer, tree);
+ }
- /// Return the Location of the token relative to the offset specified by `start_index`.
- pub fn tokenLocationLoc(self: *Tree, start_index: usize, token: Token.Loc) Location {
+ pub fn tokenLocation(self: Tree, start_offset: ByteOffset, token_index: TokenIndex) Location {
var loc = Location{
.line = 0,
.column = 0,
- .line_start = start_index,
+ .line_start = start_offset,
.line_end = self.source.len,
};
- if (self.generated)
- return loc;
- const token_start = token.start;
- for (self.source[start_index..]) |c, i| {
- if (i + start_index == token_start) {
- loc.line_end = i + start_index;
- while (loc.line_end < self.source.len and self.source[loc.line_end] != '\n') : (loc.line_end += 1) {}
+ const token_start = self.tokens.items(.start)[token_index];
+ for (self.source[start_offset..]) |c, i| {
+ if (i + start_offset == token_start) {
+ loc.line_end = i + start_offset;
+ while (loc.line_end < self.source.len and self.source[loc.line_end] != '\n') {
+ loc.line_end += 1;
+ }
return loc;
}
if (c == '\n') {
@@ -87,3205 +94,2878 @@ pub const Tree = struct {
return loc;
}
- pub fn tokenLocation(self: *Tree, start_index: usize, token_index: TokenIndex) Location {
- return self.tokenLocationLoc(start_index, self.token_locs[token_index]);
- }
+ pub fn tokenSlice(tree: Tree, token_index: TokenIndex) []const u8 {
+ const token_starts = tree.tokens.items(.start);
+ const token_tags = tree.tokens.items(.tag);
+ const token_tag = token_tags[token_index];
+
+ // Many tokens can be determined entirely by their tag.
+ if (token_tag.lexeme()) |lexeme| {
+ return lexeme;
+ }
- pub fn tokensOnSameLine(self: *Tree, token1_index: TokenIndex, token2_index: TokenIndex) bool {
- return self.tokensOnSameLineLoc(self.token_locs[token1_index], self.token_locs[token2_index]);
+ // For some tokens, re-tokenization is needed to find the end.
+ var tokenizer: std.zig.Tokenizer = .{
+ .buffer = tree.source,
+ .index = token_starts[token_index],
+ .pending_invalid_token = null,
+ };
+ const token = tokenizer.next();
+ assert(token.tag == token_tag);
+ return tree.source[token.loc.start..token.loc.end];
}
- pub fn tokensOnSameLineLoc(self: *Tree, token1: Token.Loc, token2: Token.Loc) bool {
- return mem.indexOfScalar(u8, self.source[token1.end..token2.start], '\n') == null;
+ pub fn extraData(tree: Tree, index: usize, comptime T: type) T {
+ const fields = std.meta.fields(T);
+ var result: T = undefined;
+ inline for (fields) |field, i| {
+ comptime assert(field.field_type == Node.Index);
+ @field(result, field.name) = tree.extra_data[index + i];
+ }
+ return result;
}
- pub fn dump(self: *Tree) void {
- self.root_node.base.dump(0);
+ pub fn rootDecls(tree: Tree) []const Node.Index {
+ // Root is always index 0.
+ const nodes_data = tree.nodes.items(.data);
+ return tree.extra_data[nodes_data[0].lhs..nodes_data[0].rhs];
}
- /// Skips over comments
- pub fn prevToken(self: *Tree, token_index: TokenIndex) TokenIndex {
- var index = token_index - 1;
- while (self.token_ids[index] == Token.Id.LineComment) {
- index -= 1;
+ pub fn renderError(tree: Tree, parse_error: Error, stream: anytype) !void {
+ const token_tags = tree.tokens.items(.tag);
+ switch (parse_error.tag) {
+ .asterisk_after_ptr_deref => {
+ return stream.writeAll("'.*' cannot be followed by '*'. Are you missing a space?");
+ },
+ .decl_between_fields => {
+ return stream.writeAll("declarations are not allowed between container fields");
+ },
+ .expected_block => {
+ return stream.print("expected block or field, found '{s}'", .{
+ token_tags[parse_error.token].symbol(),
+ });
+ },
+ .expected_block_or_assignment => {
+ return stream.print("expected block or assignment, found '{s}'", .{
+ token_tags[parse_error.token].symbol(),
+ });
+ },
+ .expected_block_or_expr => {
+ return stream.print("expected block or expression, found '{s}'", .{
+ token_tags[parse_error.token].symbol(),
+ });
+ },
+ .expected_block_or_field => {
+ return stream.print("expected block or field, found '{s}'", .{
+ token_tags[parse_error.token].symbol(),
+ });
+ },
+ .expected_container_members => {
+ return stream.print("expected test, comptime, var decl, or container field, found '{s}'", .{
+ token_tags[parse_error.token].symbol(),
+ });
+ },
+ .expected_expr => {
+ return stream.print("expected expression, found '{s}'", .{
+ token_tags[parse_error.token].symbol(),
+ });
+ },
+ .expected_expr_or_assignment => {
+ return stream.print("expected expression or assignment, found '{s}'", .{
+ token_tags[parse_error.token].symbol(),
+ });
+ },
+ .expected_fn => {
+ return stream.print("expected function, found '{s}'", .{
+ token_tags[parse_error.token].symbol(),
+ });
+ },
+ .expected_inlinable => {
+ return stream.print("expected 'while' or 'for', found '{s}'", .{
+ token_tags[parse_error.token].symbol(),
+ });
+ },
+ .expected_labelable => {
+ return stream.print("expected 'while', 'for', 'inline', 'suspend', or '{{', found '{s}'", .{
+ token_tags[parse_error.token].symbol(),
+ });
+ },
+ .expected_param_list => {
+ return stream.print("expected parameter list, found '{s}'", .{
+ token_tags[parse_error.token].symbol(),
+ });
+ },
+ .expected_prefix_expr => {
+ return stream.print("expected prefix expression, found '{s}'", .{
+ token_tags[parse_error.token].symbol(),
+ });
+ },
+ .expected_primary_type_expr => {
+ return stream.print("expected primary type expression, found '{s}'", .{
+ token_tags[parse_error.token].symbol(),
+ });
+ },
+ .expected_pub_item => {
+ return stream.writeAll("expected function or variable declaration after pub");
+ },
+ .expected_return_type => {
+ return stream.print("expected return type expression, found '{s}'", .{
+ token_tags[parse_error.token].symbol(),
+ });
+ },
+ .expected_semi_or_else => {
+ return stream.print("expected ';' or 'else', found '{s}'", .{
+ token_tags[parse_error.token].symbol(),
+ });
+ },
+ .expected_semi_or_lbrace => {
+ return stream.print("expected ';' or '{{', found '{s}'", .{
+ token_tags[parse_error.token].symbol(),
+ });
+ },
+ .expected_statement => {
+ return stream.print("expected statement, found '{s}'", .{
+ token_tags[parse_error.token].symbol(),
+ });
+ },
+ .expected_string_literal => {
+ return stream.print("expected string literal, found '{s}'", .{
+ token_tags[parse_error.token].symbol(),
+ });
+ },
+ .expected_suffix_op => {
+ return stream.print("expected pointer dereference, optional unwrap, or field access, found '{s}'", .{
+ token_tags[parse_error.token].symbol(),
+ });
+ },
+ .expected_type_expr => {
+ return stream.print("expected type expression, found '{s}'", .{
+ token_tags[parse_error.token].symbol(),
+ });
+ },
+ .expected_var_decl => {
+ return stream.print("expected variable declaration, found '{s}'", .{
+ token_tags[parse_error.token].symbol(),
+ });
+ },
+ .expected_var_decl_or_fn => {
+ return stream.print("expected variable declaration or function, found '{s}'", .{
+ token_tags[parse_error.token].symbol(),
+ });
+ },
+ .expected_loop_payload => {
+ return stream.print("expected loop payload, found '{s}'", .{
+ token_tags[parse_error.token].symbol(),
+ });
+ },
+ .expected_container => {
+ return stream.print("expected a struct, enum or union, found '{s}'", .{
+ token_tags[parse_error.token].symbol(),
+ });
+ },
+ .extra_align_qualifier => {
+ return stream.writeAll("extra align qualifier");
+ },
+ .extra_allowzero_qualifier => {
+ return stream.writeAll("extra allowzero qualifier");
+ },
+ .extra_const_qualifier => {
+ return stream.writeAll("extra const qualifier");
+ },
+ .extra_volatile_qualifier => {
+ return stream.writeAll("extra volatile qualifier");
+ },
+ .invalid_align => {
+ return stream.writeAll("alignment not allowed on arrays");
+ },
+ .invalid_and => {
+ return stream.writeAll("`&&` is invalid; note that `and` is boolean AND");
+ },
+ .invalid_bit_range => {
+ return stream.writeAll("bit range not allowed on slices and arrays");
+ },
+ .invalid_token => {
+ return stream.print("invalid token '{s}'", .{
+ token_tags[parse_error.token].symbol(),
+ });
+ },
+ .same_line_doc_comment => {
+ return stream.writeAll("same line documentation comment");
+ },
+ .unattached_doc_comment => {
+ return stream.writeAll("unattached documentation comment");
+ },
+
+ .expected_token => {
+ const found_tag = token_tags[parse_error.token];
+ const expected_symbol = parse_error.extra.expected_tag.symbol();
+ switch (found_tag) {
+ .invalid => return stream.print("expected '{s}', found invalid bytes", .{
+ expected_symbol,
+ }),
+ else => return stream.print("expected '{s}', found '{s}'", .{
+ expected_symbol, found_tag.symbol(),
+ }),
+ }
+ },
}
- return index;
}
- /// Skips over comments
- pub fn nextToken(self: *Tree, token_index: TokenIndex) TokenIndex {
- var index = token_index + 1;
- while (self.token_ids[index] == Token.Id.LineComment) {
- index += 1;
- }
- return index;
+ pub fn firstToken(tree: Tree, node: Node.Index) TokenIndex {
+ const tags = tree.nodes.items(.tag);
+ const datas = tree.nodes.items(.data);
+ const main_tokens = tree.nodes.items(.main_token);
+ const token_tags = tree.tokens.items(.tag);
+ var end_offset: TokenIndex = 0;
+ var n = node;
+ while (true) switch (tags[n]) {
+ .root => return 0,
+
+ .test_decl,
+ .@"errdefer",
+ .@"defer",
+ .bool_not,
+ .negation,
+ .bit_not,
+ .negation_wrap,
+ .address_of,
+ .@"try",
+ .@"await",
+ .optional_type,
+ .@"switch",
+ .switch_comma,
+ .if_simple,
+ .@"if",
+ .@"suspend",
+ .@"resume",
+ .@"continue",
+ .@"break",
+ .@"return",
+ .anyframe_type,
+ .identifier,
+ .anyframe_literal,
+ .char_literal,
+ .integer_literal,
+ .float_literal,
+ .false_literal,
+ .true_literal,
+ .null_literal,
+ .undefined_literal,
+ .unreachable_literal,
+ .string_literal,
+ .multiline_string_literal,
+ .grouped_expression,
+ .builtin_call_two,
+ .builtin_call_two_comma,
+ .builtin_call,
+ .builtin_call_comma,
+ .error_set_decl,
+ .@"anytype",
+ .@"comptime",
+ .@"nosuspend",
+ .asm_simple,
+ .@"asm",
+ .array_type,
+ .array_type_sentinel,
+ .error_value,
+ => return main_tokens[n] - end_offset,
+
+ .array_init_dot,
+ .array_init_dot_comma,
+ .array_init_dot_two,
+ .array_init_dot_two_comma,
+ .struct_init_dot,
+ .struct_init_dot_comma,
+ .struct_init_dot_two,
+ .struct_init_dot_two_comma,
+ .enum_literal,
+ => return main_tokens[n] - 1 - end_offset,
+
+ .@"catch",
+ .field_access,
+ .unwrap_optional,
+ .equal_equal,
+ .bang_equal,
+ .less_than,
+ .greater_than,
+ .less_or_equal,
+ .greater_or_equal,
+ .assign_mul,
+ .assign_div,
+ .assign_mod,
+ .assign_add,
+ .assign_sub,
+ .assign_bit_shift_left,
+ .assign_bit_shift_right,
+ .assign_bit_and,
+ .assign_bit_xor,
+ .assign_bit_or,
+ .assign_mul_wrap,
+ .assign_add_wrap,
+ .assign_sub_wrap,
+ .assign,
+ .merge_error_sets,
+ .mul,
+ .div,
+ .mod,
+ .array_mult,
+ .mul_wrap,
+ .add,
+ .sub,
+ .array_cat,
+ .add_wrap,
+ .sub_wrap,
+ .bit_shift_left,
+ .bit_shift_right,
+ .bit_and,
+ .bit_xor,
+ .bit_or,
+ .@"orelse",
+ .bool_and,
+ .bool_or,
+ .slice_open,
+ .slice,
+ .slice_sentinel,
+ .deref,
+ .array_access,
+ .array_init_one,
+ .array_init_one_comma,
+ .array_init,
+ .array_init_comma,
+ .struct_init_one,
+ .struct_init_one_comma,
+ .struct_init,
+ .struct_init_comma,
+ .call_one,
+ .call_one_comma,
+ .call,
+ .call_comma,
+ .switch_range,
+ .error_union,
+ => n = datas[n].lhs,
+
+ .fn_decl,
+ .fn_proto_simple,
+ .fn_proto_multi,
+ .fn_proto_one,
+ .fn_proto,
+ => {
+ var i = main_tokens[n]; // fn token
+ while (i > 0) {
+ i -= 1;
+ switch (token_tags[i]) {
+ .keyword_extern,
+ .keyword_export,
+ .keyword_pub,
+ .keyword_threadlocal,
+ .string_literal,
+ => continue,
+
+ else => return i + 1 - end_offset,
+ }
+ }
+ return i - end_offset;
+ },
+
+ .@"usingnamespace" => {
+ const main_token = main_tokens[n];
+ if (main_token > 0 and token_tags[main_token - 1] == .keyword_pub) {
+ end_offset += 1;
+ }
+ return main_token - end_offset;
+ },
+
+ .async_call_one,
+ .async_call_one_comma,
+ .async_call,
+ .async_call_comma,
+ => {
+ end_offset += 1; // async token
+ n = datas[n].lhs;
+ },
+
+ .container_field_init,
+ .container_field_align,
+ .container_field,
+ => {
+ const name_token = main_tokens[n];
+ if (name_token > 0 and token_tags[name_token - 1] == .keyword_comptime) {
+ end_offset += 1;
+ }
+ return name_token - end_offset;
+ },
+
+ .global_var_decl,
+ .local_var_decl,
+ .simple_var_decl,
+ .aligned_var_decl,
+ => {
+ var i = main_tokens[n]; // mut token
+ while (i > 0) {
+ i -= 1;
+ switch (token_tags[i]) {
+ .keyword_extern,
+ .keyword_export,
+ .keyword_comptime,
+ .keyword_pub,
+ .keyword_threadlocal,
+ .string_literal,
+ => continue,
+
+ else => return i + 1 - end_offset,
+ }
+ }
+ return i - end_offset;
+ },
+
+ .block,
+ .block_semicolon,
+ .block_two,
+ .block_two_semicolon,
+ => {
+ // Look for a label.
+ const lbrace = main_tokens[n];
+ if (token_tags[lbrace - 1] == .colon) {
+ end_offset += 2;
+ }
+ return lbrace - end_offset;
+ },
+
+ .container_decl,
+ .container_decl_trailing,
+ .container_decl_two,
+ .container_decl_two_trailing,
+ .container_decl_arg,
+ .container_decl_arg_trailing,
+ .tagged_union,
+ .tagged_union_trailing,
+ .tagged_union_two,
+ .tagged_union_two_trailing,
+ .tagged_union_enum_tag,
+ .tagged_union_enum_tag_trailing,
+ => {
+ const main_token = main_tokens[n];
+ switch (token_tags[main_token - 1]) {
+ .keyword_packed, .keyword_extern => end_offset += 1,
+ else => {},
+ }
+ return main_token - end_offset;
+ },
+
+ .ptr_type_aligned,
+ .ptr_type_sentinel,
+ .ptr_type,
+ .ptr_type_bit_range,
+ => {
+ const main_token = main_tokens[n];
+ return switch (token_tags[main_token]) {
+ .asterisk,
+ .asterisk_asterisk,
+ => switch (token_tags[main_token - 1]) {
+ .l_bracket => main_token - 1,
+ else => main_token,
+ },
+ .l_bracket => main_token,
+ else => unreachable,
+ } - end_offset;
+ },
+
+ .switch_case_one => {
+ if (datas[n].lhs == 0) {
+ return main_tokens[n] - 1 - end_offset; // else token
+ } else {
+ n = datas[n].lhs;
+ }
+ },
+ .switch_case => {
+ const extra = tree.extraData(datas[n].lhs, Node.SubRange);
+ assert(extra.end - extra.start > 0);
+ n = tree.extra_data[extra.start];
+ },
+
+ .asm_output, .asm_input => {
+ assert(token_tags[main_tokens[n] - 1] == .l_bracket);
+ return main_tokens[n] - 1 - end_offset;
+ },
+
+ .while_simple,
+ .while_cont,
+ .@"while",
+ .for_simple,
+ .@"for",
+ => {
+ // Look for a label and inline.
+ const main_token = main_tokens[n];
+ var result = main_token;
+ if (token_tags[result - 1] == .keyword_inline) {
+ result -= 1;
+ }
+ if (token_tags[result - 1] == .colon) {
+ result -= 2;
+ }
+ return result - end_offset;
+ },
+ };
}
-};
-pub const Error = union(enum) {
- InvalidToken: InvalidToken,
- ExpectedContainerMembers: ExpectedContainerMembers,
- ExpectedStringLiteral: ExpectedStringLiteral,
- ExpectedIntegerLiteral: ExpectedIntegerLiteral,
- ExpectedPubItem: ExpectedPubItem,
- ExpectedIdentifier: ExpectedIdentifier,
- ExpectedStatement: ExpectedStatement,
- ExpectedVarDeclOrFn: ExpectedVarDeclOrFn,
- ExpectedVarDecl: ExpectedVarDecl,
- ExpectedFn: ExpectedFn,
- ExpectedReturnType: ExpectedReturnType,
- ExpectedAggregateKw: ExpectedAggregateKw,
- UnattachedDocComment: UnattachedDocComment,
- ExpectedEqOrSemi: ExpectedEqOrSemi,
- ExpectedSemiOrLBrace: ExpectedSemiOrLBrace,
- ExpectedSemiOrElse: ExpectedSemiOrElse,
- ExpectedLabelOrLBrace: ExpectedLabelOrLBrace,
- ExpectedLBrace: ExpectedLBrace,
- ExpectedColonOrRParen: ExpectedColonOrRParen,
- ExpectedLabelable: ExpectedLabelable,
- ExpectedInlinable: ExpectedInlinable,
- ExpectedAsmOutputReturnOrType: ExpectedAsmOutputReturnOrType,
- ExpectedCall: ExpectedCall,
- ExpectedCallOrFnProto: ExpectedCallOrFnProto,
- ExpectedSliceOrRBracket: ExpectedSliceOrRBracket,
- ExtraAlignQualifier: ExtraAlignQualifier,
- ExtraConstQualifier: ExtraConstQualifier,
- ExtraVolatileQualifier: ExtraVolatileQualifier,
- ExtraAllowZeroQualifier: ExtraAllowZeroQualifier,
- ExpectedTypeExpr: ExpectedTypeExpr,
- ExpectedPrimaryTypeExpr: ExpectedPrimaryTypeExpr,
- ExpectedParamType: ExpectedParamType,
- ExpectedExpr: ExpectedExpr,
- ExpectedPrimaryExpr: ExpectedPrimaryExpr,
- ExpectedToken: ExpectedToken,
- ExpectedCommaOrEnd: ExpectedCommaOrEnd,
- ExpectedParamList: ExpectedParamList,
- ExpectedPayload: ExpectedPayload,
- ExpectedBlockOrAssignment: ExpectedBlockOrAssignment,
- ExpectedBlockOrExpression: ExpectedBlockOrExpression,
- ExpectedExprOrAssignment: ExpectedExprOrAssignment,
- ExpectedPrefixExpr: ExpectedPrefixExpr,
- ExpectedLoopExpr: ExpectedLoopExpr,
- ExpectedDerefOrUnwrap: ExpectedDerefOrUnwrap,
- ExpectedSuffixOp: ExpectedSuffixOp,
- ExpectedBlockOrField: ExpectedBlockOrField,
- DeclBetweenFields: DeclBetweenFields,
- InvalidAnd: InvalidAnd,
- AsteriskAfterPointerDereference: AsteriskAfterPointerDereference,
-
- pub fn render(self: *const Error, tokens: []const Token.Id, stream: anytype) !void {
- switch (self.*) {
- .InvalidToken => |*x| return x.render(tokens, stream),
- .ExpectedContainerMembers => |*x| return x.render(tokens, stream),
- .ExpectedStringLiteral => |*x| return x.render(tokens, stream),
- .ExpectedIntegerLiteral => |*x| return x.render(tokens, stream),
- .ExpectedPubItem => |*x| return x.render(tokens, stream),
- .ExpectedIdentifier => |*x| return x.render(tokens, stream),
- .ExpectedStatement => |*x| return x.render(tokens, stream),
- .ExpectedVarDeclOrFn => |*x| return x.render(tokens, stream),
- .ExpectedVarDecl => |*x| return x.render(tokens, stream),
- .ExpectedFn => |*x| return x.render(tokens, stream),
- .ExpectedReturnType => |*x| return x.render(tokens, stream),
- .ExpectedAggregateKw => |*x| return x.render(tokens, stream),
- .UnattachedDocComment => |*x| return x.render(tokens, stream),
- .ExpectedEqOrSemi => |*x| return x.render(tokens, stream),
- .ExpectedSemiOrLBrace => |*x| return x.render(tokens, stream),
- .ExpectedSemiOrElse => |*x| return x.render(tokens, stream),
- .ExpectedLabelOrLBrace => |*x| return x.render(tokens, stream),
- .ExpectedLBrace => |*x| return x.render(tokens, stream),
- .ExpectedColonOrRParen => |*x| return x.render(tokens, stream),
- .ExpectedLabelable => |*x| return x.render(tokens, stream),
- .ExpectedInlinable => |*x| return x.render(tokens, stream),
- .ExpectedAsmOutputReturnOrType => |*x| return x.render(tokens, stream),
- .ExpectedCall => |*x| return x.render(tokens, stream),
- .ExpectedCallOrFnProto => |*x| return x.render(tokens, stream),
- .ExpectedSliceOrRBracket => |*x| return x.render(tokens, stream),
- .ExtraAlignQualifier => |*x| return x.render(tokens, stream),
- .ExtraConstQualifier => |*x| return x.render(tokens, stream),
- .ExtraVolatileQualifier => |*x| return x.render(tokens, stream),
- .ExtraAllowZeroQualifier => |*x| return x.render(tokens, stream),
- .ExpectedTypeExpr => |*x| return x.render(tokens, stream),
- .ExpectedPrimaryTypeExpr => |*x| return x.render(tokens, stream),
- .ExpectedParamType => |*x| return x.render(tokens, stream),
- .ExpectedExpr => |*x| return x.render(tokens, stream),
- .ExpectedPrimaryExpr => |*x| return x.render(tokens, stream),
- .ExpectedToken => |*x| return x.render(tokens, stream),
- .ExpectedCommaOrEnd => |*x| return x.render(tokens, stream),
- .ExpectedParamList => |*x| return x.render(tokens, stream),
- .ExpectedPayload => |*x| return x.render(tokens, stream),
- .ExpectedBlockOrAssignment => |*x| return x.render(tokens, stream),
- .ExpectedBlockOrExpression => |*x| return x.render(tokens, stream),
- .ExpectedExprOrAssignment => |*x| return x.render(tokens, stream),
- .ExpectedPrefixExpr => |*x| return x.render(tokens, stream),
- .ExpectedLoopExpr => |*x| return x.render(tokens, stream),
- .ExpectedDerefOrUnwrap => |*x| return x.render(tokens, stream),
- .ExpectedSuffixOp => |*x| return x.render(tokens, stream),
- .ExpectedBlockOrField => |*x| return x.render(tokens, stream),
- .DeclBetweenFields => |*x| return x.render(tokens, stream),
- .InvalidAnd => |*x| return x.render(tokens, stream),
- .AsteriskAfterPointerDereference => |*x| return x.render(tokens, stream),
- }
+ pub fn lastToken(tree: Tree, node: Node.Index) TokenIndex {
+ const tags = tree.nodes.items(.tag);
+ const datas = tree.nodes.items(.data);
+ const main_tokens = tree.nodes.items(.main_token);
+ const token_starts = tree.tokens.items(.start);
+ const token_tags = tree.tokens.items(.tag);
+ var n = node;
+ var end_offset: TokenIndex = 0;
+ while (true) switch (tags[n]) {
+ .root => return @intCast(TokenIndex, tree.tokens.len - 1),
+
+ .@"usingnamespace",
+ .bool_not,
+ .negation,
+ .bit_not,
+ .negation_wrap,
+ .address_of,
+ .@"try",
+ .@"await",
+ .optional_type,
+ .@"resume",
+ .@"nosuspend",
+ .@"comptime",
+ => n = datas[n].lhs,
+
+ .test_decl,
+ .@"errdefer",
+ .@"defer",
+ .@"catch",
+ .equal_equal,
+ .bang_equal,
+ .less_than,
+ .greater_than,
+ .less_or_equal,
+ .greater_or_equal,
+ .assign_mul,
+ .assign_div,
+ .assign_mod,
+ .assign_add,
+ .assign_sub,
+ .assign_bit_shift_left,
+ .assign_bit_shift_right,
+ .assign_bit_and,
+ .assign_bit_xor,
+ .assign_bit_or,
+ .assign_mul_wrap,
+ .assign_add_wrap,
+ .assign_sub_wrap,
+ .assign,
+ .merge_error_sets,
+ .mul,
+ .div,
+ .mod,
+ .array_mult,
+ .mul_wrap,
+ .add,
+ .sub,
+ .array_cat,
+ .add_wrap,
+ .sub_wrap,
+ .bit_shift_left,
+ .bit_shift_right,
+ .bit_and,
+ .bit_xor,
+ .bit_or,
+ .@"orelse",
+ .bool_and,
+ .bool_or,
+ .anyframe_type,
+ .error_union,
+ .if_simple,
+ .while_simple,
+ .for_simple,
+ .fn_proto_simple,
+ .fn_proto_multi,
+ .ptr_type_aligned,
+ .ptr_type_sentinel,
+ .ptr_type,
+ .ptr_type_bit_range,
+ .array_type,
+ .switch_case_one,
+ .switch_case,
+ .switch_range,
+ => n = datas[n].rhs,
+
+ .field_access,
+ .unwrap_optional,
+ .grouped_expression,
+ .multiline_string_literal,
+ .error_set_decl,
+ .asm_simple,
+ .asm_output,
+ .asm_input,
+ .error_value,
+ => return datas[n].rhs + end_offset,
+
+ .@"anytype",
+ .anyframe_literal,
+ .char_literal,
+ .integer_literal,
+ .float_literal,
+ .false_literal,
+ .true_literal,
+ .null_literal,
+ .undefined_literal,
+ .unreachable_literal,
+ .identifier,
+ .deref,
+ .enum_literal,
+ .string_literal,
+ => return main_tokens[n] + end_offset,
+
+ .@"return" => if (datas[n].lhs != 0) {
+ n = datas[n].lhs;
+ } else {
+ return main_tokens[n] + end_offset;
+ },
+
+ .call, .async_call => {
+ end_offset += 1; // for the rparen
+ const params = tree.extraData(datas[n].rhs, Node.SubRange);
+ if (params.end - params.start == 0) {
+ return main_tokens[n] + end_offset;
+ }
+ n = tree.extra_data[params.end - 1]; // last parameter
+ },
+ .tagged_union_enum_tag => {
+ const members = tree.extraData(datas[n].rhs, Node.SubRange);
+ if (members.end - members.start == 0) {
+ end_offset += 4; // for the rparen + rparen + lbrace + rbrace
+ n = datas[n].lhs;
+ } else {
+ end_offset += 1; // for the rbrace
+ n = tree.extra_data[members.end - 1]; // last parameter
+ }
+ },
+ .call_comma,
+ .async_call_comma,
+ .tagged_union_enum_tag_trailing,
+ => {
+ end_offset += 2; // for the comma/semicolon + rparen/rbrace
+ const params = tree.extraData(datas[n].rhs, Node.SubRange);
+ assert(params.end > params.start);
+ n = tree.extra_data[params.end - 1]; // last parameter
+ },
+ .@"switch" => {
+ const cases = tree.extraData(datas[n].rhs, Node.SubRange);
+ if (cases.end - cases.start == 0) {
+ end_offset += 3; // rparen, lbrace, rbrace
+ n = datas[n].lhs; // condition expression
+ } else {
+ end_offset += 1; // for the rbrace
+ n = tree.extra_data[cases.end - 1]; // last case
+ }
+ },
+ .container_decl_arg => {
+ const members = tree.extraData(datas[n].rhs, Node.SubRange);
+ if (members.end - members.start == 0) {
+ end_offset += 1; // for the rparen
+ n = datas[n].lhs;
+ } else {
+ end_offset += 1; // for the rbrace
+ n = tree.extra_data[members.end - 1]; // last parameter
+ }
+ },
+ .@"asm" => {
+ const extra = tree.extraData(datas[n].rhs, Node.Asm);
+ return extra.rparen + end_offset;
+ },
+ .array_init,
+ .struct_init,
+ => {
+ const elements = tree.extraData(datas[n].rhs, Node.SubRange);
+ assert(elements.end - elements.start > 0);
+ end_offset += 1; // for the rbrace
+ n = tree.extra_data[elements.end - 1]; // last element
+ },
+ .array_init_comma,
+ .struct_init_comma,
+ .container_decl_arg_trailing,
+ .switch_comma,
+ => {
+ const members = tree.extraData(datas[n].rhs, Node.SubRange);
+ assert(members.end - members.start > 0);
+ end_offset += 2; // for the comma + rbrace
+ n = tree.extra_data[members.end - 1]; // last parameter
+ },
+ .array_init_dot,
+ .struct_init_dot,
+ .block,
+ .container_decl,
+ .tagged_union,
+ .builtin_call,
+ => {
+ assert(datas[n].rhs - datas[n].lhs > 0);
+ end_offset += 1; // for the rbrace
+ n = tree.extra_data[datas[n].rhs - 1]; // last statement
+ },
+ .array_init_dot_comma,
+ .struct_init_dot_comma,
+ .block_semicolon,
+ .container_decl_trailing,
+ .tagged_union_trailing,
+ .builtin_call_comma,
+ => {
+ assert(datas[n].rhs - datas[n].lhs > 0);
+ end_offset += 2; // for the comma/semicolon + rbrace/rparen
+ n = tree.extra_data[datas[n].rhs - 1]; // last member
+ },
+ .call_one,
+ .async_call_one,
+ .array_access,
+ => {
+ end_offset += 1; // for the rparen/rbracket
+ if (datas[n].rhs == 0) {
+ return main_tokens[n] + end_offset;
+ }
+ n = datas[n].rhs;
+ },
+ .array_init_dot_two,
+ .block_two,
+ .builtin_call_two,
+ .struct_init_dot_two,
+ .container_decl_two,
+ .tagged_union_two,
+ => {
+ if (datas[n].rhs != 0) {
+ end_offset += 1; // for the rparen/rbrace
+ n = datas[n].rhs;
+ } else if (datas[n].lhs != 0) {
+ end_offset += 1; // for the rparen/rbrace
+ n = datas[n].lhs;
+ } else {
+ switch (tags[n]) {
+ .array_init_dot_two,
+ .block_two,
+ .struct_init_dot_two,
+ => end_offset += 1, // rbrace
+ .builtin_call_two => end_offset += 2, // lparen/lbrace + rparen/rbrace
+ .container_decl_two => {
+ var i: u32 = 2; // lbrace + rbrace
+ while (token_tags[main_tokens[n] + i] == .container_doc_comment) i += 1;
+ end_offset += i;
+ },
+ .tagged_union_two => {
+ var i: u32 = 5; // (enum) {}
+ while (token_tags[main_tokens[n] + i] == .container_doc_comment) i += 1;
+ end_offset += i;
+ },
+ else => unreachable,
+ }
+ return main_tokens[n] + end_offset;
+ }
+ },
+ .array_init_dot_two_comma,
+ .builtin_call_two_comma,
+ .block_two_semicolon,
+ .struct_init_dot_two_comma,
+ .container_decl_two_trailing,
+ .tagged_union_two_trailing,
+ => {
+ end_offset += 2; // for the comma/semicolon + rbrace/rparen
+ if (datas[n].rhs != 0) {
+ n = datas[n].rhs;
+ } else if (datas[n].lhs != 0) {
+ n = datas[n].lhs;
+ } else {
+ unreachable;
+ }
+ },
+ .simple_var_decl => {
+ if (datas[n].rhs != 0) {
+ n = datas[n].rhs;
+ } else if (datas[n].lhs != 0) {
+ n = datas[n].lhs;
+ } else {
+ end_offset += 1; // from mut token to name
+ return main_tokens[n] + end_offset;
+ }
+ },
+ .aligned_var_decl => {
+ if (datas[n].rhs != 0) {
+ n = datas[n].rhs;
+ } else if (datas[n].lhs != 0) {
+ end_offset += 1; // for the rparen
+ n = datas[n].lhs;
+ } else {
+ end_offset += 1; // from mut token to name
+ return main_tokens[n] + end_offset;
+ }
+ },
+ .global_var_decl => {
+ if (datas[n].rhs != 0) {
+ n = datas[n].rhs;
+ } else {
+ const extra = tree.extraData(datas[n].lhs, Node.GlobalVarDecl);
+ if (extra.section_node != 0) {
+ end_offset += 1; // for the rparen
+ n = extra.section_node;
+ } else if (extra.align_node != 0) {
+ end_offset += 1; // for the rparen
+ n = extra.align_node;
+ } else if (extra.type_node != 0) {
+ n = extra.type_node;
+ } else {
+ end_offset += 1; // from mut token to name
+ return main_tokens[n] + end_offset;
+ }
+ }
+ },
+ .local_var_decl => {
+ if (datas[n].rhs != 0) {
+ n = datas[n].rhs;
+ } else {
+ const extra = tree.extraData(datas[n].lhs, Node.LocalVarDecl);
+ if (extra.align_node != 0) {
+ end_offset += 1; // for the rparen
+ n = extra.align_node;
+ } else if (extra.type_node != 0) {
+ n = extra.type_node;
+ } else {
+ end_offset += 1; // from mut token to name
+ return main_tokens[n] + end_offset;
+ }
+ }
+ },
+ .container_field_init => {
+ if (datas[n].rhs != 0) {
+ n = datas[n].rhs;
+ } else if (datas[n].lhs != 0) {
+ n = datas[n].lhs;
+ } else {
+ return main_tokens[n] + end_offset;
+ }
+ },
+ .container_field_align => {
+ if (datas[n].rhs != 0) {
+ end_offset += 1; // for the rparen
+ n = datas[n].rhs;
+ } else if (datas[n].lhs != 0) {
+ n = datas[n].lhs;
+ } else {
+ return main_tokens[n] + end_offset;
+ }
+ },
+ .container_field => {
+ const extra = tree.extraData(datas[n].rhs, Node.ContainerField);
+ if (extra.value_expr != 0) {
+ n = extra.value_expr;
+ } else if (extra.align_expr != 0) {
+ end_offset += 1; // for the rparen
+ n = extra.align_expr;
+ } else if (datas[n].lhs != 0) {
+ n = datas[n].lhs;
+ } else {
+ return main_tokens[n] + end_offset;
+ }
+ },
+
+ .array_init_one,
+ .struct_init_one,
+ => {
+ end_offset += 1; // rbrace
+ if (datas[n].rhs == 0) {
+ return main_tokens[n] + end_offset;
+ } else {
+ n = datas[n].rhs;
+ }
+ },
+ .slice_open,
+ .call_one_comma,
+ .async_call_one_comma,
+ .array_init_one_comma,
+ .struct_init_one_comma,
+ => {
+ end_offset += 2; // ellipsis2 + rbracket, or comma + rparen
+ n = datas[n].rhs;
+ assert(n != 0);
+ },
+ .slice => {
+ const extra = tree.extraData(datas[n].rhs, Node.Slice);
+ assert(extra.end != 0); // should have used SliceOpen
+ end_offset += 1; // rbracket
+ n = extra.end;
+ },
+ .slice_sentinel => {
+ const extra = tree.extraData(datas[n].rhs, Node.SliceSentinel);
+ assert(extra.sentinel != 0); // should have used Slice
+ end_offset += 1; // rbracket
+ n = extra.sentinel;
+ },
+
+ .@"continue" => {
+ if (datas[n].lhs != 0) {
+ return datas[n].lhs + end_offset;
+ } else {
+ return main_tokens[n] + end_offset;
+ }
+ },
+ .@"break" => {
+ if (datas[n].rhs != 0) {
+ n = datas[n].rhs;
+ } else if (datas[n].lhs != 0) {
+ return datas[n].lhs + end_offset;
+ } else {
+ return main_tokens[n] + end_offset;
+ }
+ },
+ .fn_decl => {
+ if (datas[n].rhs != 0) {
+ n = datas[n].rhs;
+ } else {
+ n = datas[n].lhs;
+ }
+ },
+ .fn_proto_one => {
+ const extra = tree.extraData(datas[n].lhs, Node.FnProtoOne);
+ // linksection, callconv, align can appear in any order, so we
+ // find the last one here.
+ var max_node: Node.Index = datas[n].rhs;
+ var max_start = token_starts[main_tokens[max_node]];
+ var max_offset: TokenIndex = 0;
+ if (extra.align_expr != 0) {
+ const start = token_starts[main_tokens[extra.align_expr]];
+ if (start > max_start) {
+ max_node = extra.align_expr;
+ max_start = start;
+ max_offset = 1; // for the rparen
+ }
+ }
+ if (extra.section_expr != 0) {
+ const start = token_starts[main_tokens[extra.section_expr]];
+ if (start > max_start) {
+ max_node = extra.section_expr;
+ max_start = start;
+ max_offset = 1; // for the rparen
+ }
+ }
+ if (extra.callconv_expr != 0) {
+ const start = token_starts[main_tokens[extra.callconv_expr]];
+ if (start > max_start) {
+ max_node = extra.callconv_expr;
+ max_start = start;
+ max_offset = 1; // for the rparen
+ }
+ }
+ n = max_node;
+ end_offset += max_offset;
+ },
+ .fn_proto => {
+ const extra = tree.extraData(datas[n].lhs, Node.FnProto);
+ // linksection, callconv, align can appear in any order, so we
+ // find the last one here.
+ var max_node: Node.Index = datas[n].rhs;
+ var max_start = token_starts[main_tokens[max_node]];
+ var max_offset: TokenIndex = 0;
+ if (extra.align_expr != 0) {
+ const start = token_starts[main_tokens[extra.align_expr]];
+ if (start > max_start) {
+ max_node = extra.align_expr;
+ max_start = start;
+ max_offset = 1; // for the rparen
+ }
+ }
+ if (extra.section_expr != 0) {
+ const start = token_starts[main_tokens[extra.section_expr]];
+ if (start > max_start) {
+ max_node = extra.section_expr;
+ max_start = start;
+ max_offset = 1; // for the rparen
+ }
+ }
+ if (extra.callconv_expr != 0) {
+ const start = token_starts[main_tokens[extra.callconv_expr]];
+ if (start > max_start) {
+ max_node = extra.callconv_expr;
+ max_start = start;
+ max_offset = 1; // for the rparen
+ }
+ }
+ n = max_node;
+ end_offset += max_offset;
+ },
+ .while_cont => {
+ const extra = tree.extraData(datas[n].rhs, Node.WhileCont);
+ assert(extra.then_expr != 0);
+ n = extra.then_expr;
+ },
+ .@"while" => {
+ const extra = tree.extraData(datas[n].rhs, Node.While);
+ assert(extra.else_expr != 0);
+ n = extra.else_expr;
+ },
+ .@"if", .@"for" => {
+ const extra = tree.extraData(datas[n].rhs, Node.If);
+ assert(extra.else_expr != 0);
+ n = extra.else_expr;
+ },
+ .@"suspend" => {
+ if (datas[n].lhs != 0) {
+ n = datas[n].lhs;
+ } else {
+ return main_tokens[n] + end_offset;
+ }
+ },
+ .array_type_sentinel => {
+ const extra = tree.extraData(datas[n].rhs, Node.ArrayTypeSentinel);
+ n = extra.elem_type;
+ },
+ };
}
- pub fn loc(self: *const Error) TokenIndex {
- switch (self.*) {
- .InvalidToken => |x| return x.token,
- .ExpectedContainerMembers => |x| return x.token,
- .ExpectedStringLiteral => |x| return x.token,
- .ExpectedIntegerLiteral => |x| return x.token,
- .ExpectedPubItem => |x| return x.token,
- .ExpectedIdentifier => |x| return x.token,
- .ExpectedStatement => |x| return x.token,
- .ExpectedVarDeclOrFn => |x| return x.token,
- .ExpectedVarDecl => |x| return x.token,
- .ExpectedFn => |x| return x.token,
- .ExpectedReturnType => |x| return x.token,
- .ExpectedAggregateKw => |x| return x.token,
- .UnattachedDocComment => |x| return x.token,
- .ExpectedEqOrSemi => |x| return x.token,
- .ExpectedSemiOrLBrace => |x| return x.token,
- .ExpectedSemiOrElse => |x| return x.token,
- .ExpectedLabelOrLBrace => |x| return x.token,
- .ExpectedLBrace => |x| return x.token,
- .ExpectedColonOrRParen => |x| return x.token,
- .ExpectedLabelable => |x| return x.token,
- .ExpectedInlinable => |x| return x.token,
- .ExpectedAsmOutputReturnOrType => |x| return x.token,
- .ExpectedCall => |x| return x.node.firstToken(),
- .ExpectedCallOrFnProto => |x| return x.node.firstToken(),
- .ExpectedSliceOrRBracket => |x| return x.token,
- .ExtraAlignQualifier => |x| return x.token,
- .ExtraConstQualifier => |x| return x.token,
- .ExtraVolatileQualifier => |x| return x.token,
- .ExtraAllowZeroQualifier => |x| return x.token,
- .ExpectedTypeExpr => |x| return x.token,
- .ExpectedPrimaryTypeExpr => |x| return x.token,
- .ExpectedParamType => |x| return x.token,
- .ExpectedExpr => |x| return x.token,
- .ExpectedPrimaryExpr => |x| return x.token,
- .ExpectedToken => |x| return x.token,
- .ExpectedCommaOrEnd => |x| return x.token,
- .ExpectedParamList => |x| return x.token,
- .ExpectedPayload => |x| return x.token,
- .ExpectedBlockOrAssignment => |x| return x.token,
- .ExpectedBlockOrExpression => |x| return x.token,
- .ExpectedExprOrAssignment => |x| return x.token,
- .ExpectedPrefixExpr => |x| return x.token,
- .ExpectedLoopExpr => |x| return x.token,
- .ExpectedDerefOrUnwrap => |x| return x.token,
- .ExpectedSuffixOp => |x| return x.token,
- .ExpectedBlockOrField => |x| return x.token,
- .DeclBetweenFields => |x| return x.token,
- .InvalidAnd => |x| return x.token,
- .AsteriskAfterPointerDereference => |x| return x.token,
- }
+ pub fn tokensOnSameLine(tree: Tree, token1: TokenIndex, token2: TokenIndex) bool {
+ const token_starts = tree.tokens.items(.start);
+ const source = tree.source[token_starts[token1]..token_starts[token2]];
+ return mem.indexOfScalar(u8, source, '\n') == null;
}
- pub const InvalidToken = SingleTokenError("Invalid token '{s}'");
- pub const ExpectedContainerMembers = SingleTokenError("Expected test, comptime, var decl, or container field, found '{s}'");
- pub const ExpectedStringLiteral = SingleTokenError("Expected string literal, found '{s}'");
- pub const ExpectedIntegerLiteral = SingleTokenError("Expected integer literal, found '{s}'");
- pub const ExpectedIdentifier = SingleTokenError("Expected identifier, found '{s}'");
- pub const ExpectedStatement = SingleTokenError("Expected statement, found '{s}'");
- pub const ExpectedVarDeclOrFn = SingleTokenError("Expected variable declaration or function, found '{s}'");
- pub const ExpectedVarDecl = SingleTokenError("Expected variable declaration, found '{s}'");
- pub const ExpectedFn = SingleTokenError("Expected function, found '{s}'");
- pub const ExpectedReturnType = SingleTokenError("Expected 'var' or return type expression, found '{s}'");
- pub const ExpectedAggregateKw = SingleTokenError("Expected '" ++ Token.Id.Keyword_struct.symbol() ++ "', '" ++ Token.Id.Keyword_union.symbol() ++ "', '" ++ Token.Id.Keyword_enum.symbol() ++ "', or '" ++ Token.Id.Keyword_opaque.symbol() ++ "', found '{s}'");
- pub const ExpectedEqOrSemi = SingleTokenError("Expected '=' or ';', found '{s}'");
- pub const ExpectedSemiOrLBrace = SingleTokenError("Expected ';' or '{{', found '{s}'");
- pub const ExpectedSemiOrElse = SingleTokenError("Expected ';' or 'else', found '{s}'");
- pub const ExpectedLBrace = SingleTokenError("Expected '{{', found '{s}'");
- pub const ExpectedLabelOrLBrace = SingleTokenError("Expected label or '{{', found '{s}'");
- pub const ExpectedColonOrRParen = SingleTokenError("Expected ':' or ')', found '{s}'");
- pub const ExpectedLabelable = SingleTokenError("Expected 'while', 'for', 'inline', 'suspend', or '{{', found '{s}'");
- pub const ExpectedInlinable = SingleTokenError("Expected 'while' or 'for', found '{s}'");
- pub const ExpectedAsmOutputReturnOrType = SingleTokenError("Expected '->' or '" ++ Token.Id.Identifier.symbol() ++ "', found '{s}'");
- pub const ExpectedSliceOrRBracket = SingleTokenError("Expected ']' or '..', found '{s}'");
- pub const ExpectedTypeExpr = SingleTokenError("Expected type expression, found '{s}'");
- pub const ExpectedPrimaryTypeExpr = SingleTokenError("Expected primary type expression, found '{s}'");
- pub const ExpectedExpr = SingleTokenError("Expected expression, found '{s}'");
- pub const ExpectedPrimaryExpr = SingleTokenError("Expected primary expression, found '{s}'");
- pub const ExpectedParamList = SingleTokenError("Expected parameter list, found '{s}'");
- pub const ExpectedPayload = SingleTokenError("Expected loop payload, found '{s}'");
- pub const ExpectedBlockOrAssignment = SingleTokenError("Expected block or assignment, found '{s}'");
- pub const ExpectedBlockOrExpression = SingleTokenError("Expected block or expression, found '{s}'");
- pub const ExpectedExprOrAssignment = SingleTokenError("Expected expression or assignment, found '{s}'");
- pub const ExpectedPrefixExpr = SingleTokenError("Expected prefix expression, found '{s}'");
- pub const ExpectedLoopExpr = SingleTokenError("Expected loop expression, found '{s}'");
- pub const ExpectedDerefOrUnwrap = SingleTokenError("Expected pointer dereference or optional unwrap, found '{s}'");
- pub const ExpectedSuffixOp = SingleTokenError("Expected pointer dereference, optional unwrap, or field access, found '{s}'");
- pub const ExpectedBlockOrField = SingleTokenError("Expected block or field, found '{s}'");
-
- pub const ExpectedParamType = SimpleError("Expected parameter type");
- pub const ExpectedPubItem = SimpleError("Expected function or variable declaration after pub");
- pub const UnattachedDocComment = SimpleError("Unattached documentation comment");
- pub const ExtraAlignQualifier = SimpleError("Extra align qualifier");
- pub const ExtraConstQualifier = SimpleError("Extra const qualifier");
- pub const ExtraVolatileQualifier = SimpleError("Extra volatile qualifier");
- pub const ExtraAllowZeroQualifier = SimpleError("Extra allowzero qualifier");
- pub const DeclBetweenFields = SimpleError("Declarations are not allowed between container fields");
- pub const InvalidAnd = SimpleError("`&&` is invalid. Note that `and` is boolean AND.");
- pub const AsteriskAfterPointerDereference = SimpleError("`.*` can't be followed by `*`. Are you missing a space?");
-
- pub const ExpectedCall = struct {
- node: *Node,
-
- pub fn render(self: *const ExpectedCall, tokens: []const Token.Id, stream: anytype) !void {
- return stream.print("expected " ++ @tagName(Node.Tag.Call) ++ ", found {s}", .{
- @tagName(self.node.tag),
- });
- }
- };
+ pub fn getNodeSource(tree: Tree, node: Node.Index) []const u8 {
+ const token_starts = tree.tokens.items(.start);
+ const first_token = tree.firstToken(node);
+ const last_token = tree.lastToken(node);
+ const start = token_starts[first_token];
+ const end = token_starts[last_token] + tree.tokenSlice(last_token).len;
+ return tree.source[start..end];
+ }
- pub const ExpectedCallOrFnProto = struct {
- node: *Node,
+ pub fn globalVarDecl(tree: Tree, node: Node.Index) full.VarDecl {
+ assert(tree.nodes.items(.tag)[node] == .global_var_decl);
+ const data = tree.nodes.items(.data)[node];
+ const extra = tree.extraData(data.lhs, Node.GlobalVarDecl);
+ return tree.fullVarDecl(.{
+ .type_node = extra.type_node,
+ .align_node = extra.align_node,
+ .section_node = extra.section_node,
+ .init_node = data.rhs,
+ .mut_token = tree.nodes.items(.main_token)[node],
+ });
+ }
- pub fn render(self: *const ExpectedCallOrFnProto, tokens: []const Token.Id, stream: anytype) !void {
- return stream.print("expected " ++ @tagName(Node.Tag.Call) ++ " or " ++
- @tagName(Node.Tag.FnProto) ++ ", found {s}", .{@tagName(self.node.tag)});
- }
- };
+ pub fn localVarDecl(tree: Tree, node: Node.Index) full.VarDecl {
+ assert(tree.nodes.items(.tag)[node] == .local_var_decl);
+ const data = tree.nodes.items(.data)[node];
+ const extra = tree.extraData(data.lhs, Node.LocalVarDecl);
+ return tree.fullVarDecl(.{
+ .type_node = extra.type_node,
+ .align_node = extra.align_node,
+ .section_node = 0,
+ .init_node = data.rhs,
+ .mut_token = tree.nodes.items(.main_token)[node],
+ });
+ }
- pub const ExpectedToken = struct {
- token: TokenIndex,
- expected_id: Token.Id,
+ pub fn simpleVarDecl(tree: Tree, node: Node.Index) full.VarDecl {
+ assert(tree.nodes.items(.tag)[node] == .simple_var_decl);
+ const data = tree.nodes.items(.data)[node];
+ return tree.fullVarDecl(.{
+ .type_node = data.lhs,
+ .align_node = 0,
+ .section_node = 0,
+ .init_node = data.rhs,
+ .mut_token = tree.nodes.items(.main_token)[node],
+ });
+ }
- pub fn render(self: *const ExpectedToken, tokens: []const Token.Id, stream: anytype) !void {
- const found_token = tokens[self.token];
- switch (found_token) {
- .Invalid => {
- return stream.print("expected '{s}', found invalid bytes", .{self.expected_id.symbol()});
- },
- else => {
- const token_name = found_token.symbol();
- return stream.print("expected '{s}', found '{s}'", .{ self.expected_id.symbol(), token_name });
- },
- }
- }
- };
+ pub fn alignedVarDecl(tree: Tree, node: Node.Index) full.VarDecl {
+ assert(tree.nodes.items(.tag)[node] == .aligned_var_decl);
+ const data = tree.nodes.items(.data)[node];
+ return tree.fullVarDecl(.{
+ .type_node = 0,
+ .align_node = data.lhs,
+ .section_node = 0,
+ .init_node = data.rhs,
+ .mut_token = tree.nodes.items(.main_token)[node],
+ });
+ }
- pub const ExpectedCommaOrEnd = struct {
- token: TokenIndex,
- end_id: Token.Id,
+ pub fn ifSimple(tree: Tree, node: Node.Index) full.If {
+ assert(tree.nodes.items(.tag)[node] == .if_simple);
+ const data = tree.nodes.items(.data)[node];
+ return tree.fullIf(.{
+ .cond_expr = data.lhs,
+ .then_expr = data.rhs,
+ .else_expr = 0,
+ .if_token = tree.nodes.items(.main_token)[node],
+ });
+ }
- pub fn render(self: *const ExpectedCommaOrEnd, tokens: []const Token.Id, stream: anytype) !void {
- const actual_token = tokens[self.token];
- return stream.print("expected ',' or '{s}', found '{s}'", .{
- self.end_id.symbol(),
- actual_token.symbol(),
- });
- }
- };
+ pub fn ifFull(tree: Tree, node: Node.Index) full.If {
+ assert(tree.nodes.items(.tag)[node] == .@"if");
+ const data = tree.nodes.items(.data)[node];
+ const extra = tree.extraData(data.rhs, Node.If);
+ return tree.fullIf(.{
+ .cond_expr = data.lhs,
+ .then_expr = extra.then_expr,
+ .else_expr = extra.else_expr,
+ .if_token = tree.nodes.items(.main_token)[node],
+ });
+ }
- fn SingleTokenError(comptime msg: []const u8) type {
- return struct {
- const ThisError = @This();
+ pub fn containerField(tree: Tree, node: Node.Index) full.ContainerField {
+ assert(tree.nodes.items(.tag)[node] == .container_field);
+ const data = tree.nodes.items(.data)[node];
+ const extra = tree.extraData(data.rhs, Node.ContainerField);
+ return tree.fullContainerField(.{
+ .name_token = tree.nodes.items(.main_token)[node],
+ .type_expr = data.lhs,
+ .value_expr = extra.value_expr,
+ .align_expr = extra.align_expr,
+ });
+ }
- token: TokenIndex,
+ pub fn containerFieldInit(tree: Tree, node: Node.Index) full.ContainerField {
+ assert(tree.nodes.items(.tag)[node] == .container_field_init);
+ const data = tree.nodes.items(.data)[node];
+ return tree.fullContainerField(.{
+ .name_token = tree.nodes.items(.main_token)[node],
+ .type_expr = data.lhs,
+ .value_expr = data.rhs,
+ .align_expr = 0,
+ });
+ }
- pub fn render(self: *const ThisError, tokens: []const Token.Id, stream: anytype) !void {
- const actual_token = tokens[self.token];
- return stream.print(msg, .{actual_token.symbol()});
- }
- };
+ pub fn containerFieldAlign(tree: Tree, node: Node.Index) full.ContainerField {
+ assert(tree.nodes.items(.tag)[node] == .container_field_align);
+ const data = tree.nodes.items(.data)[node];
+ return tree.fullContainerField(.{
+ .name_token = tree.nodes.items(.main_token)[node],
+ .type_expr = data.lhs,
+ .value_expr = 0,
+ .align_expr = data.rhs,
+ });
}
- fn SimpleError(comptime msg: []const u8) type {
- return struct {
- const ThisError = @This();
+ pub fn fnProtoSimple(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.FnProto {
+ assert(tree.nodes.items(.tag)[node] == .fn_proto_simple);
+ const data = tree.nodes.items(.data)[node];
+ buffer[0] = data.lhs;
+ const params = if (data.lhs == 0) buffer[0..0] else buffer[0..1];
+ return tree.fullFnProto(.{
+ .fn_token = tree.nodes.items(.main_token)[node],
+ .return_type = data.rhs,
+ .params = params,
+ .align_expr = 0,
+ .section_expr = 0,
+ .callconv_expr = 0,
+ });
+ }
- token: TokenIndex,
+ pub fn fnProtoMulti(tree: Tree, node: Node.Index) full.FnProto {
+ assert(tree.nodes.items(.tag)[node] == .fn_proto_multi);
+ const data = tree.nodes.items(.data)[node];
+ const params_range = tree.extraData(data.lhs, Node.SubRange);
+ const params = tree.extra_data[params_range.start..params_range.end];
+ return tree.fullFnProto(.{
+ .fn_token = tree.nodes.items(.main_token)[node],
+ .return_type = data.rhs,
+ .params = params,
+ .align_expr = 0,
+ .section_expr = 0,
+ .callconv_expr = 0,
+ });
+ }
- pub fn render(self: *const ThisError, tokens: []const Token.Id, stream: anytype) !void {
- return stream.writeAll(msg);
- }
- };
+ pub fn fnProtoOne(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.FnProto {
+ assert(tree.nodes.items(.tag)[node] == .fn_proto_one);
+ const data = tree.nodes.items(.data)[node];
+ const extra = tree.extraData(data.lhs, Node.FnProtoOne);
+ buffer[0] = extra.param;
+ const params = if (extra.param == 0) buffer[0..0] else buffer[0..1];
+ return tree.fullFnProto(.{
+ .fn_token = tree.nodes.items(.main_token)[node],
+ .return_type = data.rhs,
+ .params = params,
+ .align_expr = extra.align_expr,
+ .section_expr = extra.section_expr,
+ .callconv_expr = extra.callconv_expr,
+ });
}
-};
-pub const Node = struct {
- tag: Tag,
+ pub fn fnProto(tree: Tree, node: Node.Index) full.FnProto {
+ assert(tree.nodes.items(.tag)[node] == .fn_proto);
+ const data = tree.nodes.items(.data)[node];
+ const extra = tree.extraData(data.lhs, Node.FnProto);
+ const params = tree.extra_data[extra.params_start..extra.params_end];
+ return tree.fullFnProto(.{
+ .fn_token = tree.nodes.items(.main_token)[node],
+ .return_type = data.rhs,
+ .params = params,
+ .align_expr = extra.align_expr,
+ .section_expr = extra.section_expr,
+ .callconv_expr = extra.callconv_expr,
+ });
+ }
- pub const Tag = enum {
- // Top level
- Root,
- Use,
- TestDecl,
-
- // Statements
- VarDecl,
- Defer,
-
- // Infix operators
- Catch,
-
- // SimpleInfixOp
- Add,
- AddWrap,
- ArrayCat,
- ArrayMult,
- Assign,
- AssignBitAnd,
- AssignBitOr,
- AssignBitShiftLeft,
- AssignBitShiftRight,
- AssignBitXor,
- AssignDiv,
- AssignSub,
- AssignSubWrap,
- AssignMod,
- AssignAdd,
- AssignAddWrap,
- AssignMul,
- AssignMulWrap,
- BangEqual,
- BitAnd,
- BitOr,
- BitShiftLeft,
- BitShiftRight,
- BitXor,
- BoolAnd,
- BoolOr,
- Div,
- EqualEqual,
- ErrorUnion,
- GreaterOrEqual,
- GreaterThan,
- LessOrEqual,
- LessThan,
- MergeErrorSets,
- Mod,
- Mul,
- MulWrap,
- Period,
- Range,
- Sub,
- SubWrap,
- OrElse,
-
- // SimplePrefixOp
- AddressOf,
- Await,
- BitNot,
- BoolNot,
- OptionalType,
- Negation,
- NegationWrap,
- Resume,
- Try,
-
- ArrayType,
- /// ArrayType but has a sentinel node.
- ArrayTypeSentinel,
- PtrType,
- SliceType,
- /// `a[b..c]`
- Slice,
- /// `a.*`
- Deref,
- /// `a.?`
- UnwrapOptional,
- /// `a[b]`
- ArrayAccess,
- /// `T{a, b}`
- ArrayInitializer,
- /// ArrayInitializer but with `.` instead of a left-hand-side operand.
- ArrayInitializerDot,
- /// `T{.a = b}`
- StructInitializer,
- /// StructInitializer but with `.` instead of a left-hand-side operand.
- StructInitializerDot,
- /// `foo()`
- Call,
-
- // Control flow
- Switch,
- While,
- For,
- If,
- Suspend,
- Continue,
- Break,
- Return,
-
- // Type expressions
- AnyType,
- ErrorType,
- FnProto,
- AnyFrameType,
-
- // Primary expressions
- IntegerLiteral,
- FloatLiteral,
- EnumLiteral,
- StringLiteral,
- MultilineStringLiteral,
- CharLiteral,
- BoolLiteral,
- NullLiteral,
- UndefinedLiteral,
- Unreachable,
- Identifier,
- GroupedExpression,
- BuiltinCall,
- ErrorSetDecl,
- ContainerDecl,
- Asm,
- Comptime,
- Nosuspend,
- Block,
- LabeledBlock,
-
- // Misc
- DocComment,
- SwitchCase, // TODO make this not a child of AST Node
- SwitchElse, // TODO make this not a child of AST Node
- Else, // TODO make this not a child of AST Node
- Payload, // TODO make this not a child of AST Node
- PointerPayload, // TODO make this not a child of AST Node
- PointerIndexPayload, // TODO make this not a child of AST Node
- ContainerField,
- ErrorTag, // TODO make this not a child of AST Node
- FieldInitializer, // TODO make this not a child of AST Node
-
- pub fn Type(tag: Tag) type {
- return switch (tag) {
- .Root => Root,
- .Use => Use,
- .TestDecl => TestDecl,
- .VarDecl => VarDecl,
- .Defer => Defer,
- .Catch => Catch,
-
- .Add,
- .AddWrap,
- .ArrayCat,
- .ArrayMult,
- .Assign,
- .AssignBitAnd,
- .AssignBitOr,
- .AssignBitShiftLeft,
- .AssignBitShiftRight,
- .AssignBitXor,
- .AssignDiv,
- .AssignSub,
- .AssignSubWrap,
- .AssignMod,
- .AssignAdd,
- .AssignAddWrap,
- .AssignMul,
- .AssignMulWrap,
- .BangEqual,
- .BitAnd,
- .BitOr,
- .BitShiftLeft,
- .BitShiftRight,
- .BitXor,
- .BoolAnd,
- .BoolOr,
- .Div,
- .EqualEqual,
- .ErrorUnion,
- .GreaterOrEqual,
- .GreaterThan,
- .LessOrEqual,
- .LessThan,
- .MergeErrorSets,
- .Mod,
- .Mul,
- .MulWrap,
- .Period,
- .Range,
- .Sub,
- .SubWrap,
- .OrElse,
- => SimpleInfixOp,
-
- .AddressOf,
- .Await,
- .BitNot,
- .BoolNot,
- .OptionalType,
- .Negation,
- .NegationWrap,
- .Resume,
- .Try,
- => SimplePrefixOp,
-
- .Identifier,
- .BoolLiteral,
- .NullLiteral,
- .UndefinedLiteral,
- .Unreachable,
- .AnyType,
- .ErrorType,
- .IntegerLiteral,
- .FloatLiteral,
- .StringLiteral,
- .CharLiteral,
- => OneToken,
-
- .Continue,
- .Break,
- .Return,
- => ControlFlowExpression,
-
- .ArrayType => ArrayType,
- .ArrayTypeSentinel => ArrayTypeSentinel,
-
- .PtrType => PtrType,
- .SliceType => SliceType,
- .Slice => Slice,
- .Deref, .UnwrapOptional => SimpleSuffixOp,
- .ArrayAccess => ArrayAccess,
-
- .ArrayInitializer => ArrayInitializer,
- .ArrayInitializerDot => ArrayInitializerDot,
-
- .StructInitializer => StructInitializer,
- .StructInitializerDot => StructInitializerDot,
-
- .Call => Call,
- .Switch => Switch,
- .While => While,
- .For => For,
- .If => If,
- .Suspend => Suspend,
- .FnProto => FnProto,
- .AnyFrameType => AnyFrameType,
- .EnumLiteral => EnumLiteral,
- .MultilineStringLiteral => MultilineStringLiteral,
- .GroupedExpression => GroupedExpression,
- .BuiltinCall => BuiltinCall,
- .ErrorSetDecl => ErrorSetDecl,
- .ContainerDecl => ContainerDecl,
- .Asm => Asm,
- .Comptime => Comptime,
- .Nosuspend => Nosuspend,
- .Block => Block,
- .LabeledBlock => LabeledBlock,
- .DocComment => DocComment,
- .SwitchCase => SwitchCase,
- .SwitchElse => SwitchElse,
- .Else => Else,
- .Payload => Payload,
- .PointerPayload => PointerPayload,
- .PointerIndexPayload => PointerIndexPayload,
- .ContainerField => ContainerField,
- .ErrorTag => ErrorTag,
- .FieldInitializer => FieldInitializer,
- };
- }
+ pub fn structInitOne(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.StructInit {
+ assert(tree.nodes.items(.tag)[node] == .struct_init_one or
+ tree.nodes.items(.tag)[node] == .struct_init_one_comma);
+ const data = tree.nodes.items(.data)[node];
+ buffer[0] = data.rhs;
+ const fields = if (data.rhs == 0) buffer[0..0] else buffer[0..1];
+ return tree.fullStructInit(.{
+ .lbrace = tree.nodes.items(.main_token)[node],
+ .fields = fields,
+ .type_expr = data.lhs,
+ });
+ }
- pub fn isBlock(tag: Tag) bool {
- return switch (tag) {
- .Block, .LabeledBlock => true,
- else => false,
- };
- }
- };
+ pub fn structInitDotTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) full.StructInit {
+ assert(tree.nodes.items(.tag)[node] == .struct_init_dot_two or
+ tree.nodes.items(.tag)[node] == .struct_init_dot_two_comma);
+ const data = tree.nodes.items(.data)[node];
+ buffer.* = .{ data.lhs, data.rhs };
+ const fields = if (data.rhs != 0)
+ buffer[0..2]
+ else if (data.lhs != 0)
+ buffer[0..1]
+ else
+ buffer[0..0];
+ return tree.fullStructInit(.{
+ .lbrace = tree.nodes.items(.main_token)[node],
+ .fields = fields,
+ .type_expr = 0,
+ });
+ }
- /// Prefer `castTag` to this.
- pub fn cast(base: *Node, comptime T: type) ?*T {
- if (std.meta.fieldInfo(T, .base).default_value) |default_base| {
- return base.castTag(default_base.tag);
- }
- inline for (@typeInfo(Tag).Enum.fields) |field| {
- const tag = @intToEnum(Tag, field.value);
- if (base.tag == tag) {
- if (T == tag.Type()) {
- return @fieldParentPtr(T, "base", base);
- }
- return null;
- }
- }
- unreachable;
+ pub fn structInitDot(tree: Tree, node: Node.Index) full.StructInit {
+ assert(tree.nodes.items(.tag)[node] == .struct_init_dot or
+ tree.nodes.items(.tag)[node] == .struct_init_dot_comma);
+ const data = tree.nodes.items(.data)[node];
+ return tree.fullStructInit(.{
+ .lbrace = tree.nodes.items(.main_token)[node],
+ .fields = tree.extra_data[data.lhs..data.rhs],
+ .type_expr = 0,
+ });
}
- pub fn castTag(base: *Node, comptime tag: Tag) ?*tag.Type() {
- if (base.tag == tag) {
- return @fieldParentPtr(tag.Type(), "base", base);
- }
- return null;
+ pub fn structInit(tree: Tree, node: Node.Index) full.StructInit {
+ assert(tree.nodes.items(.tag)[node] == .struct_init or
+ tree.nodes.items(.tag)[node] == .struct_init_comma);
+ const data = tree.nodes.items(.data)[node];
+ const fields_range = tree.extraData(data.rhs, Node.SubRange);
+ return tree.fullStructInit(.{
+ .lbrace = tree.nodes.items(.main_token)[node],
+ .fields = tree.extra_data[fields_range.start..fields_range.end],
+ .type_expr = data.lhs,
+ });
}
- pub fn iterate(base: *Node, index: usize) ?*Node {
- inline for (@typeInfo(Tag).Enum.fields) |field| {
- const tag = @intToEnum(Tag, field.value);
- if (base.tag == tag) {
- return @fieldParentPtr(tag.Type(), "base", base).iterate(index);
- }
- }
- unreachable;
+ pub fn arrayInitOne(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.ArrayInit {
+ assert(tree.nodes.items(.tag)[node] == .array_init_one or
+ tree.nodes.items(.tag)[node] == .array_init_one_comma);
+ const data = tree.nodes.items(.data)[node];
+ buffer[0] = data.rhs;
+ const elements = if (data.rhs == 0) buffer[0..0] else buffer[0..1];
+ return .{
+ .ast = .{
+ .lbrace = tree.nodes.items(.main_token)[node],
+ .elements = elements,
+ .type_expr = data.lhs,
+ },
+ };
}
- pub fn firstToken(base: *const Node) TokenIndex {
- inline for (@typeInfo(Tag).Enum.fields) |field| {
- const tag = @intToEnum(Tag, field.value);
- if (base.tag == tag) {
- return @fieldParentPtr(tag.Type(), "base", base).firstToken();
- }
- }
- unreachable;
+ pub fn arrayInitDotTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) full.ArrayInit {
+ assert(tree.nodes.items(.tag)[node] == .array_init_dot_two or
+ tree.nodes.items(.tag)[node] == .array_init_dot_two_comma);
+ const data = tree.nodes.items(.data)[node];
+ buffer.* = .{ data.lhs, data.rhs };
+ const elements = if (data.rhs != 0)
+ buffer[0..2]
+ else if (data.lhs != 0)
+ buffer[0..1]
+ else
+ buffer[0..0];
+ return .{
+ .ast = .{
+ .lbrace = tree.nodes.items(.main_token)[node],
+ .elements = elements,
+ .type_expr = 0,
+ },
+ };
}
- pub fn lastToken(base: *const Node) TokenIndex {
- inline for (@typeInfo(Tag).Enum.fields) |field| {
- const tag = @intToEnum(Tag, field.value);
- if (base.tag == tag) {
- return @fieldParentPtr(tag.Type(), "base", base).lastToken();
- }
- }
- unreachable;
- }
-
- pub fn requireSemiColon(base: *const Node) bool {
- var n = base;
- while (true) {
- switch (n.tag) {
- .Root,
- .ContainerField,
- .Block,
- .LabeledBlock,
- .Payload,
- .PointerPayload,
- .PointerIndexPayload,
- .Switch,
- .SwitchCase,
- .SwitchElse,
- .FieldInitializer,
- .DocComment,
- .TestDecl,
- => return false,
-
- .While => {
- const while_node = @fieldParentPtr(While, "base", n);
- if (while_node.@"else") |@"else"| {
- n = &@"else".base;
- continue;
- }
+ pub fn arrayInitDot(tree: Tree, node: Node.Index) full.ArrayInit {
+ assert(tree.nodes.items(.tag)[node] == .array_init_dot or
+ tree.nodes.items(.tag)[node] == .array_init_dot_comma);
+ const data = tree.nodes.items(.data)[node];
+ return .{
+ .ast = .{
+ .lbrace = tree.nodes.items(.main_token)[node],
+ .elements = tree.extra_data[data.lhs..data.rhs],
+ .type_expr = 0,
+ },
+ };
+ }
- return !while_node.body.tag.isBlock();
- },
- .For => {
- const for_node = @fieldParentPtr(For, "base", n);
- if (for_node.@"else") |@"else"| {
- n = &@"else".base;
- continue;
- }
+ pub fn arrayInit(tree: Tree, node: Node.Index) full.ArrayInit {
+ assert(tree.nodes.items(.tag)[node] == .array_init or
+ tree.nodes.items(.tag)[node] == .array_init_comma);
+ const data = tree.nodes.items(.data)[node];
+ const elem_range = tree.extraData(data.rhs, Node.SubRange);
+ return .{
+ .ast = .{
+ .lbrace = tree.nodes.items(.main_token)[node],
+ .elements = tree.extra_data[elem_range.start..elem_range.end],
+ .type_expr = data.lhs,
+ },
+ };
+ }
- return !for_node.body.tag.isBlock();
- },
- .If => {
- const if_node = @fieldParentPtr(If, "base", n);
- if (if_node.@"else") |@"else"| {
- n = &@"else".base;
- continue;
- }
+ pub fn arrayType(tree: Tree, node: Node.Index) full.ArrayType {
+ assert(tree.nodes.items(.tag)[node] == .array_type);
+ const data = tree.nodes.items(.data)[node];
+ return .{
+ .ast = .{
+ .lbracket = tree.nodes.items(.main_token)[node],
+ .elem_count = data.lhs,
+ .sentinel = null,
+ .elem_type = data.rhs,
+ },
+ };
+ }
- return !if_node.body.tag.isBlock();
- },
- .Else => {
- const else_node = @fieldParentPtr(Else, "base", n);
- n = else_node.body;
- continue;
- },
- .Defer => {
- const defer_node = @fieldParentPtr(Defer, "base", n);
- return !defer_node.expr.tag.isBlock();
- },
- .Comptime => {
- const comptime_node = @fieldParentPtr(Comptime, "base", n);
- return !comptime_node.expr.tag.isBlock();
- },
- .Suspend => {
- const suspend_node = @fieldParentPtr(Suspend, "base", n);
- if (suspend_node.body) |body| {
- return !body.tag.isBlock();
- }
+ pub fn arrayTypeSentinel(tree: Tree, node: Node.Index) full.ArrayType {
+ assert(tree.nodes.items(.tag)[node] == .array_type_sentinel);
+ const data = tree.nodes.items(.data)[node];
+ const extra = tree.extraData(data.rhs, Node.ArrayTypeSentinel);
+ return .{
+ .ast = .{
+ .lbracket = tree.nodes.items(.main_token)[node],
+ .elem_count = data.lhs,
+ .sentinel = extra.sentinel,
+ .elem_type = extra.elem_type,
+ },
+ };
+ }
- return true;
- },
- .Nosuspend => {
- const nosuspend_node = @fieldParentPtr(Nosuspend, "base", n);
- return !nosuspend_node.expr.tag.isBlock();
- },
- else => return true,
- }
- }
+ pub fn ptrTypeAligned(tree: Tree, node: Node.Index) full.PtrType {
+ assert(tree.nodes.items(.tag)[node] == .ptr_type_aligned);
+ const data = tree.nodes.items(.data)[node];
+ return tree.fullPtrType(.{
+ .main_token = tree.nodes.items(.main_token)[node],
+ .align_node = data.lhs,
+ .sentinel = 0,
+ .bit_range_start = 0,
+ .bit_range_end = 0,
+ .child_type = data.rhs,
+ });
}
- /// Asserts the node is a Block or LabeledBlock and returns the statements slice.
- pub fn blockStatements(base: *Node) []*Node {
- if (base.castTag(.Block)) |block| {
- return block.statements();
- } else if (base.castTag(.LabeledBlock)) |labeled_block| {
- return labeled_block.statements();
- } else {
- unreachable;
- }
+ pub fn ptrTypeSentinel(tree: Tree, node: Node.Index) full.PtrType {
+ assert(tree.nodes.items(.tag)[node] == .ptr_type_sentinel);
+ const data = tree.nodes.items(.data)[node];
+ return tree.fullPtrType(.{
+ .main_token = tree.nodes.items(.main_token)[node],
+ .align_node = 0,
+ .sentinel = data.lhs,
+ .bit_range_start = 0,
+ .bit_range_end = 0,
+ .child_type = data.rhs,
+ });
}
- pub fn findFirstWithId(self: *Node, id: Id) ?*Node {
- if (self.id == id) return self;
- var child_i: usize = 0;
- while (self.iterate(child_i)) |child| : (child_i += 1) {
- if (child.findFirstWithId(id)) |result| return result;
- }
- return null;
+ pub fn ptrType(tree: Tree, node: Node.Index) full.PtrType {
+ assert(tree.nodes.items(.tag)[node] == .ptr_type);
+ const data = tree.nodes.items(.data)[node];
+ const extra = tree.extraData(data.lhs, Node.PtrType);
+ return tree.fullPtrType(.{
+ .main_token = tree.nodes.items(.main_token)[node],
+ .align_node = extra.align_node,
+ .sentinel = extra.sentinel,
+ .bit_range_start = 0,
+ .bit_range_end = 0,
+ .child_type = data.rhs,
+ });
}
- pub fn dump(self: *Node, indent: usize) void {
- {
- var i: usize = 0;
- while (i < indent) : (i += 1) {
- std.debug.warn(" ", .{});
- }
- }
- std.debug.warn("{s}\n", .{@tagName(self.tag)});
+ pub fn ptrTypeBitRange(tree: Tree, node: Node.Index) full.PtrType {
+ assert(tree.nodes.items(.tag)[node] == .ptr_type_bit_range);
+ const data = tree.nodes.items(.data)[node];
+ const extra = tree.extraData(data.lhs, Node.PtrTypeBitRange);
+ return tree.fullPtrType(.{
+ .main_token = tree.nodes.items(.main_token)[node],
+ .align_node = extra.align_node,
+ .sentinel = extra.sentinel,
+ .bit_range_start = extra.bit_range_start,
+ .bit_range_end = extra.bit_range_end,
+ .child_type = data.rhs,
+ });
+ }
- var child_i: usize = 0;
- while (self.iterate(child_i)) |child| : (child_i += 1) {
- child.dump(indent + 2);
- }
+ pub fn sliceOpen(tree: Tree, node: Node.Index) full.Slice {
+ assert(tree.nodes.items(.tag)[node] == .slice_open);
+ const data = tree.nodes.items(.data)[node];
+ return .{
+ .ast = .{
+ .sliced = data.lhs,
+ .lbracket = tree.nodes.items(.main_token)[node],
+ .start = data.rhs,
+ .end = 0,
+ .sentinel = 0,
+ },
+ };
}
- /// The decls data follows this struct in memory as an array of Node pointers.
- pub const Root = struct {
- base: Node = Node{ .tag = .Root },
- eof_token: TokenIndex,
- decls_len: NodeIndex,
+ pub fn slice(tree: Tree, node: Node.Index) full.Slice {
+ assert(tree.nodes.items(.tag)[node] == .slice);
+ const data = tree.nodes.items(.data)[node];
+ const extra = tree.extraData(data.rhs, Node.Slice);
+ return .{
+ .ast = .{
+ .sliced = data.lhs,
+ .lbracket = tree.nodes.items(.main_token)[node],
+ .start = extra.start,
+ .end = extra.end,
+ .sentinel = 0,
+ },
+ };
+ }
- /// After this the caller must initialize the decls list.
- pub fn create(allocator: *mem.Allocator, decls_len: NodeIndex, eof_token: TokenIndex) !*Root {
- const bytes = try allocator.alignedAlloc(u8, @alignOf(Root), sizeInBytes(decls_len));
- const self = @ptrCast(*Root, bytes.ptr);
- self.* = .{
- .eof_token = eof_token,
- .decls_len = decls_len,
- };
- return self;
- }
+ pub fn sliceSentinel(tree: Tree, node: Node.Index) full.Slice {
+ assert(tree.nodes.items(.tag)[node] == .slice_sentinel);
+ const data = tree.nodes.items(.data)[node];
+ const extra = tree.extraData(data.rhs, Node.SliceSentinel);
+ return .{
+ .ast = .{
+ .sliced = data.lhs,
+ .lbracket = tree.nodes.items(.main_token)[node],
+ .start = extra.start,
+ .end = extra.end,
+ .sentinel = extra.sentinel,
+ },
+ };
+ }
- pub fn destroy(self: *Decl, allocator: *mem.Allocator) void {
- const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.decls_len)];
- allocator.free(bytes);
- }
+ pub fn containerDeclTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) full.ContainerDecl {
+ assert(tree.nodes.items(.tag)[node] == .container_decl_two or
+ tree.nodes.items(.tag)[node] == .container_decl_two_trailing);
+ const data = tree.nodes.items(.data)[node];
+ buffer.* = .{ data.lhs, data.rhs };
+ const members = if (data.rhs != 0)
+ buffer[0..2]
+ else if (data.lhs != 0)
+ buffer[0..1]
+ else
+ buffer[0..0];
+ return tree.fullContainerDecl(.{
+ .main_token = tree.nodes.items(.main_token)[node],
+ .enum_token = null,
+ .members = members,
+ .arg = 0,
+ });
+ }
- pub fn iterate(self: *const Root, index: usize) ?*Node {
- var i = index;
+ pub fn containerDecl(tree: Tree, node: Node.Index) full.ContainerDecl {
+ assert(tree.nodes.items(.tag)[node] == .container_decl or
+ tree.nodes.items(.tag)[node] == .container_decl_trailing);
+ const data = tree.nodes.items(.data)[node];
+ return tree.fullContainerDecl(.{
+ .main_token = tree.nodes.items(.main_token)[node],
+ .enum_token = null,
+ .members = tree.extra_data[data.lhs..data.rhs],
+ .arg = 0,
+ });
+ }
- if (i < self.decls_len) return self.declsConst()[i];
- return null;
- }
+ pub fn containerDeclArg(tree: Tree, node: Node.Index) full.ContainerDecl {
+ assert(tree.nodes.items(.tag)[node] == .container_decl_arg or
+ tree.nodes.items(.tag)[node] == .container_decl_arg_trailing);
+ const data = tree.nodes.items(.data)[node];
+ const members_range = tree.extraData(data.rhs, Node.SubRange);
+ return tree.fullContainerDecl(.{
+ .main_token = tree.nodes.items(.main_token)[node],
+ .enum_token = null,
+ .members = tree.extra_data[members_range.start..members_range.end],
+ .arg = data.lhs,
+ });
+ }
- pub fn decls(self: *Root) []*Node {
- const decls_start = @ptrCast([*]u8, self) + @sizeOf(Root);
- return @ptrCast([*]*Node, decls_start)[0..self.decls_len];
- }
+ pub fn taggedUnionTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) full.ContainerDecl {
+ assert(tree.nodes.items(.tag)[node] == .tagged_union_two or
+ tree.nodes.items(.tag)[node] == .tagged_union_two_trailing);
+ const data = tree.nodes.items(.data)[node];
+ buffer.* = .{ data.lhs, data.rhs };
+ const members = if (data.rhs != 0)
+ buffer[0..2]
+ else if (data.lhs != 0)
+ buffer[0..1]
+ else
+ buffer[0..0];
+ const main_token = tree.nodes.items(.main_token)[node];
+ return tree.fullContainerDecl(.{
+ .main_token = main_token,
+ .enum_token = main_token + 2, // union lparen enum
+ .members = members,
+ .arg = 0,
+ });
+ }
- pub fn declsConst(self: *const Root) []const *Node {
- const decls_start = @ptrCast([*]const u8, self) + @sizeOf(Root);
- return @ptrCast([*]const *Node, decls_start)[0..self.decls_len];
- }
+ pub fn taggedUnion(tree: Tree, node: Node.Index) full.ContainerDecl {
+ assert(tree.nodes.items(.tag)[node] == .tagged_union or
+ tree.nodes.items(.tag)[node] == .tagged_union_trailing);
+ const data = tree.nodes.items(.data)[node];
+ const main_token = tree.nodes.items(.main_token)[node];
+ return tree.fullContainerDecl(.{
+ .main_token = main_token,
+ .enum_token = main_token + 2, // union lparen enum
+ .members = tree.extra_data[data.lhs..data.rhs],
+ .arg = 0,
+ });
+ }
- pub fn firstToken(self: *const Root) TokenIndex {
- if (self.decls_len == 0) return self.eof_token;
- return self.declsConst()[0].firstToken();
- }
+ pub fn taggedUnionEnumTag(tree: Tree, node: Node.Index) full.ContainerDecl {
+ assert(tree.nodes.items(.tag)[node] == .tagged_union_enum_tag or
+ tree.nodes.items(.tag)[node] == .tagged_union_enum_tag_trailing);
+ const data = tree.nodes.items(.data)[node];
+ const members_range = tree.extraData(data.rhs, Node.SubRange);
+ const main_token = tree.nodes.items(.main_token)[node];
+ return tree.fullContainerDecl(.{
+ .main_token = main_token,
+ .enum_token = main_token + 2, // union lparen enum
+ .members = tree.extra_data[members_range.start..members_range.end],
+ .arg = data.lhs,
+ });
+ }
- pub fn lastToken(self: *const Root) TokenIndex {
- if (self.decls_len == 0) return self.eof_token;
- return self.declsConst()[self.decls_len - 1].lastToken();
- }
+ pub fn switchCaseOne(tree: Tree, node: Node.Index) full.SwitchCase {
+ const data = &tree.nodes.items(.data)[node];
+ const values: *[1]Node.Index = &data.lhs;
+ return tree.fullSwitchCase(.{
+ .values = if (data.lhs == 0) values[0..0] else values[0..1],
+ .arrow_token = tree.nodes.items(.main_token)[node],
+ .target_expr = data.rhs,
+ });
+ }
- fn sizeInBytes(decls_len: NodeIndex) usize {
- return @sizeOf(Root) + @sizeOf(*Node) * @as(usize, decls_len);
- }
- };
+ pub fn switchCase(tree: Tree, node: Node.Index) full.SwitchCase {
+ const data = tree.nodes.items(.data)[node];
+ const extra = tree.extraData(data.lhs, Node.SubRange);
+ return tree.fullSwitchCase(.{
+ .values = tree.extra_data[extra.start..extra.end],
+ .arrow_token = tree.nodes.items(.main_token)[node],
+ .target_expr = data.rhs,
+ });
+ }
- /// Trailed in memory by possibly many things, with each optional thing
- /// determined by a bit in `trailer_flags`.
- pub const VarDecl = struct {
- base: Node = Node{ .tag = .VarDecl },
- trailer_flags: TrailerFlags,
- mut_token: TokenIndex,
- name_token: TokenIndex,
- semicolon_token: TokenIndex,
-
- pub const TrailerFlags = std.meta.TrailerFlags(struct {
- doc_comments: *DocComment,
- visib_token: TokenIndex,
- thread_local_token: TokenIndex,
- eq_token: TokenIndex,
- comptime_token: TokenIndex,
- extern_export_token: TokenIndex,
- lib_name: *Node,
- type_node: *Node,
- align_node: *Node,
- section_node: *Node,
- init_node: *Node,
+ pub fn asmSimple(tree: Tree, node: Node.Index) full.Asm {
+ const data = tree.nodes.items(.data)[node];
+ return tree.fullAsm(.{
+ .asm_token = tree.nodes.items(.main_token)[node],
+ .template = data.lhs,
+ .items = &.{},
+ .rparen = data.rhs,
});
+ }
- pub fn getDocComments(self: *const VarDecl) ?*DocComment {
- return self.getTrailer(.doc_comments);
- }
+ pub fn asmFull(tree: Tree, node: Node.Index) full.Asm {
+ const data = tree.nodes.items(.data)[node];
+ const extra = tree.extraData(data.rhs, Node.Asm);
+ return tree.fullAsm(.{
+ .asm_token = tree.nodes.items(.main_token)[node],
+ .template = data.lhs,
+ .items = tree.extra_data[extra.items_start..extra.items_end],
+ .rparen = extra.rparen,
+ });
+ }
- pub fn setDocComments(self: *VarDecl, value: *DocComment) void {
- self.setTrailer(.doc_comments, value);
- }
+ pub fn whileSimple(tree: Tree, node: Node.Index) full.While {
+ const data = tree.nodes.items(.data)[node];
+ return tree.fullWhile(.{
+ .while_token = tree.nodes.items(.main_token)[node],
+ .cond_expr = data.lhs,
+ .cont_expr = 0,
+ .then_expr = data.rhs,
+ .else_expr = 0,
+ });
+ }
- pub fn getVisibToken(self: *const VarDecl) ?TokenIndex {
- return self.getTrailer(.visib_token);
- }
+ pub fn whileCont(tree: Tree, node: Node.Index) full.While {
+ const data = tree.nodes.items(.data)[node];
+ const extra = tree.extraData(data.rhs, Node.WhileCont);
+ return tree.fullWhile(.{
+ .while_token = tree.nodes.items(.main_token)[node],
+ .cond_expr = data.lhs,
+ .cont_expr = extra.cont_expr,
+ .then_expr = extra.then_expr,
+ .else_expr = 0,
+ });
+ }
- pub fn setVisibToken(self: *VarDecl, value: TokenIndex) void {
- self.setTrailer(.visib_token, value);
- }
+ pub fn whileFull(tree: Tree, node: Node.Index) full.While {
+ const data = tree.nodes.items(.data)[node];
+ const extra = tree.extraData(data.rhs, Node.While);
+ return tree.fullWhile(.{
+ .while_token = tree.nodes.items(.main_token)[node],
+ .cond_expr = data.lhs,
+ .cont_expr = extra.cont_expr,
+ .then_expr = extra.then_expr,
+ .else_expr = extra.else_expr,
+ });
+ }
- pub fn getThreadLocalToken(self: *const VarDecl) ?TokenIndex {
- return self.getTrailer(.thread_local_token);
- }
+ pub fn forSimple(tree: Tree, node: Node.Index) full.While {
+ const data = tree.nodes.items(.data)[node];
+ return tree.fullWhile(.{
+ .while_token = tree.nodes.items(.main_token)[node],
+ .cond_expr = data.lhs,
+ .cont_expr = 0,
+ .then_expr = data.rhs,
+ .else_expr = 0,
+ });
+ }
- pub fn setThreadLocalToken(self: *VarDecl, value: TokenIndex) void {
- self.setTrailer(.thread_local_token, value);
- }
+ pub fn forFull(tree: Tree, node: Node.Index) full.While {
+ const data = tree.nodes.items(.data)[node];
+ const extra = tree.extraData(data.rhs, Node.If);
+ return tree.fullWhile(.{
+ .while_token = tree.nodes.items(.main_token)[node],
+ .cond_expr = data.lhs,
+ .cont_expr = 0,
+ .then_expr = extra.then_expr,
+ .else_expr = extra.else_expr,
+ });
+ }
- pub fn getEqToken(self: *const VarDecl) ?TokenIndex {
- return self.getTrailer(.eq_token);
- }
+ pub fn callOne(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.Call {
+ const data = tree.nodes.items(.data)[node];
+ buffer.* = .{data.rhs};
+ const params = if (data.rhs != 0) buffer[0..1] else buffer[0..0];
+ return tree.fullCall(.{
+ .lparen = tree.nodes.items(.main_token)[node],
+ .fn_expr = data.lhs,
+ .params = params,
+ });
+ }
- pub fn setEqToken(self: *VarDecl, value: TokenIndex) void {
- self.setTrailer(.eq_token, value);
- }
+ pub fn callFull(tree: Tree, node: Node.Index) full.Call {
+ const data = tree.nodes.items(.data)[node];
+ const extra = tree.extraData(data.rhs, Node.SubRange);
+ return tree.fullCall(.{
+ .lparen = tree.nodes.items(.main_token)[node],
+ .fn_expr = data.lhs,
+ .params = tree.extra_data[extra.start..extra.end],
+ });
+ }
- pub fn getComptimeToken(self: *const VarDecl) ?TokenIndex {
- return self.getTrailer(.comptime_token);
+ fn fullVarDecl(tree: Tree, info: full.VarDecl.Ast) full.VarDecl {
+ const token_tags = tree.tokens.items(.tag);
+ var result: full.VarDecl = .{
+ .ast = info,
+ .visib_token = null,
+ .extern_export_token = null,
+ .lib_name = null,
+ .threadlocal_token = null,
+ .comptime_token = null,
+ };
+ var i = info.mut_token;
+ while (i > 0) {
+ i -= 1;
+ switch (token_tags[i]) {
+ .keyword_extern, .keyword_export => result.extern_export_token = i,
+ .keyword_comptime => result.comptime_token = i,
+ .keyword_pub => result.visib_token = i,
+ .keyword_threadlocal => result.threadlocal_token = i,
+ .string_literal => result.lib_name = i,
+ else => break,
+ }
}
+ return result;
+ }
- pub fn setComptimeToken(self: *VarDecl, value: TokenIndex) void {
- self.setTrailer(.comptime_token, value);
+ fn fullIf(tree: Tree, info: full.If.Ast) full.If {
+ const token_tags = tree.tokens.items(.tag);
+ var result: full.If = .{
+ .ast = info,
+ .payload_token = null,
+ .error_token = null,
+ .else_token = undefined,
+ };
+ // if (cond_expr) |x|
+ // ^ ^
+ const payload_pipe = tree.lastToken(info.cond_expr) + 2;
+ if (token_tags[payload_pipe] == .pipe) {
+ result.payload_token = payload_pipe + 1;
}
-
- pub fn getExternExportToken(self: *const VarDecl) ?TokenIndex {
- return self.getTrailer(.extern_export_token);
+ if (info.else_expr != 0) {
+ // then_expr else |x|
+ // ^ ^
+ result.else_token = tree.lastToken(info.then_expr) + 1;
+ if (token_tags[result.else_token + 1] == .pipe) {
+ result.error_token = result.else_token + 2;
+ }
}
+ return result;
+ }
- pub fn setExternExportToken(self: *VarDecl, value: TokenIndex) void {
- self.setTrailer(.extern_export_token, value);
+ fn fullContainerField(tree: Tree, info: full.ContainerField.Ast) full.ContainerField {
+ const token_tags = tree.tokens.items(.tag);
+ var result: full.ContainerField = .{
+ .ast = info,
+ .comptime_token = null,
+ };
+ // comptime name: type = init,
+ // ^
+ if (info.name_token > 0 and token_tags[info.name_token - 1] == .keyword_comptime) {
+ result.comptime_token = info.name_token - 1;
}
+ return result;
+ }
- pub fn getLibName(self: *const VarDecl) ?*Node {
- return self.getTrailer(.lib_name);
- }
-
- pub fn setLibName(self: *VarDecl, value: *Node) void {
- self.setTrailer(.lib_name, value);
- }
-
- pub fn getTypeNode(self: *const VarDecl) ?*Node {
- return self.getTrailer(.type_node);
- }
-
- pub fn setTypeNode(self: *VarDecl, value: *Node) void {
- self.setTrailer(.type_node, value);
- }
-
- pub fn getAlignNode(self: *const VarDecl) ?*Node {
- return self.getTrailer(.align_node);
- }
-
- pub fn setAlignNode(self: *VarDecl, value: *Node) void {
- self.setTrailer(.align_node, value);
- }
-
- pub fn getSectionNode(self: *const VarDecl) ?*Node {
- return self.getTrailer(.section_node);
- }
-
- pub fn setSectionNode(self: *VarDecl, value: *Node) void {
- self.setTrailer(.section_node, value);
- }
-
- pub fn getInitNode(self: *const VarDecl) ?*Node {
- return self.getTrailer(.init_node);
- }
-
- pub fn setInitNode(self: *VarDecl, value: *Node) void {
- self.setTrailer(.init_node, value);
- }
-
- pub const RequiredFields = struct {
- mut_token: TokenIndex,
- name_token: TokenIndex,
- semicolon_token: TokenIndex,
- };
-
- fn getTrailer(self: *const VarDecl, comptime field: TrailerFlags.FieldEnum) ?TrailerFlags.Field(field) {
- const trailers_start = @ptrCast([*]const u8, self) + @sizeOf(VarDecl);
- return self.trailer_flags.get(trailers_start, field);
- }
-
- fn setTrailer(self: *VarDecl, comptime field: TrailerFlags.FieldEnum, value: TrailerFlags.Field(field)) void {
- const trailers_start = @ptrCast([*]u8, self) + @sizeOf(VarDecl);
- self.trailer_flags.set(trailers_start, field, value);
- }
-
- pub fn create(allocator: *mem.Allocator, required: RequiredFields, trailers: TrailerFlags.InitStruct) !*VarDecl {
- const trailer_flags = TrailerFlags.init(trailers);
- const bytes = try allocator.alignedAlloc(u8, @alignOf(VarDecl), sizeInBytes(trailer_flags));
- const var_decl = @ptrCast(*VarDecl, bytes.ptr);
- var_decl.* = .{
- .trailer_flags = trailer_flags,
- .mut_token = required.mut_token,
- .name_token = required.name_token,
- .semicolon_token = required.semicolon_token,
- };
- const trailers_start = bytes.ptr + @sizeOf(VarDecl);
- trailer_flags.setMany(trailers_start, trailers);
- return var_decl;
- }
-
- pub fn destroy(self: *VarDecl, allocator: *mem.Allocator) void {
- const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.trailer_flags)];
- allocator.free(bytes);
- }
-
- pub fn iterate(self: *const VarDecl, index: usize) ?*Node {
- var i = index;
-
- if (self.getTypeNode()) |type_node| {
- if (i < 1) return type_node;
- i -= 1;
- }
-
- if (self.getAlignNode()) |align_node| {
- if (i < 1) return align_node;
- i -= 1;
- }
-
- if (self.getSectionNode()) |section_node| {
- if (i < 1) return section_node;
- i -= 1;
- }
-
- if (self.getInitNode()) |init_node| {
- if (i < 1) return init_node;
- i -= 1;
- }
-
- return null;
- }
-
- pub fn firstToken(self: *const VarDecl) TokenIndex {
- if (self.getVisibToken()) |visib_token| return visib_token;
- if (self.getThreadLocalToken()) |thread_local_token| return thread_local_token;
- if (self.getComptimeToken()) |comptime_token| return comptime_token;
- if (self.getExternExportToken()) |extern_export_token| return extern_export_token;
- assert(self.getLibName() == null);
- return self.mut_token;
- }
-
- pub fn lastToken(self: *const VarDecl) TokenIndex {
- return self.semicolon_token;
- }
-
- fn sizeInBytes(trailer_flags: TrailerFlags) usize {
- return @sizeOf(VarDecl) + trailer_flags.sizeInBytes();
- }
- };
-
- pub const Use = struct {
- base: Node = Node{ .tag = .Use },
- doc_comments: ?*DocComment,
- visib_token: ?TokenIndex,
- use_token: TokenIndex,
- expr: *Node,
- semicolon_token: TokenIndex,
-
- pub fn iterate(self: *const Use, index: usize) ?*Node {
- var i = index;
-
- if (i < 1) return self.expr;
- i -= 1;
-
- return null;
- }
-
- pub fn firstToken(self: *const Use) TokenIndex {
- if (self.visib_token) |visib_token| return visib_token;
- return self.use_token;
- }
-
- pub fn lastToken(self: *const Use) TokenIndex {
- return self.semicolon_token;
- }
- };
-
- pub const ErrorSetDecl = struct {
- base: Node = Node{ .tag = .ErrorSetDecl },
- error_token: TokenIndex,
- rbrace_token: TokenIndex,
- decls_len: NodeIndex,
-
- /// After this the caller must initialize the decls list.
- pub fn alloc(allocator: *mem.Allocator, decls_len: NodeIndex) !*ErrorSetDecl {
- const bytes = try allocator.alignedAlloc(u8, @alignOf(ErrorSetDecl), sizeInBytes(decls_len));
- return @ptrCast(*ErrorSetDecl, bytes.ptr);
- }
-
- pub fn free(self: *ErrorSetDecl, allocator: *mem.Allocator) void {
- const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.decls_len)];
- allocator.free(bytes);
- }
-
- pub fn iterate(self: *const ErrorSetDecl, index: usize) ?*Node {
- var i = index;
-
- if (i < self.decls_len) return self.declsConst()[i];
- i -= self.decls_len;
-
- return null;
- }
-
- pub fn firstToken(self: *const ErrorSetDecl) TokenIndex {
- return self.error_token;
- }
-
- pub fn lastToken(self: *const ErrorSetDecl) TokenIndex {
- return self.rbrace_token;
- }
-
- pub fn decls(self: *ErrorSetDecl) []*Node {
- const decls_start = @ptrCast([*]u8, self) + @sizeOf(ErrorSetDecl);
- return @ptrCast([*]*Node, decls_start)[0..self.decls_len];
- }
-
- pub fn declsConst(self: *const ErrorSetDecl) []const *Node {
- const decls_start = @ptrCast([*]const u8, self) + @sizeOf(ErrorSetDecl);
- return @ptrCast([*]const *Node, decls_start)[0..self.decls_len];
- }
-
- fn sizeInBytes(decls_len: NodeIndex) usize {
- return @sizeOf(ErrorSetDecl) + @sizeOf(*Node) * @as(usize, decls_len);
- }
- };
-
- /// The fields and decls Node pointers directly follow this struct in memory.
- pub const ContainerDecl = struct {
- base: Node = Node{ .tag = .ContainerDecl },
- kind_token: TokenIndex,
- layout_token: ?TokenIndex,
- lbrace_token: TokenIndex,
- rbrace_token: TokenIndex,
- fields_and_decls_len: NodeIndex,
- init_arg_expr: InitArg,
-
- pub const InitArg = union(enum) {
- None,
- Enum: ?*Node,
- Type: *Node,
- };
-
- /// After this the caller must initialize the fields_and_decls list.
- pub fn alloc(allocator: *mem.Allocator, fields_and_decls_len: NodeIndex) !*ContainerDecl {
- const bytes = try allocator.alignedAlloc(u8, @alignOf(ContainerDecl), sizeInBytes(fields_and_decls_len));
- return @ptrCast(*ContainerDecl, bytes.ptr);
- }
-
- pub fn free(self: *ContainerDecl, allocator: *mem.Allocator) void {
- const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.fields_and_decls_len)];
- allocator.free(bytes);
- }
-
- pub fn iterate(self: *const ContainerDecl, index: usize) ?*Node {
- var i = index;
-
- switch (self.init_arg_expr) {
- .Type => |t| {
- if (i < 1) return t;
- i -= 1;
- },
- .None, .Enum => {},
- }
-
- if (i < self.fields_and_decls_len) return self.fieldsAndDeclsConst()[i];
- i -= self.fields_and_decls_len;
-
- return null;
- }
-
- pub fn firstToken(self: *const ContainerDecl) TokenIndex {
- if (self.layout_token) |layout_token| {
- return layout_token;
- }
- return self.kind_token;
- }
-
- pub fn lastToken(self: *const ContainerDecl) TokenIndex {
- return self.rbrace_token;
- }
-
- pub fn fieldsAndDecls(self: *ContainerDecl) []*Node {
- const decls_start = @ptrCast([*]u8, self) + @sizeOf(ContainerDecl);
- return @ptrCast([*]*Node, decls_start)[0..self.fields_and_decls_len];
- }
-
- pub fn fieldsAndDeclsConst(self: *const ContainerDecl) []const *Node {
- const decls_start = @ptrCast([*]const u8, self) + @sizeOf(ContainerDecl);
- return @ptrCast([*]const *Node, decls_start)[0..self.fields_and_decls_len];
- }
-
- fn sizeInBytes(fields_and_decls_len: NodeIndex) usize {
- return @sizeOf(ContainerDecl) + @sizeOf(*Node) * @as(usize, fields_and_decls_len);
- }
- };
-
- pub const ContainerField = struct {
- base: Node = Node{ .tag = .ContainerField },
- doc_comments: ?*DocComment,
- comptime_token: ?TokenIndex,
- name_token: TokenIndex,
- type_expr: ?*Node,
- value_expr: ?*Node,
- align_expr: ?*Node,
-
- pub fn iterate(self: *const ContainerField, index: usize) ?*Node {
- var i = index;
-
- if (self.type_expr) |type_expr| {
- if (i < 1) return type_expr;
- i -= 1;
- }
-
- if (self.align_expr) |align_expr| {
- if (i < 1) return align_expr;
- i -= 1;
- }
-
- if (self.value_expr) |value_expr| {
- if (i < 1) return value_expr;
- i -= 1;
- }
-
- return null;
- }
-
- pub fn firstToken(self: *const ContainerField) TokenIndex {
- return self.comptime_token orelse self.name_token;
- }
-
- pub fn lastToken(self: *const ContainerField) TokenIndex {
- if (self.value_expr) |value_expr| {
- return value_expr.lastToken();
- }
- if (self.align_expr) |align_expr| {
- // The expression refers to what's inside the parenthesis, the
- // last token is the closing one
- return align_expr.lastToken() + 1;
- }
- if (self.type_expr) |type_expr| {
- return type_expr.lastToken();
- }
-
- return self.name_token;
- }
- };
-
- pub const ErrorTag = struct {
- base: Node = Node{ .tag = .ErrorTag },
- doc_comments: ?*DocComment,
- name_token: TokenIndex,
-
- pub fn iterate(self: *const ErrorTag, index: usize) ?*Node {
- var i = index;
-
- if (self.doc_comments) |comments| {
- if (i < 1) return &comments.base;
- i -= 1;
- }
-
- return null;
- }
-
- pub fn firstToken(self: *const ErrorTag) TokenIndex {
- return self.name_token;
- }
-
- pub fn lastToken(self: *const ErrorTag) TokenIndex {
- return self.name_token;
- }
- };
-
- pub const OneToken = struct {
- base: Node,
- token: TokenIndex,
-
- pub fn iterate(self: *const OneToken, index: usize) ?*Node {
- return null;
- }
-
- pub fn firstToken(self: *const OneToken) TokenIndex {
- return self.token;
- }
-
- pub fn lastToken(self: *const OneToken) TokenIndex {
- return self.token;
- }
- };
-
- /// The params are directly after the FnProto in memory.
- /// Next, each optional thing determined by a bit in `trailer_flags`.
- pub const FnProto = struct {
- base: Node = Node{ .tag = .FnProto },
- trailer_flags: TrailerFlags,
- fn_token: TokenIndex,
- params_len: NodeIndex,
- return_type: ReturnType,
-
- pub const TrailerFlags = std.meta.TrailerFlags(struct {
- doc_comments: *DocComment,
- body_node: *Node,
- lib_name: *Node, // populated if this is an extern declaration
- align_expr: *Node, // populated if align(A) is present
- section_expr: *Node, // populated if linksection(A) is present
- callconv_expr: *Node, // populated if callconv(A) is present
- visib_token: TokenIndex,
- name_token: TokenIndex,
- var_args_token: TokenIndex,
- extern_export_inline_token: TokenIndex,
- is_extern_prototype: void, // TODO: Remove once extern fn rewriting is
- is_async: void, // TODO: remove once async fn rewriting is
- is_inline: void, // TODO: remove once inline fn rewriting is
- });
-
- pub const RequiredFields = struct {
- fn_token: TokenIndex,
- params_len: NodeIndex,
- return_type: ReturnType,
- };
-
- pub const ReturnType = union(enum) {
- Explicit: *Node,
- InferErrorSet: *Node,
- Invalid: TokenIndex,
- };
-
- pub const ParamDecl = struct {
- doc_comments: ?*DocComment,
- comptime_token: ?TokenIndex,
- noalias_token: ?TokenIndex,
- name_token: ?TokenIndex,
- param_type: ParamType,
-
- pub const ParamType = union(enum) {
- any_type: *Node,
- type_expr: *Node,
- };
-
- pub fn iterate(self: *const ParamDecl, index: usize) ?*Node {
- var i = index;
-
- if (i < 1) {
- switch (self.param_type) {
- .any_type, .type_expr => |node| return node,
- }
- }
- i -= 1;
-
- return null;
- }
-
- pub fn firstToken(self: *const ParamDecl) TokenIndex {
- if (self.comptime_token) |comptime_token| return comptime_token;
- if (self.noalias_token) |noalias_token| return noalias_token;
- if (self.name_token) |name_token| return name_token;
- switch (self.param_type) {
- .any_type, .type_expr => |node| return node.firstToken(),
- }
- }
-
- pub fn lastToken(self: *const ParamDecl) TokenIndex {
- switch (self.param_type) {
- .any_type, .type_expr => |node| return node.lastToken(),
- }
- }
- };
-
- /// For debugging purposes.
- pub fn dump(self: *const FnProto) void {
- const trailers_start = @alignCast(
- @alignOf(ParamDecl),
- @ptrCast([*]const u8, self) + @sizeOf(FnProto) + @sizeOf(ParamDecl) * self.params_len,
- );
- std.debug.print("{*} flags: {b} name_token: {s} {*} params_len: {d}\n", .{
- self,
- self.trailer_flags.bits,
- self.getNameToken(),
- self.trailer_flags.ptrConst(trailers_start, .name_token),
- self.params_len,
- });
- }
-
- pub fn getDocComments(self: *const FnProto) ?*DocComment {
- return self.getTrailer(.doc_comments);
- }
-
- pub fn setDocComments(self: *FnProto, value: *DocComment) void {
- self.setTrailer(.doc_comments, value);
- }
-
- pub fn getBodyNode(self: *const FnProto) ?*Node {
- return self.getTrailer(.body_node);
- }
-
- pub fn setBodyNode(self: *FnProto, value: *Node) void {
- self.setTrailer(.body_node, value);
- }
-
- pub fn getLibName(self: *const FnProto) ?*Node {
- return self.getTrailer(.lib_name);
- }
-
- pub fn setLibName(self: *FnProto, value: *Node) void {
- self.setTrailer(.lib_name, value);
- }
-
- pub fn getAlignExpr(self: *const FnProto) ?*Node {
- return self.getTrailer(.align_expr);
- }
-
- pub fn setAlignExpr(self: *FnProto, value: *Node) void {
- self.setTrailer(.align_expr, value);
- }
-
- pub fn getSectionExpr(self: *const FnProto) ?*Node {
- return self.getTrailer(.section_expr);
- }
-
- pub fn setSectionExpr(self: *FnProto, value: *Node) void {
- self.setTrailer(.section_expr, value);
- }
-
- pub fn getCallconvExpr(self: *const FnProto) ?*Node {
- return self.getTrailer(.callconv_expr);
- }
-
- pub fn setCallconvExpr(self: *FnProto, value: *Node) void {
- self.setTrailer(.callconv_expr, value);
- }
-
- pub fn getVisibToken(self: *const FnProto) ?TokenIndex {
- return self.getTrailer(.visib_token);
- }
-
- pub fn setVisibToken(self: *FnProto, value: TokenIndex) void {
- self.setTrailer(.visib_token, value);
- }
-
- pub fn getNameToken(self: *const FnProto) ?TokenIndex {
- return self.getTrailer(.name_token);
- }
-
- pub fn setNameToken(self: *FnProto, value: TokenIndex) void {
- self.setTrailer(.name_token, value);
- }
-
- pub fn getVarArgsToken(self: *const FnProto) ?TokenIndex {
- return self.getTrailer(.var_args_token);
- }
-
- pub fn setVarArgsToken(self: *FnProto, value: TokenIndex) void {
- self.setTrailer(.var_args_token, value);
- }
-
- pub fn getExternExportInlineToken(self: *const FnProto) ?TokenIndex {
- return self.getTrailer(.extern_export_inline_token);
- }
-
- pub fn setExternExportInlineToken(self: *FnProto, value: TokenIndex) void {
- self.setTrailer(.extern_export_inline_token, value);
- }
-
- pub fn getIsExternPrototype(self: *const FnProto) ?void {
- return self.getTrailer(.is_extern_prototype);
- }
-
- pub fn setIsExternPrototype(self: *FnProto, value: void) void {
- self.setTrailer(.is_extern_prototype, value);
- }
-
- pub fn getIsAsync(self: *const FnProto) ?void {
- return self.getTrailer(.is_async);
- }
-
- pub fn setIsAsync(self: *FnProto, value: void) void {
- self.setTrailer(.is_async, value);
- }
-
- pub fn getIsInline(self: *const FnProto) ?void {
- return self.getTrailer(.is_inline);
- }
-
- pub fn setIsInline(self: *FnProto, value: void) void {
- self.setTrailer(.is_inline, value);
- }
-
- fn getTrailer(self: *const FnProto, comptime field: TrailerFlags.FieldEnum) ?TrailerFlags.Field(field) {
- const trailers_start = @alignCast(
- @alignOf(ParamDecl),
- @ptrCast([*]const u8, self) + @sizeOf(FnProto) + @sizeOf(ParamDecl) * self.params_len,
- );
- return self.trailer_flags.get(trailers_start, field);
- }
-
- fn setTrailer(self: *FnProto, comptime field: TrailerFlags.FieldEnum, value: TrailerFlags.Field(field)) void {
- const trailers_start = @alignCast(
- @alignOf(ParamDecl),
- @ptrCast([*]u8, self) + @sizeOf(FnProto) + @sizeOf(ParamDecl) * self.params_len,
- );
- self.trailer_flags.set(trailers_start, field, value);
- }
-
- /// After this the caller must initialize the params list.
- pub fn create(allocator: *mem.Allocator, required: RequiredFields, trailers: TrailerFlags.InitStruct) !*FnProto {
- const trailer_flags = TrailerFlags.init(trailers);
- const bytes = try allocator.alignedAlloc(u8, @alignOf(FnProto), sizeInBytes(
- required.params_len,
- trailer_flags,
- ));
- const fn_proto = @ptrCast(*FnProto, bytes.ptr);
- fn_proto.* = .{
- .trailer_flags = trailer_flags,
- .fn_token = required.fn_token,
- .params_len = required.params_len,
- .return_type = required.return_type,
- };
- const trailers_start = @alignCast(
- @alignOf(ParamDecl),
- bytes.ptr + @sizeOf(FnProto) + @sizeOf(ParamDecl) * required.params_len,
- );
- trailer_flags.setMany(trailers_start, trailers);
- return fn_proto;
- }
-
- pub fn destroy(self: *FnProto, allocator: *mem.Allocator) void {
- const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.params_len, self.trailer_flags)];
- allocator.free(bytes);
- }
-
- pub fn iterate(self: *const FnProto, index: usize) ?*Node {
- var i = index;
-
- if (self.getLibName()) |lib_name| {
- if (i < 1) return lib_name;
- i -= 1;
- }
-
- const params_len: usize = if (self.params_len == 0)
- 0
- else switch (self.paramsConst()[self.params_len - 1].param_type) {
- .any_type, .type_expr => self.params_len,
- };
- if (i < params_len) {
- switch (self.paramsConst()[i].param_type) {
- .any_type => |n| return n,
- .type_expr => |n| return n,
- }
- }
- i -= params_len;
-
- if (self.getAlignExpr()) |align_expr| {
- if (i < 1) return align_expr;
- i -= 1;
- }
-
- if (self.getSectionExpr()) |section_expr| {
- if (i < 1) return section_expr;
- i -= 1;
- }
-
- switch (self.return_type) {
- .Explicit, .InferErrorSet => |node| {
- if (i < 1) return node;
- i -= 1;
- },
- .Invalid => {},
- }
-
- if (self.getBodyNode()) |body_node| {
- if (i < 1) return body_node;
- i -= 1;
- }
-
- return null;
- }
-
- pub fn firstToken(self: *const FnProto) TokenIndex {
- if (self.getVisibToken()) |visib_token| return visib_token;
- if (self.getExternExportInlineToken()) |extern_export_inline_token| return extern_export_inline_token;
- assert(self.getLibName() == null);
- return self.fn_token;
- }
-
- pub fn lastToken(self: *const FnProto) TokenIndex {
- if (self.getBodyNode()) |body_node| return body_node.lastToken();
- switch (self.return_type) {
- .Explicit, .InferErrorSet => |node| return node.lastToken(),
- .Invalid => |tok| return tok,
- }
- }
-
- pub fn params(self: *FnProto) []ParamDecl {
- const params_start = @ptrCast([*]u8, self) + @sizeOf(FnProto);
- return @ptrCast([*]ParamDecl, params_start)[0..self.params_len];
- }
-
- pub fn paramsConst(self: *const FnProto) []const ParamDecl {
- const params_start = @ptrCast([*]const u8, self) + @sizeOf(FnProto);
- return @ptrCast([*]const ParamDecl, params_start)[0..self.params_len];
- }
-
- fn sizeInBytes(params_len: NodeIndex, trailer_flags: TrailerFlags) usize {
- return @sizeOf(FnProto) + @sizeOf(ParamDecl) * @as(usize, params_len) + trailer_flags.sizeInBytes();
- }
- };
-
- pub const AnyFrameType = struct {
- base: Node = Node{ .tag = .AnyFrameType },
- anyframe_token: TokenIndex,
- result: ?Result,
-
- pub const Result = struct {
- arrow_token: TokenIndex,
- return_type: *Node,
- };
-
- pub fn iterate(self: *const AnyFrameType, index: usize) ?*Node {
- var i = index;
-
- if (self.result) |result| {
- if (i < 1) return result.return_type;
- i -= 1;
- }
-
- return null;
- }
-
- pub fn firstToken(self: *const AnyFrameType) TokenIndex {
- return self.anyframe_token;
- }
-
- pub fn lastToken(self: *const AnyFrameType) TokenIndex {
- if (self.result) |result| return result.return_type.lastToken();
- return self.anyframe_token;
- }
- };
-
- /// The statements of the block follow Block directly in memory.
- pub const Block = struct {
- base: Node = Node{ .tag = .Block },
- statements_len: NodeIndex,
- lbrace: TokenIndex,
- rbrace: TokenIndex,
-
- /// After this the caller must initialize the statements list.
- pub fn alloc(allocator: *mem.Allocator, statements_len: NodeIndex) !*Block {
- const bytes = try allocator.alignedAlloc(u8, @alignOf(Block), sizeInBytes(statements_len));
- return @ptrCast(*Block, bytes.ptr);
- }
-
- pub fn free(self: *Block, allocator: *mem.Allocator) void {
- const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.statements_len)];
- allocator.free(bytes);
- }
-
- pub fn iterate(self: *const Block, index: usize) ?*Node {
- var i = index;
-
- if (i < self.statements_len) return self.statementsConst()[i];
- i -= self.statements_len;
-
- return null;
- }
-
- pub fn firstToken(self: *const Block) TokenIndex {
- return self.lbrace;
- }
-
- pub fn lastToken(self: *const Block) TokenIndex {
- return self.rbrace;
- }
-
- pub fn statements(self: *Block) []*Node {
- const decls_start = @ptrCast([*]u8, self) + @sizeOf(Block);
- return @ptrCast([*]*Node, decls_start)[0..self.statements_len];
- }
-
- pub fn statementsConst(self: *const Block) []const *Node {
- const decls_start = @ptrCast([*]const u8, self) + @sizeOf(Block);
- return @ptrCast([*]const *Node, decls_start)[0..self.statements_len];
- }
-
- fn sizeInBytes(statements_len: NodeIndex) usize {
- return @sizeOf(Block) + @sizeOf(*Node) * @as(usize, statements_len);
- }
- };
-
- /// The statements of the block follow LabeledBlock directly in memory.
- pub const LabeledBlock = struct {
- base: Node = Node{ .tag = .LabeledBlock },
- statements_len: NodeIndex,
- lbrace: TokenIndex,
- rbrace: TokenIndex,
- label: TokenIndex,
-
- /// After this the caller must initialize the statements list.
- pub fn alloc(allocator: *mem.Allocator, statements_len: NodeIndex) !*LabeledBlock {
- const bytes = try allocator.alignedAlloc(u8, @alignOf(LabeledBlock), sizeInBytes(statements_len));
- return @ptrCast(*LabeledBlock, bytes.ptr);
- }
-
- pub fn free(self: *LabeledBlock, allocator: *mem.Allocator) void {
- const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.statements_len)];
- allocator.free(bytes);
- }
-
- pub fn iterate(self: *const LabeledBlock, index: usize) ?*Node {
- var i = index;
-
- if (i < self.statements_len) return self.statementsConst()[i];
- i -= self.statements_len;
-
- return null;
- }
-
- pub fn firstToken(self: *const LabeledBlock) TokenIndex {
- return self.label;
- }
-
- pub fn lastToken(self: *const LabeledBlock) TokenIndex {
- return self.rbrace;
- }
-
- pub fn statements(self: *LabeledBlock) []*Node {
- const decls_start = @ptrCast([*]u8, self) + @sizeOf(LabeledBlock);
- return @ptrCast([*]*Node, decls_start)[0..self.statements_len];
- }
-
- pub fn statementsConst(self: *const LabeledBlock) []const *Node {
- const decls_start = @ptrCast([*]const u8, self) + @sizeOf(LabeledBlock);
- return @ptrCast([*]const *Node, decls_start)[0..self.statements_len];
- }
-
- fn sizeInBytes(statements_len: NodeIndex) usize {
- return @sizeOf(LabeledBlock) + @sizeOf(*Node) * @as(usize, statements_len);
- }
- };
-
- pub const Defer = struct {
- base: Node = Node{ .tag = .Defer },
- defer_token: TokenIndex,
- payload: ?*Node,
- expr: *Node,
-
- pub fn iterate(self: *const Defer, index: usize) ?*Node {
- var i = index;
-
- if (i < 1) return self.expr;
- i -= 1;
-
- return null;
- }
-
- pub fn firstToken(self: *const Defer) TokenIndex {
- return self.defer_token;
- }
-
- pub fn lastToken(self: *const Defer) TokenIndex {
- return self.expr.lastToken();
- }
- };
-
- pub const Comptime = struct {
- base: Node = Node{ .tag = .Comptime },
- doc_comments: ?*DocComment,
- comptime_token: TokenIndex,
- expr: *Node,
-
- pub fn iterate(self: *const Comptime, index: usize) ?*Node {
- var i = index;
-
- if (i < 1) return self.expr;
- i -= 1;
-
- return null;
- }
-
- pub fn firstToken(self: *const Comptime) TokenIndex {
- return self.comptime_token;
- }
-
- pub fn lastToken(self: *const Comptime) TokenIndex {
- return self.expr.lastToken();
- }
- };
-
- pub const Nosuspend = struct {
- base: Node = Node{ .tag = .Nosuspend },
- nosuspend_token: TokenIndex,
- expr: *Node,
-
- pub fn iterate(self: *const Nosuspend, index: usize) ?*Node {
- var i = index;
-
- if (i < 1) return self.expr;
- i -= 1;
-
- return null;
- }
-
- pub fn firstToken(self: *const Nosuspend) TokenIndex {
- return self.nosuspend_token;
- }
-
- pub fn lastToken(self: *const Nosuspend) TokenIndex {
- return self.expr.lastToken();
- }
- };
-
- pub const Payload = struct {
- base: Node = Node{ .tag = .Payload },
- lpipe: TokenIndex,
- error_symbol: *Node,
- rpipe: TokenIndex,
-
- pub fn iterate(self: *const Payload, index: usize) ?*Node {
- var i = index;
-
- if (i < 1) return self.error_symbol;
- i -= 1;
-
- return null;
- }
-
- pub fn firstToken(self: *const Payload) TokenIndex {
- return self.lpipe;
- }
-
- pub fn lastToken(self: *const Payload) TokenIndex {
- return self.rpipe;
- }
- };
-
- pub const PointerPayload = struct {
- base: Node = Node{ .tag = .PointerPayload },
- lpipe: TokenIndex,
- ptr_token: ?TokenIndex,
- value_symbol: *Node,
- rpipe: TokenIndex,
-
- pub fn iterate(self: *const PointerPayload, index: usize) ?*Node {
- var i = index;
-
- if (i < 1) return self.value_symbol;
- i -= 1;
-
- return null;
- }
-
- pub fn firstToken(self: *const PointerPayload) TokenIndex {
- return self.lpipe;
- }
-
- pub fn lastToken(self: *const PointerPayload) TokenIndex {
- return self.rpipe;
- }
- };
-
- pub const PointerIndexPayload = struct {
- base: Node = Node{ .tag = .PointerIndexPayload },
- lpipe: TokenIndex,
- ptr_token: ?TokenIndex,
- value_symbol: *Node,
- index_symbol: ?*Node,
- rpipe: TokenIndex,
-
- pub fn iterate(self: *const PointerIndexPayload, index: usize) ?*Node {
- var i = index;
-
- if (i < 1) return self.value_symbol;
- i -= 1;
-
- if (self.index_symbol) |index_symbol| {
- if (i < 1) return index_symbol;
- i -= 1;
- }
-
- return null;
- }
-
- pub fn firstToken(self: *const PointerIndexPayload) TokenIndex {
- return self.lpipe;
- }
-
- pub fn lastToken(self: *const PointerIndexPayload) TokenIndex {
- return self.rpipe;
- }
- };
-
- pub const Else = struct {
- base: Node = Node{ .tag = .Else },
- else_token: TokenIndex,
- payload: ?*Node,
- body: *Node,
-
- pub fn iterate(self: *const Else, index: usize) ?*Node {
- var i = index;
-
- if (self.payload) |payload| {
- if (i < 1) return payload;
- i -= 1;
- }
-
- if (i < 1) return self.body;
- i -= 1;
-
- return null;
- }
-
- pub fn firstToken(self: *const Else) TokenIndex {
- return self.else_token;
- }
-
- pub fn lastToken(self: *const Else) TokenIndex {
- return self.body.lastToken();
- }
- };
-
- /// The cases node pointers are found in memory after Switch.
- /// They must be SwitchCase or SwitchElse nodes.
- pub const Switch = struct {
- base: Node = Node{ .tag = .Switch },
- switch_token: TokenIndex,
- rbrace: TokenIndex,
- cases_len: NodeIndex,
- expr: *Node,
-
- /// After this the caller must initialize the fields_and_decls list.
- pub fn alloc(allocator: *mem.Allocator, cases_len: NodeIndex) !*Switch {
- const bytes = try allocator.alignedAlloc(u8, @alignOf(Switch), sizeInBytes(cases_len));
- return @ptrCast(*Switch, bytes.ptr);
- }
-
- pub fn free(self: *Switch, allocator: *mem.Allocator) void {
- const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.cases_len)];
- allocator.free(bytes);
- }
-
- pub fn iterate(self: *const Switch, index: usize) ?*Node {
- var i = index;
-
- if (i < 1) return self.expr;
- i -= 1;
-
- if (i < self.cases_len) return self.casesConst()[i];
- i -= self.cases_len;
-
- return null;
- }
-
- pub fn firstToken(self: *const Switch) TokenIndex {
- return self.switch_token;
- }
-
- pub fn lastToken(self: *const Switch) TokenIndex {
- return self.rbrace;
- }
-
- pub fn cases(self: *Switch) []*Node {
- const decls_start = @ptrCast([*]u8, self) + @sizeOf(Switch);
- return @ptrCast([*]*Node, decls_start)[0..self.cases_len];
- }
-
- pub fn casesConst(self: *const Switch) []const *Node {
- const decls_start = @ptrCast([*]const u8, self) + @sizeOf(Switch);
- return @ptrCast([*]const *Node, decls_start)[0..self.cases_len];
- }
-
- fn sizeInBytes(cases_len: NodeIndex) usize {
- return @sizeOf(Switch) + @sizeOf(*Node) * @as(usize, cases_len);
- }
- };
-
- /// Items sub-nodes appear in memory directly following SwitchCase.
- pub const SwitchCase = struct {
- base: Node = Node{ .tag = .SwitchCase },
- arrow_token: TokenIndex,
- payload: ?*Node,
- expr: *Node,
- items_len: NodeIndex,
-
- /// After this the caller must initialize the fields_and_decls list.
- pub fn alloc(allocator: *mem.Allocator, items_len: NodeIndex) !*SwitchCase {
- const bytes = try allocator.alignedAlloc(u8, @alignOf(SwitchCase), sizeInBytes(items_len));
- return @ptrCast(*SwitchCase, bytes.ptr);
- }
-
- pub fn free(self: *SwitchCase, allocator: *mem.Allocator) void {
- const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.items_len)];
- allocator.free(bytes);
- }
-
- pub fn iterate(self: *const SwitchCase, index: usize) ?*Node {
- var i = index;
-
- if (i < self.items_len) return self.itemsConst()[i];
- i -= self.items_len;
-
- if (self.payload) |payload| {
- if (i < 1) return payload;
- i -= 1;
- }
-
- if (i < 1) return self.expr;
- i -= 1;
-
- return null;
- }
-
- pub fn firstToken(self: *const SwitchCase) TokenIndex {
- return self.itemsConst()[0].firstToken();
- }
-
- pub fn lastToken(self: *const SwitchCase) TokenIndex {
- return self.expr.lastToken();
- }
-
- pub fn items(self: *SwitchCase) []*Node {
- const decls_start = @ptrCast([*]u8, self) + @sizeOf(SwitchCase);
- return @ptrCast([*]*Node, decls_start)[0..self.items_len];
- }
-
- pub fn itemsConst(self: *const SwitchCase) []const *Node {
- const decls_start = @ptrCast([*]const u8, self) + @sizeOf(SwitchCase);
- return @ptrCast([*]const *Node, decls_start)[0..self.items_len];
- }
-
- fn sizeInBytes(items_len: NodeIndex) usize {
- return @sizeOf(SwitchCase) + @sizeOf(*Node) * @as(usize, items_len);
- }
- };
-
- pub const SwitchElse = struct {
- base: Node = Node{ .tag = .SwitchElse },
- token: TokenIndex,
-
- pub fn iterate(self: *const SwitchElse, index: usize) ?*Node {
- return null;
- }
-
- pub fn firstToken(self: *const SwitchElse) TokenIndex {
- return self.token;
- }
-
- pub fn lastToken(self: *const SwitchElse) TokenIndex {
- return self.token;
- }
- };
-
- pub const While = struct {
- base: Node = Node{ .tag = .While },
- label: ?TokenIndex,
- inline_token: ?TokenIndex,
- while_token: TokenIndex,
- condition: *Node,
- payload: ?*Node,
- continue_expr: ?*Node,
- body: *Node,
- @"else": ?*Else,
-
- pub fn iterate(self: *const While, index: usize) ?*Node {
- var i = index;
-
- if (i < 1) return self.condition;
- i -= 1;
-
- if (self.payload) |payload| {
- if (i < 1) return payload;
- i -= 1;
- }
-
- if (self.continue_expr) |continue_expr| {
- if (i < 1) return continue_expr;
- i -= 1;
- }
-
- if (i < 1) return self.body;
- i -= 1;
-
- if (self.@"else") |@"else"| {
- if (i < 1) return &@"else".base;
- i -= 1;
- }
-
- return null;
- }
-
- pub fn firstToken(self: *const While) TokenIndex {
- if (self.label) |label| {
- return label;
- }
-
- if (self.inline_token) |inline_token| {
- return inline_token;
- }
-
- return self.while_token;
- }
-
- pub fn lastToken(self: *const While) TokenIndex {
- if (self.@"else") |@"else"| {
- return @"else".body.lastToken();
- }
-
- return self.body.lastToken();
- }
- };
-
- pub const For = struct {
- base: Node = Node{ .tag = .For },
- label: ?TokenIndex,
- inline_token: ?TokenIndex,
- for_token: TokenIndex,
- array_expr: *Node,
- payload: *Node,
- body: *Node,
- @"else": ?*Else,
-
- pub fn iterate(self: *const For, index: usize) ?*Node {
- var i = index;
-
- if (i < 1) return self.array_expr;
- i -= 1;
-
- if (i < 1) return self.payload;
- i -= 1;
-
- if (i < 1) return self.body;
- i -= 1;
-
- if (self.@"else") |@"else"| {
- if (i < 1) return &@"else".base;
- i -= 1;
- }
-
- return null;
- }
-
- pub fn firstToken(self: *const For) TokenIndex {
- if (self.label) |label| {
- return label;
- }
-
- if (self.inline_token) |inline_token| {
- return inline_token;
- }
-
- return self.for_token;
- }
-
- pub fn lastToken(self: *const For) TokenIndex {
- if (self.@"else") |@"else"| {
- return @"else".body.lastToken();
- }
-
- return self.body.lastToken();
- }
- };
-
- pub const If = struct {
- base: Node = Node{ .tag = .If },
- if_token: TokenIndex,
- condition: *Node,
- payload: ?*Node,
- body: *Node,
- @"else": ?*Else,
-
- pub fn iterate(self: *const If, index: usize) ?*Node {
- var i = index;
-
- if (i < 1) return self.condition;
- i -= 1;
-
- if (self.payload) |payload| {
- if (i < 1) return payload;
- i -= 1;
- }
-
- if (i < 1) return self.body;
- i -= 1;
-
- if (self.@"else") |@"else"| {
- if (i < 1) return &@"else".base;
- i -= 1;
- }
-
- return null;
- }
-
- pub fn firstToken(self: *const If) TokenIndex {
- return self.if_token;
- }
-
- pub fn lastToken(self: *const If) TokenIndex {
- if (self.@"else") |@"else"| {
- return @"else".body.lastToken();
- }
-
- return self.body.lastToken();
- }
- };
-
- pub const Catch = struct {
- base: Node = Node{ .tag = .Catch },
- op_token: TokenIndex,
- lhs: *Node,
- rhs: *Node,
- payload: ?*Node,
-
- pub fn iterate(self: *const Catch, index: usize) ?*Node {
- var i = index;
-
- if (i < 1) return self.lhs;
- i -= 1;
-
- if (self.payload) |payload| {
- if (i < 1) return payload;
- i -= 1;
- }
-
- if (i < 1) return self.rhs;
- i -= 1;
-
- return null;
- }
-
- pub fn firstToken(self: *const Catch) TokenIndex {
- return self.lhs.firstToken();
- }
-
- pub fn lastToken(self: *const Catch) TokenIndex {
- return self.rhs.lastToken();
- }
- };
-
- pub const SimpleInfixOp = struct {
- base: Node,
- op_token: TokenIndex,
- lhs: *Node,
- rhs: *Node,
-
- pub fn iterate(self: *const SimpleInfixOp, index: usize) ?*Node {
- var i = index;
-
- if (i < 1) return self.lhs;
- i -= 1;
-
- if (i < 1) return self.rhs;
- i -= 1;
-
- return null;
- }
-
- pub fn firstToken(self: *const SimpleInfixOp) TokenIndex {
- return self.lhs.firstToken();
- }
-
- pub fn lastToken(self: *const SimpleInfixOp) TokenIndex {
- return self.rhs.lastToken();
- }
- };
-
- pub const SimplePrefixOp = struct {
- base: Node,
- op_token: TokenIndex,
- rhs: *Node,
-
- const Self = @This();
-
- pub fn iterate(self: *const Self, index: usize) ?*Node {
- if (index == 0) return self.rhs;
- return null;
- }
-
- pub fn firstToken(self: *const Self) TokenIndex {
- return self.op_token;
- }
-
- pub fn lastToken(self: *const Self) TokenIndex {
- return self.rhs.lastToken();
- }
- };
-
- pub const ArrayType = struct {
- base: Node = Node{ .tag = .ArrayType },
- op_token: TokenIndex,
- rhs: *Node,
- len_expr: *Node,
-
- pub fn iterate(self: *const ArrayType, index: usize) ?*Node {
- var i = index;
-
- if (i < 1) return self.len_expr;
- i -= 1;
-
- if (i < 1) return self.rhs;
- i -= 1;
-
- return null;
- }
-
- pub fn firstToken(self: *const ArrayType) TokenIndex {
- return self.op_token;
- }
-
- pub fn lastToken(self: *const ArrayType) TokenIndex {
- return self.rhs.lastToken();
- }
- };
-
- pub const ArrayTypeSentinel = struct {
- base: Node = Node{ .tag = .ArrayTypeSentinel },
- op_token: TokenIndex,
- rhs: *Node,
- len_expr: *Node,
- sentinel: *Node,
-
- pub fn iterate(self: *const ArrayTypeSentinel, index: usize) ?*Node {
- var i = index;
-
- if (i < 1) return self.len_expr;
- i -= 1;
-
- if (i < 1) return self.sentinel;
- i -= 1;
-
- if (i < 1) return self.rhs;
- i -= 1;
-
- return null;
- }
-
- pub fn firstToken(self: *const ArrayTypeSentinel) TokenIndex {
- return self.op_token;
- }
-
- pub fn lastToken(self: *const ArrayTypeSentinel) TokenIndex {
- return self.rhs.lastToken();
- }
- };
-
- pub const PtrType = struct {
- base: Node = Node{ .tag = .PtrType },
- op_token: TokenIndex,
- rhs: *Node,
- /// TODO Add a u8 flags field to Node where it would otherwise be padding, and each bit represents
- /// one of these possibly-null things. Then we have them directly follow the PtrType in memory.
- ptr_info: PtrInfo = .{},
-
- pub fn iterate(self: *const PtrType, index: usize) ?*Node {
- var i = index;
-
- if (self.ptr_info.sentinel) |sentinel| {
- if (i < 1) return sentinel;
- i -= 1;
- }
-
- if (self.ptr_info.align_info) |align_info| {
- if (i < 1) return align_info.node;
- i -= 1;
- }
-
- if (i < 1) return self.rhs;
- i -= 1;
-
- return null;
- }
-
- pub fn firstToken(self: *const PtrType) TokenIndex {
- return self.op_token;
- }
-
- pub fn lastToken(self: *const PtrType) TokenIndex {
- return self.rhs.lastToken();
- }
- };
-
- pub const SliceType = struct {
- base: Node = Node{ .tag = .SliceType },
- op_token: TokenIndex,
- rhs: *Node,
- /// TODO Add a u8 flags field to Node where it would otherwise be padding, and each bit represents
- /// one of these possibly-null things. Then we have them directly follow the SliceType in memory.
- ptr_info: PtrInfo = .{},
-
- pub fn iterate(self: *const SliceType, index: usize) ?*Node {
- var i = index;
-
- if (self.ptr_info.sentinel) |sentinel| {
- if (i < 1) return sentinel;
- i -= 1;
- }
-
- if (self.ptr_info.align_info) |align_info| {
- if (i < 1) return align_info.node;
- i -= 1;
- }
-
- if (i < 1) return self.rhs;
- i -= 1;
-
- return null;
- }
-
- pub fn firstToken(self: *const SliceType) TokenIndex {
- return self.op_token;
- }
-
- pub fn lastToken(self: *const SliceType) TokenIndex {
- return self.rhs.lastToken();
- }
- };
-
- pub const FieldInitializer = struct {
- base: Node = Node{ .tag = .FieldInitializer },
- period_token: TokenIndex,
- name_token: TokenIndex,
- expr: *Node,
-
- pub fn iterate(self: *const FieldInitializer, index: usize) ?*Node {
- var i = index;
-
- if (i < 1) return self.expr;
- i -= 1;
-
- return null;
- }
-
- pub fn firstToken(self: *const FieldInitializer) TokenIndex {
- return self.period_token;
- }
-
- pub fn lastToken(self: *const FieldInitializer) TokenIndex {
- return self.expr.lastToken();
- }
- };
-
- /// Elements occur directly in memory after ArrayInitializer.
- pub const ArrayInitializer = struct {
- base: Node = Node{ .tag = .ArrayInitializer },
- rtoken: TokenIndex,
- list_len: NodeIndex,
- lhs: *Node,
-
- /// After this the caller must initialize the fields_and_decls list.
- pub fn alloc(allocator: *mem.Allocator, list_len: NodeIndex) !*ArrayInitializer {
- const bytes = try allocator.alignedAlloc(u8, @alignOf(ArrayInitializer), sizeInBytes(list_len));
- return @ptrCast(*ArrayInitializer, bytes.ptr);
- }
-
- pub fn free(self: *ArrayInitializer, allocator: *mem.Allocator) void {
- const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.list_len)];
- allocator.free(bytes);
- }
-
- pub fn iterate(self: *const ArrayInitializer, index: usize) ?*Node {
- var i = index;
-
- if (i < 1) return self.lhs;
- i -= 1;
-
- if (i < self.list_len) return self.listConst()[i];
- i -= self.list_len;
-
- return null;
- }
-
- pub fn firstToken(self: *const ArrayInitializer) TokenIndex {
- return self.lhs.firstToken();
- }
-
- pub fn lastToken(self: *const ArrayInitializer) TokenIndex {
- return self.rtoken;
- }
-
- pub fn list(self: *ArrayInitializer) []*Node {
- const decls_start = @ptrCast([*]u8, self) + @sizeOf(ArrayInitializer);
- return @ptrCast([*]*Node, decls_start)[0..self.list_len];
- }
-
- pub fn listConst(self: *const ArrayInitializer) []const *Node {
- const decls_start = @ptrCast([*]const u8, self) + @sizeOf(ArrayInitializer);
- return @ptrCast([*]const *Node, decls_start)[0..self.list_len];
- }
-
- fn sizeInBytes(list_len: NodeIndex) usize {
- return @sizeOf(ArrayInitializer) + @sizeOf(*Node) * @as(usize, list_len);
- }
- };
-
- /// Elements occur directly in memory after ArrayInitializerDot.
- pub const ArrayInitializerDot = struct {
- base: Node = Node{ .tag = .ArrayInitializerDot },
- dot: TokenIndex,
- rtoken: TokenIndex,
- list_len: NodeIndex,
-
- /// After this the caller must initialize the fields_and_decls list.
- pub fn alloc(allocator: *mem.Allocator, list_len: NodeIndex) !*ArrayInitializerDot {
- const bytes = try allocator.alignedAlloc(u8, @alignOf(ArrayInitializerDot), sizeInBytes(list_len));
- return @ptrCast(*ArrayInitializerDot, bytes.ptr);
- }
-
- pub fn free(self: *ArrayInitializerDot, allocator: *mem.Allocator) void {
- const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.list_len)];
- allocator.free(bytes);
- }
-
- pub fn iterate(self: *const ArrayInitializerDot, index: usize) ?*Node {
- var i = index;
-
- if (i < self.list_len) return self.listConst()[i];
- i -= self.list_len;
-
- return null;
- }
-
- pub fn firstToken(self: *const ArrayInitializerDot) TokenIndex {
- return self.dot;
- }
-
- pub fn lastToken(self: *const ArrayInitializerDot) TokenIndex {
- return self.rtoken;
- }
-
- pub fn list(self: *ArrayInitializerDot) []*Node {
- const decls_start = @ptrCast([*]u8, self) + @sizeOf(ArrayInitializerDot);
- return @ptrCast([*]*Node, decls_start)[0..self.list_len];
- }
-
- pub fn listConst(self: *const ArrayInitializerDot) []const *Node {
- const decls_start = @ptrCast([*]const u8, self) + @sizeOf(ArrayInitializerDot);
- return @ptrCast([*]const *Node, decls_start)[0..self.list_len];
- }
-
- fn sizeInBytes(list_len: NodeIndex) usize {
- return @sizeOf(ArrayInitializerDot) + @sizeOf(*Node) * @as(usize, list_len);
- }
- };
-
- /// Elements occur directly in memory after StructInitializer.
- pub const StructInitializer = struct {
- base: Node = Node{ .tag = .StructInitializer },
- rtoken: TokenIndex,
- list_len: NodeIndex,
- lhs: *Node,
-
- /// After this the caller must initialize the fields_and_decls list.
- pub fn alloc(allocator: *mem.Allocator, list_len: NodeIndex) !*StructInitializer {
- const bytes = try allocator.alignedAlloc(u8, @alignOf(StructInitializer), sizeInBytes(list_len));
- return @ptrCast(*StructInitializer, bytes.ptr);
- }
-
- pub fn free(self: *StructInitializer, allocator: *mem.Allocator) void {
- const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.list_len)];
- allocator.free(bytes);
- }
-
- pub fn iterate(self: *const StructInitializer, index: usize) ?*Node {
- var i = index;
-
- if (i < 1) return self.lhs;
- i -= 1;
-
- if (i < self.list_len) return self.listConst()[i];
- i -= self.list_len;
-
- return null;
- }
-
- pub fn firstToken(self: *const StructInitializer) TokenIndex {
- return self.lhs.firstToken();
- }
-
- pub fn lastToken(self: *const StructInitializer) TokenIndex {
- return self.rtoken;
- }
-
- pub fn list(self: *StructInitializer) []*Node {
- const decls_start = @ptrCast([*]u8, self) + @sizeOf(StructInitializer);
- return @ptrCast([*]*Node, decls_start)[0..self.list_len];
- }
-
- pub fn listConst(self: *const StructInitializer) []const *Node {
- const decls_start = @ptrCast([*]const u8, self) + @sizeOf(StructInitializer);
- return @ptrCast([*]const *Node, decls_start)[0..self.list_len];
- }
-
- fn sizeInBytes(list_len: NodeIndex) usize {
- return @sizeOf(StructInitializer) + @sizeOf(*Node) * @as(usize, list_len);
- }
- };
-
- /// Elements occur directly in memory after StructInitializerDot.
- pub const StructInitializerDot = struct {
- base: Node = Node{ .tag = .StructInitializerDot },
- dot: TokenIndex,
- rtoken: TokenIndex,
- list_len: NodeIndex,
-
- /// After this the caller must initialize the fields_and_decls list.
- pub fn alloc(allocator: *mem.Allocator, list_len: NodeIndex) !*StructInitializerDot {
- const bytes = try allocator.alignedAlloc(u8, @alignOf(StructInitializerDot), sizeInBytes(list_len));
- return @ptrCast(*StructInitializerDot, bytes.ptr);
- }
-
- pub fn free(self: *StructInitializerDot, allocator: *mem.Allocator) void {
- const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.list_len)];
- allocator.free(bytes);
- }
-
- pub fn iterate(self: *const StructInitializerDot, index: usize) ?*Node {
- var i = index;
-
- if (i < self.list_len) return self.listConst()[i];
- i -= self.list_len;
-
- return null;
- }
-
- pub fn firstToken(self: *const StructInitializerDot) TokenIndex {
- return self.dot;
- }
-
- pub fn lastToken(self: *const StructInitializerDot) TokenIndex {
- return self.rtoken;
- }
-
- pub fn list(self: *StructInitializerDot) []*Node {
- const decls_start = @ptrCast([*]u8, self) + @sizeOf(StructInitializerDot);
- return @ptrCast([*]*Node, decls_start)[0..self.list_len];
- }
-
- pub fn listConst(self: *const StructInitializerDot) []const *Node {
- const decls_start = @ptrCast([*]const u8, self) + @sizeOf(StructInitializerDot);
- return @ptrCast([*]const *Node, decls_start)[0..self.list_len];
- }
-
- fn sizeInBytes(list_len: NodeIndex) usize {
- return @sizeOf(StructInitializerDot) + @sizeOf(*Node) * @as(usize, list_len);
- }
- };
-
- /// Parameter nodes directly follow Call in memory.
- pub const Call = struct {
- base: Node = Node{ .tag = .Call },
- rtoken: TokenIndex,
- lhs: *Node,
- params_len: NodeIndex,
- async_token: ?TokenIndex,
-
- /// After this the caller must initialize the fields_and_decls list.
- pub fn alloc(allocator: *mem.Allocator, params_len: NodeIndex) !*Call {
- const bytes = try allocator.alignedAlloc(u8, @alignOf(Call), sizeInBytes(params_len));
- return @ptrCast(*Call, bytes.ptr);
- }
-
- pub fn free(self: *Call, allocator: *mem.Allocator) void {
- const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.params_len)];
- allocator.free(bytes);
- }
-
- pub fn iterate(self: *const Call, index: usize) ?*Node {
- var i = index;
-
- if (i < 1) return self.lhs;
- i -= 1;
-
- if (i < self.params_len) return self.paramsConst()[i];
- i -= self.params_len;
-
- return null;
- }
-
- pub fn firstToken(self: *const Call) TokenIndex {
- if (self.async_token) |async_token| return async_token;
- return self.lhs.firstToken();
- }
-
- pub fn lastToken(self: *const Call) TokenIndex {
- return self.rtoken;
- }
-
- pub fn params(self: *Call) []*Node {
- const decls_start = @ptrCast([*]u8, self) + @sizeOf(Call);
- return @ptrCast([*]*Node, decls_start)[0..self.params_len];
- }
-
- pub fn paramsConst(self: *const Call) []const *Node {
- const decls_start = @ptrCast([*]const u8, self) + @sizeOf(Call);
- return @ptrCast([*]const *Node, decls_start)[0..self.params_len];
- }
-
- fn sizeInBytes(params_len: NodeIndex) usize {
- return @sizeOf(Call) + @sizeOf(*Node) * @as(usize, params_len);
- }
- };
-
- pub const ArrayAccess = struct {
- base: Node = Node{ .tag = .ArrayAccess },
- rtoken: TokenIndex,
- lhs: *Node,
- index_expr: *Node,
-
- pub fn iterate(self: *const ArrayAccess, index: usize) ?*Node {
- var i = index;
-
- if (i < 1) return self.lhs;
- i -= 1;
-
- if (i < 1) return self.index_expr;
+ fn fullFnProto(tree: Tree, info: full.FnProto.Ast) full.FnProto {
+ const token_tags = tree.tokens.items(.tag);
+ var result: full.FnProto = .{
+ .ast = info,
+ .visib_token = null,
+ .extern_export_token = null,
+ .lib_name = null,
+ .name_token = null,
+ .lparen = undefined,
+ };
+ var i = info.fn_token;
+ while (i > 0) {
i -= 1;
-
- return null;
- }
-
- pub fn firstToken(self: *const ArrayAccess) TokenIndex {
- return self.lhs.firstToken();
+ switch (token_tags[i]) {
+ .keyword_extern, .keyword_export => result.extern_export_token = i,
+ .keyword_pub => result.visib_token = i,
+ .string_literal => result.lib_name = i,
+ else => break,
+ }
}
-
- pub fn lastToken(self: *const ArrayAccess) TokenIndex {
- return self.rtoken;
+ const after_fn_token = info.fn_token + 1;
+ if (token_tags[after_fn_token] == .identifier) {
+ result.name_token = after_fn_token;
+ result.lparen = after_fn_token + 1;
+ } else {
+ result.lparen = after_fn_token;
}
- };
+ assert(token_tags[result.lparen] == .l_paren);
- pub const SimpleSuffixOp = struct {
- base: Node,
- rtoken: TokenIndex,
- lhs: *Node,
-
- pub fn iterate(self: *const SimpleSuffixOp, index: usize) ?*Node {
- var i = index;
+ return result;
+ }
- if (i < 1) return self.lhs;
- i -= 1;
+ fn fullStructInit(tree: Tree, info: full.StructInit.Ast) full.StructInit {
+ const token_tags = tree.tokens.items(.tag);
+ var result: full.StructInit = .{
+ .ast = info,
+ };
+ return result;
+ }
- return null;
+ fn fullPtrType(tree: Tree, info: full.PtrType.Ast) full.PtrType {
+ const token_tags = tree.tokens.items(.tag);
+ // TODO: looks like stage1 isn't quite smart enough to handle enum
+ // literals in some places here
+ const Size = std.builtin.TypeInfo.Pointer.Size;
+ const size: Size = switch (token_tags[info.main_token]) {
+ .asterisk,
+ .asterisk_asterisk,
+ => switch (token_tags[info.main_token + 1]) {
+ .r_bracket, .colon => .Many,
+ .identifier => if (token_tags[info.main_token - 1] == .l_bracket) Size.C else .One,
+ else => .One,
+ },
+ .l_bracket => Size.Slice,
+ else => unreachable,
+ };
+ var result: full.PtrType = .{
+ .size = size,
+ .allowzero_token = null,
+ .const_token = null,
+ .volatile_token = null,
+ .ast = info,
+ };
+ // We need to be careful that we don't iterate over any sub-expressions
+ // here while looking for modifiers as that could result in false
+ // positives. Therefore, start after a sentinel if there is one and
+ // skip over any align node and bit range nodes.
+ var i = if (info.sentinel != 0) tree.lastToken(info.sentinel) + 1 else info.main_token;
+ const end = tree.firstToken(info.child_type);
+ while (i < end) : (i += 1) {
+ switch (token_tags[i]) {
+ .keyword_allowzero => result.allowzero_token = i,
+ .keyword_const => result.const_token = i,
+ .keyword_volatile => result.volatile_token = i,
+ .keyword_align => {
+ assert(info.align_node != 0);
+ if (info.bit_range_end != 0) {
+ assert(info.bit_range_start != 0);
+ i = tree.lastToken(info.bit_range_end) + 1;
+ } else {
+ i = tree.lastToken(info.align_node) + 1;
+ }
+ },
+ else => {},
+ }
}
+ return result;
+ }
- pub fn firstToken(self: *const SimpleSuffixOp) TokenIndex {
- return self.lhs.firstToken();
+ fn fullContainerDecl(tree: Tree, info: full.ContainerDecl.Ast) full.ContainerDecl {
+ const token_tags = tree.tokens.items(.tag);
+ var result: full.ContainerDecl = .{
+ .ast = info,
+ .layout_token = null,
+ };
+ switch (token_tags[info.main_token - 1]) {
+ .keyword_extern, .keyword_packed => result.layout_token = info.main_token - 1,
+ else => {},
}
+ return result;
+ }
- pub fn lastToken(self: *const SimpleSuffixOp) TokenIndex {
- return self.rtoken;
+ fn fullSwitchCase(tree: Tree, info: full.SwitchCase.Ast) full.SwitchCase {
+ const token_tags = tree.tokens.items(.tag);
+ var result: full.SwitchCase = .{
+ .ast = info,
+ .payload_token = null,
+ };
+ if (token_tags[info.arrow_token + 1] == .pipe) {
+ result.payload_token = info.arrow_token + 2;
}
- };
-
- pub const Slice = struct {
- base: Node = Node{ .tag = .Slice },
- rtoken: TokenIndex,
- lhs: *Node,
- start: *Node,
- end: ?*Node,
- sentinel: ?*Node,
-
- pub fn iterate(self: *const Slice, index: usize) ?*Node {
- var i = index;
-
- if (i < 1) return self.lhs;
- i -= 1;
-
- if (i < 1) return self.start;
- i -= 1;
+ return result;
+ }
- if (self.end) |end| {
- if (i < 1) return end;
- i -= 1;
+ fn fullAsm(tree: Tree, info: full.Asm.Ast) full.Asm {
+ const token_tags = tree.tokens.items(.tag);
+ const node_tags = tree.nodes.items(.tag);
+ var result: full.Asm = .{
+ .ast = info,
+ .volatile_token = null,
+ .inputs = &.{},
+ .outputs = &.{},
+ .first_clobber = null,
+ };
+ if (token_tags[info.asm_token + 1] == .keyword_volatile) {
+ result.volatile_token = info.asm_token + 1;
+ }
+ const outputs_end: usize = for (info.items) |item, i| {
+ switch (node_tags[item]) {
+ .asm_output => continue,
+ else => break i,
+ }
+ } else info.items.len;
+
+ result.outputs = info.items[0..outputs_end];
+ result.inputs = info.items[outputs_end..];
+
+ if (info.items.len == 0) {
+ // asm ("foo" ::: "a", "b");
+ const template_token = tree.lastToken(info.template);
+ if (token_tags[template_token + 1] == .colon and
+ token_tags[template_token + 2] == .colon and
+ token_tags[template_token + 3] == .colon and
+ token_tags[template_token + 4] == .string_literal)
+ {
+ result.first_clobber = template_token + 4;
+ }
+ } else if (result.inputs.len != 0) {
+ // asm ("foo" :: [_] "" (y) : "a", "b");
+ const last_input = result.inputs[result.inputs.len - 1];
+ const rparen = tree.lastToken(last_input);
+ if (token_tags[rparen + 1] == .colon and
+ token_tags[rparen + 2] == .string_literal)
+ {
+ result.first_clobber = rparen + 2;
}
- if (self.sentinel) |sentinel| {
- if (i < 1) return sentinel;
- i -= 1;
+ } else {
+ // asm ("foo" : [_] "" (x) :: "a", "b");
+ const last_output = result.outputs[result.outputs.len - 1];
+ const rparen = tree.lastToken(last_output);
+ if (token_tags[rparen + 1] == .colon and
+ token_tags[rparen + 2] == .colon and
+ token_tags[rparen + 3] == .string_literal)
+ {
+ result.first_clobber = rparen + 3;
}
-
- return null;
- }
-
- pub fn firstToken(self: *const Slice) TokenIndex {
- return self.lhs.firstToken();
- }
-
- pub fn lastToken(self: *const Slice) TokenIndex {
- return self.rtoken;
- }
- };
-
- pub const GroupedExpression = struct {
- base: Node = Node{ .tag = .GroupedExpression },
- lparen: TokenIndex,
- expr: *Node,
- rparen: TokenIndex,
-
- pub fn iterate(self: *const GroupedExpression, index: usize) ?*Node {
- var i = index;
-
- if (i < 1) return self.expr;
- i -= 1;
-
- return null;
- }
-
- pub fn firstToken(self: *const GroupedExpression) TokenIndex {
- return self.lparen;
- }
-
- pub fn lastToken(self: *const GroupedExpression) TokenIndex {
- return self.rparen;
}
- };
- /// Trailed in memory by possibly many things, with each optional thing
- /// determined by a bit in `trailer_flags`.
- /// Can be: return, break, continue
- pub const ControlFlowExpression = struct {
- base: Node,
- trailer_flags: TrailerFlags,
- ltoken: TokenIndex,
-
- pub const TrailerFlags = std.meta.TrailerFlags(struct {
- rhs: *Node,
- label: TokenIndex,
- });
+ return result;
+ }
- pub const RequiredFields = struct {
- tag: Tag,
- ltoken: TokenIndex,
+ fn fullWhile(tree: Tree, info: full.While.Ast) full.While {
+ const token_tags = tree.tokens.items(.tag);
+ var result: full.While = .{
+ .ast = info,
+ .inline_token = null,
+ .label_token = null,
+ .payload_token = null,
+ .else_token = undefined,
+ .error_token = null,
};
-
- pub fn getRHS(self: *const ControlFlowExpression) ?*Node {
- return self.getTrailer(.rhs);
- }
-
- pub fn setRHS(self: *ControlFlowExpression, value: *Node) void {
- self.setTrailer(.rhs, value);
- }
-
- pub fn getLabel(self: *const ControlFlowExpression) ?TokenIndex {
- return self.getTrailer(.label);
- }
-
- pub fn setLabel(self: *ControlFlowExpression, value: TokenIndex) void {
- self.setTrailer(.label, value);
- }
-
- fn getTrailer(self: *const ControlFlowExpression, comptime field: TrailerFlags.FieldEnum) ?TrailerFlags.Field(field) {
- const trailers_start = @ptrCast([*]const u8, self) + @sizeOf(ControlFlowExpression);
- return self.trailer_flags.get(trailers_start, field);
- }
-
- fn setTrailer(self: *ControlFlowExpression, comptime field: TrailerFlags.FieldEnum, value: TrailerFlags.Field(field)) void {
- const trailers_start = @ptrCast([*]u8, self) + @sizeOf(ControlFlowExpression);
- self.trailer_flags.set(trailers_start, field, value);
+ var tok_i = info.while_token - 1;
+ if (token_tags[tok_i] == .keyword_inline) {
+ result.inline_token = tok_i;
+ tok_i -= 1;
}
-
- pub fn create(allocator: *mem.Allocator, required: RequiredFields, trailers: TrailerFlags.InitStruct) !*ControlFlowExpression {
- const trailer_flags = TrailerFlags.init(trailers);
- const bytes = try allocator.alignedAlloc(u8, @alignOf(ControlFlowExpression), sizeInBytes(trailer_flags));
- const ctrl_flow_expr = @ptrCast(*ControlFlowExpression, bytes.ptr);
- ctrl_flow_expr.* = .{
- .base = .{ .tag = required.tag },
- .trailer_flags = trailer_flags,
- .ltoken = required.ltoken,
- };
- const trailers_start = bytes.ptr + @sizeOf(ControlFlowExpression);
- trailer_flags.setMany(trailers_start, trailers);
- return ctrl_flow_expr;
+ if (token_tags[tok_i] == .colon and
+ token_tags[tok_i - 1] == .identifier)
+ {
+ result.label_token = tok_i - 1;
}
-
- pub fn destroy(self: *ControlFlowExpression, allocator: *mem.Allocator) void {
- const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.trailer_flags)];
- allocator.free(bytes);
+ const last_cond_token = tree.lastToken(info.cond_expr);
+ if (token_tags[last_cond_token + 2] == .pipe) {
+ result.payload_token = last_cond_token + 3;
}
-
- pub fn iterate(self: *const ControlFlowExpression, index: usize) ?*Node {
- var i = index;
-
- if (self.getRHS()) |rhs| {
- if (i < 1) return rhs;
- i -= 1;
+ if (info.else_expr != 0) {
+ // then_expr else |x|
+ // ^ ^
+ result.else_token = tree.lastToken(info.then_expr) + 1;
+ if (token_tags[result.else_token + 1] == .pipe) {
+ result.error_token = result.else_token + 2;
}
-
- return null;
}
+ return result;
+ }
- pub fn firstToken(self: *const ControlFlowExpression) TokenIndex {
- return self.ltoken;
+ fn fullCall(tree: Tree, info: full.Call.Ast) full.Call {
+ const token_tags = tree.tokens.items(.tag);
+ var result: full.Call = .{
+ .ast = info,
+ .async_token = null,
+ };
+ const maybe_async_token = tree.firstToken(info.fn_expr) - 1;
+ if (token_tags[maybe_async_token] == .keyword_async) {
+ result.async_token = maybe_async_token;
}
+ return result;
+ }
+};
- pub fn lastToken(self: *const ControlFlowExpression) TokenIndex {
- if (self.getRHS()) |rhs| {
- return rhs.lastToken();
- }
-
- if (self.getLabel()) |label| {
- return label;
- }
-
- return self.ltoken;
- }
+/// Fully assembled AST node information.
+pub const full = struct {
+ pub const VarDecl = struct {
+ visib_token: ?TokenIndex,
+ extern_export_token: ?TokenIndex,
+ lib_name: ?TokenIndex,
+ threadlocal_token: ?TokenIndex,
+ comptime_token: ?TokenIndex,
+ ast: Ast,
- fn sizeInBytes(trailer_flags: TrailerFlags) usize {
- return @sizeOf(ControlFlowExpression) + trailer_flags.sizeInBytes();
- }
+ pub const Ast = struct {
+ mut_token: TokenIndex,
+ type_node: Node.Index,
+ align_node: Node.Index,
+ section_node: Node.Index,
+ init_node: Node.Index,
+ };
};
- pub const Suspend = struct {
- base: Node = Node{ .tag = .Suspend },
- suspend_token: TokenIndex,
- body: ?*Node,
-
- pub fn iterate(self: *const Suspend, index: usize) ?*Node {
- var i = index;
-
- if (self.body) |body| {
- if (i < 1) return body;
- i -= 1;
- }
-
- return null;
- }
-
- pub fn firstToken(self: *const Suspend) TokenIndex {
- return self.suspend_token;
- }
-
- pub fn lastToken(self: *const Suspend) TokenIndex {
- if (self.body) |body| {
- return body.lastToken();
- }
+ pub const If = struct {
+ /// Points to the first token after the `|`. Will either be an identifier or
+ /// a `*` (with an identifier immediately after it).
+ payload_token: ?TokenIndex,
+ /// Points to the identifier after the `|`.
+ error_token: ?TokenIndex,
+ /// Populated only if else_expr != 0.
+ else_token: TokenIndex,
+ ast: Ast,
- return self.suspend_token;
- }
+ pub const Ast = struct {
+ if_token: TokenIndex,
+ cond_expr: Node.Index,
+ then_expr: Node.Index,
+ else_expr: Node.Index,
+ };
};
- pub const EnumLiteral = struct {
- base: Node = Node{ .tag = .EnumLiteral },
- dot: TokenIndex,
- name: TokenIndex,
-
- pub fn iterate(self: *const EnumLiteral, index: usize) ?*Node {
- return null;
- }
-
- pub fn firstToken(self: *const EnumLiteral) TokenIndex {
- return self.dot;
- }
+ pub const While = struct {
+ ast: Ast,
+ inline_token: ?TokenIndex,
+ label_token: ?TokenIndex,
+ payload_token: ?TokenIndex,
+ error_token: ?TokenIndex,
+ /// Populated only if else_expr != 0.
+ else_token: TokenIndex,
- pub fn lastToken(self: *const EnumLiteral) TokenIndex {
- return self.name;
- }
+ pub const Ast = struct {
+ while_token: TokenIndex,
+ cond_expr: Node.Index,
+ cont_expr: Node.Index,
+ then_expr: Node.Index,
+ else_expr: Node.Index,
+ };
};
- /// Parameters are in memory following BuiltinCall.
- pub const BuiltinCall = struct {
- base: Node = Node{ .tag = .BuiltinCall },
- params_len: NodeIndex,
- builtin_token: TokenIndex,
- rparen_token: TokenIndex,
-
- /// After this the caller must initialize the fields_and_decls list.
- pub fn alloc(allocator: *mem.Allocator, params_len: NodeIndex) !*BuiltinCall {
- const bytes = try allocator.alignedAlloc(u8, @alignOf(BuiltinCall), sizeInBytes(params_len));
- return @ptrCast(*BuiltinCall, bytes.ptr);
- }
-
- pub fn free(self: *BuiltinCall, allocator: *mem.Allocator) void {
- const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.params_len)];
- allocator.free(bytes);
- }
+ pub const ContainerField = struct {
+ comptime_token: ?TokenIndex,
+ ast: Ast,
- pub fn iterate(self: *const BuiltinCall, index: usize) ?*Node {
- var i = index;
+ pub const Ast = struct {
+ name_token: TokenIndex,
+ type_expr: Node.Index,
+ value_expr: Node.Index,
+ align_expr: Node.Index,
+ };
+ };
- if (i < self.params_len) return self.paramsConst()[i];
- i -= self.params_len;
+ pub const FnProto = struct {
+ visib_token: ?TokenIndex,
+ extern_export_token: ?TokenIndex,
+ lib_name: ?TokenIndex,
+ name_token: ?TokenIndex,
+ lparen: TokenIndex,
+ ast: Ast,
- return null;
- }
+ pub const Ast = struct {
+ fn_token: TokenIndex,
+ return_type: Node.Index,
+ params: []const Node.Index,
+ align_expr: Node.Index,
+ section_expr: Node.Index,
+ callconv_expr: Node.Index,
+ };
- pub fn firstToken(self: *const BuiltinCall) TokenIndex {
- return self.builtin_token;
- }
+ pub const Param = struct {
+ first_doc_comment: ?TokenIndex,
+ name_token: ?TokenIndex,
+ comptime_noalias: ?TokenIndex,
+ anytype_ellipsis3: ?TokenIndex,
+ type_expr: Node.Index,
+ };
- pub fn lastToken(self: *const BuiltinCall) TokenIndex {
- return self.rparen_token;
- }
+ /// Abstracts over the fact that anytype and ... are not included
+ /// in the params slice, since they are simple identifiers and
+ /// not sub-expressions.
+ pub const Iterator = struct {
+ tree: *const Tree,
+ fn_proto: *const FnProto,
+ param_i: usize,
+ tok_i: TokenIndex,
+ tok_flag: bool,
+
+ pub fn next(it: *Iterator) ?Param {
+ const token_tags = it.tree.tokens.items(.tag);
+ while (true) {
+ var first_doc_comment: ?TokenIndex = null;
+ var comptime_noalias: ?TokenIndex = null;
+ var name_token: ?TokenIndex = null;
+ if (!it.tok_flag) {
+ if (it.param_i >= it.fn_proto.ast.params.len) {
+ return null;
+ }
+ const param_type = it.fn_proto.ast.params[it.param_i];
+ var tok_i = it.tree.firstToken(param_type) - 1;
+ while (true) : (tok_i -= 1) switch (token_tags[tok_i]) {
+ .colon => continue,
+ .identifier => name_token = tok_i,
+ .doc_comment => first_doc_comment = tok_i,
+ .keyword_comptime, .keyword_noalias => comptime_noalias = tok_i,
+ else => break,
+ };
+ it.param_i += 1;
+ it.tok_i = it.tree.lastToken(param_type) + 1;
+ it.tok_flag = true;
+ return Param{
+ .first_doc_comment = first_doc_comment,
+ .comptime_noalias = comptime_noalias,
+ .name_token = name_token,
+ .anytype_ellipsis3 = null,
+ .type_expr = param_type,
+ };
+ }
+ // Look for anytype and ... params afterwards.
+ if (token_tags[it.tok_i] == .comma) {
+ it.tok_i += 1;
+ } else {
+ return null;
+ }
+ if (token_tags[it.tok_i] == .doc_comment) {
+ first_doc_comment = it.tok_i;
+ while (token_tags[it.tok_i] == .doc_comment) {
+ it.tok_i += 1;
+ }
+ }
+ switch (token_tags[it.tok_i]) {
+ .ellipsis3 => {
+ it.tok_flag = false; // Next iteration should return null.
+ return Param{
+ .first_doc_comment = first_doc_comment,
+ .comptime_noalias = null,
+ .name_token = null,
+ .anytype_ellipsis3 = it.tok_i,
+ .type_expr = 0,
+ };
+ },
+ .keyword_noalias, .keyword_comptime => {
+ comptime_noalias = it.tok_i;
+ it.tok_i += 1;
+ },
+ else => {},
+ }
+ if (token_tags[it.tok_i] == .identifier and
+ token_tags[it.tok_i + 1] == .colon)
+ {
+ name_token = it.tok_i;
+ it.tok_i += 2;
+ }
+ if (token_tags[it.tok_i] == .keyword_anytype) {
+ it.tok_i += 1;
+ return Param{
+ .first_doc_comment = first_doc_comment,
+ .comptime_noalias = comptime_noalias,
+ .name_token = name_token,
+ .anytype_ellipsis3 = it.tok_i - 1,
+ .type_expr = 0,
+ };
+ }
+ it.tok_flag = false;
+ }
+ }
+ };
- pub fn params(self: *BuiltinCall) []*Node {
- const decls_start = @ptrCast([*]u8, self) + @sizeOf(BuiltinCall);
- return @ptrCast([*]*Node, decls_start)[0..self.params_len];
+ pub fn iterate(fn_proto: FnProto, tree: Tree) Iterator {
+ return .{
+ .tree = &tree,
+ .fn_proto = &fn_proto,
+ .param_i = 0,
+ .tok_i = undefined,
+ .tok_flag = false,
+ };
}
+ };
- pub fn paramsConst(self: *const BuiltinCall) []const *Node {
- const decls_start = @ptrCast([*]const u8, self) + @sizeOf(BuiltinCall);
- return @ptrCast([*]const *Node, decls_start)[0..self.params_len];
- }
+ pub const StructInit = struct {
+ ast: Ast,
- fn sizeInBytes(params_len: NodeIndex) usize {
- return @sizeOf(BuiltinCall) + @sizeOf(*Node) * @as(usize, params_len);
- }
+ pub const Ast = struct {
+ lbrace: TokenIndex,
+ fields: []const Node.Index,
+ type_expr: Node.Index,
+ };
};
- /// The string literal tokens appear directly in memory after MultilineStringLiteral.
- pub const MultilineStringLiteral = struct {
- base: Node = Node{ .tag = .MultilineStringLiteral },
- lines_len: TokenIndex,
+ pub const ArrayInit = struct {
+ ast: Ast,
- /// After this the caller must initialize the lines list.
- pub fn alloc(allocator: *mem.Allocator, lines_len: NodeIndex) !*MultilineStringLiteral {
- const bytes = try allocator.alignedAlloc(u8, @alignOf(MultilineStringLiteral), sizeInBytes(lines_len));
- return @ptrCast(*MultilineStringLiteral, bytes.ptr);
- }
+ pub const Ast = struct {
+ lbrace: TokenIndex,
+ elements: []const Node.Index,
+ type_expr: Node.Index,
+ };
+ };
- pub fn free(self: *MultilineStringLiteral, allocator: *mem.Allocator) void {
- const bytes = @ptrCast([*]u8, self)[0..sizeInBytes(self.lines_len)];
- allocator.free(bytes);
- }
+ pub const ArrayType = struct {
+ ast: Ast,
- pub fn iterate(self: *const MultilineStringLiteral, index: usize) ?*Node {
- return null;
- }
+ pub const Ast = struct {
+ lbracket: TokenIndex,
+ elem_count: Node.Index,
+ sentinel: ?Node.Index,
+ elem_type: Node.Index,
+ };
+ };
- pub fn firstToken(self: *const MultilineStringLiteral) TokenIndex {
- return self.linesConst()[0];
- }
+ pub const PtrType = struct {
+ size: std.builtin.TypeInfo.Pointer.Size,
+ allowzero_token: ?TokenIndex,
+ const_token: ?TokenIndex,
+ volatile_token: ?TokenIndex,
+ ast: Ast,
+
+ pub const Ast = struct {
+ main_token: TokenIndex,
+ align_node: Node.Index,
+ sentinel: Node.Index,
+ bit_range_start: Node.Index,
+ bit_range_end: Node.Index,
+ child_type: Node.Index,
+ };
+ };
- pub fn lastToken(self: *const MultilineStringLiteral) TokenIndex {
- return self.linesConst()[self.lines_len - 1];
- }
+ pub const Slice = struct {
+ ast: Ast,
- pub fn lines(self: *MultilineStringLiteral) []TokenIndex {
- const decls_start = @ptrCast([*]u8, self) + @sizeOf(MultilineStringLiteral);
- return @ptrCast([*]TokenIndex, decls_start)[0..self.lines_len];
- }
+ pub const Ast = struct {
+ sliced: Node.Index,
+ lbracket: TokenIndex,
+ start: Node.Index,
+ end: Node.Index,
+ sentinel: Node.Index,
+ };
+ };
- pub fn linesConst(self: *const MultilineStringLiteral) []const TokenIndex {
- const decls_start = @ptrCast([*]const u8, self) + @sizeOf(MultilineStringLiteral);
- return @ptrCast([*]const TokenIndex, decls_start)[0..self.lines_len];
- }
+ pub const ContainerDecl = struct {
+ layout_token: ?TokenIndex,
+ ast: Ast,
+
+ pub const Ast = struct {
+ main_token: TokenIndex,
+ /// Populated when main_token is `keyword_union`.
+ enum_token: ?TokenIndex,
+ members: []const Node.Index,
+ arg: Node.Index,
+ };
+ };
- fn sizeInBytes(lines_len: NodeIndex) usize {
- return @sizeOf(MultilineStringLiteral) + @sizeOf(TokenIndex) * @as(usize, lines_len);
- }
+ pub const SwitchCase = struct {
+ /// Points to the first token after the `|`. Will either be an identifier or
+ /// a `*` (with an identifier immediately after it).
+ payload_token: ?TokenIndex,
+ ast: Ast,
+
+ pub const Ast = struct {
+ /// If empty, this is an else case
+ values: []const Node.Index,
+ arrow_token: TokenIndex,
+ target_expr: Node.Index,
+ };
};
pub const Asm = struct {
- base: Node = Node{ .tag = .Asm },
- asm_token: TokenIndex,
- rparen: TokenIndex,
+ ast: Ast,
volatile_token: ?TokenIndex,
- template: *Node,
- outputs: []Output,
- inputs: []Input,
- /// A clobber node must be a StringLiteral or MultilineStringLiteral.
- clobbers: []*Node,
-
- pub const Output = struct {
- lbracket: TokenIndex,
- symbolic_name: *Node,
- constraint: *Node,
- kind: Kind,
+ first_clobber: ?TokenIndex,
+ outputs: []const Node.Index,
+ inputs: []const Node.Index,
+
+ pub const Ast = struct {
+ asm_token: TokenIndex,
+ template: Node.Index,
+ items: []const Node.Index,
rparen: TokenIndex,
-
- pub const Kind = union(enum) {
- Variable: *OneToken,
- Return: *Node,
- };
-
- pub fn iterate(self: *const Output, index: usize) ?*Node {
- var i = index;
-
- if (i < 1) return self.symbolic_name;
- i -= 1;
-
- if (i < 1) return self.constraint;
- i -= 1;
-
- switch (self.kind) {
- .Variable => |variable_name| {
- if (i < 1) return &variable_name.base;
- i -= 1;
- },
- .Return => |return_type| {
- if (i < 1) return return_type;
- i -= 1;
- },
- }
-
- return null;
- }
-
- pub fn firstToken(self: *const Output) TokenIndex {
- return self.lbracket;
- }
-
- pub fn lastToken(self: *const Output) TokenIndex {
- return self.rparen;
- }
};
+ };
- pub const Input = struct {
- lbracket: TokenIndex,
- symbolic_name: *Node,
- constraint: *Node,
- expr: *Node,
- rparen: TokenIndex,
-
- pub fn iterate(self: *const Input, index: usize) ?*Node {
- var i = index;
-
- if (i < 1) return self.symbolic_name;
- i -= 1;
+ pub const Call = struct {
+ ast: Ast,
+ async_token: ?TokenIndex,
- if (i < 1) return self.constraint;
- i -= 1;
+ pub const Ast = struct {
+ lparen: TokenIndex,
+ fn_expr: Node.Index,
+ params: []const Node.Index,
+ };
+ };
+};
- if (i < 1) return self.expr;
- i -= 1;
+pub const Error = struct {
+ tag: Tag,
+ token: TokenIndex,
+ extra: union {
+ none: void,
+ expected_tag: Token.Tag,
+ } = .{ .none = {} },
- return null;
- }
+ pub const Tag = enum {
+ asterisk_after_ptr_deref,
+ decl_between_fields,
+ expected_block,
+ expected_block_or_assignment,
+ expected_block_or_expr,
+ expected_block_or_field,
+ expected_container_members,
+ expected_expr,
+ expected_expr_or_assignment,
+ expected_fn,
+ expected_inlinable,
+ expected_labelable,
+ expected_param_list,
+ expected_prefix_expr,
+ expected_primary_type_expr,
+ expected_pub_item,
+ expected_return_type,
+ expected_semi_or_else,
+ expected_semi_or_lbrace,
+ expected_statement,
+ expected_string_literal,
+ expected_suffix_op,
+ expected_type_expr,
+ expected_var_decl,
+ expected_var_decl_or_fn,
+ expected_loop_payload,
+ expected_container,
+ extra_align_qualifier,
+ extra_allowzero_qualifier,
+ extra_const_qualifier,
+ extra_volatile_qualifier,
+ invalid_align,
+ invalid_and,
+ invalid_bit_range,
+ invalid_token,
+ same_line_doc_comment,
+ unattached_doc_comment,
+
+ /// `expected_tag` is populated.
+ expected_token,
+ };
+};
- pub fn firstToken(self: *const Input) TokenIndex {
- return self.lbracket;
- }
+pub const Node = struct {
+ tag: Tag,
+ main_token: TokenIndex,
+ data: Data,
- pub fn lastToken(self: *const Input) TokenIndex {
- return self.rparen;
- }
- };
+ pub const Index = u32;
- pub fn iterate(self: *const Asm, index: usize) ?*Node {
- var i = index;
+ comptime {
+ // Goal is to keep this under one byte for efficiency.
+ assert(@sizeOf(Tag) == 1);
+ }
- if (i < self.outputs.len * 3) switch (i % 3) {
- 0 => return self.outputs[i / 3].symbolic_name,
- 1 => return self.outputs[i / 3].constraint,
- 2 => switch (self.outputs[i / 3].kind) {
- .Variable => |variable_name| return &variable_name.base,
- .Return => |return_type| return return_type,
- },
- else => unreachable,
- };
- i -= self.outputs.len * 3;
+ /// Note: The FooComma/FooSemicolon variants exist to ease the implementation of
+ /// Tree.lastToken()
+ pub const Tag = enum {
+ /// sub_list[lhs...rhs]
+ root,
+ /// `usingnamespace lhs;`. rhs unused. main_token is `usingnamespace`.
+ @"usingnamespace",
+ /// lhs is test name token (must be string literal), if any.
+ /// rhs is the body node.
+ test_decl,
+ /// lhs is the index into extra_data.
+ /// rhs is the initialization expression, if any.
+ /// main_token is `var` or `const`.
+ global_var_decl,
+ /// `var a: x align(y) = rhs`
+ /// lhs is the index into extra_data.
+ /// main_token is `var` or `const`.
+ local_var_decl,
+ /// `var a: lhs = rhs`. lhs and rhs may be unused.
+ /// Can be local or global.
+ /// main_token is `var` or `const`.
+ simple_var_decl,
+ /// `var a align(lhs) = rhs`. lhs and rhs may be unused.
+ /// Can be local or global.
+ /// main_token is `var` or `const`.
+ aligned_var_decl,
+ /// lhs is the identifier token payload if any,
+ /// rhs is the deferred expression.
+ @"errdefer",
+ /// lhs is unused.
+ /// rhs is the deferred expression.
+ @"defer",
+ /// lhs catch rhs
+ /// lhs catch |err| rhs
+ /// main_token is the `catch` keyword.
+ /// payload is determined by looking at the next token after the `catch` keyword.
+ @"catch",
+ /// `lhs.a`. main_token is the dot. rhs is the identifier token index.
+ field_access,
+ /// `lhs.?`. main_token is the dot. rhs is the `?` token index.
+ unwrap_optional,
+ /// `lhs == rhs`. main_token is op.
+ equal_equal,
+ /// `lhs != rhs`. main_token is op.
+ bang_equal,
+ /// `lhs < rhs`. main_token is op.
+ less_than,
+ /// `lhs > rhs`. main_token is op.
+ greater_than,
+ /// `lhs <= rhs`. main_token is op.
+ less_or_equal,
+ /// `lhs >= rhs`. main_token is op.
+ greater_or_equal,
+ /// `lhs *= rhs`. main_token is op.
+ assign_mul,
+ /// `lhs /= rhs`. main_token is op.
+ assign_div,
+ /// `lhs %= rhs`. main_token is op.
+ assign_mod,
+ /// `lhs += rhs`. main_token is op.
+ assign_add,
+ /// `lhs -= rhs`. main_token is op.
+ assign_sub,
+ /// `lhs <<= rhs`. main_token is op.
+ assign_bit_shift_left,
+ /// `lhs >>= rhs`. main_token is op.
+ assign_bit_shift_right,
+ /// `lhs &= rhs`. main_token is op.
+ assign_bit_and,
+ /// `lhs ^= rhs`. main_token is op.
+ assign_bit_xor,
+ /// `lhs |= rhs`. main_token is op.
+ assign_bit_or,
+ /// `lhs *%= rhs`. main_token is op.
+ assign_mul_wrap,
+ /// `lhs +%= rhs`. main_token is op.
+ assign_add_wrap,
+ /// `lhs -%= rhs`. main_token is op.
+ assign_sub_wrap,
+ /// `lhs = rhs`. main_token is op.
+ assign,
+ /// `lhs || rhs`. main_token is the `||`.
+ merge_error_sets,
+ /// `lhs * rhs`. main_token is the `*`.
+ mul,
+ /// `lhs / rhs`. main_token is the `/`.
+ div,
+ /// `lhs % rhs`. main_token is the `%`.
+ mod,
+ /// `lhs ** rhs`. main_token is the `**`.
+ array_mult,
+ /// `lhs *% rhs`. main_token is the `*%`.
+ mul_wrap,
+ /// `lhs + rhs`. main_token is the `+`.
+ add,
+ /// `lhs - rhs`. main_token is the `-`.
+ sub,
+ /// `lhs ++ rhs`. main_token is the `++`.
+ array_cat,
+ /// `lhs +% rhs`. main_token is the `+%`.
+ add_wrap,
+ /// `lhs -% rhs`. main_token is the `-%`.
+ sub_wrap,
+ /// `lhs << rhs`. main_token is the `<<`.
+ bit_shift_left,
+ /// `lhs >> rhs`. main_token is the `>>`.
+ bit_shift_right,
+ /// `lhs & rhs`. main_token is the `&`.
+ bit_and,
+ /// `lhs ^ rhs`. main_token is the `^`.
+ bit_xor,
+ /// `lhs | rhs`. main_token is the `|`.
+ bit_or,
+ /// `lhs orelse rhs`. main_token is the `orelse`.
+ @"orelse",
+ /// `lhs and rhs`. main_token is the `and`.
+ bool_and,
+ /// `lhs or rhs`. main_token is the `or`.
+ bool_or,
+ /// `op lhs`. rhs unused. main_token is op.
+ bool_not,
+ /// `op lhs`. rhs unused. main_token is op.
+ negation,
+ /// `op lhs`. rhs unused. main_token is op.
+ bit_not,
+ /// `op lhs`. rhs unused. main_token is op.
+ negation_wrap,
+ /// `op lhs`. rhs unused. main_token is op.
+ address_of,
+ /// `op lhs`. rhs unused. main_token is op.
+ @"try",
+ /// `op lhs`. rhs unused. main_token is op.
+ @"await",
+ /// `?lhs`. rhs unused. main_token is the `?`.
+ optional_type,
+ /// `[lhs]rhs`. lhs can be omitted to make it a slice.
+ array_type,
+ /// `[lhs:a]b`. `array_type_sentinel[rhs]`.
+ array_type_sentinel,
+ /// `[*]align(lhs) rhs`. lhs can be omitted.
+ /// `*align(lhs) rhs`. lhs can be omitted.
+ /// `[]rhs`.
+ /// main_token is the asterisk if a pointer or the lbracket if a slice
+ /// main_token might be a ** token, which is shared with a parent/child
+ /// pointer type and may require special handling.
+ ptr_type_aligned,
+ /// `[*:lhs]rhs`. lhs can be omitted.
+ /// `*rhs`.
+ /// `[:lhs]rhs`.
+ /// main_token is the asterisk if a pointer or the lbracket if a slice
+ /// main_token might be a ** token, which is shared with a parent/child
+ /// pointer type and may require special handling.
+ ptr_type_sentinel,
+ /// lhs is index into ptr_type. rhs is the element type expression.
+ /// main_token is the asterisk if a pointer or the lbracket if a slice
+ /// main_token might be a ** token, which is shared with a parent/child
+ /// pointer type and may require special handling.
+ ptr_type,
+ /// lhs is index into ptr_type_bit_range. rhs is the element type expression.
+ /// main_token is the asterisk if a pointer or the lbracket if a slice
+ /// main_token might be a ** token, which is shared with a parent/child
+ /// pointer type and may require special handling.
+ ptr_type_bit_range,
+ /// `lhs[rhs..]`
+ /// main_token is the lbracket.
+ slice_open,
+ /// `lhs[b..c]`. rhs is index into Slice
+ /// main_token is the lbracket.
+ slice,
+ /// `lhs[b..c :d]`. rhs is index into SliceSentinel
+ /// main_token is the lbracket.
+ slice_sentinel,
+ /// `lhs.*`. rhs is unused.
+ deref,
+ /// `lhs[rhs]`.
+ array_access,
+ /// `lhs{rhs}`. rhs can be omitted.
+ array_init_one,
+ /// `lhs{rhs,}`. rhs can *not* be omitted
+ array_init_one_comma,
+ /// `.{lhs, rhs}`. lhs and rhs can be omitted.
+ array_init_dot_two,
+ /// Same as `array_init_dot_two` except there is known to be a trailing comma
+ /// before the final rbrace.
+ array_init_dot_two_comma,
+ /// `.{a, b}`. `sub_list[lhs..rhs]`.
+ array_init_dot,
+ /// Same as `array_init_dot` except there is known to be a trailing comma
+ /// before the final rbrace.
+ array_init_dot_comma,
+ /// `lhs{a, b}`. `sub_range_list[rhs]`. lhs can be omitted which means `.{a, b}`.
+ array_init,
+ /// Same as `array_init` except there is known to be a trailing comma
+ /// before the final rbrace.
+ array_init_comma,
+ /// `lhs{.a = rhs}`. rhs can be omitted making it empty.
+ /// main_token is the lbrace.
+ struct_init_one,
+ /// `lhs{.a = rhs,}`. rhs can *not* be omitted.
+ /// main_token is the lbrace.
+ struct_init_one_comma,
+ /// `.{.a = lhs, .b = rhs}`. lhs and rhs can be omitted.
+ /// main_token is the lbrace.
+ /// No trailing comma before the rbrace.
+ struct_init_dot_two,
+ /// Same as `struct_init_dot_two` except there is known to be a trailing comma
+ /// before the final rbrace.
+ struct_init_dot_two_comma,
+ /// `.{.a = b, .c = d}`. `sub_list[lhs..rhs]`.
+ /// main_token is the lbrace.
+ struct_init_dot,
+ /// Same as `struct_init_dot` except there is known to be a trailing comma
+ /// before the final rbrace.
+ struct_init_dot_comma,
+ /// `lhs{.a = b, .c = d}`. `sub_range_list[rhs]`.
+ /// lhs can be omitted which means `.{.a = b, .c = d}`.
+ /// main_token is the lbrace.
+ struct_init,
+ /// Same as `struct_init` except there is known to be a trailing comma
+ /// before the final rbrace.
+ struct_init_comma,
+ /// `lhs(rhs)`. rhs can be omitted.
+ /// main_token is the lparen.
+ call_one,
+ /// `lhs(rhs,)`. rhs can *not* be omitted.
+ /// main_token is the lparen.
+ call_one_comma,
+ /// `async lhs(rhs)`. rhs can be omitted.
+ async_call_one,
+ /// `async lhs(rhs,)`.
+ async_call_one_comma,
+ /// `lhs(a, b, c)`. `SubRange[rhs]`.
+ /// main_token is the `(`.
+ call,
+ /// `lhs(a, b, c,)`. `SubRange[rhs]`.
+ /// main_token is the `(`.
+ call_comma,
+ /// `async lhs(a, b, c)`. `SubRange[rhs]`.
+ /// main_token is the `(`.
+ async_call,
+ /// `async lhs(a, b, c,)`. `SubRange[rhs]`.
+ /// main_token is the `(`.
+ async_call_comma,
+ /// `switch(lhs) {}`. `SubRange[rhs]`.
+ @"switch",
+ /// Same as switch except there is known to be a trailing comma
+ /// before the final rbrace
+ switch_comma,
+ /// `lhs => rhs`. If lhs is omitted it means `else`.
+ /// main_token is the `=>`
+ switch_case_one,
+ /// `a, b, c => rhs`. `SubRange[lhs]`.
+ /// main_token is the `=>`
+ switch_case,
+ /// `lhs...rhs`.
+ switch_range,
+ /// `while (lhs) rhs`.
+ /// `while (lhs) |x| rhs`.
+ while_simple,
+ /// `while (lhs) : (a) b`. `WhileCont[rhs]`.
+ /// `while (lhs) |x| : (a) b`. `WhileCont[rhs]`.
+ while_cont,
+ /// `while (lhs) : (a) b else c`. `While[rhs]`.
+ /// `while (lhs) |x| : (a) b else c`. `While[rhs]`.
+ /// `while (lhs) |x| : (a) b else |y| c`. `While[rhs]`.
+ @"while",
+ /// `for (lhs) rhs`.
+ for_simple,
+ /// `for (lhs) a else b`. `if_list[rhs]`.
+ @"for",
+ /// `if (lhs) rhs`.
+ /// `if (lhs) |a| rhs`.
+ if_simple,
+ /// `if (lhs) a else b`. `If[rhs]`.
+ /// `if (lhs) |x| a else b`. `If[rhs]`.
+ /// `if (lhs) |x| a else |y| b`. `If[rhs]`.
+ @"if",
+ /// `suspend lhs`. lhs can be omitted. rhs is unused.
+ @"suspend",
+ /// `resume lhs`. rhs is unused.
+ @"resume",
+ /// `continue`. lhs is token index of label if any. rhs is unused.
+ @"continue",
+ /// `break :lhs rhs`
+ /// both lhs and rhs may be omitted.
+ @"break",
+ /// `return lhs`. lhs can be omitted. rhs is unused.
+ @"return",
+ /// `fn(a: lhs) rhs`. lhs can be omitted.
+ /// anytype and ... parameters are omitted from the AST tree.
+ /// main_token is the `fn` keyword.
+ /// extern function declarations use this tag.
+ fn_proto_simple,
+ /// `fn(a: b, c: d) rhs`. `sub_range_list[lhs]`.
+ /// anytype and ... parameters are omitted from the AST tree.
+ /// main_token is the `fn` keyword.
+ /// extern function declarations use this tag.
+ fn_proto_multi,
+ /// `fn(a: b) rhs linksection(e) callconv(f)`. `FnProtoOne[lhs]`.
+ /// zero or one parameters.
+ /// anytype and ... parameters are omitted from the AST tree.
+ /// main_token is the `fn` keyword.
+ /// extern function declarations use this tag.
+ fn_proto_one,
+ /// `fn(a: b, c: d) rhs linksection(e) callconv(f)`. `FnProto[lhs]`.
+ /// anytype and ... parameters are omitted from the AST tree.
+ /// main_token is the `fn` keyword.
+ /// extern function declarations use this tag.
+ fn_proto,
+ /// lhs is the fn_proto.
+ /// rhs is the function body block.
+ /// Note that extern function declarations use the fn_proto tags rather
+ /// than this one.
+ fn_decl,
+ /// `anyframe->rhs`. main_token is `anyframe`. `lhs` is arrow token index.
+ anyframe_type,
+ /// Both lhs and rhs unused.
+ anyframe_literal,
+ /// Both lhs and rhs unused.
+ char_literal,
+ /// Both lhs and rhs unused.
+ integer_literal,
+ /// Both lhs and rhs unused.
+ float_literal,
+ /// Both lhs and rhs unused.
+ false_literal,
+ /// Both lhs and rhs unused.
+ true_literal,
+ /// Both lhs and rhs unused.
+ null_literal,
+ /// Both lhs and rhs unused.
+ undefined_literal,
+ /// Both lhs and rhs unused.
+ unreachable_literal,
+ /// Both lhs and rhs unused.
+ /// Most identifiers will not have explicit AST nodes, however for expressions
+ /// which could be one of many different kinds of AST nodes, there will be an
+ /// identifier AST node for it.
+ identifier,
+ /// lhs is the dot token index, rhs unused, main_token is the identifier.
+ enum_literal,
+ /// main_token is the string literal token
+ /// Both lhs and rhs unused.
+ string_literal,
+ /// main_token is the first token index (redundant with lhs)
+ /// lhs is the first token index; rhs is the last token index.
+ /// Could be a series of multiline_string_literal_line tokens, or a single
+ /// string_literal token.
+ multiline_string_literal,
+ /// `(lhs)`. main_token is the `(`; rhs is the token index of the `)`.
+ grouped_expression,
+ /// `@a(lhs, rhs)`. lhs and rhs may be omitted.
+ /// main_token is the builtin token.
+ builtin_call_two,
+ /// Same as builtin_call_two but there is known to be a trailing comma before the rparen.
+ builtin_call_two_comma,
+ /// `@a(b, c)`. `sub_list[lhs..rhs]`.
+ /// main_token is the builtin token.
+ builtin_call,
+ /// Same as builtin_call but there is known to be a trailing comma before the rparen.
+ builtin_call_comma,
+ /// `error{a, b}`.
+ /// rhs is the rbrace, lhs is unused.
+ error_set_decl,
+ /// `struct {}`, `union {}`, `opaque {}`, `enum {}`. `extra_data[lhs..rhs]`.
+ /// main_token is `struct`, `union`, `opaque`, `enum` keyword.
+ container_decl,
+ /// Same as `container_decl` but there is known to be a trailing comma
+ /// or semicolon before the rbrace.
+ container_decl_trailing,
+ /// `struct {lhs, rhs}`, `union {lhs, rhs}`, `opaque {lhs, rhs}`, `enum {lhs, rhs}`.
+ /// lhs or rhs can be omitted.
+ /// main_token is `struct`, `union`, `opaque`, `enum` keyword.
+ container_decl_two,
+ /// Same as `container_decl_two` except there is known to be a trailing comma
+ /// or semicolon before the rbrace.
+ container_decl_two_trailing,
+ /// `union(lhs)` / `enum(lhs)`. `SubRange[rhs]`.
+ container_decl_arg,
+ /// Same as container_decl_arg but there is known to be a trailing
+ /// comma or semicolon before the rbrace.
+ container_decl_arg_trailing,
+ /// `union(enum) {}`. `sub_list[lhs..rhs]`.
+ /// Note that tagged unions with explicitly provided enums are represented
+ /// by `container_decl_arg`.
+ tagged_union,
+ /// Same as tagged_union but there is known to be a trailing comma
+ /// or semicolon before the rbrace.
+ tagged_union_trailing,
+ /// `union(enum) {lhs, rhs}`. lhs or rhs may be omitted.
+ /// Note that tagged unions with explicitly provided enums are represented
+ /// by `container_decl_arg`.
+ tagged_union_two,
+ /// Same as tagged_union_two but there is known to be a trailing comma
+ /// or semicolon before the rbrace.
+ tagged_union_two_trailing,
+ /// `union(enum(lhs)) {}`. `SubRange[rhs]`.
+ tagged_union_enum_tag,
+ /// Same as tagged_union_enum_tag but there is known to be a trailing comma
+ /// or semicolon before the rbrace.
+ tagged_union_enum_tag_trailing,
+ /// `a: lhs = rhs,`. lhs and rhs can be omitted.
+ /// main_token is the field name identifier.
+ /// lastToken() does not include the possible trailing comma.
+ container_field_init,
+ /// `a: lhs align(rhs),`. rhs can be omitted.
+ /// main_token is the field name identifier.
+ /// lastToken() does not include the possible trailing comma.
+ container_field_align,
+ /// `a: lhs align(c) = d,`. `container_field_list[rhs]`.
+ /// main_token is the field name identifier.
+ /// lastToken() does not include the possible trailing comma.
+ container_field,
+ /// `anytype`. both lhs and rhs unused.
+ /// Used by `ContainerField`.
+ @"anytype",
+ /// `comptime lhs`. rhs unused.
+ @"comptime",
+ /// `nosuspend lhs`. rhs unused.
+ @"nosuspend",
+ /// `{lhs rhs}`. rhs or lhs can be omitted.
+ /// main_token points at the lbrace.
+ block_two,
+ /// Same as block_two but there is known to be a semicolon before the rbrace.
+ block_two_semicolon,
+ /// `{}`. `sub_list[lhs..rhs]`.
+ /// main_token points at the lbrace.
+ block,
+ /// Same as block but there is known to be a semicolon before the rbrace.
+ block_semicolon,
+ /// `asm(lhs)`. rhs is the token index of the rparen.
+ asm_simple,
+ /// `asm(lhs, a)`. `Asm[rhs]`.
+ @"asm",
+ /// `[a] "b" (c)`. lhs is 0, rhs is token index of the rparen.
+ /// `[a] "b" (-> lhs)`. rhs is token index of the rparen.
+ /// main_token is `a`.
+ asm_output,
+ /// `[a] "b" (lhs)`. rhs is token index of the rparen.
+ /// main_token is `a`.
+ asm_input,
+ /// `error.a`. lhs is token index of `.`. rhs is token index of `a`.
+ error_value,
+ /// `lhs!rhs`. main_token is the `!`.
+ error_union,
+
+ pub fn isContainerField(tag: Tag) bool {
+ return switch (tag) {
+ .container_field_init,
+ .container_field_align,
+ .container_field,
+ => true,
- if (i < self.inputs.len * 3) switch (i % 3) {
- 0 => return self.inputs[i / 3].symbolic_name,
- 1 => return self.inputs[i / 3].constraint,
- 2 => return self.inputs[i / 3].expr,
- else => unreachable,
+ else => false,
};
- i -= self.inputs.len * 3;
-
- return null;
}
+ };
- pub fn firstToken(self: *const Asm) TokenIndex {
- return self.asm_token;
- }
+ pub const Data = struct {
+ lhs: Index,
+ rhs: Index,
+ };
- pub fn lastToken(self: *const Asm) TokenIndex {
- return self.rparen;
- }
+ pub const LocalVarDecl = struct {
+ type_node: Index,
+ align_node: Index,
};
- /// TODO remove from the Node base struct
- /// TODO actually maybe remove entirely in favor of iterating backward from Node.firstToken()
- /// and forwards to find same-line doc comments.
- pub const DocComment = struct {
- base: Node = Node{ .tag = .DocComment },
- /// Points to the first doc comment token. API users are expected to iterate over the
- /// tokens array, looking for more doc comments, ignoring line comments, and stopping
- /// at the first other token.
- first_line: TokenIndex,
-
- pub fn iterate(self: *const DocComment, index: usize) ?*Node {
- return null;
- }
+ pub const ArrayTypeSentinel = struct {
+ elem_type: Index,
+ sentinel: Index,
+ };
- pub fn firstToken(self: *const DocComment) TokenIndex {
- return self.first_line;
- }
+ pub const PtrType = struct {
+ sentinel: Index,
+ align_node: Index,
+ };
- /// Returns the first doc comment line. Be careful, this may not be the desired behavior,
- /// which would require the tokens array.
- pub fn lastToken(self: *const DocComment) TokenIndex {
- return self.first_line;
- }
+ pub const PtrTypeBitRange = struct {
+ sentinel: Index,
+ align_node: Index,
+ bit_range_start: Index,
+ bit_range_end: Index,
};
- pub const TestDecl = struct {
- base: Node = Node{ .tag = .TestDecl },
- doc_comments: ?*DocComment,
- test_token: TokenIndex,
- name: ?*Node,
- body_node: *Node,
+ pub const SubRange = struct {
+ /// Index into sub_list.
+ start: Index,
+ /// Index into sub_list.
+ end: Index,
+ };
- pub fn iterate(self: *const TestDecl, index: usize) ?*Node {
- var i = index;
+ pub const If = struct {
+ then_expr: Index,
+ else_expr: Index,
+ };
- if (i < 1) return self.body_node;
- i -= 1;
+ pub const ContainerField = struct {
+ value_expr: Index,
+ align_expr: Index,
+ };
- return null;
- }
+ pub const GlobalVarDecl = struct {
+ type_node: Index,
+ align_node: Index,
+ section_node: Index,
+ };
- pub fn firstToken(self: *const TestDecl) TokenIndex {
- return self.test_token;
- }
+ pub const Slice = struct {
+ start: Index,
+ end: Index,
+ };
- pub fn lastToken(self: *const TestDecl) TokenIndex {
- return self.body_node.lastToken();
- }
+ pub const SliceSentinel = struct {
+ start: Index,
+ end: Index,
+ sentinel: Index,
+ };
+
+ pub const While = struct {
+ cont_expr: Index,
+ then_expr: Index,
+ else_expr: Index,
};
-};
-pub const PtrInfo = struct {
- allowzero_token: ?TokenIndex = null,
- align_info: ?Align = null,
- const_token: ?TokenIndex = null,
- volatile_token: ?TokenIndex = null,
- sentinel: ?*Node = null,
+ pub const WhileCont = struct {
+ cont_expr: Index,
+ then_expr: Index,
+ };
- pub const Align = struct {
- node: *Node,
- bit_range: ?BitRange = null,
+ pub const FnProtoOne = struct {
+ /// Populated if there is exactly 1 parameter. Otherwise there are 0 parameters.
+ param: Index,
+ /// Populated if align(A) is present.
+ align_expr: Index,
+ /// Populated if linksection(A) is present.
+ section_expr: Index,
+ /// Populated if callconv(A) is present.
+ callconv_expr: Index,
+ };
- pub const BitRange = struct {
- start: *Node,
- end: *Node,
- };
+ pub const FnProto = struct {
+ params_start: Index,
+ params_end: Index,
+ /// Populated if align(A) is present.
+ align_expr: Index,
+ /// Populated if linksection(A) is present.
+ section_expr: Index,
+ /// Populated if callconv(A) is present.
+ callconv_expr: Index,
};
-};
-test "iterate" {
- var root = Node.Root{
- .base = Node{ .tag = Node.Tag.Root },
- .decls_len = 0,
- .eof_token = 0,
+ pub const Asm = struct {
+ items_start: Index,
+ items_end: Index,
+ /// Needed to make lastToken() work.
+ rparen: TokenIndex,
};
- var base = &root.base;
- testing.expect(base.iterate(0) == null);
-}
+};
diff --git a/lib/std/zig/parse.zig b/lib/std/zig/parse.zig
index 32f6403686b8..7a6404fbb2ca 100644
--- a/lib/std/zig/parse.zig
+++ b/lib/std/zig/parse.zig
@@ -11,85 +11,181 @@ const Node = ast.Node;
const Tree = ast.Tree;
const AstError = ast.Error;
const TokenIndex = ast.TokenIndex;
-const NodeIndex = ast.NodeIndex;
const Token = std.zig.Token;
pub const Error = error{ParseError} || Allocator.Error;
/// Result should be freed with tree.deinit() when there are
/// no more references to any of the tokens or nodes.
-pub fn parse(gpa: *Allocator, source: []const u8) Allocator.Error!*Tree {
- var token_ids = std.ArrayList(Token.Id).init(gpa);
- defer token_ids.deinit();
- var token_locs = std.ArrayList(Token.Loc).init(gpa);
- defer token_locs.deinit();
+pub fn parse(gpa: *Allocator, source: []const u8) Allocator.Error!Tree {
+ var tokens = ast.TokenList{};
+ defer tokens.deinit(gpa);
// Empirically, the zig std lib has an 8:1 ratio of source bytes to token count.
const estimated_token_count = source.len / 8;
- try token_ids.ensureCapacity(estimated_token_count);
- try token_locs.ensureCapacity(estimated_token_count);
+ try tokens.ensureCapacity(gpa, estimated_token_count);
var tokenizer = std.zig.Tokenizer.init(source);
while (true) {
const token = tokenizer.next();
- try token_ids.append(token.id);
- try token_locs.append(token.loc);
- if (token.id == .Eof) break;
+ try tokens.append(gpa, .{
+ .tag = token.tag,
+ .start = @intCast(u32, token.loc.start),
+ });
+ if (token.tag == .eof) break;
}
var parser: Parser = .{
.source = source,
- .arena = std.heap.ArenaAllocator.init(gpa),
.gpa = gpa,
- .token_ids = token_ids.items,
- .token_locs = token_locs.items,
+ .token_tags = tokens.items(.tag),
+ .token_starts = tokens.items(.start),
.errors = .{},
+ .nodes = .{},
+ .extra_data = .{},
.tok_i = 0,
};
defer parser.errors.deinit(gpa);
- errdefer parser.arena.deinit();
-
- while (token_ids.items[parser.tok_i] == .LineComment) parser.tok_i += 1;
-
- const root_node = try parser.parseRoot();
+ defer parser.nodes.deinit(gpa);
+ defer parser.extra_data.deinit(gpa);
+
+ // Empirically, Zig source code has a 2:1 ratio of tokens to AST nodes.
+ // Make sure at least 1 so we can use appendAssumeCapacity on the root node below.
+ const estimated_node_count = (tokens.len + 2) / 2;
+ try parser.nodes.ensureCapacity(gpa, estimated_node_count);
+
+ // Root node must be index 0.
+ // Root <- skip ContainerMembers eof
+ parser.nodes.appendAssumeCapacity(.{
+ .tag = .root,
+ .main_token = 0,
+ .data = .{
+ .lhs = undefined,
+ .rhs = undefined,
+ },
+ });
+ const root_members = try parser.parseContainerMembers();
+ const root_decls = try root_members.toSpan(&parser);
+ if (parser.token_tags[parser.tok_i] != .eof) {
+ try parser.warnExpected(.eof);
+ }
+ parser.nodes.items(.data)[0] = .{
+ .lhs = root_decls.start,
+ .rhs = root_decls.end,
+ };
- const tree = try parser.arena.allocator.create(Tree);
- tree.* = .{
- .gpa = gpa,
+ // TODO experiment with compacting the MultiArrayList slices here
+ return Tree{
.source = source,
- .token_ids = token_ids.toOwnedSlice(),
- .token_locs = token_locs.toOwnedSlice(),
+ .tokens = tokens.toOwnedSlice(),
+ .nodes = parser.nodes.toOwnedSlice(),
+ .extra_data = parser.extra_data.toOwnedSlice(gpa),
.errors = parser.errors.toOwnedSlice(gpa),
- .root_node = root_node,
- .arena = parser.arena.state,
};
- return tree;
}
+const null_node: Node.Index = 0;
+
/// Represents in-progress parsing, will be converted to an ast.Tree after completion.
const Parser = struct {
- arena: std.heap.ArenaAllocator,
gpa: *Allocator,
source: []const u8,
- token_ids: []const Token.Id,
- token_locs: []const Token.Loc,
+ token_tags: []const Token.Tag,
+ token_starts: []const ast.ByteOffset,
tok_i: TokenIndex,
errors: std.ArrayListUnmanaged(AstError),
+ nodes: ast.NodeList,
+ extra_data: std.ArrayListUnmanaged(Node.Index),
+
+ const SmallSpan = union(enum) {
+ zero_or_one: Node.Index,
+ multi: []Node.Index,
- /// Root <- skip ContainerMembers eof
- fn parseRoot(p: *Parser) Allocator.Error!*Node.Root {
- const decls = try parseContainerMembers(p, true);
- defer p.gpa.free(decls);
+ fn deinit(self: SmallSpan, gpa: *Allocator) void {
+ switch (self) {
+ .zero_or_one => {},
+ .multi => |list| gpa.free(list),
+ }
+ }
+ };
- // parseContainerMembers will try to skip as much
- // invalid tokens as it can so this can only be the EOF
- const eof_token = p.eatToken(.Eof).?;
+ const Members = struct {
+ len: usize,
+ lhs: Node.Index,
+ rhs: Node.Index,
+ trailing: bool,
- const decls_len = @intCast(NodeIndex, decls.len);
- const node = try Node.Root.create(&p.arena.allocator, decls_len, eof_token);
- std.mem.copy(*Node, node.decls(), decls);
+ fn toSpan(self: Members, p: *Parser) !Node.SubRange {
+ if (self.len <= 2) {
+ const nodes = [2]Node.Index{ self.lhs, self.rhs };
+ return p.listToSpan(nodes[0..self.len]);
+ } else {
+ return Node.SubRange{ .start = self.lhs, .end = self.rhs };
+ }
+ }
+ };
- return node;
+ fn listToSpan(p: *Parser, list: []const Node.Index) !Node.SubRange {
+ try p.extra_data.appendSlice(p.gpa, list);
+ return Node.SubRange{
+ .start = @intCast(Node.Index, p.extra_data.items.len - list.len),
+ .end = @intCast(Node.Index, p.extra_data.items.len),
+ };
+ }
+
+ fn addNode(p: *Parser, elem: ast.NodeList.Elem) Allocator.Error!Node.Index {
+ const result = @intCast(Node.Index, p.nodes.len);
+ try p.nodes.append(p.gpa, elem);
+ return result;
+ }
+
+ fn addExtra(p: *Parser, extra: anytype) Allocator.Error!Node.Index {
+ const fields = std.meta.fields(@TypeOf(extra));
+ try p.extra_data.ensureCapacity(p.gpa, p.extra_data.items.len + fields.len);
+ const result = @intCast(u32, p.extra_data.items.len);
+ inline for (fields) |field| {
+ comptime assert(field.field_type == Node.Index);
+ p.extra_data.appendAssumeCapacity(@field(extra, field.name));
+ }
+ return result;
+ }
+
+ fn warn(p: *Parser, tag: ast.Error.Tag) error{OutOfMemory}!void {
+ @setCold(true);
+ try p.warnMsg(.{ .tag = tag, .token = p.tok_i });
+ }
+
+ fn warnExpected(p: *Parser, expected_token: Token.Tag) error{OutOfMemory}!void {
+ @setCold(true);
+ try p.warnMsg(.{
+ .tag = .expected_token,
+ .token = p.tok_i,
+ .extra = .{ .expected_tag = expected_token },
+ });
+ }
+ fn warnMsg(p: *Parser, msg: ast.Error) error{OutOfMemory}!void {
+ @setCold(true);
+ try p.errors.append(p.gpa, msg);
+ }
+
+ fn fail(p: *Parser, tag: ast.Error.Tag) error{ ParseError, OutOfMemory } {
+ @setCold(true);
+ return p.failMsg(.{ .tag = tag, .token = p.tok_i });
+ }
+
+ fn failExpected(p: *Parser, expected_token: Token.Tag) error{ ParseError, OutOfMemory } {
+ @setCold(true);
+ return p.failMsg(.{
+ .tag = .expected_token,
+ .token = p.tok_i,
+ .extra = .{ .expected_tag = expected_token },
+ });
+ }
+
+ fn failMsg(p: *Parser, msg: ast.Error) error{ ParseError, OutOfMemory } {
+ @setCold(true);
+ try p.warnMsg(msg);
+ return error.ParseError;
}
/// ContainerMembers
@@ -99,176 +195,226 @@ const Parser = struct {
/// / ContainerField COMMA ContainerMembers
/// / ContainerField
/// /
- fn parseContainerMembers(p: *Parser, top_level: bool) ![]*Node {
- var list = std.ArrayList(*Node).init(p.gpa);
+ /// TopLevelComptime <- KEYWORD_comptime BlockExpr
+ fn parseContainerMembers(p: *Parser) !Members {
+ var list = std.ArrayList(Node.Index).init(p.gpa);
defer list.deinit();
var field_state: union(enum) {
- /// no fields have been seen
+ /// No fields have been seen.
none,
- /// currently parsing fields
+ /// Currently parsing fields.
seen,
- /// saw fields and then a declaration after them.
- /// payload is first token of previous declaration.
- end: TokenIndex,
- /// ther was a declaration between fields, don't report more errors
+ /// Saw fields and then a declaration after them.
+ /// Payload is the node index of the previous declaration.
+ end: Node.Index,
+ /// There was a declaration between fields, don't report more errors.
err,
} = .none;
- while (true) {
- if (try p.parseContainerDocComments()) |node| {
- try list.append(node);
- continue;
- }
-
- const doc_comments = try p.parseDocComment();
+ // Skip container doc comments.
+ while (p.eatToken(.container_doc_comment)) |_| {}
- if (p.parseTestDecl() catch |err| switch (err) {
- error.OutOfMemory => return error.OutOfMemory,
- error.ParseError => {
- p.findNextContainerMember();
- continue;
+ var trailing = false;
+ while (true) {
+ const doc_comment = try p.eatDocComments();
+
+ switch (p.token_tags[p.tok_i]) {
+ .keyword_test => {
+ const test_decl_node = try p.expectTestDeclRecoverable();
+ if (test_decl_node != 0) {
+ if (field_state == .seen) {
+ field_state = .{ .end = test_decl_node };
+ }
+ try list.append(test_decl_node);
+ }
+ trailing = false;
},
- }) |node| {
- if (field_state == .seen) {
- field_state = .{ .end = node.firstToken() };
- }
- node.cast(Node.TestDecl).?.doc_comments = doc_comments;
- try list.append(node);
- continue;
- }
-
- if (p.parseTopLevelComptime() catch |err| switch (err) {
- error.OutOfMemory => return error.OutOfMemory,
- error.ParseError => {
- p.findNextContainerMember();
- continue;
+ .keyword_comptime => switch (p.token_tags[p.tok_i + 1]) {
+ .identifier => {
+ p.tok_i += 1;
+ const container_field = try p.expectContainerFieldRecoverable();
+ if (container_field != 0) {
+ switch (field_state) {
+ .none => field_state = .seen,
+ .err, .seen => {},
+ .end => |node| {
+ try p.warnMsg(.{
+ .tag = .decl_between_fields,
+ .token = p.nodes.items(.main_token)[node],
+ });
+ // Continue parsing; error will be reported later.
+ field_state = .err;
+ },
+ }
+ try list.append(container_field);
+ switch (p.token_tags[p.tok_i]) {
+ .comma => {
+ p.tok_i += 1;
+ trailing = true;
+ continue;
+ },
+ .r_brace, .eof => {
+ trailing = false;
+ break;
+ },
+ else => {},
+ }
+ // A declaration is not allowed after a field without a comma.
+ // Report error but recover parser.
+ try p.warnExpected(.comma);
+ p.findNextContainerMember();
+ }
+ },
+ .l_brace => {
+ const comptime_token = p.nextToken();
+ const block = p.parseBlock() catch |err| switch (err) {
+ error.OutOfMemory => return error.OutOfMemory,
+ error.ParseError => blk: {
+ p.findNextContainerMember();
+ break :blk null_node;
+ },
+ };
+ if (block != 0) {
+ const comptime_node = try p.addNode(.{
+ .tag = .@"comptime",
+ .main_token = comptime_token,
+ .data = .{
+ .lhs = block,
+ .rhs = undefined,
+ },
+ });
+ if (field_state == .seen) {
+ field_state = .{ .end = comptime_node };
+ }
+ try list.append(comptime_node);
+ }
+ trailing = false;
+ },
+ else => {
+ p.tok_i += 1;
+ try p.warn(.expected_block_or_field);
+ },
},
- }) |node| {
- if (field_state == .seen) {
- field_state = .{ .end = node.firstToken() };
- }
- node.cast(Node.Comptime).?.doc_comments = doc_comments;
- try list.append(node);
- continue;
- }
-
- const visib_token = p.eatToken(.Keyword_pub);
-
- if (p.parseTopLevelDecl(doc_comments, visib_token) catch |err| switch (err) {
- error.OutOfMemory => return error.OutOfMemory,
- error.ParseError => {
- p.findNextContainerMember();
- continue;
+ .keyword_pub => {
+ p.tok_i += 1;
+ const top_level_decl = try p.expectTopLevelDeclRecoverable();
+ if (top_level_decl != 0) {
+ if (field_state == .seen) {
+ field_state = .{ .end = top_level_decl };
+ }
+ try list.append(top_level_decl);
+ }
+ trailing = p.token_tags[p.tok_i - 1] == .semicolon;
},
- }) |node| {
- if (field_state == .seen) {
- field_state = .{ .end = visib_token orelse node.firstToken() };
- }
- try list.append(node);
- continue;
- }
-
- if (visib_token != null) {
- try p.errors.append(p.gpa, .{
- .ExpectedPubItem = .{ .token = p.tok_i },
- });
- // ignore this pub
- continue;
- }
-
- if (p.parseContainerField() catch |err| switch (err) {
- error.OutOfMemory => return error.OutOfMemory,
- error.ParseError => {
- // attempt to recover
- p.findNextContainerMember();
- continue;
+ .keyword_usingnamespace => {
+ const node = try p.expectUsingNamespaceRecoverable();
+ if (node != 0) {
+ if (field_state == .seen) {
+ field_state = .{ .end = node };
+ }
+ try list.append(node);
+ }
+ trailing = p.token_tags[p.tok_i - 1] == .semicolon;
+ },
+ .keyword_const,
+ .keyword_var,
+ .keyword_threadlocal,
+ .keyword_export,
+ .keyword_extern,
+ .keyword_inline,
+ .keyword_noinline,
+ .keyword_fn,
+ => {
+ const top_level_decl = try p.expectTopLevelDeclRecoverable();
+ if (top_level_decl != 0) {
+ if (field_state == .seen) {
+ field_state = .{ .end = top_level_decl };
+ }
+ try list.append(top_level_decl);
+ }
+ trailing = p.token_tags[p.tok_i - 1] == .semicolon;
+ },
+ .identifier => {
+ const container_field = try p.expectContainerFieldRecoverable();
+ if (container_field != 0) {
+ switch (field_state) {
+ .none => field_state = .seen,
+ .err, .seen => {},
+ .end => |node| {
+ try p.warnMsg(.{
+ .tag = .decl_between_fields,
+ .token = p.nodes.items(.main_token)[node],
+ });
+ // Continue parsing; error will be reported later.
+ field_state = .err;
+ },
+ }
+ try list.append(container_field);
+ switch (p.token_tags[p.tok_i]) {
+ .comma => {
+ p.tok_i += 1;
+ trailing = true;
+ continue;
+ },
+ .r_brace, .eof => {
+ trailing = false;
+ break;
+ },
+ else => {},
+ }
+ // A declaration is not allowed after a field without a comma.
+ // Report error but recover parser.
+ try p.warnExpected(.comma);
+ p.findNextContainerMember();
+ }
},
- }) |node| {
- switch (field_state) {
- .none => field_state = .seen,
- .err, .seen => {},
- .end => |tok| {
- try p.errors.append(p.gpa, .{
- .DeclBetweenFields = .{ .token = tok },
+ .eof, .r_brace => {
+ if (doc_comment) |tok| {
+ try p.warnMsg(.{
+ .tag = .unattached_doc_comment,
+ .token = tok,
});
- // continue parsing, error will be reported later
- field_state = .err;
- },
- }
-
- const field = node.cast(Node.ContainerField).?;
- field.doc_comments = doc_comments;
- try list.append(node);
- const comma = p.eatToken(.Comma) orelse {
- // try to continue parsing
- const index = p.tok_i;
- p.findNextContainerMember();
- const next = p.token_ids[p.tok_i];
- switch (next) {
- .Eof => {
- // no invalid tokens were found
- if (index == p.tok_i) break;
-
- // Invalid tokens, add error and exit
- try p.errors.append(p.gpa, .{
- .ExpectedToken = .{ .token = index, .expected_id = .Comma },
- });
- break;
- },
- else => {
- if (next == .RBrace) {
- if (!top_level) break;
- _ = p.nextToken();
- }
-
- // add error and continue
- try p.errors.append(p.gpa, .{
- .ExpectedToken = .{ .token = index, .expected_id = .Comma },
- });
- continue;
- },
}
- };
- if (try p.parseAppendedDocComment(comma)) |appended_comment|
- field.doc_comments = appended_comment;
- continue;
- }
-
- // Dangling doc comment
- if (doc_comments != null) {
- try p.errors.append(p.gpa, .{
- .UnattachedDocComment = .{ .token = doc_comments.?.firstToken() },
- });
- }
-
- const next = p.token_ids[p.tok_i];
- switch (next) {
- .Eof => break,
- .Keyword_comptime => {
- _ = p.nextToken();
- try p.errors.append(p.gpa, .{
- .ExpectedBlockOrField = .{ .token = p.tok_i },
- });
+ break;
},
else => {
- const index = p.tok_i;
- if (next == .RBrace) {
- if (!top_level) break;
- _ = p.nextToken();
- }
-
- // this was likely not supposed to end yet,
- // try to find the next declaration
+ try p.warn(.expected_container_members);
+ // This was likely not supposed to end yet; try to find the next declaration.
p.findNextContainerMember();
- try p.errors.append(p.gpa, .{
- .ExpectedContainerMembers = .{ .token = index },
- });
},
}
}
- return list.toOwnedSlice();
+ switch (list.items.len) {
+ 0 => return Members{
+ .len = 0,
+ .lhs = 0,
+ .rhs = 0,
+ .trailing = trailing,
+ },
+ 1 => return Members{
+ .len = 1,
+ .lhs = list.items[0],
+ .rhs = 0,
+ .trailing = trailing,
+ },
+ 2 => return Members{
+ .len = 2,
+ .lhs = list.items[0],
+ .rhs = list.items[1],
+ .trailing = trailing,
+ },
+ else => {
+ const span = try p.listToSpan(list.items);
+ return Members{
+ .len = list.items.len,
+ .lhs = span.start,
+ .rhs = span.end,
+ .trailing = trailing,
+ };
+ },
+ }
}
/// Attempts to find next container member by searching for certain tokens
@@ -276,47 +422,52 @@ const Parser = struct {
var level: u32 = 0;
while (true) {
const tok = p.nextToken();
- switch (p.token_ids[tok]) {
- // any of these can start a new top level declaration
- .Keyword_test,
- .Keyword_comptime,
- .Keyword_pub,
- .Keyword_export,
- .Keyword_extern,
- .Keyword_inline,
- .Keyword_noinline,
- .Keyword_usingnamespace,
- .Keyword_threadlocal,
- .Keyword_const,
- .Keyword_var,
- .Keyword_fn,
- .Identifier,
+ switch (p.token_tags[tok]) {
+ // Any of these can start a new top level declaration.
+ .keyword_test,
+ .keyword_comptime,
+ .keyword_pub,
+ .keyword_export,
+ .keyword_extern,
+ .keyword_inline,
+ .keyword_noinline,
+ .keyword_usingnamespace,
+ .keyword_threadlocal,
+ .keyword_const,
+ .keyword_var,
+ .keyword_fn,
=> {
if (level == 0) {
- p.putBackToken(tok);
+ p.tok_i -= 1;
+ return;
+ }
+ },
+ .identifier => {
+ if (p.token_tags[tok + 1] == .comma and level == 0) {
+ p.tok_i -= 1;
return;
}
},
- .Comma, .Semicolon => {
+ .comma, .semicolon => {
// this decl was likely meant to end here
if (level == 0) {
return;
}
},
- .LParen, .LBracket, .LBrace => level += 1,
- .RParen, .RBracket => {
+ .l_paren, .l_bracket, .l_brace => level += 1,
+ .r_paren, .r_bracket => {
if (level != 0) level -= 1;
},
- .RBrace => {
+ .r_brace => {
if (level == 0) {
// end of container, exit
- p.putBackToken(tok);
+ p.tok_i -= 1;
return;
}
level -= 1;
},
- .Eof => {
- p.putBackToken(tok);
+ .eof => {
+ p.tok_i -= 1;
return;
},
else => {},
@@ -329,22 +480,22 @@ const Parser = struct {
var level: u32 = 0;
while (true) {
const tok = p.nextToken();
- switch (p.token_ids[tok]) {
- .LBrace => level += 1,
- .RBrace => {
+ switch (p.token_tags[tok]) {
+ .l_brace => level += 1,
+ .r_brace => {
if (level == 0) {
- p.putBackToken(tok);
+ p.tok_i -= 1;
return;
}
level -= 1;
},
- .Semicolon => {
+ .semicolon => {
if (level == 0) {
return;
}
},
- .Eof => {
- p.putBackToken(tok);
+ .eof => {
+ p.tok_i -= 1;
return;
},
else => {},
@@ -352,335 +503,337 @@ const Parser = struct {
}
}
- /// Eat a multiline container doc comment
- fn parseContainerDocComments(p: *Parser) !?*Node {
- if (p.eatToken(.ContainerDocComment)) |first_line| {
- while (p.eatToken(.ContainerDocComment)) |_| {}
- const node = try p.arena.allocator.create(Node.DocComment);
- node.* = .{ .first_line = first_line };
- return &node.base;
- }
- return null;
- }
-
- /// TestDecl <- KEYWORD_test STRINGLITERALSINGLE Block
- fn parseTestDecl(p: *Parser) !?*Node {
- const test_token = p.eatToken(.Keyword_test) orelse return null;
- const name_node = try p.parseStringLiteralSingle();
- const block_node = (try p.parseBlock(null)) orelse {
- try p.errors.append(p.gpa, .{ .ExpectedLBrace = .{ .token = p.tok_i } });
- return error.ParseError;
- };
-
- const test_node = try p.arena.allocator.create(Node.TestDecl);
- test_node.* = .{
- .doc_comments = null,
- .test_token = test_token,
- .name = name_node,
- .body_node = block_node,
- };
- return &test_node.base;
- }
-
- /// TopLevelComptime <- KEYWORD_comptime BlockExpr
- fn parseTopLevelComptime(p: *Parser) !?*Node {
- const tok = p.eatToken(.Keyword_comptime) orelse return null;
- const lbrace = p.eatToken(.LBrace) orelse {
- p.putBackToken(tok);
- return null;
- };
- p.putBackToken(lbrace);
- const block_node = try p.expectNode(parseBlockExpr, .{
- .ExpectedLabelOrLBrace = .{ .token = p.tok_i },
+ /// TestDecl <- KEYWORD_test STRINGLITERALSINGLE? Block
+ fn expectTestDecl(p: *Parser) !Node.Index {
+ const test_token = p.assertToken(.keyword_test);
+ const name_token = p.eatToken(.string_literal);
+ const block_node = try p.parseBlock();
+ if (block_node == 0) return p.fail(.expected_block);
+ return p.addNode(.{
+ .tag = .test_decl,
+ .main_token = test_token,
+ .data = .{
+ .lhs = name_token orelse 0,
+ .rhs = block_node,
+ },
});
+ }
- const comptime_node = try p.arena.allocator.create(Node.Comptime);
- comptime_node.* = .{
- .doc_comments = null,
- .comptime_token = tok,
- .expr = block_node,
+ fn expectTestDeclRecoverable(p: *Parser) error{OutOfMemory}!Node.Index {
+ return p.expectTestDecl() catch |err| switch (err) {
+ error.OutOfMemory => return error.OutOfMemory,
+ error.ParseError => {
+ p.findNextContainerMember();
+ return null_node;
+ },
};
- return &comptime_node.base;
}
/// TopLevelDecl
/// <- (KEYWORD_export / KEYWORD_extern STRINGLITERALSINGLE? / (KEYWORD_inline / KEYWORD_noinline))? FnProto (SEMICOLON / Block)
/// / (KEYWORD_export / KEYWORD_extern STRINGLITERALSINGLE?)? KEYWORD_threadlocal? VarDecl
/// / KEYWORD_usingnamespace Expr SEMICOLON
- fn parseTopLevelDecl(p: *Parser, doc_comments: ?*Node.DocComment, visib_token: ?TokenIndex) !?*Node {
- var lib_name: ?*Node = null;
- const extern_export_inline_token = blk: {
- if (p.eatToken(.Keyword_export)) |token| break :blk token;
- if (p.eatToken(.Keyword_extern)) |token| {
- lib_name = try p.parseStringLiteralSingle();
- break :blk token;
+ fn expectTopLevelDecl(p: *Parser) !Node.Index {
+ const extern_export_inline_token = p.nextToken();
+ var expect_fn: bool = false;
+ var expect_var_or_fn: bool = false;
+ switch (p.token_tags[extern_export_inline_token]) {
+ .keyword_extern => {
+ _ = p.eatToken(.string_literal);
+ expect_var_or_fn = true;
+ },
+ .keyword_export => expect_var_or_fn = true,
+ .keyword_inline, .keyword_noinline => expect_fn = true,
+ else => p.tok_i -= 1,
+ }
+ const fn_proto = try p.parseFnProto();
+ if (fn_proto != 0) {
+ switch (p.token_tags[p.tok_i]) {
+ .semicolon => {
+ p.tok_i += 1;
+ return fn_proto;
+ },
+ .l_brace => {
+ const body_block = try p.parseBlock();
+ assert(body_block != 0);
+ return p.addNode(.{
+ .tag = .fn_decl,
+ .main_token = p.nodes.items(.main_token)[fn_proto],
+ .data = .{
+ .lhs = fn_proto,
+ .rhs = body_block,
+ },
+ });
+ },
+ else => {
+ // Since parseBlock only return error.ParseError on
+ // a missing '}' we can assume this function was
+ // supposed to end here.
+ try p.warn(.expected_semi_or_lbrace);
+ return null_node;
+ },
}
- if (p.eatToken(.Keyword_inline)) |token| break :blk token;
- if (p.eatToken(.Keyword_noinline)) |token| break :blk token;
- break :blk null;
- };
-
- if (try p.parseFnProto(.top_level, .{
- .doc_comments = doc_comments,
- .visib_token = visib_token,
- .extern_export_inline_token = extern_export_inline_token,
- .lib_name = lib_name,
- })) |node| {
- return node;
}
-
- if (extern_export_inline_token) |token| {
- if (p.token_ids[token] == .Keyword_inline or
- p.token_ids[token] == .Keyword_noinline)
- {
- try p.errors.append(p.gpa, .{
- .ExpectedFn = .{ .token = p.tok_i },
- });
- return error.ParseError;
- }
+ if (expect_fn) {
+ try p.warn(.expected_fn);
+ return error.ParseError;
}
- const thread_local_token = p.eatToken(.Keyword_threadlocal);
-
- if (try p.parseVarDecl(.{
- .doc_comments = doc_comments,
- .visib_token = visib_token,
- .thread_local_token = thread_local_token,
- .extern_export_token = extern_export_inline_token,
- .lib_name = lib_name,
- })) |node| {
- return node;
+ const thread_local_token = p.eatToken(.keyword_threadlocal);
+ const var_decl = try p.parseVarDecl();
+ if (var_decl != 0) {
+ const semicolon_token = try p.expectToken(.semicolon);
+ return var_decl;
}
-
if (thread_local_token != null) {
- try p.errors.append(p.gpa, .{
- .ExpectedVarDecl = .{ .token = p.tok_i },
- });
- // ignore this and try again;
- return error.ParseError;
+ return p.fail(.expected_var_decl);
}
-
- if (extern_export_inline_token) |token| {
- try p.errors.append(p.gpa, .{
- .ExpectedVarDeclOrFn = .{ .token = p.tok_i },
- });
- // ignore this and try again;
- return error.ParseError;
+ if (expect_var_or_fn) {
+ return p.fail(.expected_var_decl_or_fn);
}
+ if (p.token_tags[p.tok_i] != .keyword_usingnamespace) {
+ return p.fail(.expected_pub_item);
+ }
+ return p.expectUsingNamespace();
+ }
- const use_token = p.eatToken(.Keyword_usingnamespace) orelse return null;
- const expr = try p.expectNode(parseExpr, .{
- .ExpectedExpr = .{ .token = p.tok_i },
- });
- const semicolon_token = try p.expectToken(.Semicolon);
-
- const node = try p.arena.allocator.create(Node.Use);
- node.* = .{
- .doc_comments = doc_comments orelse try p.parseAppendedDocComment(semicolon_token),
- .visib_token = visib_token,
- .use_token = use_token,
- .expr = expr,
- .semicolon_token = semicolon_token,
+ fn expectTopLevelDeclRecoverable(p: *Parser) error{OutOfMemory}!Node.Index {
+ return p.expectTopLevelDecl() catch |err| switch (err) {
+ error.OutOfMemory => return error.OutOfMemory,
+ error.ParseError => {
+ p.findNextContainerMember();
+ return null_node;
+ },
};
+ }
- return &node.base;
+ fn expectUsingNamespace(p: *Parser) !Node.Index {
+ const usingnamespace_token = p.assertToken(.keyword_usingnamespace);
+ const expr = try p.expectExpr();
+ const semicolon_token = try p.expectToken(.semicolon);
+ return p.addNode(.{
+ .tag = .@"usingnamespace",
+ .main_token = usingnamespace_token,
+ .data = .{
+ .lhs = expr,
+ .rhs = undefined,
+ },
+ });
}
- /// FnProto <- KEYWORD_fn IDENTIFIER? LPAREN ParamDeclList RPAREN ByteAlign? LinkSection? CallConv? EXCLAMATIONMARK? (Keyword_anytype / TypeExpr)
- fn parseFnProto(p: *Parser, level: enum { top_level, as_type }, fields: struct {
- doc_comments: ?*Node.DocComment = null,
- visib_token: ?TokenIndex = null,
- extern_export_inline_token: ?TokenIndex = null,
- lib_name: ?*Node = null,
- }) !?*Node {
- // TODO: Remove once extern/async/inline fn rewriting is
- var is_extern_prototype: ?void = null;
- var is_async: ?void = null;
- var is_inline: ?void = null;
- if (fields.extern_export_inline_token != null and
- p.token_ids[fields.extern_export_inline_token.?] == .Keyword_inline)
- {
- is_inline = {};
- }
- const cc_token: ?TokenIndex = blk: {
- if (p.eatToken(.Keyword_extern)) |token| {
- is_extern_prototype = {};
- break :blk token;
- }
- if (p.eatToken(.Keyword_async)) |token| {
- is_async = {};
- break :blk token;
- }
- break :blk null;
- };
- const fn_token = p.eatToken(.Keyword_fn) orelse {
- if (cc_token) |token|
- p.putBackToken(token);
- return null;
+ fn expectUsingNamespaceRecoverable(p: *Parser) error{OutOfMemory}!Node.Index {
+ return p.expectUsingNamespace() catch |err| switch (err) {
+ error.OutOfMemory => return error.OutOfMemory,
+ error.ParseError => {
+ p.findNextContainerMember();
+ return null_node;
+ },
};
- const name_token = p.eatToken(.Identifier);
- const lparen = try p.expectToken(.LParen);
+ }
+
+ /// FnProto <- KEYWORD_fn IDENTIFIER? LPAREN ParamDeclList RPAREN ByteAlign? LinkSection? CallConv? EXCLAMATIONMARK? (Keyword_anytype / TypeExpr)
+ fn parseFnProto(p: *Parser) !Node.Index {
+ const fn_token = p.eatToken(.keyword_fn) orelse return null_node;
+ _ = p.eatToken(.identifier);
const params = try p.parseParamDeclList();
- defer p.gpa.free(params);
- const var_args_token = p.eatToken(.Ellipsis3);
- const rparen = try p.expectToken(.RParen);
+ defer params.deinit(p.gpa);
const align_expr = try p.parseByteAlign();
const section_expr = try p.parseLinkSection();
const callconv_expr = try p.parseCallconv();
- const exclamation_token = p.eatToken(.Bang);
+ const bang_token = p.eatToken(.bang);
- const return_type_expr = (try p.parseAnyType()) orelse
- try p.expectNodeRecoverable(parseTypeExpr, .{
+ const return_type_expr = try p.parseTypeExpr();
+ if (return_type_expr == 0) {
// most likely the user forgot to specify the return type.
// Mark return type as invalid and try to continue.
- .ExpectedReturnType = .{ .token = p.tok_i },
- });
+ try p.warn(.expected_return_type);
+ }
- // TODO https://github.com/ziglang/zig/issues/3750
- const R = Node.FnProto.ReturnType;
- const return_type = if (return_type_expr == null)
- R{ .Invalid = rparen }
- else if (exclamation_token != null)
- R{ .InferErrorSet = return_type_expr.? }
- else
- R{ .Explicit = return_type_expr.? };
-
- const body_node: ?*Node = switch (level) {
- .top_level => blk: {
- if (p.eatToken(.Semicolon)) |_| {
- break :blk null;
- }
- const body_block = (try p.parseBlock(null)) orelse {
- // Since parseBlock only return error.ParseError on
- // a missing '}' we can assume this function was
- // supposed to end here.
- try p.errors.append(p.gpa, .{ .ExpectedSemiOrLBrace = .{ .token = p.tok_i } });
- break :blk null;
- };
- break :blk body_block;
+ if (align_expr == 0 and section_expr == 0 and callconv_expr == 0) {
+ switch (params) {
+ .zero_or_one => |param| return p.addNode(.{
+ .tag = .fn_proto_simple,
+ .main_token = fn_token,
+ .data = .{
+ .lhs = param,
+ .rhs = return_type_expr,
+ },
+ }),
+ .multi => |list| {
+ const span = try p.listToSpan(list);
+ return p.addNode(.{
+ .tag = .fn_proto_multi,
+ .main_token = fn_token,
+ .data = .{
+ .lhs = try p.addExtra(Node.SubRange{
+ .start = span.start,
+ .end = span.end,
+ }),
+ .rhs = return_type_expr,
+ },
+ });
+ },
+ }
+ }
+ switch (params) {
+ .zero_or_one => |param| return p.addNode(.{
+ .tag = .fn_proto_one,
+ .main_token = fn_token,
+ .data = .{
+ .lhs = try p.addExtra(Node.FnProtoOne{
+ .param = param,
+ .align_expr = align_expr,
+ .section_expr = section_expr,
+ .callconv_expr = callconv_expr,
+ }),
+ .rhs = return_type_expr,
+ },
+ }),
+ .multi => |list| {
+ const span = try p.listToSpan(list);
+ return p.addNode(.{
+ .tag = .fn_proto,
+ .main_token = fn_token,
+ .data = .{
+ .lhs = try p.addExtra(Node.FnProto{
+ .params_start = span.start,
+ .params_end = span.end,
+ .align_expr = align_expr,
+ .section_expr = section_expr,
+ .callconv_expr = callconv_expr,
+ }),
+ .rhs = return_type_expr,
+ },
+ });
},
- .as_type => null,
- };
-
- const fn_proto_node = try Node.FnProto.create(&p.arena.allocator, .{
- .params_len = params.len,
- .fn_token = fn_token,
- .return_type = return_type,
- }, .{
- .doc_comments = fields.doc_comments,
- .visib_token = fields.visib_token,
- .name_token = name_token,
- .var_args_token = var_args_token,
- .extern_export_inline_token = fields.extern_export_inline_token,
- .body_node = body_node,
- .lib_name = fields.lib_name,
- .align_expr = align_expr,
- .section_expr = section_expr,
- .callconv_expr = callconv_expr,
- .is_extern_prototype = is_extern_prototype,
- .is_async = is_async,
- .is_inline = is_inline,
- });
- std.mem.copy(Node.FnProto.ParamDecl, fn_proto_node.params(), params);
-
- return &fn_proto_node.base;
+ }
}
/// VarDecl <- (KEYWORD_const / KEYWORD_var) IDENTIFIER (COLON TypeExpr)? ByteAlign? LinkSection? (EQUAL Expr)? SEMICOLON
- fn parseVarDecl(p: *Parser, fields: struct {
- doc_comments: ?*Node.DocComment = null,
- visib_token: ?TokenIndex = null,
- thread_local_token: ?TokenIndex = null,
- extern_export_token: ?TokenIndex = null,
- lib_name: ?*Node = null,
- comptime_token: ?TokenIndex = null,
- }) !?*Node {
- const mut_token = p.eatToken(.Keyword_const) orelse
- p.eatToken(.Keyword_var) orelse
- return null;
+ fn parseVarDecl(p: *Parser) !Node.Index {
+ const mut_token = p.eatToken(.keyword_const) orelse
+ p.eatToken(.keyword_var) orelse
+ return null_node;
- const name_token = try p.expectToken(.Identifier);
- const type_node = if (p.eatToken(.Colon) != null)
- try p.expectNode(parseTypeExpr, .{
- .ExpectedTypeExpr = .{ .token = p.tok_i },
- })
- else
- null;
+ _ = try p.expectToken(.identifier);
+ const type_node: Node.Index = if (p.eatToken(.colon) == null) 0 else try p.expectTypeExpr();
const align_node = try p.parseByteAlign();
const section_node = try p.parseLinkSection();
- const eq_token = p.eatToken(.Equal);
- const init_node = if (eq_token != null) blk: {
- break :blk try p.expectNode(parseExpr, .{
- .ExpectedExpr = .{ .token = p.tok_i },
+ const init_node: Node.Index = if (p.eatToken(.equal) == null) 0 else try p.expectExpr();
+ if (section_node == 0) {
+ if (align_node == 0) {
+ return p.addNode(.{
+ .tag = .simple_var_decl,
+ .main_token = mut_token,
+ .data = .{
+ .lhs = type_node,
+ .rhs = init_node,
+ },
+ });
+ } else if (type_node == 0) {
+ return p.addNode(.{
+ .tag = .aligned_var_decl,
+ .main_token = mut_token,
+ .data = .{
+ .lhs = align_node,
+ .rhs = init_node,
+ },
+ });
+ } else {
+ return p.addNode(.{
+ .tag = .local_var_decl,
+ .main_token = mut_token,
+ .data = .{
+ .lhs = try p.addExtra(Node.LocalVarDecl{
+ .type_node = type_node,
+ .align_node = align_node,
+ }),
+ .rhs = init_node,
+ },
+ });
+ }
+ } else {
+ return p.addNode(.{
+ .tag = .global_var_decl,
+ .main_token = mut_token,
+ .data = .{
+ .lhs = try p.addExtra(Node.GlobalVarDecl{
+ .type_node = type_node,
+ .align_node = align_node,
+ .section_node = section_node,
+ }),
+ .rhs = init_node,
+ },
});
- } else null;
- const semicolon_token = try p.expectToken(.Semicolon);
-
- const doc_comments = fields.doc_comments orelse try p.parseAppendedDocComment(semicolon_token);
-
- const node = try Node.VarDecl.create(&p.arena.allocator, .{
- .mut_token = mut_token,
- .name_token = name_token,
- .semicolon_token = semicolon_token,
- }, .{
- .doc_comments = doc_comments,
- .visib_token = fields.visib_token,
- .thread_local_token = fields.thread_local_token,
- .eq_token = eq_token,
- .comptime_token = fields.comptime_token,
- .extern_export_token = fields.extern_export_token,
- .lib_name = fields.lib_name,
- .type_node = type_node,
- .align_node = align_node,
- .section_node = section_node,
- .init_node = init_node,
- });
- return &node.base;
+ }
}
/// ContainerField <- KEYWORD_comptime? IDENTIFIER (COLON TypeExpr ByteAlign?)? (EQUAL Expr)?
- fn parseContainerField(p: *Parser) !?*Node {
- const comptime_token = p.eatToken(.Keyword_comptime);
- const name_token = p.eatToken(.Identifier) orelse {
- if (comptime_token) |t| p.putBackToken(t);
- return null;
- };
-
- var align_expr: ?*Node = null;
- var type_expr: ?*Node = null;
- if (p.eatToken(.Colon)) |_| {
- if (p.eatToken(.Keyword_anytype) orelse p.eatToken(.Keyword_var)) |anytype_tok| {
- const node = try p.arena.allocator.create(Node.OneToken);
- node.* = .{
- .base = .{ .tag = .AnyType },
- .token = anytype_tok,
- };
- type_expr = &node.base;
- } else {
- type_expr = try p.expectNode(parseTypeExpr, .{
- .ExpectedTypeExpr = .{ .token = p.tok_i },
+ fn expectContainerField(p: *Parser) !Node.Index {
+ const comptime_token = p.eatToken(.keyword_comptime);
+ const name_token = p.assertToken(.identifier);
+
+ var align_expr: Node.Index = 0;
+ var type_expr: Node.Index = 0;
+ if (p.eatToken(.colon)) |_| {
+ if (p.eatToken(.keyword_anytype)) |anytype_tok| {
+ type_expr = try p.addNode(.{
+ .tag = .@"anytype",
+ .main_token = anytype_tok,
+ .data = .{
+ .lhs = undefined,
+ .rhs = undefined,
+ },
});
+ } else {
+ type_expr = try p.expectTypeExpr();
align_expr = try p.parseByteAlign();
}
}
- const value_expr = if (p.eatToken(.Equal)) |_|
- try p.expectNode(parseExpr, .{
- .ExpectedExpr = .{ .token = p.tok_i },
- })
- else
- null;
-
- const node = try p.arena.allocator.create(Node.ContainerField);
- node.* = .{
- .doc_comments = null,
- .comptime_token = comptime_token,
- .name_token = name_token,
- .type_expr = type_expr,
- .value_expr = value_expr,
- .align_expr = align_expr,
+ const value_expr: Node.Index = if (p.eatToken(.equal) == null) 0 else try p.expectExpr();
+
+ if (align_expr == 0) {
+ return p.addNode(.{
+ .tag = .container_field_init,
+ .main_token = name_token,
+ .data = .{
+ .lhs = type_expr,
+ .rhs = value_expr,
+ },
+ });
+ } else if (value_expr == 0) {
+ return p.addNode(.{
+ .tag = .container_field_align,
+ .main_token = name_token,
+ .data = .{
+ .lhs = type_expr,
+ .rhs = align_expr,
+ },
+ });
+ } else {
+ return p.addNode(.{
+ .tag = .container_field,
+ .main_token = name_token,
+ .data = .{
+ .lhs = type_expr,
+ .rhs = try p.addExtra(Node.ContainerField{
+ .value_expr = value_expr,
+ .align_expr = align_expr,
+ }),
+ },
+ });
+ }
+ }
+
+ fn expectContainerFieldRecoverable(p: *Parser) error{OutOfMemory}!Node.Index {
+ return p.expectContainerField() catch |err| switch (err) {
+ error.OutOfMemory => return error.OutOfMemory,
+ error.ParseError => {
+ p.findNextContainerMember();
+ return null_node;
+ },
};
- return &node.base;
}
/// Statement
@@ -694,833 +847,1632 @@ const Parser = struct {
/// / LabeledStatement
/// / SwitchExpr
/// / AssignExpr SEMICOLON
- fn parseStatement(p: *Parser) Error!?*Node {
- const comptime_token = p.eatToken(.Keyword_comptime);
+ fn parseStatement(p: *Parser) Error!Node.Index {
+ const comptime_token = p.eatToken(.keyword_comptime);
- if (try p.parseVarDecl(.{
- .comptime_token = comptime_token,
- })) |node| {
- return node;
+ const var_decl = try p.parseVarDecl();
+ if (var_decl != 0) {
+ _ = try p.expectTokenRecoverable(.semicolon);
+ return var_decl;
}
if (comptime_token) |token| {
- const block_expr = try p.expectNode(parseBlockExprStatement, .{
- .ExpectedBlockOrAssignment = .{ .token = p.tok_i },
- });
-
- const node = try p.arena.allocator.create(Node.Comptime);
- node.* = .{
- .doc_comments = null,
- .comptime_token = token,
- .expr = block_expr,
- };
- return &node.base;
- }
-
- if (p.eatToken(.Keyword_nosuspend)) |nosuspend_token| {
- const block_expr = try p.expectNode(parseBlockExprStatement, .{
- .ExpectedBlockOrAssignment = .{ .token = p.tok_i },
+ return p.addNode(.{
+ .tag = .@"comptime",
+ .main_token = token,
+ .data = .{
+ .lhs = try p.expectBlockExprStatement(),
+ .rhs = undefined,
+ },
});
-
- const node = try p.arena.allocator.create(Node.Nosuspend);
- node.* = .{
- .nosuspend_token = nosuspend_token,
- .expr = block_expr,
- };
- return &node.base;
}
- if (p.eatToken(.Keyword_suspend)) |suspend_token| {
- const semicolon = p.eatToken(.Semicolon);
-
- const body_node = if (semicolon == null) blk: {
- break :blk try p.expectNode(parseBlockExprStatement, .{
- .ExpectedBlockOrExpression = .{ .token = p.tok_i },
+ switch (p.token_tags[p.tok_i]) {
+ .keyword_nosuspend => {
+ return p.addNode(.{
+ .tag = .@"nosuspend",
+ .main_token = p.nextToken(),
+ .data = .{
+ .lhs = try p.expectBlockExprStatement(),
+ .rhs = undefined,
+ },
});
- } else null;
-
- const node = try p.arena.allocator.create(Node.Suspend);
- node.* = .{
- .suspend_token = suspend_token,
- .body = body_node,
- };
- return &node.base;
+ },
+ .keyword_suspend => {
+ const token = p.nextToken();
+ const block_expr: Node.Index = if (p.eatToken(.semicolon) != null)
+ 0
+ else
+ try p.expectBlockExprStatement();
+ return p.addNode(.{
+ .tag = .@"suspend",
+ .main_token = token,
+ .data = .{
+ .lhs = block_expr,
+ .rhs = undefined,
+ },
+ });
+ },
+ .keyword_defer => return p.addNode(.{
+ .tag = .@"defer",
+ .main_token = p.nextToken(),
+ .data = .{
+ .lhs = undefined,
+ .rhs = try p.expectBlockExprStatement(),
+ },
+ }),
+ .keyword_errdefer => return p.addNode(.{
+ .tag = .@"errdefer",
+ .main_token = p.nextToken(),
+ .data = .{
+ .lhs = try p.parsePayload(),
+ .rhs = try p.expectBlockExprStatement(),
+ },
+ }),
+ .keyword_switch => return p.expectSwitchExpr(),
+ .keyword_if => return p.expectIfStatement(),
+ else => {},
}
- const defer_token = p.eatToken(.Keyword_defer) orelse p.eatToken(.Keyword_errdefer);
- if (defer_token) |token| {
- const payload = if (p.token_ids[token] == .Keyword_errdefer)
- try p.parsePayload()
- else
- null;
- const expr_node = try p.expectNode(parseBlockExprStatement, .{
- .ExpectedBlockOrExpression = .{ .token = p.tok_i },
- });
- const node = try p.arena.allocator.create(Node.Defer);
- node.* = .{
- .defer_token = token,
- .expr = expr_node,
- .payload = payload,
- };
- return &node.base;
- }
+ const labeled_statement = try p.parseLabeledStatement();
+ if (labeled_statement != 0) return labeled_statement;
- if (try p.parseIfStatement()) |node| return node;
- if (try p.parseLabeledStatement()) |node| return node;
- if (try p.parseSwitchExpr()) |node| return node;
- if (try p.parseAssignExpr()) |node| {
- _ = try p.expectTokenRecoverable(.Semicolon);
- return node;
+ const assign_expr = try p.parseAssignExpr();
+ if (assign_expr != 0) {
+ _ = try p.expectTokenRecoverable(.semicolon);
+ return assign_expr;
}
- return null;
+ return null_node;
}
- /// IfStatement
- /// <- IfPrefix BlockExpr ( KEYWORD_else Payload? Statement )?
- /// / IfPrefix AssignExpr ( SEMICOLON / KEYWORD_else Payload? Statement )
- fn parseIfStatement(p: *Parser) !?*Node {
- const if_node = (try p.parseIfPrefix()) orelse return null;
- const if_prefix = if_node.cast(Node.If).?;
-
- const block_expr = (try p.parseBlockExpr());
- const assign_expr = if (block_expr == null)
- try p.expectNode(parseAssignExpr, .{
- .ExpectedBlockOrAssignment = .{ .token = p.tok_i },
- })
- else
- null;
-
- const semicolon = if (assign_expr != null) p.eatToken(.Semicolon) else null;
-
- const else_node = if (semicolon == null) blk: {
- const else_token = p.eatToken(.Keyword_else) orelse break :blk null;
- const payload = try p.parsePayload();
- const else_body = try p.expectNode(parseStatement, .{
- .InvalidToken = .{ .token = p.tok_i },
- });
+ fn expectStatement(p: *Parser) !Node.Index {
+ const statement = try p.parseStatement();
+ if (statement == 0) {
+ return p.fail(.expected_statement);
+ }
+ return statement;
+ }
- const node = try p.arena.allocator.create(Node.Else);
- node.* = .{
- .else_token = else_token,
- .payload = payload,
- .body = else_body,
+ /// If a parse error occurs, reports an error, but then finds the next statement
+ /// and returns that one instead. If a parse error occurs but there is no following
+ /// statement, returns 0.
+ fn expectStatementRecoverable(p: *Parser) error{OutOfMemory}!Node.Index {
+ while (true) {
+ return p.expectStatement() catch |err| switch (err) {
+ error.OutOfMemory => return error.OutOfMemory,
+ error.ParseError => {
+ p.findNextStmt(); // Try to skip to the next statement.
+ if (p.token_tags[p.tok_i] == .r_brace) return null_node;
+ continue;
+ },
};
-
- break :blk node;
- } else null;
-
- if (block_expr) |body| {
- if_prefix.body = body;
- if_prefix.@"else" = else_node;
- return if_node;
}
+ }
- if (assign_expr) |body| {
- if_prefix.body = body;
- if (semicolon != null) return if_node;
- if (else_node != null) {
- if_prefix.@"else" = else_node;
- return if_node;
+ /// IfStatement
+ /// <- IfPrefix BlockExpr ( KEYWORD_else Payload? Statement )?
+ /// / IfPrefix AssignExpr ( SEMICOLON / KEYWORD_else Payload? Statement )
+ fn expectIfStatement(p: *Parser) !Node.Index {
+ const if_token = p.assertToken(.keyword_if);
+ _ = try p.expectToken(.l_paren);
+ const condition = try p.expectExpr();
+ _ = try p.expectToken(.r_paren);
+ const then_payload = try p.parsePtrPayload();
+
+ // TODO propose to change the syntax so that semicolons are always required
+ // inside if statements, even if there is an `else`.
+ var else_required = false;
+ const then_expr = blk: {
+ const block_expr = try p.parseBlockExpr();
+ if (block_expr != 0) break :blk block_expr;
+ const assign_expr = try p.parseAssignExpr();
+ if (assign_expr == 0) {
+ return p.fail(.expected_block_or_assignment);
}
- try p.errors.append(p.gpa, .{
- .ExpectedSemiOrElse = .{ .token = p.tok_i },
+ if (p.eatToken(.semicolon)) |_| {
+ return p.addNode(.{
+ .tag = .if_simple,
+ .main_token = if_token,
+ .data = .{
+ .lhs = condition,
+ .rhs = assign_expr,
+ },
+ });
+ }
+ else_required = true;
+ break :blk assign_expr;
+ };
+ const else_token = p.eatToken(.keyword_else) orelse {
+ if (else_required) {
+ try p.warn(.expected_semi_or_else);
+ }
+ return p.addNode(.{
+ .tag = .if_simple,
+ .main_token = if_token,
+ .data = .{
+ .lhs = condition,
+ .rhs = then_expr,
+ },
});
- }
-
- return if_node;
+ };
+ const else_payload = try p.parsePayload();
+ const else_expr = try p.expectStatement();
+ return p.addNode(.{
+ .tag = .@"if",
+ .main_token = if_token,
+ .data = .{
+ .lhs = condition,
+ .rhs = try p.addExtra(Node.If{
+ .then_expr = then_expr,
+ .else_expr = else_expr,
+ }),
+ },
+ });
}
/// LabeledStatement <- BlockLabel? (Block / LoopStatement)
- fn parseLabeledStatement(p: *Parser) !?*Node {
- var colon: TokenIndex = undefined;
- const label_token = p.parseBlockLabel(&colon);
-
- if (try p.parseBlock(label_token)) |node| return node;
-
- if (try p.parseLoopStatement()) |node| {
- if (node.cast(Node.For)) |for_node| {
- for_node.label = label_token;
- } else if (node.cast(Node.While)) |while_node| {
- while_node.label = label_token;
- } else unreachable;
- return node;
- }
+ fn parseLabeledStatement(p: *Parser) !Node.Index {
+ const label_token = p.parseBlockLabel();
+ const block = try p.parseBlock();
+ if (block != 0) return block;
- if (label_token != null) {
- try p.errors.append(p.gpa, .{
- .ExpectedLabelable = .{ .token = p.tok_i },
- });
- return error.ParseError;
+ const loop_stmt = try p.parseLoopStatement();
+ if (loop_stmt != 0) return loop_stmt;
+
+ if (label_token != 0) {
+ return p.fail(.expected_labelable);
}
- return null;
+ return null_node;
}
/// LoopStatement <- KEYWORD_inline? (ForStatement / WhileStatement)
- fn parseLoopStatement(p: *Parser) !?*Node {
- const inline_token = p.eatToken(.Keyword_inline);
+ fn parseLoopStatement(p: *Parser) !Node.Index {
+ const inline_token = p.eatToken(.keyword_inline);
- if (try p.parseForStatement()) |node| {
- node.cast(Node.For).?.inline_token = inline_token;
- return node;
- }
+ const for_statement = try p.parseForStatement();
+ if (for_statement != 0) return for_statement;
- if (try p.parseWhileStatement()) |node| {
- node.cast(Node.While).?.inline_token = inline_token;
- return node;
- }
- if (inline_token == null) return null;
+ const while_statement = try p.parseWhileStatement();
+ if (while_statement != 0) return while_statement;
+
+ if (inline_token == null) return null_node;
// If we've seen "inline", there should have been a "for" or "while"
- try p.errors.append(p.gpa, .{
- .ExpectedInlinable = .{ .token = p.tok_i },
- });
- return error.ParseError;
+ return p.fail(.expected_inlinable);
}
+ /// ForPrefix <- KEYWORD_for LPAREN Expr RPAREN PtrIndexPayload
/// ForStatement
/// <- ForPrefix BlockExpr ( KEYWORD_else Statement )?
/// / ForPrefix AssignExpr ( SEMICOLON / KEYWORD_else Statement )
- fn parseForStatement(p: *Parser) !?*Node {
- const node = (try p.parseForPrefix()) orelse return null;
- const for_prefix = node.cast(Node.For).?;
-
- if (try p.parseBlockExpr()) |block_expr_node| {
- for_prefix.body = block_expr_node;
-
- if (p.eatToken(.Keyword_else)) |else_token| {
- const statement_node = try p.expectNode(parseStatement, .{
- .InvalidToken = .{ .token = p.tok_i },
+ fn parseForStatement(p: *Parser) !Node.Index {
+ const for_token = p.eatToken(.keyword_for) orelse return null_node;
+ _ = try p.expectToken(.l_paren);
+ const array_expr = try p.expectExpr();
+ _ = try p.expectToken(.r_paren);
+ const found_payload = try p.parsePtrIndexPayload();
+ if (found_payload == 0) try p.warn(.expected_loop_payload);
+
+ // TODO propose to change the syntax so that semicolons are always required
+ // inside while statements, even if there is an `else`.
+ var else_required = false;
+ const then_expr = blk: {
+ const block_expr = try p.parseBlockExpr();
+ if (block_expr != 0) break :blk block_expr;
+ const assign_expr = try p.parseAssignExpr();
+ if (assign_expr == 0) {
+ return p.fail(.expected_block_or_assignment);
+ }
+ if (p.eatToken(.semicolon)) |_| {
+ return p.addNode(.{
+ .tag = .for_simple,
+ .main_token = for_token,
+ .data = .{
+ .lhs = array_expr,
+ .rhs = assign_expr,
+ },
});
-
- const else_node = try p.arena.allocator.create(Node.Else);
- else_node.* = .{
- .else_token = else_token,
- .payload = null,
- .body = statement_node,
- };
- for_prefix.@"else" = else_node;
-
- return node;
}
-
- return node;
- }
-
- for_prefix.body = try p.expectNode(parseAssignExpr, .{
- .ExpectedBlockOrAssignment = .{ .token = p.tok_i },
- });
-
- if (p.eatToken(.Semicolon) != null) return node;
-
- if (p.eatToken(.Keyword_else)) |else_token| {
- const statement_node = try p.expectNode(parseStatement, .{
- .ExpectedStatement = .{ .token = p.tok_i },
+ else_required = true;
+ break :blk assign_expr;
+ };
+ const else_token = p.eatToken(.keyword_else) orelse {
+ if (else_required) {
+ try p.warn(.expected_semi_or_else);
+ }
+ return p.addNode(.{
+ .tag = .for_simple,
+ .main_token = for_token,
+ .data = .{
+ .lhs = array_expr,
+ .rhs = then_expr,
+ },
});
-
- const else_node = try p.arena.allocator.create(Node.Else);
- else_node.* = .{
- .else_token = else_token,
- .payload = null,
- .body = statement_node,
- };
- for_prefix.@"else" = else_node;
- return node;
- }
-
- try p.errors.append(p.gpa, .{
- .ExpectedSemiOrElse = .{ .token = p.tok_i },
+ };
+ return p.addNode(.{
+ .tag = .@"for",
+ .main_token = for_token,
+ .data = .{
+ .lhs = array_expr,
+ .rhs = try p.addExtra(Node.If{
+ .then_expr = then_expr,
+ .else_expr = try p.expectStatement(),
+ }),
+ },
});
-
- return node;
}
+ /// WhilePrefix <- KEYWORD_while LPAREN Expr RPAREN PtrPayload? WhileContinueExpr?
/// WhileStatement
/// <- WhilePrefix BlockExpr ( KEYWORD_else Payload? Statement )?
/// / WhilePrefix AssignExpr ( SEMICOLON / KEYWORD_else Payload? Statement )
- fn parseWhileStatement(p: *Parser) !?*Node {
- const node = (try p.parseWhilePrefix()) orelse return null;
- const while_prefix = node.cast(Node.While).?;
-
- if (try p.parseBlockExpr()) |block_expr_node| {
- while_prefix.body = block_expr_node;
-
- if (p.eatToken(.Keyword_else)) |else_token| {
- const payload = try p.parsePayload();
-
- const statement_node = try p.expectNode(parseStatement, .{
- .InvalidToken = .{ .token = p.tok_i },
+ fn parseWhileStatement(p: *Parser) !Node.Index {
+ const while_token = p.eatToken(.keyword_while) orelse return null_node;
+ _ = try p.expectToken(.l_paren);
+ const condition = try p.expectExpr();
+ _ = try p.expectToken(.r_paren);
+ const then_payload = try p.parsePtrPayload();
+ const cont_expr = try p.parseWhileContinueExpr();
+
+ // TODO propose to change the syntax so that semicolons are always required
+ // inside while statements, even if there is an `else`.
+ var else_required = false;
+ const then_expr = blk: {
+ const block_expr = try p.parseBlockExpr();
+ if (block_expr != 0) break :blk block_expr;
+ const assign_expr = try p.parseAssignExpr();
+ if (assign_expr == 0) {
+ return p.fail(.expected_block_or_assignment);
+ }
+ if (p.eatToken(.semicolon)) |_| {
+ if (cont_expr == 0) {
+ return p.addNode(.{
+ .tag = .while_simple,
+ .main_token = while_token,
+ .data = .{
+ .lhs = condition,
+ .rhs = assign_expr,
+ },
+ });
+ } else {
+ return p.addNode(.{
+ .tag = .while_cont,
+ .main_token = while_token,
+ .data = .{
+ .lhs = condition,
+ .rhs = try p.addExtra(Node.WhileCont{
+ .cont_expr = cont_expr,
+ .then_expr = assign_expr,
+ }),
+ },
+ });
+ }
+ }
+ else_required = true;
+ break :blk assign_expr;
+ };
+ const else_token = p.eatToken(.keyword_else) orelse {
+ if (else_required) {
+ try p.warn(.expected_semi_or_else);
+ }
+ if (cont_expr == 0) {
+ return p.addNode(.{
+ .tag = .while_simple,
+ .main_token = while_token,
+ .data = .{
+ .lhs = condition,
+ .rhs = then_expr,
+ },
+ });
+ } else {
+ return p.addNode(.{
+ .tag = .while_cont,
+ .main_token = while_token,
+ .data = .{
+ .lhs = condition,
+ .rhs = try p.addExtra(Node.WhileCont{
+ .cont_expr = cont_expr,
+ .then_expr = then_expr,
+ }),
+ },
});
-
- const else_node = try p.arena.allocator.create(Node.Else);
- else_node.* = .{
- .else_token = else_token,
- .payload = payload,
- .body = statement_node,
- };
- while_prefix.@"else" = else_node;
-
- return node;
}
-
- return node;
- }
-
- while_prefix.body = try p.expectNode(parseAssignExpr, .{
- .ExpectedBlockOrAssignment = .{ .token = p.tok_i },
- });
-
- if (p.eatToken(.Semicolon) != null) return node;
-
- if (p.eatToken(.Keyword_else)) |else_token| {
- const payload = try p.parsePayload();
-
- const statement_node = try p.expectNode(parseStatement, .{
- .ExpectedStatement = .{ .token = p.tok_i },
- });
-
- const else_node = try p.arena.allocator.create(Node.Else);
- else_node.* = .{
- .else_token = else_token,
- .payload = payload,
- .body = statement_node,
- };
- while_prefix.@"else" = else_node;
- return node;
- }
-
- try p.errors.append(p.gpa, .{
- .ExpectedSemiOrElse = .{ .token = p.tok_i },
+ };
+ const else_payload = try p.parsePayload();
+ const else_expr = try p.expectStatement();
+ return p.addNode(.{
+ .tag = .@"while",
+ .main_token = while_token,
+ .data = .{
+ .lhs = condition,
+ .rhs = try p.addExtra(Node.While{
+ .cont_expr = cont_expr,
+ .then_expr = then_expr,
+ .else_expr = else_expr,
+ }),
+ },
});
-
- return node;
}
/// BlockExprStatement
/// <- BlockExpr
/// / AssignExpr SEMICOLON
- fn parseBlockExprStatement(p: *Parser) !?*Node {
- if (try p.parseBlockExpr()) |node| return node;
- if (try p.parseAssignExpr()) |node| {
- _ = try p.expectTokenRecoverable(.Semicolon);
- return node;
+ fn parseBlockExprStatement(p: *Parser) !Node.Index {
+ const block_expr = try p.parseBlockExpr();
+ if (block_expr != 0) {
+ return block_expr;
}
- return null;
+ const assign_expr = try p.parseAssignExpr();
+ if (assign_expr != 0) {
+ _ = try p.expectTokenRecoverable(.semicolon);
+ return assign_expr;
+ }
+ return null_node;
}
- /// BlockExpr <- BlockLabel? Block
- fn parseBlockExpr(p: *Parser) Error!?*Node {
- var colon: TokenIndex = undefined;
- const label_token = p.parseBlockLabel(&colon);
- const block_node = (try p.parseBlock(label_token)) orelse {
- if (label_token) |label| {
- p.putBackToken(label + 1); // ":"
- p.putBackToken(label); // IDENTIFIER
- }
- return null;
- };
- return block_node;
+ fn expectBlockExprStatement(p: *Parser) !Node.Index {
+ const node = try p.parseBlockExprStatement();
+ if (node == 0) {
+ return p.fail(.expected_block_or_expr);
+ }
+ return node;
}
- /// AssignExpr <- Expr (AssignOp Expr)?
- fn parseAssignExpr(p: *Parser) !?*Node {
- return p.parseBinOpExpr(parseAssignOp, parseExpr, .Once);
+ /// BlockExpr <- BlockLabel? Block
+ fn parseBlockExpr(p: *Parser) Error!Node.Index {
+ switch (p.token_tags[p.tok_i]) {
+ .identifier => {
+ if (p.token_tags[p.tok_i + 1] == .colon and
+ p.token_tags[p.tok_i + 2] == .l_brace)
+ {
+ p.tok_i += 2;
+ return p.parseBlock();
+ } else {
+ return null_node;
+ }
+ },
+ .l_brace => return p.parseBlock(),
+ else => return null_node,
+ }
}
- /// Expr <- BoolOrExpr
- fn parseExpr(p: *Parser) Error!?*Node {
- return p.parsePrefixOpExpr(parseTry, parseBoolOrExpr);
+ /// AssignExpr <- Expr (AssignOp Expr)?
+ /// AssignOp
+ /// <- ASTERISKEQUAL
+ /// / SLASHEQUAL
+ /// / PERCENTEQUAL
+ /// / PLUSEQUAL
+ /// / MINUSEQUAL
+ /// / LARROW2EQUAL
+ /// / RARROW2EQUAL
+ /// / AMPERSANDEQUAL
+ /// / CARETEQUAL
+ /// / PIPEEQUAL
+ /// / ASTERISKPERCENTEQUAL
+ /// / PLUSPERCENTEQUAL
+ /// / MINUSPERCENTEQUAL
+ /// / EQUAL
+ fn parseAssignExpr(p: *Parser) !Node.Index {
+ const expr = try p.parseExpr();
+ if (expr == 0) return null_node;
+
+ const tag: Node.Tag = switch (p.token_tags[p.tok_i]) {
+ .asterisk_equal => .assign_mul,
+ .slash_equal => .assign_div,
+ .percent_equal => .assign_mod,
+ .plus_equal => .assign_add,
+ .minus_equal => .assign_sub,
+ .angle_bracket_angle_bracket_left_equal => .assign_bit_shift_left,
+ .angle_bracket_angle_bracket_right_equal => .assign_bit_shift_right,
+ .ampersand_equal => .assign_bit_and,
+ .caret_equal => .assign_bit_xor,
+ .pipe_equal => .assign_bit_or,
+ .asterisk_percent_equal => .assign_mul_wrap,
+ .plus_percent_equal => .assign_add_wrap,
+ .minus_percent_equal => .assign_sub_wrap,
+ .equal => .assign,
+ else => return expr,
+ };
+ return p.addNode(.{
+ .tag = tag,
+ .main_token = p.nextToken(),
+ .data = .{
+ .lhs = expr,
+ .rhs = try p.expectExpr(),
+ },
+ });
}
- /// BoolOrExpr <- BoolAndExpr (KEYWORD_or BoolAndExpr)*
- fn parseBoolOrExpr(p: *Parser) !?*Node {
- return p.parseBinOpExpr(
- SimpleBinOpParseFn(.Keyword_or, .BoolOr),
- parseBoolAndExpr,
- .Infinitely,
- );
+ fn expectAssignExpr(p: *Parser) !Node.Index {
+ const expr = try p.parseAssignExpr();
+ if (expr == 0) {
+ return p.fail(.expected_expr_or_assignment);
+ }
+ return expr;
}
- /// BoolAndExpr <- CompareExpr (KEYWORD_and CompareExpr)*
- fn parseBoolAndExpr(p: *Parser) !?*Node {
- return p.parseBinOpExpr(
- SimpleBinOpParseFn(.Keyword_and, .BoolAnd),
- parseCompareExpr,
- .Infinitely,
- );
+ /// Expr <- BoolOrExpr
+ fn parseExpr(p: *Parser) Error!Node.Index {
+ return p.parseBoolOrExpr();
}
- /// CompareExpr <- BitwiseExpr (CompareOp BitwiseExpr)?
- fn parseCompareExpr(p: *Parser) !?*Node {
- return p.parseBinOpExpr(parseCompareOp, parseBitwiseExpr, .Once);
+ fn expectExpr(p: *Parser) Error!Node.Index {
+ const node = try p.parseExpr();
+ if (node == 0) {
+ return p.fail(.expected_expr);
+ } else {
+ return node;
+ }
}
- /// BitwiseExpr <- BitShiftExpr (BitwiseOp BitShiftExpr)*
- fn parseBitwiseExpr(p: *Parser) !?*Node {
- return p.parseBinOpExpr(parseBitwiseOp, parseBitShiftExpr, .Infinitely);
- }
+ /// BoolOrExpr <- BoolAndExpr (KEYWORD_or BoolAndExpr)*
+ fn parseBoolOrExpr(p: *Parser) Error!Node.Index {
+ var res = try p.parseBoolAndExpr();
+ if (res == 0) return null_node;
- /// BitShiftExpr <- AdditionExpr (BitShiftOp AdditionExpr)*
- fn parseBitShiftExpr(p: *Parser) !?*Node {
- return p.parseBinOpExpr(parseBitShiftOp, parseAdditionExpr, .Infinitely);
+ while (true) {
+ switch (p.token_tags[p.tok_i]) {
+ .keyword_or => {
+ const or_token = p.nextToken();
+ const rhs = try p.parseBoolAndExpr();
+ if (rhs == 0) {
+ return p.fail(.invalid_token);
+ }
+ res = try p.addNode(.{
+ .tag = .bool_or,
+ .main_token = or_token,
+ .data = .{
+ .lhs = res,
+ .rhs = rhs,
+ },
+ });
+ },
+ else => return res,
+ }
+ }
}
- /// AdditionExpr <- MultiplyExpr (AdditionOp MultiplyExpr)*
- fn parseAdditionExpr(p: *Parser) !?*Node {
- return p.parseBinOpExpr(parseAdditionOp, parseMultiplyExpr, .Infinitely);
- }
+ /// BoolAndExpr <- CompareExpr (KEYWORD_and CompareExpr)*
+ fn parseBoolAndExpr(p: *Parser) !Node.Index {
+ var res = try p.parseCompareExpr();
+ if (res == 0) return null_node;
- /// MultiplyExpr <- PrefixExpr (MultiplyOp PrefixExpr)*
- fn parseMultiplyExpr(p: *Parser) !?*Node {
- return p.parseBinOpExpr(parseMultiplyOp, parsePrefixExpr, .Infinitely);
+ while (true) {
+ switch (p.token_tags[p.tok_i]) {
+ .keyword_and => {
+ const and_token = p.nextToken();
+ const rhs = try p.parseCompareExpr();
+ if (rhs == 0) {
+ return p.fail(.invalid_token);
+ }
+ res = try p.addNode(.{
+ .tag = .bool_and,
+ .main_token = and_token,
+ .data = .{
+ .lhs = res,
+ .rhs = rhs,
+ },
+ });
+ },
+ .invalid_ampersands => {
+ try p.warn(.invalid_and);
+ p.tok_i += 1;
+ return p.parseCompareExpr();
+ },
+ else => return res,
+ }
+ }
}
- /// PrefixExpr <- PrefixOp* PrimaryExpr
- fn parsePrefixExpr(p: *Parser) !?*Node {
- return p.parsePrefixOpExpr(parsePrefixOp, parsePrimaryExpr);
+ /// CompareExpr <- BitwiseExpr (CompareOp BitwiseExpr)?
+ /// CompareOp
+ /// <- EQUALEQUAL
+ /// / EXCLAMATIONMARKEQUAL
+ /// / LARROW
+ /// / RARROW
+ /// / LARROWEQUAL
+ /// / RARROWEQUAL
+ fn parseCompareExpr(p: *Parser) !Node.Index {
+ const expr = try p.parseBitwiseExpr();
+ if (expr == 0) return null_node;
+
+ const tag: Node.Tag = switch (p.token_tags[p.tok_i]) {
+ .equal_equal => .equal_equal,
+ .bang_equal => .bang_equal,
+ .angle_bracket_left => .less_than,
+ .angle_bracket_right => .greater_than,
+ .angle_bracket_left_equal => .less_or_equal,
+ .angle_bracket_right_equal => .greater_or_equal,
+ else => return expr,
+ };
+ return p.addNode(.{
+ .tag = tag,
+ .main_token = p.nextToken(),
+ .data = .{
+ .lhs = expr,
+ .rhs = try p.expectBitwiseExpr(),
+ },
+ });
}
- /// PrimaryExpr
- /// <- AsmExpr
- /// / IfExpr
- /// / KEYWORD_break BreakLabel? Expr?
- /// / KEYWORD_comptime Expr
- /// / KEYWORD_nosuspend Expr
- /// / KEYWORD_continue BreakLabel?
- /// / KEYWORD_resume Expr
- /// / KEYWORD_return Expr?
- /// / BlockLabel? LoopExpr
- /// / Block
- /// / CurlySuffixExpr
- fn parsePrimaryExpr(p: *Parser) !?*Node {
- if (try p.parseAsmExpr()) |node| return node;
- if (try p.parseIfExpr()) |node| return node;
-
- if (p.eatToken(.Keyword_break)) |token| {
- const label = try p.parseBreakLabel();
- const expr_node = try p.parseExpr();
- const node = try Node.ControlFlowExpression.create(&p.arena.allocator, .{
- .tag = .Break,
- .ltoken = token,
- }, .{
- .label = label,
- .rhs = expr_node,
- });
- return &node.base;
- }
+ /// BitwiseExpr <- BitShiftExpr (BitwiseOp BitShiftExpr)*
+ /// BitwiseOp
+ /// <- AMPERSAND
+ /// / CARET
+ /// / PIPE
+ /// / KEYWORD_orelse
+ /// / KEYWORD_catch Payload?
+ fn parseBitwiseExpr(p: *Parser) !Node.Index {
+ var res = try p.parseBitShiftExpr();
+ if (res == 0) return null_node;
- if (p.eatToken(.Keyword_comptime)) |token| {
- const expr_node = try p.expectNode(parseExpr, .{
- .ExpectedExpr = .{ .token = p.tok_i },
- });
- const node = try p.arena.allocator.create(Node.Comptime);
- node.* = .{
- .doc_comments = null,
- .comptime_token = token,
- .expr = expr_node,
+ while (true) {
+ const tag: Node.Tag = switch (p.token_tags[p.tok_i]) {
+ .ampersand => .bit_and,
+ .caret => .bit_xor,
+ .pipe => .bit_or,
+ .keyword_orelse => .@"orelse",
+ .keyword_catch => {
+ const catch_token = p.nextToken();
+ _ = try p.parsePayload();
+ const rhs = try p.parseBitShiftExpr();
+ if (rhs == 0) {
+ return p.fail(.invalid_token);
+ }
+ res = try p.addNode(.{
+ .tag = .@"catch",
+ .main_token = catch_token,
+ .data = .{
+ .lhs = res,
+ .rhs = rhs,
+ },
+ });
+ continue;
+ },
+ else => return res,
};
- return &node.base;
- }
-
- if (p.eatToken(.Keyword_nosuspend)) |token| {
- const expr_node = try p.expectNode(parseExpr, .{
- .ExpectedExpr = .{ .token = p.tok_i },
+ res = try p.addNode(.{
+ .tag = tag,
+ .main_token = p.nextToken(),
+ .data = .{
+ .lhs = res,
+ .rhs = try p.expectBitShiftExpr(),
+ },
});
- const node = try p.arena.allocator.create(Node.Nosuspend);
- node.* = .{
- .nosuspend_token = token,
- .expr = expr_node,
- };
- return &node.base;
}
+ }
- if (p.eatToken(.Keyword_continue)) |token| {
- const label = try p.parseBreakLabel();
- const node = try Node.ControlFlowExpression.create(&p.arena.allocator, .{
- .tag = .Continue,
- .ltoken = token,
- }, .{
- .label = label,
- .rhs = null,
- });
- return &node.base;
+ fn expectBitwiseExpr(p: *Parser) Error!Node.Index {
+ const node = try p.parseBitwiseExpr();
+ if (node == 0) {
+ return p.fail(.invalid_token);
+ } else {
+ return node;
}
+ }
- if (p.eatToken(.Keyword_resume)) |token| {
- const expr_node = try p.expectNode(parseExpr, .{
- .ExpectedExpr = .{ .token = p.tok_i },
- });
- const node = try p.arena.allocator.create(Node.SimplePrefixOp);
- node.* = .{
- .base = .{ .tag = .Resume },
- .op_token = token,
- .rhs = expr_node,
- };
- return &node.base;
- }
+ /// BitShiftExpr <- AdditionExpr (BitShiftOp AdditionExpr)*
+ /// BitShiftOp
+ /// <- LARROW2
+ /// / RARROW2
+ fn parseBitShiftExpr(p: *Parser) Error!Node.Index {
+ var res = try p.parseAdditionExpr();
+ if (res == 0) return null_node;
- if (p.eatToken(.Keyword_return)) |token| {
- const expr_node = try p.parseExpr();
- const node = try Node.ControlFlowExpression.create(&p.arena.allocator, .{
- .tag = .Return,
- .ltoken = token,
- }, .{
- .rhs = expr_node,
+ while (true) {
+ const tag: Node.Tag = switch (p.token_tags[p.tok_i]) {
+ .angle_bracket_angle_bracket_left => .bit_shift_left,
+ .angle_bracket_angle_bracket_right => .bit_shift_right,
+ else => return res,
+ };
+ res = try p.addNode(.{
+ .tag = tag,
+ .main_token = p.nextToken(),
+ .data = .{
+ .lhs = res,
+ .rhs = try p.expectAdditionExpr(),
+ },
});
- return &node.base;
}
+ }
- var colon: TokenIndex = undefined;
- const label = p.parseBlockLabel(&colon);
- if (try p.parseLoopExpr()) |node| {
- if (node.cast(Node.For)) |for_node| {
- for_node.label = label;
- } else if (node.cast(Node.While)) |while_node| {
- while_node.label = label;
- } else unreachable;
+ fn expectBitShiftExpr(p: *Parser) Error!Node.Index {
+ const node = try p.parseBitShiftExpr();
+ if (node == 0) {
+ return p.fail(.invalid_token);
+ } else {
return node;
}
- if (label) |token| {
- p.putBackToken(token + 1); // ":"
- p.putBackToken(token); // IDENTIFIER
- }
-
- if (try p.parseBlock(null)) |node| return node;
- if (try p.parseCurlySuffixExpr()) |node| return node;
+ }
- return null;
- }
+ /// AdditionExpr <- MultiplyExpr (AdditionOp MultiplyExpr)*
+ /// AdditionOp
+ /// <- PLUS
+ /// / MINUS
+ /// / PLUS2
+ /// / PLUSPERCENT
+ /// / MINUSPERCENT
+ fn parseAdditionExpr(p: *Parser) Error!Node.Index {
+ var res = try p.parseMultiplyExpr();
+ if (res == 0) return null_node;
- /// IfExpr <- IfPrefix Expr (KEYWORD_else Payload? Expr)?
- fn parseIfExpr(p: *Parser) !?*Node {
- return p.parseIf(parseExpr);
+ while (true) {
+ const tag: Node.Tag = switch (p.token_tags[p.tok_i]) {
+ .plus => .add,
+ .minus => .sub,
+ .plus_plus => .array_cat,
+ .plus_percent => .add_wrap,
+ .minus_percent => .sub_wrap,
+ else => return res,
+ };
+ res = try p.addNode(.{
+ .tag = tag,
+ .main_token = p.nextToken(),
+ .data = .{
+ .lhs = res,
+ .rhs = try p.expectMultiplyExpr(),
+ },
+ });
+ }
}
- /// Block <- LBRACE Statement* RBRACE
- fn parseBlock(p: *Parser, label_token: ?TokenIndex) !?*Node {
- const lbrace = p.eatToken(.LBrace) orelse return null;
+ fn expectAdditionExpr(p: *Parser) Error!Node.Index {
+ const node = try p.parseAdditionExpr();
+ if (node == 0) {
+ return p.fail(.invalid_token);
+ }
+ return node;
+ }
- var statements = std.ArrayList(*Node).init(p.gpa);
- defer statements.deinit();
+ /// MultiplyExpr <- PrefixExpr (MultiplyOp PrefixExpr)*
+ /// MultiplyOp
+ /// <- PIPE2
+ /// / ASTERISK
+ /// / SLASH
+ /// / PERCENT
+ /// / ASTERISK2
+ /// / ASTERISKPERCENT
+ fn parseMultiplyExpr(p: *Parser) Error!Node.Index {
+ var res = try p.parsePrefixExpr();
+ if (res == 0) return null_node;
while (true) {
- const statement = (p.parseStatement() catch |err| switch (err) {
- error.OutOfMemory => return error.OutOfMemory,
- error.ParseError => {
- // try to skip to the next statement
- p.findNextStmt();
- continue;
+ const tag: Node.Tag = switch (p.token_tags[p.tok_i]) {
+ .pipe_pipe => .merge_error_sets,
+ .asterisk => .mul,
+ .slash => .div,
+ .percent => .mod,
+ .asterisk_asterisk => .array_mult,
+ .asterisk_percent => .mul_wrap,
+ else => return res,
+ };
+ res = try p.addNode(.{
+ .tag = tag,
+ .main_token = p.nextToken(),
+ .data = .{
+ .lhs = res,
+ .rhs = try p.expectPrefixExpr(),
},
- }) orelse break;
- try statements.append(statement);
+ });
}
+ }
- const rbrace = try p.expectToken(.RBrace);
-
- const statements_len = @intCast(NodeIndex, statements.items.len);
-
- if (label_token) |label| {
- const block_node = try Node.LabeledBlock.alloc(&p.arena.allocator, statements_len);
- block_node.* = .{
- .label = label,
- .lbrace = lbrace,
- .statements_len = statements_len,
- .rbrace = rbrace,
- };
- std.mem.copy(*Node, block_node.statements(), statements.items);
- return &block_node.base;
- } else {
- const block_node = try Node.Block.alloc(&p.arena.allocator, statements_len);
- block_node.* = .{
- .lbrace = lbrace,
- .statements_len = statements_len,
- .rbrace = rbrace,
- };
- std.mem.copy(*Node, block_node.statements(), statements.items);
- return &block_node.base;
+ fn expectMultiplyExpr(p: *Parser) Error!Node.Index {
+ const node = try p.parseMultiplyExpr();
+ if (node == 0) {
+ return p.fail(.invalid_token);
}
+ return node;
}
- /// LoopExpr <- KEYWORD_inline? (ForExpr / WhileExpr)
- fn parseLoopExpr(p: *Parser) !?*Node {
- const inline_token = p.eatToken(.Keyword_inline);
+ /// PrefixExpr <- PrefixOp* PrimaryExpr
+ /// PrefixOp
+ /// <- EXCLAMATIONMARK
+ /// / MINUS
+ /// / TILDE
+ /// / MINUSPERCENT
+ /// / AMPERSAND
+ /// / KEYWORD_try
+ /// / KEYWORD_await
+ fn parsePrefixExpr(p: *Parser) Error!Node.Index {
+ const tag: Node.Tag = switch (p.token_tags[p.tok_i]) {
+ .bang => .bool_not,
+ .minus => .negation,
+ .tilde => .bit_not,
+ .minus_percent => .negation_wrap,
+ .ampersand => .address_of,
+ .keyword_try => .@"try",
+ .keyword_await => .@"await",
+ else => return p.parsePrimaryExpr(),
+ };
+ return p.addNode(.{
+ .tag = tag,
+ .main_token = p.nextToken(),
+ .data = .{
+ .lhs = try p.expectPrefixExpr(),
+ .rhs = undefined,
+ },
+ });
+ }
- if (try p.parseForExpr()) |node| {
- node.cast(Node.For).?.inline_token = inline_token;
- return node;
+ fn expectPrefixExpr(p: *Parser) Error!Node.Index {
+ const node = try p.parsePrefixExpr();
+ if (node == 0) {
+ return p.fail(.expected_prefix_expr);
}
+ return node;
+ }
- if (try p.parseWhileExpr()) |node| {
- node.cast(Node.While).?.inline_token = inline_token;
- return node;
+ /// TypeExpr <- PrefixTypeOp* ErrorUnionExpr
+ /// PrefixTypeOp
+ /// <- QUESTIONMARK
+ /// / KEYWORD_anyframe MINUSRARROW
+ /// / ArrayTypeStart (ByteAlign / KEYWORD_const / KEYWORD_volatile / KEYWORD_allowzero)*
+ /// / PtrTypeStart (KEYWORD_align LPAREN Expr (COLON INTEGER COLON INTEGER)? RPAREN / KEYWORD_const / KEYWORD_volatile / KEYWORD_allowzero)*
+ /// PtrTypeStart
+ /// <- ASTERISK
+ /// / ASTERISK2
+ /// / LBRACKET ASTERISK (LETTERC / COLON Expr)? RBRACKET
+ /// ArrayTypeStart <- LBRACKET Expr? (COLON Expr)? RBRACKET
+ fn parseTypeExpr(p: *Parser) Error!Node.Index {
+ switch (p.token_tags[p.tok_i]) {
+ .question_mark => return p.addNode(.{
+ .tag = .optional_type,
+ .main_token = p.nextToken(),
+ .data = .{
+ .lhs = try p.expectTypeExpr(),
+ .rhs = undefined,
+ },
+ }),
+ .keyword_anyframe => switch (p.token_tags[p.tok_i + 1]) {
+ .arrow => return p.addNode(.{
+ .tag = .anyframe_type,
+ .main_token = p.nextToken(),
+ .data = .{
+ .lhs = p.nextToken(),
+ .rhs = try p.expectTypeExpr(),
+ },
+ }),
+ else => return p.parseErrorUnionExpr(),
+ },
+ .asterisk => {
+ const asterisk = p.nextToken();
+ const mods = try p.parsePtrModifiers();
+ const elem_type = try p.expectTypeExpr();
+ if (mods.bit_range_start == 0) {
+ return p.addNode(.{
+ .tag = .ptr_type_aligned,
+ .main_token = asterisk,
+ .data = .{
+ .lhs = mods.align_node,
+ .rhs = elem_type,
+ },
+ });
+ } else {
+ return p.addNode(.{
+ .tag = .ptr_type_bit_range,
+ .main_token = asterisk,
+ .data = .{
+ .lhs = try p.addExtra(Node.PtrTypeBitRange{
+ .sentinel = 0,
+ .align_node = mods.align_node,
+ .bit_range_start = mods.bit_range_start,
+ .bit_range_end = mods.bit_range_end,
+ }),
+ .rhs = elem_type,
+ },
+ });
+ }
+ },
+ .asterisk_asterisk => {
+ const asterisk = p.nextToken();
+ const mods = try p.parsePtrModifiers();
+ const elem_type = try p.expectTypeExpr();
+ const inner: Node.Index = inner: {
+ if (mods.bit_range_start == 0) {
+ break :inner try p.addNode(.{
+ .tag = .ptr_type_aligned,
+ .main_token = asterisk,
+ .data = .{
+ .lhs = mods.align_node,
+ .rhs = elem_type,
+ },
+ });
+ } else {
+ break :inner try p.addNode(.{
+ .tag = .ptr_type_bit_range,
+ .main_token = asterisk,
+ .data = .{
+ .lhs = try p.addExtra(Node.PtrTypeBitRange{
+ .sentinel = 0,
+ .align_node = mods.align_node,
+ .bit_range_start = mods.bit_range_start,
+ .bit_range_end = mods.bit_range_end,
+ }),
+ .rhs = elem_type,
+ },
+ });
+ }
+ };
+ return p.addNode(.{
+ .tag = .ptr_type_aligned,
+ .main_token = asterisk,
+ .data = .{
+ .lhs = 0,
+ .rhs = inner,
+ },
+ });
+ },
+ .l_bracket => switch (p.token_tags[p.tok_i + 1]) {
+ .asterisk => {
+ const lbracket = p.nextToken();
+ const asterisk = p.nextToken();
+ var sentinel: Node.Index = 0;
+ prefix: {
+ if (p.eatToken(.identifier)) |ident| {
+ const token_slice = p.source[p.token_starts[ident]..][0..2];
+ if (!std.mem.eql(u8, token_slice, "c]")) {
+ p.tok_i -= 1;
+ } else {
+ break :prefix;
+ }
+ }
+ if (p.eatToken(.colon)) |_| {
+ sentinel = try p.expectExpr();
+ }
+ }
+ _ = try p.expectToken(.r_bracket);
+ const mods = try p.parsePtrModifiers();
+ const elem_type = try p.expectTypeExpr();
+ if (mods.bit_range_start == 0) {
+ if (sentinel == 0) {
+ return p.addNode(.{
+ .tag = .ptr_type_aligned,
+ .main_token = asterisk,
+ .data = .{
+ .lhs = mods.align_node,
+ .rhs = elem_type,
+ },
+ });
+ } else if (mods.align_node == 0) {
+ return p.addNode(.{
+ .tag = .ptr_type_sentinel,
+ .main_token = asterisk,
+ .data = .{
+ .lhs = sentinel,
+ .rhs = elem_type,
+ },
+ });
+ } else {
+ return p.addNode(.{
+ .tag = .ptr_type,
+ .main_token = asterisk,
+ .data = .{
+ .lhs = try p.addExtra(Node.PtrType{
+ .sentinel = sentinel,
+ .align_node = mods.align_node,
+ }),
+ .rhs = elem_type,
+ },
+ });
+ }
+ } else {
+ return p.addNode(.{
+ .tag = .ptr_type_bit_range,
+ .main_token = asterisk,
+ .data = .{
+ .lhs = try p.addExtra(Node.PtrTypeBitRange{
+ .sentinel = sentinel,
+ .align_node = mods.align_node,
+ .bit_range_start = mods.bit_range_start,
+ .bit_range_end = mods.bit_range_end,
+ }),
+ .rhs = elem_type,
+ },
+ });
+ }
+ },
+ else => {
+ const lbracket = p.nextToken();
+ const len_expr = try p.parseExpr();
+ const sentinel: Node.Index = if (p.eatToken(.colon)) |_|
+ try p.expectExpr()
+ else
+ 0;
+ _ = try p.expectToken(.r_bracket);
+ const mods = try p.parsePtrModifiers();
+ const elem_type = try p.expectTypeExpr();
+ if (mods.bit_range_start != 0) {
+ try p.warnMsg(.{
+ .tag = .invalid_bit_range,
+ .token = p.nodes.items(.main_token)[mods.bit_range_start],
+ });
+ }
+ if (len_expr == 0) {
+ if (sentinel == 0) {
+ return p.addNode(.{
+ .tag = .ptr_type_aligned,
+ .main_token = lbracket,
+ .data = .{
+ .lhs = mods.align_node,
+ .rhs = elem_type,
+ },
+ });
+ } else if (mods.align_node == 0) {
+ return p.addNode(.{
+ .tag = .ptr_type_sentinel,
+ .main_token = lbracket,
+ .data = .{
+ .lhs = sentinel,
+ .rhs = elem_type,
+ },
+ });
+ } else {
+ return p.addNode(.{
+ .tag = .ptr_type,
+ .main_token = lbracket,
+ .data = .{
+ .lhs = try p.addExtra(Node.PtrType{
+ .sentinel = sentinel,
+ .align_node = mods.align_node,
+ }),
+ .rhs = elem_type,
+ },
+ });
+ }
+ } else {
+ if (mods.align_node != 0) {
+ try p.warnMsg(.{
+ .tag = .invalid_align,
+ .token = p.nodes.items(.main_token)[mods.align_node],
+ });
+ }
+ if (sentinel == 0) {
+ return p.addNode(.{
+ .tag = .array_type,
+ .main_token = lbracket,
+ .data = .{
+ .lhs = len_expr,
+ .rhs = elem_type,
+ },
+ });
+ } else {
+ return p.addNode(.{
+ .tag = .array_type_sentinel,
+ .main_token = lbracket,
+ .data = .{
+ .lhs = len_expr,
+ .rhs = try p.addExtra(.{
+ .elem_type = elem_type,
+ .sentinel = sentinel,
+ }),
+ },
+ });
+ }
+ }
+ },
+ },
+ else => return p.parseErrorUnionExpr(),
}
+ }
- if (inline_token == null) return null;
-
- // If we've seen "inline", there should have been a "for" or "while"
- try p.errors.append(p.gpa, .{
- .ExpectedInlinable = .{ .token = p.tok_i },
- });
- return error.ParseError;
+ fn expectTypeExpr(p: *Parser) Error!Node.Index {
+ const node = try p.parseTypeExpr();
+ if (node == 0) {
+ return p.fail(.expected_type_expr);
+ }
+ return node;
}
- /// ForExpr <- ForPrefix Expr (KEYWORD_else Expr)?
- fn parseForExpr(p: *Parser) !?*Node {
- const node = (try p.parseForPrefix()) orelse return null;
- const for_prefix = node.cast(Node.For).?;
+ /// PrimaryExpr
+ /// <- AsmExpr
+ /// / IfExpr
+ /// / KEYWORD_break BreakLabel? Expr?
+ /// / KEYWORD_comptime Expr
+ /// / KEYWORD_nosuspend Expr
+ /// / KEYWORD_continue BreakLabel?
+ /// / KEYWORD_resume Expr
+ /// / KEYWORD_return Expr?
+ /// / BlockLabel? LoopExpr
+ /// / Block
+ /// / CurlySuffixExpr
+ fn parsePrimaryExpr(p: *Parser) !Node.Index {
+ switch (p.token_tags[p.tok_i]) {
+ .keyword_asm => return p.expectAsmExpr(),
+ .keyword_if => return p.parseIfExpr(),
+ .keyword_break => {
+ p.tok_i += 1;
+ return p.addNode(.{
+ .tag = .@"break",
+ .main_token = p.tok_i - 1,
+ .data = .{
+ .lhs = try p.parseBreakLabel(),
+ .rhs = try p.parseExpr(),
+ },
+ });
+ },
+ .keyword_continue => {
+ p.tok_i += 1;
+ return p.addNode(.{
+ .tag = .@"continue",
+ .main_token = p.tok_i - 1,
+ .data = .{
+ .lhs = try p.parseBreakLabel(),
+ .rhs = undefined,
+ },
+ });
+ },
+ .keyword_comptime => {
+ p.tok_i += 1;
+ return p.addNode(.{
+ .tag = .@"comptime",
+ .main_token = p.tok_i - 1,
+ .data = .{
+ .lhs = try p.expectExpr(),
+ .rhs = undefined,
+ },
+ });
+ },
+ .keyword_nosuspend => {
+ p.tok_i += 1;
+ return p.addNode(.{
+ .tag = .@"nosuspend",
+ .main_token = p.tok_i - 1,
+ .data = .{
+ .lhs = try p.expectExpr(),
+ .rhs = undefined,
+ },
+ });
+ },
+ .keyword_resume => {
+ p.tok_i += 1;
+ return p.addNode(.{
+ .tag = .@"resume",
+ .main_token = p.tok_i - 1,
+ .data = .{
+ .lhs = try p.expectExpr(),
+ .rhs = undefined,
+ },
+ });
+ },
+ .keyword_return => {
+ p.tok_i += 1;
+ return p.addNode(.{
+ .tag = .@"return",
+ .main_token = p.tok_i - 1,
+ .data = .{
+ .lhs = try p.parseExpr(),
+ .rhs = undefined,
+ },
+ });
+ },
+ .identifier => {
+ if (p.token_tags[p.tok_i + 1] == .colon) {
+ switch (p.token_tags[p.tok_i + 2]) {
+ .keyword_inline => {
+ p.tok_i += 3;
+ switch (p.token_tags[p.tok_i]) {
+ .keyword_for => return p.parseForExpr(),
+ .keyword_while => return p.parseWhileExpr(),
+ else => return p.fail(.expected_inlinable),
+ }
+ },
+ .keyword_for => {
+ p.tok_i += 2;
+ return p.parseForExpr();
+ },
+ .keyword_while => {
+ p.tok_i += 2;
+ return p.parseWhileExpr();
+ },
+ .l_brace => {
+ p.tok_i += 2;
+ return p.parseBlock();
+ },
+ else => return p.parseCurlySuffixExpr(),
+ }
+ } else {
+ return p.parseCurlySuffixExpr();
+ }
+ },
+ .keyword_inline => {
+ p.tok_i += 2;
+ switch (p.token_tags[p.tok_i]) {
+ .keyword_for => return p.parseForExpr(),
+ .keyword_while => return p.parseWhileExpr(),
+ else => return p.fail(.expected_inlinable),
+ }
+ },
+ .keyword_for => return p.parseForExpr(),
+ .keyword_while => return p.parseWhileExpr(),
+ .l_brace => return p.parseBlock(),
+ else => return p.parseCurlySuffixExpr(),
+ }
+ }
- const body_node = try p.expectNode(parseExpr, .{
- .ExpectedExpr = .{ .token = p.tok_i },
- });
- for_prefix.body = body_node;
+ /// IfExpr <- IfPrefix Expr (KEYWORD_else Payload? Expr)?
+ fn parseIfExpr(p: *Parser) !Node.Index {
+ return p.parseIf(parseExpr);
+ }
- if (p.eatToken(.Keyword_else)) |else_token| {
- const body = try p.expectNode(parseExpr, .{
- .ExpectedExpr = .{ .token = p.tok_i },
+ /// Block <- LBRACE Statement* RBRACE
+ fn parseBlock(p: *Parser) !Node.Index {
+ const lbrace = p.eatToken(.l_brace) orelse return null_node;
+
+ if (p.eatToken(.r_brace)) |_| {
+ return p.addNode(.{
+ .tag = .block_two,
+ .main_token = lbrace,
+ .data = .{
+ .lhs = 0,
+ .rhs = 0,
+ },
});
+ }
- const else_node = try p.arena.allocator.create(Node.Else);
- else_node.* = .{
- .else_token = else_token,
- .payload = null,
- .body = body,
- };
-
- for_prefix.@"else" = else_node;
+ const stmt_one = try p.expectStatementRecoverable();
+ if (p.eatToken(.r_brace)) |_| {
+ const semicolon = p.token_tags[p.tok_i - 2] == .semicolon;
+ return p.addNode(.{
+ .tag = if (semicolon) .block_two_semicolon else .block_two,
+ .main_token = lbrace,
+ .data = .{
+ .lhs = stmt_one,
+ .rhs = 0,
+ },
+ });
+ }
+ const stmt_two = try p.expectStatementRecoverable();
+ if (p.eatToken(.r_brace)) |_| {
+ const semicolon = p.token_tags[p.tok_i - 2] == .semicolon;
+ return p.addNode(.{
+ .tag = if (semicolon) .block_two_semicolon else .block_two,
+ .main_token = lbrace,
+ .data = .{
+ .lhs = stmt_one,
+ .rhs = stmt_two,
+ },
+ });
}
- return node;
- }
+ var statements = std.ArrayList(Node.Index).init(p.gpa);
+ defer statements.deinit();
- /// WhileExpr <- WhilePrefix Expr (KEYWORD_else Payload? Expr)?
- fn parseWhileExpr(p: *Parser) !?*Node {
- const node = (try p.parseWhilePrefix()) orelse return null;
- const while_prefix = node.cast(Node.While).?;
+ try statements.appendSlice(&.{ stmt_one, stmt_two });
- const body_node = try p.expectNode(parseExpr, .{
- .ExpectedExpr = .{ .token = p.tok_i },
+ while (true) {
+ const statement = try p.expectStatementRecoverable();
+ if (statement == 0) break;
+ try statements.append(statement);
+ if (p.token_tags[p.tok_i] == .r_brace) break;
+ }
+ _ = try p.expectToken(.r_brace);
+ const semicolon = p.token_tags[p.tok_i - 2] == .semicolon;
+ const statements_span = try p.listToSpan(statements.items);
+ return p.addNode(.{
+ .tag = if (semicolon) .block_semicolon else .block,
+ .main_token = lbrace,
+ .data = .{
+ .lhs = statements_span.start,
+ .rhs = statements_span.end,
+ },
});
- while_prefix.body = body_node;
+ }
- if (p.eatToken(.Keyword_else)) |else_token| {
- const payload = try p.parsePayload();
- const body = try p.expectNode(parseExpr, .{
- .ExpectedExpr = .{ .token = p.tok_i },
+ /// ForPrefix <- KEYWORD_for LPAREN Expr RPAREN PtrIndexPayload
+ /// ForExpr <- ForPrefix Expr (KEYWORD_else Expr)?
+ fn parseForExpr(p: *Parser) !Node.Index {
+ const for_token = p.eatToken(.keyword_for) orelse return null_node;
+ _ = try p.expectToken(.l_paren);
+ const array_expr = try p.expectExpr();
+ _ = try p.expectToken(.r_paren);
+ const found_payload = try p.parsePtrIndexPayload();
+ if (found_payload == 0) try p.warn(.expected_loop_payload);
+
+ const then_expr = try p.expectExpr();
+ const else_token = p.eatToken(.keyword_else) orelse {
+ return p.addNode(.{
+ .tag = .for_simple,
+ .main_token = for_token,
+ .data = .{
+ .lhs = array_expr,
+ .rhs = then_expr,
+ },
});
-
- const else_node = try p.arena.allocator.create(Node.Else);
- else_node.* = .{
- .else_token = else_token,
- .payload = payload,
- .body = body,
- };
-
- while_prefix.@"else" = else_node;
- }
-
- return node;
+ };
+ const else_expr = try p.expectExpr();
+ return p.addNode(.{
+ .tag = .@"for",
+ .main_token = for_token,
+ .data = .{
+ .lhs = array_expr,
+ .rhs = try p.addExtra(Node.If{
+ .then_expr = then_expr,
+ .else_expr = else_expr,
+ }),
+ },
+ });
}
- /// CurlySuffixExpr <- TypeExpr InitList?
- fn parseCurlySuffixExpr(p: *Parser) !?*Node {
- const lhs = (try p.parseTypeExpr()) orelse return null;
- const suffix_op = (try p.parseInitList(lhs)) orelse return lhs;
- return suffix_op;
+ /// WhilePrefix <- KEYWORD_while LPAREN Expr RPAREN PtrPayload? WhileContinueExpr?
+ /// WhileExpr <- WhilePrefix Expr (KEYWORD_else Payload? Expr)?
+ fn parseWhileExpr(p: *Parser) !Node.Index {
+ const while_token = p.eatToken(.keyword_while) orelse return null_node;
+ _ = try p.expectToken(.l_paren);
+ const condition = try p.expectExpr();
+ _ = try p.expectToken(.r_paren);
+ const then_payload = try p.parsePtrPayload();
+ const cont_expr = try p.parseWhileContinueExpr();
+
+ const then_expr = try p.expectExpr();
+ const else_token = p.eatToken(.keyword_else) orelse {
+ if (cont_expr == 0) {
+ return p.addNode(.{
+ .tag = .while_simple,
+ .main_token = while_token,
+ .data = .{
+ .lhs = condition,
+ .rhs = then_expr,
+ },
+ });
+ } else {
+ return p.addNode(.{
+ .tag = .while_cont,
+ .main_token = while_token,
+ .data = .{
+ .lhs = condition,
+ .rhs = try p.addExtra(Node.WhileCont{
+ .cont_expr = cont_expr,
+ .then_expr = then_expr,
+ }),
+ },
+ });
+ }
+ };
+ const else_payload = try p.parsePayload();
+ const else_expr = try p.expectExpr();
+ return p.addNode(.{
+ .tag = .@"while",
+ .main_token = while_token,
+ .data = .{
+ .lhs = condition,
+ .rhs = try p.addExtra(Node.While{
+ .cont_expr = cont_expr,
+ .then_expr = then_expr,
+ .else_expr = else_expr,
+ }),
+ },
+ });
}
+ /// CurlySuffixExpr <- TypeExpr InitList?
/// InitList
/// <- LBRACE FieldInit (COMMA FieldInit)* COMMA? RBRACE
/// / LBRACE Expr (COMMA Expr)* COMMA? RBRACE
/// / LBRACE RBRACE
- fn parseInitList(p: *Parser, lhs: *Node) !?*Node {
- const lbrace = p.eatToken(.LBrace) orelse return null;
- var init_list = std.ArrayList(*Node).init(p.gpa);
- defer init_list.deinit();
+ fn parseCurlySuffixExpr(p: *Parser) !Node.Index {
+ const lhs = try p.parseTypeExpr();
+ if (lhs == 0) return null_node;
+ const lbrace = p.eatToken(.l_brace) orelse return lhs;
+
+ // If there are 0 or 1 items, we can use ArrayInitOne/StructInitOne;
+ // otherwise we use the full ArrayInit/StructInit.
+
+ if (p.eatToken(.r_brace)) |_| {
+ return p.addNode(.{
+ .tag = .struct_init_one,
+ .main_token = lbrace,
+ .data = .{
+ .lhs = lhs,
+ .rhs = 0,
+ },
+ });
+ }
+ const field_init = try p.parseFieldInit();
+ if (field_init != 0) {
+ const comma_one = p.eatToken(.comma);
+ if (p.eatToken(.r_brace)) |_| {
+ return p.addNode(.{
+ .tag = if (comma_one != null) .struct_init_one_comma else .struct_init_one,
+ .main_token = lbrace,
+ .data = .{
+ .lhs = lhs,
+ .rhs = field_init,
+ },
+ });
+ }
+
+ var init_list = std.ArrayList(Node.Index).init(p.gpa);
+ defer init_list.deinit();
- if (try p.parseFieldInit()) |field_init| {
try init_list.append(field_init);
- while (p.eatToken(.Comma)) |_| {
- const next = (try p.parseFieldInit()) orelse break;
- try init_list.append(next);
- }
- const node = try Node.StructInitializer.alloc(&p.arena.allocator, init_list.items.len);
- node.* = .{
- .lhs = lhs,
- .rtoken = try p.expectToken(.RBrace),
- .list_len = init_list.items.len,
- };
- std.mem.copy(*Node, node.list(), init_list.items);
- return &node.base;
- }
- if (try p.parseExpr()) |expr| {
- try init_list.append(expr);
- while (p.eatToken(.Comma)) |_| {
- const next = (try p.parseExpr()) orelse break;
+ while (true) {
+ const next = try p.expectFieldInit();
try init_list.append(next);
+
+ switch (p.token_tags[p.nextToken()]) {
+ .comma => {
+ if (p.eatToken(.r_brace)) |_| break;
+ continue;
+ },
+ .r_brace => break,
+ .colon, .r_paren, .r_bracket => {
+ p.tok_i -= 1;
+ return p.failExpected(.r_brace);
+ },
+ else => {
+ // This is likely just a missing comma;
+ // give an error but continue parsing this list.
+ p.tok_i -= 1;
+ try p.warnExpected(.comma);
+ },
+ }
}
- const node = try Node.ArrayInitializer.alloc(&p.arena.allocator, init_list.items.len);
- node.* = .{
- .lhs = lhs,
- .rtoken = try p.expectToken(.RBrace),
- .list_len = init_list.items.len,
- };
- std.mem.copy(*Node, node.list(), init_list.items);
- return &node.base;
+ const span = try p.listToSpan(init_list.items);
+ return p.addNode(.{
+ .tag = if (p.token_tags[p.tok_i - 2] == .comma) .struct_init_comma else .struct_init,
+ .main_token = lbrace,
+ .data = .{
+ .lhs = lhs,
+ .rhs = try p.addExtra(Node.SubRange{
+ .start = span.start,
+ .end = span.end,
+ }),
+ },
+ });
}
- const node = try p.arena.allocator.create(Node.StructInitializer);
- node.* = .{
- .lhs = lhs,
- .rtoken = try p.expectToken(.RBrace),
- .list_len = 0,
- };
- return &node.base;
- }
+ const elem_init = try p.expectExpr();
+ const comma_one = p.eatToken(.comma);
+ if (p.eatToken(.r_brace)) |_| {
+ return p.addNode(.{
+ .tag = if (comma_one != null) .array_init_one_comma else .array_init_one,
+ .main_token = lbrace,
+ .data = .{
+ .lhs = lhs,
+ .rhs = elem_init,
+ },
+ });
+ }
+ if (comma_one == null) {
+ try p.warnExpected(.comma);
+ }
- /// InitList
- /// <- LBRACE FieldInit (COMMA FieldInit)* COMMA? RBRACE
- /// / LBRACE Expr (COMMA Expr)* COMMA? RBRACE
- /// / LBRACE RBRACE
- fn parseAnonInitList(p: *Parser, dot: TokenIndex) !?*Node {
- const lbrace = p.eatToken(.LBrace) orelse return null;
- var init_list = std.ArrayList(*Node).init(p.gpa);
+ var init_list = std.ArrayList(Node.Index).init(p.gpa);
defer init_list.deinit();
- if (try p.parseFieldInit()) |field_init| {
- try init_list.append(field_init);
- while (p.eatToken(.Comma)) |_| {
- const next = (try p.parseFieldInit()) orelse break;
- try init_list.append(next);
- }
- const node = try Node.StructInitializerDot.alloc(&p.arena.allocator, init_list.items.len);
- node.* = .{
- .dot = dot,
- .rtoken = try p.expectToken(.RBrace),
- .list_len = init_list.items.len,
- };
- std.mem.copy(*Node, node.list(), init_list.items);
- return &node.base;
- }
+ try init_list.append(elem_init);
- if (try p.parseExpr()) |expr| {
- try init_list.append(expr);
- while (p.eatToken(.Comma)) |_| {
- const next = (try p.parseExpr()) orelse break;
- try init_list.append(next);
+ var trailing_comma = true;
+ var next = try p.parseExpr();
+ while (next != 0) : (next = try p.parseExpr()) {
+ try init_list.append(next);
+ if (p.eatToken(.comma) == null) {
+ trailing_comma = false;
+ break;
}
- const node = try Node.ArrayInitializerDot.alloc(&p.arena.allocator, init_list.items.len);
- node.* = .{
- .dot = dot,
- .rtoken = try p.expectToken(.RBrace),
- .list_len = init_list.items.len,
- };
- std.mem.copy(*Node, node.list(), init_list.items);
- return &node.base;
}
-
- const node = try p.arena.allocator.create(Node.StructInitializerDot);
- node.* = .{
- .dot = dot,
- .rtoken = try p.expectToken(.RBrace),
- .list_len = 0,
- };
- return &node.base;
- }
-
- /// TypeExpr <- PrefixTypeOp* ErrorUnionExpr
- fn parseTypeExpr(p: *Parser) Error!?*Node {
- return p.parsePrefixOpExpr(parsePrefixTypeOp, parseErrorUnionExpr);
+ _ = try p.expectToken(.r_brace);
+ const span = try p.listToSpan(init_list.items);
+ return p.addNode(.{
+ .tag = if (trailing_comma) .array_init_comma else .array_init,
+ .main_token = lbrace,
+ .data = .{
+ .lhs = lhs,
+ .rhs = try p.addExtra(Node.SubRange{
+ .start = span.start,
+ .end = span.end,
+ }),
+ },
+ });
}
/// ErrorUnionExpr <- SuffixExpr (EXCLAMATIONMARK TypeExpr)?
- fn parseErrorUnionExpr(p: *Parser) !?*Node {
- const suffix_expr = (try p.parseSuffixExpr()) orelse return null;
-
- if (try SimpleBinOpParseFn(.Bang, .ErrorUnion)(p)) |node| {
- const error_union = node.castTag(.ErrorUnion).?;
- const type_expr = try p.expectNode(parseTypeExpr, .{
- .ExpectedTypeExpr = .{ .token = p.tok_i },
- });
- error_union.lhs = suffix_expr;
- error_union.rhs = type_expr;
- return node;
- }
-
- return suffix_expr;
+ fn parseErrorUnionExpr(p: *Parser) !Node.Index {
+ const suffix_expr = try p.parseSuffixExpr();
+ if (suffix_expr == 0) return null_node;
+ const bang = p.eatToken(.bang) orelse return suffix_expr;
+ return p.addNode(.{
+ .tag = .error_union,
+ .main_token = bang,
+ .data = .{
+ .lhs = suffix_expr,
+ .rhs = try p.expectTypeExpr(),
+ },
+ });
}
/// SuffixExpr
/// <- KEYWORD_async PrimaryTypeExpr SuffixOp* FnCallArguments
/// / PrimaryTypeExpr (SuffixOp / FnCallArguments)*
- fn parseSuffixExpr(p: *Parser) !?*Node {
- const maybe_async = p.eatToken(.Keyword_async);
- if (maybe_async) |async_token| {
- const token_fn = p.eatToken(.Keyword_fn);
- if (token_fn != null) {
- // TODO: remove this hack when async fn rewriting is
- // HACK: If we see the keyword `fn`, then we assume that
- // we are parsing an async fn proto, and not a call.
- // We therefore put back all tokens consumed by the async
- // prefix...
- p.putBackToken(token_fn.?);
- p.putBackToken(async_token);
- return p.parsePrimaryTypeExpr();
- }
- var res = try p.expectNode(parsePrimaryTypeExpr, .{
- .ExpectedPrimaryTypeExpr = .{ .token = p.tok_i },
- });
+ /// FnCallArguments <- LPAREN ExprList RPAREN
+ /// ExprList <- (Expr COMMA)* Expr?
+ fn parseSuffixExpr(p: *Parser) !Node.Index {
+ if (p.eatToken(.keyword_async)) |async_token| {
+ var res = try p.expectPrimaryTypeExpr();
- while (try p.parseSuffixOp(res)) |node| {
+ while (true) {
+ const node = try p.parseSuffixOp(res);
+ if (node == 0) break;
res = node;
}
-
- const params = (try p.parseFnCallArguments()) orelse {
- try p.errors.append(p.gpa, .{
- .ExpectedParamList = .{ .token = p.tok_i },
- });
- // ignore this, continue parsing
+ const lparen = p.nextToken();
+ if (p.token_tags[lparen] != .l_paren) {
+ p.tok_i -= 1;
+ try p.warn(.expected_param_list);
return res;
- };
- defer p.gpa.free(params.list);
- const node = try Node.Call.alloc(&p.arena.allocator, params.list.len);
- node.* = .{
- .lhs = res,
- .params_len = params.list.len,
- .async_token = async_token,
- .rtoken = params.rparen,
- };
- std.mem.copy(*Node, node.params(), params.list);
- return &node.base;
- }
- if (try p.parsePrimaryTypeExpr()) |expr| {
- var res = expr;
+ }
+ if (p.eatToken(.r_paren)) |_| {
+ return p.addNode(.{
+ .tag = .async_call_one,
+ .main_token = lparen,
+ .data = .{
+ .lhs = res,
+ .rhs = 0,
+ },
+ });
+ }
+ const param_one = try p.expectExpr();
+ const comma_one = p.eatToken(.comma);
+ if (p.eatToken(.r_paren)) |_| {
+ return p.addNode(.{
+ .tag = if (comma_one == null) .async_call_one else .async_call_one_comma,
+ .main_token = lparen,
+ .data = .{
+ .lhs = res,
+ .rhs = param_one,
+ },
+ });
+ }
+ if (comma_one == null) {
+ try p.warnExpected(.comma);
+ }
+
+ var param_list = std.ArrayList(Node.Index).init(p.gpa);
+ defer param_list.deinit();
+
+ try param_list.append(param_one);
while (true) {
- if (try p.parseSuffixOp(res)) |node| {
- res = node;
- continue;
- }
- if (try p.parseFnCallArguments()) |params| {
- defer p.gpa.free(params.list);
- const call = try Node.Call.alloc(&p.arena.allocator, params.list.len);
- call.* = .{
- .lhs = res,
- .params_len = params.list.len,
- .async_token = null,
- .rtoken = params.rparen,
- };
- std.mem.copy(*Node, call.params(), params.list);
- res = &call.base;
- continue;
+ const next = try p.expectExpr();
+ try param_list.append(next);
+ switch (p.token_tags[p.nextToken()]) {
+ .comma => {
+ if (p.eatToken(.r_paren)) |_| {
+ const span = try p.listToSpan(param_list.items);
+ return p.addNode(.{
+ .tag = .async_call_comma,
+ .main_token = lparen,
+ .data = .{
+ .lhs = res,
+ .rhs = try p.addExtra(Node.SubRange{
+ .start = span.start,
+ .end = span.end,
+ }),
+ },
+ });
+ } else {
+ continue;
+ }
+ },
+ .r_paren => {
+ const span = try p.listToSpan(param_list.items);
+ return p.addNode(.{
+ .tag = .async_call,
+ .main_token = lparen,
+ .data = .{
+ .lhs = res,
+ .rhs = try p.addExtra(Node.SubRange{
+ .start = span.start,
+ .end = span.end,
+ }),
+ },
+ });
+ },
+ .colon, .r_brace, .r_bracket => {
+ p.tok_i -= 1;
+ return p.failExpected(.r_paren);
+ },
+ else => {
+ p.tok_i -= 1;
+ try p.warnExpected(.comma);
+ },
}
- break;
}
- return res;
}
+ var res = try p.parsePrimaryTypeExpr();
+ if (res == 0) return res;
- return null;
+ while (true) {
+ const suffix_op = try p.parseSuffixOp(res);
+ if (suffix_op != 0) {
+ res = suffix_op;
+ continue;
+ }
+ res = res: {
+ const lparen = p.eatToken(.l_paren) orelse return res;
+ if (p.eatToken(.r_paren)) |_| {
+ break :res try p.addNode(.{
+ .tag = .call_one,
+ .main_token = lparen,
+ .data = .{
+ .lhs = res,
+ .rhs = 0,
+ },
+ });
+ }
+ const param_one = try p.expectExpr();
+ const comma_one = p.eatToken(.comma);
+ if (p.eatToken(.r_paren)) |_| {
+ break :res try p.addNode(.{
+ .tag = if (comma_one == null) .call_one else .call_one_comma,
+ .main_token = lparen,
+ .data = .{
+ .lhs = res,
+ .rhs = param_one,
+ },
+ });
+ }
+ if (comma_one == null) {
+ try p.warnExpected(.comma);
+ }
+
+ var param_list = std.ArrayList(Node.Index).init(p.gpa);
+ defer param_list.deinit();
+
+ try param_list.append(param_one);
+
+ while (true) {
+ const next = try p.expectExpr();
+ try param_list.append(next);
+ switch (p.token_tags[p.nextToken()]) {
+ .comma => {
+ if (p.eatToken(.r_paren)) |_| {
+ const span = try p.listToSpan(param_list.items);
+ break :res try p.addNode(.{
+ .tag = .call_comma,
+ .main_token = lparen,
+ .data = .{
+ .lhs = res,
+ .rhs = try p.addExtra(Node.SubRange{
+ .start = span.start,
+ .end = span.end,
+ }),
+ },
+ });
+ } else {
+ continue;
+ }
+ },
+ .r_paren => {
+ const span = try p.listToSpan(param_list.items);
+ break :res try p.addNode(.{
+ .tag = .call,
+ .main_token = lparen,
+ .data = .{
+ .lhs = res,
+ .rhs = try p.addExtra(Node.SubRange{
+ .start = span.start,
+ .end = span.end,
+ }),
+ },
+ });
+ },
+ .colon, .r_brace, .r_bracket => {
+ p.tok_i -= 1;
+ return p.failExpected(.r_paren);
+ },
+ else => {
+ p.tok_i -= 1;
+ try p.warnExpected(.comma);
+ },
+ }
+ }
+ };
+ }
}
/// PrimaryTypeExpr
@@ -1528,6 +2480,7 @@ const Parser = struct {
/// / CHAR_LITERAL
/// / ContainerDecl
/// / DOT IDENTIFIER
+ /// / DOT InitList
/// / ErrorSetDecl
/// / FLOAT
/// / FnProto
@@ -1546,260 +2499,546 @@ const Parser = struct {
/// / KEYWORD_unreachable
/// / STRINGLITERAL
/// / SwitchExpr
- fn parsePrimaryTypeExpr(p: *Parser) !?*Node {
- if (try p.parseBuiltinCall()) |node| return node;
- if (p.eatToken(.CharLiteral)) |token| {
- const node = try p.arena.allocator.create(Node.OneToken);
- node.* = .{
- .base = .{ .tag = .CharLiteral },
- .token = token,
- };
- return &node.base;
- }
- if (try p.parseContainerDecl()) |node| return node;
- if (try p.parseAnonLiteral()) |node| return node;
- if (try p.parseErrorSetDecl()) |node| return node;
- if (try p.parseFloatLiteral()) |node| return node;
- if (try p.parseFnProto(.as_type, .{})) |node| return node;
- if (try p.parseGroupedExpr()) |node| return node;
- if (try p.parseLabeledTypeExpr()) |node| return node;
- if (try p.parseIdentifier()) |node| return node;
- if (try p.parseIfTypeExpr()) |node| return node;
- if (try p.parseIntegerLiteral()) |node| return node;
- if (p.eatToken(.Keyword_comptime)) |token| {
- const expr = (try p.parseTypeExpr()) orelse return null;
- const node = try p.arena.allocator.create(Node.Comptime);
- node.* = .{
- .doc_comments = null,
- .comptime_token = token,
- .expr = expr,
- };
- return &node.base;
- }
- if (p.eatToken(.Keyword_error)) |token| {
- const period = try p.expectTokenRecoverable(.Period);
- const identifier = try p.expectNodeRecoverable(parseIdentifier, .{
- .ExpectedIdentifier = .{ .token = p.tok_i },
- });
- const global_error_set = try p.createLiteral(.ErrorType, token);
- if (period == null or identifier == null) return global_error_set;
-
- const node = try p.arena.allocator.create(Node.SimpleInfixOp);
- node.* = .{
- .base = Node{ .tag = .Period },
- .op_token = period.?,
- .lhs = global_error_set,
- .rhs = identifier.?,
- };
- return &node.base;
- }
- if (p.eatToken(.Keyword_false)) |token| return p.createLiteral(.BoolLiteral, token);
- if (p.eatToken(.Keyword_null)) |token| return p.createLiteral(.NullLiteral, token);
- if (p.eatToken(.Keyword_anyframe)) |token| {
- const node = try p.arena.allocator.create(Node.AnyFrameType);
- node.* = .{
- .anyframe_token = token,
- .result = null,
- };
- return &node.base;
- }
- if (p.eatToken(.Keyword_true)) |token| return p.createLiteral(.BoolLiteral, token);
- if (p.eatToken(.Keyword_undefined)) |token| return p.createLiteral(.UndefinedLiteral, token);
- if (p.eatToken(.Keyword_unreachable)) |token| return p.createLiteral(.Unreachable, token);
- if (try p.parseStringLiteral()) |node| return node;
- if (try p.parseSwitchExpr()) |node| return node;
-
- return null;
- }
-
/// ContainerDecl <- (KEYWORD_extern / KEYWORD_packed)? ContainerDeclAuto
- fn parseContainerDecl(p: *Parser) !?*Node {
- const layout_token = p.eatToken(.Keyword_extern) orelse
- p.eatToken(.Keyword_packed);
-
- const node = (try p.parseContainerDeclAuto()) orelse {
- if (layout_token) |token|
- p.putBackToken(token);
- return null;
- };
- node.cast(Node.ContainerDecl).?.*.layout_token = layout_token;
- return node;
- }
-
+ /// ContainerDeclAuto <- ContainerDeclType LBRACE ContainerMembers RBRACE
+ /// InitList
+ /// <- LBRACE FieldInit (COMMA FieldInit)* COMMA? RBRACE
+ /// / LBRACE Expr (COMMA Expr)* COMMA? RBRACE
+ /// / LBRACE RBRACE
/// ErrorSetDecl <- KEYWORD_error LBRACE IdentifierList RBRACE
- fn parseErrorSetDecl(p: *Parser) !?*Node {
- const error_token = p.eatToken(.Keyword_error) orelse return null;
- if (p.eatToken(.LBrace) == null) {
- // Might parse as `KEYWORD_error DOT IDENTIFIER` later in PrimaryTypeExpr, so don't error
- p.putBackToken(error_token);
- return null;
- }
- const decls = try p.parseErrorTagList();
- defer p.gpa.free(decls);
- const rbrace = try p.expectToken(.RBrace);
-
- const node = try Node.ErrorSetDecl.alloc(&p.arena.allocator, decls.len);
- node.* = .{
- .error_token = error_token,
- .decls_len = decls.len,
- .rbrace_token = rbrace,
- };
- std.mem.copy(*Node, node.decls(), decls);
- return &node.base;
- }
-
/// GroupedExpr <- LPAREN Expr RPAREN
- fn parseGroupedExpr(p: *Parser) !?*Node {
- const lparen = p.eatToken(.LParen) orelse return null;
- const expr = try p.expectNode(parseExpr, .{
- .ExpectedExpr = .{ .token = p.tok_i },
- });
- const rparen = try p.expectToken(.RParen);
-
- const node = try p.arena.allocator.create(Node.GroupedExpression);
- node.* = .{
- .lparen = lparen,
- .expr = expr,
- .rparen = rparen,
- };
- return &node.base;
- }
-
/// IfTypeExpr <- IfPrefix TypeExpr (KEYWORD_else Payload? TypeExpr)?
- fn parseIfTypeExpr(p: *Parser) !?*Node {
- return p.parseIf(parseTypeExpr);
- }
-
/// LabeledTypeExpr
/// <- BlockLabel Block
/// / BlockLabel? LoopTypeExpr
- fn parseLabeledTypeExpr(p: *Parser) !?*Node {
- var colon: TokenIndex = undefined;
- const label = p.parseBlockLabel(&colon);
+ /// LoopTypeExpr <- KEYWORD_inline? (ForTypeExpr / WhileTypeExpr)
+ fn parsePrimaryTypeExpr(p: *Parser) !Node.Index {
+ switch (p.token_tags[p.tok_i]) {
+ .char_literal => return p.addNode(.{
+ .tag = .char_literal,
+ .main_token = p.nextToken(),
+ .data = .{
+ .lhs = undefined,
+ .rhs = undefined,
+ },
+ }),
+ .integer_literal => return p.addNode(.{
+ .tag = .integer_literal,
+ .main_token = p.nextToken(),
+ .data = .{
+ .lhs = undefined,
+ .rhs = undefined,
+ },
+ }),
+ .float_literal => return p.addNode(.{
+ .tag = .float_literal,
+ .main_token = p.nextToken(),
+ .data = .{
+ .lhs = undefined,
+ .rhs = undefined,
+ },
+ }),
+ .keyword_false => return p.addNode(.{
+ .tag = .false_literal,
+ .main_token = p.nextToken(),
+ .data = .{
+ .lhs = undefined,
+ .rhs = undefined,
+ },
+ }),
+ .keyword_true => return p.addNode(.{
+ .tag = .true_literal,
+ .main_token = p.nextToken(),
+ .data = .{
+ .lhs = undefined,
+ .rhs = undefined,
+ },
+ }),
+ .keyword_null => return p.addNode(.{
+ .tag = .null_literal,
+ .main_token = p.nextToken(),
+ .data = .{
+ .lhs = undefined,
+ .rhs = undefined,
+ },
+ }),
+ .keyword_undefined => return p.addNode(.{
+ .tag = .undefined_literal,
+ .main_token = p.nextToken(),
+ .data = .{
+ .lhs = undefined,
+ .rhs = undefined,
+ },
+ }),
+ .keyword_unreachable => return p.addNode(.{
+ .tag = .unreachable_literal,
+ .main_token = p.nextToken(),
+ .data = .{
+ .lhs = undefined,
+ .rhs = undefined,
+ },
+ }),
+ .keyword_anyframe => return p.addNode(.{
+ .tag = .anyframe_literal,
+ .main_token = p.nextToken(),
+ .data = .{
+ .lhs = undefined,
+ .rhs = undefined,
+ },
+ }),
+ .string_literal => {
+ const main_token = p.nextToken();
+ return p.addNode(.{
+ .tag = .string_literal,
+ .main_token = main_token,
+ .data = .{
+ .lhs = undefined,
+ .rhs = undefined,
+ },
+ });
+ },
- if (label) |label_token| {
- if (try p.parseBlock(label_token)) |node| return node;
- }
+ .builtin => return p.parseBuiltinCall(),
+ .keyword_fn => return p.parseFnProto(),
+ .keyword_if => return p.parseIf(parseTypeExpr),
+ .keyword_switch => return p.expectSwitchExpr(),
- if (try p.parseLoopTypeExpr()) |node| {
- switch (node.tag) {
- .For => node.cast(Node.For).?.label = label,
- .While => node.cast(Node.While).?.label = label,
- else => unreachable,
- }
- return node;
- }
+ .keyword_extern,
+ .keyword_packed,
+ => {
+ p.tok_i += 1;
+ return p.parseContainerDeclAuto();
+ },
- if (label) |token| {
- p.putBackToken(colon);
- p.putBackToken(token);
- }
- return null;
- }
+ .keyword_struct,
+ .keyword_opaque,
+ .keyword_enum,
+ .keyword_union,
+ => return p.parseContainerDeclAuto(),
+
+ .keyword_comptime => return p.addNode(.{
+ .tag = .@"comptime",
+ .main_token = p.nextToken(),
+ .data = .{
+ .lhs = try p.expectTypeExpr(),
+ .rhs = undefined,
+ },
+ }),
+ .multiline_string_literal_line => {
+ const first_line = p.nextToken();
+ while (p.token_tags[p.tok_i] == .multiline_string_literal_line) {
+ p.tok_i += 1;
+ }
+ return p.addNode(.{
+ .tag = .multiline_string_literal,
+ .main_token = first_line,
+ .data = .{
+ .lhs = first_line,
+ .rhs = p.tok_i - 1,
+ },
+ });
+ },
+ .identifier => switch (p.token_tags[p.tok_i + 1]) {
+ .colon => switch (p.token_tags[p.tok_i + 2]) {
+ .keyword_inline => {
+ p.tok_i += 3;
+ switch (p.token_tags[p.tok_i]) {
+ .keyword_for => return p.parseForTypeExpr(),
+ .keyword_while => return p.parseWhileTypeExpr(),
+ else => return p.fail(.expected_inlinable),
+ }
+ },
+ .keyword_for => {
+ p.tok_i += 2;
+ return p.parseForTypeExpr();
+ },
+ .keyword_while => {
+ p.tok_i += 2;
+ return p.parseWhileTypeExpr();
+ },
+ .l_brace => {
+ p.tok_i += 2;
+ return p.parseBlock();
+ },
+ else => return p.addNode(.{
+ .tag = .identifier,
+ .main_token = p.nextToken(),
+ .data = .{
+ .lhs = undefined,
+ .rhs = undefined,
+ },
+ }),
+ },
+ else => return p.addNode(.{
+ .tag = .identifier,
+ .main_token = p.nextToken(),
+ .data = .{
+ .lhs = undefined,
+ .rhs = undefined,
+ },
+ }),
+ },
+ .keyword_inline => {
+ p.tok_i += 1;
+ switch (p.token_tags[p.tok_i]) {
+ .keyword_for => return p.parseForTypeExpr(),
+ .keyword_while => return p.parseWhileTypeExpr(),
+ else => return p.fail(.expected_inlinable),
+ }
+ },
+ .keyword_for => return p.parseForTypeExpr(),
+ .keyword_while => return p.parseWhileTypeExpr(),
+ .period => switch (p.token_tags[p.tok_i + 1]) {
+ .identifier => return p.addNode(.{
+ .tag = .enum_literal,
+ .data = .{
+ .lhs = p.nextToken(), // dot
+ .rhs = undefined,
+ },
+ .main_token = p.nextToken(), // identifier
+ }),
+ .l_brace => {
+ const lbrace = p.tok_i + 1;
+ p.tok_i = lbrace + 1;
+
+ // If there are 0, 1, or 2 items, we can use ArrayInitDotTwo/StructInitDotTwo;
+ // otherwise we use the full ArrayInitDot/StructInitDot.
+
+ if (p.eatToken(.r_brace)) |_| {
+ return p.addNode(.{
+ .tag = .struct_init_dot_two,
+ .main_token = lbrace,
+ .data = .{
+ .lhs = 0,
+ .rhs = 0,
+ },
+ });
+ }
+ const field_init_one = try p.parseFieldInit();
+ if (field_init_one != 0) {
+ const comma_one = p.eatToken(.comma);
+ if (p.eatToken(.r_brace)) |_| {
+ return p.addNode(.{
+ .tag = if (comma_one != null) .struct_init_dot_two_comma else .struct_init_dot_two,
+ .main_token = lbrace,
+ .data = .{
+ .lhs = field_init_one,
+ .rhs = 0,
+ },
+ });
+ }
+ if (comma_one == null) {
+ try p.warnExpected(.comma);
+ }
+ const field_init_two = try p.expectFieldInit();
+ const comma_two = p.eatToken(.comma);
+ if (p.eatToken(.r_brace)) |_| {
+ return p.addNode(.{
+ .tag = if (comma_two != null) .struct_init_dot_two_comma else .struct_init_dot_two,
+ .main_token = lbrace,
+ .data = .{
+ .lhs = field_init_one,
+ .rhs = field_init_two,
+ },
+ });
+ }
+ if (comma_two == null) {
+ try p.warnExpected(.comma);
+ }
+ var init_list = std.ArrayList(Node.Index).init(p.gpa);
+ defer init_list.deinit();
+
+ try init_list.appendSlice(&.{ field_init_one, field_init_two });
+
+ while (true) {
+ const next = try p.expectFieldInit();
+ assert(next != 0);
+ try init_list.append(next);
+ switch (p.token_tags[p.nextToken()]) {
+ .comma => {
+ if (p.eatToken(.r_brace)) |_| break;
+ continue;
+ },
+ .r_brace => break,
+ .colon, .r_paren, .r_bracket => {
+ p.tok_i -= 1;
+ return p.failExpected(.r_brace);
+ },
+ else => {
+ p.tok_i -= 1;
+ try p.warnExpected(.comma);
+ },
+ }
+ }
+ const span = try p.listToSpan(init_list.items);
+ const trailing_comma = p.token_tags[p.tok_i - 2] == .comma;
+ return p.addNode(.{
+ .tag = if (trailing_comma) .struct_init_dot_comma else .struct_init_dot,
+ .main_token = lbrace,
+ .data = .{
+ .lhs = span.start,
+ .rhs = span.end,
+ },
+ });
+ }
- /// LoopTypeExpr <- KEYWORD_inline? (ForTypeExpr / WhileTypeExpr)
- fn parseLoopTypeExpr(p: *Parser) !?*Node {
- const inline_token = p.eatToken(.Keyword_inline);
+ const elem_init_one = try p.expectExpr();
+ const comma_one = p.eatToken(.comma);
+ if (p.eatToken(.r_brace)) |_| {
+ return p.addNode(.{
+ .tag = if (comma_one != null) .array_init_dot_two_comma else .array_init_dot_two,
+ .main_token = lbrace,
+ .data = .{
+ .lhs = elem_init_one,
+ .rhs = 0,
+ },
+ });
+ }
+ if (comma_one == null) {
+ try p.warnExpected(.comma);
+ }
+ const elem_init_two = try p.expectExpr();
+ const comma_two = p.eatToken(.comma);
+ if (p.eatToken(.r_brace)) |_| {
+ return p.addNode(.{
+ .tag = if (comma_two != null) .array_init_dot_two_comma else .array_init_dot_two,
+ .main_token = lbrace,
+ .data = .{
+ .lhs = elem_init_one,
+ .rhs = elem_init_two,
+ },
+ });
+ }
+ if (comma_two == null) {
+ try p.warnExpected(.comma);
+ }
+ var init_list = std.ArrayList(Node.Index).init(p.gpa);
+ defer init_list.deinit();
+
+ try init_list.appendSlice(&.{ elem_init_one, elem_init_two });
+
+ while (true) {
+ const next = try p.expectExpr();
+ if (next == 0) break;
+ try init_list.append(next);
+ switch (p.token_tags[p.nextToken()]) {
+ .comma => {
+ if (p.eatToken(.r_brace)) |_| break;
+ continue;
+ },
+ .r_brace => break,
+ .colon, .r_paren, .r_bracket => {
+ p.tok_i -= 1;
+ return p.failExpected(.r_brace);
+ },
+ else => {
+ p.tok_i -= 1;
+ try p.warnExpected(.comma);
+ },
+ }
+ }
+ const span = try p.listToSpan(init_list.items);
+ return p.addNode(.{
+ .tag = if (p.token_tags[p.tok_i - 2] == .comma) .array_init_dot_comma else .array_init_dot,
+ .main_token = lbrace,
+ .data = .{
+ .lhs = span.start,
+ .rhs = span.end,
+ },
+ });
+ },
+ else => return null_node,
+ },
+ .keyword_error => switch (p.token_tags[p.tok_i + 1]) {
+ .l_brace => {
+ const error_token = p.tok_i;
+ p.tok_i += 2;
+
+ if (p.eatToken(.r_brace)) |rbrace| {
+ return p.addNode(.{
+ .tag = .error_set_decl,
+ .main_token = error_token,
+ .data = .{
+ .lhs = undefined,
+ .rhs = rbrace,
+ },
+ });
+ }
- if (try p.parseForTypeExpr()) |node| {
- node.cast(Node.For).?.inline_token = inline_token;
- return node;
+ while (true) {
+ const doc_comment = try p.eatDocComments();
+ const identifier = try p.expectToken(.identifier);
+ switch (p.token_tags[p.nextToken()]) {
+ .comma => {
+ if (p.eatToken(.r_brace)) |_| break;
+ continue;
+ },
+ .r_brace => break,
+ .colon, .r_paren, .r_bracket => {
+ p.tok_i -= 1;
+ return p.failExpected(.r_brace);
+ },
+ else => {
+ // This is likely just a missing comma;
+ // give an error but continue parsing this list.
+ p.tok_i -= 1;
+ try p.warnExpected(.comma);
+ },
+ }
+ }
+ return p.addNode(.{
+ .tag = .error_set_decl,
+ .main_token = error_token,
+ .data = .{
+ .lhs = undefined,
+ .rhs = p.tok_i - 1, // rbrace
+ },
+ });
+ },
+ else => {
+ const main_token = p.nextToken();
+ const period = p.eatToken(.period);
+ if (period == null) try p.warnExpected(.period);
+ const identifier = p.eatToken(.identifier);
+ if (identifier == null) try p.warnExpected(.identifier);
+ return p.addNode(.{
+ .tag = .error_value,
+ .main_token = main_token,
+ .data = .{
+ .lhs = period orelse 0,
+ .rhs = identifier orelse 0,
+ },
+ });
+ },
+ },
+ .l_paren => return p.addNode(.{
+ .tag = .grouped_expression,
+ .main_token = p.nextToken(),
+ .data = .{
+ .lhs = try p.expectExpr(),
+ .rhs = try p.expectToken(.r_paren),
+ },
+ }),
+ else => return null_node,
}
+ }
- if (try p.parseWhileTypeExpr()) |node| {
- node.cast(Node.While).?.inline_token = inline_token;
- return node;
+ fn expectPrimaryTypeExpr(p: *Parser) !Node.Index {
+ const node = try p.parsePrimaryTypeExpr();
+ if (node == 0) {
+ return p.fail(.expected_primary_type_expr);
}
-
- if (inline_token == null) return null;
-
- // If we've seen "inline", there should have been a "for" or "while"
- try p.errors.append(p.gpa, .{
- .ExpectedInlinable = .{ .token = p.tok_i },
- });
- return error.ParseError;
+ return node;
}
+ /// ForPrefix <- KEYWORD_for LPAREN Expr RPAREN PtrIndexPayload
/// ForTypeExpr <- ForPrefix TypeExpr (KEYWORD_else TypeExpr)?
- fn parseForTypeExpr(p: *Parser) !?*Node {
- const node = (try p.parseForPrefix()) orelse return null;
- const for_prefix = node.cast(Node.For).?;
-
- const type_expr = try p.expectNode(parseTypeExpr, .{
- .ExpectedTypeExpr = .{ .token = p.tok_i },
- });
- for_prefix.body = type_expr;
-
- if (p.eatToken(.Keyword_else)) |else_token| {
- const else_expr = try p.expectNode(parseTypeExpr, .{
- .ExpectedTypeExpr = .{ .token = p.tok_i },
+ fn parseForTypeExpr(p: *Parser) !Node.Index {
+ const for_token = p.eatToken(.keyword_for) orelse return null_node;
+ _ = try p.expectToken(.l_paren);
+ const array_expr = try p.expectExpr();
+ _ = try p.expectToken(.r_paren);
+ const found_payload = try p.parsePtrIndexPayload();
+ if (found_payload == 0) try p.warn(.expected_loop_payload);
+
+ const then_expr = try p.expectExpr();
+ const else_token = p.eatToken(.keyword_else) orelse {
+ return p.addNode(.{
+ .tag = .for_simple,
+ .main_token = for_token,
+ .data = .{
+ .lhs = array_expr,
+ .rhs = then_expr,
+ },
});
-
- const else_node = try p.arena.allocator.create(Node.Else);
- else_node.* = .{
- .else_token = else_token,
- .payload = null,
- .body = else_expr,
- };
-
- for_prefix.@"else" = else_node;
- }
-
- return node;
+ };
+ const else_expr = try p.expectTypeExpr();
+ return p.addNode(.{
+ .tag = .@"for",
+ .main_token = for_token,
+ .data = .{
+ .lhs = array_expr,
+ .rhs = try p.addExtra(Node.If{
+ .then_expr = then_expr,
+ .else_expr = else_expr,
+ }),
+ },
+ });
}
+ /// WhilePrefix <- KEYWORD_while LPAREN Expr RPAREN PtrPayload? WhileContinueExpr?
/// WhileTypeExpr <- WhilePrefix TypeExpr (KEYWORD_else Payload? TypeExpr)?
- fn parseWhileTypeExpr(p: *Parser) !?*Node {
- const node = (try p.parseWhilePrefix()) orelse return null;
- const while_prefix = node.cast(Node.While).?;
-
- const type_expr = try p.expectNode(parseTypeExpr, .{
- .ExpectedTypeExpr = .{ .token = p.tok_i },
+ fn parseWhileTypeExpr(p: *Parser) !Node.Index {
+ const while_token = p.eatToken(.keyword_while) orelse return null_node;
+ _ = try p.expectToken(.l_paren);
+ const condition = try p.expectExpr();
+ _ = try p.expectToken(.r_paren);
+ const then_payload = try p.parsePtrPayload();
+ const cont_expr = try p.parseWhileContinueExpr();
+
+ const then_expr = try p.expectTypeExpr();
+ const else_token = p.eatToken(.keyword_else) orelse {
+ if (cont_expr == 0) {
+ return p.addNode(.{
+ .tag = .while_simple,
+ .main_token = while_token,
+ .data = .{
+ .lhs = condition,
+ .rhs = then_expr,
+ },
+ });
+ } else {
+ return p.addNode(.{
+ .tag = .while_cont,
+ .main_token = while_token,
+ .data = .{
+ .lhs = condition,
+ .rhs = try p.addExtra(Node.WhileCont{
+ .cont_expr = cont_expr,
+ .then_expr = then_expr,
+ }),
+ },
+ });
+ }
+ };
+ const else_payload = try p.parsePayload();
+ const else_expr = try p.expectTypeExpr();
+ return p.addNode(.{
+ .tag = .@"while",
+ .main_token = while_token,
+ .data = .{
+ .lhs = condition,
+ .rhs = try p.addExtra(Node.While{
+ .cont_expr = cont_expr,
+ .then_expr = then_expr,
+ .else_expr = else_expr,
+ }),
+ },
});
- while_prefix.body = type_expr;
-
- if (p.eatToken(.Keyword_else)) |else_token| {
- const payload = try p.parsePayload();
-
- const else_expr = try p.expectNode(parseTypeExpr, .{
- .ExpectedTypeExpr = .{ .token = p.tok_i },
- });
-
- const else_node = try p.arena.allocator.create(Node.Else);
- else_node.* = .{
- .else_token = else_token,
- .payload = null,
- .body = else_expr,
- };
-
- while_prefix.@"else" = else_node;
- }
-
- return node;
}
/// SwitchExpr <- KEYWORD_switch LPAREN Expr RPAREN LBRACE SwitchProngList RBRACE
- fn parseSwitchExpr(p: *Parser) !?*Node {
- const switch_token = p.eatToken(.Keyword_switch) orelse return null;
- _ = try p.expectToken(.LParen);
- const expr_node = try p.expectNode(parseExpr, .{
- .ExpectedExpr = .{ .token = p.tok_i },
- });
- _ = try p.expectToken(.RParen);
- _ = try p.expectToken(.LBrace);
+ fn expectSwitchExpr(p: *Parser) !Node.Index {
+ const switch_token = p.assertToken(.keyword_switch);
+ _ = try p.expectToken(.l_paren);
+ const expr_node = try p.expectExpr();
+ _ = try p.expectToken(.r_paren);
+ _ = try p.expectToken(.l_brace);
const cases = try p.parseSwitchProngList();
- defer p.gpa.free(cases);
- const rbrace = try p.expectToken(.RBrace);
-
- const node = try Node.Switch.alloc(&p.arena.allocator, cases.len);
- node.* = .{
- .switch_token = switch_token,
- .expr = expr_node,
- .cases_len = cases.len,
- .rbrace = rbrace,
- };
- std.mem.copy(*Node, node.cases(), cases);
- return &node.base;
+ const trailing_comma = p.token_tags[p.tok_i - 1] == .comma;
+ _ = try p.expectToken(.r_brace);
+
+ return p.addNode(.{
+ .tag = if (trailing_comma) .switch_comma else .@"switch",
+ .main_token = switch_token,
+ .data = .{
+ .lhs = expr_node,
+ .rhs = try p.addExtra(Node.SubRange{
+ .start = cases.start,
+ .end = cases.end,
+ }),
+ },
+ });
}
/// AsmExpr <- KEYWORD_asm KEYWORD_volatile? LPAREN Expr AsmOutput? RPAREN
@@ -1807,1696 +3046,1050 @@ const Parser = struct {
/// AsmInput <- COLON AsmInputList AsmClobbers?
/// AsmClobbers <- COLON StringList
/// StringList <- (STRINGLITERAL COMMA)* STRINGLITERAL?
- fn parseAsmExpr(p: *Parser) !?*Node {
- const asm_token = p.eatToken(.Keyword_asm) orelse return null;
- const volatile_token = p.eatToken(.Keyword_volatile);
- _ = try p.expectToken(.LParen);
- const template = try p.expectNode(parseExpr, .{
- .ExpectedExpr = .{ .token = p.tok_i },
- });
-
- var arena_outputs: []Node.Asm.Output = &[0]Node.Asm.Output{};
- var arena_inputs: []Node.Asm.Input = &[0]Node.Asm.Input{};
- var arena_clobbers: []*Node = &[0]*Node{};
+ /// AsmOutputList <- (AsmOutputItem COMMA)* AsmOutputItem?
+ /// AsmInputList <- (AsmInputItem COMMA)* AsmInputItem?
+ fn expectAsmExpr(p: *Parser) !Node.Index {
+ const asm_token = p.assertToken(.keyword_asm);
+ _ = p.eatToken(.keyword_volatile);
+ _ = try p.expectToken(.l_paren);
+ const template = try p.expectExpr();
+
+ if (p.eatToken(.r_paren)) |rparen| {
+ return p.addNode(.{
+ .tag = .asm_simple,
+ .main_token = asm_token,
+ .data = .{
+ .lhs = template,
+ .rhs = rparen,
+ },
+ });
+ }
- if (p.eatToken(.Colon) != null) {
- const outputs = try p.parseAsmOutputList();
- defer p.gpa.free(outputs);
- arena_outputs = try p.arena.allocator.dupe(Node.Asm.Output, outputs);
+ _ = try p.expectToken(.colon);
- if (p.eatToken(.Colon) != null) {
- const inputs = try p.parseAsmInputList();
- defer p.gpa.free(inputs);
- arena_inputs = try p.arena.allocator.dupe(Node.Asm.Input, inputs);
+ var list = std.ArrayList(Node.Index).init(p.gpa);
+ defer list.deinit();
- if (p.eatToken(.Colon) != null) {
- const clobbers = try ListParseFn(*Node, parseStringLiteral)(p);
- defer p.gpa.free(clobbers);
- arena_clobbers = try p.arena.allocator.dupe(*Node, clobbers);
+ while (true) {
+ const output_item = try p.parseAsmOutputItem();
+ if (output_item == 0) break;
+ try list.append(output_item);
+ switch (p.token_tags[p.tok_i]) {
+ .comma => p.tok_i += 1,
+ .colon, .r_paren, .r_brace, .r_bracket => break, // All possible delimiters.
+ else => {
+ // This is likely just a missing comma;
+ // give an error but continue parsing this list.
+ try p.warnExpected(.comma);
+ },
+ }
+ }
+ if (p.eatToken(.colon)) |_| {
+ while (true) {
+ const input_item = try p.parseAsmInputItem();
+ if (input_item == 0) break;
+ try list.append(input_item);
+ switch (p.token_tags[p.tok_i]) {
+ .comma => p.tok_i += 1,
+ .colon, .r_paren, .r_brace, .r_bracket => break, // All possible delimiters.
+ else => {
+ // This is likely just a missing comma;
+ // give an error but continue parsing this list.
+ try p.warnExpected(.comma);
+ },
+ }
+ }
+ if (p.eatToken(.colon)) |_| {
+ while (p.eatToken(.string_literal)) |_| {
+ switch (p.token_tags[p.tok_i]) {
+ .comma => p.tok_i += 1,
+ .colon, .r_paren, .r_brace, .r_bracket => break,
+ else => {
+ // This is likely just a missing comma;
+ // give an error but continue parsing this list.
+ try p.warnExpected(.comma);
+ },
+ }
}
}
}
-
- const node = try p.arena.allocator.create(Node.Asm);
- node.* = .{
- .asm_token = asm_token,
- .volatile_token = volatile_token,
- .template = template,
- .outputs = arena_outputs,
- .inputs = arena_inputs,
- .clobbers = arena_clobbers,
- .rparen = try p.expectToken(.RParen),
- };
-
- return &node.base;
- }
-
- /// DOT IDENTIFIER
- fn parseAnonLiteral(p: *Parser) !?*Node {
- const dot = p.eatToken(.Period) orelse return null;
-
- // anon enum literal
- if (p.eatToken(.Identifier)) |name| {
- const node = try p.arena.allocator.create(Node.EnumLiteral);
- node.* = .{
- .dot = dot,
- .name = name,
- };
- return &node.base;
- }
-
- if (try p.parseAnonInitList(dot)) |node| {
- return node;
- }
-
- p.putBackToken(dot);
- return null;
+ const rparen = try p.expectToken(.r_paren);
+ const span = try p.listToSpan(list.items);
+ return p.addNode(.{
+ .tag = .@"asm",
+ .main_token = asm_token,
+ .data = .{
+ .lhs = template,
+ .rhs = try p.addExtra(Node.Asm{
+ .items_start = span.start,
+ .items_end = span.end,
+ .rparen = rparen,
+ }),
+ },
+ });
}
/// AsmOutputItem <- LBRACKET IDENTIFIER RBRACKET STRINGLITERAL LPAREN (MINUSRARROW TypeExpr / IDENTIFIER) RPAREN
- fn parseAsmOutputItem(p: *Parser) !?Node.Asm.Output {
- const lbracket = p.eatToken(.LBracket) orelse return null;
- const name = try p.expectNode(parseIdentifier, .{
- .ExpectedIdentifier = .{ .token = p.tok_i },
- });
- _ = try p.expectToken(.RBracket);
-
- const constraint = try p.expectNode(parseStringLiteral, .{
- .ExpectedStringLiteral = .{ .token = p.tok_i },
- });
-
- _ = try p.expectToken(.LParen);
- const kind: Node.Asm.Output.Kind = blk: {
- if (p.eatToken(.Arrow) != null) {
- const return_ident = try p.expectNode(parseTypeExpr, .{
- .ExpectedTypeExpr = .{ .token = p.tok_i },
- });
- break :blk .{ .Return = return_ident };
+ fn parseAsmOutputItem(p: *Parser) !Node.Index {
+ _ = p.eatToken(.l_bracket) orelse return null_node;
+ const identifier = try p.expectToken(.identifier);
+ _ = try p.expectToken(.r_bracket);
+ _ = try p.expectToken(.string_literal);
+ _ = try p.expectToken(.l_paren);
+ const type_expr: Node.Index = blk: {
+ if (p.eatToken(.arrow)) |_| {
+ break :blk try p.expectTypeExpr();
+ } else {
+ _ = try p.expectToken(.identifier);
+ break :blk null_node;
}
- const variable = try p.expectNode(parseIdentifier, .{
- .ExpectedIdentifier = .{ .token = p.tok_i },
- });
- break :blk .{ .Variable = variable.castTag(.Identifier).? };
- };
- const rparen = try p.expectToken(.RParen);
-
- return Node.Asm.Output{
- .lbracket = lbracket,
- .symbolic_name = name,
- .constraint = constraint,
- .kind = kind,
- .rparen = rparen,
};
+ const rparen = try p.expectToken(.r_paren);
+ return p.addNode(.{
+ .tag = .asm_output,
+ .main_token = identifier,
+ .data = .{
+ .lhs = type_expr,
+ .rhs = rparen,
+ },
+ });
}
/// AsmInputItem <- LBRACKET IDENTIFIER RBRACKET STRINGLITERAL LPAREN Expr RPAREN
- fn parseAsmInputItem(p: *Parser) !?Node.Asm.Input {
- const lbracket = p.eatToken(.LBracket) orelse return null;
- const name = try p.expectNode(parseIdentifier, .{
- .ExpectedIdentifier = .{ .token = p.tok_i },
- });
- _ = try p.expectToken(.RBracket);
-
- const constraint = try p.expectNode(parseStringLiteral, .{
- .ExpectedStringLiteral = .{ .token = p.tok_i },
- });
-
- _ = try p.expectToken(.LParen);
- const expr = try p.expectNode(parseExpr, .{
- .ExpectedExpr = .{ .token = p.tok_i },
+ fn parseAsmInputItem(p: *Parser) !Node.Index {
+ _ = p.eatToken(.l_bracket) orelse return null_node;
+ const identifier = try p.expectToken(.identifier);
+ _ = try p.expectToken(.r_bracket);
+ _ = try p.expectToken(.string_literal);
+ _ = try p.expectToken(.l_paren);
+ const expr = try p.expectExpr();
+ const rparen = try p.expectToken(.r_paren);
+ return p.addNode(.{
+ .tag = .asm_input,
+ .main_token = identifier,
+ .data = .{
+ .lhs = expr,
+ .rhs = rparen,
+ },
});
- const rparen = try p.expectToken(.RParen);
-
- return Node.Asm.Input{
- .lbracket = lbracket,
- .symbolic_name = name,
- .constraint = constraint,
- .expr = expr,
- .rparen = rparen,
- };
}
/// BreakLabel <- COLON IDENTIFIER
- fn parseBreakLabel(p: *Parser) !?TokenIndex {
- _ = p.eatToken(.Colon) orelse return null;
- const ident = try p.expectToken(.Identifier);
- return ident;
+ fn parseBreakLabel(p: *Parser) !TokenIndex {
+ _ = p.eatToken(.colon) orelse return @as(TokenIndex, 0);
+ return p.expectToken(.identifier);
}
/// BlockLabel <- IDENTIFIER COLON
- fn parseBlockLabel(p: *Parser, colon_token: *TokenIndex) ?TokenIndex {
- const identifier = p.eatToken(.Identifier) orelse return null;
- if (p.eatToken(.Colon)) |colon| {
- colon_token.* = colon;
+ fn parseBlockLabel(p: *Parser) TokenIndex {
+ if (p.token_tags[p.tok_i] == .identifier and
+ p.token_tags[p.tok_i + 1] == .colon)
+ {
+ const identifier = p.tok_i;
+ p.tok_i += 2;
return identifier;
}
- p.putBackToken(identifier);
- return null;
+ return 0;
}
/// FieldInit <- DOT IDENTIFIER EQUAL Expr
- fn parseFieldInit(p: *Parser) !?*Node {
- const period_token = p.eatToken(.Period) orelse return null;
- const name_token = p.eatToken(.Identifier) orelse {
- // Because of anon literals `.{` is also valid.
- p.putBackToken(period_token);
- return null;
- };
- const eq_token = p.eatToken(.Equal) orelse {
- // `.Name` may also be an enum literal, which is a later rule.
- p.putBackToken(name_token);
- p.putBackToken(period_token);
- return null;
- };
- const expr_node = try p.expectNode(parseExpr, .{
- .ExpectedExpr = .{ .token = p.tok_i },
- });
+ fn parseFieldInit(p: *Parser) !Node.Index {
+ if (p.token_tags[p.tok_i + 0] == .period and
+ p.token_tags[p.tok_i + 1] == .identifier and
+ p.token_tags[p.tok_i + 2] == .equal)
+ {
+ p.tok_i += 3;
+ return p.expectExpr();
+ } else {
+ return null_node;
+ }
+ }
- const node = try p.arena.allocator.create(Node.FieldInitializer);
- node.* = .{
- .period_token = period_token,
- .name_token = name_token,
- .expr = expr_node,
- };
- return &node.base;
+ fn expectFieldInit(p: *Parser) !Node.Index {
+ _ = try p.expectToken(.period);
+ _ = try p.expectToken(.identifier);
+ _ = try p.expectToken(.equal);
+ return p.expectExpr();
}
/// WhileContinueExpr <- COLON LPAREN AssignExpr RPAREN
- fn parseWhileContinueExpr(p: *Parser) !?*Node {
- _ = p.eatToken(.Colon) orelse return null;
- _ = try p.expectToken(.LParen);
- const node = try p.expectNode(parseAssignExpr, .{
- .ExpectedExprOrAssignment = .{ .token = p.tok_i },
- });
- _ = try p.expectToken(.RParen);
+ fn parseWhileContinueExpr(p: *Parser) !Node.Index {
+ _ = p.eatToken(.colon) orelse return null_node;
+ _ = try p.expectToken(.l_paren);
+ const node = try p.parseAssignExpr();
+ if (node == 0) return p.fail(.expected_expr_or_assignment);
+ _ = try p.expectToken(.r_paren);
return node;
}
/// LinkSection <- KEYWORD_linksection LPAREN Expr RPAREN
- fn parseLinkSection(p: *Parser) !?*Node {
- _ = p.eatToken(.Keyword_linksection) orelse return null;
- _ = try p.expectToken(.LParen);
- const expr_node = try p.expectNode(parseExpr, .{
- .ExpectedExpr = .{ .token = p.tok_i },
- });
- _ = try p.expectToken(.RParen);
+ fn parseLinkSection(p: *Parser) !Node.Index {
+ _ = p.eatToken(.keyword_linksection) orelse return null_node;
+ _ = try p.expectToken(.l_paren);
+ const expr_node = try p.expectExpr();
+ _ = try p.expectToken(.r_paren);
return expr_node;
}
/// CallConv <- KEYWORD_callconv LPAREN Expr RPAREN
- fn parseCallconv(p: *Parser) !?*Node {
- _ = p.eatToken(.Keyword_callconv) orelse return null;
- _ = try p.expectToken(.LParen);
- const expr_node = try p.expectNode(parseExpr, .{
- .ExpectedExpr = .{ .token = p.tok_i },
- });
- _ = try p.expectToken(.RParen);
+ fn parseCallconv(p: *Parser) !Node.Index {
+ _ = p.eatToken(.keyword_callconv) orelse return null_node;
+ _ = try p.expectToken(.l_paren);
+ const expr_node = try p.expectExpr();
+ _ = try p.expectToken(.r_paren);
return expr_node;
}
- /// ParamDecl <- (KEYWORD_noalias / KEYWORD_comptime)? (IDENTIFIER COLON)? ParamType
- fn parseParamDecl(p: *Parser) !?Node.FnProto.ParamDecl {
- const doc_comments = try p.parseDocComment();
- const noalias_token = p.eatToken(.Keyword_noalias);
- const comptime_token = if (noalias_token == null) p.eatToken(.Keyword_comptime) else null;
- const name_token = blk: {
- const identifier = p.eatToken(.Identifier) orelse break :blk null;
- if (p.eatToken(.Colon) != null) break :blk identifier;
- p.putBackToken(identifier); // ParamType may also be an identifier
- break :blk null;
- };
- const param_type = (try p.parseParamType()) orelse {
- // Only return cleanly if no keyword, identifier, or doc comment was found
- if (noalias_token == null and
- comptime_token == null and
- name_token == null and
- doc_comments == null)
- {
- return null;
- }
- try p.errors.append(p.gpa, .{
- .ExpectedParamType = .{ .token = p.tok_i },
- });
- return error.ParseError;
- };
-
- return Node.FnProto.ParamDecl{
- .doc_comments = doc_comments,
- .comptime_token = comptime_token,
- .noalias_token = noalias_token,
- .name_token = name_token,
- .param_type = param_type,
- };
- }
-
+ /// ParamDecl
+ /// <- (KEYWORD_noalias / KEYWORD_comptime)? (IDENTIFIER COLON)? ParamType
+ /// / DOT3
/// ParamType
/// <- Keyword_anytype
- /// / DOT3
/// / TypeExpr
- fn parseParamType(p: *Parser) !?Node.FnProto.ParamDecl.ParamType {
- // TODO cast from tuple to error union is broken
- const P = Node.FnProto.ParamDecl.ParamType;
- if (try p.parseAnyType()) |node| return P{ .any_type = node };
- if (try p.parseTypeExpr()) |node| return P{ .type_expr = node };
- return null;
- }
-
- /// IfPrefix <- KEYWORD_if LPAREN Expr RPAREN PtrPayload?
- fn parseIfPrefix(p: *Parser) !?*Node {
- const if_token = p.eatToken(.Keyword_if) orelse return null;
- _ = try p.expectToken(.LParen);
- const condition = try p.expectNode(parseExpr, .{
- .ExpectedExpr = .{ .token = p.tok_i },
- });
- _ = try p.expectToken(.RParen);
- const payload = try p.parsePtrPayload();
-
- const node = try p.arena.allocator.create(Node.If);
- node.* = .{
- .if_token = if_token,
- .condition = condition,
- .payload = payload,
- .body = undefined, // set by caller
- .@"else" = null,
- };
- return &node.base;
- }
-
- /// WhilePrefix <- KEYWORD_while LPAREN Expr RPAREN PtrPayload? WhileContinueExpr?
- fn parseWhilePrefix(p: *Parser) !?*Node {
- const while_token = p.eatToken(.Keyword_while) orelse return null;
-
- _ = try p.expectToken(.LParen);
- const condition = try p.expectNode(parseExpr, .{
- .ExpectedExpr = .{ .token = p.tok_i },
- });
- _ = try p.expectToken(.RParen);
-
- const payload = try p.parsePtrPayload();
- const continue_expr = try p.parseWhileContinueExpr();
-
- const node = try p.arena.allocator.create(Node.While);
- node.* = .{
- .label = null,
- .inline_token = null,
- .while_token = while_token,
- .condition = condition,
- .payload = payload,
- .continue_expr = continue_expr,
- .body = undefined, // set by caller
- .@"else" = null,
- };
- return &node.base;
- }
-
- /// ForPrefix <- KEYWORD_for LPAREN Expr RPAREN PtrIndexPayload
- fn parseForPrefix(p: *Parser) !?*Node {
- const for_token = p.eatToken(.Keyword_for) orelse return null;
-
- _ = try p.expectToken(.LParen);
- const array_expr = try p.expectNode(parseExpr, .{
- .ExpectedExpr = .{ .token = p.tok_i },
- });
- _ = try p.expectToken(.RParen);
-
- const payload = try p.expectNode(parsePtrIndexPayload, .{
- .ExpectedPayload = .{ .token = p.tok_i },
- });
-
- const node = try p.arena.allocator.create(Node.For);
- node.* = .{
- .label = null,
- .inline_token = null,
- .for_token = for_token,
- .array_expr = array_expr,
- .payload = payload,
- .body = undefined, // set by caller
- .@"else" = null,
- };
- return &node.base;
+ /// This function can return null nodes and then still return nodes afterwards,
+ /// such as in the case of anytype and `...`. Caller must look for rparen to find
+ /// out when there are no more param decls left.
+ fn expectParamDecl(p: *Parser) !Node.Index {
+ _ = try p.eatDocComments();
+ switch (p.token_tags[p.tok_i]) {
+ .keyword_noalias, .keyword_comptime => p.tok_i += 1,
+ .ellipsis3 => {
+ p.tok_i += 1;
+ return null_node;
+ },
+ else => {},
+ }
+ if (p.token_tags[p.tok_i] == .identifier and
+ p.token_tags[p.tok_i + 1] == .colon)
+ {
+ p.tok_i += 2;
+ }
+ switch (p.token_tags[p.tok_i]) {
+ .keyword_anytype => {
+ p.tok_i += 1;
+ return null_node;
+ },
+ else => return p.expectTypeExpr(),
+ }
}
/// Payload <- PIPE IDENTIFIER PIPE
- fn parsePayload(p: *Parser) !?*Node {
- const lpipe = p.eatToken(.Pipe) orelse return null;
- const identifier = try p.expectNode(parseIdentifier, .{
- .ExpectedIdentifier = .{ .token = p.tok_i },
- });
- const rpipe = try p.expectToken(.Pipe);
-
- const node = try p.arena.allocator.create(Node.Payload);
- node.* = .{
- .lpipe = lpipe,
- .error_symbol = identifier,
- .rpipe = rpipe,
- };
- return &node.base;
+ fn parsePayload(p: *Parser) !TokenIndex {
+ _ = p.eatToken(.pipe) orelse return @as(TokenIndex, 0);
+ const identifier = try p.expectToken(.identifier);
+ _ = try p.expectToken(.pipe);
+ return identifier;
}
/// PtrPayload <- PIPE ASTERISK? IDENTIFIER PIPE
- fn parsePtrPayload(p: *Parser) !?*Node {
- const lpipe = p.eatToken(.Pipe) orelse return null;
- const asterisk = p.eatToken(.Asterisk);
- const identifier = try p.expectNode(parseIdentifier, .{
- .ExpectedIdentifier = .{ .token = p.tok_i },
- });
- const rpipe = try p.expectToken(.Pipe);
-
- const node = try p.arena.allocator.create(Node.PointerPayload);
- node.* = .{
- .lpipe = lpipe,
- .ptr_token = asterisk,
- .value_symbol = identifier,
- .rpipe = rpipe,
- };
- return &node.base;
+ fn parsePtrPayload(p: *Parser) !TokenIndex {
+ _ = p.eatToken(.pipe) orelse return @as(TokenIndex, 0);
+ _ = p.eatToken(.asterisk);
+ const identifier = try p.expectToken(.identifier);
+ _ = try p.expectToken(.pipe);
+ return identifier;
}
/// PtrIndexPayload <- PIPE ASTERISK? IDENTIFIER (COMMA IDENTIFIER)? PIPE
- fn parsePtrIndexPayload(p: *Parser) !?*Node {
- const lpipe = p.eatToken(.Pipe) orelse return null;
- const asterisk = p.eatToken(.Asterisk);
- const identifier = try p.expectNode(parseIdentifier, .{
- .ExpectedIdentifier = .{ .token = p.tok_i },
- });
-
- const index = if (p.eatToken(.Comma) == null)
- null
- else
- try p.expectNode(parseIdentifier, .{
- .ExpectedIdentifier = .{ .token = p.tok_i },
- });
-
- const rpipe = try p.expectToken(.Pipe);
-
- const node = try p.arena.allocator.create(Node.PointerIndexPayload);
- node.* = .{
- .lpipe = lpipe,
- .ptr_token = asterisk,
- .value_symbol = identifier,
- .index_symbol = index,
- .rpipe = rpipe,
- };
- return &node.base;
+ /// Returns the first identifier token, if any.
+ fn parsePtrIndexPayload(p: *Parser) !TokenIndex {
+ _ = p.eatToken(.pipe) orelse return @as(TokenIndex, 0);
+ _ = p.eatToken(.asterisk);
+ const identifier = try p.expectToken(.identifier);
+ if (p.eatToken(.comma) != null) {
+ _ = try p.expectToken(.identifier);
+ }
+ _ = try p.expectToken(.pipe);
+ return identifier;
}
/// SwitchProng <- SwitchCase EQUALRARROW PtrPayload? AssignExpr
- fn parseSwitchProng(p: *Parser) !?*Node {
- const node = (try p.parseSwitchCase()) orelse return null;
- const arrow = try p.expectToken(.EqualAngleBracketRight);
- const payload = try p.parsePtrPayload();
- const expr = try p.expectNode(parseAssignExpr, .{
- .ExpectedExprOrAssignment = .{ .token = p.tok_i },
- });
-
- const switch_case = node.cast(Node.SwitchCase).?;
- switch_case.arrow_token = arrow;
- switch_case.payload = payload;
- switch_case.expr = expr;
-
- return node;
- }
-
/// SwitchCase
/// <- SwitchItem (COMMA SwitchItem)* COMMA?
/// / KEYWORD_else
- fn parseSwitchCase(p: *Parser) !?*Node {
- var list = std.ArrayList(*Node).init(p.gpa);
- defer list.deinit();
-
- if (try p.parseSwitchItem()) |first_item| {
- try list.append(first_item);
- while (p.eatToken(.Comma) != null) {
- const next_item = (try p.parseSwitchItem()) orelse break;
- try list.append(next_item);
- }
- } else if (p.eatToken(.Keyword_else)) |else_token| {
- const else_node = try p.arena.allocator.create(Node.SwitchElse);
- else_node.* = .{
- .token = else_token,
- };
- try list.append(&else_node.base);
- } else return null;
-
- const node = try Node.SwitchCase.alloc(&p.arena.allocator, list.items.len);
- node.* = .{
- .items_len = list.items.len,
- .arrow_token = undefined, // set by caller
- .payload = null,
- .expr = undefined, // set by caller
- };
- std.mem.copy(*Node, node.items(), list.items);
- return &node.base;
- }
-
- /// SwitchItem <- Expr (DOT3 Expr)?
- fn parseSwitchItem(p: *Parser) !?*Node {
- const expr = (try p.parseExpr()) orelse return null;
- if (p.eatToken(.Ellipsis3)) |token| {
- const range_end = try p.expectNode(parseExpr, .{
- .ExpectedExpr = .{ .token = p.tok_i },
+ fn parseSwitchProng(p: *Parser) !Node.Index {
+ if (p.eatToken(.keyword_else)) |_| {
+ const arrow_token = try p.expectToken(.equal_angle_bracket_right);
+ _ = try p.parsePtrPayload();
+ return p.addNode(.{
+ .tag = .switch_case_one,
+ .main_token = arrow_token,
+ .data = .{
+ .lhs = 0,
+ .rhs = try p.expectAssignExpr(),
+ },
});
-
- const node = try p.arena.allocator.create(Node.SimpleInfixOp);
- node.* = .{
- .base = Node{ .tag = .Range },
- .op_token = token,
- .lhs = expr,
- .rhs = range_end,
- };
- return &node.base;
}
- return expr;
- }
-
- /// AssignOp
- /// <- ASTERISKEQUAL
- /// / SLASHEQUAL
- /// / PERCENTEQUAL
- /// / PLUSEQUAL
- /// / MINUSEQUAL
- /// / LARROW2EQUAL
- /// / RARROW2EQUAL
- /// / AMPERSANDEQUAL
- /// / CARETEQUAL
- /// / PIPEEQUAL
- /// / ASTERISKPERCENTEQUAL
- /// / PLUSPERCENTEQUAL
- /// / MINUSPERCENTEQUAL
- /// / EQUAL
- fn parseAssignOp(p: *Parser) !?*Node {
- const token = p.nextToken();
- const op: Node.Tag = switch (p.token_ids[token]) {
- .AsteriskEqual => .AssignMul,
- .SlashEqual => .AssignDiv,
- .PercentEqual => .AssignMod,
- .PlusEqual => .AssignAdd,
- .MinusEqual => .AssignSub,
- .AngleBracketAngleBracketLeftEqual => .AssignBitShiftLeft,
- .AngleBracketAngleBracketRightEqual => .AssignBitShiftRight,
- .AmpersandEqual => .AssignBitAnd,
- .CaretEqual => .AssignBitXor,
- .PipeEqual => .AssignBitOr,
- .AsteriskPercentEqual => .AssignMulWrap,
- .PlusPercentEqual => .AssignAddWrap,
- .MinusPercentEqual => .AssignSubWrap,
- .Equal => .Assign,
- else => {
- p.putBackToken(token);
- return null;
- },
- };
-
- const node = try p.arena.allocator.create(Node.SimpleInfixOp);
- node.* = .{
- .base = .{ .tag = op },
- .op_token = token,
- .lhs = undefined, // set by caller
- .rhs = undefined, // set by caller
- };
- return &node.base;
- }
-
- /// CompareOp
- /// <- EQUALEQUAL
- /// / EXCLAMATIONMARKEQUAL
- /// / LARROW
- /// / RARROW
- /// / LARROWEQUAL
- /// / RARROWEQUAL
- fn parseCompareOp(p: *Parser) !?*Node {
- const token = p.nextToken();
- const op: Node.Tag = switch (p.token_ids[token]) {
- .EqualEqual => .EqualEqual,
- .BangEqual => .BangEqual,
- .AngleBracketLeft => .LessThan,
- .AngleBracketRight => .GreaterThan,
- .AngleBracketLeftEqual => .LessOrEqual,
- .AngleBracketRightEqual => .GreaterOrEqual,
- else => {
- p.putBackToken(token);
- return null;
- },
- };
-
- return p.createInfixOp(token, op);
- }
-
- /// BitwiseOp
- /// <- AMPERSAND
- /// / CARET
- /// / PIPE
- /// / KEYWORD_orelse
- /// / KEYWORD_catch Payload?
- fn parseBitwiseOp(p: *Parser) !?*Node {
- const token = p.nextToken();
- const op: Node.Tag = switch (p.token_ids[token]) {
- .Ampersand => .BitAnd,
- .Caret => .BitXor,
- .Pipe => .BitOr,
- .Keyword_orelse => .OrElse,
- .Keyword_catch => {
- const payload = try p.parsePayload();
- const node = try p.arena.allocator.create(Node.Catch);
- node.* = .{
- .op_token = token,
- .lhs = undefined, // set by caller
- .rhs = undefined, // set by caller
- .payload = payload,
- };
- return &node.base;
- },
- else => {
- p.putBackToken(token);
- return null;
- },
- };
-
- return p.createInfixOp(token, op);
- }
-
- /// BitShiftOp
- /// <- LARROW2
- /// / RARROW2
- fn parseBitShiftOp(p: *Parser) !?*Node {
- const token = p.nextToken();
- const op: Node.Tag = switch (p.token_ids[token]) {
- .AngleBracketAngleBracketLeft => .BitShiftLeft,
- .AngleBracketAngleBracketRight => .BitShiftRight,
- else => {
- p.putBackToken(token);
- return null;
- },
- };
-
- return p.createInfixOp(token, op);
- }
-
- /// AdditionOp
- /// <- PLUS
- /// / MINUS
- /// / PLUS2
- /// / PLUSPERCENT
- /// / MINUSPERCENT
- fn parseAdditionOp(p: *Parser) !?*Node {
- const token = p.nextToken();
- const op: Node.Tag = switch (p.token_ids[token]) {
- .Plus => .Add,
- .Minus => .Sub,
- .PlusPlus => .ArrayCat,
- .PlusPercent => .AddWrap,
- .MinusPercent => .SubWrap,
- else => {
- p.putBackToken(token);
- return null;
- },
- };
-
- return p.createInfixOp(token, op);
- }
-
- /// MultiplyOp
- /// <- PIPE2
- /// / ASTERISK
- /// / SLASH
- /// / PERCENT
- /// / ASTERISK2
- /// / ASTERISKPERCENT
- fn parseMultiplyOp(p: *Parser) !?*Node {
- const token = p.nextToken();
- const op: Node.Tag = switch (p.token_ids[token]) {
- .PipePipe => .MergeErrorSets,
- .Asterisk => .Mul,
- .Slash => .Div,
- .Percent => .Mod,
- .AsteriskAsterisk => .ArrayMult,
- .AsteriskPercent => .MulWrap,
- else => {
- p.putBackToken(token);
- return null;
- },
- };
-
- return p.createInfixOp(token, op);
- }
-
- /// PrefixOp
- /// <- EXCLAMATIONMARK
- /// / MINUS
- /// / TILDE
- /// / MINUSPERCENT
- /// / AMPERSAND
- /// / KEYWORD_try
- /// / KEYWORD_await
- fn parsePrefixOp(p: *Parser) !?*Node {
- const token = p.nextToken();
- switch (p.token_ids[token]) {
- .Bang => return p.allocSimplePrefixOp(.BoolNot, token),
- .Minus => return p.allocSimplePrefixOp(.Negation, token),
- .Tilde => return p.allocSimplePrefixOp(.BitNot, token),
- .MinusPercent => return p.allocSimplePrefixOp(.NegationWrap, token),
- .Ampersand => return p.allocSimplePrefixOp(.AddressOf, token),
- .Keyword_try => return p.allocSimplePrefixOp(.Try, token),
- .Keyword_await => return p.allocSimplePrefixOp(.Await, token),
- else => {
- p.putBackToken(token);
- return null;
- },
- }
- }
-
- fn allocSimplePrefixOp(p: *Parser, comptime tag: Node.Tag, token: TokenIndex) !?*Node {
- const node = try p.arena.allocator.create(Node.SimplePrefixOp);
- node.* = .{
- .base = .{ .tag = tag },
- .op_token = token,
- .rhs = undefined, // set by caller
- };
- return &node.base;
- }
-
- // TODO: ArrayTypeStart is either an array or a slice, but const/allowzero only work on
- // pointers. Consider updating this rule:
- // ...
- // / ArrayTypeStart
- // / SliceTypeStart (ByteAlign / KEYWORD_const / KEYWORD_volatile / KEYWORD_allowzero)*
- // / PtrTypeStart ...
-
- /// PrefixTypeOp
- /// <- QUESTIONMARK
- /// / KEYWORD_anyframe MINUSRARROW
- /// / ArrayTypeStart (ByteAlign / KEYWORD_const / KEYWORD_volatile / KEYWORD_allowzero)*
- /// / PtrTypeStart (KEYWORD_align LPAREN Expr (COLON INTEGER COLON INTEGER)? RPAREN / KEYWORD_const / KEYWORD_volatile / KEYWORD_allowzero)*
- fn parsePrefixTypeOp(p: *Parser) !?*Node {
- if (p.eatToken(.QuestionMark)) |token| {
- const node = try p.arena.allocator.create(Node.SimplePrefixOp);
- node.* = .{
- .base = .{ .tag = .OptionalType },
- .op_token = token,
- .rhs = undefined, // set by caller
- };
- return &node.base;
- }
-
- if (p.eatToken(.Keyword_anyframe)) |token| {
- const arrow = p.eatToken(.Arrow) orelse {
- p.putBackToken(token);
- return null;
- };
- const node = try p.arena.allocator.create(Node.AnyFrameType);
- node.* = .{
- .anyframe_token = token,
- .result = .{
- .arrow_token = arrow,
- .return_type = undefined, // set by caller
+ const first_item = try p.parseSwitchItem();
+ if (first_item == 0) return null_node;
+
+ if (p.eatToken(.equal_angle_bracket_right)) |arrow_token| {
+ _ = try p.parsePtrPayload();
+ return p.addNode(.{
+ .tag = .switch_case_one,
+ .main_token = arrow_token,
+ .data = .{
+ .lhs = first_item,
+ .rhs = try p.expectAssignExpr(),
},
- };
- return &node.base;
- }
-
- if (try p.parsePtrTypeStart()) |node| {
- // If the token encountered was **, there will be two nodes instead of one.
- // The attributes should be applied to the rightmost operator.
- var ptr_info = if (node.cast(Node.PtrType)) |ptr_type|
- if (p.token_ids[ptr_type.op_token] == .AsteriskAsterisk)
- &ptr_type.rhs.cast(Node.PtrType).?.ptr_info
- else
- &ptr_type.ptr_info
- else if (node.cast(Node.SliceType)) |slice_type|
- &slice_type.ptr_info
- else
- unreachable;
-
- while (true) {
- if (p.eatToken(.Keyword_align)) |align_token| {
- const lparen = try p.expectToken(.LParen);
- const expr_node = try p.expectNode(parseExpr, .{
- .ExpectedExpr = .{ .token = p.tok_i },
- });
-
- // Optional bit range
- const bit_range = if (p.eatToken(.Colon)) |_| bit_range_value: {
- const range_start = try p.expectNode(parseIntegerLiteral, .{
- .ExpectedIntegerLiteral = .{ .token = p.tok_i },
- });
- _ = try p.expectToken(.Colon);
- const range_end = try p.expectNode(parseIntegerLiteral, .{
- .ExpectedIntegerLiteral = .{ .token = p.tok_i },
- });
-
- break :bit_range_value ast.PtrInfo.Align.BitRange{
- .start = range_start,
- .end = range_end,
- };
- } else null;
- _ = try p.expectToken(.RParen);
-
- if (ptr_info.align_info != null) {
- try p.errors.append(p.gpa, .{
- .ExtraAlignQualifier = .{ .token = p.tok_i - 1 },
- });
- continue;
- }
-
- ptr_info.align_info = ast.PtrInfo.Align{
- .node = expr_node,
- .bit_range = bit_range,
- };
-
- continue;
- }
- if (p.eatToken(.Keyword_const)) |const_token| {
- if (ptr_info.const_token != null) {
- try p.errors.append(p.gpa, .{
- .ExtraConstQualifier = .{ .token = p.tok_i - 1 },
- });
- continue;
- }
- ptr_info.const_token = const_token;
- continue;
- }
- if (p.eatToken(.Keyword_volatile)) |volatile_token| {
- if (ptr_info.volatile_token != null) {
- try p.errors.append(p.gpa, .{
- .ExtraVolatileQualifier = .{ .token = p.tok_i - 1 },
- });
- continue;
- }
- ptr_info.volatile_token = volatile_token;
- continue;
- }
- if (p.eatToken(.Keyword_allowzero)) |allowzero_token| {
- if (ptr_info.allowzero_token != null) {
- try p.errors.append(p.gpa, .{
- .ExtraAllowZeroQualifier = .{ .token = p.tok_i - 1 },
- });
- continue;
- }
- ptr_info.allowzero_token = allowzero_token;
- continue;
- }
- break;
- }
-
- return node;
- }
-
- if (try p.parseArrayTypeStart()) |node| {
- if (node.cast(Node.SliceType)) |slice_type| {
- // Collect pointer qualifiers in any order, but disallow duplicates
- while (true) {
- if (try p.parseByteAlign()) |align_expr| {
- if (slice_type.ptr_info.align_info != null) {
- try p.errors.append(p.gpa, .{
- .ExtraAlignQualifier = .{ .token = p.tok_i - 1 },
- });
- continue;
- }
- slice_type.ptr_info.align_info = ast.PtrInfo.Align{
- .node = align_expr,
- .bit_range = null,
- };
- continue;
- }
- if (p.eatToken(.Keyword_const)) |const_token| {
- if (slice_type.ptr_info.const_token != null) {
- try p.errors.append(p.gpa, .{
- .ExtraConstQualifier = .{ .token = p.tok_i - 1 },
- });
- continue;
- }
- slice_type.ptr_info.const_token = const_token;
- continue;
- }
- if (p.eatToken(.Keyword_volatile)) |volatile_token| {
- if (slice_type.ptr_info.volatile_token != null) {
- try p.errors.append(p.gpa, .{
- .ExtraVolatileQualifier = .{ .token = p.tok_i - 1 },
- });
- continue;
- }
- slice_type.ptr_info.volatile_token = volatile_token;
- continue;
- }
- if (p.eatToken(.Keyword_allowzero)) |allowzero_token| {
- if (slice_type.ptr_info.allowzero_token != null) {
- try p.errors.append(p.gpa, .{
- .ExtraAllowZeroQualifier = .{ .token = p.tok_i - 1 },
- });
- continue;
- }
- slice_type.ptr_info.allowzero_token = allowzero_token;
- continue;
- }
- break;
- }
- }
- return node;
- }
-
- return null;
- }
-
- /// SuffixOp
- /// <- LBRACKET Expr (DOT2 (Expr (COLON Expr)?)?)? RBRACKET
- /// / DOT IDENTIFIER
- /// / DOTASTERISK
- /// / DOTQUESTIONMARK
- fn parseSuffixOp(p: *Parser, lhs: *Node) !?*Node {
- if (p.eatToken(.LBracket)) |_| {
- const index_expr = try p.expectNode(parseExpr, .{
- .ExpectedExpr = .{ .token = p.tok_i },
});
-
- if (p.eatToken(.Ellipsis2) != null) {
- const end_expr = try p.parseExpr();
- const sentinel: ?*Node = if (p.eatToken(.Colon) != null)
- try p.parseExpr()
- else
- null;
- const rtoken = try p.expectToken(.RBracket);
- const node = try p.arena.allocator.create(Node.Slice);
- node.* = .{
- .lhs = lhs,
- .rtoken = rtoken,
- .start = index_expr,
- .end = end_expr,
- .sentinel = sentinel,
- };
- return &node.base;
- }
-
- const rtoken = try p.expectToken(.RBracket);
- const node = try p.arena.allocator.create(Node.ArrayAccess);
- node.* = .{
- .lhs = lhs,
- .rtoken = rtoken,
- .index_expr = index_expr,
- };
- return &node.base;
}
- if (p.eatToken(.PeriodAsterisk)) |period_asterisk| {
- const node = try p.arena.allocator.create(Node.SimpleSuffixOp);
- node.* = .{
- .base = .{ .tag = .Deref },
- .lhs = lhs,
- .rtoken = period_asterisk,
- };
- return &node.base;
- }
+ var list = std.ArrayList(Node.Index).init(p.gpa);
+ defer list.deinit();
- if (p.eatToken(.Invalid_periodasterisks)) |period_asterisk| {
- try p.errors.append(p.gpa, .{
- .AsteriskAfterPointerDereference = .{ .token = period_asterisk },
- });
- const node = try p.arena.allocator.create(Node.SimpleSuffixOp);
- node.* = .{
- .base = .{ .tag = .Deref },
- .lhs = lhs,
- .rtoken = period_asterisk,
- };
- return &node.base;
- }
+ try list.append(first_item);
+ while (p.eatToken(.comma)) |_| {
+ const next_item = try p.parseSwitchItem();
+ if (next_item == 0) break;
+ try list.append(next_item);
+ }
+ const span = try p.listToSpan(list.items);
+ const arrow_token = try p.expectToken(.equal_angle_bracket_right);
+ _ = try p.parsePtrPayload();
+ return p.addNode(.{
+ .tag = .switch_case,
+ .main_token = arrow_token,
+ .data = .{
+ .lhs = try p.addExtra(Node.SubRange{
+ .start = span.start,
+ .end = span.end,
+ }),
+ .rhs = try p.expectAssignExpr(),
+ },
+ });
+ }
- if (p.eatToken(.Period)) |period| {
- if (try p.parseIdentifier()) |identifier| {
- const node = try p.arena.allocator.create(Node.SimpleInfixOp);
- node.* = .{
- .base = Node{ .tag = .Period },
- .op_token = period,
- .lhs = lhs,
- .rhs = identifier,
- };
- return &node.base;
- }
- if (p.eatToken(.QuestionMark)) |question_mark| {
- const node = try p.arena.allocator.create(Node.SimpleSuffixOp);
- node.* = .{
- .base = .{ .tag = .UnwrapOptional },
- .lhs = lhs,
- .rtoken = question_mark,
- };
- return &node.base;
- }
- try p.errors.append(p.gpa, .{
- .ExpectedSuffixOp = .{ .token = p.tok_i },
+ /// SwitchItem <- Expr (DOT3 Expr)?
+ fn parseSwitchItem(p: *Parser) !Node.Index {
+ const expr = try p.parseExpr();
+ if (expr == 0) return null_node;
+
+ if (p.eatToken(.ellipsis3)) |token| {
+ return p.addNode(.{
+ .tag = .switch_range,
+ .main_token = token,
+ .data = .{
+ .lhs = expr,
+ .rhs = try p.expectExpr(),
+ },
});
- return null;
}
-
- return null;
- }
-
- /// FnCallArguments <- LPAREN ExprList RPAREN
- /// ExprList <- (Expr COMMA)* Expr?
- fn parseFnCallArguments(p: *Parser) !?AnnotatedParamList {
- if (p.eatToken(.LParen) == null) return null;
- const list = try ListParseFn(*Node, parseExpr)(p);
- errdefer p.gpa.free(list);
- const rparen = try p.expectToken(.RParen);
- return AnnotatedParamList{ .list = list, .rparen = rparen };
+ return expr;
}
- const AnnotatedParamList = struct {
- list: []*Node,
- rparen: TokenIndex,
+ const PtrModifiers = struct {
+ align_node: Node.Index,
+ bit_range_start: Node.Index,
+ bit_range_end: Node.Index,
};
- /// ArrayTypeStart <- LBRACKET Expr? (COLON Expr)? RBRACKET
- fn parseArrayTypeStart(p: *Parser) !?*Node {
- const lbracket = p.eatToken(.LBracket) orelse return null;
- const expr = try p.parseExpr();
- const sentinel = if (p.eatToken(.Colon)) |_|
- try p.expectNode(parseExpr, .{
- .ExpectedExpr = .{ .token = p.tok_i },
- })
- else
- null;
- const rbracket = try p.expectToken(.RBracket);
-
- if (expr) |len_expr| {
- if (sentinel) |s| {
- const node = try p.arena.allocator.create(Node.ArrayTypeSentinel);
- node.* = .{
- .op_token = lbracket,
- .rhs = undefined, // set by caller
- .len_expr = len_expr,
- .sentinel = s,
- };
- return &node.base;
- } else {
- const node = try p.arena.allocator.create(Node.ArrayType);
- node.* = .{
- .op_token = lbracket,
- .rhs = undefined, // set by caller
- .len_expr = len_expr,
- };
- return &node.base;
- }
- }
-
- const node = try p.arena.allocator.create(Node.SliceType);
- node.* = .{
- .op_token = lbracket,
- .rhs = undefined, // set by caller
- .ptr_info = .{ .sentinel = sentinel },
+ fn parsePtrModifiers(p: *Parser) !PtrModifiers {
+ var result: PtrModifiers = .{
+ .align_node = 0,
+ .bit_range_start = 0,
+ .bit_range_end = 0,
};
- return &node.base;
- }
-
- /// PtrTypeStart
- /// <- ASTERISK
- /// / ASTERISK2
- /// / LBRACKET ASTERISK (LETTERC / COLON Expr)? RBRACKET
- fn parsePtrTypeStart(p: *Parser) !?*Node {
- if (p.eatToken(.Asterisk)) |asterisk| {
- const sentinel = if (p.eatToken(.Colon)) |_|
- try p.expectNode(parseExpr, .{
- .ExpectedExpr = .{ .token = p.tok_i },
- })
- else
- null;
- const node = try p.arena.allocator.create(Node.PtrType);
- node.* = .{
- .op_token = asterisk,
- .rhs = undefined, // set by caller
- .ptr_info = .{ .sentinel = sentinel },
- };
- return &node.base;
- }
-
- if (p.eatToken(.AsteriskAsterisk)) |double_asterisk| {
- const node = try p.arena.allocator.create(Node.PtrType);
- node.* = .{
- .op_token = double_asterisk,
- .rhs = undefined, // set by caller
- };
-
- // Special case for **, which is its own token
- const child = try p.arena.allocator.create(Node.PtrType);
- child.* = .{
- .op_token = double_asterisk,
- .rhs = undefined, // set by caller
- };
- node.rhs = &child.base;
+ var saw_const = false;
+ var saw_volatile = false;
+ var saw_allowzero = false;
+ while (true) {
+ switch (p.token_tags[p.tok_i]) {
+ .keyword_align => {
+ if (result.align_node != 0) {
+ try p.warn(.extra_align_qualifier);
+ }
+ p.tok_i += 1;
+ _ = try p.expectToken(.l_paren);
+ result.align_node = try p.expectExpr();
+
+ if (p.eatToken(.colon)) |_| {
+ result.bit_range_start = try p.expectExpr();
+ _ = try p.expectToken(.colon);
+ result.bit_range_end = try p.expectExpr();
+ }
- return &node.base;
- }
- if (p.eatToken(.LBracket)) |lbracket| {
- const asterisk = p.eatToken(.Asterisk) orelse {
- p.putBackToken(lbracket);
- return null;
- };
- if (p.eatToken(.Identifier)) |ident| {
- const token_loc = p.token_locs[ident];
- const token_slice = p.source[token_loc.start..token_loc.end];
- if (!std.mem.eql(u8, token_slice, "c")) {
- p.putBackToken(ident);
- } else {
- _ = try p.expectToken(.RBracket);
- const node = try p.arena.allocator.create(Node.PtrType);
- node.* = .{
- .op_token = lbracket,
- .rhs = undefined, // set by caller
- };
- return &node.base;
- }
+ _ = try p.expectToken(.r_paren);
+ },
+ .keyword_const => {
+ if (saw_const) {
+ try p.warn(.extra_const_qualifier);
+ }
+ p.tok_i += 1;
+ saw_const = true;
+ },
+ .keyword_volatile => {
+ if (saw_volatile) {
+ try p.warn(.extra_volatile_qualifier);
+ }
+ p.tok_i += 1;
+ saw_volatile = true;
+ },
+ .keyword_allowzero => {
+ if (saw_allowzero) {
+ try p.warn(.extra_allowzero_qualifier);
+ }
+ p.tok_i += 1;
+ saw_allowzero = true;
+ },
+ else => return result,
}
- const sentinel = if (p.eatToken(.Colon)) |_|
- try p.expectNode(parseExpr, .{
- .ExpectedExpr = .{ .token = p.tok_i },
- })
- else
- null;
- _ = try p.expectToken(.RBracket);
- const node = try p.arena.allocator.create(Node.PtrType);
- node.* = .{
- .op_token = lbracket,
- .rhs = undefined, // set by caller
- .ptr_info = .{ .sentinel = sentinel },
- };
- return &node.base;
}
- return null;
}
- /// ContainerDeclAuto <- ContainerDeclType LBRACE ContainerMembers RBRACE
- fn parseContainerDeclAuto(p: *Parser) !?*Node {
- const container_decl_type = (try p.parseContainerDeclType()) orelse return null;
- const lbrace = try p.expectToken(.LBrace);
- const members = try p.parseContainerMembers(false);
- defer p.gpa.free(members);
- const rbrace = try p.expectToken(.RBrace);
-
- const members_len = @intCast(NodeIndex, members.len);
- const node = try Node.ContainerDecl.alloc(&p.arena.allocator, members_len);
- node.* = .{
- .layout_token = null,
- .kind_token = container_decl_type.kind_token,
- .init_arg_expr = container_decl_type.init_arg_expr,
- .fields_and_decls_len = members_len,
- .lbrace_token = lbrace,
- .rbrace_token = rbrace,
- };
- std.mem.copy(*Node, node.fieldsAndDecls(), members);
- return &node.base;
+ /// SuffixOp
+ /// <- LBRACKET Expr (DOT2 (Expr (COLON Expr)?)?)? RBRACKET
+ /// / DOT IDENTIFIER
+ /// / DOTASTERISK
+ /// / DOTQUESTIONMARK
+ fn parseSuffixOp(p: *Parser, lhs: Node.Index) !Node.Index {
+ switch (p.token_tags[p.tok_i]) {
+ .l_bracket => {
+ const lbracket = p.nextToken();
+ const index_expr = try p.expectExpr();
+
+ if (p.eatToken(.ellipsis2)) |_| {
+ const end_expr = try p.parseExpr();
+ if (end_expr == 0) {
+ _ = try p.expectToken(.r_bracket);
+ return p.addNode(.{
+ .tag = .slice_open,
+ .main_token = lbracket,
+ .data = .{
+ .lhs = lhs,
+ .rhs = index_expr,
+ },
+ });
+ }
+ if (p.eatToken(.colon)) |_| {
+ const sentinel = try p.parseExpr();
+ _ = try p.expectToken(.r_bracket);
+ return p.addNode(.{
+ .tag = .slice_sentinel,
+ .main_token = lbracket,
+ .data = .{
+ .lhs = lhs,
+ .rhs = try p.addExtra(Node.SliceSentinel{
+ .start = index_expr,
+ .end = end_expr,
+ .sentinel = sentinel,
+ }),
+ },
+ });
+ } else {
+ _ = try p.expectToken(.r_bracket);
+ return p.addNode(.{
+ .tag = .slice,
+ .main_token = lbracket,
+ .data = .{
+ .lhs = lhs,
+ .rhs = try p.addExtra(Node.Slice{
+ .start = index_expr,
+ .end = end_expr,
+ }),
+ },
+ });
+ }
+ }
+ _ = try p.expectToken(.r_bracket);
+ return p.addNode(.{
+ .tag = .array_access,
+ .main_token = lbracket,
+ .data = .{
+ .lhs = lhs,
+ .rhs = index_expr,
+ },
+ });
+ },
+ .period_asterisk => return p.addNode(.{
+ .tag = .deref,
+ .main_token = p.nextToken(),
+ .data = .{
+ .lhs = lhs,
+ .rhs = undefined,
+ },
+ }),
+ .invalid_periodasterisks => {
+ try p.warn(.asterisk_after_ptr_deref);
+ return p.addNode(.{
+ .tag = .deref,
+ .main_token = p.nextToken(),
+ .data = .{
+ .lhs = lhs,
+ .rhs = undefined,
+ },
+ });
+ },
+ .period => switch (p.token_tags[p.tok_i + 1]) {
+ .identifier => return p.addNode(.{
+ .tag = .field_access,
+ .main_token = p.nextToken(),
+ .data = .{
+ .lhs = lhs,
+ .rhs = p.nextToken(),
+ },
+ }),
+ .question_mark => return p.addNode(.{
+ .tag = .unwrap_optional,
+ .main_token = p.nextToken(),
+ .data = .{
+ .lhs = lhs,
+ .rhs = p.nextToken(),
+ },
+ }),
+ else => {
+ p.tok_i += 1;
+ try p.warn(.expected_suffix_op);
+ return null_node;
+ },
+ },
+ else => return null_node,
+ }
}
- /// Holds temporary data until we are ready to construct the full ContainerDecl AST node.
- const ContainerDeclType = struct {
- kind_token: TokenIndex,
- init_arg_expr: Node.ContainerDecl.InitArg,
- };
-
+ /// Caller must have already verified the first token.
/// ContainerDeclType
/// <- KEYWORD_struct
/// / KEYWORD_enum (LPAREN Expr RPAREN)?
/// / KEYWORD_union (LPAREN (KEYWORD_enum (LPAREN Expr RPAREN)? / Expr) RPAREN)?
/// / KEYWORD_opaque
- fn parseContainerDeclType(p: *Parser) !?ContainerDeclType {
- const kind_token = p.nextToken();
-
- const init_arg_expr = switch (p.token_ids[kind_token]) {
- .Keyword_struct, .Keyword_opaque => Node.ContainerDecl.InitArg{ .None = {} },
- .Keyword_enum => blk: {
- if (p.eatToken(.LParen) != null) {
- const expr = try p.expectNode(parseExpr, .{
- .ExpectedExpr = .{ .token = p.tok_i },
- });
- _ = try p.expectToken(.RParen);
- break :blk Node.ContainerDecl.InitArg{ .Type = expr };
+ fn parseContainerDeclAuto(p: *Parser) !Node.Index {
+ const main_token = p.nextToken();
+ const arg_expr = switch (p.token_tags[main_token]) {
+ .keyword_struct, .keyword_opaque => null_node,
+ .keyword_enum => blk: {
+ if (p.eatToken(.l_paren)) |_| {
+ const expr = try p.expectExpr();
+ _ = try p.expectToken(.r_paren);
+ break :blk expr;
+ } else {
+ break :blk null_node;
}
- break :blk Node.ContainerDecl.InitArg{ .None = {} };
- },
- .Keyword_union => blk: {
- if (p.eatToken(.LParen) != null) {
- if (p.eatToken(.Keyword_enum) != null) {
- if (p.eatToken(.LParen) != null) {
- const expr = try p.expectNode(parseExpr, .{
- .ExpectedExpr = .{ .token = p.tok_i },
+ },
+ .keyword_union => blk: {
+ if (p.eatToken(.l_paren)) |_| {
+ if (p.eatToken(.keyword_enum)) |_| {
+ if (p.eatToken(.l_paren)) |_| {
+ const enum_tag_expr = try p.expectExpr();
+ _ = try p.expectToken(.r_paren);
+ _ = try p.expectToken(.r_paren);
+
+ _ = try p.expectToken(.l_brace);
+ const members = try p.parseContainerMembers();
+ const members_span = try members.toSpan(p);
+ _ = try p.expectToken(.r_brace);
+ return p.addNode(.{
+ .tag = switch (members.trailing) {
+ true => .tagged_union_enum_tag_trailing,
+ false => .tagged_union_enum_tag,
+ },
+ .main_token = main_token,
+ .data = .{
+ .lhs = enum_tag_expr,
+ .rhs = try p.addExtra(members_span),
+ },
});
- _ = try p.expectToken(.RParen);
- _ = try p.expectToken(.RParen);
- break :blk Node.ContainerDecl.InitArg{ .Enum = expr };
+ } else {
+ _ = try p.expectToken(.r_paren);
+
+ _ = try p.expectToken(.l_brace);
+ const members = try p.parseContainerMembers();
+ _ = try p.expectToken(.r_brace);
+ if (members.len <= 2) {
+ return p.addNode(.{
+ .tag = switch (members.trailing) {
+ true => .tagged_union_two_trailing,
+ false => .tagged_union_two,
+ },
+ .main_token = main_token,
+ .data = .{
+ .lhs = members.lhs,
+ .rhs = members.rhs,
+ },
+ });
+ } else {
+ const span = try members.toSpan(p);
+ return p.addNode(.{
+ .tag = switch (members.trailing) {
+ true => .tagged_union_trailing,
+ false => .tagged_union,
+ },
+ .main_token = main_token,
+ .data = .{
+ .lhs = span.start,
+ .rhs = span.end,
+ },
+ });
+ }
}
- _ = try p.expectToken(.RParen);
- break :blk Node.ContainerDecl.InitArg{ .Enum = null };
+ } else {
+ const expr = try p.expectExpr();
+ _ = try p.expectToken(.r_paren);
+ break :blk expr;
}
- const expr = try p.expectNode(parseExpr, .{
- .ExpectedExpr = .{ .token = p.tok_i },
- });
- _ = try p.expectToken(.RParen);
- break :blk Node.ContainerDecl.InitArg{ .Type = expr };
+ } else {
+ break :blk null_node;
}
- break :blk Node.ContainerDecl.InitArg{ .None = {} };
},
else => {
- p.putBackToken(kind_token);
- return null;
+ p.tok_i -= 1;
+ return p.fail(.expected_container);
},
};
-
- return ContainerDeclType{
- .kind_token = kind_token,
- .init_arg_expr = init_arg_expr,
- };
+ _ = try p.expectToken(.l_brace);
+ const members = try p.parseContainerMembers();
+ _ = try p.expectToken(.r_brace);
+ if (arg_expr == 0) {
+ if (members.len <= 2) {
+ return p.addNode(.{
+ .tag = switch (members.trailing) {
+ true => .container_decl_two_trailing,
+ false => .container_decl_two,
+ },
+ .main_token = main_token,
+ .data = .{
+ .lhs = members.lhs,
+ .rhs = members.rhs,
+ },
+ });
+ } else {
+ const span = try members.toSpan(p);
+ return p.addNode(.{
+ .tag = switch (members.trailing) {
+ true => .container_decl_trailing,
+ false => .container_decl,
+ },
+ .main_token = main_token,
+ .data = .{
+ .lhs = span.start,
+ .rhs = span.end,
+ },
+ });
+ }
+ } else {
+ const span = try members.toSpan(p);
+ return p.addNode(.{
+ .tag = switch (members.trailing) {
+ true => .container_decl_arg_trailing,
+ false => .container_decl_arg,
+ },
+ .main_token = main_token,
+ .data = .{
+ .lhs = arg_expr,
+ .rhs = try p.addExtra(Node.SubRange{
+ .start = span.start,
+ .end = span.end,
+ }),
+ },
+ });
+ }
}
+
/// ByteAlign <- KEYWORD_align LPAREN Expr RPAREN
- fn parseByteAlign(p: *Parser) !?*Node {
- _ = p.eatToken(.Keyword_align) orelse return null;
- _ = try p.expectToken(.LParen);
- const expr = try p.expectNode(parseExpr, .{
- .ExpectedExpr = .{ .token = p.tok_i },
- });
- _ = try p.expectToken(.RParen);
+ fn parseByteAlign(p: *Parser) !Node.Index {
+ _ = p.eatToken(.keyword_align) orelse return null_node;
+ _ = try p.expectToken(.l_paren);
+ const expr = try p.expectExpr();
+ _ = try p.expectToken(.r_paren);
return expr;
}
- /// IdentifierList <- (IDENTIFIER COMMA)* IDENTIFIER?
- /// Only ErrorSetDecl parses an IdentifierList
- fn parseErrorTagList(p: *Parser) ![]*Node {
- return ListParseFn(*Node, parseErrorTag)(p);
- }
-
/// SwitchProngList <- (SwitchProng COMMA)* SwitchProng?
- fn parseSwitchProngList(p: *Parser) ![]*Node {
- return ListParseFn(*Node, parseSwitchProng)(p);
+ fn parseSwitchProngList(p: *Parser) !Node.SubRange {
+ return ListParseFn(parseSwitchProng)(p);
}
- /// AsmOutputList <- (AsmOutputItem COMMA)* AsmOutputItem?
- fn parseAsmOutputList(p: *Parser) Error![]Node.Asm.Output {
- return ListParseFn(Node.Asm.Output, parseAsmOutputItem)(p);
- }
+ /// ParamDeclList <- (ParamDecl COMMA)* ParamDecl?
+ fn parseParamDeclList(p: *Parser) !SmallSpan {
+ _ = try p.expectToken(.l_paren);
+ if (p.eatToken(.r_paren)) |_| {
+ return SmallSpan{ .zero_or_one = 0 };
+ }
+ const param_one = while (true) {
+ const param = try p.expectParamDecl();
+ if (param != 0) break param;
+ switch (p.token_tags[p.nextToken()]) {
+ .comma => {
+ if (p.eatToken(.r_paren)) |_| {
+ return SmallSpan{ .zero_or_one = 0 };
+ }
+ continue;
+ },
+ .r_paren => return SmallSpan{ .zero_or_one = 0 },
+ else => {
+ // This is likely just a missing comma;
+ // give an error but continue parsing this list.
+ p.tok_i -= 1;
+ try p.warnExpected(.comma);
+ },
+ }
+ } else unreachable;
- /// AsmInputList <- (AsmInputItem COMMA)* AsmInputItem?
- fn parseAsmInputList(p: *Parser) Error![]Node.Asm.Input {
- return ListParseFn(Node.Asm.Input, parseAsmInputItem)(p);
- }
+ const param_two = while (true) {
+ switch (p.token_tags[p.nextToken()]) {
+ .comma => {
+ if (p.eatToken(.r_paren)) |_| {
+ return SmallSpan{ .zero_or_one = param_one };
+ }
+ const param = try p.expectParamDecl();
+ if (param != 0) break param;
+ continue;
+ },
+ .r_paren => return SmallSpan{ .zero_or_one = param_one },
+ .colon, .r_brace, .r_bracket => {
+ p.tok_i -= 1;
+ return p.failExpected(.r_paren);
+ },
+ else => {
+ // This is likely just a missing comma;
+ // give an error but continue parsing this list.
+ p.tok_i -= 1;
+ try p.warnExpected(.comma);
+ },
+ }
+ } else unreachable;
- /// ParamDeclList <- (ParamDecl COMMA)* ParamDecl?
- fn parseParamDeclList(p: *Parser) ![]Node.FnProto.ParamDecl {
- return ListParseFn(Node.FnProto.ParamDecl, parseParamDecl)(p);
+ var list = std.ArrayList(Node.Index).init(p.gpa);
+ defer list.deinit();
+
+ try list.appendSlice(&.{ param_one, param_two });
+
+ while (true) {
+ switch (p.token_tags[p.nextToken()]) {
+ .comma => {
+ if (p.token_tags[p.tok_i] == .r_paren) {
+ p.tok_i += 1;
+ return SmallSpan{ .multi = list.toOwnedSlice() };
+ }
+ const param = try p.expectParamDecl();
+ if (param != 0) {
+ try list.append(param);
+ }
+ continue;
+ },
+ .r_paren => return SmallSpan{ .multi = list.toOwnedSlice() },
+ .colon, .r_brace, .r_bracket => {
+ p.tok_i -= 1;
+ return p.failExpected(.r_paren);
+ },
+ else => {
+ // This is likely just a missing comma;
+ // give an error but continue parsing this list.
+ p.tok_i -= 1;
+ try p.warnExpected(.comma);
+ },
+ }
+ }
}
- const NodeParseFn = fn (p: *Parser) Error!?*Node;
+ const NodeParseFn = fn (p: *Parser) Error!Node.Index;
- fn ListParseFn(comptime E: type, comptime nodeParseFn: anytype) ParseFn([]E) {
+ fn ListParseFn(comptime nodeParseFn: anytype) (fn (p: *Parser) Error!Node.SubRange) {
return struct {
- pub fn parse(p: *Parser) ![]E {
- var list = std.ArrayList(E).init(p.gpa);
+ pub fn parse(p: *Parser) Error!Node.SubRange {
+ var list = std.ArrayList(Node.Index).init(p.gpa);
defer list.deinit();
- while (try nodeParseFn(p)) |item| {
+ while (true) {
+ const item = try nodeParseFn(p);
+ if (item == 0) break;
+
try list.append(item);
- switch (p.token_ids[p.tok_i]) {
- .Comma => _ = p.nextToken(),
+ switch (p.token_tags[p.tok_i]) {
+ .comma => p.tok_i += 1,
// all possible delimiters
- .Colon, .RParen, .RBrace, .RBracket => break,
+ .colon, .r_paren, .r_brace, .r_bracket => break,
else => {
- // this is likely just a missing comma,
- // continue parsing this list and give an error
- try p.errors.append(p.gpa, .{
- .ExpectedToken = .{ .token = p.tok_i, .expected_id = .Comma },
- });
+ // This is likely just a missing comma;
+ // give an error but continue parsing this list.
+ try p.warnExpected(.comma);
},
}
}
- return list.toOwnedSlice();
- }
- }.parse;
- }
-
- fn SimpleBinOpParseFn(comptime token: Token.Id, comptime op: Node.Tag) NodeParseFn {
- return struct {
- pub fn parse(p: *Parser) Error!?*Node {
- const op_token = if (token == .Keyword_and) switch (p.token_ids[p.tok_i]) {
- .Keyword_and => p.nextToken(),
- .Invalid_ampersands => blk: {
- try p.errors.append(p.gpa, .{
- .InvalidAnd = .{ .token = p.tok_i },
- });
- break :blk p.nextToken();
- },
- else => return null,
- } else p.eatToken(token) orelse return null;
-
- const node = try p.arena.allocator.create(Node.SimpleInfixOp);
- node.* = .{
- .base = .{ .tag = op },
- .op_token = op_token,
- .lhs = undefined, // set by caller
- .rhs = undefined, // set by caller
- };
- return &node.base;
+ return p.listToSpan(list.items);
}
}.parse;
}
- // Helper parsers not included in the grammar
-
- fn parseBuiltinCall(p: *Parser) !?*Node {
- const token = p.eatToken(.Builtin) orelse return null;
- const params = (try p.parseFnCallArguments()) orelse {
- try p.errors.append(p.gpa, .{
- .ExpectedParamList = .{ .token = p.tok_i },
+ /// FnCallArguments <- LPAREN ExprList RPAREN
+ /// ExprList <- (Expr COMMA)* Expr?
+ fn parseBuiltinCall(p: *Parser) !Node.Index {
+ const builtin_token = p.assertToken(.builtin);
+ if (p.token_tags[p.nextToken()] != .l_paren) {
+ p.tok_i -= 1;
+ try p.warn(.expected_param_list);
+ // Pretend this was an identifier so we can continue parsing.
+ return p.addNode(.{
+ .tag = .identifier,
+ .main_token = builtin_token,
+ .data = .{
+ .lhs = undefined,
+ .rhs = undefined,
+ },
});
+ }
+ if (p.eatToken(.r_paren)) |_| {
+ return p.addNode(.{
+ .tag = .builtin_call_two,
+ .main_token = builtin_token,
+ .data = .{
+ .lhs = 0,
+ .rhs = 0,
+ },
+ });
+ }
+ const param_one = try p.expectExpr();
+ switch (p.token_tags[p.nextToken()]) {
+ .comma => {
+ if (p.eatToken(.r_paren)) |_| {
+ return p.addNode(.{
+ .tag = .builtin_call_two_comma,
+ .main_token = builtin_token,
+ .data = .{
+ .lhs = param_one,
+ .rhs = 0,
+ },
+ });
+ }
+ },
+ .r_paren => return p.addNode(.{
+ .tag = .builtin_call_two,
+ .main_token = builtin_token,
+ .data = .{
+ .lhs = param_one,
+ .rhs = 0,
+ },
+ }),
+ else => {
+ // This is likely just a missing comma;
+ // give an error but continue parsing this list.
+ p.tok_i -= 1;
+ try p.warnExpected(.comma);
+ },
+ }
+ const param_two = try p.expectExpr();
+ switch (p.token_tags[p.nextToken()]) {
+ .comma => {
+ if (p.eatToken(.r_paren)) |_| {
+ return p.addNode(.{
+ .tag = .builtin_call_two_comma,
+ .main_token = builtin_token,
+ .data = .{
+ .lhs = param_one,
+ .rhs = param_two,
+ },
+ });
+ }
+ },
+ .r_paren => return p.addNode(.{
+ .tag = .builtin_call_two,
+ .main_token = builtin_token,
+ .data = .{
+ .lhs = param_one,
+ .rhs = param_two,
+ },
+ }),
+ else => {
+ // This is likely just a missing comma;
+ // give an error but continue parsing this list.
+ p.tok_i -= 1;
+ try p.warnExpected(.comma);
+ },
+ }
- // lets pretend this was an identifier so we can continue parsing
- const node = try p.arena.allocator.create(Node.OneToken);
- node.* = .{
- .base = .{ .tag = .Identifier },
- .token = token,
- };
- return &node.base;
- };
- defer p.gpa.free(params.list);
-
- const node = try Node.BuiltinCall.alloc(&p.arena.allocator, params.list.len);
- node.* = .{
- .builtin_token = token,
- .params_len = params.list.len,
- .rparen_token = params.rparen,
- };
- std.mem.copy(*Node, node.params(), params.list);
- return &node.base;
- }
-
- fn parseErrorTag(p: *Parser) !?*Node {
- const doc_comments = try p.parseDocComment(); // no need to rewind on failure
- const token = p.eatToken(.Identifier) orelse return null;
-
- const node = try p.arena.allocator.create(Node.ErrorTag);
- node.* = .{
- .doc_comments = doc_comments,
- .name_token = token,
- };
- return &node.base;
- }
-
- fn parseIdentifier(p: *Parser) !?*Node {
- const token = p.eatToken(.Identifier) orelse return null;
- const node = try p.arena.allocator.create(Node.OneToken);
- node.* = .{
- .base = .{ .tag = .Identifier },
- .token = token,
- };
- return &node.base;
- }
-
- fn parseAnyType(p: *Parser) !?*Node {
- const token = p.eatToken(.Keyword_anytype) orelse
- p.eatToken(.Keyword_var) orelse return null; // TODO remove in next release cycle
- const node = try p.arena.allocator.create(Node.OneToken);
- node.* = .{
- .base = .{ .tag = .AnyType },
- .token = token,
- };
- return &node.base;
- }
+ var list = std.ArrayList(Node.Index).init(p.gpa);
+ defer list.deinit();
- fn createLiteral(p: *Parser, tag: ast.Node.Tag, token: TokenIndex) !*Node {
- const result = try p.arena.allocator.create(Node.OneToken);
- result.* = .{
- .base = .{ .tag = tag },
- .token = token,
- };
- return &result.base;
- }
+ try list.appendSlice(&.{ param_one, param_two });
- fn parseStringLiteralSingle(p: *Parser) !?*Node {
- if (p.eatToken(.StringLiteral)) |token| {
- const node = try p.arena.allocator.create(Node.OneToken);
- node.* = .{
- .base = .{ .tag = .StringLiteral },
- .token = token,
- };
- return &node.base;
+ while (true) {
+ const param = try p.expectExpr();
+ try list.append(param);
+ switch (p.token_tags[p.nextToken()]) {
+ .comma => {
+ if (p.eatToken(.r_paren)) |_| {
+ const params = try p.listToSpan(list.items);
+ return p.addNode(.{
+ .tag = .builtin_call_comma,
+ .main_token = builtin_token,
+ .data = .{
+ .lhs = params.start,
+ .rhs = params.end,
+ },
+ });
+ }
+ continue;
+ },
+ .r_paren => {
+ const params = try p.listToSpan(list.items);
+ return p.addNode(.{
+ .tag = .builtin_call,
+ .main_token = builtin_token,
+ .data = .{
+ .lhs = params.start,
+ .rhs = params.end,
+ },
+ });
+ },
+ else => {
+ // This is likely just a missing comma;
+ // give an error but continue parsing this list.
+ p.tok_i -= 1;
+ try p.warnExpected(.comma);
+ },
+ }
}
- return null;
}
// string literal or multiline string literal
- fn parseStringLiteral(p: *Parser) !?*Node {
- if (try p.parseStringLiteralSingle()) |node| return node;
-
- if (p.eatToken(.MultilineStringLiteralLine)) |first_line| {
- const start_tok_i = p.tok_i;
- var tok_i = start_tok_i;
- var count: usize = 1; // including first_line
- while (true) : (tok_i += 1) {
- switch (p.token_ids[tok_i]) {
- .LineComment => continue,
- .MultilineStringLiteralLine => count += 1,
- else => break,
- }
- }
-
- const node = try Node.MultilineStringLiteral.alloc(&p.arena.allocator, count);
- node.* = .{ .lines_len = count };
- const lines = node.lines();
- tok_i = start_tok_i;
- lines[0] = first_line;
- count = 1;
- while (true) : (tok_i += 1) {
- switch (p.token_ids[tok_i]) {
- .LineComment => continue,
- .MultilineStringLiteralLine => {
- lines[count] = tok_i;
- count += 1;
+ fn parseStringLiteral(p: *Parser) !Node.Index {
+ switch (p.token_tags[p.tok_i]) {
+ .string_literal => {
+ const main_token = p.nextToken();
+ return p.addNode(.{
+ .tag = .string_literal,
+ .main_token = main_token,
+ .data = .{
+ .lhs = undefined,
+ .rhs = undefined,
},
- else => break,
+ });
+ },
+ .multiline_string_literal_line => {
+ const first_line = p.nextToken();
+ while (p.token_tags[p.tok_i] == .multiline_string_literal_line) {
+ p.tok_i += 1;
}
- }
- p.tok_i = tok_i;
- return &node.base;
+ return p.addNode(.{
+ .tag = .multiline_string_literal,
+ .main_token = first_line,
+ .data = .{
+ .lhs = first_line,
+ .rhs = p.tok_i - 1,
+ },
+ });
+ },
+ else => return null_node,
}
-
- return null;
}
- fn parseIntegerLiteral(p: *Parser) !?*Node {
- const token = p.eatToken(.IntegerLiteral) orelse return null;
- const node = try p.arena.allocator.create(Node.OneToken);
- node.* = .{
- .base = .{ .tag = .IntegerLiteral },
- .token = token,
- };
- return &node.base;
+ fn expectStringLiteral(p: *Parser) !Node.Index {
+ const node = try p.parseStringLiteral();
+ if (node == 0) {
+ return p.fail(.expected_string_literal);
+ }
+ return node;
}
- fn parseFloatLiteral(p: *Parser) !?*Node {
- const token = p.eatToken(.FloatLiteral) orelse return null;
- const node = try p.arena.allocator.create(Node.OneToken);
- node.* = .{
- .base = .{ .tag = .FloatLiteral },
- .token = token,
- };
- return &node.base;
+ fn expectIntegerLiteral(p: *Parser) !Node.Index {
+ return p.addNode(.{
+ .tag = .integer_literal,
+ .main_token = try p.expectToken(.integer_literal),
+ .data = .{
+ .lhs = undefined,
+ .rhs = undefined,
+ },
+ });
}
- fn parseTry(p: *Parser) !?*Node {
- const token = p.eatToken(.Keyword_try) orelse return null;
- const node = try p.arena.allocator.create(Node.SimplePrefixOp);
- node.* = .{
- .base = .{ .tag = .Try },
- .op_token = token,
- .rhs = undefined, // set by caller
- };
- return &node.base;
- }
+ /// KEYWORD_if LPAREN Expr RPAREN PtrPayload? Body (KEYWORD_else Payload? Body)?
+ fn parseIf(p: *Parser, bodyParseFn: NodeParseFn) !Node.Index {
+ const if_token = p.eatToken(.keyword_if) orelse return null_node;
+ _ = try p.expectToken(.l_paren);
+ const condition = try p.expectExpr();
+ _ = try p.expectToken(.r_paren);
+ const then_payload = try p.parsePtrPayload();
- /// IfPrefix Body (KEYWORD_else Payload? Body)?
- fn parseIf(p: *Parser, bodyParseFn: NodeParseFn) !?*Node {
- const node = (try p.parseIfPrefix()) orelse return null;
- const if_prefix = node.cast(Node.If).?;
+ const then_expr = try bodyParseFn(p);
+ if (then_expr == 0) return p.fail(.invalid_token);
- if_prefix.body = try p.expectNode(bodyParseFn, .{
- .InvalidToken = .{ .token = p.tok_i },
+ const else_token = p.eatToken(.keyword_else) orelse return p.addNode(.{
+ .tag = .if_simple,
+ .main_token = if_token,
+ .data = .{
+ .lhs = condition,
+ .rhs = then_expr,
+ },
});
-
- const else_token = p.eatToken(.Keyword_else) orelse return node;
- const payload = try p.parsePayload();
- const else_expr = try p.expectNode(bodyParseFn, .{
- .InvalidToken = .{ .token = p.tok_i },
+ const else_payload = try p.parsePayload();
+ const else_expr = try bodyParseFn(p);
+ if (else_expr == 0) return p.fail(.invalid_token);
+
+ return p.addNode(.{
+ .tag = .@"if",
+ .main_token = if_token,
+ .data = .{
+ .lhs = condition,
+ .rhs = try p.addExtra(Node.If{
+ .then_expr = then_expr,
+ .else_expr = else_expr,
+ }),
+ },
});
- const else_node = try p.arena.allocator.create(Node.Else);
- else_node.* = .{
- .else_token = else_token,
- .payload = payload,
- .body = else_expr,
- };
- if_prefix.@"else" = else_node;
-
- return node;
}
- /// Eat a multiline doc comment
- fn parseDocComment(p: *Parser) !?*Node.DocComment {
- if (p.eatToken(.DocComment)) |first_line| {
- while (p.eatToken(.DocComment)) |_| {}
- const node = try p.arena.allocator.create(Node.DocComment);
- node.* = .{ .first_line = first_line };
- return node;
+ /// Skips over doc comment tokens. Returns the first one, if any.
+ fn eatDocComments(p: *Parser) !?TokenIndex {
+ if (p.eatToken(.doc_comment)) |tok| {
+ var first_line = tok;
+ if (tok > 0 and tokensOnSameLine(p, tok - 1, tok)) {
+ try p.warnMsg(.{
+ .tag = .same_line_doc_comment,
+ .token = tok,
+ });
+ first_line = p.eatToken(.doc_comment) orelse return null;
+ }
+ while (p.eatToken(.doc_comment)) |_| {}
+ return first_line;
}
return null;
}
fn tokensOnSameLine(p: *Parser, token1: TokenIndex, token2: TokenIndex) bool {
- return std.mem.indexOfScalar(u8, p.source[p.token_locs[token1].end..p.token_locs[token2].start], '\n') == null;
+ return std.mem.indexOfScalar(u8, p.source[p.token_starts[token1]..p.token_starts[token2]], '\n') == null;
}
- /// Eat a single-line doc comment on the same line as another node
- fn parseAppendedDocComment(p: *Parser, after_token: TokenIndex) !?*Node.DocComment {
- const comment_token = p.eatToken(.DocComment) orelse return null;
- if (p.tokensOnSameLine(after_token, comment_token)) {
- const node = try p.arena.allocator.create(Node.DocComment);
- node.* = .{ .first_line = comment_token };
- return node;
- }
- p.putBackToken(comment_token);
- return null;
+ fn eatToken(p: *Parser, tag: Token.Tag) ?TokenIndex {
+ return if (p.token_tags[p.tok_i] == tag) p.nextToken() else null;
}
- /// Op* Child
- fn parsePrefixOpExpr(p: *Parser, comptime opParseFn: NodeParseFn, comptime childParseFn: NodeParseFn) Error!?*Node {
- if (try opParseFn(p)) |first_op| {
- var rightmost_op = first_op;
- while (true) {
- switch (rightmost_op.tag) {
- .AddressOf,
- .Await,
- .BitNot,
- .BoolNot,
- .OptionalType,
- .Negation,
- .NegationWrap,
- .Resume,
- .Try,
- => {
- if (try opParseFn(p)) |rhs| {
- rightmost_op.cast(Node.SimplePrefixOp).?.rhs = rhs;
- rightmost_op = rhs;
- } else break;
- },
- .ArrayType => {
- if (try opParseFn(p)) |rhs| {
- rightmost_op.cast(Node.ArrayType).?.rhs = rhs;
- rightmost_op = rhs;
- } else break;
- },
- .ArrayTypeSentinel => {
- if (try opParseFn(p)) |rhs| {
- rightmost_op.cast(Node.ArrayTypeSentinel).?.rhs = rhs;
- rightmost_op = rhs;
- } else break;
- },
- .SliceType => {
- if (try opParseFn(p)) |rhs| {
- rightmost_op.cast(Node.SliceType).?.rhs = rhs;
- rightmost_op = rhs;
- } else break;
- },
- .PtrType => {
- var ptr_type = rightmost_op.cast(Node.PtrType).?;
- // If the token encountered was **, there will be two nodes
- if (p.token_ids[ptr_type.op_token] == .AsteriskAsterisk) {
- rightmost_op = ptr_type.rhs;
- ptr_type = rightmost_op.cast(Node.PtrType).?;
- }
- if (try opParseFn(p)) |rhs| {
- ptr_type.rhs = rhs;
- rightmost_op = rhs;
- } else break;
- },
- .AnyFrameType => {
- const prom = rightmost_op.cast(Node.AnyFrameType).?;
- if (try opParseFn(p)) |rhs| {
- prom.result.?.return_type = rhs;
- rightmost_op = rhs;
- } else break;
- },
- else => unreachable,
- }
- }
-
- // If any prefix op existed, a child node on the RHS is required
- switch (rightmost_op.tag) {
- .AddressOf,
- .Await,
- .BitNot,
- .BoolNot,
- .OptionalType,
- .Negation,
- .NegationWrap,
- .Resume,
- .Try,
- => {
- const prefix_op = rightmost_op.cast(Node.SimplePrefixOp).?;
- prefix_op.rhs = try p.expectNode(childParseFn, .{
- .InvalidToken = .{ .token = p.tok_i },
- });
- },
- .ArrayType => {
- const prefix_op = rightmost_op.cast(Node.ArrayType).?;
- prefix_op.rhs = try p.expectNode(childParseFn, .{
- .InvalidToken = .{ .token = p.tok_i },
- });
- },
- .ArrayTypeSentinel => {
- const prefix_op = rightmost_op.cast(Node.ArrayTypeSentinel).?;
- prefix_op.rhs = try p.expectNode(childParseFn, .{
- .InvalidToken = .{ .token = p.tok_i },
- });
- },
- .PtrType => {
- const prefix_op = rightmost_op.cast(Node.PtrType).?;
- prefix_op.rhs = try p.expectNode(childParseFn, .{
- .InvalidToken = .{ .token = p.tok_i },
- });
- },
- .SliceType => {
- const prefix_op = rightmost_op.cast(Node.SliceType).?;
- prefix_op.rhs = try p.expectNode(childParseFn, .{
- .InvalidToken = .{ .token = p.tok_i },
- });
- },
- .AnyFrameType => {
- const prom = rightmost_op.cast(Node.AnyFrameType).?;
- prom.result.?.return_type = try p.expectNode(childParseFn, .{
- .InvalidToken = .{ .token = p.tok_i },
- });
- },
- else => unreachable,
- }
-
- return first_op;
- }
-
- // Otherwise, the child node is optional
- return childParseFn(p);
+ fn assertToken(p: *Parser, tag: Token.Tag) TokenIndex {
+ const token = p.nextToken();
+ assert(p.token_tags[token] == tag);
+ return token;
}
- /// Child (Op Child)*
- /// Child (Op Child)?
- fn parseBinOpExpr(
- p: *Parser,
- opParseFn: NodeParseFn,
- childParseFn: NodeParseFn,
- chain: enum {
- Once,
- Infinitely,
- },
- ) Error!?*Node {
- var res = (try childParseFn(p)) orelse return null;
-
- while (try opParseFn(p)) |node| {
- const right = try p.expectNode(childParseFn, .{
- .InvalidToken = .{ .token = p.tok_i },
+ fn expectToken(p: *Parser, tag: Token.Tag) Error!TokenIndex {
+ const token = p.nextToken();
+ if (p.token_tags[token] != tag) {
+ p.tok_i -= 1; // Go back so that we can recover properly.
+ return p.failMsg(.{
+ .tag = .expected_token,
+ .token = token,
+ .extra = .{ .expected_tag = tag },
});
- const left = res;
- res = node;
-
- if (node.castTag(.Catch)) |op| {
- op.lhs = left;
- op.rhs = right;
- } else if (node.cast(Node.SimpleInfixOp)) |op| {
- op.lhs = left;
- op.rhs = right;
- }
-
- switch (chain) {
- .Once => break,
- .Infinitely => continue,
- }
}
-
- return res;
- }
-
- fn createInfixOp(p: *Parser, op_token: TokenIndex, tag: Node.Tag) !*Node {
- const node = try p.arena.allocator.create(Node.SimpleInfixOp);
- node.* = .{
- .base = Node{ .tag = tag },
- .op_token = op_token,
- .lhs = undefined, // set by caller
- .rhs = undefined, // set by caller
- };
- return &node.base;
- }
-
- fn eatToken(p: *Parser, id: Token.Id) ?TokenIndex {
- return if (p.token_ids[p.tok_i] == id) p.nextToken() else null;
- }
-
- fn expectToken(p: *Parser, id: Token.Id) Error!TokenIndex {
- return (try p.expectTokenRecoverable(id)) orelse error.ParseError;
+ return token;
}
- fn expectTokenRecoverable(p: *Parser, id: Token.Id) !?TokenIndex {
- const token = p.nextToken();
- if (p.token_ids[token] != id) {
- try p.errors.append(p.gpa, .{
- .ExpectedToken = .{ .token = token, .expected_id = id },
- });
- // go back so that we can recover properly
- p.putBackToken(token);
+ fn expectTokenRecoverable(p: *Parser, tag: Token.Tag) !?TokenIndex {
+ if (p.token_tags[p.tok_i] != tag) {
+ try p.warnExpected(tag);
return null;
+ } else {
+ return p.nextToken();
}
- return token;
}
fn nextToken(p: *Parser) TokenIndex {
const result = p.tok_i;
p.tok_i += 1;
- assert(p.token_ids[result] != .LineComment);
- if (p.tok_i >= p.token_ids.len) return result;
-
- while (true) {
- if (p.token_ids[p.tok_i] != .LineComment) return result;
- p.tok_i += 1;
- }
- }
-
- fn putBackToken(p: *Parser, putting_back: TokenIndex) void {
- while (p.tok_i > 0) {
- p.tok_i -= 1;
- if (p.token_ids[p.tok_i] == .LineComment) continue;
- assert(putting_back == p.tok_i);
- return;
- }
- }
-
- /// TODO Delete this function. I don't like the inversion of control.
- fn expectNode(
- p: *Parser,
- parseFn: NodeParseFn,
- /// if parsing fails
- err: AstError,
- ) Error!*Node {
- return (try p.expectNodeRecoverable(parseFn, err)) orelse return error.ParseError;
- }
-
- /// TODO Delete this function. I don't like the inversion of control.
- fn expectNodeRecoverable(
- p: *Parser,
- parseFn: NodeParseFn,
- /// if parsing fails
- err: AstError,
- ) !?*Node {
- return (try parseFn(p)) orelse {
- try p.errors.append(p.gpa, err);
- return null;
- };
+ return result;
}
};
-fn ParseFn(comptime T: type) type {
- return fn (p: *Parser) Error!T;
-}
-
-test "std.zig.parser" {
+test {
_ = @import("parser_test.zig");
}
diff --git a/lib/std/zig/parser_test.zig b/lib/std/zig/parser_test.zig
index 505e900c64c2..4ec527fd6c9d 100644
--- a/lib/std/zig/parser_test.zig
+++ b/lib/std/zig/parser_test.zig
@@ -3,275 +3,34 @@
// This file is part of [zig](https://ziglang.org/), which is MIT licensed.
// The MIT license requires this copyright notice to be included in all copies
// and substantial portions of the software.
-test "zig fmt: convert var to anytype" {
- // TODO remove in next release cycle
+
+// TODO Remove this after zig 0.9.0 is released.
+test "zig fmt: rewrite inline functions as callconv(.Inline)" {
try testTransform(
- \\pub fn main(
- \\ a: var,
- \\ bar: var,
- \\) void {}
+ \\inline fn foo() void {}
+ \\
,
- \\pub fn main(
- \\ a: anytype,
- \\ bar: anytype,
- \\) void {}
+ \\fn foo() callconv(.Inline) void {}
\\
);
}
-test "zig fmt: noasync to nosuspend" {
- // TODO: remove this
- try testTransform(
- \\pub fn main() void {
- \\ noasync call();
- \\}
- ,
- \\pub fn main() void {
- \\ nosuspend call();
- \\}
+test "zig fmt: simple top level comptime block" {
+ try testCanonical(
+ \\// line comment
+ \\comptime {}
\\
);
}
-test "recovery: top level" {
- try testError(
- \\test "" {inline}
- \\test "" {inline}
- , &[_]Error{
- .ExpectedInlinable,
- .ExpectedInlinable,
- });
-}
-
-test "recovery: block statements" {
- try testError(
- \\test "" {
- \\ foo + +;
- \\ inline;
- \\}
- , &[_]Error{
- .InvalidToken,
- .ExpectedInlinable,
- });
-}
-
-test "recovery: missing comma" {
- try testError(
- \\test "" {
- \\ switch (foo) {
- \\ 2 => {}
- \\ 3 => {}
- \\ else => {
- \\ foo && bar +;
- \\ }
- \\ }
- \\}
- , &[_]Error{
- .ExpectedToken,
- .ExpectedToken,
- .InvalidAnd,
- .InvalidToken,
- });
-}
-
-test "recovery: extra qualifier" {
- try testError(
- \\const a: *const const u8;
- \\test ""
- , &[_]Error{
- .ExtraConstQualifier,
- .ExpectedLBrace,
- });
-}
-
-test "recovery: missing return type" {
- try testError(
- \\fn foo() {
- \\ a && b;
- \\}
- \\test ""
- , &[_]Error{
- .ExpectedReturnType,
- .InvalidAnd,
- .ExpectedLBrace,
- });
-}
-
-test "recovery: continue after invalid decl" {
- try testError(
- \\fn foo {
- \\ inline;
- \\}
- \\pub test "" {
- \\ async a && b;
- \\}
- , &[_]Error{
- .ExpectedToken,
- .ExpectedPubItem,
- .ExpectedParamList,
- .InvalidAnd,
- });
- try testError(
- \\threadlocal test "" {
- \\ @a && b;
- \\}
- , &[_]Error{
- .ExpectedVarDecl,
- .ExpectedParamList,
- .InvalidAnd,
- });
-}
-
-test "recovery: invalid extern/inline" {
- try testError(
- \\inline test "" { a && b; }
- , &[_]Error{
- .ExpectedFn,
- .InvalidAnd,
- });
- try testError(
- \\extern "" test "" { a && b; }
- , &[_]Error{
- .ExpectedVarDeclOrFn,
- .InvalidAnd,
- });
-}
-
-test "recovery: missing semicolon" {
- try testError(
- \\test "" {
- \\ comptime a && b
- \\ c && d
- \\ @foo
- \\}
- , &[_]Error{
- .InvalidAnd,
- .ExpectedToken,
- .InvalidAnd,
- .ExpectedToken,
- .ExpectedParamList,
- .ExpectedToken,
- });
-}
-
-test "recovery: invalid container members" {
- try testError(
- \\usingnamespace;
- \\foo+
- \\bar@,
- \\while (a == 2) { test "" {}}
- \\test "" {
- \\ a && b
- \\}
- , &[_]Error{
- .ExpectedExpr,
- .ExpectedToken,
- .ExpectedToken,
- .ExpectedContainerMembers,
- .InvalidAnd,
- .ExpectedToken,
- });
-}
-
-test "recovery: invalid parameter" {
- try testError(
- \\fn main() void {
- \\ a(comptime T: type)
- \\}
- , &[_]Error{
- .ExpectedToken,
- });
-}
-
-test "recovery: extra '}' at top level" {
- try testError(
- \\}}}
- \\test "" {
- \\ a && b;
- \\}
- , &[_]Error{
- .ExpectedContainerMembers,
- .ExpectedContainerMembers,
- .ExpectedContainerMembers,
- .InvalidAnd,
- });
-}
-
-test "recovery: mismatched bracket at top level" {
- try testError(
- \\const S = struct {
- \\ arr: 128]?G
- \\};
- , &[_]Error{
- .ExpectedToken,
- });
-}
-
-test "recovery: invalid global error set access" {
- try testError(
- \\test "" {
- \\ error && foo;
- \\}
- , &[_]Error{
- .ExpectedToken,
- .ExpectedIdentifier,
- .InvalidAnd,
- });
-}
-
-test "recovery: invalid asterisk after pointer dereference" {
- try testError(
- \\test "" {
- \\ var sequence = "repeat".*** 10;
- \\}
- , &[_]Error{
- .AsteriskAfterPointerDereference,
- });
- try testError(
- \\test "" {
- \\ var sequence = "repeat".** 10&&a;
- \\}
- , &[_]Error{
- .AsteriskAfterPointerDereference,
- .InvalidAnd,
- });
-}
-
-test "recovery: missing semicolon after if, for, while stmt" {
- try testError(
- \\test "" {
- \\ if (foo) bar
- \\ for (foo) |a| bar
- \\ while (foo) bar
- \\ a && b;
- \\}
- , &[_]Error{
- .ExpectedSemiOrElse,
- .ExpectedSemiOrElse,
- .ExpectedSemiOrElse,
- .InvalidAnd,
- });
-}
-
-test "recovery: invalid comptime" {
- try testError(
- \\comptime
- , &[_]Error{
- .ExpectedBlockOrField,
- });
-}
-
-test "recovery: missing block after for/while loops" {
- try testError(
- \\test "" { while (foo) }
- , &[_]Error{
- .ExpectedBlockOrAssignment,
- });
- try testError(
- \\test "" { for (foo) |bar| }
- , &[_]Error{
- .ExpectedBlockOrAssignment,
- });
+test "zig fmt: two spaced line comments before decl" {
+ try testCanonical(
+ \\// line comment
+ \\
+ \\// another
+ \\comptime {}
+ \\
+ );
}
test "zig fmt: respect line breaks after var declarations" {
@@ -325,6 +84,35 @@ test "zig fmt: empty file" {
);
}
+test "zig fmt: file ends in comment" {
+ try testTransform(
+ \\ //foobar
+ ,
+ \\//foobar
+ \\
+ );
+}
+
+test "zig fmt: file ends in comment after var decl" {
+ try testTransform(
+ \\const x = 42;
+ \\ //foobar
+ ,
+ \\const x = 42;
+ \\//foobar
+ \\
+ );
+}
+
+test "zig fmt: doc comments on test" {
+ try testCanonical(
+ \\/// hello
+ \\/// world
+ \\test "" {}
+ \\
+ );
+}
+
test "zig fmt: if statment" {
try testCanonical(
\\test "" {
@@ -357,7 +145,7 @@ test "zig fmt: decl between fields" {
\\ b: usize,
\\};
, &[_]Error{
- .DeclBetweenFields,
+ .decl_between_fields,
});
}
@@ -365,7 +153,7 @@ test "zig fmt: eof after missing comma" {
try testError(
\\foo()
, &[_]Error{
- .ExpectedToken,
+ .expected_token,
});
}
@@ -402,7 +190,7 @@ test "zig fmt: nosuspend await" {
);
}
-test "zig fmt: trailing comma in container declaration" {
+test "zig fmt: container declaration, single line" {
try testCanonical(
\\const X = struct { foo: i32 };
\\const X = struct { foo: i32, bar: i32 };
@@ -411,7 +199,23 @@ test "zig fmt: trailing comma in container declaration" {
\\const X = struct { foo: i32 align(4) = 1, bar: i32 align(4) = 2 };
\\
);
+}
+
+test "zig fmt: container declaration, one item, multi line trailing comma" {
try testCanonical(
+ \\test "" {
+ \\ comptime {
+ \\ const X = struct {
+ \\ x: i32,
+ \\ };
+ \\ }
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: container declaration, no trailing comma on separate line" {
+ try testTransform(
\\test "" {
\\ comptime {
\\ const X = struct {
@@ -420,30 +224,113 @@ test "zig fmt: trailing comma in container declaration" {
\\ }
\\}
\\
+ ,
+ \\test "" {
+ \\ comptime {
+ \\ const X = struct { x: i32 };
+ \\ }
+ \\}
+ \\
);
+}
+
+test "zig fmt: container declaration, line break, no trailing comma" {
try testTransform(
\\const X = struct {
\\ foo: i32, bar: i8 };
+ ,
+ \\const X = struct { foo: i32, bar: i8 };
+ \\
+ );
+}
+
+test "zig fmt: container declaration, transform trailing comma" {
+ try testTransform(
+ \\const X = struct {
+ \\ foo: i32, bar: i8, };
,
\\const X = struct {
- \\ foo: i32, bar: i8
+ \\ foo: i32,
+ \\ bar: i8,
\\};
\\
);
}
-test "zig fmt: trailing comma in fn parameter list" {
- try testCanonical(
- \\pub fn f(
- \\ a: i32,
- \\ b: i32,
- \\) i32 {}
- \\pub fn f(
- \\ a: i32,
- \\ b: i32,
- \\) align(8) i32 {}
- \\pub fn f(
- \\ a: i32,
+test "zig fmt: remove empty lines at start/end of container decl" {
+ try testTransform(
+ \\const X = struct {
+ \\
+ \\ foo: i32,
+ \\
+ \\ bar: i8,
+ \\
+ \\};
+ \\
+ ,
+ \\const X = struct {
+ \\ foo: i32,
+ \\
+ \\ bar: i8,
+ \\};
+ \\
+ );
+}
+
+test "zig fmt: remove empty lines at start/end of block" {
+ try testTransform(
+ \\test {
+ \\
+ \\ if (foo) {
+ \\ foo();
+ \\ }
+ \\
+ \\}
+ \\
+ ,
+ \\test {
+ \\ if (foo) {
+ \\ foo();
+ \\ }
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: allow empty line before comment at start of block" {
+ try testCanonical(
+ \\test {
+ \\
+ \\ // foo
+ \\ const x = 42;
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: allow empty line before comment at start of block" {
+ try testCanonical(
+ \\test {
+ \\
+ \\ // foo
+ \\ const x = 42;
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: trailing comma in fn parameter list" {
+ try testCanonical(
+ \\pub fn f(
+ \\ a: i32,
+ \\ b: i32,
+ \\) i32 {}
+ \\pub fn f(
+ \\ a: i32,
+ \\ b: i32,
+ \\) align(8) i32 {}
+ \\pub fn f(
+ \\ a: i32,
\\ b: i32,
\\) linksection(".text") i32 {}
\\pub fn f(
@@ -480,6 +367,31 @@ test "zig fmt: comptime struct field" {
);
}
+test "zig fmt: break from block" {
+ try testCanonical(
+ \\const a = blk: {
+ \\ break :blk 42;
+ \\};
+ \\const b = blk: {
+ \\ break :blk;
+ \\};
+ \\const c = {
+ \\ break 42;
+ \\};
+ \\const d = {
+ \\ break;
+ \\};
+ \\
+ );
+}
+
+test "zig fmt: grouped expressions (parentheses)" {
+ try testCanonical(
+ \\const r = (x + y) * (a + b);
+ \\
+ );
+}
+
test "zig fmt: c pointer type" {
try testCanonical(
\\pub extern fn repro() [*c]const u8;
@@ -535,6 +447,19 @@ test "zig fmt: anytype struct field" {
);
}
+test "zig fmt: array types last token" {
+ try testCanonical(
+ \\test {
+ \\ const x = [40]u32;
+ \\}
+ \\
+ \\test {
+ \\ const x = [40:0]u32;
+ \\}
+ \\
+ );
+}
+
test "zig fmt: sentinel-terminated array type" {
try testCanonical(
\\pub fn cStrToPrefixedFileW(s: [*:0]const u8) ![PATH_MAX_WIDE:0]u16 {
@@ -553,6 +478,58 @@ test "zig fmt: sentinel-terminated slice type" {
);
}
+test "zig fmt: pointer-to-one with modifiers" {
+ try testCanonical(
+ \\const x: *u32 = undefined;
+ \\const y: *allowzero align(8) const volatile u32 = undefined;
+ \\const z: *allowzero align(8:4:2) const volatile u32 = undefined;
+ \\
+ );
+}
+
+test "zig fmt: pointer-to-many with modifiers" {
+ try testCanonical(
+ \\const x: [*]u32 = undefined;
+ \\const y: [*]allowzero align(8) const volatile u32 = undefined;
+ \\const z: [*]allowzero align(8:4:2) const volatile u32 = undefined;
+ \\
+ );
+}
+
+test "zig fmt: sentinel pointer with modifiers" {
+ try testCanonical(
+ \\const x: [*:42]u32 = undefined;
+ \\const y: [*:42]allowzero align(8) const volatile u32 = undefined;
+ \\const y: [*:42]allowzero align(8:4:2) const volatile u32 = undefined;
+ \\
+ );
+}
+
+test "zig fmt: c pointer with modifiers" {
+ try testCanonical(
+ \\const x: [*c]u32 = undefined;
+ \\const y: [*c]allowzero align(8) const volatile u32 = undefined;
+ \\const z: [*c]allowzero align(8:4:2) const volatile u32 = undefined;
+ \\
+ );
+}
+
+test "zig fmt: slice with modifiers" {
+ try testCanonical(
+ \\const x: []u32 = undefined;
+ \\const y: []allowzero align(8) const volatile u32 = undefined;
+ \\
+ );
+}
+
+test "zig fmt: sentinel slice with modifiers" {
+ try testCanonical(
+ \\const x: [:42]u32 = undefined;
+ \\const y: [:42]allowzero align(8) const volatile u32 = undefined;
+ \\
+ );
+}
+
test "zig fmt: anon literal in array" {
try testCanonical(
\\var arr: [2]Foo = .{
@@ -581,100 +558,91 @@ test "zig fmt: alignment in anonymous literal" {
);
}
-test "zig fmt: anon struct literal syntax" {
+test "zig fmt: anon struct literal 0 element" {
try testCanonical(
- \\const x = .{
- \\ .a = b,
- \\ .c = d,
- \\};
+ \\test {
+ \\ const x = .{};
+ \\}
\\
);
}
-test "zig fmt: anon list literal syntax" {
+test "zig fmt: anon struct literal 1 element" {
try testCanonical(
- \\const x = .{ a, b, c };
+ \\test {
+ \\ const x = .{ .a = b };
+ \\}
\\
);
}
-test "zig fmt: async function" {
+test "zig fmt: anon struct literal 1 element comma" {
try testCanonical(
- \\pub const Server = struct {
- \\ handleRequestFn: fn (*Server, *const std.net.Address, File) callconv(.Async) void,
- \\};
- \\test "hi" {
- \\ var ptr = @ptrCast(fn (i32) callconv(.Async) void, other);
+ \\test {
+ \\ const x = .{
+ \\ .a = b,
+ \\ };
\\}
\\
);
}
-test "zig fmt: whitespace fixes" {
- try testTransform("test \"\" {\r\n\tconst hi = x;\r\n}\n// zig fmt: off\ntest \"\"{\r\n\tconst a = b;}\r\n",
- \\test "" {
- \\ const hi = x;
+test "zig fmt: anon struct literal 2 element" {
+ try testCanonical(
+ \\test {
+ \\ const x = .{ .a = b, .c = d };
\\}
- \\// zig fmt: off
- \\test ""{
- \\ const a = b;}
\\
);
}
-test "zig fmt: while else err prong with no block" {
+test "zig fmt: anon struct literal 2 element comma" {
try testCanonical(
- \\test "" {
- \\ const result = while (returnError()) |value| {
- \\ break value;
- \\ } else |err| @as(i32, 2);
- \\ expect(result == 2);
+ \\test {
+ \\ const x = .{
+ \\ .a = b,
+ \\ .c = d,
+ \\ };
\\}
\\
);
}
-test "zig fmt: tagged union with enum values" {
+test "zig fmt: anon struct literal 3 element" {
try testCanonical(
- \\const MultipleChoice2 = union(enum(u32)) {
- \\ Unspecified1: i32,
- \\ A: f32 = 20,
- \\ Unspecified2: void,
- \\ B: bool = 40,
- \\ Unspecified3: i32,
- \\ C: i8 = 60,
- \\ Unspecified4: void,
- \\ D: void = 1000,
- \\ Unspecified5: i32,
- \\};
+ \\test {
+ \\ const x = .{ .a = b, .c = d, .e = f };
+ \\}
\\
);
}
-test "zig fmt: allowzero pointer" {
+test "zig fmt: anon struct literal 3 element comma" {
try testCanonical(
- \\const T = [*]allowzero const u8;
+ \\test {
+ \\ const x = .{
+ \\ .a = b,
+ \\ .c = d,
+ \\ .e = f,
+ \\ };
+ \\}
\\
);
}
-test "zig fmt: enum literal" {
+test "zig fmt: struct literal 0 element" {
try testCanonical(
- \\const x = .hi;
+ \\test {
+ \\ const x = X{};
+ \\}
\\
);
}
-test "zig fmt: enum literal inside array literal" {
+test "zig fmt: struct literal 1 element" {
try testCanonical(
- \\test "enums in arrays" {
- \\ var colors = []Color{.Green};
- \\ colors = []Colors{ .Green, .Cyan };
- \\ colors = []Colors{
- \\ .Grey,
- \\ .Green,
- \\ .Cyan,
- \\ };
+ \\test {
+ \\ const x = X{ .a = b };
\\}
\\
);
@@ -682,656 +650,1016 @@ test "zig fmt: enum literal inside array literal" {
test "zig fmt: Unicode code point literal larger than u8" {
try testCanonical(
- \\const x = '\u{01f4a9}';
+ \\test {
+ \\ const x = X{
+ \\ .a = b,
+ \\ };
+ \\}
\\
);
}
-test "zig fmt: infix operator and then multiline string literal" {
+test "zig fmt: struct literal 2 element" {
try testCanonical(
- \\const x = "" ++
- \\ \\ hi
- \\;
+ \\test {
+ \\ const x = X{ .a = b, .c = d };
+ \\}
\\
);
}
-test "zig fmt: infix operator and then multiline string literal" {
+test "zig fmt: struct literal 2 element comma" {
try testCanonical(
- \\const x = "" ++
- \\ \\ hi0
- \\ \\ hi1
- \\ \\ hi2
- \\;
+ \\test {
+ \\ const x = X{
+ \\ .a = b,
+ \\ .c = d,
+ \\ };
+ \\}
\\
);
}
-test "zig fmt: C pointers" {
+test "zig fmt: struct literal 3 element" {
try testCanonical(
- \\const Ptr = [*c]i32;
+ \\test {
+ \\ const x = X{ .a = b, .c = d, .e = f };
+ \\}
\\
);
}
-test "zig fmt: threadlocal" {
+test "zig fmt: struct literal 3 element comma" {
try testCanonical(
- \\threadlocal var x: i32 = 1234;
+ \\test {
+ \\ const x = X{
+ \\ .a = b,
+ \\ .c = d,
+ \\ .e = f,
+ \\ };
+ \\}
\\
);
}
-test "zig fmt: linksection" {
+test "zig fmt: anon list literal 1 element" {
try testCanonical(
- \\export var aoeu: u64 linksection(".text.derp") = 1234;
- \\export fn _start() linksection(".text.boot") callconv(.Naked) noreturn {}
+ \\test {
+ \\ const x = .{a};
+ \\}
\\
);
}
-test "zig fmt: correctly move doc comments on struct fields" {
- try testTransform(
- \\pub const section_64 = extern struct {
- \\ sectname: [16]u8, /// name of this section
- \\ segname: [16]u8, /// segment this section goes in
- \\};
- ,
- \\pub const section_64 = extern struct {
- \\ /// name of this section
- \\ sectname: [16]u8,
- \\ /// segment this section goes in
- \\ segname: [16]u8,
- \\};
+test "zig fmt: anon list literal 1 element comma" {
+ try testCanonical(
+ \\test {
+ \\ const x = .{
+ \\ a,
+ \\ };
+ \\}
\\
);
}
-test "zig fmt: correctly space struct fields with doc comments" {
- try testTransform(
- \\pub const S = struct {
- \\ /// A
- \\ a: u8,
- \\ /// B
- \\ /// B (cont)
- \\ b: u8,
- \\
- \\
- \\ /// C
- \\ c: u8,
- \\};
- \\
- ,
- \\pub const S = struct {
- \\ /// A
- \\ a: u8,
- \\ /// B
- \\ /// B (cont)
- \\ b: u8,
- \\
- \\ /// C
- \\ c: u8,
- \\};
+test "zig fmt: anon list literal 2 element" {
+ try testCanonical(
+ \\test {
+ \\ const x = .{ a, b };
+ \\}
\\
);
}
-test "zig fmt: doc comments on param decl" {
+test "zig fmt: anon list literal 2 element comma" {
try testCanonical(
- \\pub const Allocator = struct {
- \\ shrinkFn: fn (
- \\ self: *Allocator,
- \\ /// Guaranteed to be the same as what was returned from most recent call to
- \\ /// `allocFn`, `reallocFn`, or `shrinkFn`.
- \\ old_mem: []u8,
- \\ /// Guaranteed to be the same as what was returned from most recent call to
- \\ /// `allocFn`, `reallocFn`, or `shrinkFn`.
- \\ old_alignment: u29,
- \\ /// Guaranteed to be less than or equal to `old_mem.len`.
- \\ new_byte_count: usize,
- \\ /// Guaranteed to be less than or equal to `old_alignment`.
- \\ new_alignment: u29,
- \\ ) []u8,
- \\};
+ \\test {
+ \\ const x = .{
+ \\ a,
+ \\ b,
+ \\ };
+ \\}
\\
);
}
-test "zig fmt: aligned struct field" {
+test "zig fmt: anon list literal 3 element" {
try testCanonical(
- \\pub const S = struct {
- \\ f: i32 align(32),
- \\};
- \\
- );
- try testCanonical(
- \\pub const S = struct {
- \\ f: i32 align(32) = 1,
- \\};
+ \\test {
+ \\ const x = .{ a, b, c };
+ \\}
\\
);
}
-test "zig fmt: comment to disable/enable zig fmt first" {
+test "zig fmt: anon list literal 3 element comma" {
try testCanonical(
- \\// Test trailing comma syntax
- \\// zig fmt: off
+ \\test {
+ \\ const x = .{
+ \\ a,
+ \\ // foo
+ \\ b,
\\
- \\const struct_trailing_comma = struct { x: i32, y: i32, };
- );
-}
-
-test "zig fmt: comment to disable/enable zig fmt" {
- try testTransform(
- \\const a = b;
- \\// zig fmt: off
- \\const c = d;
- \\// zig fmt: on
- \\const e = f;
- ,
- \\const a = b;
- \\// zig fmt: off
- \\const c = d;
- \\// zig fmt: on
- \\const e = f;
+ \\ c,
+ \\ };
+ \\}
\\
);
}
-test "zig fmt: line comment following 'zig fmt: off'" {
+test "zig fmt: array literal 0 element" {
try testCanonical(
- \\// zig fmt: off
- \\// Test
- \\const e = f;
+ \\test {
+ \\ const x = [_]u32{};
+ \\}
+ \\
);
}
-test "zig fmt: doc comment following 'zig fmt: off'" {
+test "zig fmt: array literal 1 element" {
try testCanonical(
- \\// zig fmt: off
- \\/// test
- \\const e = f;
+ \\test {
+ \\ const x = [_]u32{a};
+ \\}
+ \\
);
}
-test "zig fmt: line and doc comment following 'zig fmt: off'" {
+test "zig fmt: array literal 1 element comma" {
try testCanonical(
- \\// zig fmt: off
- \\// test 1
- \\/// test 2
- \\const e = f;
+ \\test {
+ \\ const x = [1]u32{
+ \\ a,
+ \\ };
+ \\}
+ \\
);
}
-test "zig fmt: doc and line comment following 'zig fmt: off'" {
+test "zig fmt: array literal 2 element" {
try testCanonical(
- \\// zig fmt: off
- \\/// test 1
- \\// test 2
- \\const e = f;
+ \\test {
+ \\ const x = [_]u32{ a, b };
+ \\}
+ \\
);
}
-test "zig fmt: alternating 'zig fmt: off' and 'zig fmt: on'" {
+test "zig fmt: array literal 2 element comma" {
try testCanonical(
- \\// zig fmt: off
- \\// zig fmt: on
- \\// zig fmt: off
- \\const e = f;
- \\// zig fmt: off
- \\// zig fmt: on
- \\// zig fmt: off
- \\const a = b;
- \\// zig fmt: on
- \\const c = d;
- \\// zig fmt: on
+ \\test {
+ \\ const x = [2]u32{
+ \\ a,
+ \\ b,
+ \\ };
+ \\}
\\
);
}
-test "zig fmt: line comment following 'zig fmt: on'" {
+test "zig fmt: array literal 3 element" {
try testCanonical(
- \\// zig fmt: off
- \\const e = f;
- \\// zig fmt: on
- \\// test
- \\const e = f;
+ \\test {
+ \\ const x = [_]u32{ a, b, c };
+ \\}
\\
);
}
-test "zig fmt: doc comment following 'zig fmt: on'" {
+test "zig fmt: array literal 3 element comma" {
try testCanonical(
- \\// zig fmt: off
- \\const e = f;
- \\// zig fmt: on
- \\/// test
- \\const e = f;
+ \\test {
+ \\ const x = [3]u32{
+ \\ a,
+ \\ b,
+ \\ c,
+ \\ };
+ \\}
\\
);
}
-test "zig fmt: line and doc comment following 'zig fmt: on'" {
+test "zig fmt: sentinel array literal 1 element" {
try testCanonical(
- \\// zig fmt: off
- \\const e = f;
- \\// zig fmt: on
- \\// test1
- \\/// test2
- \\const e = f;
+ \\test {
+ \\ const x = [_:9000]u32{a};
+ \\}
\\
);
}
-test "zig fmt: doc and line comment following 'zig fmt: on'" {
+test "zig fmt: slices" {
try testCanonical(
- \\// zig fmt: off
- \\const e = f;
- \\// zig fmt: on
- \\/// test1
- \\// test2
- \\const e = f;
+ \\const a = b[0..];
+ \\const c = d[0..1];
+ \\const e = f[0..1 :0];
\\
);
}
-test "zig fmt: pointer of unknown length" {
+test "zig fmt: slices with spaces in bounds" {
try testCanonical(
- \\fn foo(ptr: [*]u8) void {}
+ \\const a = b[0 + 0 ..];
+ \\const c = d[0 + 0 .. 1];
+ \\const e = f[0 .. 1 + 1 :0];
\\
);
}
-test "zig fmt: spaces around slice operator" {
+test "zig fmt: block in slice expression" {
try testCanonical(
- \\var a = b[c..d];
- \\var a = b[c..d :0];
- \\var a = b[c + 1 .. d];
- \\var a = b[c + 1 ..];
- \\var a = b[c .. d + 1];
- \\var a = b[c .. d + 1 :0];
- \\var a = b[c.a..d.e];
- \\var a = b[c.a..d.e :0];
+ \\const a = b[{
+ \\ _ = x;
+ \\}..];
+ \\const c = d[0..{
+ \\ _ = x;
+ \\ _ = y;
+ \\}];
+ \\const e = f[0..1 :{
+ \\ _ = x;
+ \\ _ = y;
+ \\ _ = z;
+ \\}];
\\
);
}
-test "zig fmt: async call in if condition" {
+test "zig fmt: async function" {
try testCanonical(
- \\comptime {
- \\ if (async b()) {
- \\ a();
- \\ }
+ \\pub const Server = struct {
+ \\ handleRequestFn: fn (*Server, *const std.net.Address, File) callconv(.Async) void,
+ \\};
+ \\test "hi" {
+ \\ var ptr = @ptrCast(fn (i32) callconv(.Async) void, other);
\\}
\\
);
}
-test "zig fmt: 2nd arg multiline string" {
- try testCanonical(
- \\comptime {
- \\ cases.addAsm("hello world linux x86_64",
- \\ \\.text
- \\ , "Hello, world!\n");
+test "zig fmt: whitespace fixes" {
+ try testTransform("test \"\" {\r\n\tconst hi = x;\r\n}\n// zig fmt: off\ntest \"\"{\r\n\tconst a = b;}\r\n",
+ \\test "" {
+ \\ const hi = x;
\\}
+ \\// zig fmt: off
+ \\test ""{
+ \\ const a = b;}
\\
);
}
-test "zig fmt: 2nd arg multiline string many args" {
+test "zig fmt: while else err prong with no block" {
try testCanonical(
- \\comptime {
- \\ cases.addAsm("hello world linux x86_64",
- \\ \\.text
- \\ , "Hello, world!\n", "Hello, world!\n");
+ \\test "" {
+ \\ const result = while (returnError()) |value| {
+ \\ break value;
+ \\ } else |err| @as(i32, 2);
+ \\ expect(result == 2);
\\}
\\
);
}
-test "zig fmt: final arg multiline string" {
+test "zig fmt: tagged union with enum values" {
try testCanonical(
- \\comptime {
- \\ cases.addAsm("hello world linux x86_64", "Hello, world!\n",
- \\ \\.text
- \\ );
- \\}
+ \\const MultipleChoice2 = union(enum(u32)) {
+ \\ Unspecified1: i32,
+ \\ A: f32 = 20,
+ \\ Unspecified2: void,
+ \\ B: bool = 40,
+ \\ Unspecified3: i32,
+ \\ C: i8 = 60,
+ \\ Unspecified4: void,
+ \\ D: void = 1000,
+ \\ Unspecified5: i32,
+ \\};
\\
);
}
-test "zig fmt: if condition wraps" {
- try testTransform(
- \\comptime {
- \\ if (cond and
- \\ cond) {
- \\ return x;
- \\ }
- \\ while (cond and
- \\ cond) {
- \\ return x;
- \\ }
- \\ if (a == b and
- \\ c) {
- \\ a = b;
- \\ }
- \\ while (a == b and
- \\ c) {
- \\ a = b;
- \\ }
- \\ if ((cond and
- \\ cond)) {
- \\ return x;
- \\ }
- \\ while ((cond and
- \\ cond)) {
- \\ return x;
- \\ }
- \\ var a = if (a) |*f| x: {
- \\ break :x &a.b;
- \\ } else |err| err;
- \\ var a = if (cond and
- \\ cond) |*f|
- \\ x: {
- \\ break :x &a.b;
- \\ } else |err| err;
+test "zig fmt: tagged union enum tag last token" {
+ try testCanonical(
+ \\test {
+ \\ const U = union(enum(u32)) {};
\\}
- ,
- \\comptime {
- \\ if (cond and
- \\ cond)
- \\ {
- \\ return x;
- \\ }
- \\ while (cond and
- \\ cond)
- \\ {
- \\ return x;
- \\ }
- \\ if (a == b and
- \\ c)
- \\ {
- \\ a = b;
- \\ }
- \\ while (a == b and
- \\ c)
- \\ {
- \\ a = b;
- \\ }
- \\ if ((cond and
- \\ cond))
- \\ {
- \\ return x;
- \\ }
- \\ while ((cond and
- \\ cond))
- \\ {
- \\ return x;
- \\ }
- \\ var a = if (a) |*f| x: {
- \\ break :x &a.b;
- \\ } else |err| err;
- \\ var a = if (cond and
- \\ cond) |*f|
- \\ x: {
- \\ break :x &a.b;
- \\ } else |err| err;
+ \\
+ \\test {
+ \\ const U = union(enum(u32)) { foo };
+ \\}
+ \\
+ \\test {
+ \\ const U = union(enum(u32)) {
+ \\ foo,
+ \\ };
\\}
\\
);
}
-test "zig fmt: if condition has line break but must not wrap" {
+test "zig fmt: allowzero pointer" {
try testCanonical(
- \\comptime {
- \\ if (self.user_input_options.put(
- \\ name,
- \\ UserInputOption{
- \\ .name = name,
- \\ .used = false,
- \\ },
- \\ ) catch unreachable) |*prev_value| {
- \\ foo();
- \\ bar();
- \\ }
- \\ if (put(
- \\ a,
- \\ b,
- \\ )) {
- \\ foo();
- \\ }
- \\}
+ \\const T = [*]allowzero const u8;
\\
);
}
-test "zig fmt: if condition has line break but must not wrap" {
+test "zig fmt: enum literal" {
try testCanonical(
- \\comptime {
- \\ if (self.user_input_options.put(name, UserInputOption{
- \\ .name = name,
- \\ .used = false,
- \\ }) catch unreachable) |*prev_value| {
- \\ foo();
- \\ bar();
- \\ }
- \\ if (put(
- \\ a,
- \\ b,
- \\ )) {
- \\ foo();
- \\ }
- \\}
+ \\const x = .hi;
\\
);
}
-test "zig fmt: function call with multiline argument" {
+test "zig fmt: enum literal inside array literal" {
try testCanonical(
- \\comptime {
- \\ self.user_input_options.put(name, UserInputOption{
- \\ .name = name,
- \\ .used = false,
- \\ });
+ \\test "enums in arrays" {
+ \\ var colors = []Color{.Green};
+ \\ colors = []Colors{ .Green, .Cyan };
+ \\ colors = []Colors{
+ \\ .Grey,
+ \\ .Green,
+ \\ .Cyan,
+ \\ };
\\}
\\
);
}
-test "zig fmt: same-line doc comment on variable declaration" {
- try testTransform(
- \\pub const MAP_ANONYMOUS = 0x1000; /// allocated from memory, swap space
- \\pub const MAP_FILE = 0x0000; /// map from file (default)
- \\
- \\pub const EMEDIUMTYPE = 124; /// Wrong medium type
- \\
- \\// nameserver query return codes
- \\pub const ENSROK = 0; /// DNS server returned answer with no data
- ,
- \\/// allocated from memory, swap space
- \\pub const MAP_ANONYMOUS = 0x1000;
- \\/// map from file (default)
- \\pub const MAP_FILE = 0x0000;
- \\
- \\/// Wrong medium type
- \\pub const EMEDIUMTYPE = 124;
- \\
- \\// nameserver query return codes
- \\/// DNS server returned answer with no data
- \\pub const ENSROK = 0;
+test "zig fmt: character literal larger than u8" {
+ try testCanonical(
+ \\const x = '\u{01f4a9}';
\\
);
}
-test "zig fmt: if-else with comment before else" {
+test "zig fmt: infix operator and then multiline string literal" {
try testCanonical(
- \\comptime {
- \\ // cexp(finite|nan +- i inf|nan) = nan + i nan
- \\ if ((hx & 0x7fffffff) != 0x7f800000) {
- \\ return Complex(f32).new(y - y, y - y);
- \\ } // cexp(-inf +- i inf|nan) = 0 + i0
- \\ else if (hx & 0x80000000 != 0) {
- \\ return Complex(f32).new(0, 0);
- \\ } // cexp(+inf +- i inf|nan) = inf + i nan
- \\ else {
- \\ return Complex(f32).new(x, y - y);
- \\ }
- \\}
+ \\const x = "" ++
+ \\ \\ hi
+ \\;
\\
);
}
-test "zig fmt: if nested" {
+test "zig fmt: infix operator and then multiline string literal" {
try testCanonical(
- \\pub fn foo() void {
- \\ return if ((aInt & bInt) >= 0)
- \\ if (aInt < bInt)
- \\ GE_LESS
- \\ else if (aInt == bInt)
- \\ GE_EQUAL
- \\ else
- \\ GE_GREATER
- \\ else if (aInt > bInt)
- \\ GE_LESS
- \\ else if (aInt == bInt)
- \\ GE_EQUAL
- \\ else
- \\ GE_GREATER;
- \\}
+ \\const x = "" ++
+ \\ \\ hi0
+ \\ \\ hi1
+ \\ \\ hi2
+ \\;
\\
);
}
-test "zig fmt: respect line breaks in if-else" {
+test "zig fmt: C pointers" {
try testCanonical(
- \\comptime {
- \\ return if (cond) a else b;
- \\ return if (cond)
- \\ a
- \\ else
- \\ b;
- \\ return if (cond)
- \\ a
- \\ else if (cond)
- \\ b
- \\ else
- \\ c;
- \\}
+ \\const Ptr = [*c]i32;
\\
);
}
-test "zig fmt: respect line breaks after infix operators" {
+test "zig fmt: threadlocal" {
try testCanonical(
- \\comptime {
- \\ self.crc =
- \\ lookup_tables[0][p[7]] ^
- \\ lookup_tables[1][p[6]] ^
- \\ lookup_tables[2][p[5]] ^
- \\ lookup_tables[3][p[4]] ^
- \\ lookup_tables[4][@truncate(u8, self.crc >> 24)] ^
- \\ lookup_tables[5][@truncate(u8, self.crc >> 16)] ^
- \\ lookup_tables[6][@truncate(u8, self.crc >> 8)] ^
- \\ lookup_tables[7][@truncate(u8, self.crc >> 0)];
- \\}
+ \\threadlocal var x: i32 = 1234;
\\
);
}
-test "zig fmt: fn decl with trailing comma" {
- try testTransform(
- \\fn foo(a: i32, b: i32,) void {}
- ,
- \\fn foo(
- \\ a: i32,
- \\ b: i32,
- \\) void {}
+test "zig fmt: linksection" {
+ try testCanonical(
+ \\export var aoeu: u64 linksection(".text.derp") = 1234;
+ \\export fn _start() linksection(".text.boot") callconv(.Naked) noreturn {}
\\
);
}
-test "zig fmt: enum decl with no trailing comma" {
+test "zig fmt: correctly space struct fields with doc comments" {
try testTransform(
- \\const StrLitKind = enum {Normal, C};
+ \\pub const S = struct {
+ \\ /// A
+ \\ a: u8,
+ \\ /// B
+ \\ /// B (cont)
+ \\ b: u8,
+ \\
+ \\
+ \\ /// C
+ \\ c: u8,
+ \\};
+ \\
,
- \\const StrLitKind = enum { Normal, C };
+ \\pub const S = struct {
+ \\ /// A
+ \\ a: u8,
+ \\ /// B
+ \\ /// B (cont)
+ \\ b: u8,
+ \\
+ \\ /// C
+ \\ c: u8,
+ \\};
\\
);
}
-test "zig fmt: switch comment before prong" {
+test "zig fmt: doc comments on param decl" {
try testCanonical(
- \\comptime {
- \\ switch (a) {
- \\ // hi
- \\ 0 => {},
- \\ }
- \\}
+ \\pub const Allocator = struct {
+ \\ shrinkFn: fn (
+ \\ self: *Allocator,
+ \\ /// Guaranteed to be the same as what was returned from most recent call to
+ \\ /// `allocFn`, `reallocFn`, or `shrinkFn`.
+ \\ old_mem: []u8,
+ \\ /// Guaranteed to be the same as what was returned from most recent call to
+ \\ /// `allocFn`, `reallocFn`, or `shrinkFn`.
+ \\ old_alignment: u29,
+ \\ /// Guaranteed to be less than or equal to `old_mem.len`.
+ \\ new_byte_count: usize,
+ \\ /// Guaranteed to be less than or equal to `old_alignment`.
+ \\ new_alignment: u29,
+ \\ ) []u8,
+ \\};
\\
);
}
-test "zig fmt: struct literal no trailing comma" {
- try testTransform(
- \\const a = foo{ .x = 1, .y = 2 };
- \\const a = foo{ .x = 1,
- \\ .y = 2 };
- ,
- \\const a = foo{ .x = 1, .y = 2 };
- \\const a = foo{
- \\ .x = 1,
- \\ .y = 2,
+test "zig fmt: aligned struct field" {
+ try testCanonical(
+ \\pub const S = struct {
+ \\ f: i32 align(32),
+ \\};
+ \\
+ );
+ try testCanonical(
+ \\pub const S = struct {
+ \\ f: i32 align(32) = 1,
\\};
\\
);
}
-test "zig fmt: struct literal containing a multiline expression" {
- try testTransform(
- \\const a = A{ .x = if (f1()) 10 else 20 };
- \\const a = A{ .x = if (f1()) 10 else 20, };
- \\const a = A{ .x = if (f1())
- \\ 10 else 20 };
- \\const a = A{ .x = if (f1()) 10 else 20, .y = f2() + 100 };
- \\const a = A{ .x = if (f1()) 10 else 20, .y = f2() + 100, };
- \\const a = A{ .x = if (f1())
- \\ 10 else 20};
- \\const a = A{ .x = switch(g) {0 => "ok", else => "no"} };
- \\
- ,
- \\const a = A{ .x = if (f1()) 10 else 20 };
- \\const a = A{
- \\ .x = if (f1()) 10 else 20,
- \\};
- \\const a = A{
- \\ .x = if (f1())
- \\ 10
- \\ else
- \\ 20,
- \\};
- \\const a = A{ .x = if (f1()) 10 else 20, .y = f2() + 100 };
- \\const a = A{
- \\ .x = if (f1()) 10 else 20,
- \\ .y = f2() + 100,
- \\};
- \\const a = A{
- \\ .x = if (f1())
- \\ 10
- \\ else
- \\ 20,
- \\};
- \\const a = A{
- \\ .x = switch (g) {
- \\ 0 => "ok",
- \\ else => "no",
- \\ },
- \\};
+test "zig fmt: comment to disable/enable zig fmt first" {
+ try testCanonical(
+ \\// Test trailing comma syntax
+ \\// zig fmt: off
\\
+ \\const struct_trailing_comma = struct { x: i32, y: i32, };
);
}
-test "zig fmt: array literal with hint" {
+test "zig fmt: comment to disable/enable zig fmt" {
try testTransform(
- \\const a = []u8{
+ \\const a = b;
+ \\// zig fmt: off
+ \\const c = d;
+ \\// zig fmt: on
+ \\const e = f;
+ ,
+ \\const a = b;
+ \\// zig fmt: off
+ \\const c = d;
+ \\// zig fmt: on
+ \\const e = f;
+ \\
+ );
+}
+
+test "zig fmt: line comment following 'zig fmt: off'" {
+ try testCanonical(
+ \\// zig fmt: off
+ \\// Test
+ \\const e = f;
+ );
+}
+
+test "zig fmt: doc comment following 'zig fmt: off'" {
+ try testCanonical(
+ \\// zig fmt: off
+ \\/// test
+ \\const e = f;
+ );
+}
+
+test "zig fmt: line and doc comment following 'zig fmt: off'" {
+ try testCanonical(
+ \\// zig fmt: off
+ \\// test 1
+ \\/// test 2
+ \\const e = f;
+ );
+}
+
+test "zig fmt: doc and line comment following 'zig fmt: off'" {
+ try testCanonical(
+ \\// zig fmt: off
+ \\/// test 1
+ \\// test 2
+ \\const e = f;
+ );
+}
+
+test "zig fmt: alternating 'zig fmt: off' and 'zig fmt: on'" {
+ try testCanonical(
+ \\// zig fmt: off
+ \\// zig fmt: on
+ \\// zig fmt: off
+ \\const e = f;
+ \\// zig fmt: off
+ \\// zig fmt: on
+ \\// zig fmt: off
+ \\const a = b;
+ \\// zig fmt: on
+ \\const c = d;
+ \\// zig fmt: on
+ \\
+ );
+}
+
+test "zig fmt: line comment following 'zig fmt: on'" {
+ try testCanonical(
+ \\// zig fmt: off
+ \\const e = f;
+ \\// zig fmt: on
+ \\// test
+ \\const e = f;
+ \\
+ );
+}
+
+test "zig fmt: doc comment following 'zig fmt: on'" {
+ try testCanonical(
+ \\// zig fmt: off
+ \\const e = f;
+ \\// zig fmt: on
+ \\/// test
+ \\const e = f;
+ \\
+ );
+}
+
+test "zig fmt: line and doc comment following 'zig fmt: on'" {
+ try testCanonical(
+ \\// zig fmt: off
+ \\const e = f;
+ \\// zig fmt: on
+ \\// test1
+ \\/// test2
+ \\const e = f;
+ \\
+ );
+}
+
+test "zig fmt: doc and line comment following 'zig fmt: on'" {
+ try testCanonical(
+ \\// zig fmt: off
+ \\const e = f;
+ \\// zig fmt: on
+ \\/// test1
+ \\// test2
+ \\const e = f;
+ \\
+ );
+}
+
+test "zig fmt: 'zig fmt: (off|on)' works in the middle of code" {
+ try testTransform(
+ \\test "" {
+ \\ const x = 42;
+ \\
+ \\ if (foobar) |y| {
+ \\ // zig fmt: off
+ \\ }// zig fmt: on
+ \\
+ \\ const z = 420;
+ \\}
+ \\
+ ,
+ \\test "" {
+ \\ const x = 42;
+ \\
+ \\ if (foobar) |y| {
+ \\ // zig fmt: off
+ \\ }// zig fmt: on
+ \\
+ \\ const z = 420;
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: pointer of unknown length" {
+ try testCanonical(
+ \\fn foo(ptr: [*]u8) void {}
+ \\
+ );
+}
+
+test "zig fmt: spaces around slice operator" {
+ try testCanonical(
+ \\var a = b[c..d];
+ \\var a = b[c..d :0];
+ \\var a = b[c + 1 .. d];
+ \\var a = b[c + 1 ..];
+ \\var a = b[c .. d + 1];
+ \\var a = b[c .. d + 1 :0];
+ \\var a = b[c.a..d.e];
+ \\var a = b[c.a..d.e :0];
+ \\
+ );
+}
+
+test "zig fmt: async call in if condition" {
+ try testCanonical(
+ \\comptime {
+ \\ if (async b()) {
+ \\ a();
+ \\ }
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: 2nd arg multiline string" {
+ try testCanonical(
+ \\comptime {
+ \\ cases.addAsm("hello world linux x86_64",
+ \\ \\.text
+ \\ , "Hello, world!\n");
+ \\}
+ \\
+ );
+ try testTransform(
+ \\comptime {
+ \\ cases.addAsm("hello world linux x86_64",
+ \\ \\.text
+ \\ , "Hello, world!\n",);
+ \\}
+ ,
+ \\comptime {
+ \\ cases.addAsm(
+ \\ "hello world linux x86_64",
+ \\ \\.text
+ \\ ,
+ \\ "Hello, world!\n",
+ \\ );
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: 2nd arg multiline string many args" {
+ try testCanonical(
+ \\comptime {
+ \\ cases.addAsm("hello world linux x86_64",
+ \\ \\.text
+ \\ , "Hello, world!\n", "Hello, world!\n");
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: final arg multiline string" {
+ try testCanonical(
+ \\comptime {
+ \\ cases.addAsm("hello world linux x86_64", "Hello, world!\n",
+ \\ \\.text
+ \\ );
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: if condition wraps" {
+ try testTransform(
+ \\comptime {
+ \\ if (cond and
+ \\ cond) {
+ \\ return x;
+ \\ }
+ \\ while (cond and
+ \\ cond) {
+ \\ return x;
+ \\ }
+ \\ if (a == b and
+ \\ c) {
+ \\ a = b;
+ \\ }
+ \\ while (a == b and
+ \\ c) {
+ \\ a = b;
+ \\ }
+ \\ if ((cond and
+ \\ cond)) {
+ \\ return x;
+ \\ }
+ \\ while ((cond and
+ \\ cond)) {
+ \\ return x;
+ \\ }
+ \\ var a = if (a) |*f| x: {
+ \\ break :x &a.b;
+ \\ } else |err| err;
+ \\ var a = if (cond and
+ \\ cond) |*f|
+ \\ x: {
+ \\ break :x &a.b;
+ \\ } else |err| err;
+ \\}
+ ,
+ \\comptime {
+ \\ if (cond and
+ \\ cond)
+ \\ {
+ \\ return x;
+ \\ }
+ \\ while (cond and
+ \\ cond)
+ \\ {
+ \\ return x;
+ \\ }
+ \\ if (a == b and
+ \\ c)
+ \\ {
+ \\ a = b;
+ \\ }
+ \\ while (a == b and
+ \\ c)
+ \\ {
+ \\ a = b;
+ \\ }
+ \\ if ((cond and
+ \\ cond))
+ \\ {
+ \\ return x;
+ \\ }
+ \\ while ((cond and
+ \\ cond))
+ \\ {
+ \\ return x;
+ \\ }
+ \\ var a = if (a) |*f| x: {
+ \\ break :x &a.b;
+ \\ } else |err| err;
+ \\ var a = if (cond and
+ \\ cond) |*f|
+ \\ x: {
+ \\ break :x &a.b;
+ \\ } else |err| err;
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: if condition has line break but must not wrap" {
+ try testCanonical(
+ \\comptime {
+ \\ if (self.user_input_options.put(
+ \\ name,
+ \\ UserInputOption{
+ \\ .name = name,
+ \\ .used = false,
+ \\ },
+ \\ ) catch unreachable) |*prev_value| {
+ \\ foo();
+ \\ bar();
+ \\ }
+ \\ if (put(
+ \\ a,
+ \\ b,
+ \\ )) {
+ \\ foo();
+ \\ }
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: if condition has line break but must not wrap (no fn call comma)" {
+ try testCanonical(
+ \\comptime {
+ \\ if (self.user_input_options.put(name, UserInputOption{
+ \\ .name = name,
+ \\ .used = false,
+ \\ }) catch unreachable) |*prev_value| {
+ \\ foo();
+ \\ bar();
+ \\ }
+ \\ if (put(
+ \\ a,
+ \\ b,
+ \\ )) {
+ \\ foo();
+ \\ }
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: function call with multiline argument" {
+ try testCanonical(
+ \\comptime {
+ \\ self.user_input_options.put(name, UserInputOption{
+ \\ .name = name,
+ \\ .used = false,
+ \\ });
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: if-else with comment before else" {
+ try testCanonical(
+ \\comptime {
+ \\ // cexp(finite|nan +- i inf|nan) = nan + i nan
+ \\ if ((hx & 0x7fffffff) != 0x7f800000) {
+ \\ return Complex(f32).new(y - y, y - y);
+ \\ } // cexp(-inf +- i inf|nan) = 0 + i0
+ \\ else if (hx & 0x80000000 != 0) {
+ \\ return Complex(f32).new(0, 0);
+ \\ } // cexp(+inf +- i inf|nan) = inf + i nan
+ \\ else {
+ \\ return Complex(f32).new(x, y - y);
+ \\ }
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: if nested" {
+ try testCanonical(
+ \\pub fn foo() void {
+ \\ return if ((aInt & bInt) >= 0)
+ \\ if (aInt < bInt)
+ \\ GE_LESS
+ \\ else if (aInt == bInt)
+ \\ GE_EQUAL
+ \\ else
+ \\ GE_GREATER
+ \\ // comment
+ \\ else if (aInt > bInt)
+ \\ GE_LESS
+ \\ else if (aInt == bInt)
+ \\ GE_EQUAL
+ \\ else
+ \\ GE_GREATER;
+ \\ // comment
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: respect line breaks in if-else" {
+ try testCanonical(
+ \\comptime {
+ \\ return if (cond) a else b;
+ \\ return if (cond)
+ \\ a
+ \\ else
+ \\ b;
+ \\ return if (cond)
+ \\ a
+ \\ else if (cond)
+ \\ b
+ \\ else
+ \\ c;
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: respect line breaks after infix operators" {
+ try testCanonical(
+ \\comptime {
+ \\ self.crc =
+ \\ lookup_tables[0][p[7]] ^
+ \\ lookup_tables[1][p[6]] ^
+ \\ lookup_tables[2][p[5]] ^
+ \\ lookup_tables[3][p[4]] ^
+ \\ lookup_tables[4][@truncate(u8, self.crc >> 24)] ^
+ \\ lookup_tables[5][@truncate(u8, self.crc >> 16)] ^
+ \\ lookup_tables[6][@truncate(u8, self.crc >> 8)] ^
+ \\ lookup_tables[7][@truncate(u8, self.crc >> 0)];
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: fn decl with trailing comma" {
+ try testTransform(
+ \\fn foo(a: i32, b: i32,) void {}
+ ,
+ \\fn foo(
+ \\ a: i32,
+ \\ b: i32,
+ \\) void {}
+ \\
+ );
+}
+
+test "zig fmt: enum decl with no trailing comma" {
+ try testTransform(
+ \\const StrLitKind = enum {Normal, C};
+ ,
+ \\const StrLitKind = enum { Normal, C };
+ \\
+ );
+}
+
+test "zig fmt: switch comment before prong" {
+ try testCanonical(
+ \\comptime {
+ \\ switch (a) {
+ \\ // hi
+ \\ 0 => {},
+ \\ }
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: struct literal no trailing comma" {
+ try testTransform(
+ \\const a = foo{ .x = 1, .y = 2 };
+ \\const a = foo{ .x = 1,
+ \\ .y = 2 };
+ \\const a = foo{ .x = 1,
+ \\ .y = 2, };
+ ,
+ \\const a = foo{ .x = 1, .y = 2 };
+ \\const a = foo{ .x = 1, .y = 2 };
+ \\const a = foo{
+ \\ .x = 1,
+ \\ .y = 2,
+ \\};
+ \\
+ );
+}
+
+test "zig fmt: struct literal containing a multiline expression" {
+ try testTransform(
+ \\const a = A{ .x = if (f1()) 10 else 20 };
+ \\const a = A{ .x = if (f1()) 10 else 20, };
+ \\const a = A{ .x = if (f1())
+ \\ 10 else 20 };
+ \\const a = A{ .x = if (f1())
+ \\ 10 else 20,};
+ \\const a = A{ .x = if (f1()) 10 else 20, .y = f2() + 100 };
+ \\const a = A{ .x = if (f1()) 10 else 20, .y = f2() + 100, };
+ \\const a = A{ .x = if (f1())
+ \\ 10 else 20};
+ \\const a = A{ .x = if (f1())
+ \\ 10 else 20,};
+ \\const a = A{ .x = switch(g) {0 => "ok", else => "no"} };
+ \\const a = A{ .x = switch(g) {0 => "ok", else => "no"}, };
+ \\
+ ,
+ \\const a = A{ .x = if (f1()) 10 else 20 };
+ \\const a = A{
+ \\ .x = if (f1()) 10 else 20,
+ \\};
+ \\const a = A{ .x = if (f1())
+ \\ 10
+ \\else
+ \\ 20 };
+ \\const a = A{
+ \\ .x = if (f1())
+ \\ 10
+ \\ else
+ \\ 20,
+ \\};
+ \\const a = A{ .x = if (f1()) 10 else 20, .y = f2() + 100 };
+ \\const a = A{
+ \\ .x = if (f1()) 10 else 20,
+ \\ .y = f2() + 100,
+ \\};
+ \\const a = A{ .x = if (f1())
+ \\ 10
+ \\else
+ \\ 20 };
+ \\const a = A{
+ \\ .x = if (f1())
+ \\ 10
+ \\ else
+ \\ 20,
+ \\};
+ \\const a = A{ .x = switch (g) {
+ \\ 0 => "ok",
+ \\ else => "no",
+ \\} };
+ \\const a = A{
+ \\ .x = switch (g) {
+ \\ 0 => "ok",
+ \\ else => "no",
+ \\ },
+ \\};
+ \\
+ );
+}
+
+test "zig fmt: array literal with hint" {
+ try testTransform(
+ \\const a = []u8{
\\ 1, 2, //
\\ 3,
\\ 4,
@@ -1368,19 +1696,19 @@ test "zig fmt: array literal with hint" {
\\};
,
\\const a = []u8{
- \\ 1, 2,
+ \\ 1, 2, //
\\ 3, 4,
\\ 5, 6,
\\ 7,
\\};
\\const a = []u8{
- \\ 1, 2,
+ \\ 1, 2, //
\\ 3, 4,
\\ 5, 6,
\\ 7, 8,
\\};
\\const a = []u8{
- \\ 1, 2,
+ \\ 1, 2, //
\\ 3, 4,
\\ 5,
\\ 6, // blah
@@ -1388,21 +1716,19 @@ test "zig fmt: array literal with hint" {
\\ 8,
\\};
\\const a = []u8{
- \\ 1, 2,
+ \\ 1, 2, //
\\ 3, //
\\ 4,
- \\ 5, 6,
+ \\ 5,
+ \\ 6,
\\ 7,
\\};
\\const a = []u8{
\\ 1,
\\ 2,
- \\ 3,
- \\ 4,
- \\ 5,
- \\ 6,
- \\ 7,
- \\ 8,
+ \\ 3, 4, //
+ \\ 5, 6, //
+ \\ 7, 8, //
\\};
\\
);
@@ -1508,11 +1834,21 @@ test "zig fmt: empty block with only comment" {
);
}
-test "zig fmt: no trailing comma on struct decl" {
- try testCanonical(
+test "zig fmt: trailing commas on struct decl" {
+ try testTransform(
\\const RoundParam = struct {
\\ k: usize, s: u32, t: u32
\\};
+ \\const RoundParam = struct {
+ \\ k: usize, s: u32, t: u32,
+ \\};
+ ,
+ \\const RoundParam = struct { k: usize, s: u32, t: u32 };
+ \\const RoundParam = struct {
+ \\ k: usize,
+ \\ s: u32,
+ \\ t: u32,
+ \\};
\\
);
}
@@ -1560,11 +1896,7 @@ test "zig fmt: simple asm" {
\\ : [a] "x" (-> i32)
\\ : [a] "x" (1)
\\ );
- \\ asm ("still not real assembly"
- \\ :
- \\ :
- \\ : "a", "b"
- \\ );
+ \\ asm ("still not real assembly" ::: "a", "b");
\\}
\\
);
@@ -1581,7 +1913,7 @@ test "zig fmt: nested struct literal with one item" {
test "zig fmt: switch cases trailing comma" {
try testTransform(
- \\fn switch_cases(x: i32) void {
+ \\test "switch cases trailing comma"{
\\ switch (x) {
\\ 1,2,3 => {},
\\ 4,5, => {},
@@ -1590,7 +1922,7 @@ test "zig fmt: switch cases trailing comma" {
\\ }
\\}
,
- \\fn switch_cases(x: i32) void {
+ \\test "switch cases trailing comma" {
\\ switch (x) {
\\ 1, 2, 3 => {},
\\ 4,
@@ -1657,18 +1989,18 @@ test "zig fmt: line comment after doc comment" {
);
}
-test "zig fmt: float literal with exponent" {
+test "zig fmt: bit field alignment" {
try testCanonical(
- \\test "bit field alignment" {
+ \\test {
\\ assert(@TypeOf(&blah.b) == *align(1:3:6) const u3);
\\}
\\
);
}
-test "zig fmt: float literal with exponent" {
+test "zig fmt: nested switch" {
try testCanonical(
- \\test "aoeu" {
+ \\test {
\\ switch (state) {
\\ TermState.Start => switch (c) {
\\ '\x1b' => state = TermState.Escape,
@@ -1679,6 +2011,7 @@ test "zig fmt: float literal with exponent" {
\\
);
}
+
test "zig fmt: float literal with exponent" {
try testCanonical(
\\pub const f64_true_min = 4.94065645841246544177e-324;
@@ -2135,7 +2468,7 @@ test "zig fmt: preserve spacing" {
test "zig fmt: return types" {
try testCanonical(
\\pub fn main() !void {}
- \\pub fn main() anytype {}
+ \\pub fn main() FooBar {}
\\pub fn main() i32 {}
\\
);
@@ -2207,6 +2540,33 @@ test "zig fmt: return" {
);
}
+test "zig fmt: function attributes" {
+ try testCanonical(
+ \\export fn foo() void {}
+ \\pub export fn foo() void {}
+ \\extern fn foo() void;
+ \\pub extern fn foo() void;
+ \\extern "c" fn foo() void;
+ \\pub extern "c" fn foo() void;
+ \\noinline fn foo() void {}
+ \\pub noinline fn foo() void {}
+ \\
+ );
+}
+
+test "zig fmt: nested pointers with ** tokens" {
+ try testCanonical(
+ \\const x: *u32 = undefined;
+ \\const x: **u32 = undefined;
+ \\const x: ***u32 = undefined;
+ \\const x: ****u32 = undefined;
+ \\const x: *****u32 = undefined;
+ \\const x: ******u32 = undefined;
+ \\const x: *******u32 = undefined;
+ \\
+ );
+}
+
test "zig fmt: pointer attributes" {
try testCanonical(
\\extern fn f1(s: *align(*u8) u8) c_int;
@@ -2220,11 +2580,11 @@ test "zig fmt: pointer attributes" {
test "zig fmt: slice attributes" {
try testCanonical(
- \\extern fn f1(s: *align(*u8) u8) c_int;
- \\extern fn f2(s: **align(1) *const *volatile u8) c_int;
- \\extern fn f3(s: *align(1) const *align(1) volatile *const volatile u8) c_int;
- \\extern fn f4(s: *align(1) const volatile u8) c_int;
- \\extern fn f5(s: [*:0]align(1) const volatile u8) c_int;
+ \\extern fn f1(s: []align(*u8) u8) c_int;
+ \\extern fn f2(s: []align(1) []const []volatile u8) c_int;
+ \\extern fn f3(s: []align(1) const [:0]align(1) volatile []const volatile u8) c_int;
+ \\extern fn f4(s: []align(1) const volatile u8) c_int;
+ \\extern fn f5(s: [:0]align(1) const volatile u8) c_int;
\\
);
}
@@ -2241,7 +2601,7 @@ test "zig fmt: test declaration" {
test "zig fmt: infix operators" {
try testCanonical(
- \\test "infix operators" {
+ \\test {
\\ var i = undefined;
\\ i = 2;
\\ i *= 2;
@@ -2345,7 +2705,7 @@ test "zig fmt: call expression" {
test "zig fmt: anytype type" {
try testCanonical(
- \\fn print(args: anytype) anytype {}
+ \\fn print(args: anytype) @This() {}
\\
);
}
@@ -2570,7 +2930,11 @@ test "zig fmt: catch" {
\\test "catch" {
\\ const a: anyerror!u8 = 0;
\\ _ = a catch return;
+ \\ _ = a catch
+ \\ return;
\\ _ = a catch |err| return;
+ \\ _ = a catch |err|
+ \\ return;
\\}
\\
);
@@ -2746,12 +3110,6 @@ test "zig fmt: for" {
\\ d => {},
\\ };
\\
- \\ for (a) |b|
- \\ switch (b) {
- \\ c => {},
- \\ d => {},
- \\ };
- \\
\\ const res = for (a) |v, i| {
\\ break v;
\\ } else {
@@ -2777,7 +3135,8 @@ test "zig fmt: for" {
\\test "fix for" {
\\ for (a) |x|
\\ f(x)
- \\ else continue;
+ \\ else
+ \\ continue;
\\}
\\
);
@@ -2948,7 +3307,7 @@ test "zig fmt: nosuspend" {
test "zig fmt: Block after if" {
try testCanonical(
- \\test "Block after if" {
+ \\test {
\\ if (true) {
\\ const a = 0;
\\ }
@@ -2961,7 +3320,7 @@ test "zig fmt: Block after if" {
);
}
-test "zig fmt: use" {
+test "zig fmt: usingnamespace" {
try testCanonical(
\\usingnamespace @import("std");
\\pub usingnamespace @import("std");
@@ -3025,10 +3384,7 @@ test "zig fmt: inline asm parameter alignment" {
\\ asm volatile (
\\ \\ foo
\\ \\ bar
- \\ :
- \\ :
- \\ : "", ""
- \\ );
+ \\ ::: "", "");
\\ asm volatile (
\\ \\ foo
\\ \\ bar
@@ -3087,16 +3443,12 @@ test "zig fmt: file ends with struct field" {
}
test "zig fmt: comment after empty comment" {
- try testTransform(
+ try testCanonical(
\\const x = true; //
\\//
\\//
\\//a
\\
- ,
- \\const x = true;
- \\//a
- \\
);
}
@@ -3113,7 +3465,8 @@ test "zig fmt: line comment in array" {
,
\\test "a" {
\\ var arr = [_]u32{
- \\ 0, // 1,
+ \\ 0,
+ \\ // 1,
\\ // 2,
\\ };
\\}
@@ -3141,7 +3494,8 @@ test "zig fmt: comment after params" {
\\
,
\\fn a(
- \\ b: u32, // c: u32,
+ \\ b: u32,
+ \\ // c: u32,
\\ // d: u32,
\\) void {}
\\
@@ -3174,13 +3528,17 @@ test "zig fmt: comment in array initializer/access" {
\\ var c = b[ //aa
\\ 0
\\ ];
- \\ var d = [_
+ \\ var d = [
+ \\ _
\\ //aa
+ \\ :
+ \\ 0
\\ ]x{ //aa
\\ //bb
\\ 9,
\\ };
- \\ var e = d[0
+ \\ var e = d[
+ \\ 0
\\ //aa
\\ ];
\\}
@@ -3199,7 +3557,8 @@ test "zig fmt: comments at several places in struct init" {
,
\\var bar = Bar{
\\ .x = 10, // test
- \\ .y = "test", // test
+ \\ .y = "test",
+ \\ // test
\\};
\\
);
@@ -3214,7 +3573,7 @@ test "zig fmt: comments at several places in struct init" {
);
}
-test "zig fmt: top level doc comments" {
+test "zig fmt: container doc comments" {
try testCanonical(
\\//! tld 1
\\//! tld 2
@@ -3235,25 +3594,25 @@ test "zig fmt: top level doc comments" {
\\ //! B tld 2
\\ //! B tld 3
\\
- \\ /// b doc
+ \\ /// B doc
\\ b: u32,
\\};
\\
\\/// C doc
- \\const C = struct {
+ \\const C = union(enum) { // comment
\\ //! C tld 1
\\ //! C tld 2
\\ //! C tld 3
+ \\};
\\
- \\ /// c1 doc
- \\ c1: u32,
- \\
- \\ //! C tld 4
- \\ //! C tld 5
- \\ //! C tld 6
+ \\/// D doc
+ \\const D = union(Foo) {
+ \\ //! D tld 1
+ \\ //! D tld 2
+ \\ //! D tld 3
\\
- \\ /// c2 doc
- \\ c2: u32,
+ \\ /// D doc
+ \\ b: u32,
\\};
\\
);
@@ -3275,8 +3634,31 @@ test "zig fmt: extern without container keyword returns error" {
\\const container = extern {};
\\
, &[_]Error{
- .ExpectedExpr,
- .ExpectedVarDeclOrFn,
+ .expected_container,
+ });
+}
+
+test "zig fmt: same line doc comment returns error" {
+ try testError(
+ \\const Foo = struct{
+ \\ bar: u32, /// comment
+ \\ foo: u32, /// comment
+ \\ /// comment
+ \\};
+ \\
+ \\const a = 42; /// comment
+ \\
+ \\extern fn foo() void; /// comment
+ \\
+ \\/// comment
+ \\
+ , &[_]Error{
+ .same_line_doc_comment,
+ .same_line_doc_comment,
+ .unattached_doc_comment,
+ .same_line_doc_comment,
+ .same_line_doc_comment,
+ .unattached_doc_comment,
});
}
@@ -3350,26 +3732,6 @@ test "zig fmt: hexadeciaml float literals with underscore separators" {
);
}
-test "zig fmt: convert async fn into callconv(.Async)" {
- try testTransform(
- \\async fn foo() void {}
- ,
- \\fn foo() callconv(.Async) void {}
- \\
- );
-}
-
-test "zig fmt: convert extern fn proto into callconv(.C)" {
- try testTransform(
- \\extern fn foo0() void {}
- \\const foo1 = extern fn () void;
- ,
- \\extern fn foo0() void {}
- \\const foo1 = fn () callconv(.C) void;
- \\
- );
-}
-
test "zig fmt: C var args" {
try testCanonical(
\\pub extern "c" fn printf(format: [*:0]const u8, ...) c_int;
@@ -3458,6 +3820,54 @@ test "zig fmt: test comments in field access chain" {
);
}
+test "zig fmt: allow line break before field access" {
+ try testCanonical(
+ \\test {
+ \\ const w = foo.bar().zippy(zag).iguessthisisok();
+ \\
+ \\ const x = foo
+ \\ .bar()
+ \\ . // comment
+ \\ // comment
+ \\ swooop().zippy(zag)
+ \\ .iguessthisisok();
+ \\
+ \\ const y = view.output.root.server.input_manager.default_seat.wlr_seat.name;
+ \\
+ \\ const z = view.output.root.server
+ \\ .input_manager //
+ \\ .default_seat
+ \\ . // comment
+ \\ // another comment
+ \\ wlr_seat.name;
+ \\}
+ \\
+ );
+ try testTransform(
+ \\test {
+ \\ const x = foo.
+ \\ bar()
+ \\ .zippy(zag).iguessthisisok();
+ \\
+ \\ const z = view.output.root.server.
+ \\ input_manager.
+ \\ default_seat.wlr_seat.name;
+ \\}
+ \\
+ ,
+ \\test {
+ \\ const x = foo
+ \\ .bar()
+ \\ .zippy(zag).iguessthisisok();
+ \\
+ \\ const z = view.output.root.server
+ \\ .input_manager
+ \\ .default_seat.wlr_seat.name;
+ \\}
+ \\
+ );
+}
+
test "zig fmt: Indent comma correctly after multiline string literals in arg list (trailing comma)" {
try testCanonical(
\\fn foo() void {
@@ -3495,8 +3905,7 @@ test "zig fmt: Control flow statement as body of blockless if" {
\\
\\ const zoom_node = if (focused_node == layout_first) while (it.next()) |node| {
\\ if (!node.view.pending.float and !node.view.pending.fullscreen) break node;
- \\ } else null else
- \\ focused_node;
+ \\ } else null else focused_node;
\\
\\ const zoom_node = if (focused_node == layout_first)
\\ if (it.next()) {
@@ -3513,14 +3922,13 @@ test "zig fmt: Control flow statement as body of blockless if" {
\\
\\ const zoom_node = if (focused_node == layout_first) switch (nodes) {
\\ 0 => 0,
- \\ } else
- \\ focused_node;
+ \\ } else focused_node;
\\}
\\
);
}
-test "zig fmt: " {
+test "zig fmt: regression test for #5722" {
try testCanonical(
\\pub fn sendViewTags(self: Self) void {
\\ var it = ViewStack(View).iterator(self.output.views.first, std.math.maxInt(u32));
@@ -3580,8 +3988,8 @@ test "zig fmt: multiline string literals should play nice with array initializer
\\ 0,
\\ }}}}}}}};
\\ myFunc(.{
- \\ "aaaaaaa", "bbbbbb", "ccccc",
- \\ "dddd", ("eee"), ("fff"),
+ \\ "aaaaaaa", "bbbbbb", "ccccc",
+ \\ "dddd", ("eee"), ("fff"),
\\ ("gggg"),
\\ // Line comment
\\ \\Multiline String Literals can be quite long
@@ -3610,9 +4018,11 @@ test "zig fmt: multiline string literals should play nice with array initializer
\\ (
\\ \\ xxx
\\ ),
- \\ "xxx", "xxx",
+ \\ "xxx",
+ \\ "xxx",
\\ },
- \\ .{ "xxxxxxx", "xxx", "xxx", "xxx" }, .{ "xxxxxxx", "xxx", "xxx", "xxx" },
+ \\ .{ "xxxxxxx", "xxx", "xxx", "xxx" },
+ \\ .{ "xxxxxxx", "xxx", "xxx", "xxx" },
\\ "aaaaaaa", "bbbbbb", "ccccc", // -
\\ "dddd", ("eee"), ("fff"),
\\ .{
@@ -3620,7 +4030,8 @@ test "zig fmt: multiline string literals should play nice with array initializer
\\ (
\\ \\ xxx
\\ ),
- \\ "xxxxxxxxxxxxxx", "xxx",
+ \\ "xxxxxxxxxxxxxx",
+ \\ "xxx",
\\ },
\\ .{
\\ (
@@ -3636,10 +4047,10 @@ test "zig fmt: multiline string literals should play nice with array initializer
);
}
-test "zig fmt: use of comments and Multiline string literals may force the parameters over multiple lines" {
+test "zig fmt: use of comments and multiline string literals may force the parameters over multiple lines" {
try testCanonical(
\\pub fn makeMemUndefined(qzz: []u8) i1 {
- \\ cases.add( // fixed bug #2032
+ \\ cases.add( // fixed bug foo
\\ "compile diagnostic string for top level decl type",
\\ \\export fn entry() void {
\\ \\ var foo: u32 = @This(){};
@@ -3657,72 +4068,474 @@ test "zig fmt: use of comments and Multiline string literals may force the param
\\ .MakeMemUndefined, @ptrToInt(qzz.ptr), qzz.len, 0, 0, 0));
\\}
\\
- \\// This looks like garbage don't do this
- \\const rparen = tree.prevToken(
- \\// the first token for the annotation expressions is the left
- \\// parenthesis, hence the need for two prevToken
- \\ if (fn_proto.getAlignExpr()) |align_expr|
- \\ tree.prevToken(tree.prevToken(align_expr.firstToken()))
- \\else if (fn_proto.getSectionExpr()) |section_expr|
- \\ tree.prevToken(tree.prevToken(section_expr.firstToken()))
- \\else if (fn_proto.getCallconvExpr()) |callconv_expr|
- \\ tree.prevToken(tree.prevToken(callconv_expr.firstToken()))
- \\else switch (fn_proto.return_type) {
- \\ .Explicit => |node| node.firstToken(),
- \\ .InferErrorSet => |node| tree.prevToken(node.firstToken()),
- \\ .Invalid => unreachable,
- \\});
+ \\// This looks like garbage don't do this
+ \\const rparen = tree.prevToken(
+ \\// the first token for the annotation expressions is the left
+ \\// parenthesis, hence the need for two prevToken
+ \\if (fn_proto.getAlignExpr()) |align_expr|
+ \\ tree.prevToken(tree.prevToken(align_expr.firstToken()))
+ \\else if (fn_proto.getSectionExpr()) |section_expr|
+ \\ tree.prevToken(tree.prevToken(section_expr.firstToken()))
+ \\else if (fn_proto.getCallconvExpr()) |callconv_expr|
+ \\ tree.prevToken(tree.prevToken(callconv_expr.firstToken()))
+ \\else switch (fn_proto.return_type) {
+ \\ .Explicit => |node| node.firstToken(),
+ \\ .InferErrorSet => |node| tree.prevToken(node.firstToken()),
+ \\ .Invalid => unreachable,
+ \\});
+ \\
+ );
+}
+
+test "zig fmt: single argument trailing commas in @builtins()" {
+ try testCanonical(
+ \\pub fn foo(qzz: []u8) i1 {
+ \\ @panic(
+ \\ foo,
+ \\ );
+ \\ panic(
+ \\ foo,
+ \\ );
+ \\ @panic(
+ \\ foo,
+ \\ bar,
+ \\ );
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: trailing comma should force multiline 1 column" {
+ try testTransform(
+ \\pub const UUID_NULL: uuid_t = [16]u8{0,0,0,0,};
+ \\
+ ,
+ \\pub const UUID_NULL: uuid_t = [16]u8{
+ \\ 0,
+ \\ 0,
+ \\ 0,
+ \\ 0,
+ \\};
+ \\
+ );
+}
+
+test "zig fmt: function params should align nicely" {
+ try testCanonical(
+ \\pub fn foo() void {
+ \\ cases.addRuntimeSafety("slicing operator with sentinel",
+ \\ \\const std = @import("std");
+ \\ ++ check_panic_msg ++
+ \\ \\pub fn main() void {
+ \\ \\ var buf = [4]u8{'a','b','c',0};
+ \\ \\ const slice = buf[0..:0];
+ \\ \\}
+ \\ );
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: fn proto end with anytype and comma" {
+ try testCanonical(
+ \\pub fn format(
+ \\ out_stream: anytype,
+ \\) !void {}
+ \\
+ );
+}
+
+test "zig fmt: space after top level doc comment" {
+ try testCanonical(
+ \\//! top level doc comment
+ \\
+ \\field: i32,
+ \\
+ );
+}
+
+test "zig fmt: for loop with ptr payload and index" {
+ try testCanonical(
+ \\test {
+ \\ for (self.entries.items) |*item, i| {}
+ \\ for (self.entries.items) |*item, i|
+ \\ a = b;
+ \\ for (self.entries.items) |*item, i| a = b;
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: proper indent line comment after multi-line single expr while loop" {
+ try testCanonical(
+ \\test {
+ \\ while (a) : (b)
+ \\ foo();
+ \\
+ \\ // bar
+ \\ baz();
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: function with labeled block as return type" {
+ try testCanonical(
+ \\fn foo() t: {
+ \\ break :t bar;
+ \\} {
+ \\ baz();
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: line comment after multiline single expr if statement with multiline string" {
+ try testCanonical(
+ \\test {
+ \\ if (foo)
+ \\ x =
+ \\ \\hello
+ \\ \\hello
+ \\ \\
+ \\ ;
+ \\
+ \\ // bar
+ \\ baz();
+ \\
+ \\ if (foo)
+ \\ x =
+ \\ \\hello
+ \\ \\hello
+ \\ \\
+ \\ else
+ \\ y =
+ \\ \\hello
+ \\ \\hello
+ \\ \\
+ \\ ;
+ \\
+ \\ // bar
+ \\ baz();
+ \\}
+ \\
+ );
+}
+
+test "zig fmt: respect extra newline between fn and pub usingnamespace" {
+ try testCanonical(
+ \\fn foo() void {
+ \\ bar();
+ \\}
+ \\
+ \\pub usingnamespace baz;
\\
);
}
-test "zig fmt: single argument trailing commas in @builtins()" {
+test "zig fmt: respect extra newline between switch items" {
try testCanonical(
- \\pub fn foo(qzz: []u8) i1 {
- \\ @panic(
- \\ foo,
- \\ );
- \\ panic(
- \\ foo,
- \\ );
- \\ @panic(
- \\ foo,
- \\ bar,
- \\ );
- \\}
+ \\const a = switch (b) {
+ \\ .c => {},
+ \\
+ \\ .d,
+ \\ .e,
+ \\ => f,
+ \\};
\\
);
}
-test "zig fmt: trailing comma should force multiline 1 column" {
+test "zig fmt: insert trailing comma if there are comments between switch values" {
try testTransform(
- \\pub const UUID_NULL: uuid_t = [16]u8{0,0,0,0,};
+ \\const a = switch (b) {
+ \\ .c => {},
+ \\
+ \\ .d, // foobar
+ \\ .e
+ \\ => f,
+ \\
+ \\ .g, .h
+ \\ // comment
+ \\ => i,
+ \\};
\\
,
- \\pub const UUID_NULL: uuid_t = [16]u8{
- \\ 0,
- \\ 0,
- \\ 0,
- \\ 0,
+ \\const a = switch (b) {
+ \\ .c => {},
+ \\
+ \\ .d, // foobar
+ \\ .e,
+ \\ => f,
+ \\
+ \\ .g,
+ \\ .h,
+ \\ // comment
+ \\ => i,
\\};
\\
);
}
-test "zig fmt: function params should align nicely" {
- try testCanonical(
- \\pub fn foo() void {
- \\ cases.addRuntimeSafety("slicing operator with sentinel",
- \\ \\const std = @import("std");
- \\ ++ check_panic_msg ++
- \\ \\pub fn main() void {
- \\ \\ var buf = [4]u8{'a','b','c',0};
- \\ \\ const slice = buf[0..:0];
- \\ \\}
- \\ );
+test "zig fmt: error for invalid bit range" {
+ try testError(
+ \\var x: []align(0:0:0)u8 = bar;
+ , &[_]Error{
+ .invalid_bit_range,
+ });
+}
+
+test "zig fmt: error for invalid align" {
+ try testError(
+ \\var x: [10]align(10)u8 = bar;
+ , &[_]Error{
+ .invalid_align,
+ });
+}
+
+test "recovery: top level" {
+ try testError(
+ \\test "" {inline}
+ \\test "" {inline}
+ , &[_]Error{
+ .expected_inlinable,
+ .expected_inlinable,
+ });
+}
+
+test "recovery: block statements" {
+ try testError(
+ \\test "" {
+ \\ foo + +;
+ \\ inline;
\\}
- \\
- );
+ , &[_]Error{
+ .invalid_token,
+ .expected_inlinable,
+ });
+}
+
+test "recovery: missing comma" {
+ try testError(
+ \\test "" {
+ \\ switch (foo) {
+ \\ 2 => {}
+ \\ 3 => {}
+ \\ else => {
+ \\ foo && bar +;
+ \\ }
+ \\ }
+ \\}
+ , &[_]Error{
+ .expected_token,
+ .expected_token,
+ .invalid_and,
+ .invalid_token,
+ });
+}
+
+test "recovery: extra qualifier" {
+ try testError(
+ \\const a: *const const u8;
+ \\test ""
+ , &[_]Error{
+ .extra_const_qualifier,
+ .expected_block,
+ });
+}
+
+test "recovery: missing return type" {
+ try testError(
+ \\fn foo() {
+ \\ a && b;
+ \\}
+ \\test ""
+ , &[_]Error{
+ .expected_return_type,
+ .invalid_and,
+ .expected_block,
+ });
+}
+
+test "recovery: continue after invalid decl" {
+ try testError(
+ \\fn foo {
+ \\ inline;
+ \\}
+ \\pub test "" {
+ \\ async a && b;
+ \\}
+ , &[_]Error{
+ .expected_token,
+ .expected_pub_item,
+ .expected_param_list,
+ .invalid_and,
+ });
+ try testError(
+ \\threadlocal test "" {
+ \\ @a && b;
+ \\}
+ , &[_]Error{
+ .expected_var_decl,
+ .expected_param_list,
+ .invalid_and,
+ });
+}
+
+test "recovery: invalid extern/inline" {
+ try testError(
+ \\inline test "" { a && b; }
+ , &[_]Error{
+ .expected_fn,
+ .invalid_and,
+ });
+ try testError(
+ \\extern "" test "" { a && b; }
+ , &[_]Error{
+ .expected_var_decl_or_fn,
+ .invalid_and,
+ });
+}
+
+test "recovery: missing semicolon" {
+ try testError(
+ \\test "" {
+ \\ comptime a && b
+ \\ c && d
+ \\ @foo
+ \\}
+ , &[_]Error{
+ .invalid_and,
+ .expected_token,
+ .invalid_and,
+ .expected_token,
+ .expected_param_list,
+ .expected_token,
+ });
+}
+
+test "recovery: invalid container members" {
+ try testError(
+ \\usingnamespace;
+ \\foo+
+ \\bar@,
+ \\while (a == 2) { test "" {}}
+ \\test "" {
+ \\ a && b
+ \\}
+ , &[_]Error{
+ .expected_expr,
+ .expected_token,
+ .expected_container_members,
+ .invalid_and,
+ .expected_token,
+ });
+}
+
+// TODO after https://github.com/ziglang/zig/issues/35 is implemented,
+// we should be able to recover from this *at any indentation level*,
+// reporting a parse error and yet also parsing all the decls even
+// inside structs.
+test "recovery: extra '}' at top level" {
+ try testError(
+ \\}}}
+ \\test "" {
+ \\ a && b;
+ \\}
+ , &[_]Error{
+ .expected_token,
+ });
+}
+
+test "recovery: mismatched bracket at top level" {
+ try testError(
+ \\const S = struct {
+ \\ arr: 128]?G
+ \\};
+ , &[_]Error{
+ .expected_token,
+ });
+}
+
+test "recovery: invalid global error set access" {
+ try testError(
+ \\test "" {
+ \\ error && foo;
+ \\}
+ , &[_]Error{
+ .expected_token,
+ .expected_token,
+ .invalid_and,
+ });
+}
+
+test "recovery: invalid asterisk after pointer dereference" {
+ try testError(
+ \\test "" {
+ \\ var sequence = "repeat".*** 10;
+ \\}
+ , &[_]Error{
+ .asterisk_after_ptr_deref,
+ });
+ try testError(
+ \\test "" {
+ \\ var sequence = "repeat".** 10&&a;
+ \\}
+ , &[_]Error{
+ .asterisk_after_ptr_deref,
+ .invalid_and,
+ });
+}
+
+test "recovery: missing semicolon after if, for, while stmt" {
+ try testError(
+ \\test "" {
+ \\ if (foo) bar
+ \\ for (foo) |a| bar
+ \\ while (foo) bar
+ \\ a && b;
+ \\}
+ , &[_]Error{
+ .expected_semi_or_else,
+ .expected_semi_or_else,
+ .expected_semi_or_else,
+ .invalid_and,
+ });
+}
+
+test "recovery: invalid comptime" {
+ try testError(
+ \\comptime
+ , &[_]Error{
+ .expected_block_or_field,
+ });
+}
+
+test "recovery: missing block after for/while loops" {
+ try testError(
+ \\test "" { while (foo) }
+ , &[_]Error{
+ .expected_block_or_assignment,
+ });
+ try testError(
+ \\test "" { for (foo) |bar| }
+ , &[_]Error{
+ .expected_block_or_assignment,
+ });
+}
+
+test "recovery: missing for payload" {
+ try testError(
+ \\comptime {
+ \\ const a = for(a) {};
+ \\ const a: for(a) {};
+ \\ for(a) {}
+ \\}
+ , &[_]Error{
+ .expected_loop_payload,
+ .expected_loop_payload,
+ .expected_loop_payload,
+ });
}
const std = @import("std");
@@ -3736,12 +4549,12 @@ var fixed_buffer_mem: [100 * 1024]u8 = undefined;
fn testParse(source: []const u8, allocator: *mem.Allocator, anything_changed: *bool) ![]u8 {
const stderr = io.getStdErr().writer();
- const tree = try std.zig.parse(allocator, source);
- defer tree.deinit();
+ var tree = try std.zig.parse(allocator, source);
+ defer tree.deinit(allocator);
- for (tree.errors) |*parse_error| {
- const token = tree.token_locs[parse_error.loc()];
- const loc = tree.tokenLocation(0, parse_error.loc());
+ for (tree.errors) |parse_error| {
+ const token_start = tree.tokens.items(.start)[parse_error.token];
+ const loc = tree.tokenLocation(0, parse_error.token);
try stderr.print("(memory buffer):{d}:{d}: error: ", .{ loc.line + 1, loc.column + 1 });
try tree.renderError(parse_error, stderr);
try stderr.print("\n{s}\n", .{source[loc.line_start..loc.line_end]});
@@ -3750,13 +4563,7 @@ fn testParse(source: []const u8, allocator: *mem.Allocator, anything_changed: *b
while (i < loc.column) : (i += 1) {
try stderr.writeAll(" ");
}
- }
- {
- const caret_count = token.end - token.start;
- var i: usize = 0;
- while (i < caret_count) : (i += 1) {
- try stderr.writeAll("~");
- }
+ try stderr.writeAll("^");
}
try stderr.writeAll("\n");
}
@@ -3764,12 +4571,9 @@ fn testParse(source: []const u8, allocator: *mem.Allocator, anything_changed: *b
return error.ParseError;
}
- var buffer = std.ArrayList(u8).init(allocator);
- errdefer buffer.deinit();
-
- const writer = buffer.writer();
- anything_changed.* = try std.zig.render(allocator, writer, tree);
- return buffer.toOwnedSlice();
+ const formatted = try tree.render(allocator);
+ anything_changed.* = !mem.eql(u8, formatted, source);
+ return formatted;
}
fn testTransform(source: []const u8, expected_source: []const u8) !void {
const needed_alloc_count = x: {
@@ -3822,14 +4626,14 @@ fn testCanonical(source: []const u8) !void {
return testTransform(source, source);
}
-const Error = std.meta.Tag(std.zig.ast.Error);
+const Error = std.zig.ast.Error.Tag;
fn testError(source: []const u8, expected_errors: []const Error) !void {
- const tree = try std.zig.parse(std.testing.allocator, source);
- defer tree.deinit();
+ var tree = try std.zig.parse(std.testing.allocator, source);
+ defer tree.deinit(std.testing.allocator);
std.testing.expect(tree.errors.len == expected_errors.len);
for (expected_errors) |expected, i| {
- std.testing.expect(expected == tree.errors[i]);
+ std.testing.expectEqual(expected, tree.errors[i].tag);
}
}
diff --git a/lib/std/zig/render.zig b/lib/std/zig/render.zig
index 31cc6e16c251..673a05f8e610 100644
--- a/lib/std/zig/render.zig
+++ b/lib/std/zig/render.zig
@@ -6,6 +6,7 @@
const std = @import("../std.zig");
const assert = std.debug.assert;
const mem = std.mem;
+const Allocator = std.mem.Allocator;
const meta = std.meta;
const ast = std.zig.ast;
const Token = std.zig.Token;
@@ -13,2657 +14,2555 @@ const Token = std.zig.Token;
const indent_delta = 4;
const asm_indent_delta = 2;
-pub const Error = error{
- /// Ran out of memory allocating call stack frames to complete rendering.
- OutOfMemory,
-};
+pub const Error = ast.Tree.RenderError;
-/// Returns whether anything changed
-pub fn render(allocator: *mem.Allocator, stream: anytype, tree: *ast.Tree) (@TypeOf(stream).Error || Error)!bool {
- // cannot render an invalid tree
- std.debug.assert(tree.errors.len == 0);
+const Ais = AutoIndentingStream(std.ArrayList(u8).Writer);
- var change_detection_stream = std.io.changeDetectionStream(tree.source, stream);
- var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, change_detection_stream.writer());
+pub fn renderTree(buffer: *std.ArrayList(u8), tree: ast.Tree) Error!void {
+ assert(tree.errors.len == 0); // Cannot render an invalid tree.
+ var auto_indenting_stream = Ais{
+ .indent_delta = indent_delta,
+ .underlying_writer = buffer.writer(),
+ };
+ const ais = &auto_indenting_stream;
- try renderRoot(allocator, &auto_indenting_stream, tree);
+ // Render all the line comments at the beginning of the file.
+ const comment_end_loc = tree.tokens.items(.start)[0];
+ _ = try renderComments(ais, tree, 0, comment_end_loc);
- return change_detection_stream.changeDetected();
-}
+ if (tree.tokens.items(.tag)[0] == .container_doc_comment) {
+ try renderContainerDocComments(ais, tree, 0);
+ }
-fn renderRoot(
- allocator: *mem.Allocator,
- ais: anytype,
- tree: *ast.Tree,
-) (@TypeOf(ais.*).Error || Error)!void {
-
- // render all the line comments at the beginning of the file
- for (tree.token_ids) |token_id, i| {
- if (token_id != .LineComment) break;
- const token_loc = tree.token_locs[i];
- try ais.writer().print("{s}\n", .{mem.trimRight(u8, tree.tokenSliceLoc(token_loc), " ")});
- const next_token = tree.token_locs[i + 1];
- const loc = tree.tokenLocationLoc(token_loc.end, next_token);
- if (loc.line >= 2) {
- try ais.insertNewline();
- }
+ try renderMembers(buffer.allocator, ais, tree, tree.rootDecls());
+
+ if (ais.disabled_offset) |disabled_offset| {
+ try writeFixingWhitespace(ais.underlying_writer, tree.source[disabled_offset..]);
}
+}
- var decl_i: ast.NodeIndex = 0;
- const root_decls = tree.root_node.decls();
+/// Render all members in the given slice, keeping empty lines where appropriate
+fn renderMembers(gpa: *Allocator, ais: *Ais, tree: ast.Tree, members: []const ast.Node.Index) Error!void {
+ if (members.len == 0) return;
+ try renderMember(gpa, ais, tree, members[0], .newline);
+ for (members[1..]) |member| {
+ try renderExtraNewline(ais, tree, member);
+ try renderMember(gpa, ais, tree, member, .newline);
+ }
+}
- if (root_decls.len == 0) return;
- while (true) {
- var decl = root_decls[decl_i];
-
- // This loop does the following:
- //
- // - Iterates through line/doc comment tokens that precedes the current
- // decl.
- // - Figures out the first token index (`copy_start_token_index`) which
- // hasn't been copied to the output stream yet.
- // - Detects `zig fmt: (off|on)` in the line comment tokens, and
- // determines whether the current decl should be reformatted or not.
- //
- var token_index = decl.firstToken();
- var fmt_active = true;
- var found_fmt_directive = false;
-
- var copy_start_token_index = token_index;
-
- while (token_index != 0) {
- token_index -= 1;
- const token_id = tree.token_ids[token_index];
- switch (token_id) {
- .LineComment => {},
- .DocComment => {
- copy_start_token_index = token_index;
- continue;
- },
- else => break,
+fn renderMember(gpa: *Allocator, ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) Error!void {
+ const token_tags = tree.tokens.items(.tag);
+ const main_tokens = tree.nodes.items(.main_token);
+ const datas = tree.nodes.items(.data);
+ try renderDocComments(ais, tree, tree.firstToken(decl));
+ switch (tree.nodes.items(.tag)[decl]) {
+ .fn_decl => {
+ // Some examples:
+ // pub extern "foo" fn ...
+ // export fn ...
+ const fn_proto = datas[decl].lhs;
+ const fn_token = main_tokens[fn_proto];
+ // Go back to the first token we should render here.
+ var i = fn_token;
+ while (i > 0) {
+ i -= 1;
+ switch (token_tags[i]) {
+ .keyword_extern,
+ .keyword_export,
+ .keyword_pub,
+ .string_literal,
+ .keyword_inline,
+ .keyword_noinline,
+ => continue,
+
+ else => {
+ i += 1;
+ break;
+ },
+ }
}
-
- const token_loc = tree.token_locs[token_index];
- if (mem.eql(u8, mem.trim(u8, tree.tokenSliceLoc(token_loc)[2..], " "), "zig fmt: off")) {
- if (!found_fmt_directive) {
- fmt_active = false;
- found_fmt_directive = true;
+ while (i < fn_token) : (i += 1) {
+ if (token_tags[i] == .keyword_inline) {
+ // TODO remove this special case when 0.9.0 is released.
+ // See the commit that introduced this comment for more details.
+ continue;
}
- } else if (mem.eql(u8, mem.trim(u8, tree.tokenSliceLoc(token_loc)[2..], " "), "zig fmt: on")) {
- if (!found_fmt_directive) {
- fmt_active = true;
- found_fmt_directive = true;
+ try renderToken(ais, tree, i, .space);
+ }
+ assert(datas[decl].rhs != 0);
+ try renderExpression(gpa, ais, tree, fn_proto, .space);
+ return renderExpression(gpa, ais, tree, datas[decl].rhs, space);
+ },
+ .fn_proto_simple,
+ .fn_proto_multi,
+ .fn_proto_one,
+ .fn_proto,
+ => {
+ // Extern function prototypes are parsed as these tags.
+ // Go back to the first token we should render here.
+ const fn_token = main_tokens[decl];
+ var i = fn_token;
+ while (i > 0) {
+ i -= 1;
+ switch (token_tags[i]) {
+ .keyword_extern,
+ .keyword_export,
+ .keyword_pub,
+ .string_literal,
+ .keyword_inline,
+ .keyword_noinline,
+ => continue,
+
+ else => {
+ i += 1;
+ break;
+ },
}
}
- }
+ while (i < fn_token) : (i += 1) {
+ try renderToken(ais, tree, i, .space);
+ }
+ try renderExpression(gpa, ais, tree, decl, .none);
+ return renderToken(ais, tree, tree.lastToken(decl) + 1, space); // semicolon
+ },
- if (!fmt_active) {
- // Reformatting is disabled for the current decl and possibly some
- // more decls that follow.
- // Find the next `decl` for which reformatting is re-enabled.
- token_index = decl.firstToken();
-
- while (!fmt_active) {
- decl_i += 1;
- if (decl_i >= root_decls.len) {
- // If there's no next reformatted `decl`, just copy the
- // remaining input tokens and bail out.
- const start = tree.token_locs[copy_start_token_index].start;
- try copyFixingWhitespace(ais, tree.source[start..]);
- return;
- }
- decl = root_decls[decl_i];
- var decl_first_token_index = decl.firstToken();
-
- while (token_index < decl_first_token_index) : (token_index += 1) {
- const token_id = tree.token_ids[token_index];
- switch (token_id) {
- .LineComment => {},
- .Eof => unreachable,
- else => continue,
- }
- const token_loc = tree.token_locs[token_index];
- if (mem.eql(u8, mem.trim(u8, tree.tokenSliceLoc(token_loc)[2..], " "), "zig fmt: on")) {
- fmt_active = true;
- } else if (mem.eql(u8, mem.trim(u8, tree.tokenSliceLoc(token_loc)[2..], " "), "zig fmt: off")) {
- fmt_active = false;
- }
- }
+ .@"usingnamespace" => {
+ const main_token = main_tokens[decl];
+ const expr = datas[decl].lhs;
+ if (main_token > 0 and token_tags[main_token - 1] == .keyword_pub) {
+ try renderToken(ais, tree, main_token - 1, .space); // pub
}
+ try renderToken(ais, tree, main_token, .space); // usingnamespace
+ try renderExpression(gpa, ais, tree, expr, .none);
+ return renderToken(ais, tree, tree.lastToken(expr) + 1, space); // ;
+ },
- // Found the next `decl` for which reformatting is enabled. Copy
- // the input tokens before the `decl` that haven't been copied yet.
- var copy_end_token_index = decl.firstToken();
- token_index = copy_end_token_index;
- while (token_index != 0) {
- token_index -= 1;
- const token_id = tree.token_ids[token_index];
- switch (token_id) {
- .LineComment => {},
- .DocComment => {
- copy_end_token_index = token_index;
- continue;
- },
- else => break,
- }
+ .global_var_decl => return renderVarDecl(gpa, ais, tree, tree.globalVarDecl(decl)),
+ .local_var_decl => return renderVarDecl(gpa, ais, tree, tree.localVarDecl(decl)),
+ .simple_var_decl => return renderVarDecl(gpa, ais, tree, tree.simpleVarDecl(decl)),
+ .aligned_var_decl => return renderVarDecl(gpa, ais, tree, tree.alignedVarDecl(decl)),
+
+ .test_decl => {
+ const test_token = main_tokens[decl];
+ try renderToken(ais, tree, test_token, .space);
+ if (token_tags[test_token + 1] == .string_literal) {
+ try renderToken(ais, tree, test_token + 1, .space);
}
+ try renderExpression(gpa, ais, tree, datas[decl].rhs, space);
+ },
- const start = tree.token_locs[copy_start_token_index].start;
- const end = tree.token_locs[copy_end_token_index].start;
- try copyFixingWhitespace(ais, tree.source[start..end]);
- }
+ .container_field_init => return renderContainerField(gpa, ais, tree, tree.containerFieldInit(decl), space),
+ .container_field_align => return renderContainerField(gpa, ais, tree, tree.containerFieldAlign(decl), space),
+ .container_field => return renderContainerField(gpa, ais, tree, tree.containerField(decl), space),
+ .@"comptime" => return renderExpression(gpa, ais, tree, decl, space),
- try renderTopLevelDecl(allocator, ais, tree, decl);
- decl_i += 1;
- if (decl_i >= root_decls.len) return;
- try renderExtraNewline(tree, ais, root_decls[decl_i]);
+ .root => unreachable,
+ else => unreachable,
}
}
-fn renderExtraNewline(tree: *ast.Tree, ais: anytype, node: *ast.Node) @TypeOf(ais.*).Error!void {
- return renderExtraNewlineToken(tree, ais, node.firstToken());
-}
-
-fn renderExtraNewlineToken(
- tree: *ast.Tree,
- ais: anytype,
- first_token: ast.TokenIndex,
-) @TypeOf(ais.*).Error!void {
- var prev_token = first_token;
- if (prev_token == 0) return;
- var newline_threshold: usize = 2;
- while (tree.token_ids[prev_token - 1] == .DocComment) {
- if (tree.tokenLocation(tree.token_locs[prev_token - 1].end, prev_token).line == 1) {
- newline_threshold += 1;
- }
- prev_token -= 1;
- }
- const prev_token_end = tree.token_locs[prev_token - 1].end;
- const loc = tree.tokenLocation(prev_token_end, first_token);
- if (loc.line >= newline_threshold) {
- try ais.insertNewline();
+/// Render all expressions in the slice, keeping empty lines where appropriate
+fn renderExpressions(gpa: *Allocator, ais: *Ais, tree: ast.Tree, expressions: []const ast.Node.Index, space: Space) Error!void {
+ if (expressions.len == 0) return;
+ try renderExpression(gpa, ais, tree, expressions[0], space);
+ for (expressions[1..]) |expression| {
+ try renderExtraNewline(ais, tree, expression);
+ try renderExpression(gpa, ais, tree, expression, space);
}
}
-fn renderTopLevelDecl(allocator: *mem.Allocator, ais: anytype, tree: *ast.Tree, decl: *ast.Node) (@TypeOf(ais.*).Error || Error)!void {
- try renderContainerDecl(allocator, ais, tree, decl, .Newline);
-}
+fn renderExpression(gpa: *Allocator, ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Space) Error!void {
+ const token_tags = tree.tokens.items(.tag);
+ const main_tokens = tree.nodes.items(.main_token);
+ const node_tags = tree.nodes.items(.tag);
+ const datas = tree.nodes.items(.data);
+ switch (node_tags[node]) {
+ .identifier,
+ .integer_literal,
+ .float_literal,
+ .char_literal,
+ .true_literal,
+ .false_literal,
+ .null_literal,
+ .unreachable_literal,
+ .undefined_literal,
+ .anyframe_literal,
+ .string_literal,
+ => return renderToken(ais, tree, main_tokens[node], space),
+
+ .multiline_string_literal => {
+ var locked_indents = ais.lockOneShotIndent();
+ try ais.maybeInsertNewline();
-fn renderContainerDecl(allocator: *mem.Allocator, ais: anytype, tree: *ast.Tree, decl: *ast.Node, space: Space) (@TypeOf(ais.*).Error || Error)!void {
- switch (decl.tag) {
- .FnProto => {
- const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", decl);
+ var i = datas[node].lhs;
+ while (i <= datas[node].rhs) : (i += 1) try renderToken(ais, tree, i, .newline);
- try renderDocComments(tree, ais, fn_proto, fn_proto.getDocComments());
+ while (locked_indents > 0) : (locked_indents -= 1) ais.popIndent();
- if (fn_proto.getBodyNode()) |body_node| {
- try renderExpression(allocator, ais, tree, decl, .Space);
- try renderExpression(allocator, ais, tree, body_node, space);
- } else {
- try renderExpression(allocator, ais, tree, decl, .None);
- try renderToken(tree, ais, tree.nextToken(decl.lastToken()), space);
+ switch (space) {
+ .none, .space, .newline, .skip => {},
+ .semicolon => if (token_tags[i] == .semicolon) try renderToken(ais, tree, i, .newline),
+ .comma => if (token_tags[i] == .comma) try renderToken(ais, tree, i, .newline),
+ .comma_space => if (token_tags[i] == .comma) try renderToken(ais, tree, i, .space),
}
},
- .Use => {
- const use_decl = @fieldParentPtr(ast.Node.Use, "base", decl);
+ .error_value => {
+ try renderToken(ais, tree, main_tokens[node], .none);
+ try renderToken(ais, tree, main_tokens[node] + 1, .none);
+ return renderToken(ais, tree, main_tokens[node] + 2, space);
+ },
+
+ .@"anytype" => return renderToken(ais, tree, main_tokens[node], space),
- if (use_decl.visib_token) |visib_token| {
- try renderToken(tree, ais, visib_token, .Space); // pub
+ .block_two,
+ .block_two_semicolon,
+ => {
+ const statements = [2]ast.Node.Index{ datas[node].lhs, datas[node].rhs };
+ if (datas[node].lhs == 0) {
+ return renderBlock(gpa, ais, tree, node, statements[0..0], space);
+ } else if (datas[node].rhs == 0) {
+ return renderBlock(gpa, ais, tree, node, statements[0..1], space);
+ } else {
+ return renderBlock(gpa, ais, tree, node, statements[0..2], space);
}
- try renderToken(tree, ais, use_decl.use_token, .Space); // usingnamespace
- try renderExpression(allocator, ais, tree, use_decl.expr, .None);
- try renderToken(tree, ais, use_decl.semicolon_token, space); // ;
+ },
+ .block,
+ .block_semicolon,
+ => {
+ const statements = tree.extra_data[datas[node].lhs..datas[node].rhs];
+ return renderBlock(gpa, ais, tree, node, statements, space);
},
- .VarDecl => {
- const var_decl = @fieldParentPtr(ast.Node.VarDecl, "base", decl);
+ .@"errdefer" => {
+ const defer_token = main_tokens[node];
+ const payload_token = datas[node].lhs;
+ const expr = datas[node].rhs;
- try renderDocComments(tree, ais, var_decl, var_decl.getDocComments());
- try renderVarDecl(allocator, ais, tree, var_decl);
+ try renderToken(ais, tree, defer_token, .space);
+ if (payload_token != 0) {
+ try renderToken(ais, tree, payload_token - 1, .none); // |
+ try renderToken(ais, tree, payload_token, .none); // identifier
+ try renderToken(ais, tree, payload_token + 1, .space); // |
+ }
+ return renderExpression(gpa, ais, tree, expr, space);
},
- .TestDecl => {
- const test_decl = @fieldParentPtr(ast.Node.TestDecl, "base", decl);
+ .@"defer" => {
+ const defer_token = main_tokens[node];
+ const expr = datas[node].rhs;
+ try renderToken(ais, tree, defer_token, .space);
+ return renderExpression(gpa, ais, tree, expr, space);
+ },
+ .@"comptime", .@"nosuspend" => {
+ const comptime_token = main_tokens[node];
+ const block = datas[node].lhs;
+ try renderToken(ais, tree, comptime_token, .space);
+ return renderExpression(gpa, ais, tree, block, space);
+ },
- try renderDocComments(tree, ais, test_decl, test_decl.doc_comments);
- try renderToken(tree, ais, test_decl.test_token, .Space);
- if (test_decl.name) |name|
- try renderExpression(allocator, ais, tree, name, .Space);
- try renderExpression(allocator, ais, tree, test_decl.body_node, space);
+ .@"suspend" => {
+ const suspend_token = main_tokens[node];
+ const body = datas[node].lhs;
+ if (body != 0) {
+ try renderToken(ais, tree, suspend_token, .space);
+ return renderExpression(gpa, ais, tree, body, space);
+ } else {
+ return renderToken(ais, tree, suspend_token, space);
+ }
},
- .ContainerField => {
- const field = @fieldParentPtr(ast.Node.ContainerField, "base", decl);
+ .@"catch" => {
+ const main_token = main_tokens[node];
+ const fallback_first = tree.firstToken(datas[node].rhs);
- try renderDocComments(tree, ais, field, field.doc_comments);
- if (field.comptime_token) |t| {
- try renderToken(tree, ais, t, .Space); // comptime
- }
+ const same_line = tree.tokensOnSameLine(main_token, fallback_first);
+ const after_op_space = if (same_line) Space.space else Space.newline;
- const src_has_trailing_comma = blk: {
- const maybe_comma = tree.nextToken(field.lastToken());
- break :blk tree.token_ids[maybe_comma] == .Comma;
- };
+ try renderExpression(gpa, ais, tree, datas[node].lhs, .space); // target
- // The trailing comma is emitted at the end, but if it's not present
- // we still have to respect the specified `space` parameter
- const last_token_space: Space = if (src_has_trailing_comma) .None else space;
-
- if (field.type_expr == null and field.value_expr == null) {
- try renderToken(tree, ais, field.name_token, last_token_space); // name
- } else if (field.type_expr != null and field.value_expr == null) {
- try renderToken(tree, ais, field.name_token, .None); // name
- try renderToken(tree, ais, tree.nextToken(field.name_token), .Space); // :
-
- if (field.align_expr) |align_value_expr| {
- try renderExpression(allocator, ais, tree, field.type_expr.?, .Space); // type
- const lparen_token = tree.prevToken(align_value_expr.firstToken());
- const align_kw = tree.prevToken(lparen_token);
- const rparen_token = tree.nextToken(align_value_expr.lastToken());
- try renderToken(tree, ais, align_kw, .None); // align
- try renderToken(tree, ais, lparen_token, .None); // (
- try renderExpression(allocator, ais, tree, align_value_expr, .None); // alignment
- try renderToken(tree, ais, rparen_token, last_token_space); // )
- } else {
- try renderExpression(allocator, ais, tree, field.type_expr.?, last_token_space); // type
- }
- } else if (field.type_expr == null and field.value_expr != null) {
- try renderToken(tree, ais, field.name_token, .Space); // name
- try renderToken(tree, ais, tree.nextToken(field.name_token), .Space); // =
- try renderExpression(allocator, ais, tree, field.value_expr.?, last_token_space); // value
+ if (token_tags[fallback_first - 1] == .pipe) {
+ try renderToken(ais, tree, main_token, .space); // catch keyword
+ try renderToken(ais, tree, main_token + 1, .none); // pipe
+ try renderToken(ais, tree, main_token + 2, .none); // payload identifier
+ try renderToken(ais, tree, main_token + 3, after_op_space); // pipe
} else {
- try renderToken(tree, ais, field.name_token, .None); // name
- try renderToken(tree, ais, tree.nextToken(field.name_token), .Space); // :
-
- if (field.align_expr) |align_value_expr| {
- try renderExpression(allocator, ais, tree, field.type_expr.?, .Space); // type
- const lparen_token = tree.prevToken(align_value_expr.firstToken());
- const align_kw = tree.prevToken(lparen_token);
- const rparen_token = tree.nextToken(align_value_expr.lastToken());
- try renderToken(tree, ais, align_kw, .None); // align
- try renderToken(tree, ais, lparen_token, .None); // (
- try renderExpression(allocator, ais, tree, align_value_expr, .None); // alignment
- try renderToken(tree, ais, rparen_token, .Space); // )
- } else {
- try renderExpression(allocator, ais, tree, field.type_expr.?, .Space); // type
- }
- try renderToken(tree, ais, tree.prevToken(field.value_expr.?.firstToken()), .Space); // =
- try renderExpression(allocator, ais, tree, field.value_expr.?, last_token_space); // value
+ assert(token_tags[fallback_first - 1] == .keyword_catch);
+ try renderToken(ais, tree, main_token, after_op_space); // catch keyword
}
- if (src_has_trailing_comma) {
- const comma = tree.nextToken(field.lastToken());
- try renderToken(tree, ais, comma, space);
- }
+ ais.pushIndentOneShot();
+ try renderExpression(gpa, ais, tree, datas[node].rhs, space); // fallback
},
- .Comptime => {
- assert(!decl.requireSemiColon());
- try renderExpression(allocator, ais, tree, decl, space);
- },
+ .field_access => {
+ const main_token = main_tokens[node];
+ const field_access = datas[node];
- .DocComment => {
- const comment = @fieldParentPtr(ast.Node.DocComment, "base", decl);
- const kind = tree.token_ids[comment.first_line];
- try renderToken(tree, ais, comment.first_line, .Newline);
- var tok_i = comment.first_line + 1;
- while (true) : (tok_i += 1) {
- const tok_id = tree.token_ids[tok_i];
- if (tok_id == kind) {
- try renderToken(tree, ais, tok_i, .Newline);
- } else if (tok_id == .LineComment) {
- continue;
- } else {
- break;
- }
+ try renderExpression(gpa, ais, tree, field_access.lhs, .none);
+
+ // Allow a line break between the lhs and the dot if the lhs and rhs
+ // are on different lines.
+ const lhs_last_token = tree.lastToken(field_access.lhs);
+ const same_line = tree.tokensOnSameLine(lhs_last_token, main_token + 1);
+ if (!same_line) {
+ if (!hasComment(tree, lhs_last_token, main_token)) try ais.insertNewline();
+ ais.pushIndentOneShot();
}
+
+ try renderToken(ais, tree, main_token, .none);
+
+ // This check ensures that zag() is indented in the following example:
+ // const x = foo
+ // .bar()
+ // . // comment
+ // zag();
+ if (!same_line and hasComment(tree, main_token, main_token + 1)) {
+ ais.pushIndentOneShot();
+ }
+
+ return renderToken(ais, tree, field_access.rhs, space);
},
- else => unreachable,
- }
-}
-fn renderExpression(
- allocator: *mem.Allocator,
- ais: anytype,
- tree: *ast.Tree,
- base: *ast.Node,
- space: Space,
-) (@TypeOf(ais.*).Error || Error)!void {
- switch (base.tag) {
- .Identifier,
- .IntegerLiteral,
- .FloatLiteral,
- .StringLiteral,
- .CharLiteral,
- .BoolLiteral,
- .NullLiteral,
- .Unreachable,
- .ErrorType,
- .UndefinedLiteral,
+ .error_union,
+ .switch_range,
=> {
- const casted_node = base.cast(ast.Node.OneToken).?;
- return renderToken(tree, ais, casted_node.token, space);
+ const infix = datas[node];
+ try renderExpression(gpa, ais, tree, infix.lhs, .none);
+ try renderToken(ais, tree, main_tokens[node], .none);
+ return renderExpression(gpa, ais, tree, infix.rhs, space);
},
- .AnyType => {
- const any_type = base.castTag(.AnyType).?;
- if (mem.eql(u8, tree.tokenSlice(any_type.token), "var")) {
- // TODO remove in next release cycle
- try ais.writer().writeAll("anytype");
- if (space == .Comma) try ais.writer().writeAll(",\n");
- return;
+ .add,
+ .add_wrap,
+ .array_cat,
+ .array_mult,
+ .assign,
+ .assign_bit_and,
+ .assign_bit_or,
+ .assign_bit_shift_left,
+ .assign_bit_shift_right,
+ .assign_bit_xor,
+ .assign_div,
+ .assign_sub,
+ .assign_sub_wrap,
+ .assign_mod,
+ .assign_add,
+ .assign_add_wrap,
+ .assign_mul,
+ .assign_mul_wrap,
+ .bang_equal,
+ .bit_and,
+ .bit_or,
+ .bit_shift_left,
+ .bit_shift_right,
+ .bit_xor,
+ .bool_and,
+ .bool_or,
+ .div,
+ .equal_equal,
+ .greater_or_equal,
+ .greater_than,
+ .less_or_equal,
+ .less_than,
+ .merge_error_sets,
+ .mod,
+ .mul,
+ .mul_wrap,
+ .sub,
+ .sub_wrap,
+ .@"orelse",
+ => {
+ const infix = datas[node];
+ try renderExpression(gpa, ais, tree, infix.lhs, .space);
+ const op_token = main_tokens[node];
+ if (tree.tokensOnSameLine(op_token, op_token + 1)) {
+ try renderToken(ais, tree, op_token, .space);
+ } else {
+ ais.pushIndent();
+ try renderToken(ais, tree, op_token, .newline);
+ ais.popIndent();
}
- return renderToken(tree, ais, any_type.token, space);
+ ais.pushIndentOneShot();
+ return renderExpression(gpa, ais, tree, infix.rhs, space);
},
- .Block, .LabeledBlock => {
- const block: struct {
- label: ?ast.TokenIndex,
- statements: []*ast.Node,
- lbrace: ast.TokenIndex,
- rbrace: ast.TokenIndex,
- } = b: {
- if (base.castTag(.Block)) |block| {
- break :b .{
- .label = null,
- .statements = block.statements(),
- .lbrace = block.lbrace,
- .rbrace = block.rbrace,
- };
- } else if (base.castTag(.LabeledBlock)) |block| {
- break :b .{
- .label = block.label,
- .statements = block.statements(),
- .lbrace = block.lbrace,
- .rbrace = block.rbrace,
- };
- } else {
- unreachable;
- }
- };
+ .bit_not,
+ .bool_not,
+ .negation,
+ .negation_wrap,
+ .optional_type,
+ .address_of,
+ => {
+ try renderToken(ais, tree, main_tokens[node], .none);
+ return renderExpression(gpa, ais, tree, datas[node].lhs, space);
+ },
- if (block.label) |label| {
- try renderToken(tree, ais, label, Space.None);
- try renderToken(tree, ais, tree.nextToken(label), Space.Space);
- }
+ .@"try",
+ .@"resume",
+ .@"await",
+ => {
+ try renderToken(ais, tree, main_tokens[node], .space);
+ return renderExpression(gpa, ais, tree, datas[node].lhs, space);
+ },
- if (block.statements.len == 0) {
- ais.pushIndentNextLine();
- defer ais.popIndent();
- try renderToken(tree, ais, block.lbrace, Space.None);
- } else {
- ais.pushIndentNextLine();
- defer ais.popIndent();
+ .array_type => return renderArrayType(gpa, ais, tree, tree.arrayType(node), space),
+ .array_type_sentinel => return renderArrayType(gpa, ais, tree, tree.arrayTypeSentinel(node), space),
- try renderToken(tree, ais, block.lbrace, Space.Newline);
+ .ptr_type_aligned => return renderPtrType(gpa, ais, tree, tree.ptrTypeAligned(node), space),
+ .ptr_type_sentinel => return renderPtrType(gpa, ais, tree, tree.ptrTypeSentinel(node), space),
+ .ptr_type => return renderPtrType(gpa, ais, tree, tree.ptrType(node), space),
+ .ptr_type_bit_range => return renderPtrType(gpa, ais, tree, tree.ptrTypeBitRange(node), space),
- for (block.statements) |statement, i| {
- try renderStatement(allocator, ais, tree, statement);
+ .array_init_one, .array_init_one_comma => {
+ var elements: [1]ast.Node.Index = undefined;
+ return renderArrayInit(gpa, ais, tree, tree.arrayInitOne(&elements, node), space);
+ },
+ .array_init_dot_two, .array_init_dot_two_comma => {
+ var elements: [2]ast.Node.Index = undefined;
+ return renderArrayInit(gpa, ais, tree, tree.arrayInitDotTwo(&elements, node), space);
+ },
+ .array_init_dot,
+ .array_init_dot_comma,
+ => return renderArrayInit(gpa, ais, tree, tree.arrayInitDot(node), space),
+ .array_init,
+ .array_init_comma,
+ => return renderArrayInit(gpa, ais, tree, tree.arrayInit(node), space),
+
+ .struct_init_one, .struct_init_one_comma => {
+ var fields: [1]ast.Node.Index = undefined;
+ return renderStructInit(gpa, ais, tree, node, tree.structInitOne(&fields, node), space);
+ },
+ .struct_init_dot_two, .struct_init_dot_two_comma => {
+ var fields: [2]ast.Node.Index = undefined;
+ return renderStructInit(gpa, ais, tree, node, tree.structInitDotTwo(&fields, node), space);
+ },
+ .struct_init_dot,
+ .struct_init_dot_comma,
+ => return renderStructInit(gpa, ais, tree, node, tree.structInitDot(node), space),
+ .struct_init,
+ .struct_init_comma,
+ => return renderStructInit(gpa, ais, tree, node, tree.structInit(node), space),
+
+ .call_one, .call_one_comma, .async_call_one, .async_call_one_comma => {
+ var params: [1]ast.Node.Index = undefined;
+ return renderCall(gpa, ais, tree, tree.callOne(¶ms, node), space);
+ },
- if (i + 1 < block.statements.len) {
- try renderExtraNewline(tree, ais, block.statements[i + 1]);
- }
- }
- }
- return renderToken(tree, ais, block.rbrace, space);
+ .call,
+ .call_comma,
+ .async_call,
+ .async_call_comma,
+ => return renderCall(gpa, ais, tree, tree.callFull(node), space),
+
+ .array_access => {
+ const suffix = datas[node];
+ const lbracket = tree.firstToken(suffix.rhs) - 1;
+ const rbracket = tree.lastToken(suffix.rhs) + 1;
+ const one_line = tree.tokensOnSameLine(lbracket, rbracket);
+ const inner_space = if (one_line) Space.none else Space.newline;
+ try renderExpression(gpa, ais, tree, suffix.lhs, .none);
+ ais.pushIndentNextLine();
+ try renderToken(ais, tree, lbracket, inner_space); // [
+ try renderExpression(gpa, ais, tree, suffix.rhs, inner_space);
+ ais.popIndent();
+ return renderToken(ais, tree, rbracket, space); // ]
},
- .Defer => {
- const defer_node = @fieldParentPtr(ast.Node.Defer, "base", base);
+ .slice_open => return renderSlice(gpa, ais, tree, tree.sliceOpen(node), space),
+ .slice => return renderSlice(gpa, ais, tree, tree.slice(node), space),
+ .slice_sentinel => return renderSlice(gpa, ais, tree, tree.sliceSentinel(node), space),
- try renderToken(tree, ais, defer_node.defer_token, Space.Space);
- if (defer_node.payload) |payload| {
- try renderExpression(allocator, ais, tree, payload, Space.Space);
- }
- return renderExpression(allocator, ais, tree, defer_node.expr, space);
+ .deref => {
+ try renderExpression(gpa, ais, tree, datas[node].lhs, .none);
+ return renderToken(ais, tree, main_tokens[node], space);
},
- .Comptime => {
- const comptime_node = @fieldParentPtr(ast.Node.Comptime, "base", base);
- try renderToken(tree, ais, comptime_node.comptime_token, Space.Space);
- return renderExpression(allocator, ais, tree, comptime_node.expr, space);
+ .unwrap_optional => {
+ try renderExpression(gpa, ais, tree, datas[node].lhs, .none);
+ try renderToken(ais, tree, main_tokens[node], .none);
+ return renderToken(ais, tree, datas[node].rhs, space);
},
- .Nosuspend => {
- const nosuspend_node = @fieldParentPtr(ast.Node.Nosuspend, "base", base);
- if (mem.eql(u8, tree.tokenSlice(nosuspend_node.nosuspend_token), "noasync")) {
- // TODO: remove this
- try ais.writer().writeAll("nosuspend ");
- } else {
- try renderToken(tree, ais, nosuspend_node.nosuspend_token, Space.Space);
+
+ .@"break" => {
+ const main_token = main_tokens[node];
+ const label_token = datas[node].lhs;
+ const target = datas[node].rhs;
+ if (label_token == 0 and target == 0) {
+ try renderToken(ais, tree, main_token, space); // break keyword
+ } else if (label_token == 0 and target != 0) {
+ try renderToken(ais, tree, main_token, .space); // break keyword
+ try renderExpression(gpa, ais, tree, target, space);
+ } else if (label_token != 0 and target == 0) {
+ try renderToken(ais, tree, main_token, .space); // break keyword
+ try renderToken(ais, tree, label_token - 1, .none); // colon
+ try renderToken(ais, tree, label_token, space); // identifier
+ } else if (label_token != 0 and target != 0) {
+ try renderToken(ais, tree, main_token, .space); // break keyword
+ try renderToken(ais, tree, label_token - 1, .none); // colon
+ try renderToken(ais, tree, label_token, .space); // identifier
+ try renderExpression(gpa, ais, tree, target, space);
}
- return renderExpression(allocator, ais, tree, nosuspend_node.expr, space);
},
- .Suspend => {
- const suspend_node = @fieldParentPtr(ast.Node.Suspend, "base", base);
-
- if (suspend_node.body) |body| {
- try renderToken(tree, ais, suspend_node.suspend_token, Space.Space);
- return renderExpression(allocator, ais, tree, body, space);
+ .@"continue" => {
+ const main_token = main_tokens[node];
+ const label = datas[node].lhs;
+ if (label != 0) {
+ try renderToken(ais, tree, main_token, .space); // continue
+ try renderToken(ais, tree, label - 1, .none); // :
+ return renderToken(ais, tree, label, space); // label
} else {
- return renderToken(tree, ais, suspend_node.suspend_token, space);
+ return renderToken(ais, tree, main_token, space); // continue
}
},
- .Catch => {
- const infix_op_node = @fieldParentPtr(ast.Node.Catch, "base", base);
-
- const op_space = Space.Space;
- try renderExpression(allocator, ais, tree, infix_op_node.lhs, op_space);
-
- const after_op_space = blk: {
- const same_line = tree.tokensOnSameLine(infix_op_node.op_token, tree.nextToken(infix_op_node.op_token));
- break :blk if (same_line) op_space else Space.Newline;
- };
-
- try renderToken(tree, ais, infix_op_node.op_token, after_op_space);
-
- if (infix_op_node.payload) |payload| {
- try renderExpression(allocator, ais, tree, payload, Space.Space);
+ .@"return" => {
+ if (datas[node].lhs != 0) {
+ try renderToken(ais, tree, main_tokens[node], .space);
+ try renderExpression(gpa, ais, tree, datas[node].lhs, space);
+ } else {
+ try renderToken(ais, tree, main_tokens[node], space);
}
+ },
+ .grouped_expression => {
+ try renderToken(ais, tree, main_tokens[node], .none); // lparen
ais.pushIndentOneShot();
- return renderExpression(allocator, ais, tree, infix_op_node.rhs, space);
+ try renderExpression(gpa, ais, tree, datas[node].lhs, .none);
+ return renderToken(ais, tree, datas[node].rhs, space); // rparen
},
- .Add,
- .AddWrap,
- .ArrayCat,
- .ArrayMult,
- .Assign,
- .AssignBitAnd,
- .AssignBitOr,
- .AssignBitShiftLeft,
- .AssignBitShiftRight,
- .AssignBitXor,
- .AssignDiv,
- .AssignSub,
- .AssignSubWrap,
- .AssignMod,
- .AssignAdd,
- .AssignAddWrap,
- .AssignMul,
- .AssignMulWrap,
- .BangEqual,
- .BitAnd,
- .BitOr,
- .BitShiftLeft,
- .BitShiftRight,
- .BitXor,
- .BoolAnd,
- .BoolOr,
- .Div,
- .EqualEqual,
- .ErrorUnion,
- .GreaterOrEqual,
- .GreaterThan,
- .LessOrEqual,
- .LessThan,
- .MergeErrorSets,
- .Mod,
- .Mul,
- .MulWrap,
- .Period,
- .Range,
- .Sub,
- .SubWrap,
- .OrElse,
- => {
- const infix_op_node = @fieldParentPtr(ast.Node.SimpleInfixOp, "base", base);
+ .container_decl,
+ .container_decl_trailing,
+ => return renderContainerDecl(gpa, ais, tree, node, tree.containerDecl(node), space),
- const op_space = switch (base.tag) {
- .Period, .ErrorUnion, .Range => Space.None,
- else => Space.Space,
- };
- try renderExpression(allocator, ais, tree, infix_op_node.lhs, op_space);
+ .container_decl_two, .container_decl_two_trailing => {
+ var buffer: [2]ast.Node.Index = undefined;
+ return renderContainerDecl(gpa, ais, tree, node, tree.containerDeclTwo(&buffer, node), space);
+ },
+ .container_decl_arg,
+ .container_decl_arg_trailing,
+ => return renderContainerDecl(gpa, ais, tree, node, tree.containerDeclArg(node), space),
- const after_op_space = blk: {
- const loc = tree.tokenLocation(tree.token_locs[infix_op_node.op_token].end, tree.nextToken(infix_op_node.op_token));
- break :blk if (loc.line == 0) op_space else Space.Newline;
- };
+ .tagged_union,
+ .tagged_union_trailing,
+ => return renderContainerDecl(gpa, ais, tree, node, tree.taggedUnion(node), space),
- {
- ais.pushIndent();
- defer ais.popIndent();
- try renderToken(tree, ais, infix_op_node.op_token, after_op_space);
+ .tagged_union_two, .tagged_union_two_trailing => {
+ var buffer: [2]ast.Node.Index = undefined;
+ return renderContainerDecl(gpa, ais, tree, node, tree.taggedUnionTwo(&buffer, node), space);
+ },
+ .tagged_union_enum_tag,
+ .tagged_union_enum_tag_trailing,
+ => return renderContainerDecl(gpa, ais, tree, node, tree.taggedUnionEnumTag(node), space),
+
+ .error_set_decl => {
+ const error_token = main_tokens[node];
+ const lbrace = error_token + 1;
+ const rbrace = datas[node].rhs;
+
+ try renderToken(ais, tree, error_token, .none);
+
+ if (lbrace + 1 == rbrace) {
+ // There is nothing between the braces so render condensed: `error{}`
+ try renderToken(ais, tree, lbrace, .none);
+ return renderToken(ais, tree, rbrace, space);
+ } else if (lbrace + 2 == rbrace and token_tags[lbrace + 1] == .identifier) {
+ // There is exactly one member and no trailing comma or
+ // comments, so render without surrounding spaces: `error{Foo}`
+ try renderToken(ais, tree, lbrace, .none);
+ try renderToken(ais, tree, lbrace + 1, .none); // identifier
+ return renderToken(ais, tree, rbrace, space);
+ } else if (token_tags[rbrace - 1] == .comma) {
+ // There is a trailing comma so render each member on a new line.
+ ais.pushIndentNextLine();
+ try renderToken(ais, tree, lbrace, .newline);
+ var i = lbrace + 1;
+ while (i < rbrace) : (i += 1) {
+ if (i > lbrace + 1) try renderExtraNewlineToken(ais, tree, i);
+ switch (token_tags[i]) {
+ .doc_comment => try renderToken(ais, tree, i, .newline),
+ .identifier => try renderToken(ais, tree, i, .comma),
+ .comma => {},
+ else => unreachable,
+ }
+ }
+ ais.popIndent();
+ return renderToken(ais, tree, rbrace, space);
+ } else {
+ // There is no trailing comma so render everything on one line.
+ try renderToken(ais, tree, lbrace, .space);
+ var i = lbrace + 1;
+ while (i < rbrace) : (i += 1) {
+ switch (token_tags[i]) {
+ .doc_comment => unreachable, // TODO
+ .identifier => try renderToken(ais, tree, i, .comma_space),
+ .comma => {},
+ else => unreachable,
+ }
+ }
+ return renderToken(ais, tree, rbrace, space);
}
- ais.pushIndentOneShot();
- return renderExpression(allocator, ais, tree, infix_op_node.rhs, space);
},
- .BitNot,
- .BoolNot,
- .Negation,
- .NegationWrap,
- .OptionalType,
- .AddressOf,
- => {
- const casted_node = @fieldParentPtr(ast.Node.SimplePrefixOp, "base", base);
- try renderToken(tree, ais, casted_node.op_token, Space.None);
- return renderExpression(allocator, ais, tree, casted_node.rhs, space);
+ .builtin_call_two, .builtin_call_two_comma => {
+ if (datas[node].lhs == 0) {
+ return renderBuiltinCall(gpa, ais, tree, main_tokens[node], &.{}, space);
+ } else if (datas[node].rhs == 0) {
+ return renderBuiltinCall(gpa, ais, tree, main_tokens[node], &.{datas[node].lhs}, space);
+ } else {
+ return renderBuiltinCall(gpa, ais, tree, main_tokens[node], &.{ datas[node].lhs, datas[node].rhs }, space);
+ }
},
-
- .Try,
- .Resume,
- .Await,
- => {
- const casted_node = @fieldParentPtr(ast.Node.SimplePrefixOp, "base", base);
- try renderToken(tree, ais, casted_node.op_token, Space.Space);
- return renderExpression(allocator, ais, tree, casted_node.rhs, space);
+ .builtin_call, .builtin_call_comma => {
+ const params = tree.extra_data[datas[node].lhs..datas[node].rhs];
+ return renderBuiltinCall(gpa, ais, tree, main_tokens[node], params, space);
},
- .ArrayType => {
- const array_type = @fieldParentPtr(ast.Node.ArrayType, "base", base);
- return renderArrayType(
- allocator,
- ais,
- tree,
- array_type.op_token,
- array_type.rhs,
- array_type.len_expr,
- null,
- space,
- );
+ .fn_proto_simple => {
+ var params: [1]ast.Node.Index = undefined;
+ return renderFnProto(gpa, ais, tree, tree.fnProtoSimple(¶ms, node), space);
},
- .ArrayTypeSentinel => {
- const array_type = @fieldParentPtr(ast.Node.ArrayTypeSentinel, "base", base);
- return renderArrayType(
- allocator,
- ais,
- tree,
- array_type.op_token,
- array_type.rhs,
- array_type.len_expr,
- array_type.sentinel,
- space,
- );
+ .fn_proto_multi => return renderFnProto(gpa, ais, tree, tree.fnProtoMulti(node), space),
+ .fn_proto_one => {
+ var params: [1]ast.Node.Index = undefined;
+ return renderFnProto(gpa, ais, tree, tree.fnProtoOne(¶ms, node), space);
},
-
- .PtrType => {
- const ptr_type = @fieldParentPtr(ast.Node.PtrType, "base", base);
- const op_tok_id = tree.token_ids[ptr_type.op_token];
- switch (op_tok_id) {
- .Asterisk, .AsteriskAsterisk => try ais.writer().writeByte('*'),
- .LBracket => if (tree.token_ids[ptr_type.op_token + 2] == .Identifier)
- try ais.writer().writeAll("[*c")
- else
- try ais.writer().writeAll("[*"),
- else => unreachable,
- }
- if (ptr_type.ptr_info.sentinel) |sentinel| {
- const colon_token = tree.prevToken(sentinel.firstToken());
- try renderToken(tree, ais, colon_token, Space.None); // :
- const sentinel_space = switch (op_tok_id) {
- .LBracket => Space.None,
- else => Space.Space,
- };
- try renderExpression(allocator, ais, tree, sentinel, sentinel_space);
- }
- switch (op_tok_id) {
- .Asterisk, .AsteriskAsterisk => {},
- .LBracket => try ais.writer().writeByte(']'),
- else => unreachable,
- }
- if (ptr_type.ptr_info.allowzero_token) |allowzero_token| {
- try renderToken(tree, ais, allowzero_token, Space.Space); // allowzero
- }
- if (ptr_type.ptr_info.align_info) |align_info| {
- const lparen_token = tree.prevToken(align_info.node.firstToken());
- const align_token = tree.prevToken(lparen_token);
-
- try renderToken(tree, ais, align_token, Space.None); // align
- try renderToken(tree, ais, lparen_token, Space.None); // (
-
- try renderExpression(allocator, ais, tree, align_info.node, Space.None);
-
- if (align_info.bit_range) |bit_range| {
- const colon1 = tree.prevToken(bit_range.start.firstToken());
- const colon2 = tree.prevToken(bit_range.end.firstToken());
-
- try renderToken(tree, ais, colon1, Space.None); // :
- try renderExpression(allocator, ais, tree, bit_range.start, Space.None);
- try renderToken(tree, ais, colon2, Space.None); // :
- try renderExpression(allocator, ais, tree, bit_range.end, Space.None);
-
- const rparen_token = tree.nextToken(bit_range.end.lastToken());
- try renderToken(tree, ais, rparen_token, Space.Space); // )
- } else {
- const rparen_token = tree.nextToken(align_info.node.lastToken());
- try renderToken(tree, ais, rparen_token, Space.Space); // )
- }
- }
- if (ptr_type.ptr_info.const_token) |const_token| {
- try renderToken(tree, ais, const_token, Space.Space); // const
- }
- if (ptr_type.ptr_info.volatile_token) |volatile_token| {
- try renderToken(tree, ais, volatile_token, Space.Space); // volatile
+ .fn_proto => return renderFnProto(gpa, ais, tree, tree.fnProto(node), space),
+
+ .anyframe_type => {
+ const main_token = main_tokens[node];
+ if (datas[node].rhs != 0) {
+ try renderToken(ais, tree, main_token, .none); // anyframe
+ try renderToken(ais, tree, main_token + 1, .none); // ->
+ return renderExpression(gpa, ais, tree, datas[node].rhs, space);
+ } else {
+ return renderToken(ais, tree, main_token, space); // anyframe
}
- return renderExpression(allocator, ais, tree, ptr_type.rhs, space);
},
- .SliceType => {
- const slice_type = @fieldParentPtr(ast.Node.SliceType, "base", base);
- try renderToken(tree, ais, slice_type.op_token, Space.None); // [
- if (slice_type.ptr_info.sentinel) |sentinel| {
- const colon_token = tree.prevToken(sentinel.firstToken());
- try renderToken(tree, ais, colon_token, Space.None); // :
- try renderExpression(allocator, ais, tree, sentinel, Space.None);
- try renderToken(tree, ais, tree.nextToken(sentinel.lastToken()), Space.None); // ]
+ .@"switch",
+ .switch_comma,
+ => {
+ const switch_token = main_tokens[node];
+ const condition = datas[node].lhs;
+ const extra = tree.extraData(datas[node].rhs, ast.Node.SubRange);
+ const cases = tree.extra_data[extra.start..extra.end];
+ const rparen = tree.lastToken(condition) + 1;
+
+ try renderToken(ais, tree, switch_token, .space); // switch keyword
+ try renderToken(ais, tree, switch_token + 1, .none); // lparen
+ try renderExpression(gpa, ais, tree, condition, .none); // condtion expression
+ try renderToken(ais, tree, rparen, .space); // rparen
+
+ ais.pushIndentNextLine();
+ if (cases.len == 0) {
+ try renderToken(ais, tree, rparen + 1, .none); // lbrace
} else {
- try renderToken(tree, ais, tree.nextToken(slice_type.op_token), Space.None); // ]
+ try renderToken(ais, tree, rparen + 1, .newline); // lbrace
+ try renderExpressions(gpa, ais, tree, cases, .comma);
}
+ ais.popIndent();
+ return renderToken(ais, tree, tree.lastToken(node), space); // rbrace
+ },
- if (slice_type.ptr_info.allowzero_token) |allowzero_token| {
- try renderToken(tree, ais, allowzero_token, Space.Space); // allowzero
- }
- if (slice_type.ptr_info.align_info) |align_info| {
- const lparen_token = tree.prevToken(align_info.node.firstToken());
- const align_token = tree.prevToken(lparen_token);
+ .switch_case_one => return renderSwitchCase(gpa, ais, tree, tree.switchCaseOne(node), space),
+ .switch_case => return renderSwitchCase(gpa, ais, tree, tree.switchCase(node), space),
- try renderToken(tree, ais, align_token, Space.None); // align
- try renderToken(tree, ais, lparen_token, Space.None); // (
+ .while_simple => return renderWhile(gpa, ais, tree, tree.whileSimple(node), space),
+ .while_cont => return renderWhile(gpa, ais, tree, tree.whileCont(node), space),
+ .@"while" => return renderWhile(gpa, ais, tree, tree.whileFull(node), space),
+ .for_simple => return renderWhile(gpa, ais, tree, tree.forSimple(node), space),
+ .@"for" => return renderWhile(gpa, ais, tree, tree.forFull(node), space),
- try renderExpression(allocator, ais, tree, align_info.node, Space.None);
+ .if_simple => return renderIf(gpa, ais, tree, tree.ifSimple(node), space),
+ .@"if" => return renderIf(gpa, ais, tree, tree.ifFull(node), space),
- if (align_info.bit_range) |bit_range| {
- const colon1 = tree.prevToken(bit_range.start.firstToken());
- const colon2 = tree.prevToken(bit_range.end.firstToken());
+ .asm_simple => return renderAsm(gpa, ais, tree, tree.asmSimple(node), space),
+ .@"asm" => return renderAsm(gpa, ais, tree, tree.asmFull(node), space),
- try renderToken(tree, ais, colon1, Space.None); // :
- try renderExpression(allocator, ais, tree, bit_range.start, Space.None);
- try renderToken(tree, ais, colon2, Space.None); // :
- try renderExpression(allocator, ais, tree, bit_range.end, Space.None);
+ .enum_literal => {
+ try renderToken(ais, tree, main_tokens[node] - 1, .none); // .
+ return renderToken(ais, tree, main_tokens[node], space); // name
+ },
- const rparen_token = tree.nextToken(bit_range.end.lastToken());
- try renderToken(tree, ais, rparen_token, Space.Space); // )
- } else {
- const rparen_token = tree.nextToken(align_info.node.lastToken());
- try renderToken(tree, ais, rparen_token, Space.Space); // )
- }
+ .fn_decl => unreachable,
+ .container_field => unreachable,
+ .container_field_init => unreachable,
+ .container_field_align => unreachable,
+ .root => unreachable,
+ .global_var_decl => unreachable,
+ .local_var_decl => unreachable,
+ .simple_var_decl => unreachable,
+ .aligned_var_decl => unreachable,
+ .@"usingnamespace" => unreachable,
+ .test_decl => unreachable,
+ .asm_output => unreachable,
+ .asm_input => unreachable,
+ }
+}
+
+fn renderArrayType(
+ gpa: *Allocator,
+ ais: *Ais,
+ tree: ast.Tree,
+ array_type: ast.full.ArrayType,
+ space: Space,
+) Error!void {
+ const rbracket = tree.firstToken(array_type.ast.elem_type) - 1;
+ const one_line = tree.tokensOnSameLine(array_type.ast.lbracket, rbracket);
+ const inner_space = if (one_line) Space.none else Space.newline;
+ ais.pushIndentNextLine();
+ try renderToken(ais, tree, array_type.ast.lbracket, inner_space); // lbracket
+ try renderExpression(gpa, ais, tree, array_type.ast.elem_count, inner_space);
+ if (array_type.ast.sentinel) |sentinel| {
+ try renderToken(ais, tree, tree.firstToken(sentinel) - 1, inner_space); // colon
+ try renderExpression(gpa, ais, tree, sentinel, inner_space);
+ }
+ ais.popIndent();
+ try renderToken(ais, tree, rbracket, .none); // rbracket
+ return renderExpression(gpa, ais, tree, array_type.ast.elem_type, space);
+}
+
+fn renderPtrType(
+ gpa: *Allocator,
+ ais: *Ais,
+ tree: ast.Tree,
+ ptr_type: ast.full.PtrType,
+ space: Space,
+) Error!void {
+ switch (ptr_type.size) {
+ .One => {
+ // Since ** tokens exist and the same token is shared by two
+ // nested pointer types, we check to see if we are the parent
+ // in such a relationship. If so, skip rendering anything for
+ // this pointer type and rely on the child to render our asterisk
+ // as well when it renders the ** token.
+ if (tree.tokens.items(.tag)[ptr_type.ast.main_token] == .asterisk_asterisk and
+ ptr_type.ast.main_token == tree.nodes.items(.main_token)[ptr_type.ast.child_type])
+ {
+ return renderExpression(gpa, ais, tree, ptr_type.ast.child_type, space);
}
- if (slice_type.ptr_info.const_token) |const_token| {
- try renderToken(tree, ais, const_token, Space.Space);
+ try renderToken(ais, tree, ptr_type.ast.main_token, .none); // asterisk
+ },
+ .Many => {
+ if (ptr_type.ast.sentinel == 0) {
+ try renderToken(ais, tree, ptr_type.ast.main_token - 1, .none); // lbracket
+ try renderToken(ais, tree, ptr_type.ast.main_token, .none); // asterisk
+ try renderToken(ais, tree, ptr_type.ast.main_token + 1, .none); // rbracket
+ } else {
+ try renderToken(ais, tree, ptr_type.ast.main_token - 1, .none); // lbracket
+ try renderToken(ais, tree, ptr_type.ast.main_token, .none); // asterisk
+ try renderToken(ais, tree, ptr_type.ast.main_token + 1, .none); // colon
+ try renderExpression(gpa, ais, tree, ptr_type.ast.sentinel, .none);
+ try renderToken(ais, tree, tree.lastToken(ptr_type.ast.sentinel) + 1, .none); // rbracket
}
- if (slice_type.ptr_info.volatile_token) |volatile_token| {
- try renderToken(tree, ais, volatile_token, Space.Space);
+ },
+ .C => {
+ try renderToken(ais, tree, ptr_type.ast.main_token - 1, .none); // lbracket
+ try renderToken(ais, tree, ptr_type.ast.main_token, .none); // asterisk
+ try renderToken(ais, tree, ptr_type.ast.main_token + 1, .none); // c
+ try renderToken(ais, tree, ptr_type.ast.main_token + 2, .none); // rbracket
+ },
+ .Slice => {
+ if (ptr_type.ast.sentinel == 0) {
+ try renderToken(ais, tree, ptr_type.ast.main_token, .none); // lbracket
+ try renderToken(ais, tree, ptr_type.ast.main_token + 1, .none); // rbracket
+ } else {
+ try renderToken(ais, tree, ptr_type.ast.main_token, .none); // lbracket
+ try renderToken(ais, tree, ptr_type.ast.main_token + 1, .none); // colon
+ try renderExpression(gpa, ais, tree, ptr_type.ast.sentinel, .none);
+ try renderToken(ais, tree, tree.lastToken(ptr_type.ast.sentinel) + 1, .none); // rbracket
}
- return renderExpression(allocator, ais, tree, slice_type.rhs, space);
},
+ }
- .ArrayInitializer, .ArrayInitializerDot => {
- var rtoken: ast.TokenIndex = undefined;
- var exprs: []*ast.Node = undefined;
- const lhs: union(enum) { dot: ast.TokenIndex, node: *ast.Node } = switch (base.tag) {
- .ArrayInitializerDot => blk: {
- const casted = @fieldParentPtr(ast.Node.ArrayInitializerDot, "base", base);
- rtoken = casted.rtoken;
- exprs = casted.list();
- break :blk .{ .dot = casted.dot };
- },
- .ArrayInitializer => blk: {
- const casted = @fieldParentPtr(ast.Node.ArrayInitializer, "base", base);
- rtoken = casted.rtoken;
- exprs = casted.list();
- break :blk .{ .node = casted.lhs };
- },
- else => unreachable,
- };
-
- const lbrace = switch (lhs) {
- .dot => |dot| tree.nextToken(dot),
- .node => |node| tree.nextToken(node.lastToken()),
- };
+ if (ptr_type.allowzero_token) |allowzero_token| {
+ try renderToken(ais, tree, allowzero_token, .space);
+ }
- switch (lhs) {
- .dot => |dot| try renderToken(tree, ais, dot, Space.None),
- .node => |node| try renderExpression(allocator, ais, tree, node, Space.None),
- }
+ if (ptr_type.ast.align_node != 0) {
+ const align_first = tree.firstToken(ptr_type.ast.align_node);
+ try renderToken(ais, tree, align_first - 2, .none); // align
+ try renderToken(ais, tree, align_first - 1, .none); // lparen
+ try renderExpression(gpa, ais, tree, ptr_type.ast.align_node, .none);
+ if (ptr_type.ast.bit_range_start != 0) {
+ assert(ptr_type.ast.bit_range_end != 0);
+ try renderToken(ais, tree, tree.firstToken(ptr_type.ast.bit_range_start) - 1, .none); // colon
+ try renderExpression(gpa, ais, tree, ptr_type.ast.bit_range_start, .none);
+ try renderToken(ais, tree, tree.firstToken(ptr_type.ast.bit_range_end) - 1, .none); // colon
+ try renderExpression(gpa, ais, tree, ptr_type.ast.bit_range_end, .none);
+ try renderToken(ais, tree, tree.lastToken(ptr_type.ast.bit_range_end) + 1, .space); // rparen
+ } else {
+ try renderToken(ais, tree, tree.lastToken(ptr_type.ast.align_node) + 1, .space); // rparen
+ }
+ }
- if (exprs.len == 0) {
- try renderToken(tree, ais, lbrace, Space.None);
- return renderToken(tree, ais, rtoken, space);
- }
+ if (ptr_type.const_token) |const_token| {
+ try renderToken(ais, tree, const_token, .space);
+ }
- if (exprs.len == 1 and exprs[0].tag != .MultilineStringLiteral and tree.token_ids[exprs[0].*.lastToken() + 1] == .RBrace) {
- const expr = exprs[0];
+ if (ptr_type.volatile_token) |volatile_token| {
+ try renderToken(ais, tree, volatile_token, .space);
+ }
- try renderToken(tree, ais, lbrace, Space.None);
- try renderExpression(allocator, ais, tree, expr, Space.None);
- return renderToken(tree, ais, rtoken, space);
- }
+ try renderExpression(gpa, ais, tree, ptr_type.ast.child_type, space);
+}
- // scan to find row size
- if (rowSize(tree, exprs, rtoken) != null) {
- {
- ais.pushIndentNextLine();
- defer ais.popIndent();
- try renderToken(tree, ais, lbrace, Space.Newline);
-
- var expr_index: usize = 0;
- while (rowSize(tree, exprs[expr_index..], rtoken)) |row_size| {
- const row_exprs = exprs[expr_index..];
- // A place to store the width of each expression and its column's maximum
- var widths = try allocator.alloc(usize, row_exprs.len + row_size);
- defer allocator.free(widths);
- mem.set(usize, widths, 0);
-
- var expr_newlines = try allocator.alloc(bool, row_exprs.len);
- defer allocator.free(expr_newlines);
- mem.set(bool, expr_newlines, false);
-
- var expr_widths = widths[0 .. widths.len - row_size];
- var column_widths = widths[widths.len - row_size ..];
-
- // Find next row with trailing comment (if any) to end the current section
- var section_end = sec_end: {
- var this_line_first_expr: usize = 0;
- var this_line_size = rowSize(tree, row_exprs, rtoken);
- for (row_exprs) |expr, i| {
- // Ignore comment on first line of this section
- if (i == 0 or tree.tokensOnSameLine(row_exprs[0].firstToken(), expr.lastToken())) continue;
- // Track start of line containing comment
- if (!tree.tokensOnSameLine(row_exprs[this_line_first_expr].firstToken(), expr.lastToken())) {
- this_line_first_expr = i;
- this_line_size = rowSize(tree, row_exprs[this_line_first_expr..], rtoken);
- }
-
- const maybe_comma = expr.lastToken() + 1;
- const maybe_comment = expr.lastToken() + 2;
- if (maybe_comment < tree.token_ids.len) {
- if (tree.token_ids[maybe_comma] == .Comma and
- tree.token_ids[maybe_comment] == .LineComment and
- tree.tokensOnSameLine(expr.lastToken(), maybe_comment))
- {
- var comment_token_loc = tree.token_locs[maybe_comment];
- const comment_is_empty = mem.trimRight(u8, tree.tokenSliceLoc(comment_token_loc), " ").len == 2;
- if (!comment_is_empty) {
- // Found row ending in comment
- break :sec_end i - this_line_size.? + 1;
- }
- }
- }
- }
- break :sec_end row_exprs.len;
- };
- expr_index += section_end;
-
- const section_exprs = row_exprs[0..section_end];
-
- // Null stream for counting the printed length of each expression
- var line_find_stream = std.io.findByteWriter('\n', std.io.null_writer);
- var counting_stream = std.io.countingWriter(line_find_stream.writer());
- var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, counting_stream.writer());
-
- // Calculate size of columns in current section
- var column_counter: usize = 0;
- var single_line = true;
- for (section_exprs) |expr, i| {
- if (i + 1 < section_exprs.len) {
- counting_stream.bytes_written = 0;
- line_find_stream.byte_found = false;
- try renderExpression(allocator, &auto_indenting_stream, tree, expr, Space.None);
- const width = @intCast(usize, counting_stream.bytes_written);
- expr_widths[i] = width;
- expr_newlines[i] = line_find_stream.byte_found;
-
- if (!line_find_stream.byte_found) {
- const column = column_counter % row_size;
- column_widths[column] = std.math.max(column_widths[column], width);
-
- const expr_last_token = expr.*.lastToken() + 1;
- const next_expr = section_exprs[i + 1];
- const loc = tree.tokenLocation(tree.token_locs[expr_last_token].start, next_expr.*.firstToken());
-
- column_counter += 1;
-
- if (loc.line != 0) single_line = false;
- } else {
- single_line = false;
- column_counter = 0;
- }
- } else {
- counting_stream.bytes_written = 0;
- try renderExpression(allocator, &auto_indenting_stream, tree, expr, Space.None);
- const width = @intCast(usize, counting_stream.bytes_written);
- expr_widths[i] = width;
- expr_newlines[i] = line_find_stream.byte_found;
-
- if (!line_find_stream.byte_found) {
- const column = column_counter % row_size;
- column_widths[column] = std.math.max(column_widths[column], width);
- }
- break;
- }
- }
+fn renderSlice(
+ gpa: *Allocator,
+ ais: *Ais,
+ tree: ast.Tree,
+ slice: ast.full.Slice,
+ space: Space,
+) Error!void {
+ const node_tags = tree.nodes.items(.tag);
+ const after_start_space_bool = nodeCausesSliceOpSpace(node_tags[slice.ast.start]) or
+ if (slice.ast.end != 0) nodeCausesSliceOpSpace(node_tags[slice.ast.end]) else false;
+ const after_start_space = if (after_start_space_bool) Space.space else Space.none;
+ const after_dots_space = if (slice.ast.end != 0) after_start_space else Space.none;
+
+ try renderExpression(gpa, ais, tree, slice.ast.sliced, .none);
+ try renderToken(ais, tree, slice.ast.lbracket, .none); // lbracket
+
+ const start_last = tree.lastToken(slice.ast.start);
+ try renderExpression(gpa, ais, tree, slice.ast.start, after_start_space);
+ try renderToken(ais, tree, start_last + 1, after_dots_space); // ellipsis2 ("..")
+ if (slice.ast.end == 0) {
+ return renderToken(ais, tree, start_last + 2, space); // rbracket
+ }
- // Render exprs in current section
- column_counter = 0;
- var last_col_index: usize = row_size - 1;
- for (section_exprs) |expr, i| {
- if (i + 1 < section_exprs.len) {
- const next_expr = section_exprs[i + 1];
- try renderExpression(allocator, ais, tree, expr, Space.None);
-
- const comma = tree.nextToken(expr.*.lastToken());
-
- if (column_counter != last_col_index) {
- if (!expr_newlines[i] and !expr_newlines[i + 1]) {
- // Neither the current or next expression is multiline
- try renderToken(tree, ais, comma, Space.Space); // ,
- assert(column_widths[column_counter % row_size] >= expr_widths[i]);
- const padding = column_widths[column_counter % row_size] - expr_widths[i];
- try ais.writer().writeByteNTimes(' ', padding);
-
- column_counter += 1;
- continue;
- }
- }
- if (single_line and row_size != 1) {
- try renderToken(tree, ais, comma, Space.Space); // ,
- continue;
- }
-
- column_counter = 0;
- try renderToken(tree, ais, comma, Space.Newline); // ,
- try renderExtraNewline(tree, ais, next_expr);
- } else {
- const maybe_comma = tree.nextToken(expr.*.lastToken());
- if (tree.token_ids[maybe_comma] == .Comma) {
- try renderExpression(allocator, ais, tree, expr, Space.None); // ,
- try renderToken(tree, ais, maybe_comma, Space.Newline); // ,
- } else {
- try renderExpression(allocator, ais, tree, expr, Space.Comma); // ,
- }
- }
- }
+ const end_last = tree.lastToken(slice.ast.end);
+ const after_end_space = if (slice.ast.sentinel != 0) Space.space else Space.none;
+ try renderExpression(gpa, ais, tree, slice.ast.end, after_end_space);
+ if (slice.ast.sentinel == 0) {
+ return renderToken(ais, tree, end_last + 1, space); // rbracket
+ }
- if (expr_index == exprs.len) {
- break;
- }
- }
- }
+ try renderToken(ais, tree, end_last + 1, .none); // colon
+ try renderExpression(gpa, ais, tree, slice.ast.sentinel, .none);
+ try renderToken(ais, tree, tree.lastToken(slice.ast.sentinel) + 1, space); // rbracket
+}
- return renderToken(tree, ais, rtoken, space);
- }
+fn renderAsmOutput(
+ gpa: *Allocator,
+ ais: *Ais,
+ tree: ast.Tree,
+ asm_output: ast.Node.Index,
+ space: Space,
+) Error!void {
+ const token_tags = tree.tokens.items(.tag);
+ const node_tags = tree.nodes.items(.tag);
+ const main_tokens = tree.nodes.items(.main_token);
+ const datas = tree.nodes.items(.data);
+ assert(node_tags[asm_output] == .asm_output);
+ const symbolic_name = main_tokens[asm_output];
+
+ try renderToken(ais, tree, symbolic_name - 1, .none); // lbracket
+ try renderToken(ais, tree, symbolic_name, .none); // ident
+ try renderToken(ais, tree, symbolic_name + 1, .space); // rbracket
+ try renderToken(ais, tree, symbolic_name + 2, .space); // "constraint"
+ try renderToken(ais, tree, symbolic_name + 3, .none); // lparen
+
+ if (token_tags[symbolic_name + 4] == .arrow) {
+ try renderToken(ais, tree, symbolic_name + 4, .space); // ->
+ try renderExpression(gpa, ais, tree, datas[asm_output].lhs, Space.none);
+ return renderToken(ais, tree, datas[asm_output].rhs, space); // rparen
+ } else {
+ try renderToken(ais, tree, symbolic_name + 4, .none); // ident
+ return renderToken(ais, tree, symbolic_name + 5, space); // rparen
+ }
+}
- // Single line
- try renderToken(tree, ais, lbrace, Space.Space);
- for (exprs) |expr, i| {
- if (i + 1 < exprs.len) {
- const next_expr = exprs[i + 1];
- try renderExpression(allocator, ais, tree, expr, Space.None);
- const comma = tree.nextToken(expr.*.lastToken());
- try renderToken(tree, ais, comma, Space.Space); // ,
- } else {
- try renderExpression(allocator, ais, tree, expr, Space.Space);
- }
- }
+fn renderAsmInput(
+ gpa: *Allocator,
+ ais: *Ais,
+ tree: ast.Tree,
+ asm_input: ast.Node.Index,
+ space: Space,
+) Error!void {
+ const node_tags = tree.nodes.items(.tag);
+ const main_tokens = tree.nodes.items(.main_token);
+ const datas = tree.nodes.items(.data);
+ assert(node_tags[asm_input] == .asm_input);
+ const symbolic_name = main_tokens[asm_input];
+
+ try renderToken(ais, tree, symbolic_name - 1, .none); // lbracket
+ try renderToken(ais, tree, symbolic_name, .none); // ident
+ try renderToken(ais, tree, symbolic_name + 1, .space); // rbracket
+ try renderToken(ais, tree, symbolic_name + 2, .space); // "constraint"
+ try renderToken(ais, tree, symbolic_name + 3, .none); // lparen
+ try renderExpression(gpa, ais, tree, datas[asm_input].lhs, Space.none);
+ return renderToken(ais, tree, datas[asm_input].rhs, space); // rparen
+}
- return renderToken(tree, ais, rtoken, space);
- },
+fn renderVarDecl(gpa: *Allocator, ais: *Ais, tree: ast.Tree, var_decl: ast.full.VarDecl) Error!void {
+ if (var_decl.visib_token) |visib_token| {
+ try renderToken(ais, tree, visib_token, Space.space); // pub
+ }
- .StructInitializer, .StructInitializerDot => {
- var rtoken: ast.TokenIndex = undefined;
- var field_inits: []*ast.Node = undefined;
- const lhs: union(enum) { dot: ast.TokenIndex, node: *ast.Node } = switch (base.tag) {
- .StructInitializerDot => blk: {
- const casted = @fieldParentPtr(ast.Node.StructInitializerDot, "base", base);
- rtoken = casted.rtoken;
- field_inits = casted.list();
- break :blk .{ .dot = casted.dot };
- },
- .StructInitializer => blk: {
- const casted = @fieldParentPtr(ast.Node.StructInitializer, "base", base);
- rtoken = casted.rtoken;
- field_inits = casted.list();
- break :blk .{ .node = casted.lhs };
- },
- else => unreachable,
- };
+ if (var_decl.extern_export_token) |extern_export_token| {
+ try renderToken(ais, tree, extern_export_token, Space.space); // extern
- const lbrace = switch (lhs) {
- .dot => |dot| tree.nextToken(dot),
- .node => |node| tree.nextToken(node.lastToken()),
- };
+ if (var_decl.lib_name) |lib_name| {
+ try renderToken(ais, tree, lib_name, Space.space); // "lib"
+ }
+ }
- if (field_inits.len == 0) {
- switch (lhs) {
- .dot => |dot| try renderToken(tree, ais, dot, Space.None),
- .node => |node| try renderExpression(allocator, ais, tree, node, Space.None),
- }
+ if (var_decl.threadlocal_token) |thread_local_token| {
+ try renderToken(ais, tree, thread_local_token, Space.space); // threadlocal
+ }
- {
- ais.pushIndentNextLine();
- defer ais.popIndent();
- try renderToken(tree, ais, lbrace, Space.None);
- }
+ if (var_decl.comptime_token) |comptime_token| {
+ try renderToken(ais, tree, comptime_token, Space.space); // comptime
+ }
- return renderToken(tree, ais, rtoken, space);
- }
+ try renderToken(ais, tree, var_decl.ast.mut_token, .space); // var
- const src_has_trailing_comma = blk: {
- const maybe_comma = tree.prevToken(rtoken);
- break :blk tree.token_ids[maybe_comma] == .Comma;
- };
+ const name_space = if (var_decl.ast.type_node == 0 and
+ (var_decl.ast.align_node != 0 or
+ var_decl.ast.section_node != 0 or
+ var_decl.ast.init_node != 0))
+ Space.space
+ else
+ Space.none;
+ try renderToken(ais, tree, var_decl.ast.mut_token + 1, name_space); // name
- const src_same_line = blk: {
- const loc = tree.tokenLocation(tree.token_locs[lbrace].end, rtoken);
- break :blk loc.line == 0;
- };
+ if (var_decl.ast.type_node != 0) {
+ try renderToken(ais, tree, var_decl.ast.mut_token + 2, Space.space); // :
+ if (var_decl.ast.align_node != 0 or var_decl.ast.section_node != 0 or
+ var_decl.ast.init_node != 0)
+ {
+ try renderExpression(gpa, ais, tree, var_decl.ast.type_node, .space);
+ } else {
+ try renderExpression(gpa, ais, tree, var_decl.ast.type_node, .none);
+ const semicolon = tree.lastToken(var_decl.ast.type_node) + 1;
+ return renderToken(ais, tree, semicolon, Space.newline); // ;
+ }
+ }
- const expr_outputs_one_line = blk: {
- // render field expressions until a LF is found
- for (field_inits) |field_init| {
- var find_stream = std.io.findByteWriter('\n', std.io.null_writer);
- var auto_indenting_stream = std.io.autoIndentingStream(indent_delta, find_stream.writer());
+ if (var_decl.ast.align_node != 0) {
+ const lparen = tree.firstToken(var_decl.ast.align_node) - 1;
+ const align_kw = lparen - 1;
+ const rparen = tree.lastToken(var_decl.ast.align_node) + 1;
+ try renderToken(ais, tree, align_kw, Space.none); // align
+ try renderToken(ais, tree, lparen, Space.none); // (
+ try renderExpression(gpa, ais, tree, var_decl.ast.align_node, Space.none);
+ if (var_decl.ast.section_node != 0 or var_decl.ast.init_node != 0) {
+ try renderToken(ais, tree, rparen, .space); // )
+ } else {
+ try renderToken(ais, tree, rparen, .none); // )
+ return renderToken(ais, tree, rparen + 1, Space.newline); // ;
+ }
+ }
- try renderExpression(allocator, &auto_indenting_stream, tree, field_init, Space.None);
- if (find_stream.byte_found) break :blk false;
- }
- break :blk true;
- };
+ if (var_decl.ast.section_node != 0) {
+ const lparen = tree.firstToken(var_decl.ast.section_node) - 1;
+ const section_kw = lparen - 1;
+ const rparen = tree.lastToken(var_decl.ast.section_node) + 1;
+ try renderToken(ais, tree, section_kw, Space.none); // linksection
+ try renderToken(ais, tree, lparen, Space.none); // (
+ try renderExpression(gpa, ais, tree, var_decl.ast.section_node, Space.none);
+ if (var_decl.ast.init_node != 0) {
+ try renderToken(ais, tree, rparen, .space); // )
+ } else {
+ try renderToken(ais, tree, rparen, .none); // )
+ return renderToken(ais, tree, rparen + 1, Space.newline); // ;
+ }
+ }
- if (field_inits.len == 1) blk: {
- if (field_inits[0].cast(ast.Node.FieldInitializer)) |field_init| {
- switch (field_init.expr.tag) {
- .StructInitializer,
- .StructInitializerDot,
- => break :blk,
- else => {},
- }
- }
+ assert(var_decl.ast.init_node != 0);
+ const eq_token = tree.firstToken(var_decl.ast.init_node) - 1;
+ const eq_space: Space = if (tree.tokensOnSameLine(eq_token, eq_token + 1)) .space else .newline;
+ {
+ ais.pushIndent();
+ try renderToken(ais, tree, eq_token, eq_space); // =
+ ais.popIndent();
+ }
+ ais.pushIndentOneShot();
+ try renderExpression(gpa, ais, tree, var_decl.ast.init_node, .semicolon);
+}
- // if the expression outputs to multiline, make this struct multiline
- if (!expr_outputs_one_line or src_has_trailing_comma) {
- break :blk;
- }
+fn renderIf(gpa: *Allocator, ais: *Ais, tree: ast.Tree, if_node: ast.full.If, space: Space) Error!void {
+ return renderWhile(gpa, ais, tree, .{
+ .ast = .{
+ .while_token = if_node.ast.if_token,
+ .cond_expr = if_node.ast.cond_expr,
+ .cont_expr = 0,
+ .then_expr = if_node.ast.then_expr,
+ .else_expr = if_node.ast.else_expr,
+ },
+ .inline_token = null,
+ .label_token = null,
+ .payload_token = if_node.payload_token,
+ .else_token = if_node.else_token,
+ .error_token = if_node.error_token,
+ }, space);
+}
- switch (lhs) {
- .dot => |dot| try renderToken(tree, ais, dot, Space.None),
- .node => |node| try renderExpression(allocator, ais, tree, node, Space.None),
- }
- try renderToken(tree, ais, lbrace, Space.Space);
- try renderExpression(allocator, ais, tree, field_inits[0], Space.Space);
- return renderToken(tree, ais, rtoken, space);
- }
+/// Note that this function is additionally used to render if and for expressions, with
+/// respective values set to null.
+fn renderWhile(gpa: *Allocator, ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Space) Error!void {
+ const node_tags = tree.nodes.items(.tag);
+ const token_tags = tree.tokens.items(.tag);
- if (!src_has_trailing_comma and src_same_line and expr_outputs_one_line) {
- // render all on one line, no trailing comma
- switch (lhs) {
- .dot => |dot| try renderToken(tree, ais, dot, Space.None),
- .node => |node| try renderExpression(allocator, ais, tree, node, Space.None),
- }
- try renderToken(tree, ais, lbrace, Space.Space);
+ if (while_node.label_token) |label| {
+ try renderToken(ais, tree, label, .none); // label
+ try renderToken(ais, tree, label + 1, .space); // :
+ }
- for (field_inits) |field_init, i| {
- if (i + 1 < field_inits.len) {
- try renderExpression(allocator, ais, tree, field_init, Space.None);
+ if (while_node.inline_token) |inline_token| {
+ try renderToken(ais, tree, inline_token, .space); // inline
+ }
- const comma = tree.nextToken(field_init.lastToken());
- try renderToken(tree, ais, comma, Space.Space);
- } else {
- try renderExpression(allocator, ais, tree, field_init, Space.Space);
- }
+ try renderToken(ais, tree, while_node.ast.while_token, .space); // if
+ try renderToken(ais, tree, while_node.ast.while_token + 1, .none); // lparen
+ try renderExpression(gpa, ais, tree, while_node.ast.cond_expr, .none); // condition
+
+ const then_tag = node_tags[while_node.ast.then_expr];
+ if (nodeIsBlock(then_tag) and !nodeIsIf(then_tag)) {
+ if (while_node.payload_token) |payload_token| {
+ try renderToken(ais, tree, payload_token - 2, .space); // rparen
+ try renderToken(ais, tree, payload_token - 1, .none); // |
+ const ident = blk: {
+ if (token_tags[payload_token] == .asterisk) {
+ try renderToken(ais, tree, payload_token, .none); // *
+ break :blk payload_token + 1;
+ } else {
+ break :blk payload_token;
}
-
- return renderToken(tree, ais, rtoken, space);
- }
-
- {
- switch (lhs) {
- .dot => |dot| try renderToken(tree, ais, dot, Space.None),
- .node => |node| try renderExpression(allocator, ais, tree, node, Space.None),
- }
-
- ais.pushIndentNextLine();
- defer ais.popIndent();
-
- try renderToken(tree, ais, lbrace, Space.Newline);
-
- for (field_inits) |field_init, i| {
- if (i + 1 < field_inits.len) {
- const next_field_init = field_inits[i + 1];
- try renderExpression(allocator, ais, tree, field_init, Space.None);
-
- const comma = tree.nextToken(field_init.lastToken());
- try renderToken(tree, ais, comma, Space.Newline);
-
- try renderExtraNewline(tree, ais, next_field_init);
- } else {
- try renderExpression(allocator, ais, tree, field_init, Space.Comma);
- }
- }
- }
-
- return renderToken(tree, ais, rtoken, space);
- },
-
- .Call => {
- const call = @fieldParentPtr(ast.Node.Call, "base", base);
- if (call.async_token) |async_token| {
- try renderToken(tree, ais, async_token, Space.Space);
- }
-
- try renderExpression(allocator, ais, tree, call.lhs, Space.None);
-
- const lparen = tree.nextToken(call.lhs.lastToken());
-
- if (call.params_len == 0) {
- try renderToken(tree, ais, lparen, Space.None);
- return renderToken(tree, ais, call.rtoken, space);
- }
-
- const src_has_trailing_comma = blk: {
- const maybe_comma = tree.prevToken(call.rtoken);
- break :blk tree.token_ids[maybe_comma] == .Comma;
};
-
- if (src_has_trailing_comma) {
- {
- ais.pushIndent();
- defer ais.popIndent();
-
- try renderToken(tree, ais, lparen, Space.Newline); // (
- const params = call.params();
- for (params) |param_node, i| {
- if (i + 1 < params.len) {
- const next_node = params[i + 1];
- try renderExpression(allocator, ais, tree, param_node, Space.None);
-
- // Unindent the comma for multiline string literals
- const maybe_multiline_string = param_node.firstToken();
- const is_multiline_string = tree.token_ids[maybe_multiline_string] == .MultilineStringLiteralLine;
- if (is_multiline_string) ais.popIndent();
- defer if (is_multiline_string) ais.pushIndent();
-
- const comma = tree.nextToken(param_node.lastToken());
- try renderToken(tree, ais, comma, Space.Newline); // ,
- try renderExtraNewline(tree, ais, next_node);
- } else {
- try renderExpression(allocator, ais, tree, param_node, Space.Comma);
- }
- }
- }
- return renderToken(tree, ais, call.rtoken, space);
- }
-
- try renderToken(tree, ais, lparen, Space.None); // (
-
- const params = call.params();
- for (params) |param_node, i| {
- const maybe_comment = param_node.firstToken() - 1;
- const maybe_multiline_string = param_node.firstToken();
- if (tree.token_ids[maybe_multiline_string] == .MultilineStringLiteralLine or tree.token_ids[maybe_comment] == .LineComment) {
- ais.pushIndentOneShot();
- }
-
- try renderExpression(allocator, ais, tree, param_node, Space.None);
-
- if (i + 1 < params.len) {
- const comma = tree.nextToken(param_node.lastToken());
- try renderToken(tree, ais, comma, Space.Space);
- }
- }
- return renderToken(tree, ais, call.rtoken, space); // )
- },
-
- .ArrayAccess => {
- const suffix_op = base.castTag(.ArrayAccess).?;
-
- const lbracket = tree.nextToken(suffix_op.lhs.lastToken());
- const rbracket = tree.nextToken(suffix_op.index_expr.lastToken());
-
- try renderExpression(allocator, ais, tree, suffix_op.lhs, Space.None);
- try renderToken(tree, ais, lbracket, Space.None); // [
-
- const starts_with_comment = tree.token_ids[lbracket + 1] == .LineComment;
- const ends_with_comment = tree.token_ids[rbracket - 1] == .LineComment;
- {
- const new_space = if (ends_with_comment) Space.Newline else Space.None;
-
- ais.pushIndent();
- defer ais.popIndent();
- try renderExpression(allocator, ais, tree, suffix_op.index_expr, new_space);
- }
- if (starts_with_comment) try ais.maybeInsertNewline();
- return renderToken(tree, ais, rbracket, space); // ]
- },
-
- .Slice => {
- const suffix_op = base.castTag(.Slice).?;
- try renderExpression(allocator, ais, tree, suffix_op.lhs, Space.None);
-
- const lbracket = tree.prevToken(suffix_op.start.firstToken());
- const dotdot = tree.nextToken(suffix_op.start.lastToken());
-
- const after_start_space_bool = nodeCausesSliceOpSpace(suffix_op.start) or
- (if (suffix_op.end) |end| nodeCausesSliceOpSpace(end) else false);
- const after_start_space = if (after_start_space_bool) Space.Space else Space.None;
- const after_op_space = if (suffix_op.end != null) after_start_space else Space.None;
-
- try renderToken(tree, ais, lbracket, Space.None); // [
- try renderExpression(allocator, ais, tree, suffix_op.start, after_start_space);
- try renderToken(tree, ais, dotdot, after_op_space); // ..
- if (suffix_op.end) |end| {
- const after_end_space = if (suffix_op.sentinel != null) Space.Space else Space.None;
- try renderExpression(allocator, ais, tree, end, after_end_space);
- }
- if (suffix_op.sentinel) |sentinel| {
- const colon = tree.prevToken(sentinel.firstToken());
- try renderToken(tree, ais, colon, Space.None); // :
- try renderExpression(allocator, ais, tree, sentinel, Space.None);
- }
- return renderToken(tree, ais, suffix_op.rtoken, space); // ]
- },
-
- .Deref => {
- const suffix_op = base.castTag(.Deref).?;
-
- try renderExpression(allocator, ais, tree, suffix_op.lhs, Space.None);
- return renderToken(tree, ais, suffix_op.rtoken, space); // .*
- },
- .UnwrapOptional => {
- const suffix_op = base.castTag(.UnwrapOptional).?;
-
- try renderExpression(allocator, ais, tree, suffix_op.lhs, Space.None);
- try renderToken(tree, ais, tree.prevToken(suffix_op.rtoken), Space.None); // .
- return renderToken(tree, ais, suffix_op.rtoken, space); // ?
- },
-
- .Break => {
- const flow_expr = base.castTag(.Break).?;
- const maybe_rhs = flow_expr.getRHS();
- const maybe_label = flow_expr.getLabel();
-
- if (maybe_label == null and maybe_rhs == null) {
- return renderToken(tree, ais, flow_expr.ltoken, space); // break
- }
-
- try renderToken(tree, ais, flow_expr.ltoken, Space.Space); // break
- if (maybe_label) |label| {
- const colon = tree.nextToken(flow_expr.ltoken);
- try renderToken(tree, ais, colon, Space.None); // :
-
- if (maybe_rhs == null) {
- return renderToken(tree, ais, label, space); // label
- }
- try renderToken(tree, ais, label, Space.Space); // label
- }
- return renderExpression(allocator, ais, tree, maybe_rhs.?, space);
- },
-
- .Continue => {
- const flow_expr = base.castTag(.Continue).?;
- if (flow_expr.getLabel()) |label| {
- try renderToken(tree, ais, flow_expr.ltoken, Space.Space); // continue
- const colon = tree.nextToken(flow_expr.ltoken);
- try renderToken(tree, ais, colon, Space.None); // :
- return renderToken(tree, ais, label, space); // label
- } else {
- return renderToken(tree, ais, flow_expr.ltoken, space); // continue
- }
- },
-
- .Return => {
- const flow_expr = base.castTag(.Return).?;
- if (flow_expr.getRHS()) |rhs| {
- try renderToken(tree, ais, flow_expr.ltoken, Space.Space);
- return renderExpression(allocator, ais, tree, rhs, space);
- } else {
- return renderToken(tree, ais, flow_expr.ltoken, space);
- }
- },
-
- .Payload => {
- const payload = @fieldParentPtr(ast.Node.Payload, "base", base);
-
- try renderToken(tree, ais, payload.lpipe, Space.None);
- try renderExpression(allocator, ais, tree, payload.error_symbol, Space.None);
- return renderToken(tree, ais, payload.rpipe, space);
- },
-
- .PointerPayload => {
- const payload = @fieldParentPtr(ast.Node.PointerPayload, "base", base);
-
- try renderToken(tree, ais, payload.lpipe, Space.None);
- if (payload.ptr_token) |ptr_token| {
- try renderToken(tree, ais, ptr_token, Space.None);
- }
- try renderExpression(allocator, ais, tree, payload.value_symbol, Space.None);
- return renderToken(tree, ais, payload.rpipe, space);
- },
-
- .PointerIndexPayload => {
- const payload = @fieldParentPtr(ast.Node.PointerIndexPayload, "base", base);
-
- try renderToken(tree, ais, payload.lpipe, Space.None);
- if (payload.ptr_token) |ptr_token| {
- try renderToken(tree, ais, ptr_token, Space.None);
- }
- try renderExpression(allocator, ais, tree, payload.value_symbol, Space.None);
-
- if (payload.index_symbol) |index_symbol| {
- const comma = tree.nextToken(payload.value_symbol.lastToken());
-
- try renderToken(tree, ais, comma, Space.Space);
- try renderExpression(allocator, ais, tree, index_symbol, Space.None);
- }
-
- return renderToken(tree, ais, payload.rpipe, space);
- },
-
- .GroupedExpression => {
- const grouped_expr = @fieldParentPtr(ast.Node.GroupedExpression, "base", base);
-
- try renderToken(tree, ais, grouped_expr.lparen, Space.None);
- {
- ais.pushIndentOneShot();
- try renderExpression(allocator, ais, tree, grouped_expr.expr, Space.None);
- }
- return renderToken(tree, ais, grouped_expr.rparen, space);
- },
-
- .FieldInitializer => {
- const field_init = @fieldParentPtr(ast.Node.FieldInitializer, "base", base);
-
- try renderToken(tree, ais, field_init.period_token, Space.None); // .
- try renderToken(tree, ais, field_init.name_token, Space.Space); // name
- try renderToken(tree, ais, tree.nextToken(field_init.name_token), Space.Space); // =
- return renderExpression(allocator, ais, tree, field_init.expr, space);
- },
-
- .ContainerDecl => {
- const container_decl = @fieldParentPtr(ast.Node.ContainerDecl, "base", base);
-
- if (container_decl.layout_token) |layout_token| {
- try renderToken(tree, ais, layout_token, Space.Space);
- }
-
- switch (container_decl.init_arg_expr) {
- .None => {
- try renderToken(tree, ais, container_decl.kind_token, Space.Space); // union
- },
- .Enum => |enum_tag_type| {
- try renderToken(tree, ais, container_decl.kind_token, Space.None); // union
-
- const lparen = tree.nextToken(container_decl.kind_token);
- const enum_token = tree.nextToken(lparen);
-
- try renderToken(tree, ais, lparen, Space.None); // (
- try renderToken(tree, ais, enum_token, Space.None); // enum
-
- if (enum_tag_type) |expr| {
- try renderToken(tree, ais, tree.nextToken(enum_token), Space.None); // (
- try renderExpression(allocator, ais, tree, expr, Space.None);
-
- const rparen = tree.nextToken(expr.lastToken());
- try renderToken(tree, ais, rparen, Space.None); // )
- try renderToken(tree, ais, tree.nextToken(rparen), Space.Space); // )
- } else {
- try renderToken(tree, ais, tree.nextToken(enum_token), Space.Space); // )
- }
- },
- .Type => |type_expr| {
- try renderToken(tree, ais, container_decl.kind_token, Space.None); // union
-
- const lparen = tree.nextToken(container_decl.kind_token);
- const rparen = tree.nextToken(type_expr.lastToken());
-
- try renderToken(tree, ais, lparen, Space.None); // (
- try renderExpression(allocator, ais, tree, type_expr, Space.None);
- try renderToken(tree, ais, rparen, Space.Space); // )
- },
- }
-
- if (container_decl.fields_and_decls_len == 0) {
- {
- ais.pushIndentNextLine();
- defer ais.popIndent();
- try renderToken(tree, ais, container_decl.lbrace_token, Space.None); // {
+ try renderToken(ais, tree, ident, .none); // identifier
+ const pipe = blk: {
+ if (token_tags[ident + 1] == .comma) {
+ try renderToken(ais, tree, ident + 1, .space); // ,
+ try renderToken(ais, tree, ident + 2, .none); // index
+ break :blk ident + 3;
+ } else {
+ break :blk ident + 1;
}
- return renderToken(tree, ais, container_decl.rbrace_token, space); // }
- }
-
- const src_has_trailing_comma = blk: {
- var maybe_comma = tree.prevToken(container_decl.lastToken());
- // Doc comments for a field may also appear after the comma, eg.
- // field_name: T, // comment attached to field_name
- if (tree.token_ids[maybe_comma] == .DocComment)
- maybe_comma = tree.prevToken(maybe_comma);
- break :blk tree.token_ids[maybe_comma] == .Comma;
};
+ const brace_space = if (while_node.ast.cont_expr == 0 and ais.isLineOverIndented())
+ Space.newline
+ else
+ Space.space;
+ try renderToken(ais, tree, pipe, brace_space); // |
+ } else {
+ const rparen = tree.lastToken(while_node.ast.cond_expr) + 1;
+ const brace_space = if (while_node.ast.cont_expr == 0 and ais.isLineOverIndented())
+ Space.newline
+ else
+ Space.space;
+ try renderToken(ais, tree, rparen, brace_space); // rparen
+ }
+ if (while_node.ast.cont_expr != 0) {
+ const rparen = tree.lastToken(while_node.ast.cont_expr) + 1;
+ const lparen = tree.firstToken(while_node.ast.cont_expr) - 1;
+ try renderToken(ais, tree, lparen - 1, .space); // :
+ try renderToken(ais, tree, lparen, .none); // lparen
+ try renderExpression(gpa, ais, tree, while_node.ast.cont_expr, .none);
+ const brace_space: Space = if (ais.isLineOverIndented()) .newline else .space;
+ try renderToken(ais, tree, rparen, brace_space); // rparen
+ }
+ if (while_node.ast.else_expr != 0) {
+ try renderExpression(gpa, ais, tree, while_node.ast.then_expr, Space.space);
+ try renderToken(ais, tree, while_node.else_token, .space); // else
+ if (while_node.error_token) |error_token| {
+ try renderToken(ais, tree, error_token - 1, .none); // |
+ try renderToken(ais, tree, error_token, .none); // identifier
+ try renderToken(ais, tree, error_token + 1, .space); // |
+ }
+ return renderExpression(gpa, ais, tree, while_node.ast.else_expr, space);
+ } else {
+ return renderExpression(gpa, ais, tree, while_node.ast.then_expr, space);
+ }
+ }
- const fields_and_decls = container_decl.fieldsAndDecls();
-
- // Check if the first declaration and the { are on the same line
- const src_has_newline = !tree.tokensOnSameLine(
- container_decl.lbrace_token,
- fields_and_decls[0].firstToken(),
- );
-
- // We can only print all the elements in-line if all the
- // declarations inside are fields
- const src_has_only_fields = blk: {
- for (fields_and_decls) |decl| {
- if (decl.tag != .ContainerField) break :blk false;
+ const rparen = tree.lastToken(while_node.ast.cond_expr) + 1;
+ const last_then_token = tree.lastToken(while_node.ast.then_expr);
+ const src_has_newline = !tree.tokensOnSameLine(rparen, last_then_token);
+
+ if (src_has_newline) {
+ if (while_node.payload_token) |payload_token| {
+ try renderToken(ais, tree, payload_token - 2, .space); // rparen
+ try renderToken(ais, tree, payload_token - 1, .none); // |
+ const ident = blk: {
+ if (token_tags[payload_token] == .asterisk) {
+ try renderToken(ais, tree, payload_token, .none); // *
+ break :blk payload_token + 1;
+ } else {
+ break :blk payload_token;
}
- break :blk true;
};
-
- if (src_has_trailing_comma or !src_has_only_fields) {
- // One declaration per line
- ais.pushIndentNextLine();
- defer ais.popIndent();
- try renderToken(tree, ais, container_decl.lbrace_token, .Newline); // {
-
- for (fields_and_decls) |decl, i| {
- try renderContainerDecl(allocator, ais, tree, decl, .Newline);
-
- if (i + 1 < fields_and_decls.len) {
- try renderExtraNewline(tree, ais, fields_and_decls[i + 1]);
- }
- }
- } else if (src_has_newline) {
- // All the declarations on the same line, but place the items on
- // their own line
- try renderToken(tree, ais, container_decl.lbrace_token, .Newline); // {
-
- ais.pushIndent();
- defer ais.popIndent();
-
- for (fields_and_decls) |decl, i| {
- const space_after_decl: Space = if (i + 1 >= fields_and_decls.len) .Newline else .Space;
- try renderContainerDecl(allocator, ais, tree, decl, space_after_decl);
- }
- } else {
- // All the declarations on the same line
- try renderToken(tree, ais, container_decl.lbrace_token, .Space); // {
-
- for (fields_and_decls) |decl| {
- try renderContainerDecl(allocator, ais, tree, decl, .Space);
- }
- }
-
- return renderToken(tree, ais, container_decl.rbrace_token, space); // }
- },
-
- .ErrorSetDecl => {
- const err_set_decl = @fieldParentPtr(ast.Node.ErrorSetDecl, "base", base);
-
- const lbrace = tree.nextToken(err_set_decl.error_token);
-
- if (err_set_decl.decls_len == 0) {
- try renderToken(tree, ais, err_set_decl.error_token, Space.None);
- try renderToken(tree, ais, lbrace, Space.None);
- return renderToken(tree, ais, err_set_decl.rbrace_token, space);
- }
-
- if (err_set_decl.decls_len == 1) blk: {
- const node = err_set_decl.decls()[0];
-
- // if there are any doc comments or same line comments
- // don't try to put it all on one line
- if (node.cast(ast.Node.ErrorTag)) |tag| {
- if (tag.doc_comments != null) break :blk;
+ try renderToken(ais, tree, ident, .none); // identifier
+ const pipe = blk: {
+ if (token_tags[ident + 1] == .comma) {
+ try renderToken(ais, tree, ident + 1, .space); // ,
+ try renderToken(ais, tree, ident + 2, .none); // index
+ break :blk ident + 3;
} else {
- break :blk;
+ break :blk ident + 1;
}
-
- try renderToken(tree, ais, err_set_decl.error_token, Space.None); // error
- try renderToken(tree, ais, lbrace, Space.None); // {
- try renderExpression(allocator, ais, tree, node, Space.None);
- return renderToken(tree, ais, err_set_decl.rbrace_token, space); // }
- }
-
- try renderToken(tree, ais, err_set_decl.error_token, Space.None); // error
-
- const src_has_trailing_comma = blk: {
- const maybe_comma = tree.prevToken(err_set_decl.rbrace_token);
- break :blk tree.token_ids[maybe_comma] == .Comma;
};
-
- if (src_has_trailing_comma) {
- {
- ais.pushIndent();
- defer ais.popIndent();
-
- try renderToken(tree, ais, lbrace, Space.Newline); // {
- const decls = err_set_decl.decls();
- for (decls) |node, i| {
- if (i + 1 < decls.len) {
- try renderExpression(allocator, ais, tree, node, Space.None);
- try renderToken(tree, ais, tree.nextToken(node.lastToken()), Space.Newline); // ,
-
- try renderExtraNewline(tree, ais, decls[i + 1]);
- } else {
- try renderExpression(allocator, ais, tree, node, Space.Comma);
- }
- }
+ const after_space: Space = if (while_node.ast.cont_expr != 0) .space else .newline;
+ try renderToken(ais, tree, pipe, after_space); // |
+ } else {
+ ais.pushIndent();
+ const after_space: Space = if (while_node.ast.cont_expr != 0) .space else .newline;
+ try renderToken(ais, tree, rparen, after_space); // rparen
+ ais.popIndent();
+ }
+ if (while_node.ast.cont_expr != 0) {
+ const cont_rparen = tree.lastToken(while_node.ast.cont_expr) + 1;
+ const cont_lparen = tree.firstToken(while_node.ast.cont_expr) - 1;
+ try renderToken(ais, tree, cont_lparen - 1, .space); // :
+ try renderToken(ais, tree, cont_lparen, .none); // lparen
+ try renderExpression(gpa, ais, tree, while_node.ast.cont_expr, .none);
+ try renderToken(ais, tree, cont_rparen, .newline); // rparen
+ }
+ if (while_node.ast.else_expr != 0) {
+ ais.pushIndent();
+ try renderExpression(gpa, ais, tree, while_node.ast.then_expr, Space.newline);
+ ais.popIndent();
+ const else_is_block = nodeIsBlock(node_tags[while_node.ast.else_expr]);
+ if (else_is_block) {
+ try renderToken(ais, tree, while_node.else_token, .space); // else
+ if (while_node.error_token) |error_token| {
+ try renderToken(ais, tree, error_token - 1, .none); // |
+ try renderToken(ais, tree, error_token, .none); // identifier
+ try renderToken(ais, tree, error_token + 1, .space); // |
}
-
- return renderToken(tree, ais, err_set_decl.rbrace_token, space); // }
+ return renderExpression(gpa, ais, tree, while_node.ast.else_expr, space);
} else {
- try renderToken(tree, ais, lbrace, Space.Space); // {
-
- const decls = err_set_decl.decls();
- for (decls) |node, i| {
- if (i + 1 < decls.len) {
- try renderExpression(allocator, ais, tree, node, Space.None);
-
- const comma_token = tree.nextToken(node.lastToken());
- assert(tree.token_ids[comma_token] == .Comma);
- try renderToken(tree, ais, comma_token, Space.Space); // ,
- try renderExtraNewline(tree, ais, decls[i + 1]);
- } else {
- try renderExpression(allocator, ais, tree, node, Space.Space);
- }
- }
-
- return renderToken(tree, ais, err_set_decl.rbrace_token, space); // }
- }
- },
-
- .ErrorTag => {
- const tag = @fieldParentPtr(ast.Node.ErrorTag, "base", base);
-
- try renderDocComments(tree, ais, tag, tag.doc_comments);
- return renderToken(tree, ais, tag.name_token, space); // name
- },
-
- .MultilineStringLiteral => {
- const multiline_str_literal = @fieldParentPtr(ast.Node.MultilineStringLiteral, "base", base);
-
- {
- const locked_indents = ais.lockOneShotIndent();
- defer {
- var i: u8 = 0;
- while (i < locked_indents) : (i += 1) ais.popIndent();
- }
- try ais.maybeInsertNewline();
-
- for (multiline_str_literal.lines()) |t| try renderToken(tree, ais, t, Space.None);
- }
- },
-
- .BuiltinCall => {
- const builtin_call = @fieldParentPtr(ast.Node.BuiltinCall, "base", base);
-
- // TODO remove after 0.7.0 release
- if (mem.eql(u8, tree.tokenSlice(builtin_call.builtin_token), "@OpaqueType"))
- return ais.writer().writeAll("opaque {}");
-
- // TODO remove after 0.7.0 release
- {
- const params = builtin_call.paramsConst();
- if (mem.eql(u8, tree.tokenSlice(builtin_call.builtin_token), "@Type") and
- params.len == 1)
- {
- if (params[0].castTag(.EnumLiteral)) |enum_literal|
- if (mem.eql(u8, tree.tokenSlice(enum_literal.name), "Opaque"))
- return ais.writer().writeAll("opaque {}");
+ if (while_node.error_token) |error_token| {
+ try renderToken(ais, tree, while_node.else_token, .space); // else
+ try renderToken(ais, tree, error_token - 1, .none); // |
+ try renderToken(ais, tree, error_token, .none); // identifier
+ try renderToken(ais, tree, error_token + 1, .space); // |
+ } else {
+ try renderToken(ais, tree, while_node.else_token, .newline); // else
}
+ try renderExpressionIndented(gpa, ais, tree, while_node.ast.else_expr, space);
+ return;
}
+ } else {
+ try renderExpressionIndented(gpa, ais, tree, while_node.ast.then_expr, space);
+ return;
+ }
+ }
- try renderToken(tree, ais, builtin_call.builtin_token, Space.None); // @name
-
- const src_params_trailing_comma = blk: {
- if (builtin_call.params_len == 0) break :blk false;
- const last_node = builtin_call.params()[builtin_call.params_len - 1];
- const maybe_comma = tree.nextToken(last_node.lastToken());
- break :blk tree.token_ids[maybe_comma] == .Comma;
- };
-
- const lparen = tree.nextToken(builtin_call.builtin_token);
-
- if (!src_params_trailing_comma) {
- try renderToken(tree, ais, lparen, Space.None); // (
-
- // render all on one line, no trailing comma
- const params = builtin_call.params();
- for (params) |param_node, i| {
- const maybe_comment = param_node.firstToken() - 1;
- if (param_node.*.tag == .MultilineStringLiteral or tree.token_ids[maybe_comment] == .LineComment) {
- ais.pushIndentOneShot();
- }
- try renderExpression(allocator, ais, tree, param_node, Space.None);
+ // Render everything on a single line.
- if (i + 1 < params.len) {
- const comma_token = tree.nextToken(param_node.lastToken());
- try renderToken(tree, ais, comma_token, Space.Space); // ,
- }
- }
+ if (while_node.payload_token) |payload_token| {
+ assert(payload_token - 2 == rparen);
+ try renderToken(ais, tree, payload_token - 2, .space); // )
+ try renderToken(ais, tree, payload_token - 1, .none); // |
+ const ident = blk: {
+ if (token_tags[payload_token] == .asterisk) {
+ try renderToken(ais, tree, payload_token, .none); // *
+ break :blk payload_token + 1;
} else {
- // one param per line
- ais.pushIndent();
- defer ais.popIndent();
- try renderToken(tree, ais, lparen, Space.Newline); // (
-
- for (builtin_call.params()) |param_node| {
- try renderExpression(allocator, ais, tree, param_node, Space.Comma);
- }
+ break :blk payload_token;
}
-
- return renderToken(tree, ais, builtin_call.rparen_token, space); // )
- },
-
- .FnProto => {
- const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", base);
-
- if (fn_proto.getVisibToken()) |visib_token_index| {
- const visib_token = tree.token_ids[visib_token_index];
- assert(visib_token == .Keyword_pub or visib_token == .Keyword_export);
-
- try renderToken(tree, ais, visib_token_index, Space.Space); // pub
- }
-
- if (fn_proto.getExternExportInlineToken()) |extern_export_inline_token| {
- if (fn_proto.getIsExternPrototype() == null and fn_proto.getIsInline() == null)
- try renderToken(tree, ais, extern_export_inline_token, Space.Space); // extern/export/inline
- }
-
- if (fn_proto.getLibName()) |lib_name| {
- try renderExpression(allocator, ais, tree, lib_name, Space.Space);
- }
-
- const lparen = if (fn_proto.getNameToken()) |name_token| blk: {
- try renderToken(tree, ais, fn_proto.fn_token, Space.Space); // fn
- try renderToken(tree, ais, name_token, Space.None); // name
- break :blk tree.nextToken(name_token);
- } else blk: {
- try renderToken(tree, ais, fn_proto.fn_token, Space.Space); // fn
- break :blk tree.nextToken(fn_proto.fn_token);
- };
- assert(tree.token_ids[lparen] == .LParen);
-
- const rparen = tree.prevToken(
- // the first token for the annotation expressions is the left
- // parenthesis, hence the need for two prevToken
- if (fn_proto.getAlignExpr()) |align_expr|
- tree.prevToken(tree.prevToken(align_expr.firstToken()))
- else if (fn_proto.getSectionExpr()) |section_expr|
- tree.prevToken(tree.prevToken(section_expr.firstToken()))
- else if (fn_proto.getCallconvExpr()) |callconv_expr|
- tree.prevToken(tree.prevToken(callconv_expr.firstToken()))
- else switch (fn_proto.return_type) {
- .Explicit => |node| node.firstToken(),
- .InferErrorSet => |node| tree.prevToken(node.firstToken()),
- .Invalid => unreachable,
- },
- );
- assert(tree.token_ids[rparen] == .RParen);
-
- const src_params_trailing_comma = blk: {
- const maybe_comma = tree.token_ids[rparen - 1];
- break :blk maybe_comma == .Comma or maybe_comma == .LineComment;
- };
-
- if (!src_params_trailing_comma) {
- try renderToken(tree, ais, lparen, Space.None); // (
-
- // render all on one line, no trailing comma
- for (fn_proto.params()) |param_decl, i| {
- try renderParamDecl(allocator, ais, tree, param_decl, Space.None);
-
- if (i + 1 < fn_proto.params_len or fn_proto.getVarArgsToken() != null) {
- const comma = tree.nextToken(param_decl.lastToken());
- try renderToken(tree, ais, comma, Space.Space); // ,
- }
- }
- if (fn_proto.getVarArgsToken()) |var_args_token| {
- try renderToken(tree, ais, var_args_token, Space.None);
- }
+ };
+ try renderToken(ais, tree, ident, .none); // identifier
+ const pipe = blk: {
+ if (token_tags[ident + 1] == .comma) {
+ try renderToken(ais, tree, ident + 1, .space); // ,
+ try renderToken(ais, tree, ident + 2, .none); // index
+ break :blk ident + 3;
} else {
- // one param per line
- ais.pushIndent();
- defer ais.popIndent();
- try renderToken(tree, ais, lparen, Space.Newline); // (
-
- for (fn_proto.params()) |param_decl| {
- try renderParamDecl(allocator, ais, tree, param_decl, Space.Comma);
- }
- if (fn_proto.getVarArgsToken()) |var_args_token| {
- try renderToken(tree, ais, var_args_token, Space.Comma);
- }
+ break :blk ident + 1;
}
+ };
+ try renderToken(ais, tree, pipe, .space); // |
+ } else {
+ try renderToken(ais, tree, rparen, .space); // )
+ }
- try renderToken(tree, ais, rparen, Space.Space); // )
-
- if (fn_proto.getAlignExpr()) |align_expr| {
- const align_rparen = tree.nextToken(align_expr.lastToken());
- const align_lparen = tree.prevToken(align_expr.firstToken());
- const align_kw = tree.prevToken(align_lparen);
-
- try renderToken(tree, ais, align_kw, Space.None); // align
- try renderToken(tree, ais, align_lparen, Space.None); // (
- try renderExpression(allocator, ais, tree, align_expr, Space.None);
- try renderToken(tree, ais, align_rparen, Space.Space); // )
- }
-
- if (fn_proto.getSectionExpr()) |section_expr| {
- const section_rparen = tree.nextToken(section_expr.lastToken());
- const section_lparen = tree.prevToken(section_expr.firstToken());
- const section_kw = tree.prevToken(section_lparen);
-
- try renderToken(tree, ais, section_kw, Space.None); // section
- try renderToken(tree, ais, section_lparen, Space.None); // (
- try renderExpression(allocator, ais, tree, section_expr, Space.None);
- try renderToken(tree, ais, section_rparen, Space.Space); // )
- }
+ if (while_node.ast.cont_expr != 0) {
+ const cont_rparen = tree.lastToken(while_node.ast.cont_expr) + 1;
+ const cont_lparen = tree.firstToken(while_node.ast.cont_expr) - 1;
+ try renderToken(ais, tree, cont_lparen - 1, .space); // :
+ try renderToken(ais, tree, cont_lparen, .none); // lparen
+ try renderExpression(gpa, ais, tree, while_node.ast.cont_expr, .none);
+ try renderToken(ais, tree, cont_rparen, .space); // rparen
+ }
- if (fn_proto.getCallconvExpr()) |callconv_expr| {
- const callconv_rparen = tree.nextToken(callconv_expr.lastToken());
- const callconv_lparen = tree.prevToken(callconv_expr.firstToken());
- const callconv_kw = tree.prevToken(callconv_lparen);
-
- try renderToken(tree, ais, callconv_kw, Space.None); // callconv
- try renderToken(tree, ais, callconv_lparen, Space.None); // (
- try renderExpression(allocator, ais, tree, callconv_expr, Space.None);
- try renderToken(tree, ais, callconv_rparen, Space.Space); // )
- } else if (fn_proto.getIsExternPrototype() != null) {
- try ais.writer().writeAll("callconv(.C) ");
- } else if (fn_proto.getIsAsync() != null) {
- try ais.writer().writeAll("callconv(.Async) ");
- } else if (fn_proto.getIsInline() != null) {
- try ais.writer().writeAll("callconv(.Inline) ");
- }
+ if (while_node.ast.else_expr != 0) {
+ try renderExpression(gpa, ais, tree, while_node.ast.then_expr, .space);
+ try renderToken(ais, tree, while_node.else_token, .space); // else
- switch (fn_proto.return_type) {
- .Explicit => |node| {
- return renderExpression(allocator, ais, tree, node, space);
- },
- .InferErrorSet => |node| {
- try renderToken(tree, ais, tree.prevToken(node.firstToken()), Space.None); // !
- return renderExpression(allocator, ais, tree, node, space);
- },
- .Invalid => unreachable,
- }
- },
+ if (while_node.error_token) |error_token| {
+ try renderToken(ais, tree, error_token - 1, .none); // |
+ try renderToken(ais, tree, error_token, .none); // identifier
+ try renderToken(ais, tree, error_token + 1, .space); // |
+ }
- .AnyFrameType => {
- const anyframe_type = @fieldParentPtr(ast.Node.AnyFrameType, "base", base);
+ return renderExpression(gpa, ais, tree, while_node.ast.else_expr, space);
+ } else {
+ return renderExpression(gpa, ais, tree, while_node.ast.then_expr, space);
+ }
+}
- if (anyframe_type.result) |result| {
- try renderToken(tree, ais, anyframe_type.anyframe_token, Space.None); // anyframe
- try renderToken(tree, ais, result.arrow_token, Space.None); // ->
- return renderExpression(allocator, ais, tree, result.return_type, space);
- } else {
- return renderToken(tree, ais, anyframe_type.anyframe_token, space); // anyframe
- }
- },
+fn renderContainerField(
+ gpa: *Allocator,
+ ais: *Ais,
+ tree: ast.Tree,
+ field: ast.full.ContainerField,
+ space: Space,
+) Error!void {
+ const main_tokens = tree.nodes.items(.main_token);
+ if (field.comptime_token) |t| {
+ try renderToken(ais, tree, t, .space); // comptime
+ }
+ if (field.ast.type_expr == 0 and field.ast.value_expr == 0) {
+ return renderTokenComma(ais, tree, field.ast.name_token, space); // name
+ }
+ if (field.ast.type_expr != 0 and field.ast.value_expr == 0) {
+ try renderToken(ais, tree, field.ast.name_token, .none); // name
+ try renderToken(ais, tree, field.ast.name_token + 1, .space); // :
+
+ if (field.ast.align_expr != 0) {
+ try renderExpression(gpa, ais, tree, field.ast.type_expr, .space); // type
+ const align_token = tree.firstToken(field.ast.align_expr) - 2;
+ try renderToken(ais, tree, align_token, .none); // align
+ try renderToken(ais, tree, align_token + 1, .none); // (
+ try renderExpression(gpa, ais, tree, field.ast.align_expr, .none); // alignment
+ const rparen = tree.lastToken(field.ast.align_expr) + 1;
+ return renderTokenComma(ais, tree, rparen, space); // )
+ } else {
+ return renderExpressionComma(gpa, ais, tree, field.ast.type_expr, space); // type
+ }
+ }
+ if (field.ast.type_expr == 0 and field.ast.value_expr != 0) {
+ try renderToken(ais, tree, field.ast.name_token, .space); // name
+ try renderToken(ais, tree, field.ast.name_token + 1, .space); // =
+ return renderExpressionComma(gpa, ais, tree, field.ast.value_expr, space); // value
+ }
- .DocComment => unreachable, // doc comments are attached to nodes
+ try renderToken(ais, tree, field.ast.name_token, .none); // name
+ try renderToken(ais, tree, field.ast.name_token + 1, .space); // :
+ try renderExpression(gpa, ais, tree, field.ast.type_expr, .space); // type
+
+ if (field.ast.align_expr != 0) {
+ const lparen_token = tree.firstToken(field.ast.align_expr) - 1;
+ const align_kw = lparen_token - 1;
+ const rparen_token = tree.lastToken(field.ast.align_expr) + 1;
+ try renderToken(ais, tree, align_kw, .none); // align
+ try renderToken(ais, tree, lparen_token, .none); // (
+ try renderExpression(gpa, ais, tree, field.ast.align_expr, .none); // alignment
+ try renderToken(ais, tree, rparen_token, .space); // )
+ }
+ const eq_token = tree.firstToken(field.ast.value_expr) - 1;
+ try renderToken(ais, tree, eq_token, .space); // =
+ return renderExpressionComma(gpa, ais, tree, field.ast.value_expr, space); // value
+}
- .Switch => {
- const switch_node = @fieldParentPtr(ast.Node.Switch, "base", base);
+fn renderBuiltinCall(
+ gpa: *Allocator,
+ ais: *Ais,
+ tree: ast.Tree,
+ builtin_token: ast.TokenIndex,
+ params: []const ast.Node.Index,
+ space: Space,
+) Error!void {
+ const token_tags = tree.tokens.items(.tag);
- try renderToken(tree, ais, switch_node.switch_token, Space.Space); // switch
- try renderToken(tree, ais, tree.nextToken(switch_node.switch_token), Space.None); // (
+ try renderToken(ais, tree, builtin_token, .none); // @name
- const rparen = tree.nextToken(switch_node.expr.lastToken());
- const lbrace = tree.nextToken(rparen);
+ if (params.len == 0) {
+ try renderToken(ais, tree, builtin_token + 1, .none); // (
+ return renderToken(ais, tree, builtin_token + 2, space); // )
+ }
- if (switch_node.cases_len == 0) {
- try renderExpression(allocator, ais, tree, switch_node.expr, Space.None);
- try renderToken(tree, ais, rparen, Space.Space); // )
- try renderToken(tree, ais, lbrace, Space.None); // {
- return renderToken(tree, ais, switch_node.rbrace, space); // }
- }
+ const last_param = params[params.len - 1];
+ const after_last_param_token = tree.lastToken(last_param) + 1;
- try renderExpression(allocator, ais, tree, switch_node.expr, Space.None);
- try renderToken(tree, ais, rparen, Space.Space); // )
+ if (token_tags[after_last_param_token] != .comma) {
+ // Render all on one line, no trailing comma.
+ try renderToken(ais, tree, builtin_token + 1, .none); // (
+ for (params) |param_node, i| {
+ const first_param_token = tree.firstToken(param_node);
+ if (token_tags[first_param_token] == .multiline_string_literal_line or
+ hasSameLineComment(tree, first_param_token - 1))
{
- ais.pushIndentNextLine();
- defer ais.popIndent();
- try renderToken(tree, ais, lbrace, Space.Newline); // {
-
- const cases = switch_node.cases();
- for (cases) |node, i| {
- try renderExpression(allocator, ais, tree, node, Space.Comma);
-
- if (i + 1 < cases.len) {
- try renderExtraNewline(tree, ais, cases[i + 1]);
- }
- }
+ ais.pushIndentOneShot();
}
+ try renderExpression(gpa, ais, tree, param_node, .none);
- return renderToken(tree, ais, switch_node.rbrace, space); // }
- },
-
- .SwitchCase => {
- const switch_case = @fieldParentPtr(ast.Node.SwitchCase, "base", base);
-
- assert(switch_case.items_len != 0);
- const src_has_trailing_comma = blk: {
- const last_node = switch_case.items()[switch_case.items_len - 1];
- const maybe_comma = tree.nextToken(last_node.lastToken());
- break :blk tree.token_ids[maybe_comma] == .Comma;
- };
-
- if (switch_case.items_len == 1 or !src_has_trailing_comma) {
- const items = switch_case.items();
- for (items) |node, i| {
- if (i + 1 < items.len) {
- try renderExpression(allocator, ais, tree, node, Space.None);
-
- const comma_token = tree.nextToken(node.lastToken());
- try renderToken(tree, ais, comma_token, Space.Space); // ,
- try renderExtraNewline(tree, ais, items[i + 1]);
- } else {
- try renderExpression(allocator, ais, tree, node, Space.Space);
- }
- }
- } else {
- const items = switch_case.items();
- for (items) |node, i| {
- if (i + 1 < items.len) {
- try renderExpression(allocator, ais, tree, node, Space.None);
-
- const comma_token = tree.nextToken(node.lastToken());
- try renderToken(tree, ais, comma_token, Space.Newline); // ,
- try renderExtraNewline(tree, ais, items[i + 1]);
- } else {
- try renderExpression(allocator, ais, tree, node, Space.Comma);
- }
- }
+ if (i + 1 < params.len) {
+ const comma_token = tree.lastToken(param_node) + 1;
+ try renderToken(ais, tree, comma_token, .space); // ,
}
+ }
+ return renderToken(ais, tree, after_last_param_token, space); // )
+ } else {
+ // Render one param per line.
+ ais.pushIndent();
+ try renderToken(ais, tree, builtin_token + 1, Space.newline); // (
+
+ for (params) |param_node| {
+ try renderExpression(gpa, ais, tree, param_node, .comma);
+ }
+ ais.popIndent();
- try renderToken(tree, ais, switch_case.arrow_token, Space.Space); // =>
-
- if (switch_case.payload) |payload| {
- try renderExpression(allocator, ais, tree, payload, Space.Space);
- }
-
- return renderExpression(allocator, ais, tree, switch_case.expr, space);
- },
- .SwitchElse => {
- const switch_else = @fieldParentPtr(ast.Node.SwitchElse, "base", base);
- return renderToken(tree, ais, switch_else.token, space);
- },
- .Else => {
- const else_node = @fieldParentPtr(ast.Node.Else, "base", base);
-
- const body_is_block = nodeIsBlock(else_node.body);
- const same_line = body_is_block or tree.tokensOnSameLine(else_node.else_token, else_node.body.lastToken());
-
- const after_else_space = if (same_line or else_node.payload != null) Space.Space else Space.Newline;
- try renderToken(tree, ais, else_node.else_token, after_else_space);
-
- if (else_node.payload) |payload| {
- const payload_space = if (same_line) Space.Space else Space.Newline;
- try renderExpression(allocator, ais, tree, payload, payload_space);
- }
+ return renderToken(ais, tree, after_last_param_token + 1, space); // )
+ }
+}
- if (same_line) {
- return renderExpression(allocator, ais, tree, else_node.body, space);
- } else {
- ais.pushIndent();
- defer ais.popIndent();
- return renderExpression(allocator, ais, tree, else_node.body, space);
+fn renderFnProto(gpa: *Allocator, ais: *Ais, tree: ast.Tree, fn_proto: ast.full.FnProto, space: Space) Error!void {
+ const token_tags = tree.tokens.items(.tag);
+ const token_starts = tree.tokens.items(.start);
+
+ const is_inline = fn_proto.ast.fn_token > 0 and
+ token_tags[fn_proto.ast.fn_token - 1] == .keyword_inline;
+
+ const after_fn_token = fn_proto.ast.fn_token + 1;
+ const lparen = if (token_tags[after_fn_token] == .identifier) blk: {
+ try renderToken(ais, tree, fn_proto.ast.fn_token, .space); // fn
+ try renderToken(ais, tree, after_fn_token, .none); // name
+ break :blk after_fn_token + 1;
+ } else blk: {
+ try renderToken(ais, tree, fn_proto.ast.fn_token, .space); // fn
+ break :blk fn_proto.ast.fn_token + 1;
+ };
+ assert(token_tags[lparen] == .l_paren);
+
+ const maybe_bang = tree.firstToken(fn_proto.ast.return_type) - 1;
+ const rparen = blk: {
+ // These may appear in any order, so we have to check the token_starts array
+ // to find out which is first.
+ var rparen = if (token_tags[maybe_bang] == .bang) maybe_bang - 1 else maybe_bang;
+ var smallest_start = token_starts[maybe_bang];
+ if (fn_proto.ast.align_expr != 0) {
+ const tok = tree.firstToken(fn_proto.ast.align_expr) - 3;
+ const start = token_starts[tok];
+ if (start < smallest_start) {
+ rparen = tok;
+ smallest_start = start;
}
- },
-
- .While => {
- const while_node = @fieldParentPtr(ast.Node.While, "base", base);
-
- if (while_node.label) |label| {
- try renderToken(tree, ais, label, Space.None); // label
- try renderToken(tree, ais, tree.nextToken(label), Space.Space); // :
+ }
+ if (fn_proto.ast.section_expr != 0) {
+ const tok = tree.firstToken(fn_proto.ast.section_expr) - 3;
+ const start = token_starts[tok];
+ if (start < smallest_start) {
+ rparen = tok;
+ smallest_start = start;
}
-
- if (while_node.inline_token) |inline_token| {
- try renderToken(tree, ais, inline_token, Space.Space); // inline
+ }
+ if (fn_proto.ast.callconv_expr != 0) {
+ const tok = tree.firstToken(fn_proto.ast.callconv_expr) - 3;
+ const start = token_starts[tok];
+ if (start < smallest_start) {
+ rparen = tok;
+ smallest_start = start;
}
-
- try renderToken(tree, ais, while_node.while_token, Space.Space); // while
- try renderToken(tree, ais, tree.nextToken(while_node.while_token), Space.None); // (
- try renderExpression(allocator, ais, tree, while_node.condition, Space.None);
-
- const cond_rparen = tree.nextToken(while_node.condition.lastToken());
-
- const body_is_block = nodeIsBlock(while_node.body);
-
- var block_start_space: Space = undefined;
- var after_body_space: Space = undefined;
-
- if (body_is_block) {
- block_start_space = Space.BlockStart;
- after_body_space = if (while_node.@"else" == null) space else Space.SpaceOrOutdent;
- } else if (tree.tokensOnSameLine(cond_rparen, while_node.body.lastToken())) {
- block_start_space = Space.Space;
- after_body_space = if (while_node.@"else" == null) space else Space.Space;
- } else {
- block_start_space = Space.Newline;
- after_body_space = if (while_node.@"else" == null) space else Space.Newline;
+ }
+ break :blk rparen;
+ };
+ assert(token_tags[rparen] == .r_paren);
+
+ // The params list is a sparse set that does *not* include anytype or ... parameters.
+
+ const trailing_comma = token_tags[rparen - 1] == .comma;
+ if (!trailing_comma and !hasComment(tree, lparen, rparen)) {
+ // Render all on one line, no trailing comma.
+ try renderToken(ais, tree, lparen, .none); // (
+
+ var param_i: usize = 0;
+ var last_param_token = lparen;
+ while (true) {
+ last_param_token += 1;
+ switch (token_tags[last_param_token]) {
+ .doc_comment => {
+ try renderToken(ais, tree, last_param_token, .newline);
+ continue;
+ },
+ .ellipsis3 => {
+ try renderToken(ais, tree, last_param_token, .none); // ...
+ break;
+ },
+ .keyword_noalias, .keyword_comptime => {
+ try renderToken(ais, tree, last_param_token, .space);
+ last_param_token += 1;
+ },
+ .identifier => {},
+ .keyword_anytype => {
+ try renderToken(ais, tree, last_param_token, .none); // anytype
+ continue;
+ },
+ .r_paren => break,
+ .comma => {
+ try renderToken(ais, tree, last_param_token, .space); // ,
+ continue;
+ },
+ else => {}, // Parameter type without a name.
}
-
+ if (token_tags[last_param_token] == .identifier and
+ token_tags[last_param_token + 1] == .colon)
{
- const rparen_space = if (while_node.payload != null or while_node.continue_expr != null) Space.Space else block_start_space;
- try renderToken(tree, ais, cond_rparen, rparen_space); // )
- }
-
- if (while_node.payload) |payload| {
- const payload_space = if (while_node.continue_expr != null) Space.Space else block_start_space;
- try renderExpression(allocator, ais, tree, payload, payload_space);
+ try renderToken(ais, tree, last_param_token, .none); // name
+ last_param_token += 1;
+ try renderToken(ais, tree, last_param_token, .space); // :
+ last_param_token += 1;
+ }
+ if (token_tags[last_param_token] == .keyword_anytype) {
+ try renderToken(ais, tree, last_param_token, .none); // anytype
+ continue;
+ }
+ const param = fn_proto.ast.params[param_i];
+ param_i += 1;
+ try renderExpression(gpa, ais, tree, param, .none);
+ last_param_token = tree.lastToken(param);
+ }
+ } else {
+ // One param per line.
+ ais.pushIndent();
+ try renderToken(ais, tree, lparen, .newline); // (
+
+ var param_i: usize = 0;
+ var last_param_token = lparen;
+ while (true) {
+ last_param_token += 1;
+ switch (token_tags[last_param_token]) {
+ .doc_comment => {
+ try renderToken(ais, tree, last_param_token, .newline);
+ continue;
+ },
+ .ellipsis3 => {
+ try renderToken(ais, tree, last_param_token, .comma); // ...
+ break;
+ },
+ .keyword_noalias, .keyword_comptime => {
+ try renderToken(ais, tree, last_param_token, .space);
+ last_param_token += 1;
+ },
+ .identifier => {},
+ .keyword_anytype => {
+ try renderToken(ais, tree, last_param_token, .comma); // anytype
+ if (token_tags[last_param_token + 1] == .comma)
+ last_param_token += 1;
+ continue;
+ },
+ .r_paren => break,
+ else => unreachable,
}
+ if (token_tags[last_param_token] == .identifier) {
+ try renderToken(ais, tree, last_param_token, .none); // name
+ last_param_token += 1;
+ try renderToken(ais, tree, last_param_token, .space); // :
+ last_param_token += 1;
+ }
+ if (token_tags[last_param_token] == .keyword_anytype) {
+ try renderToken(ais, tree, last_param_token, .comma); // anytype
+ if (token_tags[last_param_token + 1] == .comma)
+ last_param_token += 1;
+ continue;
+ }
+ const param = fn_proto.ast.params[param_i];
+ param_i += 1;
+ try renderExpression(gpa, ais, tree, param, .comma);
+ last_param_token = tree.lastToken(param);
+ if (token_tags[last_param_token + 1] == .comma) last_param_token += 1;
+ }
+ ais.popIndent();
+ }
- if (while_node.continue_expr) |continue_expr| {
- const rparen = tree.nextToken(continue_expr.lastToken());
- const lparen = tree.prevToken(continue_expr.firstToken());
- const colon = tree.prevToken(lparen);
-
- try renderToken(tree, ais, colon, Space.Space); // :
- try renderToken(tree, ais, lparen, Space.None); // (
+ try renderToken(ais, tree, rparen, .space); // )
- try renderExpression(allocator, ais, tree, continue_expr, Space.None);
+ if (fn_proto.ast.align_expr != 0) {
+ const align_lparen = tree.firstToken(fn_proto.ast.align_expr) - 1;
+ const align_rparen = tree.lastToken(fn_proto.ast.align_expr) + 1;
- try renderToken(tree, ais, rparen, block_start_space); // )
- }
-
- {
- if (!body_is_block) ais.pushIndent();
- defer if (!body_is_block) ais.popIndent();
- try renderExpression(allocator, ais, tree, while_node.body, after_body_space);
- }
+ try renderToken(ais, tree, align_lparen - 1, .none); // align
+ try renderToken(ais, tree, align_lparen, .none); // (
+ try renderExpression(gpa, ais, tree, fn_proto.ast.align_expr, .none);
+ try renderToken(ais, tree, align_rparen, .space); // )
+ }
- if (while_node.@"else") |@"else"| {
- return renderExpression(allocator, ais, tree, &@"else".base, space);
- }
- },
+ if (fn_proto.ast.section_expr != 0) {
+ const section_lparen = tree.firstToken(fn_proto.ast.section_expr) - 1;
+ const section_rparen = tree.lastToken(fn_proto.ast.section_expr) + 1;
- .For => {
- const for_node = @fieldParentPtr(ast.Node.For, "base", base);
+ try renderToken(ais, tree, section_lparen - 1, .none); // section
+ try renderToken(ais, tree, section_lparen, .none); // (
+ try renderExpression(gpa, ais, tree, fn_proto.ast.section_expr, .none);
+ try renderToken(ais, tree, section_rparen, .space); // )
+ }
- if (for_node.label) |label| {
- try renderToken(tree, ais, label, Space.None); // label
- try renderToken(tree, ais, tree.nextToken(label), Space.Space); // :
- }
+ if (fn_proto.ast.callconv_expr != 0) {
+ const callconv_lparen = tree.firstToken(fn_proto.ast.callconv_expr) - 1;
+ const callconv_rparen = tree.lastToken(fn_proto.ast.callconv_expr) + 1;
- if (for_node.inline_token) |inline_token| {
- try renderToken(tree, ais, inline_token, Space.Space); // inline
- }
-
- try renderToken(tree, ais, for_node.for_token, Space.Space); // for
- try renderToken(tree, ais, tree.nextToken(for_node.for_token), Space.None); // (
- try renderExpression(allocator, ais, tree, for_node.array_expr, Space.None);
+ try renderToken(ais, tree, callconv_lparen - 1, .none); // callconv
+ try renderToken(ais, tree, callconv_lparen, .none); // (
+ try renderExpression(gpa, ais, tree, fn_proto.ast.callconv_expr, .none);
+ try renderToken(ais, tree, callconv_rparen, .space); // )
+ } else if (is_inline) {
+ try ais.writer().writeAll("callconv(.Inline) ");
+ }
- const rparen = tree.nextToken(for_node.array_expr.lastToken());
+ if (token_tags[maybe_bang] == .bang) {
+ try renderToken(ais, tree, maybe_bang, .none); // !
+ }
+ return renderExpression(gpa, ais, tree, fn_proto.ast.return_type, space);
+}
- const body_is_block = for_node.body.tag.isBlock();
- const src_one_line_to_body = !body_is_block and tree.tokensOnSameLine(rparen, for_node.body.firstToken());
- const body_on_same_line = body_is_block or src_one_line_to_body;
+fn renderSwitchCase(
+ gpa: *Allocator,
+ ais: *Ais,
+ tree: ast.Tree,
+ switch_case: ast.full.SwitchCase,
+ space: Space,
+) Error!void {
+ const token_tags = tree.tokens.items(.tag);
+ const trailing_comma = token_tags[switch_case.ast.arrow_token - 1] == .comma;
+
+ // Render everything before the arrow
+ if (switch_case.ast.values.len == 0) {
+ try renderToken(ais, tree, switch_case.ast.arrow_token - 1, .space); // else keyword
+ } else if (switch_case.ast.values.len == 1) {
+ // render on one line and drop the trailing comma if any
+ try renderExpression(gpa, ais, tree, switch_case.ast.values[0], .space);
+ } else if (trailing_comma or
+ hasComment(tree, tree.firstToken(switch_case.ast.values[0]), switch_case.ast.arrow_token))
+ {
+ // Render each value on a new line
+ try renderExpressions(gpa, ais, tree, switch_case.ast.values, .comma);
+ } else {
+ // Render on one line
+ for (switch_case.ast.values) |value_expr| {
+ try renderExpression(gpa, ais, tree, value_expr, .comma_space);
+ }
+ }
- try renderToken(tree, ais, rparen, Space.Space); // )
+ // Render the arrow and everything after it
+ try renderToken(ais, tree, switch_case.ast.arrow_token, .space);
- const space_after_payload = if (body_on_same_line) Space.Space else Space.Newline;
- try renderExpression(allocator, ais, tree, for_node.payload, space_after_payload); // |x|
+ if (switch_case.payload_token) |payload_token| {
+ try renderToken(ais, tree, payload_token - 1, .none); // pipe
+ if (token_tags[payload_token] == .asterisk) {
+ try renderToken(ais, tree, payload_token, .none); // asterisk
+ try renderToken(ais, tree, payload_token + 1, .none); // identifier
+ try renderToken(ais, tree, payload_token + 2, .space); // pipe
+ } else {
+ try renderToken(ais, tree, payload_token, .none); // identifier
+ try renderToken(ais, tree, payload_token + 1, .space); // pipe
+ }
+ }
- const space_after_body = blk: {
- if (for_node.@"else") |@"else"| {
- const src_one_line_to_else = tree.tokensOnSameLine(rparen, @"else".firstToken());
- if (body_is_block or src_one_line_to_else) {
- break :blk Space.Space;
- } else {
- break :blk Space.Newline;
- }
- } else {
- break :blk space;
- }
- };
+ try renderExpression(gpa, ais, tree, switch_case.ast.target_expr, space);
+}
- {
- if (!body_on_same_line) ais.pushIndent();
- defer if (!body_on_same_line) ais.popIndent();
- try renderExpression(allocator, ais, tree, for_node.body, space_after_body); // { body }
- }
+fn renderBlock(
+ gpa: *Allocator,
+ ais: *Ais,
+ tree: ast.Tree,
+ block_node: ast.Node.Index,
+ statements: []const ast.Node.Index,
+ space: Space,
+) Error!void {
+ const token_tags = tree.tokens.items(.tag);
+ const node_tags = tree.nodes.items(.tag);
+ const nodes_data = tree.nodes.items(.data);
+ const lbrace = tree.nodes.items(.main_token)[block_node];
+
+ if (token_tags[lbrace - 1] == .colon and
+ token_tags[lbrace - 2] == .identifier)
+ {
+ try renderToken(ais, tree, lbrace - 2, .none);
+ try renderToken(ais, tree, lbrace - 1, .space);
+ }
- if (for_node.@"else") |@"else"| {
- return renderExpression(allocator, ais, tree, &@"else".base, space); // else
+ ais.pushIndentNextLine();
+ if (statements.len == 0) {
+ try renderToken(ais, tree, lbrace, .none);
+ } else {
+ try renderToken(ais, tree, lbrace, .newline);
+ for (statements) |stmt, i| {
+ if (i != 0) try renderExtraNewline(ais, tree, stmt);
+ switch (node_tags[stmt]) {
+ .global_var_decl => try renderVarDecl(gpa, ais, tree, tree.globalVarDecl(stmt)),
+ .local_var_decl => try renderVarDecl(gpa, ais, tree, tree.localVarDecl(stmt)),
+ .simple_var_decl => try renderVarDecl(gpa, ais, tree, tree.simpleVarDecl(stmt)),
+ .aligned_var_decl => try renderVarDecl(gpa, ais, tree, tree.alignedVarDecl(stmt)),
+ else => try renderExpression(gpa, ais, tree, stmt, .semicolon),
}
- },
-
- .If => {
- const if_node = @fieldParentPtr(ast.Node.If, "base", base);
+ }
+ }
+ ais.popIndent();
- const lparen = tree.nextToken(if_node.if_token);
- const rparen = tree.nextToken(if_node.condition.lastToken());
+ try renderToken(ais, tree, tree.lastToken(block_node), space); // rbrace
+}
- try renderToken(tree, ais, if_node.if_token, Space.Space); // if
- try renderToken(tree, ais, lparen, Space.None); // (
+fn renderStructInit(
+ gpa: *Allocator,
+ ais: *Ais,
+ tree: ast.Tree,
+ struct_node: ast.Node.Index,
+ struct_init: ast.full.StructInit,
+ space: Space,
+) Error!void {
+ const token_tags = tree.tokens.items(.tag);
+ if (struct_init.ast.type_expr == 0) {
+ try renderToken(ais, tree, struct_init.ast.lbrace - 1, .none); // .
+ } else {
+ try renderExpression(gpa, ais, tree, struct_init.ast.type_expr, .none); // T
+ }
+ if (struct_init.ast.fields.len == 0) {
+ ais.pushIndentNextLine();
+ try renderToken(ais, tree, struct_init.ast.lbrace, .none); // lbrace
+ ais.popIndent();
+ return renderToken(ais, tree, struct_init.ast.lbrace + 1, space); // rbrace
+ }
- try renderExpression(allocator, ais, tree, if_node.condition, Space.None); // condition
+ const rbrace = tree.lastToken(struct_node);
+ const trailing_comma = token_tags[rbrace - 1] == .comma;
+ if (trailing_comma or hasComment(tree, struct_init.ast.lbrace, rbrace)) {
+ // Render one field init per line.
+ ais.pushIndentNextLine();
+ try renderToken(ais, tree, struct_init.ast.lbrace, .newline);
+
+ try renderToken(ais, tree, struct_init.ast.lbrace + 1, .none); // .
+ try renderToken(ais, tree, struct_init.ast.lbrace + 2, .space); // name
+ try renderToken(ais, tree, struct_init.ast.lbrace + 3, .space); // =
+ try renderExpression(gpa, ais, tree, struct_init.ast.fields[0], .comma);
+
+ for (struct_init.ast.fields[1..]) |field_init| {
+ const init_token = tree.firstToken(field_init);
+ try renderExtraNewlineToken(ais, tree, init_token - 3);
+ try renderToken(ais, tree, init_token - 3, .none); // .
+ try renderToken(ais, tree, init_token - 2, .space); // name
+ try renderToken(ais, tree, init_token - 1, .space); // =
+ try renderExpression(gpa, ais, tree, field_init, .comma);
+ }
- const body_is_if_block = if_node.body.tag == .If;
- const body_is_block = nodeIsBlock(if_node.body);
+ ais.popIndent();
+ } else {
+ // Render all on one line, no trailing comma.
+ try renderToken(ais, tree, struct_init.ast.lbrace, .space);
+
+ for (struct_init.ast.fields) |field_init| {
+ const init_token = tree.firstToken(field_init);
+ try renderToken(ais, tree, init_token - 3, .none); // .
+ try renderToken(ais, tree, init_token - 2, .space); // name
+ try renderToken(ais, tree, init_token - 1, .space); // =
+ try renderExpression(gpa, ais, tree, field_init, .comma_space);
+ }
+ }
- if (body_is_if_block) {
- try renderExtraNewline(tree, ais, if_node.body);
- } else if (body_is_block) {
- const after_rparen_space = if (if_node.payload == null) Space.BlockStart else Space.Space;
- try renderToken(tree, ais, rparen, after_rparen_space); // )
+ return renderToken(ais, tree, rbrace, space);
+}
- if (if_node.payload) |payload| {
- try renderExpression(allocator, ais, tree, payload, Space.BlockStart); // |x|
- }
+// TODO: handle comments between elements
+fn renderArrayInit(
+ gpa: *Allocator,
+ ais: *Ais,
+ tree: ast.Tree,
+ array_init: ast.full.ArrayInit,
+ space: Space,
+) Error!void {
+ const token_tags = tree.tokens.items(.tag);
+ const token_starts = tree.tokens.items(.start);
+
+ if (array_init.ast.type_expr == 0) {
+ try renderToken(ais, tree, array_init.ast.lbrace - 1, .none); // .
+ } else {
+ try renderExpression(gpa, ais, tree, array_init.ast.type_expr, .none); // T
+ }
- if (if_node.@"else") |@"else"| {
- try renderExpression(allocator, ais, tree, if_node.body, Space.SpaceOrOutdent);
- return renderExpression(allocator, ais, tree, &@"else".base, space);
- } else {
- return renderExpression(allocator, ais, tree, if_node.body, space);
- }
- }
+ if (array_init.ast.elements.len == 0) {
+ ais.pushIndentNextLine();
+ try renderToken(ais, tree, array_init.ast.lbrace, .none); // lbrace
+ ais.popIndent();
+ return renderToken(ais, tree, array_init.ast.lbrace + 1, space); // rbrace
+ }
- const src_has_newline = !tree.tokensOnSameLine(rparen, if_node.body.lastToken());
+ const last_elem = array_init.ast.elements[array_init.ast.elements.len - 1];
+ const last_elem_token = tree.lastToken(last_elem);
+ const trailing_comma = token_tags[last_elem_token + 1] == .comma;
+ const rbrace = if (trailing_comma) last_elem_token + 2 else last_elem_token + 1;
+ assert(token_tags[rbrace] == .r_brace);
+
+ if (array_init.ast.elements.len == 1) {
+ const only_elem = array_init.ast.elements[0];
+ const first_token = tree.firstToken(only_elem);
+ if (token_tags[first_token] != .multiline_string_literal_line and
+ !anythingBetween(tree, last_elem_token, rbrace))
+ {
+ try renderToken(ais, tree, array_init.ast.lbrace, .none);
+ try renderExpression(gpa, ais, tree, only_elem, .none);
+ return renderToken(ais, tree, rbrace, space);
+ }
+ }
- if (src_has_newline) {
- const after_rparen_space = if (if_node.payload == null) Space.Newline else Space.Space;
+ const contains_newlines = !tree.tokensOnSameLine(array_init.ast.lbrace, rbrace);
- {
- ais.pushIndent();
- defer ais.popIndent();
- try renderToken(tree, ais, rparen, after_rparen_space); // )
- }
+ if (!trailing_comma and !contains_newlines) {
+ // Render all on one line, no trailing comma.
+ if (array_init.ast.elements.len == 1) {
+ // If there is only one element, we don't use spaces
+ try renderToken(ais, tree, array_init.ast.lbrace, .none);
+ try renderExpression(gpa, ais, tree, array_init.ast.elements[0], .none);
+ } else {
+ try renderToken(ais, tree, array_init.ast.lbrace, .space);
+ for (array_init.ast.elements) |elem| {
+ try renderExpression(gpa, ais, tree, elem, .comma_space);
+ }
+ }
+ return renderToken(ais, tree, last_elem_token + 1, space); // rbrace
+ }
- if (if_node.payload) |payload| {
- try renderExpression(allocator, ais, tree, payload, Space.Newline);
+ ais.pushIndentNextLine();
+ try renderToken(ais, tree, array_init.ast.lbrace, .newline);
+
+ var expr_index: usize = 0;
+ while (rowSize(tree, array_init.ast.elements[expr_index..], rbrace)) |row_size| {
+ const row_exprs = array_init.ast.elements[expr_index..];
+ // A place to store the width of each expression and its column's maximum
+ const widths = try gpa.alloc(usize, row_exprs.len + row_size);
+ defer gpa.free(widths);
+ mem.set(usize, widths, 0);
+
+ const expr_newlines = try gpa.alloc(bool, row_exprs.len);
+ defer gpa.free(expr_newlines);
+ mem.set(bool, expr_newlines, false);
+
+ const expr_widths = widths[0..row_exprs.len];
+ const column_widths = widths[row_exprs.len..];
+
+ // Find next row with trailing comment (if any) to end the current section.
+ const section_end = sec_end: {
+ var this_line_first_expr: usize = 0;
+ var this_line_size = rowSize(tree, row_exprs, rbrace);
+ for (row_exprs) |expr, i| {
+ // Ignore comment on first line of this section.
+ if (i == 0) continue;
+ const expr_last_token = tree.lastToken(expr);
+ if (tree.tokensOnSameLine(tree.firstToken(row_exprs[0]), expr_last_token))
+ continue;
+ // Track start of line containing comment.
+ if (!tree.tokensOnSameLine(tree.firstToken(row_exprs[this_line_first_expr]), expr_last_token)) {
+ this_line_first_expr = i;
+ this_line_size = rowSize(tree, row_exprs[this_line_first_expr..], rbrace);
}
-
- if (if_node.@"else") |@"else"| {
- const else_is_block = nodeIsBlock(@"else".body);
-
- {
- ais.pushIndent();
- defer ais.popIndent();
- try renderExpression(allocator, ais, tree, if_node.body, Space.Newline);
- }
-
- if (else_is_block) {
- try renderToken(tree, ais, @"else".else_token, Space.Space); // else
-
- if (@"else".payload) |payload| {
- try renderExpression(allocator, ais, tree, payload, Space.Space);
- }
-
- return renderExpression(allocator, ais, tree, @"else".body, space);
- } else {
- const after_else_space = if (@"else".payload == null) Space.Newline else Space.Space;
- try renderToken(tree, ais, @"else".else_token, after_else_space); // else
-
- if (@"else".payload) |payload| {
- try renderExpression(allocator, ais, tree, payload, Space.Newline);
- }
-
- ais.pushIndent();
- defer ais.popIndent();
- return renderExpression(allocator, ais, tree, @"else".body, space);
- }
- } else {
- ais.pushIndent();
- defer ais.popIndent();
- return renderExpression(allocator, ais, tree, if_node.body, space);
+
+ const maybe_comma = expr_last_token + 1;
+ if (token_tags[maybe_comma] == .comma) {
+ if (hasSameLineComment(tree, maybe_comma))
+ break :sec_end i - this_line_size.? + 1;
}
}
+ break :sec_end row_exprs.len;
+ };
+ expr_index += section_end;
- // Single line if statement
+ const section_exprs = row_exprs[0..section_end];
- try renderToken(tree, ais, rparen, Space.Space); // )
+ var sub_expr_buffer = std.ArrayList(u8).init(gpa);
+ defer sub_expr_buffer.deinit();
- if (if_node.payload) |payload| {
- try renderExpression(allocator, ais, tree, payload, Space.Space);
- }
+ var auto_indenting_stream = Ais{
+ .indent_delta = indent_delta,
+ .underlying_writer = sub_expr_buffer.writer(),
+ };
- if (if_node.@"else") |@"else"| {
- try renderExpression(allocator, ais, tree, if_node.body, Space.Space);
- try renderToken(tree, ais, @"else".else_token, Space.Space);
+ // Calculate size of columns in current section
+ var column_counter: usize = 0;
+ var single_line = true;
+ var contains_newline = false;
+ for (section_exprs) |expr, i| {
+ sub_expr_buffer.shrinkRetainingCapacity(0);
+ if (i + 1 < section_exprs.len) {
+ try renderExpression(gpa, &auto_indenting_stream, tree, expr, .none);
+ const width = sub_expr_buffer.items.len;
+ const this_contains_newline = mem.indexOfScalar(u8, sub_expr_buffer.items, '\n') != null;
+ contains_newline = contains_newline or this_contains_newline;
+ expr_widths[i] = width;
+ expr_newlines[i] = this_contains_newline;
+
+ if (!this_contains_newline) {
+ const column = column_counter % row_size;
+ column_widths[column] = std.math.max(column_widths[column], width);
+
+ const expr_last_token = tree.lastToken(expr) + 1;
+ const next_expr = section_exprs[i + 1];
+ column_counter += 1;
+ if (!tree.tokensOnSameLine(expr_last_token, tree.firstToken(next_expr))) single_line = false;
+ } else {
+ single_line = false;
+ column_counter = 0;
+ }
+ } else {
+ try renderExpression(gpa, &auto_indenting_stream, tree, expr, .none);
+ const width = sub_expr_buffer.items.len;
+ contains_newline = contains_newline or mem.indexOfScalar(u8, sub_expr_buffer.items, '\n') != null;
+ expr_widths[i] = width;
+ expr_newlines[i] = contains_newline;
+
+ if (!contains_newline) {
+ const column = column_counter % row_size;
+ column_widths[column] = std.math.max(column_widths[column], width);
+ }
+ break;
+ }
+ }
- if (@"else".payload) |payload| {
- try renderExpression(allocator, ais, tree, payload, Space.Space);
+ // Render exprs in current section.
+ column_counter = 0;
+ var last_col_index: usize = row_size - 1;
+ for (section_exprs) |expr, i| {
+ if (i + 1 < section_exprs.len) {
+ const next_expr = section_exprs[i + 1];
+ try renderExpression(gpa, ais, tree, expr, .none);
+
+ const comma = tree.lastToken(expr) + 1;
+
+ if (column_counter != last_col_index) {
+ if (!expr_newlines[i] and !expr_newlines[i + 1]) {
+ // Neither the current or next expression is multiline
+ try renderToken(ais, tree, comma, .space); // ,
+ assert(column_widths[column_counter % row_size] >= expr_widths[i]);
+ const padding = column_widths[column_counter % row_size] - expr_widths[i];
+ try ais.writer().writeByteNTimes(' ', padding);
+
+ column_counter += 1;
+ continue;
+ }
+ }
+ if (single_line and row_size != 1) {
+ try renderToken(ais, tree, comma, .space); // ,
+ continue;
}
- return renderExpression(allocator, ais, tree, @"else".body, space);
+ column_counter = 0;
+ try renderToken(ais, tree, comma, .newline); // ,
+ try renderExtraNewline(ais, tree, next_expr);
} else {
- return renderExpression(allocator, ais, tree, if_node.body, space);
+ try renderExpression(gpa, ais, tree, expr, .comma); // ,
}
- },
+ }
- .Asm => {
- const asm_node = @fieldParentPtr(ast.Node.Asm, "base", base);
+ if (expr_index == array_init.ast.elements.len)
+ break;
+ }
- try renderToken(tree, ais, asm_node.asm_token, Space.Space); // asm
+ ais.popIndent();
+ return renderToken(ais, tree, rbrace, space); // rbrace
+}
- if (asm_node.volatile_token) |volatile_token| {
- try renderToken(tree, ais, volatile_token, Space.Space); // volatile
- try renderToken(tree, ais, tree.nextToken(volatile_token), Space.None); // (
- } else {
- try renderToken(tree, ais, tree.nextToken(asm_node.asm_token), Space.None); // (
- }
+fn renderContainerDecl(
+ gpa: *Allocator,
+ ais: *Ais,
+ tree: ast.Tree,
+ container_decl_node: ast.Node.Index,
+ container_decl: ast.full.ContainerDecl,
+ space: Space,
+) Error!void {
+ const token_tags = tree.tokens.items(.tag);
+ const node_tags = tree.nodes.items(.tag);
- asmblk: {
- ais.pushIndent();
- defer ais.popIndent();
+ if (container_decl.layout_token) |layout_token| {
+ try renderToken(ais, tree, layout_token, .space);
+ }
- if (asm_node.outputs.len == 0 and asm_node.inputs.len == 0 and asm_node.clobbers.len == 0) {
- try renderExpression(allocator, ais, tree, asm_node.template, Space.None);
- break :asmblk;
- }
+ var lbrace: ast.TokenIndex = undefined;
+ if (container_decl.ast.enum_token) |enum_token| {
+ try renderToken(ais, tree, container_decl.ast.main_token, .none); // union
+ try renderToken(ais, tree, enum_token - 1, .none); // lparen
+ try renderToken(ais, tree, enum_token, .none); // enum
+ if (container_decl.ast.arg != 0) {
+ try renderToken(ais, tree, enum_token + 1, .none); // lparen
+ try renderExpression(gpa, ais, tree, container_decl.ast.arg, .none);
+ const rparen = tree.lastToken(container_decl.ast.arg) + 1;
+ try renderToken(ais, tree, rparen, .none); // rparen
+ try renderToken(ais, tree, rparen + 1, .space); // rparen
+ lbrace = rparen + 2;
+ } else {
+ try renderToken(ais, tree, enum_token + 1, .space); // rparen
+ lbrace = enum_token + 2;
+ }
+ } else if (container_decl.ast.arg != 0) {
+ try renderToken(ais, tree, container_decl.ast.main_token, .none); // union
+ try renderToken(ais, tree, container_decl.ast.main_token + 1, .none); // lparen
+ try renderExpression(gpa, ais, tree, container_decl.ast.arg, .none);
+ const rparen = tree.lastToken(container_decl.ast.arg) + 1;
+ try renderToken(ais, tree, rparen, .space); // rparen
+ lbrace = rparen + 1;
+ } else {
+ try renderToken(ais, tree, container_decl.ast.main_token, .space); // union
+ lbrace = container_decl.ast.main_token + 1;
+ }
- try renderExpression(allocator, ais, tree, asm_node.template, Space.Newline);
+ const rbrace = tree.lastToken(container_decl_node);
+ if (container_decl.ast.members.len == 0) {
+ ais.pushIndentNextLine();
+ if (token_tags[lbrace + 1] == .container_doc_comment) {
+ try renderToken(ais, tree, lbrace, .newline); // lbrace
+ try renderContainerDocComments(ais, tree, lbrace + 1);
+ } else {
+ try renderToken(ais, tree, lbrace, .none); // lbrace
+ }
+ ais.popIndent();
+ return renderToken(ais, tree, rbrace, space); // rbrace
+ }
- ais.setIndentDelta(asm_indent_delta);
- defer ais.setIndentDelta(indent_delta);
+ const src_has_trailing_comma = token_tags[rbrace - 1] == .comma;
+ if (!src_has_trailing_comma) one_line: {
+ // We can only print all the members in-line if all the members are fields.
+ for (container_decl.ast.members) |member| {
+ if (!node_tags[member].isContainerField()) break :one_line;
+ }
+ // All the declarations on the same line.
+ try renderToken(ais, tree, lbrace, .space); // lbrace
+ for (container_decl.ast.members) |member| {
+ try renderMember(gpa, ais, tree, member, .space);
+ }
+ return renderToken(ais, tree, rbrace, space); // rbrace
+ }
- const colon1 = tree.nextToken(asm_node.template.lastToken());
+ // One member per line.
+ ais.pushIndentNextLine();
+ try renderToken(ais, tree, lbrace, .newline); // lbrace
+ if (token_tags[lbrace + 1] == .container_doc_comment) {
+ try renderContainerDocComments(ais, tree, lbrace + 1);
+ }
+ try renderMembers(gpa, ais, tree, container_decl.ast.members);
+ ais.popIndent();
- const colon2 = if (asm_node.outputs.len == 0) blk: {
- try renderToken(tree, ais, colon1, Space.Newline); // :
+ return renderToken(ais, tree, rbrace, space); // rbrace
+}
- break :blk tree.nextToken(colon1);
- } else blk: {
- try renderToken(tree, ais, colon1, Space.Space); // :
+fn renderAsm(
+ gpa: *Allocator,
+ ais: *Ais,
+ tree: ast.Tree,
+ asm_node: ast.full.Asm,
+ space: Space,
+) Error!void {
+ const token_tags = tree.tokens.items(.tag);
- ais.pushIndent();
- defer ais.popIndent();
+ try renderToken(ais, tree, asm_node.ast.asm_token, .space); // asm
- for (asm_node.outputs) |*asm_output, i| {
- if (i + 1 < asm_node.outputs.len) {
- const next_asm_output = asm_node.outputs[i + 1];
- try renderAsmOutput(allocator, ais, tree, asm_output, Space.None);
+ if (asm_node.volatile_token) |volatile_token| {
+ try renderToken(ais, tree, volatile_token, .space); // volatile
+ try renderToken(ais, tree, volatile_token + 1, .none); // lparen
+ } else {
+ try renderToken(ais, tree, asm_node.ast.asm_token + 1, .none); // lparen
+ }
- const comma = tree.prevToken(next_asm_output.firstToken());
- try renderToken(tree, ais, comma, Space.Newline); // ,
- try renderExtraNewlineToken(tree, ais, next_asm_output.firstToken());
- } else if (asm_node.inputs.len == 0 and asm_node.clobbers.len == 0) {
- try renderAsmOutput(allocator, ais, tree, asm_output, Space.Newline);
- break :asmblk;
+ if (asm_node.ast.items.len == 0) {
+ ais.pushIndent();
+ if (asm_node.first_clobber) |first_clobber| {
+ // asm ("foo" ::: "a", "b")
+ // asm ("foo" ::: "a", "b",)
+ try renderExpression(gpa, ais, tree, asm_node.ast.template, .space);
+ // Render the three colons.
+ try renderToken(ais, tree, first_clobber - 3, .none);
+ try renderToken(ais, tree, first_clobber - 2, .none);
+ try renderToken(ais, tree, first_clobber - 1, .space);
+
+ var tok_i = first_clobber;
+ while (true) : (tok_i += 1) {
+ try renderToken(ais, tree, tok_i, .none);
+ tok_i += 1;
+ switch (token_tags[tok_i]) {
+ .r_paren => {
+ ais.popIndent();
+ return renderToken(ais, tree, tok_i, space);
+ },
+ .comma => {
+ if (token_tags[tok_i + 1] == .r_paren) {
+ ais.popIndent();
+ return renderToken(ais, tree, tok_i + 1, space);
} else {
- try renderAsmOutput(allocator, ais, tree, asm_output, Space.Newline);
- const comma_or_colon = tree.nextToken(asm_output.lastToken());
- break :blk switch (tree.token_ids[comma_or_colon]) {
- .Comma => tree.nextToken(comma_or_colon),
- else => comma_or_colon,
- };
+ try renderToken(ais, tree, tok_i, .space);
}
- }
- unreachable;
- };
+ },
+ else => unreachable,
+ }
+ }
+ } else {
+ // asm ("foo")
+ try renderExpression(gpa, ais, tree, asm_node.ast.template, .none);
+ ais.popIndent();
+ return renderToken(ais, tree, asm_node.ast.rparen, space); // rparen
+ }
+ }
- const colon3 = if (asm_node.inputs.len == 0) blk: {
- try renderToken(tree, ais, colon2, Space.Newline); // :
- break :blk tree.nextToken(colon2);
- } else blk: {
- try renderToken(tree, ais, colon2, Space.Space); // :
- ais.pushIndent();
- defer ais.popIndent();
- for (asm_node.inputs) |*asm_input, i| {
- if (i + 1 < asm_node.inputs.len) {
- const next_asm_input = &asm_node.inputs[i + 1];
- try renderAsmInput(allocator, ais, tree, asm_input, Space.None);
-
- const comma = tree.prevToken(next_asm_input.firstToken());
- try renderToken(tree, ais, comma, Space.Newline); // ,
- try renderExtraNewlineToken(tree, ais, next_asm_input.firstToken());
- } else if (asm_node.clobbers.len == 0) {
- try renderAsmInput(allocator, ais, tree, asm_input, Space.Newline);
- break :asmblk;
- } else {
- try renderAsmInput(allocator, ais, tree, asm_input, Space.Newline);
- const comma_or_colon = tree.nextToken(asm_input.lastToken());
- break :blk switch (tree.token_ids[comma_or_colon]) {
- .Comma => tree.nextToken(comma_or_colon),
- else => comma_or_colon,
- };
- }
- }
- unreachable;
+ ais.pushIndent();
+ try renderExpression(gpa, ais, tree, asm_node.ast.template, .newline);
+ ais.setIndentDelta(asm_indent_delta);
+ const colon1 = tree.lastToken(asm_node.ast.template) + 1;
+
+ const colon2 = if (asm_node.outputs.len == 0) colon2: {
+ try renderToken(ais, tree, colon1, .newline); // :
+ break :colon2 colon1 + 1;
+ } else colon2: {
+ try renderToken(ais, tree, colon1, .space); // :
+
+ ais.pushIndent();
+ for (asm_node.outputs) |asm_output, i| {
+ if (i + 1 < asm_node.outputs.len) {
+ const next_asm_output = asm_node.outputs[i + 1];
+ try renderAsmOutput(gpa, ais, tree, asm_output, .none);
+
+ const comma = tree.firstToken(next_asm_output) - 1;
+ try renderToken(ais, tree, comma, .newline); // ,
+ try renderExtraNewlineToken(ais, tree, tree.firstToken(next_asm_output));
+ } else if (asm_node.inputs.len == 0 and asm_node.first_clobber == null) {
+ try renderAsmOutput(gpa, ais, tree, asm_output, .newline);
+ ais.popIndent();
+ ais.setIndentDelta(indent_delta);
+ ais.popIndent();
+ return renderToken(ais, tree, asm_node.ast.rparen, space); // rparen
+ } else {
+ try renderAsmOutput(gpa, ais, tree, asm_output, .newline);
+ const comma_or_colon = tree.lastToken(asm_output) + 1;
+ ais.popIndent();
+ break :colon2 switch (token_tags[comma_or_colon]) {
+ .comma => comma_or_colon + 1,
+ else => comma_or_colon,
};
-
- try renderToken(tree, ais, colon3, Space.Space); // :
- ais.pushIndent();
- defer ais.popIndent();
- for (asm_node.clobbers) |clobber_node, i| {
- if (i + 1 >= asm_node.clobbers.len) {
- try renderExpression(allocator, ais, tree, clobber_node, Space.Newline);
- break :asmblk;
- } else {
- try renderExpression(allocator, ais, tree, clobber_node, Space.None);
- const comma = tree.nextToken(clobber_node.lastToken());
- try renderToken(tree, ais, comma, Space.Space); // ,
- }
- }
}
+ } else unreachable;
+ };
- return renderToken(tree, ais, asm_node.rparen, space);
- },
+ const colon3 = if (asm_node.inputs.len == 0) colon3: {
+ try renderToken(ais, tree, colon2, .newline); // :
+ break :colon3 colon2 + 1;
+ } else colon3: {
+ try renderToken(ais, tree, colon2, .space); // :
+ ais.pushIndent();
+ for (asm_node.inputs) |asm_input, i| {
+ if (i + 1 < asm_node.inputs.len) {
+ const next_asm_input = asm_node.inputs[i + 1];
+ try renderAsmInput(gpa, ais, tree, asm_input, .none);
+
+ const first_token = tree.firstToken(next_asm_input);
+ try renderToken(ais, tree, first_token - 1, .newline); // ,
+ try renderExtraNewlineToken(ais, tree, first_token);
+ } else if (asm_node.first_clobber == null) {
+ try renderAsmInput(gpa, ais, tree, asm_input, .newline);
+ ais.popIndent();
+ ais.setIndentDelta(indent_delta);
+ ais.popIndent();
+ return renderToken(ais, tree, asm_node.ast.rparen, space); // rparen
+ } else {
+ try renderAsmInput(gpa, ais, tree, asm_input, .newline);
+ const comma_or_colon = tree.lastToken(asm_input) + 1;
+ ais.popIndent();
+ break :colon3 switch (token_tags[comma_or_colon]) {
+ .comma => comma_or_colon + 1,
+ else => comma_or_colon,
+ };
+ }
+ }
+ unreachable;
+ };
- .EnumLiteral => {
- const enum_literal = @fieldParentPtr(ast.Node.EnumLiteral, "base", base);
+ try renderToken(ais, tree, colon3, .space); // :
+ const first_clobber = asm_node.first_clobber.?;
+ var tok_i = first_clobber;
+ while (true) {
+ switch (token_tags[tok_i + 1]) {
+ .r_paren => {
+ ais.setIndentDelta(indent_delta);
+ ais.popIndent();
+ try renderToken(ais, tree, tok_i, .newline);
+ return renderToken(ais, tree, tok_i + 1, space);
+ },
+ .comma => {
+ try renderToken(ais, tree, tok_i, .none);
+ try renderToken(ais, tree, tok_i + 1, .space);
+ tok_i += 2;
+ },
+ else => unreachable,
+ }
+ } else unreachable; // TODO shouldn't need this on while(true)
+}
- try renderToken(tree, ais, enum_literal.dot, Space.None); // .
- return renderToken(tree, ais, enum_literal.name, space); // name
- },
+fn renderCall(
+ gpa: *Allocator,
+ ais: *Ais,
+ tree: ast.Tree,
+ call: ast.full.Call,
+ space: Space,
+) Error!void {
+ const token_tags = tree.tokens.items(.tag);
+ const main_tokens = tree.nodes.items(.main_token);
- .ContainerField,
- .Root,
- .VarDecl,
- .Use,
- .TestDecl,
- => unreachable,
+ if (call.async_token) |async_token| {
+ try renderToken(ais, tree, async_token, .space);
+ }
+ try renderExpression(gpa, ais, tree, call.ast.fn_expr, .none);
+
+ const lparen = call.ast.lparen;
+ const params = call.ast.params;
+ if (params.len == 0) {
+ ais.pushIndentNextLine();
+ try renderToken(ais, tree, lparen, .none);
+ ais.popIndent();
+ return renderToken(ais, tree, lparen + 1, space); // )
}
-}
-fn renderArrayType(
- allocator: *mem.Allocator,
- ais: anytype,
- tree: *ast.Tree,
- lbracket: ast.TokenIndex,
- rhs: *ast.Node,
- len_expr: *ast.Node,
- opt_sentinel: ?*ast.Node,
- space: Space,
-) (@TypeOf(ais.*).Error || Error)!void {
- const rbracket = tree.nextToken(if (opt_sentinel) |sentinel|
- sentinel.lastToken()
- else
- len_expr.lastToken());
+ const last_param = params[params.len - 1];
+ const after_last_param_tok = tree.lastToken(last_param) + 1;
+ if (token_tags[after_last_param_tok] == .comma) {
+ ais.pushIndentNextLine();
+ try renderToken(ais, tree, lparen, .newline); // (
+ for (params) |param_node, i| {
+ if (i + 1 < params.len) {
+ try renderExpression(gpa, ais, tree, param_node, .none);
- const starts_with_comment = tree.token_ids[lbracket + 1] == .LineComment;
- const ends_with_comment = tree.token_ids[rbracket - 1] == .LineComment;
- const new_space = if (ends_with_comment) Space.Newline else Space.None;
- {
- const do_indent = (starts_with_comment or ends_with_comment);
- if (do_indent) ais.pushIndent();
- defer if (do_indent) ais.popIndent();
+ // Unindent the comma for multiline string literals.
+ const is_multiline_string =
+ token_tags[tree.firstToken(param_node)] == .multiline_string_literal_line;
+ if (is_multiline_string) ais.popIndent();
- try renderToken(tree, ais, lbracket, Space.None); // [
- try renderExpression(allocator, ais, tree, len_expr, new_space);
+ const comma = tree.lastToken(param_node) + 1;
+ try renderToken(ais, tree, comma, .newline); // ,
- if (starts_with_comment) {
- try ais.maybeInsertNewline();
+ if (is_multiline_string) ais.pushIndent();
+
+ try renderExtraNewline(ais, tree, params[i + 1]);
+ } else {
+ try renderExpression(gpa, ais, tree, param_node, .comma);
+ }
}
- if (opt_sentinel) |sentinel| {
- const colon_token = tree.prevToken(sentinel.firstToken());
- try renderToken(tree, ais, colon_token, Space.None); // :
- try renderExpression(allocator, ais, tree, sentinel, Space.None);
+ ais.popIndent();
+ return renderToken(ais, tree, after_last_param_tok + 1, space); // )
+ }
+
+ try renderToken(ais, tree, lparen, .none); // (
+
+ for (params) |param_node, i| {
+ const first_param_token = tree.firstToken(param_node);
+ if (token_tags[first_param_token] == .multiline_string_literal_line or
+ hasSameLineComment(tree, first_param_token - 1))
+ {
+ ais.pushIndentOneShot();
}
- if (starts_with_comment) {
- try ais.maybeInsertNewline();
+ try renderExpression(gpa, ais, tree, param_node, .none);
+
+ if (i + 1 < params.len) {
+ const comma = tree.lastToken(param_node) + 1;
+ const next_multiline_string =
+ token_tags[tree.firstToken(params[i + 1])] == .multiline_string_literal_line;
+ const comma_space: Space = if (next_multiline_string) .none else .space;
+ try renderToken(ais, tree, comma, comma_space);
}
}
- try renderToken(tree, ais, rbracket, Space.None); // ]
- return renderExpression(allocator, ais, tree, rhs, space);
+ return renderToken(ais, tree, after_last_param_tok, space); // )
}
-fn renderAsmOutput(
- allocator: *mem.Allocator,
- ais: anytype,
- tree: *ast.Tree,
- asm_output: *const ast.Node.Asm.Output,
- space: Space,
-) (@TypeOf(ais.*).Error || Error)!void {
- try ais.writer().writeAll("[");
- try renderExpression(allocator, ais, tree, asm_output.symbolic_name, Space.None);
- try ais.writer().writeAll("] ");
- try renderExpression(allocator, ais, tree, asm_output.constraint, Space.None);
- try ais.writer().writeAll(" (");
-
- switch (asm_output.kind) {
- .Variable => |variable_name| {
- try renderExpression(allocator, ais, tree, &variable_name.base, Space.None);
+/// Renders the given expression indented, popping the indent before rendering
+/// any following line comments
+fn renderExpressionIndented(gpa: *Allocator, ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Space) Error!void {
+ const token_starts = tree.tokens.items(.start);
+ const token_tags = tree.tokens.items(.tag);
+
+ ais.pushIndent();
+
+ var last_token = tree.lastToken(node);
+ const punctuation = switch (space) {
+ .none, .space, .newline, .skip => false,
+ .comma => true,
+ .comma_space => token_tags[last_token + 1] == .comma,
+ .semicolon => token_tags[last_token + 1] == .semicolon,
+ };
+
+ try renderExpression(gpa, ais, tree, node, if (punctuation) .none else .skip);
+
+ switch (space) {
+ .none, .space, .newline, .skip => {},
+ .comma => {
+ if (token_tags[last_token + 1] == .comma) {
+ try renderToken(ais, tree, last_token + 1, .skip);
+ last_token += 1;
+ } else {
+ try ais.writer().writeByte(',');
+ }
+ },
+ .comma_space => if (token_tags[last_token + 1] == .comma) {
+ try renderToken(ais, tree, last_token + 1, .skip);
+ last_token += 1;
},
- .Return => |return_type| {
- try ais.writer().writeAll("-> ");
- try renderExpression(allocator, ais, tree, return_type, Space.None);
+ .semicolon => if (token_tags[last_token + 1] == .semicolon) {
+ try renderToken(ais, tree, last_token + 1, .skip);
+ last_token += 1;
},
}
- return renderToken(tree, ais, asm_output.lastToken(), space); // )
-}
+ ais.popIndent();
-fn renderAsmInput(
- allocator: *mem.Allocator,
- ais: anytype,
- tree: *ast.Tree,
- asm_input: *const ast.Node.Asm.Input,
- space: Space,
-) (@TypeOf(ais.*).Error || Error)!void {
- try ais.writer().writeAll("[");
- try renderExpression(allocator, ais, tree, asm_input.symbolic_name, Space.None);
- try ais.writer().writeAll("] ");
- try renderExpression(allocator, ais, tree, asm_input.constraint, Space.None);
- try ais.writer().writeAll(" (");
- try renderExpression(allocator, ais, tree, asm_input.expr, Space.None);
- return renderToken(tree, ais, asm_input.lastToken(), space); // )
-}
+ if (space == .skip) return;
-fn renderVarDecl(
- allocator: *mem.Allocator,
- ais: anytype,
- tree: *ast.Tree,
- var_decl: *ast.Node.VarDecl,
-) (@TypeOf(ais.*).Error || Error)!void {
- if (var_decl.getVisibToken()) |visib_token| {
- try renderToken(tree, ais, visib_token, Space.Space); // pub
- }
+ const comment_start = token_starts[last_token] + tokenSliceForRender(tree, last_token).len;
+ const comment = try renderComments(ais, tree, comment_start, token_starts[last_token + 1]);
- if (var_decl.getExternExportToken()) |extern_export_token| {
- try renderToken(tree, ais, extern_export_token, Space.Space); // extern
+ if (!comment) switch (space) {
+ .none => {},
+ .space,
+ .comma_space,
+ => try ais.writer().writeByte(' '),
+ .newline,
+ .comma,
+ .semicolon,
+ => try ais.insertNewline(),
+ .skip => unreachable,
+ };
+}
- if (var_decl.getLibName()) |lib_name| {
- try renderExpression(allocator, ais, tree, lib_name, Space.Space); // "lib"
- }
+/// Render an expression, and the comma that follows it, if it is present in the source.
+fn renderExpressionComma(gpa: *Allocator, ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Space) Error!void {
+ const token_tags = tree.tokens.items(.tag);
+ const maybe_comma = tree.lastToken(node) + 1;
+ if (token_tags[maybe_comma] == .comma) {
+ try renderExpression(gpa, ais, tree, node, .none);
+ return renderToken(ais, tree, maybe_comma, space);
+ } else {
+ return renderExpression(gpa, ais, tree, node, space);
}
+}
- if (var_decl.getComptimeToken()) |comptime_token| {
- try renderToken(tree, ais, comptime_token, Space.Space); // comptime
+fn renderTokenComma(ais: *Ais, tree: ast.Tree, token: ast.TokenIndex, space: Space) Error!void {
+ const token_tags = tree.tokens.items(.tag);
+ const maybe_comma = token + 1;
+ if (token_tags[maybe_comma] == .comma) {
+ try renderToken(ais, tree, token, .none);
+ return renderToken(ais, tree, maybe_comma, space);
+ } else {
+ return renderToken(ais, tree, token, space);
}
+}
- if (var_decl.getThreadLocalToken()) |thread_local_token| {
- try renderToken(tree, ais, thread_local_token, Space.Space); // threadlocal
- }
- try renderToken(tree, ais, var_decl.mut_token, Space.Space); // var
+const Space = enum {
+ /// Output the token lexeme only.
+ none,
+ /// Output the token lexeme followed by a single space.
+ space,
+ /// Output the token lexeme followed by a newline.
+ newline,
+ /// If the next token is a comma, render it as well. If not, insert one.
+ /// In either case, a newline will be inserted afterwards.
+ comma,
+ /// Additionally consume the next token if it is a comma.
+ /// In either case, a space will be inserted afterwards.
+ comma_space,
+ /// Additionally consume the next token if it is a semicolon.
+ /// In either case, a newline will be inserted afterwards.
+ semicolon,
+ /// Skip rendering whitespace and comments. If this is used, the caller
+ /// *must* handle whitespace and comments manually.
+ skip,
+};
- const name_space = if (var_decl.getTypeNode() == null and
- (var_decl.getAlignNode() != null or
- var_decl.getSectionNode() != null or
- var_decl.getInitNode() != null))
- Space.Space
- else
- Space.None;
- try renderToken(tree, ais, var_decl.name_token, name_space);
-
- if (var_decl.getTypeNode()) |type_node| {
- try renderToken(tree, ais, tree.nextToken(var_decl.name_token), Space.Space);
- const s = if (var_decl.getAlignNode() != null or
- var_decl.getSectionNode() != null or
- var_decl.getInitNode() != null) Space.Space else Space.None;
- try renderExpression(allocator, ais, tree, type_node, s);
- }
-
- if (var_decl.getAlignNode()) |align_node| {
- const lparen = tree.prevToken(align_node.firstToken());
- const align_kw = tree.prevToken(lparen);
- const rparen = tree.nextToken(align_node.lastToken());
- try renderToken(tree, ais, align_kw, Space.None); // align
- try renderToken(tree, ais, lparen, Space.None); // (
- try renderExpression(allocator, ais, tree, align_node, Space.None);
- const s = if (var_decl.getSectionNode() != null or var_decl.getInitNode() != null) Space.Space else Space.None;
- try renderToken(tree, ais, rparen, s); // )
- }
-
- if (var_decl.getSectionNode()) |section_node| {
- const lparen = tree.prevToken(section_node.firstToken());
- const section_kw = tree.prevToken(lparen);
- const rparen = tree.nextToken(section_node.lastToken());
- try renderToken(tree, ais, section_kw, Space.None); // linksection
- try renderToken(tree, ais, lparen, Space.None); // (
- try renderExpression(allocator, ais, tree, section_node, Space.None);
- const s = if (var_decl.getInitNode() != null) Space.Space else Space.None;
- try renderToken(tree, ais, rparen, s); // )
- }
-
- if (var_decl.getInitNode()) |init_node| {
- const eq_token = var_decl.getEqToken().?;
- const eq_space = blk: {
- const loc = tree.tokenLocation(tree.token_locs[eq_token].end, tree.nextToken(eq_token));
- break :blk if (loc.line == 0) Space.Space else Space.Newline;
- };
+fn renderToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex, space: Space) Error!void {
+ const token_tags = tree.tokens.items(.tag);
+ const token_starts = tree.tokens.items(.start);
- {
- ais.pushIndent();
- defer ais.popIndent();
- try renderToken(tree, ais, eq_token, eq_space); // =
- }
- ais.pushIndentOneShot();
- try renderExpression(allocator, ais, tree, init_node, Space.None);
- }
+ const token_start = token_starts[token_index];
+ const lexeme = tokenSliceForRender(tree, token_index);
- try renderToken(tree, ais, var_decl.semicolon_token, Space.Newline);
-}
+ try ais.writer().writeAll(lexeme);
-fn renderParamDecl(
- allocator: *mem.Allocator,
- ais: anytype,
- tree: *ast.Tree,
- param_decl: ast.Node.FnProto.ParamDecl,
- space: Space,
-) (@TypeOf(ais.*).Error || Error)!void {
- try renderDocComments(tree, ais, param_decl, param_decl.doc_comments);
+ if (space == .skip) return;
- if (param_decl.comptime_token) |comptime_token| {
- try renderToken(tree, ais, comptime_token, Space.Space);
- }
- if (param_decl.noalias_token) |noalias_token| {
- try renderToken(tree, ais, noalias_token, Space.Space);
- }
- if (param_decl.name_token) |name_token| {
- try renderToken(tree, ais, name_token, Space.None);
- try renderToken(tree, ais, tree.nextToken(name_token), Space.Space); // :
- }
- switch (param_decl.param_type) {
- .any_type, .type_expr => |node| try renderExpression(allocator, ais, tree, node, space),
+ if (space == .comma and token_tags[token_index + 1] != .comma) {
+ try ais.writer().writeByte(',');
}
-}
-fn renderStatement(
- allocator: *mem.Allocator,
- ais: anytype,
- tree: *ast.Tree,
- base: *ast.Node,
-) (@TypeOf(ais.*).Error || Error)!void {
- switch (base.tag) {
- .VarDecl => {
- const var_decl = @fieldParentPtr(ast.Node.VarDecl, "base", base);
- try renderVarDecl(allocator, ais, tree, var_decl);
+ const comment = try renderComments(ais, tree, token_start + lexeme.len, token_starts[token_index + 1]);
+ switch (space) {
+ .none => {},
+ .space => if (!comment) try ais.writer().writeByte(' '),
+ .newline => if (!comment) try ais.insertNewline(),
+
+ .comma => if (token_tags[token_index + 1] == .comma) {
+ try renderToken(ais, tree, token_index + 1, .newline);
+ } else if (!comment) {
+ try ais.insertNewline();
},
- else => {
- if (base.requireSemiColon()) {
- try renderExpression(allocator, ais, tree, base, Space.None);
- const semicolon_index = tree.nextToken(base.lastToken());
- assert(tree.token_ids[semicolon_index] == .Semicolon);
- try renderToken(tree, ais, semicolon_index, Space.Newline);
- } else {
- try renderExpression(allocator, ais, tree, base, Space.Newline);
- }
+ .comma_space => if (token_tags[token_index + 1] == .comma) {
+ try renderToken(ais, tree, token_index + 1, .space);
+ } else if (!comment) {
+ try ais.writer().writeByte(' ');
+ },
+
+ .semicolon => if (token_tags[token_index + 1] == .semicolon) {
+ try renderToken(ais, tree, token_index + 1, .newline);
+ } else if (!comment) {
+ try ais.insertNewline();
},
+
+ .skip => unreachable,
}
}
-const Space = enum {
- None,
- Newline,
- Comma,
- Space,
- SpaceOrOutdent,
- NoNewline,
- NoComment,
- BlockStart,
-};
+/// Returns true if there exists a comment between the start of token
+/// `start_token` and the start of token `end_token`. This is used to determine
+/// if e.g. a fn_proto should be wrapped and have a trailing comma inserted
+/// even if there is none in the source.
+fn hasComment(tree: ast.Tree, start_token: ast.TokenIndex, end_token: ast.TokenIndex) bool {
+ const token_starts = tree.tokens.items(.start);
-fn renderTokenOffset(
- tree: *ast.Tree,
- ais: anytype,
- token_index: ast.TokenIndex,
- space: Space,
- token_skip_bytes: usize,
-) (@TypeOf(ais.*).Error || Error)!void {
- if (space == Space.BlockStart) {
- // If placing the lbrace on the current line would cause an uggly gap then put the lbrace on the next line
- const new_space = if (ais.isLineOverIndented()) Space.Newline else Space.Space;
- return renderToken(tree, ais, token_index, new_space);
- }
+ const start = token_starts[start_token];
+ const end = token_starts[end_token];
- var token_loc = tree.token_locs[token_index];
- try ais.writer().writeAll(mem.trimRight(u8, tree.tokenSliceLoc(token_loc)[token_skip_bytes..], " "));
+ return mem.indexOf(u8, tree.source[start..end], "//") != null;
+}
- if (space == Space.NoComment)
- return;
+/// Assumes that start is the first byte past the previous token and
+/// that end is the last byte before the next token.
+fn renderComments(ais: *Ais, tree: ast.Tree, start: usize, end: usize) Error!bool {
+ var index: usize = start;
+ while (mem.indexOf(u8, tree.source[index..end], "//")) |offset| {
+ const comment_start = index + offset;
- var next_token_id = tree.token_ids[token_index + 1];
- var next_token_loc = tree.token_locs[token_index + 1];
+ // If there is no newline, the comment ends with EOF
+ const newline_index = mem.indexOfScalar(u8, tree.source[comment_start..end], '\n');
+ const newline = if (newline_index) |i| comment_start + i else null;
- if (space == Space.Comma) switch (next_token_id) {
- .Comma => return renderToken(tree, ais, token_index + 1, Space.Newline),
- .LineComment => {
- try ais.writer().writeAll(", ");
- return renderToken(tree, ais, token_index + 1, Space.Newline);
- },
- else => {
- if (token_index + 2 < tree.token_ids.len and
- tree.token_ids[token_index + 2] == .MultilineStringLiteralLine)
- {
- try ais.writer().writeAll(",");
- return;
- } else {
- try ais.writer().writeAll(",");
+ const untrimmed_comment = tree.source[comment_start .. newline orelse tree.source.len];
+ const trimmed_comment = mem.trimRight(u8, untrimmed_comment, &std.ascii.spaces);
+
+ // Don't leave any whitespace at the start of the file
+ if (index != 0) {
+ if (index == start and mem.containsAtLeast(u8, tree.source[index..comment_start], 2, "\n")) {
+ // Leave up to one empty line before the first comment
try ais.insertNewline();
- return;
+ try ais.insertNewline();
+ } else if (mem.indexOfScalar(u8, tree.source[index..comment_start], '\n') != null) {
+ // Respect the newline directly before the comment.
+ // Note: This allows an empty line between comments
+ try ais.insertNewline();
+ } else if (index == start) {
+ // Otherwise if the first comment is on the same line as
+ // the token before it, prefix it with a single space.
+ try ais.writer().writeByte(' ');
}
- },
- };
-
- // Skip over same line doc comments
- var offset: usize = 1;
- if (next_token_id == .DocComment) {
- const loc = tree.tokenLocationLoc(token_loc.end, next_token_loc);
- if (loc.line == 0) {
- offset += 1;
- next_token_id = tree.token_ids[token_index + offset];
- next_token_loc = tree.token_locs[token_index + offset];
}
- }
- if (next_token_id != .LineComment) {
- switch (space) {
- Space.None, Space.NoNewline => return,
- Space.Newline => {
- if (next_token_id == .MultilineStringLiteralLine) {
- return;
- } else {
- try ais.insertNewline();
- return;
- }
- },
- Space.Space, Space.SpaceOrOutdent => {
- if (next_token_id == .MultilineStringLiteralLine)
- return;
- try ais.writer().writeByte(' ');
- return;
- },
- Space.NoComment, Space.Comma, Space.BlockStart => unreachable,
- }
- }
+ try ais.writer().print("{s}\n", .{trimmed_comment});
+ index = 1 + (newline orelse return true);
- while (true) {
- const comment_is_empty = mem.trimRight(u8, tree.tokenSliceLoc(next_token_loc), " ").len == 2;
- if (comment_is_empty) {
- switch (space) {
- Space.Newline => {
- offset += 1;
- token_loc = next_token_loc;
- next_token_id = tree.token_ids[token_index + offset];
- next_token_loc = tree.token_locs[token_index + offset];
- if (next_token_id != .LineComment) {
- try ais.insertNewline();
- return;
- }
- },
- else => break,
+ if (ais.disabled_offset) |disabled_offset| {
+ if (mem.eql(u8, trimmed_comment, "// zig fmt: on")) {
+ // write the source for which formatting was disabled directly
+ // to the underlying writer, fixing up invalid whitespace
+ try writeFixingWhitespace(ais.underlying_writer, tree.source[disabled_offset..index]);
+ ais.disabled_offset = null;
}
- } else {
- break;
+ } else if (mem.eql(u8, trimmed_comment, "// zig fmt: off")) {
+ ais.disabled_offset = index;
}
}
- var loc = tree.tokenLocationLoc(token_loc.end, next_token_loc);
- if (loc.line == 0) {
- if (tree.token_ids[token_index] != .MultilineStringLiteralLine) {
- try ais.writer().writeByte(' ');
- }
- try ais.writer().writeAll(mem.trimRight(u8, tree.tokenSliceLoc(next_token_loc), " "));
- offset = 2;
- token_loc = next_token_loc;
- next_token_loc = tree.token_locs[token_index + offset];
- next_token_id = tree.token_ids[token_index + offset];
- if (next_token_id != .LineComment) {
- switch (space) {
- .None, .Space, .SpaceOrOutdent => {
- try ais.insertNewline();
- },
- .Newline => {
- if (next_token_id == .MultilineStringLiteralLine) {
- return;
- } else {
- try ais.insertNewline();
- return;
- }
- },
- .NoNewline => {},
- .NoComment, .Comma, .BlockStart => unreachable,
- }
- return;
- }
- loc = tree.tokenLocationLoc(token_loc.end, next_token_loc);
+ if (index != start and mem.containsAtLeast(u8, tree.source[index - 1 .. end], 2, "\n")) {
+ try ais.insertNewline();
}
- while (true) {
- // translate-c doesn't generate correct newlines
- // in generated code (loc.line == 0) so treat that case
- // as though there was meant to be a newline between the tokens
- var newline_count = if (loc.line <= 1) @as(u8, 1) else @as(u8, 2);
- while (newline_count > 0) : (newline_count -= 1) try ais.insertNewline();
- try ais.writer().writeAll(mem.trimRight(u8, tree.tokenSliceLoc(next_token_loc), " "));
-
- offset += 1;
- token_loc = next_token_loc;
- next_token_loc = tree.token_locs[token_index + offset];
- next_token_id = tree.token_ids[token_index + offset];
- if (next_token_id != .LineComment) {
- switch (space) {
- .Newline => {
- if (next_token_id == .MultilineStringLiteralLine) {
- return;
- } else {
- try ais.insertNewline();
- return;
- }
- },
- .None, .Space, .SpaceOrOutdent => {
- try ais.insertNewline();
- },
- .NoNewline => {},
- .NoComment, .Comma, .BlockStart => unreachable,
- }
- return;
- }
- loc = tree.tokenLocationLoc(token_loc.end, next_token_loc);
+ return index != start;
+}
+
+fn renderExtraNewline(ais: *Ais, tree: ast.Tree, node: ast.Node.Index) Error!void {
+ return renderExtraNewlineToken(ais, tree, tree.firstToken(node));
+}
+
+/// Check if there is an empty line immediately before the given token. If so, render it.
+fn renderExtraNewlineToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex) Error!void {
+ const token_starts = tree.tokens.items(.start);
+ const token_start = token_starts[token_index];
+ if (token_start == 0) return;
+ const prev_token_end = if (token_index == 0)
+ 0
+ else
+ token_starts[token_index - 1] + tokenSliceForRender(tree, token_index - 1).len;
+
+ // If there is a comment present, it will handle the empty line
+ if (mem.indexOf(u8, tree.source[prev_token_end..token_start], "//") != null) return;
+
+ // Iterate backwards to the end of the previous token, stopping if a
+ // non-whitespace character is encountered or two newlines have been found.
+ var i = token_start - 1;
+ var newlines: u2 = 0;
+ while (std.ascii.isSpace(tree.source[i])) : (i -= 1) {
+ if (tree.source[i] == '\n') newlines += 1;
+ if (newlines == 2) return ais.insertNewline();
+ if (i == prev_token_end) break;
}
}
-fn renderToken(
- tree: *ast.Tree,
- ais: anytype,
- token_index: ast.TokenIndex,
- space: Space,
-) (@TypeOf(ais.*).Error || Error)!void {
- return renderTokenOffset(tree, ais, token_index, space, 0);
+/// end_token is the token one past the last doc comment token. This function
+/// searches backwards from there.
+fn renderDocComments(ais: *Ais, tree: ast.Tree, end_token: ast.TokenIndex) Error!void {
+ // Search backwards for the first doc comment.
+ const token_tags = tree.tokens.items(.tag);
+ if (end_token == 0) return;
+ var tok = end_token - 1;
+ while (token_tags[tok] == .doc_comment) {
+ if (tok == 0) break;
+ tok -= 1;
+ } else {
+ tok += 1;
+ }
+ const first_tok = tok;
+ if (first_tok == end_token) return;
+ try renderExtraNewlineToken(ais, tree, first_tok);
+
+ while (token_tags[tok] == .doc_comment) : (tok += 1) {
+ try renderToken(ais, tree, tok, .newline);
+ }
}
-fn renderDocComments(
- tree: *ast.Tree,
- ais: anytype,
- node: anytype,
- doc_comments: ?*ast.Node.DocComment,
-) (@TypeOf(ais.*).Error || Error)!void {
- const comment = doc_comments orelse return;
- return renderDocCommentsToken(tree, ais, comment, node.firstToken());
+/// start_token is first container doc comment token.
+fn renderContainerDocComments(ais: *Ais, tree: ast.Tree, start_token: ast.TokenIndex) Error!void {
+ const token_tags = tree.tokens.items(.tag);
+ var tok = start_token;
+ while (token_tags[tok] == .container_doc_comment) : (tok += 1) {
+ try renderToken(ais, tree, tok, .newline);
+ }
+ // Render extra newline if there is one between final container doc comment and
+ // the next token. If the next token is a doc comment, that code path
+ // will have its own logic to insert a newline.
+ if (token_tags[tok] != .doc_comment) {
+ try renderExtraNewlineToken(ais, tree, tok);
+ }
}
-fn renderDocCommentsToken(
- tree: *ast.Tree,
- ais: anytype,
- comment: *ast.Node.DocComment,
- first_token: ast.TokenIndex,
-) (@TypeOf(ais.*).Error || Error)!void {
- var tok_i = comment.first_line;
- while (true) : (tok_i += 1) {
- switch (tree.token_ids[tok_i]) {
- .DocComment, .ContainerDocComment => {
- if (comment.first_line < first_token) {
- try renderToken(tree, ais, tok_i, Space.Newline);
- } else {
- try renderToken(tree, ais, tok_i, Space.NoComment);
- try ais.insertNewline();
- }
- },
- .LineComment => continue,
- else => break,
- }
+fn tokenSliceForRender(tree: ast.Tree, token_index: ast.TokenIndex) []const u8 {
+ var ret = tree.tokenSlice(token_index);
+ if (tree.tokens.items(.tag)[token_index] == .multiline_string_literal_line) {
+ assert(ret[ret.len - 1] == '\n');
+ ret.len -= 1;
}
+ return ret;
}
-fn nodeIsBlock(base: *const ast.Node) bool {
- return switch (base.tag) {
- .Block,
- .LabeledBlock,
- .If,
- .For,
- .While,
- .Switch,
- => true,
- else => false,
+fn hasSameLineComment(tree: ast.Tree, token_index: ast.TokenIndex) bool {
+ const token_starts = tree.tokens.items(.start);
+ const between_source = tree.source[token_starts[token_index]..token_starts[token_index + 1]];
+ for (between_source) |byte| switch (byte) {
+ '\n' => return false,
+ '/' => return true,
+ else => continue,
+ };
+ return false;
+}
+
+/// Returns `true` if and only if there are any tokens or line comments between
+/// start_token and end_token.
+fn anythingBetween(tree: ast.Tree, start_token: ast.TokenIndex, end_token: ast.TokenIndex) bool {
+ if (start_token + 1 != end_token) return true;
+ const token_starts = tree.tokens.items(.start);
+ const between_source = tree.source[token_starts[start_token]..token_starts[start_token + 1]];
+ for (between_source) |byte| switch (byte) {
+ '/' => return true,
+ else => continue,
};
+ return false;
}
-fn nodeCausesSliceOpSpace(base: *ast.Node) bool {
- return switch (base.tag) {
- .Catch,
- .Add,
- .AddWrap,
- .ArrayCat,
- .ArrayMult,
- .Assign,
- .AssignBitAnd,
- .AssignBitOr,
- .AssignBitShiftLeft,
- .AssignBitShiftRight,
- .AssignBitXor,
- .AssignDiv,
- .AssignSub,
- .AssignSubWrap,
- .AssignMod,
- .AssignAdd,
- .AssignAddWrap,
- .AssignMul,
- .AssignMulWrap,
- .BangEqual,
- .BitAnd,
- .BitOr,
- .BitShiftLeft,
- .BitShiftRight,
- .BitXor,
- .BoolAnd,
- .BoolOr,
- .Div,
- .EqualEqual,
- .ErrorUnion,
- .GreaterOrEqual,
- .GreaterThan,
- .LessOrEqual,
- .LessThan,
- .MergeErrorSets,
- .Mod,
- .Mul,
- .MulWrap,
- .Range,
- .Sub,
- .SubWrap,
- .OrElse,
+fn writeFixingWhitespace(writer: std.ArrayList(u8).Writer, slice: []const u8) Error!void {
+ for (slice) |byte| switch (byte) {
+ '\t' => try writer.writeAll(" " ** 4),
+ '\r' => {},
+ else => try writer.writeByte(byte),
+ };
+}
+
+fn nodeIsBlock(tag: ast.Node.Tag) bool {
+ return switch (tag) {
+ .block,
+ .block_semicolon,
+ .block_two,
+ .block_two_semicolon,
+ .@"if",
+ .if_simple,
+ .@"for",
+ .for_simple,
+ .@"while",
+ .while_simple,
+ .while_cont,
+ .@"switch",
+ .switch_comma,
=> true,
+ else => false,
+ };
+}
+fn nodeIsIf(tag: ast.Node.Tag) bool {
+ return switch (tag) {
+ .@"if", .if_simple => true,
else => false,
};
}
-fn copyFixingWhitespace(ais: anytype, slice: []const u8) @TypeOf(ais.*).Error!void {
- for (slice) |byte| switch (byte) {
- '\t' => try ais.writer().writeAll(" "),
- '\r' => {},
- else => try ais.writer().writeByte(byte),
+fn nodeCausesSliceOpSpace(tag: ast.Node.Tag) bool {
+ return switch (tag) {
+ .@"catch",
+ .add,
+ .add_wrap,
+ .array_cat,
+ .array_mult,
+ .assign,
+ .assign_bit_and,
+ .assign_bit_or,
+ .assign_bit_shift_left,
+ .assign_bit_shift_right,
+ .assign_bit_xor,
+ .assign_div,
+ .assign_sub,
+ .assign_sub_wrap,
+ .assign_mod,
+ .assign_add,
+ .assign_add_wrap,
+ .assign_mul,
+ .assign_mul_wrap,
+ .bang_equal,
+ .bit_and,
+ .bit_or,
+ .bit_shift_left,
+ .bit_shift_right,
+ .bit_xor,
+ .bool_and,
+ .bool_or,
+ .div,
+ .equal_equal,
+ .error_union,
+ .greater_or_equal,
+ .greater_than,
+ .less_or_equal,
+ .less_than,
+ .merge_error_sets,
+ .mod,
+ .mul,
+ .mul_wrap,
+ .sub,
+ .sub_wrap,
+ .@"orelse",
+ => true,
+
+ else => false,
};
}
// Returns the number of nodes in `expr` that are on the same line as `rtoken`,
// or null if they all are on the same line.
-fn rowSize(tree: *ast.Tree, exprs: []*ast.Node, rtoken: ast.TokenIndex) ?usize {
- const first_token = exprs[0].firstToken();
- const first_loc = tree.tokenLocation(tree.token_locs[first_token].start, rtoken);
- if (first_loc.line == 0) {
- const maybe_comma = tree.prevToken(rtoken);
- if (tree.token_ids[maybe_comma] == .Comma)
+fn rowSize(tree: ast.Tree, exprs: []const ast.Node.Index, rtoken: ast.TokenIndex) ?usize {
+ const token_tags = tree.tokens.items(.tag);
+
+ const first_token = tree.firstToken(exprs[0]);
+ if (tree.tokensOnSameLine(first_token, rtoken)) {
+ const maybe_comma = rtoken - 1;
+ if (token_tags[maybe_comma] == .comma)
return 1;
return null; // no newlines
}
@@ -2671,9 +2570,8 @@ fn rowSize(tree: *ast.Tree, exprs: []*ast.Node, rtoken: ast.TokenIndex) ?usize {
var count: usize = 1;
for (exprs) |expr, i| {
if (i + 1 < exprs.len) {
- const expr_last_token = expr.lastToken() + 1;
- const loc = tree.tokenLocation(tree.token_locs[expr_last_token].start, exprs[i + 1].firstToken());
- if (loc.line != 0) return count;
+ const expr_last_token = tree.lastToken(expr) + 1;
+ if (!tree.tokensOnSameLine(expr_last_token, tree.firstToken(exprs[i + 1]))) return count;
count += 1;
} else {
return count;
@@ -2681,3 +2579,150 @@ fn rowSize(tree: *ast.Tree, exprs: []*ast.Node, rtoken: ast.TokenIndex) ?usize {
}
unreachable;
}
+
+/// Automatically inserts indentation of written data by keeping
+/// track of the current indentation level
+fn AutoIndentingStream(comptime UnderlyingWriter: type) type {
+ return struct {
+ const Self = @This();
+ pub const Error = UnderlyingWriter.Error;
+ pub const Writer = std.io.Writer(*Self, Error, write);
+
+ underlying_writer: UnderlyingWriter,
+
+ /// Offset into the source at which formatting has been disabled with
+ /// a `zig fmt: off` comment.
+ ///
+ /// If non-null, the AutoIndentingStream will not write any bytes
+ /// to the underlying writer. It will however continue to track the
+ /// indentation level.
+ disabled_offset: ?usize = null,
+
+ indent_count: usize = 0,
+ indent_delta: usize,
+ current_line_empty: bool = true,
+ /// automatically popped when applied
+ indent_one_shot_count: usize = 0,
+ /// the most recently applied indent
+ applied_indent: usize = 0,
+ /// not used until the next line
+ indent_next_line: usize = 0,
+
+ pub fn writer(self: *Self) Writer {
+ return .{ .context = self };
+ }
+
+ pub fn write(self: *Self, bytes: []const u8) Error!usize {
+ if (bytes.len == 0)
+ return @as(usize, 0);
+
+ try self.applyIndent();
+ return self.writeNoIndent(bytes);
+ }
+
+ // Change the indent delta without changing the final indentation level
+ pub fn setIndentDelta(self: *Self, new_indent_delta: usize) void {
+ if (self.indent_delta == new_indent_delta) {
+ return;
+ } else if (self.indent_delta > new_indent_delta) {
+ assert(self.indent_delta % new_indent_delta == 0);
+ self.indent_count = self.indent_count * (self.indent_delta / new_indent_delta);
+ } else {
+ // assert that the current indentation (in spaces) is a multiple of the new delta
+ assert((self.indent_count * self.indent_delta) % new_indent_delta == 0);
+ self.indent_count = self.indent_count / (new_indent_delta / self.indent_delta);
+ }
+ self.indent_delta = new_indent_delta;
+ }
+
+ fn writeNoIndent(self: *Self, bytes: []const u8) Error!usize {
+ if (bytes.len == 0)
+ return @as(usize, 0);
+
+ if (self.disabled_offset == null) try self.underlying_writer.writeAll(bytes);
+ if (bytes[bytes.len - 1] == '\n')
+ self.resetLine();
+ return bytes.len;
+ }
+
+ pub fn insertNewline(self: *Self) Error!void {
+ _ = try self.writeNoIndent("\n");
+ }
+
+ fn resetLine(self: *Self) void {
+ self.current_line_empty = true;
+ self.indent_next_line = 0;
+ }
+
+ /// Insert a newline unless the current line is blank
+ pub fn maybeInsertNewline(self: *Self) Error!void {
+ if (!self.current_line_empty)
+ try self.insertNewline();
+ }
+
+ /// Push default indentation
+ /// Doesn't actually write any indentation.
+ /// Just primes the stream to be able to write the correct indentation if it needs to.
+ pub fn pushIndent(self: *Self) void {
+ self.indent_count += 1;
+ }
+
+ /// Push an indent that is automatically popped after being applied
+ pub fn pushIndentOneShot(self: *Self) void {
+ self.indent_one_shot_count += 1;
+ self.pushIndent();
+ }
+
+ /// Turns all one-shot indents into regular indents
+ /// Returns number of indents that must now be manually popped
+ pub fn lockOneShotIndent(self: *Self) usize {
+ var locked_count = self.indent_one_shot_count;
+ self.indent_one_shot_count = 0;
+ return locked_count;
+ }
+
+ /// Push an indent that should not take effect until the next line
+ pub fn pushIndentNextLine(self: *Self) void {
+ self.indent_next_line += 1;
+ self.pushIndent();
+ }
+
+ pub fn popIndent(self: *Self) void {
+ assert(self.indent_count != 0);
+ self.indent_count -= 1;
+
+ if (self.indent_next_line > 0)
+ self.indent_next_line -= 1;
+ }
+
+ /// Writes ' ' bytes if the current line is empty
+ fn applyIndent(self: *Self) Error!void {
+ const current_indent = self.currentIndent();
+ if (self.current_line_empty and current_indent > 0) {
+ if (self.disabled_offset == null) {
+ try self.underlying_writer.writeByteNTimes(' ', current_indent);
+ }
+ self.applied_indent = current_indent;
+ }
+
+ self.indent_count -= self.indent_one_shot_count;
+ self.indent_one_shot_count = 0;
+ self.current_line_empty = false;
+ }
+
+ /// Checks to see if the most recent indentation exceeds the currently pushed indents
+ pub fn isLineOverIndented(self: *Self) bool {
+ if (self.current_line_empty) return false;
+ return self.applied_indent > self.currentIndent();
+ }
+
+ fn currentIndent(self: *Self) usize {
+ var indent_current: usize = 0;
+ if (self.indent_count > 0) {
+ const indent_count = self.indent_count - self.indent_next_line;
+ indent_current = indent_count * self.indent_delta;
+ }
+ return indent_current;
+ }
+ };
+}
diff --git a/lib/std/zig/tokenizer.zig b/lib/std/zig/tokenizer.zig
index 083f942db68f..88feabd02175 100644
--- a/lib/std/zig/tokenizer.zig
+++ b/lib/std/zig/tokenizer.zig
@@ -7,7 +7,7 @@ const std = @import("../std.zig");
const mem = std.mem;
pub const Token = struct {
- id: Id,
+ tag: Tag,
loc: Loc,
pub const Loc = struct {
@@ -15,315 +15,315 @@ pub const Token = struct {
end: usize,
};
- pub const keywords = std.ComptimeStringMap(Id, .{
- .{ "align", .Keyword_align },
- .{ "allowzero", .Keyword_allowzero },
- .{ "and", .Keyword_and },
- .{ "anyframe", .Keyword_anyframe },
- .{ "anytype", .Keyword_anytype },
- .{ "asm", .Keyword_asm },
- .{ "async", .Keyword_async },
- .{ "await", .Keyword_await },
- .{ "break", .Keyword_break },
- .{ "callconv", .Keyword_callconv },
- .{ "catch", .Keyword_catch },
- .{ "comptime", .Keyword_comptime },
- .{ "const", .Keyword_const },
- .{ "continue", .Keyword_continue },
- .{ "defer", .Keyword_defer },
- .{ "else", .Keyword_else },
- .{ "enum", .Keyword_enum },
- .{ "errdefer", .Keyword_errdefer },
- .{ "error", .Keyword_error },
- .{ "export", .Keyword_export },
- .{ "extern", .Keyword_extern },
- .{ "false", .Keyword_false },
- .{ "fn", .Keyword_fn },
- .{ "for", .Keyword_for },
- .{ "if", .Keyword_if },
- .{ "inline", .Keyword_inline },
- .{ "noalias", .Keyword_noalias },
- .{ "noasync", .Keyword_nosuspend }, // TODO: remove this
- .{ "noinline", .Keyword_noinline },
- .{ "nosuspend", .Keyword_nosuspend },
- .{ "null", .Keyword_null },
- .{ "opaque", .Keyword_opaque },
- .{ "or", .Keyword_or },
- .{ "orelse", .Keyword_orelse },
- .{ "packed", .Keyword_packed },
- .{ "pub", .Keyword_pub },
- .{ "resume", .Keyword_resume },
- .{ "return", .Keyword_return },
- .{ "linksection", .Keyword_linksection },
- .{ "struct", .Keyword_struct },
- .{ "suspend", .Keyword_suspend },
- .{ "switch", .Keyword_switch },
- .{ "test", .Keyword_test },
- .{ "threadlocal", .Keyword_threadlocal },
- .{ "true", .Keyword_true },
- .{ "try", .Keyword_try },
- .{ "undefined", .Keyword_undefined },
- .{ "union", .Keyword_union },
- .{ "unreachable", .Keyword_unreachable },
- .{ "usingnamespace", .Keyword_usingnamespace },
- .{ "var", .Keyword_var },
- .{ "volatile", .Keyword_volatile },
- .{ "while", .Keyword_while },
+ pub const keywords = std.ComptimeStringMap(Tag, .{
+ .{ "align", .keyword_align },
+ .{ "allowzero", .keyword_allowzero },
+ .{ "and", .keyword_and },
+ .{ "anyframe", .keyword_anyframe },
+ .{ "anytype", .keyword_anytype },
+ .{ "asm", .keyword_asm },
+ .{ "async", .keyword_async },
+ .{ "await", .keyword_await },
+ .{ "break", .keyword_break },
+ .{ "callconv", .keyword_callconv },
+ .{ "catch", .keyword_catch },
+ .{ "comptime", .keyword_comptime },
+ .{ "const", .keyword_const },
+ .{ "continue", .keyword_continue },
+ .{ "defer", .keyword_defer },
+ .{ "else", .keyword_else },
+ .{ "enum", .keyword_enum },
+ .{ "errdefer", .keyword_errdefer },
+ .{ "error", .keyword_error },
+ .{ "export", .keyword_export },
+ .{ "extern", .keyword_extern },
+ .{ "false", .keyword_false },
+ .{ "fn", .keyword_fn },
+ .{ "for", .keyword_for },
+ .{ "if", .keyword_if },
+ .{ "inline", .keyword_inline },
+ .{ "noalias", .keyword_noalias },
+ .{ "noinline", .keyword_noinline },
+ .{ "nosuspend", .keyword_nosuspend },
+ .{ "null", .keyword_null },
+ .{ "opaque", .keyword_opaque },
+ .{ "or", .keyword_or },
+ .{ "orelse", .keyword_orelse },
+ .{ "packed", .keyword_packed },
+ .{ "pub", .keyword_pub },
+ .{ "resume", .keyword_resume },
+ .{ "return", .keyword_return },
+ .{ "linksection", .keyword_linksection },
+ .{ "struct", .keyword_struct },
+ .{ "suspend", .keyword_suspend },
+ .{ "switch", .keyword_switch },
+ .{ "test", .keyword_test },
+ .{ "threadlocal", .keyword_threadlocal },
+ .{ "true", .keyword_true },
+ .{ "try", .keyword_try },
+ .{ "undefined", .keyword_undefined },
+ .{ "union", .keyword_union },
+ .{ "unreachable", .keyword_unreachable },
+ .{ "usingnamespace", .keyword_usingnamespace },
+ .{ "var", .keyword_var },
+ .{ "volatile", .keyword_volatile },
+ .{ "while", .keyword_while },
});
- pub fn getKeyword(bytes: []const u8) ?Id {
+ pub fn getKeyword(bytes: []const u8) ?Tag {
return keywords.get(bytes);
}
- pub const Id = enum {
- Invalid,
- Invalid_ampersands,
- Invalid_periodasterisks,
- Identifier,
- StringLiteral,
- MultilineStringLiteralLine,
- CharLiteral,
- Eof,
- Builtin,
- Bang,
- Pipe,
- PipePipe,
- PipeEqual,
- Equal,
- EqualEqual,
- EqualAngleBracketRight,
- BangEqual,
- LParen,
- RParen,
- Semicolon,
- Percent,
- PercentEqual,
- LBrace,
- RBrace,
- LBracket,
- RBracket,
- Period,
- PeriodAsterisk,
- Ellipsis2,
- Ellipsis3,
- Caret,
- CaretEqual,
- Plus,
- PlusPlus,
- PlusEqual,
- PlusPercent,
- PlusPercentEqual,
- Minus,
- MinusEqual,
- MinusPercent,
- MinusPercentEqual,
- Asterisk,
- AsteriskEqual,
- AsteriskAsterisk,
- AsteriskPercent,
- AsteriskPercentEqual,
- Arrow,
- Colon,
- Slash,
- SlashEqual,
- Comma,
- Ampersand,
- AmpersandEqual,
- QuestionMark,
- AngleBracketLeft,
- AngleBracketLeftEqual,
- AngleBracketAngleBracketLeft,
- AngleBracketAngleBracketLeftEqual,
- AngleBracketRight,
- AngleBracketRightEqual,
- AngleBracketAngleBracketRight,
- AngleBracketAngleBracketRightEqual,
- Tilde,
- IntegerLiteral,
- FloatLiteral,
- LineComment,
- DocComment,
- ContainerDocComment,
- ShebangLine,
- Keyword_align,
- Keyword_allowzero,
- Keyword_and,
- Keyword_anyframe,
- Keyword_anytype,
- Keyword_asm,
- Keyword_async,
- Keyword_await,
- Keyword_break,
- Keyword_callconv,
- Keyword_catch,
- Keyword_comptime,
- Keyword_const,
- Keyword_continue,
- Keyword_defer,
- Keyword_else,
- Keyword_enum,
- Keyword_errdefer,
- Keyword_error,
- Keyword_export,
- Keyword_extern,
- Keyword_false,
- Keyword_fn,
- Keyword_for,
- Keyword_if,
- Keyword_inline,
- Keyword_noalias,
- Keyword_noinline,
- Keyword_nosuspend,
- Keyword_null,
- Keyword_opaque,
- Keyword_or,
- Keyword_orelse,
- Keyword_packed,
- Keyword_pub,
- Keyword_resume,
- Keyword_return,
- Keyword_linksection,
- Keyword_struct,
- Keyword_suspend,
- Keyword_switch,
- Keyword_test,
- Keyword_threadlocal,
- Keyword_true,
- Keyword_try,
- Keyword_undefined,
- Keyword_union,
- Keyword_unreachable,
- Keyword_usingnamespace,
- Keyword_var,
- Keyword_volatile,
- Keyword_while,
-
- pub fn symbol(id: Id) []const u8 {
- return switch (id) {
- .Invalid => "Invalid",
- .Invalid_ampersands => "&&",
- .Invalid_periodasterisks => ".**",
- .Identifier => "Identifier",
- .StringLiteral => "StringLiteral",
- .MultilineStringLiteralLine => "MultilineStringLiteralLine",
- .CharLiteral => "CharLiteral",
- .Eof => "Eof",
- .Builtin => "Builtin",
- .IntegerLiteral => "IntegerLiteral",
- .FloatLiteral => "FloatLiteral",
- .LineComment => "LineComment",
- .DocComment => "DocComment",
- .ContainerDocComment => "ContainerDocComment",
- .ShebangLine => "ShebangLine",
-
- .Bang => "!",
- .Pipe => "|",
- .PipePipe => "||",
- .PipeEqual => "|=",
- .Equal => "=",
- .EqualEqual => "==",
- .EqualAngleBracketRight => "=>",
- .BangEqual => "!=",
- .LParen => "(",
- .RParen => ")",
- .Semicolon => ";",
- .Percent => "%",
- .PercentEqual => "%=",
- .LBrace => "{",
- .RBrace => "}",
- .LBracket => "[",
- .RBracket => "]",
- .Period => ".",
- .PeriodAsterisk => ".*",
- .Ellipsis2 => "..",
- .Ellipsis3 => "...",
- .Caret => "^",
- .CaretEqual => "^=",
- .Plus => "+",
- .PlusPlus => "++",
- .PlusEqual => "+=",
- .PlusPercent => "+%",
- .PlusPercentEqual => "+%=",
- .Minus => "-",
- .MinusEqual => "-=",
- .MinusPercent => "-%",
- .MinusPercentEqual => "-%=",
- .Asterisk => "*",
- .AsteriskEqual => "*=",
- .AsteriskAsterisk => "**",
- .AsteriskPercent => "*%",
- .AsteriskPercentEqual => "*%=",
- .Arrow => "->",
- .Colon => ":",
- .Slash => "/",
- .SlashEqual => "/=",
- .Comma => ",",
- .Ampersand => "&",
- .AmpersandEqual => "&=",
- .QuestionMark => "?",
- .AngleBracketLeft => "<",
- .AngleBracketLeftEqual => "<=",
- .AngleBracketAngleBracketLeft => "<<",
- .AngleBracketAngleBracketLeftEqual => "<<=",
- .AngleBracketRight => ">",
- .AngleBracketRightEqual => ">=",
- .AngleBracketAngleBracketRight => ">>",
- .AngleBracketAngleBracketRightEqual => ">>=",
- .Tilde => "~",
- .Keyword_align => "align",
- .Keyword_allowzero => "allowzero",
- .Keyword_and => "and",
- .Keyword_anyframe => "anyframe",
- .Keyword_anytype => "anytype",
- .Keyword_asm => "asm",
- .Keyword_async => "async",
- .Keyword_await => "await",
- .Keyword_break => "break",
- .Keyword_callconv => "callconv",
- .Keyword_catch => "catch",
- .Keyword_comptime => "comptime",
- .Keyword_const => "const",
- .Keyword_continue => "continue",
- .Keyword_defer => "defer",
- .Keyword_else => "else",
- .Keyword_enum => "enum",
- .Keyword_errdefer => "errdefer",
- .Keyword_error => "error",
- .Keyword_export => "export",
- .Keyword_extern => "extern",
- .Keyword_false => "false",
- .Keyword_fn => "fn",
- .Keyword_for => "for",
- .Keyword_if => "if",
- .Keyword_inline => "inline",
- .Keyword_noalias => "noalias",
- .Keyword_noinline => "noinline",
- .Keyword_nosuspend => "nosuspend",
- .Keyword_null => "null",
- .Keyword_opaque => "opaque",
- .Keyword_or => "or",
- .Keyword_orelse => "orelse",
- .Keyword_packed => "packed",
- .Keyword_pub => "pub",
- .Keyword_resume => "resume",
- .Keyword_return => "return",
- .Keyword_linksection => "linksection",
- .Keyword_struct => "struct",
- .Keyword_suspend => "suspend",
- .Keyword_switch => "switch",
- .Keyword_test => "test",
- .Keyword_threadlocal => "threadlocal",
- .Keyword_true => "true",
- .Keyword_try => "try",
- .Keyword_undefined => "undefined",
- .Keyword_union => "union",
- .Keyword_unreachable => "unreachable",
- .Keyword_usingnamespace => "usingnamespace",
- .Keyword_var => "var",
- .Keyword_volatile => "volatile",
- .Keyword_while => "while",
+ pub const Tag = enum {
+ invalid,
+ invalid_ampersands,
+ invalid_periodasterisks,
+ identifier,
+ string_literal,
+ multiline_string_literal_line,
+ char_literal,
+ eof,
+ builtin,
+ bang,
+ pipe,
+ pipe_pipe,
+ pipe_equal,
+ equal,
+ equal_equal,
+ equal_angle_bracket_right,
+ bang_equal,
+ l_paren,
+ r_paren,
+ semicolon,
+ percent,
+ percent_equal,
+ l_brace,
+ r_brace,
+ l_bracket,
+ r_bracket,
+ period,
+ period_asterisk,
+ ellipsis2,
+ ellipsis3,
+ caret,
+ caret_equal,
+ plus,
+ plus_plus,
+ plus_equal,
+ plus_percent,
+ plus_percent_equal,
+ minus,
+ minus_equal,
+ minus_percent,
+ minus_percent_equal,
+ asterisk,
+ asterisk_equal,
+ asterisk_asterisk,
+ asterisk_percent,
+ asterisk_percent_equal,
+ arrow,
+ colon,
+ slash,
+ slash_equal,
+ comma,
+ ampersand,
+ ampersand_equal,
+ question_mark,
+ angle_bracket_left,
+ angle_bracket_left_equal,
+ angle_bracket_angle_bracket_left,
+ angle_bracket_angle_bracket_left_equal,
+ angle_bracket_right,
+ angle_bracket_right_equal,
+ angle_bracket_angle_bracket_right,
+ angle_bracket_angle_bracket_right_equal,
+ tilde,
+ integer_literal,
+ float_literal,
+ doc_comment,
+ container_doc_comment,
+ keyword_align,
+ keyword_allowzero,
+ keyword_and,
+ keyword_anyframe,
+ keyword_anytype,
+ keyword_asm,
+ keyword_async,
+ keyword_await,
+ keyword_break,
+ keyword_callconv,
+ keyword_catch,
+ keyword_comptime,
+ keyword_const,
+ keyword_continue,
+ keyword_defer,
+ keyword_else,
+ keyword_enum,
+ keyword_errdefer,
+ keyword_error,
+ keyword_export,
+ keyword_extern,
+ keyword_false,
+ keyword_fn,
+ keyword_for,
+ keyword_if,
+ keyword_inline,
+ keyword_noalias,
+ keyword_noinline,
+ keyword_nosuspend,
+ keyword_null,
+ keyword_opaque,
+ keyword_or,
+ keyword_orelse,
+ keyword_packed,
+ keyword_pub,
+ keyword_resume,
+ keyword_return,
+ keyword_linksection,
+ keyword_struct,
+ keyword_suspend,
+ keyword_switch,
+ keyword_test,
+ keyword_threadlocal,
+ keyword_true,
+ keyword_try,
+ keyword_undefined,
+ keyword_union,
+ keyword_unreachable,
+ keyword_usingnamespace,
+ keyword_var,
+ keyword_volatile,
+ keyword_while,
+
+ pub fn lexeme(tag: Tag) ?[]const u8 {
+ return switch (tag) {
+ .invalid,
+ .identifier,
+ .string_literal,
+ .multiline_string_literal_line,
+ .char_literal,
+ .eof,
+ .builtin,
+ .integer_literal,
+ .float_literal,
+ .doc_comment,
+ .container_doc_comment,
+ => null,
+
+ .invalid_ampersands => "&&",
+ .invalid_periodasterisks => ".**",
+ .bang => "!",
+ .pipe => "|",
+ .pipe_pipe => "||",
+ .pipe_equal => "|=",
+ .equal => "=",
+ .equal_equal => "==",
+ .equal_angle_bracket_right => "=>",
+ .bang_equal => "!=",
+ .l_paren => "(",
+ .r_paren => ")",
+ .semicolon => ";",
+ .percent => "%",
+ .percent_equal => "%=",
+ .l_brace => "{",
+ .r_brace => "}",
+ .l_bracket => "[",
+ .r_bracket => "]",
+ .period => ".",
+ .period_asterisk => ".*",
+ .ellipsis2 => "..",
+ .ellipsis3 => "...",
+ .caret => "^",
+ .caret_equal => "^=",
+ .plus => "+",
+ .plus_plus => "++",
+ .plus_equal => "+=",
+ .plus_percent => "+%",
+ .plus_percent_equal => "+%=",
+ .minus => "-",
+ .minus_equal => "-=",
+ .minus_percent => "-%",
+ .minus_percent_equal => "-%=",
+ .asterisk => "*",
+ .asterisk_equal => "*=",
+ .asterisk_asterisk => "**",
+ .asterisk_percent => "*%",
+ .asterisk_percent_equal => "*%=",
+ .arrow => "->",
+ .colon => ":",
+ .slash => "/",
+ .slash_equal => "/=",
+ .comma => ",",
+ .ampersand => "&",
+ .ampersand_equal => "&=",
+ .question_mark => "?",
+ .angle_bracket_left => "<",
+ .angle_bracket_left_equal => "<=",
+ .angle_bracket_angle_bracket_left => "<<",
+ .angle_bracket_angle_bracket_left_equal => "<<=",
+ .angle_bracket_right => ">",
+ .angle_bracket_right_equal => ">=",
+ .angle_bracket_angle_bracket_right => ">>",
+ .angle_bracket_angle_bracket_right_equal => ">>=",
+ .tilde => "~",
+ .keyword_align => "align",
+ .keyword_allowzero => "allowzero",
+ .keyword_and => "and",
+ .keyword_anyframe => "anyframe",
+ .keyword_anytype => "anytype",
+ .keyword_asm => "asm",
+ .keyword_async => "async",
+ .keyword_await => "await",
+ .keyword_break => "break",
+ .keyword_callconv => "callconv",
+ .keyword_catch => "catch",
+ .keyword_comptime => "comptime",
+ .keyword_const => "const",
+ .keyword_continue => "continue",
+ .keyword_defer => "defer",
+ .keyword_else => "else",
+ .keyword_enum => "enum",
+ .keyword_errdefer => "errdefer",
+ .keyword_error => "error",
+ .keyword_export => "export",
+ .keyword_extern => "extern",
+ .keyword_false => "false",
+ .keyword_fn => "fn",
+ .keyword_for => "for",
+ .keyword_if => "if",
+ .keyword_inline => "inline",
+ .keyword_noalias => "noalias",
+ .keyword_noinline => "noinline",
+ .keyword_nosuspend => "nosuspend",
+ .keyword_null => "null",
+ .keyword_opaque => "opaque",
+ .keyword_or => "or",
+ .keyword_orelse => "orelse",
+ .keyword_packed => "packed",
+ .keyword_pub => "pub",
+ .keyword_resume => "resume",
+ .keyword_return => "return",
+ .keyword_linksection => "linksection",
+ .keyword_struct => "struct",
+ .keyword_suspend => "suspend",
+ .keyword_switch => "switch",
+ .keyword_test => "test",
+ .keyword_threadlocal => "threadlocal",
+ .keyword_true => "true",
+ .keyword_try => "try",
+ .keyword_undefined => "undefined",
+ .keyword_union => "union",
+ .keyword_unreachable => "unreachable",
+ .keyword_usingnamespace => "usingnamespace",
+ .keyword_var => "var",
+ .keyword_volatile => "volatile",
+ .keyword_while => "while",
};
}
+
+ pub fn symbol(tag: Tag) []const u8 {
+ return tag.lexeme() orelse @tagName(tag);
+ }
};
};
@@ -334,7 +334,7 @@ pub const Tokenizer = struct {
/// For debugging purposes
pub fn dump(self: *Tokenizer, token: *const Token) void {
- std.debug.warn("{s} \"{s}\"\n", .{ @tagName(token.id), self.buffer[token.start..token.end] });
+ std.debug.warn("{s} \"{s}\"\n", .{ @tagName(token.tag), self.buffer[token.start..token.end] });
}
pub fn init(buffer: []const u8) Tokenizer {
@@ -421,7 +421,7 @@ pub const Tokenizer = struct {
const start_index = self.index;
var state: State = .start;
var result = Token{
- .id = .Eof,
+ .tag = .eof,
.loc = .{
.start = self.index,
.end = undefined,
@@ -438,14 +438,14 @@ pub const Tokenizer = struct {
},
'"' => {
state = .string_literal;
- result.id = .StringLiteral;
+ result.tag = .string_literal;
},
'\'' => {
state = .char_literal;
},
'a'...'z', 'A'...'Z', '_' => {
state = .identifier;
- result.id = .Identifier;
+ result.tag = .identifier;
},
'@' => {
state = .saw_at_sign;
@@ -460,42 +460,42 @@ pub const Tokenizer = struct {
state = .pipe;
},
'(' => {
- result.id = .LParen;
+ result.tag = .l_paren;
self.index += 1;
break;
},
')' => {
- result.id = .RParen;
+ result.tag = .r_paren;
self.index += 1;
break;
},
'[' => {
- result.id = .LBracket;
+ result.tag = .l_bracket;
self.index += 1;
break;
},
']' => {
- result.id = .RBracket;
+ result.tag = .r_bracket;
self.index += 1;
break;
},
';' => {
- result.id = .Semicolon;
+ result.tag = .semicolon;
self.index += 1;
break;
},
',' => {
- result.id = .Comma;
+ result.tag = .comma;
self.index += 1;
break;
},
'?' => {
- result.id = .QuestionMark;
+ result.tag = .question_mark;
self.index += 1;
break;
},
':' => {
- result.id = .Colon;
+ result.tag = .colon;
self.index += 1;
break;
},
@@ -519,20 +519,20 @@ pub const Tokenizer = struct {
},
'\\' => {
state = .backslash;
- result.id = .MultilineStringLiteralLine;
+ result.tag = .multiline_string_literal_line;
},
'{' => {
- result.id = .LBrace;
+ result.tag = .l_brace;
self.index += 1;
break;
},
'}' => {
- result.id = .RBrace;
+ result.tag = .r_brace;
self.index += 1;
break;
},
'~' => {
- result.id = .Tilde;
+ result.tag = .tilde;
self.index += 1;
break;
},
@@ -550,14 +550,14 @@ pub const Tokenizer = struct {
},
'0' => {
state = .zero;
- result.id = .IntegerLiteral;
+ result.tag = .integer_literal;
},
'1'...'9' => {
state = .int_literal_dec;
- result.id = .IntegerLiteral;
+ result.tag = .integer_literal;
},
else => {
- result.id = .Invalid;
+ result.tag = .invalid;
self.index += 1;
break;
},
@@ -565,42 +565,42 @@ pub const Tokenizer = struct {
.saw_at_sign => switch (c) {
'"' => {
- result.id = .Identifier;
+ result.tag = .identifier;
state = .string_literal;
},
else => {
// reinterpret as a builtin
self.index -= 1;
state = .builtin;
- result.id = .Builtin;
+ result.tag = .builtin;
},
},
.ampersand => switch (c) {
'&' => {
- result.id = .Invalid_ampersands;
+ result.tag = .invalid_ampersands;
self.index += 1;
break;
},
'=' => {
- result.id = .AmpersandEqual;
+ result.tag = .ampersand_equal;
self.index += 1;
break;
},
else => {
- result.id = .Ampersand;
+ result.tag = .ampersand;
break;
},
},
.asterisk => switch (c) {
'=' => {
- result.id = .AsteriskEqual;
+ result.tag = .asterisk_equal;
self.index += 1;
break;
},
'*' => {
- result.id = .AsteriskAsterisk;
+ result.tag = .asterisk_asterisk;
self.index += 1;
break;
},
@@ -608,43 +608,43 @@ pub const Tokenizer = struct {
state = .asterisk_percent;
},
else => {
- result.id = .Asterisk;
+ result.tag = .asterisk;
break;
},
},
.asterisk_percent => switch (c) {
'=' => {
- result.id = .AsteriskPercentEqual;
+ result.tag = .asterisk_percent_equal;
self.index += 1;
break;
},
else => {
- result.id = .AsteriskPercent;
+ result.tag = .asterisk_percent;
break;
},
},
.percent => switch (c) {
'=' => {
- result.id = .PercentEqual;
+ result.tag = .percent_equal;
self.index += 1;
break;
},
else => {
- result.id = .Percent;
+ result.tag = .percent;
break;
},
},
.plus => switch (c) {
'=' => {
- result.id = .PlusEqual;
+ result.tag = .plus_equal;
self.index += 1;
break;
},
'+' => {
- result.id = .PlusPlus;
+ result.tag = .plus_plus;
self.index += 1;
break;
},
@@ -652,31 +652,31 @@ pub const Tokenizer = struct {
state = .plus_percent;
},
else => {
- result.id = .Plus;
+ result.tag = .plus;
break;
},
},
.plus_percent => switch (c) {
'=' => {
- result.id = .PlusPercentEqual;
+ result.tag = .plus_percent_equal;
self.index += 1;
break;
},
else => {
- result.id = .PlusPercent;
+ result.tag = .plus_percent;
break;
},
},
.caret => switch (c) {
'=' => {
- result.id = .CaretEqual;
+ result.tag = .caret_equal;
self.index += 1;
break;
},
else => {
- result.id = .Caret;
+ result.tag = .caret;
break;
},
},
@@ -684,8 +684,8 @@ pub const Tokenizer = struct {
.identifier => switch (c) {
'a'...'z', 'A'...'Z', '_', '0'...'9' => {},
else => {
- if (Token.getKeyword(self.buffer[result.loc.start..self.index])) |id| {
- result.id = id;
+ if (Token.getKeyword(self.buffer[result.loc.start..self.index])) |tag| {
+ result.tag = tag;
}
break;
},
@@ -724,7 +724,7 @@ pub const Tokenizer = struct {
state = .char_literal_backslash;
},
'\'', 0x80...0xbf, 0xf8...0xff => {
- result.id = .Invalid;
+ result.tag = .invalid;
break;
},
0xc0...0xdf => { // 110xxxxx
@@ -746,7 +746,7 @@ pub const Tokenizer = struct {
.char_literal_backslash => switch (c) {
'\n' => {
- result.id = .Invalid;
+ result.tag = .invalid;
break;
},
'x' => {
@@ -769,7 +769,7 @@ pub const Tokenizer = struct {
}
},
else => {
- result.id = .Invalid;
+ result.tag = .invalid;
break;
},
},
@@ -780,7 +780,7 @@ pub const Tokenizer = struct {
seen_escape_digits = 0;
},
else => {
- result.id = .Invalid;
+ result.tag = .invalid;
state = .char_literal_unicode_invalid;
},
},
@@ -791,14 +791,14 @@ pub const Tokenizer = struct {
},
'}' => {
if (seen_escape_digits == 0) {
- result.id = .Invalid;
+ result.tag = .invalid;
state = .char_literal_unicode_invalid;
} else {
state = .char_literal_end;
}
},
else => {
- result.id = .Invalid;
+ result.tag = .invalid;
state = .char_literal_unicode_invalid;
},
},
@@ -813,12 +813,12 @@ pub const Tokenizer = struct {
.char_literal_end => switch (c) {
'\'' => {
- result.id = .CharLiteral;
+ result.tag = .char_literal;
self.index += 1;
break;
},
else => {
- result.id = .Invalid;
+ result.tag = .invalid;
break;
},
},
@@ -831,7 +831,7 @@ pub const Tokenizer = struct {
}
},
else => {
- result.id = .Invalid;
+ result.tag = .invalid;
break;
},
},
@@ -847,58 +847,58 @@ pub const Tokenizer = struct {
.bang => switch (c) {
'=' => {
- result.id = .BangEqual;
+ result.tag = .bang_equal;
self.index += 1;
break;
},
else => {
- result.id = .Bang;
+ result.tag = .bang;
break;
},
},
.pipe => switch (c) {
'=' => {
- result.id = .PipeEqual;
+ result.tag = .pipe_equal;
self.index += 1;
break;
},
'|' => {
- result.id = .PipePipe;
+ result.tag = .pipe_pipe;
self.index += 1;
break;
},
else => {
- result.id = .Pipe;
+ result.tag = .pipe;
break;
},
},
.equal => switch (c) {
'=' => {
- result.id = .EqualEqual;
+ result.tag = .equal_equal;
self.index += 1;
break;
},
'>' => {
- result.id = .EqualAngleBracketRight;
+ result.tag = .equal_angle_bracket_right;
self.index += 1;
break;
},
else => {
- result.id = .Equal;
+ result.tag = .equal;
break;
},
},
.minus => switch (c) {
'>' => {
- result.id = .Arrow;
+ result.tag = .arrow;
self.index += 1;
break;
},
'=' => {
- result.id = .MinusEqual;
+ result.tag = .minus_equal;
self.index += 1;
break;
},
@@ -906,19 +906,19 @@ pub const Tokenizer = struct {
state = .minus_percent;
},
else => {
- result.id = .Minus;
+ result.tag = .minus;
break;
},
},
.minus_percent => switch (c) {
'=' => {
- result.id = .MinusPercentEqual;
+ result.tag = .minus_percent_equal;
self.index += 1;
break;
},
else => {
- result.id = .MinusPercent;
+ result.tag = .minus_percent;
break;
},
},
@@ -928,24 +928,24 @@ pub const Tokenizer = struct {
state = .angle_bracket_angle_bracket_left;
},
'=' => {
- result.id = .AngleBracketLeftEqual;
+ result.tag = .angle_bracket_left_equal;
self.index += 1;
break;
},
else => {
- result.id = .AngleBracketLeft;
+ result.tag = .angle_bracket_left;
break;
},
},
.angle_bracket_angle_bracket_left => switch (c) {
'=' => {
- result.id = .AngleBracketAngleBracketLeftEqual;
+ result.tag = .angle_bracket_angle_bracket_left_equal;
self.index += 1;
break;
},
else => {
- result.id = .AngleBracketAngleBracketLeft;
+ result.tag = .angle_bracket_angle_bracket_left;
break;
},
},
@@ -955,24 +955,24 @@ pub const Tokenizer = struct {
state = .angle_bracket_angle_bracket_right;
},
'=' => {
- result.id = .AngleBracketRightEqual;
+ result.tag = .angle_bracket_right_equal;
self.index += 1;
break;
},
else => {
- result.id = .AngleBracketRight;
+ result.tag = .angle_bracket_right;
break;
},
},
.angle_bracket_angle_bracket_right => switch (c) {
'=' => {
- result.id = .AngleBracketAngleBracketRightEqual;
+ result.tag = .angle_bracket_angle_bracket_right_equal;
self.index += 1;
break;
},
else => {
- result.id = .AngleBracketAngleBracketRight;
+ result.tag = .angle_bracket_angle_bracket_right;
break;
},
},
@@ -985,30 +985,30 @@ pub const Tokenizer = struct {
state = .period_asterisk;
},
else => {
- result.id = .Period;
+ result.tag = .period;
break;
},
},
.period_2 => switch (c) {
'.' => {
- result.id = .Ellipsis3;
+ result.tag = .ellipsis3;
self.index += 1;
break;
},
else => {
- result.id = .Ellipsis2;
+ result.tag = .ellipsis2;
break;
},
},
.period_asterisk => switch (c) {
'*' => {
- result.id = .Invalid_periodasterisks;
+ result.tag = .invalid_periodasterisks;
break;
},
else => {
- result.id = .PeriodAsterisk;
+ result.tag = .period_asterisk;
break;
},
},
@@ -1016,15 +1016,14 @@ pub const Tokenizer = struct {
.slash => switch (c) {
'/' => {
state = .line_comment_start;
- result.id = .LineComment;
},
'=' => {
- result.id = .SlashEqual;
+ result.tag = .slash_equal;
self.index += 1;
break;
},
else => {
- result.id = .Slash;
+ result.tag = .slash;
break;
},
},
@@ -1033,10 +1032,13 @@ pub const Tokenizer = struct {
state = .doc_comment_start;
},
'!' => {
- result.id = .ContainerDocComment;
+ result.tag = .container_doc_comment;
state = .container_doc_comment;
},
- '\n' => break,
+ '\n' => {
+ state = .start;
+ result.loc.start = self.index + 1;
+ },
'\t', '\r' => state = .line_comment,
else => {
state = .line_comment;
@@ -1048,20 +1050,28 @@ pub const Tokenizer = struct {
state = .line_comment;
},
'\n' => {
- result.id = .DocComment;
+ result.tag = .doc_comment;
break;
},
'\t', '\r' => {
state = .doc_comment;
- result.id = .DocComment;
+ result.tag = .doc_comment;
},
else => {
state = .doc_comment;
- result.id = .DocComment;
+ result.tag = .doc_comment;
self.checkLiteralCharacter();
},
},
- .line_comment, .doc_comment, .container_doc_comment => switch (c) {
+ .line_comment => switch (c) {
+ '\n' => {
+ state = .start;
+ result.loc.start = self.index + 1;
+ },
+ '\t', '\r' => {},
+ else => self.checkLiteralCharacter(),
+ },
+ .doc_comment, .container_doc_comment => switch (c) {
'\n' => break,
'\t', '\r' => {},
else => self.checkLiteralCharacter(),
@@ -1083,7 +1093,7 @@ pub const Tokenizer = struct {
},
else => {
if (isIdentifierChar(c)) {
- result.id = .Invalid;
+ result.tag = .invalid;
}
break;
},
@@ -1093,7 +1103,7 @@ pub const Tokenizer = struct {
state = .int_literal_bin;
},
else => {
- result.id = .Invalid;
+ result.tag = .invalid;
break;
},
},
@@ -1104,7 +1114,7 @@ pub const Tokenizer = struct {
'0'...'1' => {},
else => {
if (isIdentifierChar(c)) {
- result.id = .Invalid;
+ result.tag = .invalid;
}
break;
},
@@ -1114,7 +1124,7 @@ pub const Tokenizer = struct {
state = .int_literal_oct;
},
else => {
- result.id = .Invalid;
+ result.tag = .invalid;
break;
},
},
@@ -1125,7 +1135,7 @@ pub const Tokenizer = struct {
'0'...'7' => {},
else => {
if (isIdentifierChar(c)) {
- result.id = .Invalid;
+ result.tag = .invalid;
}
break;
},
@@ -1135,7 +1145,7 @@ pub const Tokenizer = struct {
state = .int_literal_dec;
},
else => {
- result.id = .Invalid;
+ result.tag = .invalid;
break;
},
},
@@ -1145,16 +1155,16 @@ pub const Tokenizer = struct {
},
'.' => {
state = .num_dot_dec;
- result.id = .FloatLiteral;
+ result.tag = .float_literal;
},
'e', 'E' => {
state = .float_exponent_unsigned;
- result.id = .FloatLiteral;
+ result.tag = .float_literal;
},
'0'...'9' => {},
else => {
if (isIdentifierChar(c)) {
- result.id = .Invalid;
+ result.tag = .invalid;
}
break;
},
@@ -1164,7 +1174,7 @@ pub const Tokenizer = struct {
state = .int_literal_hex;
},
else => {
- result.id = .Invalid;
+ result.tag = .invalid;
break;
},
},
@@ -1174,23 +1184,23 @@ pub const Tokenizer = struct {
},
'.' => {
state = .num_dot_hex;
- result.id = .FloatLiteral;
+ result.tag = .float_literal;
},
'p', 'P' => {
state = .float_exponent_unsigned;
- result.id = .FloatLiteral;
+ result.tag = .float_literal;
},
'0'...'9', 'a'...'f', 'A'...'F' => {},
else => {
if (isIdentifierChar(c)) {
- result.id = .Invalid;
+ result.tag = .invalid;
}
break;
},
},
.num_dot_dec => switch (c) {
'.' => {
- result.id = .IntegerLiteral;
+ result.tag = .integer_literal;
self.index -= 1;
state = .start;
break;
@@ -1203,14 +1213,14 @@ pub const Tokenizer = struct {
},
else => {
if (isIdentifierChar(c)) {
- result.id = .Invalid;
+ result.tag = .invalid;
}
break;
},
},
.num_dot_hex => switch (c) {
'.' => {
- result.id = .IntegerLiteral;
+ result.tag = .integer_literal;
self.index -= 1;
state = .start;
break;
@@ -1219,12 +1229,12 @@ pub const Tokenizer = struct {
state = .float_exponent_unsigned;
},
'0'...'9', 'a'...'f', 'A'...'F' => {
- result.id = .FloatLiteral;
+ result.tag = .float_literal;
state = .float_fraction_hex;
},
else => {
if (isIdentifierChar(c)) {
- result.id = .Invalid;
+ result.tag = .invalid;
}
break;
},
@@ -1234,7 +1244,7 @@ pub const Tokenizer = struct {
state = .float_fraction_dec;
},
else => {
- result.id = .Invalid;
+ result.tag = .invalid;
break;
},
},
@@ -1248,7 +1258,7 @@ pub const Tokenizer = struct {
'0'...'9' => {},
else => {
if (isIdentifierChar(c)) {
- result.id = .Invalid;
+ result.tag = .invalid;
}
break;
},
@@ -1258,7 +1268,7 @@ pub const Tokenizer = struct {
state = .float_fraction_hex;
},
else => {
- result.id = .Invalid;
+ result.tag = .invalid;
break;
},
},
@@ -1272,7 +1282,7 @@ pub const Tokenizer = struct {
'0'...'9', 'a'...'f', 'A'...'F' => {},
else => {
if (isIdentifierChar(c)) {
- result.id = .Invalid;
+ result.tag = .invalid;
}
break;
},
@@ -1292,7 +1302,7 @@ pub const Tokenizer = struct {
state = .float_exponent_num;
},
else => {
- result.id = .Invalid;
+ result.tag = .invalid;
break;
},
},
@@ -1303,7 +1313,7 @@ pub const Tokenizer = struct {
'0'...'9' => {},
else => {
if (isIdentifierChar(c)) {
- result.id = .Invalid;
+ result.tag = .invalid;
}
break;
},
@@ -1324,21 +1334,20 @@ pub const Tokenizer = struct {
.string_literal, // find this error later
.multiline_string_literal_line,
.builtin,
+ .line_comment,
+ .line_comment_start,
=> {},
.identifier => {
- if (Token.getKeyword(self.buffer[result.loc.start..self.index])) |id| {
- result.id = id;
+ if (Token.getKeyword(self.buffer[result.loc.start..self.index])) |tag| {
+ result.tag = tag;
}
},
- .line_comment, .line_comment_start => {
- result.id = .LineComment;
- },
.doc_comment, .doc_comment_start => {
- result.id = .DocComment;
+ result.tag = .doc_comment;
},
.container_doc_comment => {
- result.id = .ContainerDocComment;
+ result.tag = .container_doc_comment;
},
.int_literal_dec_no_underscore,
@@ -1361,80 +1370,81 @@ pub const Tokenizer = struct {
.char_literal_unicode,
.string_literal_backslash,
=> {
- result.id = .Invalid;
+ result.tag = .invalid;
},
.equal => {
- result.id = .Equal;
+ result.tag = .equal;
},
.bang => {
- result.id = .Bang;
+ result.tag = .bang;
},
.minus => {
- result.id = .Minus;
+ result.tag = .minus;
},
.slash => {
- result.id = .Slash;
+ result.tag = .slash;
},
.zero => {
- result.id = .IntegerLiteral;
+ result.tag = .integer_literal;
},
.ampersand => {
- result.id = .Ampersand;
+ result.tag = .ampersand;
},
.period => {
- result.id = .Period;
+ result.tag = .period;
},
.period_2 => {
- result.id = .Ellipsis2;
+ result.tag = .ellipsis2;
},
.period_asterisk => {
- result.id = .PeriodAsterisk;
+ result.tag = .period_asterisk;
},
.pipe => {
- result.id = .Pipe;
+ result.tag = .pipe;
},
.angle_bracket_angle_bracket_right => {
- result.id = .AngleBracketAngleBracketRight;
+ result.tag = .angle_bracket_angle_bracket_right;
},
.angle_bracket_right => {
- result.id = .AngleBracketRight;
+ result.tag = .angle_bracket_right;
},
.angle_bracket_angle_bracket_left => {
- result.id = .AngleBracketAngleBracketLeft;
+ result.tag = .angle_bracket_angle_bracket_left;
},
.angle_bracket_left => {
- result.id = .AngleBracketLeft;
+ result.tag = .angle_bracket_left;
},
.plus_percent => {
- result.id = .PlusPercent;
+ result.tag = .plus_percent;
},
.plus => {
- result.id = .Plus;
+ result.tag = .plus;
},
.percent => {
- result.id = .Percent;
+ result.tag = .percent;
},
.caret => {
- result.id = .Caret;
+ result.tag = .caret;
},
.asterisk_percent => {
- result.id = .AsteriskPercent;
+ result.tag = .asterisk_percent;
},
.asterisk => {
- result.id = .Asterisk;
+ result.tag = .asterisk;
},
.minus_percent => {
- result.id = .MinusPercent;
+ result.tag = .minus_percent;
},
}
}
- if (result.id == .Eof) {
+ if (result.tag == .eof) {
if (self.pending_invalid_token) |token| {
self.pending_invalid_token = null;
return token;
}
+ result.loc.start = self.index;
}
result.loc.end = self.index;
@@ -1446,7 +1456,7 @@ pub const Tokenizer = struct {
const invalid_length = self.getInvalidCharacterLength();
if (invalid_length == 0) return;
self.pending_invalid_token = .{
- .id = .Invalid,
+ .tag = .invalid,
.loc = .{
.start = self.index,
.end = self.index + invalid_length,
@@ -1493,220 +1503,218 @@ pub const Tokenizer = struct {
};
test "tokenizer" {
- testTokenize("test", &[_]Token.Id{.Keyword_test});
+ testTokenize("test", &.{.keyword_test});
+}
+
+test "line comment followed by top-level comptime" {
+ testTokenize(
+ \\// line comment
+ \\comptime {}
+ \\
+ , &.{
+ .keyword_comptime,
+ .l_brace,
+ .r_brace,
+ });
}
test "tokenizer - unknown length pointer and then c pointer" {
testTokenize(
\\[*]u8
\\[*c]u8
- , &[_]Token.Id{
- .LBracket,
- .Asterisk,
- .RBracket,
- .Identifier,
- .LBracket,
- .Asterisk,
- .Identifier,
- .RBracket,
- .Identifier,
+ , &.{
+ .l_bracket,
+ .asterisk,
+ .r_bracket,
+ .identifier,
+ .l_bracket,
+ .asterisk,
+ .identifier,
+ .r_bracket,
+ .identifier,
});
}
test "tokenizer - code point literal with hex escape" {
testTokenize(
\\'\x1b'
- , &[_]Token.Id{.CharLiteral});
+ , &.{.char_literal});
testTokenize(
\\'\x1'
- , &[_]Token.Id{ .Invalid, .Invalid });
+ , &.{ .invalid, .invalid });
}
test "tokenizer - code point literal with unicode escapes" {
// Valid unicode escapes
testTokenize(
\\'\u{3}'
- , &[_]Token.Id{.CharLiteral});
+ , &.{.char_literal});
testTokenize(
\\'\u{01}'
- , &[_]Token.Id{.CharLiteral});
+ , &.{.char_literal});
testTokenize(
\\'\u{2a}'
- , &[_]Token.Id{.CharLiteral});
+ , &.{.char_literal});
testTokenize(
\\'\u{3f9}'
- , &[_]Token.Id{.CharLiteral});
+ , &.{.char_literal});
testTokenize(
\\'\u{6E09aBc1523}'
- , &[_]Token.Id{.CharLiteral});
+ , &.{.char_literal});
testTokenize(
\\"\u{440}"
- , &[_]Token.Id{.StringLiteral});
+ , &.{.string_literal});
// Invalid unicode escapes
testTokenize(
\\'\u'
- , &[_]Token.Id{.Invalid});
+ , &.{.invalid});
testTokenize(
\\'\u{{'
- , &[_]Token.Id{ .Invalid, .Invalid });
+ , &.{ .invalid, .invalid });
testTokenize(
\\'\u{}'
- , &[_]Token.Id{ .Invalid, .Invalid });
+ , &.{ .invalid, .invalid });
testTokenize(
\\'\u{s}'
- , &[_]Token.Id{ .Invalid, .Invalid });
+ , &.{ .invalid, .invalid });
testTokenize(
\\'\u{2z}'
- , &[_]Token.Id{ .Invalid, .Invalid });
+ , &.{ .invalid, .invalid });
testTokenize(
\\'\u{4a'
- , &[_]Token.Id{.Invalid});
+ , &.{.invalid});
// Test old-style unicode literals
testTokenize(
\\'\u0333'
- , &[_]Token.Id{ .Invalid, .Invalid });
+ , &.{ .invalid, .invalid });
testTokenize(
\\'\U0333'
- , &[_]Token.Id{ .Invalid, .IntegerLiteral, .Invalid });
+ , &.{ .invalid, .integer_literal, .invalid });
}
test "tokenizer - code point literal with unicode code point" {
testTokenize(
\\'💩'
- , &[_]Token.Id{.CharLiteral});
+ , &.{.char_literal});
}
test "tokenizer - float literal e exponent" {
- testTokenize("a = 4.94065645841246544177e-324;\n", &[_]Token.Id{
- .Identifier,
- .Equal,
- .FloatLiteral,
- .Semicolon,
+ testTokenize("a = 4.94065645841246544177e-324;\n", &.{
+ .identifier,
+ .equal,
+ .float_literal,
+ .semicolon,
});
}
test "tokenizer - float literal p exponent" {
- testTokenize("a = 0x1.a827999fcef32p+1022;\n", &[_]Token.Id{
- .Identifier,
- .Equal,
- .FloatLiteral,
- .Semicolon,
+ testTokenize("a = 0x1.a827999fcef32p+1022;\n", &.{
+ .identifier,
+ .equal,
+ .float_literal,
+ .semicolon,
});
}
test "tokenizer - chars" {
- testTokenize("'c'", &[_]Token.Id{.CharLiteral});
+ testTokenize("'c'", &.{.char_literal});
}
test "tokenizer - invalid token characters" {
- testTokenize("#", &[_]Token.Id{.Invalid});
- testTokenize("`", &[_]Token.Id{.Invalid});
- testTokenize("'c", &[_]Token.Id{.Invalid});
- testTokenize("'", &[_]Token.Id{.Invalid});
- testTokenize("''", &[_]Token.Id{ .Invalid, .Invalid });
+ testTokenize("#", &.{.invalid});
+ testTokenize("`", &.{.invalid});
+ testTokenize("'c", &.{.invalid});
+ testTokenize("'", &.{.invalid});
+ testTokenize("''", &.{ .invalid, .invalid });
}
test "tokenizer - invalid literal/comment characters" {
- testTokenize("\"\x00\"", &[_]Token.Id{
- .StringLiteral,
- .Invalid,
+ testTokenize("\"\x00\"", &.{
+ .string_literal,
+ .invalid,
});
- testTokenize("//\x00", &[_]Token.Id{
- .LineComment,
- .Invalid,
+ testTokenize("//\x00", &.{
+ .invalid,
});
- testTokenize("//\x1f", &[_]Token.Id{
- .LineComment,
- .Invalid,
+ testTokenize("//\x1f", &.{
+ .invalid,
});
- testTokenize("//\x7f", &[_]Token.Id{
- .LineComment,
- .Invalid,
+ testTokenize("//\x7f", &.{
+ .invalid,
});
}
test "tokenizer - utf8" {
- testTokenize("//\xc2\x80", &[_]Token.Id{.LineComment});
- testTokenize("//\xf4\x8f\xbf\xbf", &[_]Token.Id{.LineComment});
+ testTokenize("//\xc2\x80", &.{});
+ testTokenize("//\xf4\x8f\xbf\xbf", &.{});
}
test "tokenizer - invalid utf8" {
- testTokenize("//\x80", &[_]Token.Id{
- .LineComment,
- .Invalid,
+ testTokenize("//\x80", &.{
+ .invalid,
});
- testTokenize("//\xbf", &[_]Token.Id{
- .LineComment,
- .Invalid,
+ testTokenize("//\xbf", &.{
+ .invalid,
});
- testTokenize("//\xf8", &[_]Token.Id{
- .LineComment,
- .Invalid,
+ testTokenize("//\xf8", &.{
+ .invalid,
});
- testTokenize("//\xff", &[_]Token.Id{
- .LineComment,
- .Invalid,
+ testTokenize("//\xff", &.{
+ .invalid,
});
- testTokenize("//\xc2\xc0", &[_]Token.Id{
- .LineComment,
- .Invalid,
+ testTokenize("//\xc2\xc0", &.{
+ .invalid,
});
- testTokenize("//\xe0", &[_]Token.Id{
- .LineComment,
- .Invalid,
+ testTokenize("//\xe0", &.{
+ .invalid,
});
- testTokenize("//\xf0", &[_]Token.Id{
- .LineComment,
- .Invalid,
+ testTokenize("//\xf0", &.{
+ .invalid,
});
- testTokenize("//\xf0\x90\x80\xc0", &[_]Token.Id{
- .LineComment,
- .Invalid,
+ testTokenize("//\xf0\x90\x80\xc0", &.{
+ .invalid,
});
}
test "tokenizer - illegal unicode codepoints" {
// unicode newline characters.U+0085, U+2028, U+2029
- testTokenize("//\xc2\x84", &[_]Token.Id{.LineComment});
- testTokenize("//\xc2\x85", &[_]Token.Id{
- .LineComment,
- .Invalid,
+ testTokenize("//\xc2\x84", &.{});
+ testTokenize("//\xc2\x85", &.{
+ .invalid,
});
- testTokenize("//\xc2\x86", &[_]Token.Id{.LineComment});
- testTokenize("//\xe2\x80\xa7", &[_]Token.Id{.LineComment});
- testTokenize("//\xe2\x80\xa8", &[_]Token.Id{
- .LineComment,
- .Invalid,
+ testTokenize("//\xc2\x86", &.{});
+ testTokenize("//\xe2\x80\xa7", &.{});
+ testTokenize("//\xe2\x80\xa8", &.{
+ .invalid,
});
- testTokenize("//\xe2\x80\xa9", &[_]Token.Id{
- .LineComment,
- .Invalid,
+ testTokenize("//\xe2\x80\xa9", &.{
+ .invalid,
});
- testTokenize("//\xe2\x80\xaa", &[_]Token.Id{.LineComment});
+ testTokenize("//\xe2\x80\xaa", &.{});
}
test "tokenizer - string identifier and builtin fns" {
testTokenize(
\\const @"if" = @import("std");
- , &[_]Token.Id{
- .Keyword_const,
- .Identifier,
- .Equal,
- .Builtin,
- .LParen,
- .StringLiteral,
- .RParen,
- .Semicolon,
+ , &.{
+ .keyword_const,
+ .identifier,
+ .equal,
+ .builtin,
+ .l_paren,
+ .string_literal,
+ .r_paren,
+ .semicolon,
});
}
test "tokenizer - multiline string literal with literal tab" {
testTokenize(
\\\\foo bar
- , &[_]Token.Id{
- .MultilineStringLiteralLine,
+ , &.{
+ .multiline_string_literal_line,
});
}
@@ -1718,32 +1726,30 @@ test "tokenizer - comments with literal tab" {
\\// foo
\\/// foo
\\/// /foo
- , &[_]Token.Id{
- .LineComment,
- .ContainerDocComment,
- .DocComment,
- .LineComment,
- .DocComment,
- .DocComment,
+ , &.{
+ .container_doc_comment,
+ .doc_comment,
+ .doc_comment,
+ .doc_comment,
});
}
test "tokenizer - pipe and then invalid" {
- testTokenize("||=", &[_]Token.Id{
- .PipePipe,
- .Equal,
+ testTokenize("||=", &.{
+ .pipe_pipe,
+ .equal,
});
}
test "tokenizer - line comment and doc comment" {
- testTokenize("//", &[_]Token.Id{.LineComment});
- testTokenize("// a / b", &[_]Token.Id{.LineComment});
- testTokenize("// /", &[_]Token.Id{.LineComment});
- testTokenize("/// a", &[_]Token.Id{.DocComment});
- testTokenize("///", &[_]Token.Id{.DocComment});
- testTokenize("////", &[_]Token.Id{.LineComment});
- testTokenize("//!", &[_]Token.Id{.ContainerDocComment});
- testTokenize("//!!", &[_]Token.Id{.ContainerDocComment});
+ testTokenize("//", &.{});
+ testTokenize("// a / b", &.{});
+ testTokenize("// /", &.{});
+ testTokenize("/// a", &.{.doc_comment});
+ testTokenize("///", &.{.doc_comment});
+ testTokenize("////", &.{});
+ testTokenize("//!", &.{.container_doc_comment});
+ testTokenize("//!!", &.{.container_doc_comment});
}
test "tokenizer - line comment followed by identifier" {
@@ -1751,304 +1757,304 @@ test "tokenizer - line comment followed by identifier" {
\\ Unexpected,
\\ // another
\\ Another,
- , &[_]Token.Id{
- .Identifier,
- .Comma,
- .LineComment,
- .Identifier,
- .Comma,
+ , &.{
+ .identifier,
+ .comma,
+ .identifier,
+ .comma,
});
}
test "tokenizer - UTF-8 BOM is recognized and skipped" {
- testTokenize("\xEF\xBB\xBFa;\n", &[_]Token.Id{
- .Identifier,
- .Semicolon,
+ testTokenize("\xEF\xBB\xBFa;\n", &.{
+ .identifier,
+ .semicolon,
});
}
test "correctly parse pointer assignment" {
- testTokenize("b.*=3;\n", &[_]Token.Id{
- .Identifier,
- .PeriodAsterisk,
- .Equal,
- .IntegerLiteral,
- .Semicolon,
+ testTokenize("b.*=3;\n", &.{
+ .identifier,
+ .period_asterisk,
+ .equal,
+ .integer_literal,
+ .semicolon,
});
}
test "correctly parse pointer dereference followed by asterisk" {
- testTokenize("\"b\".* ** 10", &[_]Token.Id{
- .StringLiteral,
- .PeriodAsterisk,
- .AsteriskAsterisk,
- .IntegerLiteral,
+ testTokenize("\"b\".* ** 10", &.{
+ .string_literal,
+ .period_asterisk,
+ .asterisk_asterisk,
+ .integer_literal,
});
- testTokenize("(\"b\".*)** 10", &[_]Token.Id{
- .LParen,
- .StringLiteral,
- .PeriodAsterisk,
- .RParen,
- .AsteriskAsterisk,
- .IntegerLiteral,
+ testTokenize("(\"b\".*)** 10", &.{
+ .l_paren,
+ .string_literal,
+ .period_asterisk,
+ .r_paren,
+ .asterisk_asterisk,
+ .integer_literal,
});
- testTokenize("\"b\".*** 10", &[_]Token.Id{
- .StringLiteral,
- .Invalid_periodasterisks,
- .AsteriskAsterisk,
- .IntegerLiteral,
+ testTokenize("\"b\".*** 10", &.{
+ .string_literal,
+ .invalid_periodasterisks,
+ .asterisk_asterisk,
+ .integer_literal,
});
}
test "tokenizer - range literals" {
- testTokenize("0...9", &[_]Token.Id{ .IntegerLiteral, .Ellipsis3, .IntegerLiteral });
- testTokenize("'0'...'9'", &[_]Token.Id{ .CharLiteral, .Ellipsis3, .CharLiteral });
- testTokenize("0x00...0x09", &[_]Token.Id{ .IntegerLiteral, .Ellipsis3, .IntegerLiteral });
- testTokenize("0b00...0b11", &[_]Token.Id{ .IntegerLiteral, .Ellipsis3, .IntegerLiteral });
- testTokenize("0o00...0o11", &[_]Token.Id{ .IntegerLiteral, .Ellipsis3, .IntegerLiteral });
+ testTokenize("0...9", &.{ .integer_literal, .ellipsis3, .integer_literal });
+ testTokenize("'0'...'9'", &.{ .char_literal, .ellipsis3, .char_literal });
+ testTokenize("0x00...0x09", &.{ .integer_literal, .ellipsis3, .integer_literal });
+ testTokenize("0b00...0b11", &.{ .integer_literal, .ellipsis3, .integer_literal });
+ testTokenize("0o00...0o11", &.{ .integer_literal, .ellipsis3, .integer_literal });
}
test "tokenizer - number literals decimal" {
- testTokenize("0", &[_]Token.Id{.IntegerLiteral});
- testTokenize("1", &[_]Token.Id{.IntegerLiteral});
- testTokenize("2", &[_]Token.Id{.IntegerLiteral});
- testTokenize("3", &[_]Token.Id{.IntegerLiteral});
- testTokenize("4", &[_]Token.Id{.IntegerLiteral});
- testTokenize("5", &[_]Token.Id{.IntegerLiteral});
- testTokenize("6", &[_]Token.Id{.IntegerLiteral});
- testTokenize("7", &[_]Token.Id{.IntegerLiteral});
- testTokenize("8", &[_]Token.Id{.IntegerLiteral});
- testTokenize("9", &[_]Token.Id{.IntegerLiteral});
- testTokenize("1..", &[_]Token.Id{ .IntegerLiteral, .Ellipsis2 });
- testTokenize("0a", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("9b", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("1z", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("1z_1", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("9z3", &[_]Token.Id{ .Invalid, .Identifier });
-
- testTokenize("0_0", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0001", &[_]Token.Id{.IntegerLiteral});
- testTokenize("01234567890", &[_]Token.Id{.IntegerLiteral});
- testTokenize("012_345_6789_0", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0_1_2_3_4_5_6_7_8_9_0", &[_]Token.Id{.IntegerLiteral});
-
- testTokenize("00_", &[_]Token.Id{.Invalid});
- testTokenize("0_0_", &[_]Token.Id{.Invalid});
- testTokenize("0__0", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0_0f", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0_0_f", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0_0_f_00", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("1_,", &[_]Token.Id{ .Invalid, .Comma });
-
- testTokenize("1.", &[_]Token.Id{.FloatLiteral});
- testTokenize("0.0", &[_]Token.Id{.FloatLiteral});
- testTokenize("1.0", &[_]Token.Id{.FloatLiteral});
- testTokenize("10.0", &[_]Token.Id{.FloatLiteral});
- testTokenize("0e0", &[_]Token.Id{.FloatLiteral});
- testTokenize("1e0", &[_]Token.Id{.FloatLiteral});
- testTokenize("1e100", &[_]Token.Id{.FloatLiteral});
- testTokenize("1.e100", &[_]Token.Id{.FloatLiteral});
- testTokenize("1.0e100", &[_]Token.Id{.FloatLiteral});
- testTokenize("1.0e+100", &[_]Token.Id{.FloatLiteral});
- testTokenize("1.0e-100", &[_]Token.Id{.FloatLiteral});
- testTokenize("1_0_0_0.0_0_0_0_0_1e1_0_0_0", &[_]Token.Id{.FloatLiteral});
- testTokenize("1.+", &[_]Token.Id{ .FloatLiteral, .Plus });
-
- testTokenize("1e", &[_]Token.Id{.Invalid});
- testTokenize("1.0e1f0", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("1.0p100", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("1.0p-100", &[_]Token.Id{ .Invalid, .Identifier, .Minus, .IntegerLiteral });
- testTokenize("1.0p1f0", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("1.0_,", &[_]Token.Id{ .Invalid, .Comma });
- testTokenize("1_.0", &[_]Token.Id{ .Invalid, .Period, .IntegerLiteral });
- testTokenize("1._", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("1.a", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("1.z", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("1._0", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("1._+", &[_]Token.Id{ .Invalid, .Identifier, .Plus });
- testTokenize("1._e", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("1.0e", &[_]Token.Id{.Invalid});
- testTokenize("1.0e,", &[_]Token.Id{ .Invalid, .Comma });
- testTokenize("1.0e_", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("1.0e+_", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("1.0e-_", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("1.0e0_+", &[_]Token.Id{ .Invalid, .Plus });
+ testTokenize("0", &.{.integer_literal});
+ testTokenize("1", &.{.integer_literal});
+ testTokenize("2", &.{.integer_literal});
+ testTokenize("3", &.{.integer_literal});
+ testTokenize("4", &.{.integer_literal});
+ testTokenize("5", &.{.integer_literal});
+ testTokenize("6", &.{.integer_literal});
+ testTokenize("7", &.{.integer_literal});
+ testTokenize("8", &.{.integer_literal});
+ testTokenize("9", &.{.integer_literal});
+ testTokenize("1..", &.{ .integer_literal, .ellipsis2 });
+ testTokenize("0a", &.{ .invalid, .identifier });
+ testTokenize("9b", &.{ .invalid, .identifier });
+ testTokenize("1z", &.{ .invalid, .identifier });
+ testTokenize("1z_1", &.{ .invalid, .identifier });
+ testTokenize("9z3", &.{ .invalid, .identifier });
+
+ testTokenize("0_0", &.{.integer_literal});
+ testTokenize("0001", &.{.integer_literal});
+ testTokenize("01234567890", &.{.integer_literal});
+ testTokenize("012_345_6789_0", &.{.integer_literal});
+ testTokenize("0_1_2_3_4_5_6_7_8_9_0", &.{.integer_literal});
+
+ testTokenize("00_", &.{.invalid});
+ testTokenize("0_0_", &.{.invalid});
+ testTokenize("0__0", &.{ .invalid, .identifier });
+ testTokenize("0_0f", &.{ .invalid, .identifier });
+ testTokenize("0_0_f", &.{ .invalid, .identifier });
+ testTokenize("0_0_f_00", &.{ .invalid, .identifier });
+ testTokenize("1_,", &.{ .invalid, .comma });
+
+ testTokenize("1.", &.{.float_literal});
+ testTokenize("0.0", &.{.float_literal});
+ testTokenize("1.0", &.{.float_literal});
+ testTokenize("10.0", &.{.float_literal});
+ testTokenize("0e0", &.{.float_literal});
+ testTokenize("1e0", &.{.float_literal});
+ testTokenize("1e100", &.{.float_literal});
+ testTokenize("1.e100", &.{.float_literal});
+ testTokenize("1.0e100", &.{.float_literal});
+ testTokenize("1.0e+100", &.{.float_literal});
+ testTokenize("1.0e-100", &.{.float_literal});
+ testTokenize("1_0_0_0.0_0_0_0_0_1e1_0_0_0", &.{.float_literal});
+ testTokenize("1.+", &.{ .float_literal, .plus });
+
+ testTokenize("1e", &.{.invalid});
+ testTokenize("1.0e1f0", &.{ .invalid, .identifier });
+ testTokenize("1.0p100", &.{ .invalid, .identifier });
+ testTokenize("1.0p-100", &.{ .invalid, .identifier, .minus, .integer_literal });
+ testTokenize("1.0p1f0", &.{ .invalid, .identifier });
+ testTokenize("1.0_,", &.{ .invalid, .comma });
+ testTokenize("1_.0", &.{ .invalid, .period, .integer_literal });
+ testTokenize("1._", &.{ .invalid, .identifier });
+ testTokenize("1.a", &.{ .invalid, .identifier });
+ testTokenize("1.z", &.{ .invalid, .identifier });
+ testTokenize("1._0", &.{ .invalid, .identifier });
+ testTokenize("1._+", &.{ .invalid, .identifier, .plus });
+ testTokenize("1._e", &.{ .invalid, .identifier });
+ testTokenize("1.0e", &.{.invalid});
+ testTokenize("1.0e,", &.{ .invalid, .comma });
+ testTokenize("1.0e_", &.{ .invalid, .identifier });
+ testTokenize("1.0e+_", &.{ .invalid, .identifier });
+ testTokenize("1.0e-_", &.{ .invalid, .identifier });
+ testTokenize("1.0e0_+", &.{ .invalid, .plus });
}
test "tokenizer - number literals binary" {
- testTokenize("0b0", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0b1", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0b2", &[_]Token.Id{ .Invalid, .IntegerLiteral });
- testTokenize("0b3", &[_]Token.Id{ .Invalid, .IntegerLiteral });
- testTokenize("0b4", &[_]Token.Id{ .Invalid, .IntegerLiteral });
- testTokenize("0b5", &[_]Token.Id{ .Invalid, .IntegerLiteral });
- testTokenize("0b6", &[_]Token.Id{ .Invalid, .IntegerLiteral });
- testTokenize("0b7", &[_]Token.Id{ .Invalid, .IntegerLiteral });
- testTokenize("0b8", &[_]Token.Id{ .Invalid, .IntegerLiteral });
- testTokenize("0b9", &[_]Token.Id{ .Invalid, .IntegerLiteral });
- testTokenize("0ba", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0bb", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0bc", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0bd", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0be", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0bf", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0bz", &[_]Token.Id{ .Invalid, .Identifier });
-
- testTokenize("0b0000_0000", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0b1111_1111", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0b10_10_10_10", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0b0_1_0_1_0_1_0_1", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0b1.", &[_]Token.Id{ .IntegerLiteral, .Period });
- testTokenize("0b1.0", &[_]Token.Id{ .IntegerLiteral, .Period, .IntegerLiteral });
-
- testTokenize("0B0", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0b_", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0b_0", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0b1_", &[_]Token.Id{.Invalid});
- testTokenize("0b0__1", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0b0_1_", &[_]Token.Id{.Invalid});
- testTokenize("0b1e", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0b1p", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0b1e0", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0b1p0", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0b1_,", &[_]Token.Id{ .Invalid, .Comma });
+ testTokenize("0b0", &.{.integer_literal});
+ testTokenize("0b1", &.{.integer_literal});
+ testTokenize("0b2", &.{ .invalid, .integer_literal });
+ testTokenize("0b3", &.{ .invalid, .integer_literal });
+ testTokenize("0b4", &.{ .invalid, .integer_literal });
+ testTokenize("0b5", &.{ .invalid, .integer_literal });
+ testTokenize("0b6", &.{ .invalid, .integer_literal });
+ testTokenize("0b7", &.{ .invalid, .integer_literal });
+ testTokenize("0b8", &.{ .invalid, .integer_literal });
+ testTokenize("0b9", &.{ .invalid, .integer_literal });
+ testTokenize("0ba", &.{ .invalid, .identifier });
+ testTokenize("0bb", &.{ .invalid, .identifier });
+ testTokenize("0bc", &.{ .invalid, .identifier });
+ testTokenize("0bd", &.{ .invalid, .identifier });
+ testTokenize("0be", &.{ .invalid, .identifier });
+ testTokenize("0bf", &.{ .invalid, .identifier });
+ testTokenize("0bz", &.{ .invalid, .identifier });
+
+ testTokenize("0b0000_0000", &.{.integer_literal});
+ testTokenize("0b1111_1111", &.{.integer_literal});
+ testTokenize("0b10_10_10_10", &.{.integer_literal});
+ testTokenize("0b0_1_0_1_0_1_0_1", &.{.integer_literal});
+ testTokenize("0b1.", &.{ .integer_literal, .period });
+ testTokenize("0b1.0", &.{ .integer_literal, .period, .integer_literal });
+
+ testTokenize("0B0", &.{ .invalid, .identifier });
+ testTokenize("0b_", &.{ .invalid, .identifier });
+ testTokenize("0b_0", &.{ .invalid, .identifier });
+ testTokenize("0b1_", &.{.invalid});
+ testTokenize("0b0__1", &.{ .invalid, .identifier });
+ testTokenize("0b0_1_", &.{.invalid});
+ testTokenize("0b1e", &.{ .invalid, .identifier });
+ testTokenize("0b1p", &.{ .invalid, .identifier });
+ testTokenize("0b1e0", &.{ .invalid, .identifier });
+ testTokenize("0b1p0", &.{ .invalid, .identifier });
+ testTokenize("0b1_,", &.{ .invalid, .comma });
}
test "tokenizer - number literals octal" {
- testTokenize("0o0", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0o1", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0o2", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0o3", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0o4", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0o5", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0o6", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0o7", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0o8", &[_]Token.Id{ .Invalid, .IntegerLiteral });
- testTokenize("0o9", &[_]Token.Id{ .Invalid, .IntegerLiteral });
- testTokenize("0oa", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0ob", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0oc", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0od", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0oe", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0of", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0oz", &[_]Token.Id{ .Invalid, .Identifier });
-
- testTokenize("0o01234567", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0o0123_4567", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0o01_23_45_67", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0o0_1_2_3_4_5_6_7", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0o7.", &[_]Token.Id{ .IntegerLiteral, .Period });
- testTokenize("0o7.0", &[_]Token.Id{ .IntegerLiteral, .Period, .IntegerLiteral });
-
- testTokenize("0O0", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0o_", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0o_0", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0o1_", &[_]Token.Id{.Invalid});
- testTokenize("0o0__1", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0o0_1_", &[_]Token.Id{.Invalid});
- testTokenize("0o1e", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0o1p", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0o1e0", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0o1p0", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0o_,", &[_]Token.Id{ .Invalid, .Identifier, .Comma });
+ testTokenize("0o0", &.{.integer_literal});
+ testTokenize("0o1", &.{.integer_literal});
+ testTokenize("0o2", &.{.integer_literal});
+ testTokenize("0o3", &.{.integer_literal});
+ testTokenize("0o4", &.{.integer_literal});
+ testTokenize("0o5", &.{.integer_literal});
+ testTokenize("0o6", &.{.integer_literal});
+ testTokenize("0o7", &.{.integer_literal});
+ testTokenize("0o8", &.{ .invalid, .integer_literal });
+ testTokenize("0o9", &.{ .invalid, .integer_literal });
+ testTokenize("0oa", &.{ .invalid, .identifier });
+ testTokenize("0ob", &.{ .invalid, .identifier });
+ testTokenize("0oc", &.{ .invalid, .identifier });
+ testTokenize("0od", &.{ .invalid, .identifier });
+ testTokenize("0oe", &.{ .invalid, .identifier });
+ testTokenize("0of", &.{ .invalid, .identifier });
+ testTokenize("0oz", &.{ .invalid, .identifier });
+
+ testTokenize("0o01234567", &.{.integer_literal});
+ testTokenize("0o0123_4567", &.{.integer_literal});
+ testTokenize("0o01_23_45_67", &.{.integer_literal});
+ testTokenize("0o0_1_2_3_4_5_6_7", &.{.integer_literal});
+ testTokenize("0o7.", &.{ .integer_literal, .period });
+ testTokenize("0o7.0", &.{ .integer_literal, .period, .integer_literal });
+
+ testTokenize("0O0", &.{ .invalid, .identifier });
+ testTokenize("0o_", &.{ .invalid, .identifier });
+ testTokenize("0o_0", &.{ .invalid, .identifier });
+ testTokenize("0o1_", &.{.invalid});
+ testTokenize("0o0__1", &.{ .invalid, .identifier });
+ testTokenize("0o0_1_", &.{.invalid});
+ testTokenize("0o1e", &.{ .invalid, .identifier });
+ testTokenize("0o1p", &.{ .invalid, .identifier });
+ testTokenize("0o1e0", &.{ .invalid, .identifier });
+ testTokenize("0o1p0", &.{ .invalid, .identifier });
+ testTokenize("0o_,", &.{ .invalid, .identifier, .comma });
}
test "tokenizer - number literals hexadeciaml" {
- testTokenize("0x0", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0x1", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0x2", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0x3", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0x4", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0x5", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0x6", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0x7", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0x8", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0x9", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0xa", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0xb", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0xc", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0xd", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0xe", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0xf", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0xA", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0xB", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0xC", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0xD", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0xE", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0xF", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0x0z", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0xz", &[_]Token.Id{ .Invalid, .Identifier });
-
- testTokenize("0x0123456789ABCDEF", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0x0123_4567_89AB_CDEF", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0x01_23_45_67_89AB_CDE_F", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0x0_1_2_3_4_5_6_7_8_9_A_B_C_D_E_F", &[_]Token.Id{.IntegerLiteral});
-
- testTokenize("0X0", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0x_", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0x_1", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0x1_", &[_]Token.Id{.Invalid});
- testTokenize("0x0__1", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0x0_1_", &[_]Token.Id{.Invalid});
- testTokenize("0x_,", &[_]Token.Id{ .Invalid, .Identifier, .Comma });
-
- testTokenize("0x1.", &[_]Token.Id{.FloatLiteral});
- testTokenize("0x1.0", &[_]Token.Id{.FloatLiteral});
- testTokenize("0xF.", &[_]Token.Id{.FloatLiteral});
- testTokenize("0xF.0", &[_]Token.Id{.FloatLiteral});
- testTokenize("0xF.F", &[_]Token.Id{.FloatLiteral});
- testTokenize("0xF.Fp0", &[_]Token.Id{.FloatLiteral});
- testTokenize("0xF.FP0", &[_]Token.Id{.FloatLiteral});
- testTokenize("0x1p0", &[_]Token.Id{.FloatLiteral});
- testTokenize("0xfp0", &[_]Token.Id{.FloatLiteral});
- testTokenize("0x1.+0xF.", &[_]Token.Id{ .FloatLiteral, .Plus, .FloatLiteral });
-
- testTokenize("0x0123456.789ABCDEF", &[_]Token.Id{.FloatLiteral});
- testTokenize("0x0_123_456.789_ABC_DEF", &[_]Token.Id{.FloatLiteral});
- testTokenize("0x0_1_2_3_4_5_6.7_8_9_A_B_C_D_E_F", &[_]Token.Id{.FloatLiteral});
- testTokenize("0x0p0", &[_]Token.Id{.FloatLiteral});
- testTokenize("0x0.0p0", &[_]Token.Id{.FloatLiteral});
- testTokenize("0xff.ffp10", &[_]Token.Id{.FloatLiteral});
- testTokenize("0xff.ffP10", &[_]Token.Id{.FloatLiteral});
- testTokenize("0xff.p10", &[_]Token.Id{.FloatLiteral});
- testTokenize("0xffp10", &[_]Token.Id{.FloatLiteral});
- testTokenize("0xff_ff.ff_ffp1_0_0_0", &[_]Token.Id{.FloatLiteral});
- testTokenize("0xf_f_f_f.f_f_f_fp+1_000", &[_]Token.Id{.FloatLiteral});
- testTokenize("0xf_f_f_f.f_f_f_fp-1_00_0", &[_]Token.Id{.FloatLiteral});
-
- testTokenize("0x1e", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0x1e0", &[_]Token.Id{.IntegerLiteral});
- testTokenize("0x1p", &[_]Token.Id{.Invalid});
- testTokenize("0xfp0z1", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0xff.ffpff", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0x0.p", &[_]Token.Id{.Invalid});
- testTokenize("0x0.z", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0x0._", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0x0_.0", &[_]Token.Id{ .Invalid, .Period, .IntegerLiteral });
- testTokenize("0x0_.0.0", &[_]Token.Id{ .Invalid, .Period, .FloatLiteral });
- testTokenize("0x0._0", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0x0.0_", &[_]Token.Id{.Invalid});
- testTokenize("0x0_p0", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0x0_.p0", &[_]Token.Id{ .Invalid, .Period, .Identifier });
- testTokenize("0x0._p0", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0x0.0_p0", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0x0._0p0", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0x0.0p_0", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0x0.0p+_0", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0x0.0p-_0", &[_]Token.Id{ .Invalid, .Identifier });
- testTokenize("0x0.0p0_", &[_]Token.Id{ .Invalid, .Eof });
+ testTokenize("0x0", &.{.integer_literal});
+ testTokenize("0x1", &.{.integer_literal});
+ testTokenize("0x2", &.{.integer_literal});
+ testTokenize("0x3", &.{.integer_literal});
+ testTokenize("0x4", &.{.integer_literal});
+ testTokenize("0x5", &.{.integer_literal});
+ testTokenize("0x6", &.{.integer_literal});
+ testTokenize("0x7", &.{.integer_literal});
+ testTokenize("0x8", &.{.integer_literal});
+ testTokenize("0x9", &.{.integer_literal});
+ testTokenize("0xa", &.{.integer_literal});
+ testTokenize("0xb", &.{.integer_literal});
+ testTokenize("0xc", &.{.integer_literal});
+ testTokenize("0xd", &.{.integer_literal});
+ testTokenize("0xe", &.{.integer_literal});
+ testTokenize("0xf", &.{.integer_literal});
+ testTokenize("0xA", &.{.integer_literal});
+ testTokenize("0xB", &.{.integer_literal});
+ testTokenize("0xC", &.{.integer_literal});
+ testTokenize("0xD", &.{.integer_literal});
+ testTokenize("0xE", &.{.integer_literal});
+ testTokenize("0xF", &.{.integer_literal});
+ testTokenize("0x0z", &.{ .invalid, .identifier });
+ testTokenize("0xz", &.{ .invalid, .identifier });
+
+ testTokenize("0x0123456789ABCDEF", &.{.integer_literal});
+ testTokenize("0x0123_4567_89AB_CDEF", &.{.integer_literal});
+ testTokenize("0x01_23_45_67_89AB_CDE_F", &.{.integer_literal});
+ testTokenize("0x0_1_2_3_4_5_6_7_8_9_A_B_C_D_E_F", &.{.integer_literal});
+
+ testTokenize("0X0", &.{ .invalid, .identifier });
+ testTokenize("0x_", &.{ .invalid, .identifier });
+ testTokenize("0x_1", &.{ .invalid, .identifier });
+ testTokenize("0x1_", &.{.invalid});
+ testTokenize("0x0__1", &.{ .invalid, .identifier });
+ testTokenize("0x0_1_", &.{.invalid});
+ testTokenize("0x_,", &.{ .invalid, .identifier, .comma });
+
+ testTokenize("0x1.", &.{.float_literal});
+ testTokenize("0x1.0", &.{.float_literal});
+ testTokenize("0xF.", &.{.float_literal});
+ testTokenize("0xF.0", &.{.float_literal});
+ testTokenize("0xF.F", &.{.float_literal});
+ testTokenize("0xF.Fp0", &.{.float_literal});
+ testTokenize("0xF.FP0", &.{.float_literal});
+ testTokenize("0x1p0", &.{.float_literal});
+ testTokenize("0xfp0", &.{.float_literal});
+ testTokenize("0x1.+0xF.", &.{ .float_literal, .plus, .float_literal });
+
+ testTokenize("0x0123456.789ABCDEF", &.{.float_literal});
+ testTokenize("0x0_123_456.789_ABC_DEF", &.{.float_literal});
+ testTokenize("0x0_1_2_3_4_5_6.7_8_9_A_B_C_D_E_F", &.{.float_literal});
+ testTokenize("0x0p0", &.{.float_literal});
+ testTokenize("0x0.0p0", &.{.float_literal});
+ testTokenize("0xff.ffp10", &.{.float_literal});
+ testTokenize("0xff.ffP10", &.{.float_literal});
+ testTokenize("0xff.p10", &.{.float_literal});
+ testTokenize("0xffp10", &.{.float_literal});
+ testTokenize("0xff_ff.ff_ffp1_0_0_0", &.{.float_literal});
+ testTokenize("0xf_f_f_f.f_f_f_fp+1_000", &.{.float_literal});
+ testTokenize("0xf_f_f_f.f_f_f_fp-1_00_0", &.{.float_literal});
+
+ testTokenize("0x1e", &.{.integer_literal});
+ testTokenize("0x1e0", &.{.integer_literal});
+ testTokenize("0x1p", &.{.invalid});
+ testTokenize("0xfp0z1", &.{ .invalid, .identifier });
+ testTokenize("0xff.ffpff", &.{ .invalid, .identifier });
+ testTokenize("0x0.p", &.{.invalid});
+ testTokenize("0x0.z", &.{ .invalid, .identifier });
+ testTokenize("0x0._", &.{ .invalid, .identifier });
+ testTokenize("0x0_.0", &.{ .invalid, .period, .integer_literal });
+ testTokenize("0x0_.0.0", &.{ .invalid, .period, .float_literal });
+ testTokenize("0x0._0", &.{ .invalid, .identifier });
+ testTokenize("0x0.0_", &.{.invalid});
+ testTokenize("0x0_p0", &.{ .invalid, .identifier });
+ testTokenize("0x0_.p0", &.{ .invalid, .period, .identifier });
+ testTokenize("0x0._p0", &.{ .invalid, .identifier });
+ testTokenize("0x0.0_p0", &.{ .invalid, .identifier });
+ testTokenize("0x0._0p0", &.{ .invalid, .identifier });
+ testTokenize("0x0.0p_0", &.{ .invalid, .identifier });
+ testTokenize("0x0.0p+_0", &.{ .invalid, .identifier });
+ testTokenize("0x0.0p-_0", &.{ .invalid, .identifier });
+ testTokenize("0x0.0p0_", &.{ .invalid, .eof });
}
-fn testTokenize(source: []const u8, expected_tokens: []const Token.Id) void {
+fn testTokenize(source: []const u8, expected_tokens: []const Token.Tag) void {
var tokenizer = Tokenizer.init(source);
for (expected_tokens) |expected_token_id| {
const token = tokenizer.next();
- if (token.id != expected_token_id) {
- std.debug.panic("expected {s}, found {s}\n", .{ @tagName(expected_token_id), @tagName(token.id) });
+ if (token.tag != expected_token_id) {
+ std.debug.panic("expected {s}, found {s}\n", .{ @tagName(expected_token_id), @tagName(token.tag) });
}
}
const last_token = tokenizer.next();
- std.testing.expect(last_token.id == .Eof);
+ std.testing.expect(last_token.tag == .eof);
+ std.testing.expect(last_token.loc.start == source.len);
}
diff --git a/src/BuiltinFn.zig b/src/BuiltinFn.zig
new file mode 100644
index 000000000000..deb1cbfa761c
--- /dev/null
+++ b/src/BuiltinFn.zig
@@ -0,0 +1,844 @@
+const std = @import("std");
+
+pub const Tag = enum {
+ add_with_overflow,
+ align_cast,
+ align_of,
+ as,
+ async_call,
+ atomic_load,
+ atomic_rmw,
+ atomic_store,
+ bit_cast,
+ bit_offset_of,
+ bool_to_int,
+ bit_size_of,
+ breakpoint,
+ mul_add,
+ byte_swap,
+ bit_reverse,
+ byte_offset_of,
+ call,
+ c_define,
+ c_import,
+ c_include,
+ clz,
+ cmpxchg_strong,
+ cmpxchg_weak,
+ compile_error,
+ compile_log,
+ ctz,
+ c_undef,
+ div_exact,
+ div_floor,
+ div_trunc,
+ embed_file,
+ enum_to_int,
+ error_name,
+ error_return_trace,
+ error_to_int,
+ err_set_cast,
+ @"export",
+ fence,
+ field,
+ field_parent_ptr,
+ float_cast,
+ float_to_int,
+ frame,
+ Frame,
+ frame_address,
+ frame_size,
+ has_decl,
+ has_field,
+ import,
+ int_cast,
+ int_to_enum,
+ int_to_error,
+ int_to_float,
+ int_to_ptr,
+ memcpy,
+ memset,
+ wasm_memory_size,
+ wasm_memory_grow,
+ mod,
+ mul_with_overflow,
+ panic,
+ pop_count,
+ ptr_cast,
+ ptr_to_int,
+ rem,
+ return_address,
+ set_align_stack,
+ set_cold,
+ set_eval_branch_quota,
+ set_float_mode,
+ set_runtime_safety,
+ shl_exact,
+ shl_with_overflow,
+ shr_exact,
+ shuffle,
+ size_of,
+ splat,
+ reduce,
+ src,
+ sqrt,
+ sin,
+ cos,
+ exp,
+ exp2,
+ log,
+ log2,
+ log10,
+ fabs,
+ floor,
+ ceil,
+ trunc,
+ round,
+ sub_with_overflow,
+ tag_name,
+ This,
+ truncate,
+ Type,
+ type_info,
+ type_name,
+ TypeOf,
+ union_init,
+};
+
+tag: Tag,
+
+/// `true` if the builtin call can take advantage of a result location pointer.
+needs_mem_loc: bool = false,
+/// `true` if the builtin call can be the left-hand side of an expression (assigned to).
+allows_lvalue: bool = false,
+/// The number of parameters to this builtin function. `null` means variable number
+/// of parameters.
+param_count: ?u8,
+
+pub const list = list: {
+ @setEvalBranchQuota(3000);
+ break :list std.ComptimeStringMap(@This(), .{
+ .{
+ "@addWithOverflow",
+ .{
+ .tag = .add_with_overflow,
+ .param_count = 4,
+ },
+ },
+ .{
+ "@alignCast",
+ .{
+ .tag = .align_cast,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@alignOf",
+ .{
+ .tag = .align_of,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@as",
+ .{
+ .tag = .as,
+ .needs_mem_loc = true,
+ .param_count = 2,
+ },
+ },
+ .{
+ "@asyncCall",
+ .{
+ .tag = .async_call,
+ .param_count = null,
+ },
+ },
+ .{
+ "@atomicLoad",
+ .{
+ .tag = .atomic_load,
+ .param_count = 3,
+ },
+ },
+ .{
+ "@atomicRmw",
+ .{
+ .tag = .atomic_rmw,
+ .param_count = 5,
+ },
+ },
+ .{
+ "@atomicStore",
+ .{
+ .tag = .atomic_store,
+ .param_count = 4,
+ },
+ },
+ .{
+ "@bitCast",
+ .{
+ .tag = .bit_cast,
+ .needs_mem_loc = true,
+ .param_count = 2,
+ },
+ },
+ .{
+ "@bitOffsetOf",
+ .{
+ .tag = .bit_offset_of,
+ .param_count = 2,
+ },
+ },
+ .{
+ "@boolToInt",
+ .{
+ .tag = .bool_to_int,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@bitSizeOf",
+ .{
+ .tag = .bit_size_of,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@breakpoint",
+ .{
+ .tag = .breakpoint,
+ .param_count = 0,
+ },
+ },
+ .{
+ "@mulAdd",
+ .{
+ .tag = .mul_add,
+ .param_count = 4,
+ },
+ },
+ .{
+ "@byteSwap",
+ .{
+ .tag = .byte_swap,
+ .param_count = 2,
+ },
+ },
+ .{
+ "@bitReverse",
+ .{
+ .tag = .bit_reverse,
+ .param_count = 2,
+ },
+ },
+ .{
+ "@byteOffsetOf",
+ .{
+ .tag = .byte_offset_of,
+ .param_count = 2,
+ },
+ },
+ .{
+ "@call",
+ .{
+ .tag = .call,
+ .needs_mem_loc = true,
+ .param_count = 3,
+ },
+ },
+ .{
+ "@cDefine",
+ .{
+ .tag = .c_define,
+ .param_count = 2,
+ },
+ },
+ .{
+ "@cImport",
+ .{
+ .tag = .c_import,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@cInclude",
+ .{
+ .tag = .c_include,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@clz",
+ .{
+ .tag = .clz,
+ .param_count = 2,
+ },
+ },
+ .{
+ "@cmpxchgStrong",
+ .{
+ .tag = .cmpxchg_strong,
+ .param_count = 6,
+ },
+ },
+ .{
+ "@cmpxchgWeak",
+ .{
+ .tag = .cmpxchg_weak,
+ .param_count = 6,
+ },
+ },
+ .{
+ "@compileError",
+ .{
+ .tag = .compile_error,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@compileLog",
+ .{
+ .tag = .compile_log,
+ .param_count = null,
+ },
+ },
+ .{
+ "@ctz",
+ .{
+ .tag = .ctz,
+ .param_count = 2,
+ },
+ },
+ .{
+ "@cUndef",
+ .{
+ .tag = .c_undef,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@divExact",
+ .{
+ .tag = .div_exact,
+ .param_count = 2,
+ },
+ },
+ .{
+ "@divFloor",
+ .{
+ .tag = .div_floor,
+ .param_count = 2,
+ },
+ },
+ .{
+ "@divTrunc",
+ .{
+ .tag = .div_trunc,
+ .param_count = 2,
+ },
+ },
+ .{
+ "@embedFile",
+ .{
+ .tag = .embed_file,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@enumToInt",
+ .{
+ .tag = .enum_to_int,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@errorName",
+ .{
+ .tag = .error_name,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@errorReturnTrace",
+ .{
+ .tag = .error_return_trace,
+ .param_count = 0,
+ },
+ },
+ .{
+ "@errorToInt",
+ .{
+ .tag = .error_to_int,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@errSetCast",
+ .{
+ .tag = .err_set_cast,
+ .param_count = 2,
+ },
+ },
+ .{
+ "@export",
+ .{
+ .tag = .@"export",
+ .param_count = 2,
+ },
+ },
+ .{
+ "@fence",
+ .{
+ .tag = .fence,
+ .param_count = 0,
+ },
+ },
+ .{
+ "@field",
+ .{
+ .tag = .field,
+ .needs_mem_loc = true,
+ .param_count = 2,
+ .allows_lvalue = true,
+ },
+ },
+ .{
+ "@fieldParentPtr",
+ .{
+ .tag = .field_parent_ptr,
+ .param_count = 3,
+ },
+ },
+ .{
+ "@floatCast",
+ .{
+ .tag = .float_cast,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@floatToInt",
+ .{
+ .tag = .float_to_int,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@frame",
+ .{
+ .tag = .frame,
+ .param_count = 0,
+ },
+ },
+ .{
+ "@Frame",
+ .{
+ .tag = .Frame,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@frameAddress",
+ .{
+ .tag = .frame_address,
+ .param_count = 0,
+ },
+ },
+ .{
+ "@frameSize",
+ .{
+ .tag = .frame_size,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@hasDecl",
+ .{
+ .tag = .has_decl,
+ .param_count = 2,
+ },
+ },
+ .{
+ "@hasField",
+ .{
+ .tag = .has_field,
+ .param_count = 2,
+ },
+ },
+ .{
+ "@import",
+ .{
+ .tag = .import,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@intCast",
+ .{
+ .tag = .int_cast,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@intToEnum",
+ .{
+ .tag = .int_to_enum,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@intToError",
+ .{
+ .tag = .int_to_error,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@intToFloat",
+ .{
+ .tag = .int_to_float,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@intToPtr",
+ .{
+ .tag = .int_to_ptr,
+ .param_count = 2,
+ },
+ },
+ .{
+ "@memcpy",
+ .{
+ .tag = .memcpy,
+ .param_count = 3,
+ },
+ },
+ .{
+ "@memset",
+ .{
+ .tag = .memset,
+ .param_count = 3,
+ },
+ },
+ .{
+ "@wasmMemorySize",
+ .{
+ .tag = .wasm_memory_size,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@wasmMemoryGrow",
+ .{
+ .tag = .wasm_memory_grow,
+ .param_count = 2,
+ },
+ },
+ .{
+ "@mod",
+ .{
+ .tag = .mod,
+ .param_count = 2,
+ },
+ },
+ .{
+ "@mulWithOverflow",
+ .{
+ .tag = .mul_with_overflow,
+ .param_count = 4,
+ },
+ },
+ .{
+ "@panic",
+ .{
+ .tag = .panic,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@popCount",
+ .{
+ .tag = .pop_count,
+ .param_count = 2,
+ },
+ },
+ .{
+ "@ptrCast",
+ .{
+ .tag = .ptr_cast,
+ .param_count = 2,
+ },
+ },
+ .{
+ "@ptrToInt",
+ .{
+ .tag = .ptr_to_int,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@rem",
+ .{
+ .tag = .rem,
+ .param_count = 2,
+ },
+ },
+ .{
+ "@returnAddress",
+ .{
+ .tag = .return_address,
+ .param_count = 0,
+ },
+ },
+ .{
+ "@setAlignStack",
+ .{
+ .tag = .set_align_stack,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@setCold",
+ .{
+ .tag = .set_cold,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@setEvalBranchQuota",
+ .{
+ .tag = .set_eval_branch_quota,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@setFloatMode",
+ .{
+ .tag = .set_float_mode,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@setRuntimeSafety",
+ .{
+ .tag = .set_runtime_safety,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@shlExact",
+ .{
+ .tag = .shl_exact,
+ .param_count = 2,
+ },
+ },
+ .{
+ "@shlWithOverflow",
+ .{
+ .tag = .shl_with_overflow,
+ .param_count = 4,
+ },
+ },
+ .{
+ "@shrExact",
+ .{
+ .tag = .shr_exact,
+ .param_count = 2,
+ },
+ },
+ .{
+ "@shuffle",
+ .{
+ .tag = .shuffle,
+ .param_count = 4,
+ },
+ },
+ .{
+ "@sizeOf",
+ .{
+ .tag = .size_of,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@splat",
+ .{
+ .tag = .splat,
+ .needs_mem_loc = true,
+ .param_count = 2,
+ },
+ },
+ .{
+ "@reduce",
+ .{
+ .tag = .reduce,
+ .param_count = 2,
+ },
+ },
+ .{
+ "@src",
+ .{
+ .tag = .src,
+ .needs_mem_loc = true,
+ .param_count = 0,
+ },
+ },
+ .{
+ "@sqrt",
+ .{
+ .tag = .sqrt,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@sin",
+ .{
+ .tag = .sin,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@cos",
+ .{
+ .tag = .cos,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@exp",
+ .{
+ .tag = .exp,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@exp2",
+ .{
+ .tag = .exp2,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@log",
+ .{
+ .tag = .log,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@log2",
+ .{
+ .tag = .log2,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@log10",
+ .{
+ .tag = .log10,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@fabs",
+ .{
+ .tag = .fabs,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@floor",
+ .{
+ .tag = .floor,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@ceil",
+ .{
+ .tag = .ceil,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@trunc",
+ .{
+ .tag = .trunc,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@round",
+ .{
+ .tag = .round,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@subWithOverflow",
+ .{
+ .tag = .sub_with_overflow,
+ .param_count = 4,
+ },
+ },
+ .{
+ "@tagName",
+ .{
+ .tag = .tag_name,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@This",
+ .{
+ .tag = .This,
+ .param_count = 0,
+ },
+ },
+ .{
+ "@truncate",
+ .{
+ .tag = .truncate,
+ .param_count = 2,
+ },
+ },
+ .{
+ "@Type",
+ .{
+ .tag = .Type,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@typeInfo",
+ .{
+ .tag = .type_info,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@typeName",
+ .{
+ .tag = .type_name,
+ .param_count = 1,
+ },
+ },
+ .{
+ "@TypeOf",
+ .{
+ .tag = .TypeOf,
+ .param_count = null,
+ },
+ },
+ .{
+ "@unionInit",
+ .{
+ .tag = .union_init,
+ .needs_mem_loc = true,
+ .param_count = 3,
+ },
+ },
+ });
+};
diff --git a/src/Compilation.zig b/src/Compilation.zig
index 180d49a19611..39e10bececfa 100644
--- a/src/Compilation.zig
+++ b/src/Compilation.zig
@@ -921,7 +921,7 @@ pub fn create(gpa: *Allocator, options: InitOptions) !*Compilation {
// TODO this is duped so it can be freed in Container.deinit
.sub_file_path = try gpa.dupe(u8, root_pkg.root_src_path),
.source = .{ .unloaded = {} },
- .contents = .{ .not_available = {} },
+ .tree = undefined,
.status = .never_loaded,
.pkg = root_pkg,
.root_container = .{
@@ -1334,7 +1334,7 @@ pub fn update(self: *Compilation) !void {
self.c_object_work_queue.writeItemAssumeCapacity(entry.key);
}
- const use_stage1 = build_options.is_stage1 and self.bin_file.options.use_llvm;
+ const use_stage1 = build_options.omit_stage2 or build_options.is_stage1 and self.bin_file.options.use_llvm;
if (!use_stage1) {
if (self.bin_file.options.module) |module| {
module.compile_log_text.shrinkAndFree(module.gpa, 0);
@@ -1884,7 +1884,7 @@ pub fn cImport(comp: *Compilation, c_src: []const u8) !CImportResult {
const c_headers_dir_path = try comp.zig_lib_directory.join(arena, &[_][]const u8{"include"});
const c_headers_dir_path_z = try arena.dupeZ(u8, c_headers_dir_path);
var clang_errors: []translate_c.ClangErrMsg = &[0]translate_c.ClangErrMsg{};
- const tree = translate_c.translate(
+ var tree = translate_c.translate(
comp.gpa,
new_argv.ptr,
new_argv.ptr + new_argv.len,
@@ -1903,7 +1903,7 @@ pub fn cImport(comp: *Compilation, c_src: []const u8) !CImportResult {
};
},
};
- defer tree.deinit();
+ defer tree.deinit(comp.gpa);
if (comp.verbose_cimport) {
log.info("C import .d file: {s}", .{out_dep_path});
@@ -1921,9 +1921,10 @@ pub fn cImport(comp: *Compilation, c_src: []const u8) !CImportResult {
var out_zig_file = try o_dir.createFile(cimport_zig_basename, .{});
defer out_zig_file.close();
- var bos = std.io.bufferedWriter(out_zig_file.writer());
- _ = try std.zig.render(comp.gpa, bos.writer(), tree);
- try bos.flush();
+ const formatted = try tree.render(comp.gpa);
+ defer comp.gpa.free(formatted);
+
+ try out_zig_file.writeAll(formatted);
man.writeManifest() catch |err| {
log.warn("failed to write cache manifest for C import: {s}", .{@errorName(err)});
@@ -1936,7 +1937,7 @@ pub fn cImport(comp: *Compilation, c_src: []const u8) !CImportResult {
"o", &digest, cimport_zig_basename,
});
if (comp.verbose_cimport) {
- log.info("C import output: {s}\n", .{out_zig_path});
+ log.info("C import output: {s}", .{out_zig_path});
}
return CImportResult{
.out_zig_path = out_zig_path,
@@ -3000,7 +3001,7 @@ pub fn updateSubCompilation(sub_compilation: *Compilation) !void {
for (errors.list) |full_err_msg| {
switch (full_err_msg) {
.src => |src| {
- log.err("{s}:{d}:{d}: {s}\n", .{
+ log.err("{s}:{d}:{d}: {s}", .{
src.src_path,
src.line + 1,
src.column + 1,
diff --git a/src/Module.zig b/src/Module.zig
index 322f1906730b..7af4648c79d7 100644
--- a/src/Module.zig
+++ b/src/Module.zig
@@ -244,9 +244,9 @@ pub const Decl = struct {
}
pub fn src(self: Decl) usize {
- const tree = self.container.file_scope.contents.tree;
- const decl_node = tree.root_node.decls()[self.src_index];
- return tree.token_locs[decl_node.firstToken()].start;
+ const tree = &self.container.file_scope.tree;
+ const decl_node = tree.rootDecls()[self.src_index];
+ return tree.tokens.items(.start)[tree.firstToken(decl_node)];
}
pub fn fullyQualifiedNameHash(self: Decl) Scope.NameHash {
@@ -428,14 +428,14 @@ pub const Scope = struct {
}
/// Asserts the scope is a child of a File and has an AST tree and returns the tree.
- pub fn tree(self: *Scope) *ast.Tree {
+ pub fn tree(self: *Scope) *const ast.Tree {
switch (self.tag) {
- .file => return self.cast(File).?.contents.tree,
- .block => return self.cast(Block).?.src_decl.container.file_scope.contents.tree,
- .gen_zir => return self.cast(GenZIR).?.decl.container.file_scope.contents.tree,
- .local_val => return self.cast(LocalVal).?.gen_zir.decl.container.file_scope.contents.tree,
- .local_ptr => return self.cast(LocalPtr).?.gen_zir.decl.container.file_scope.contents.tree,
- .container => return self.cast(Container).?.file_scope.contents.tree,
+ .file => return &self.cast(File).?.tree,
+ .block => return &self.cast(Block).?.src_decl.container.file_scope.tree,
+ .gen_zir => return &self.cast(GenZIR).?.decl.container.file_scope.tree,
+ .local_val => return &self.cast(LocalVal).?.gen_zir.decl.container.file_scope.tree,
+ .local_ptr => return &self.cast(LocalPtr).?.gen_zir.decl.container.file_scope.tree,
+ .container => return &self.cast(Container).?.file_scope.tree,
}
}
@@ -540,6 +540,12 @@ pub const Scope = struct {
pub const File = struct {
pub const base_tag: Tag = .file;
base: Scope = Scope{ .tag = base_tag },
+ status: enum {
+ never_loaded,
+ unloaded_success,
+ unloaded_parse_failure,
+ loaded_success,
+ },
/// Relative to the owning package's root_src_dir.
/// Reference to external memory, not owned by File.
@@ -548,16 +554,8 @@ pub const Scope = struct {
unloaded: void,
bytes: [:0]const u8,
},
- contents: union {
- not_available: void,
- tree: *ast.Tree,
- },
- status: enum {
- never_loaded,
- unloaded_success,
- unloaded_parse_failure,
- loaded_success,
- },
+ /// Whether this is populated or not depends on `status`.
+ tree: ast.Tree,
/// Package that this file is a part of, managed externally.
pkg: *Package,
@@ -571,7 +569,7 @@ pub const Scope = struct {
=> {},
.loaded_success => {
- self.contents.tree.deinit();
+ self.tree.deinit(gpa);
self.status = .unloaded_success;
},
}
@@ -926,7 +924,7 @@ pub fn ensureDeclAnalyzed(mod: *Module, decl: *Decl) InnerError!void {
.complete => return,
.outdated => blk: {
- log.debug("re-analyzing {s}\n", .{decl.name});
+ log.debug("re-analyzing {s}", .{decl.name});
// The exports this Decl performs will be re-discovered, so we remove them here
// prior to re-analysis.
@@ -950,7 +948,7 @@ pub fn ensureDeclAnalyzed(mod: *Module, decl: *Decl) InnerError!void {
.unreferenced => false,
};
- const type_changed = mod.astGenAndAnalyzeDecl(decl) catch |err| switch (err) {
+ const type_changed = mod.astgenAndSemaDecl(decl) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.AnalysisFail => return error.AnalysisFail,
else => {
@@ -992,141 +990,72 @@ pub fn ensureDeclAnalyzed(mod: *Module, decl: *Decl) InnerError!void {
}
}
-fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
+/// Returns `true` if the Decl type changed.
+/// Returns `true` if this is the first time analyzing the Decl.
+/// Returns `false` otherwise.
+fn astgenAndSemaDecl(mod: *Module, decl: *Decl) !bool {
const tracy = trace(@src());
defer tracy.end();
- const tree = try self.getAstTree(decl.container.file_scope);
- const ast_node = tree.root_node.decls()[decl.src_index];
- switch (ast_node.tag) {
- .FnProto => {
- const fn_proto = ast_node.castTag(.FnProto).?;
+ const tree = try mod.getAstTree(decl.container.file_scope);
+ const node_tags = tree.nodes.items(.tag);
+ const node_datas = tree.nodes.items(.data);
+ const decl_node = tree.rootDecls()[decl.src_index];
+ switch (node_tags[decl_node]) {
+ .fn_decl => {
+ const fn_proto = node_datas[decl_node].lhs;
+ const body = node_datas[decl_node].rhs;
+ switch (node_tags[fn_proto]) {
+ .fn_proto_simple => {
+ var params: [1]ast.Node.Index = undefined;
+ return mod.astgenAndSemaFn(decl, tree.*, body, tree.fnProtoSimple(¶ms, fn_proto));
+ },
+ .fn_proto_multi => return mod.astgenAndSemaFn(decl, tree.*, body, tree.fnProtoMulti(fn_proto)),
+ .fn_proto_one => {
+ var params: [1]ast.Node.Index = undefined;
+ return mod.astgenAndSemaFn(decl, tree.*, body, tree.fnProtoOne(¶ms, fn_proto));
+ },
+ .fn_proto => return mod.astgenAndSemaFn(decl, tree.*, body, tree.fnProto(fn_proto)),
+ else => unreachable,
+ }
+ },
+ .fn_proto_simple => {
+ var params: [1]ast.Node.Index = undefined;
+ return mod.astgenAndSemaFn(decl, tree.*, 0, tree.fnProtoSimple(¶ms, decl_node));
+ },
+ .fn_proto_multi => return mod.astgenAndSemaFn(decl, tree.*, 0, tree.fnProtoMulti(decl_node)),
+ .fn_proto_one => {
+ var params: [1]ast.Node.Index = undefined;
+ return mod.astgenAndSemaFn(decl, tree.*, 0, tree.fnProtoOne(¶ms, decl_node));
+ },
+ .fn_proto => return mod.astgenAndSemaFn(decl, tree.*, 0, tree.fnProto(decl_node)),
+ .global_var_decl => return mod.astgenAndSemaVarDecl(decl, tree.*, tree.globalVarDecl(decl_node)),
+ .local_var_decl => return mod.astgenAndSemaVarDecl(decl, tree.*, tree.localVarDecl(decl_node)),
+ .simple_var_decl => return mod.astgenAndSemaVarDecl(decl, tree.*, tree.simpleVarDecl(decl_node)),
+ .aligned_var_decl => return mod.astgenAndSemaVarDecl(decl, tree.*, tree.alignedVarDecl(decl_node)),
+
+ .@"comptime" => {
decl.analysis = .in_progress;
- // This arena allocator's memory is discarded at the end of this function. It is used
- // to determine the type of the function, and hence the type of the decl, which is needed
- // to complete the Decl analysis.
- var fn_type_scope_arena = std.heap.ArenaAllocator.init(self.gpa);
- defer fn_type_scope_arena.deinit();
- var fn_type_scope: Scope.GenZIR = .{
+ // A comptime decl does not store any value so we can just deinit this arena after analysis is done.
+ var analysis_arena = std.heap.ArenaAllocator.init(mod.gpa);
+ defer analysis_arena.deinit();
+ var gen_scope: Scope.GenZIR = .{
.decl = decl,
- .arena = &fn_type_scope_arena.allocator,
+ .arena = &analysis_arena.allocator,
.parent = &decl.container.base,
.force_comptime = true,
};
- defer fn_type_scope.instructions.deinit(self.gpa);
-
- decl.is_pub = fn_proto.getVisibToken() != null;
-
- const param_decls = fn_proto.params();
- const param_types = try fn_type_scope.arena.alloc(*zir.Inst, param_decls.len);
+ defer gen_scope.instructions.deinit(mod.gpa);
- const fn_src = tree.token_locs[fn_proto.fn_token].start;
- const type_type = try astgen.addZIRInstConst(self, &fn_type_scope.base, fn_src, .{
- .ty = Type.initTag(.type),
- .val = Value.initTag(.type_type),
- });
- const type_type_rl: astgen.ResultLoc = .{ .ty = type_type };
- for (param_decls) |param_decl, i| {
- const param_type_node = switch (param_decl.param_type) {
- .any_type => |node| return self.failNode(&fn_type_scope.base, node, "TODO implement anytype parameter", .{}),
- .type_expr => |node| node,
- };
- param_types[i] = try astgen.expr(self, &fn_type_scope.base, type_type_rl, param_type_node);
- }
- if (fn_proto.getVarArgsToken()) |var_args_token| {
- return self.failTok(&fn_type_scope.base, var_args_token, "TODO implement var args", .{});
- }
- if (fn_proto.getLibName()) |lib_name| blk: {
- const lib_name_str = mem.trim(u8, tree.tokenSlice(lib_name.firstToken()), "\""); // TODO: call identifierTokenString
- log.debug("extern fn symbol expected in lib '{s}'", .{lib_name_str});
- const target = self.comp.getTarget();
- if (target_util.is_libc_lib_name(target, lib_name_str)) {
- if (!self.comp.bin_file.options.link_libc) {
- return self.failNode(
- &fn_type_scope.base,
- lib_name,
- "dependency on libc must be explicitly specified in the build command",
- .{},
- );
- }
- break :blk;
- }
- if (target_util.is_libcpp_lib_name(target, lib_name_str)) {
- if (!self.comp.bin_file.options.link_libcpp) {
- return self.failNode(
- &fn_type_scope.base,
- lib_name,
- "dependency on libc++ must be explicitly specified in the build command",
- .{},
- );
- }
- break :blk;
- }
- if (!target.isWasm() and !self.comp.bin_file.options.pic) {
- return self.failNode(
- &fn_type_scope.base,
- lib_name,
- "dependency on dynamic library '{s}' requires enabling Position Independent Code. Fixed by `-l{s}` or `-fPIC`.",
- .{ lib_name, lib_name },
- );
- }
- self.comp.stage1AddLinkLib(lib_name_str) catch |err| {
- return self.failNode(
- &fn_type_scope.base,
- lib_name,
- "unable to add link lib '{s}': {s}",
- .{ lib_name, @errorName(err) },
- );
- };
- }
- if (fn_proto.getAlignExpr()) |align_expr| {
- return self.failNode(&fn_type_scope.base, align_expr, "TODO implement function align expression", .{});
- }
- if (fn_proto.getSectionExpr()) |sect_expr| {
- return self.failNode(&fn_type_scope.base, sect_expr, "TODO implement function section expression", .{});
+ const block_expr = node_datas[decl_node].lhs;
+ _ = try astgen.comptimeExpr(mod, &gen_scope.base, .none, block_expr);
+ if (std.builtin.mode == .Debug and mod.comp.verbose_ir) {
+ zir.dumpZir(mod.gpa, "comptime_block", decl.name, gen_scope.instructions.items) catch {};
}
- const enum_literal_type = try astgen.addZIRInstConst(self, &fn_type_scope.base, fn_src, .{
- .ty = Type.initTag(.type),
- .val = Value.initTag(.enum_literal_type),
- });
- const enum_literal_type_rl: astgen.ResultLoc = .{ .ty = enum_literal_type };
- const cc = if (fn_proto.getCallconvExpr()) |callconv_expr|
- try astgen.expr(self, &fn_type_scope.base, enum_literal_type_rl, callconv_expr)
- else
- try astgen.addZIRInstConst(self, &fn_type_scope.base, fn_src, .{
- .ty = Type.initTag(.enum_literal),
- .val = try Value.Tag.enum_literal.create(
- &fn_type_scope_arena.allocator,
- try fn_type_scope_arena.allocator.dupe(u8, "Unspecified"),
- ),
- });
-
- const return_type_expr = switch (fn_proto.return_type) {
- .Explicit => |node| node,
- .InferErrorSet => |node| return self.failNode(&fn_type_scope.base, node, "TODO implement inferred error sets", .{}),
- .Invalid => |tok| return self.failTok(&fn_type_scope.base, tok, "unable to parse return type", .{}),
- };
-
- const return_type_inst = try astgen.expr(self, &fn_type_scope.base, type_type_rl, return_type_expr);
- const fn_type_inst = try astgen.addZIRInst(self, &fn_type_scope.base, fn_src, zir.Inst.FnType, .{
- .return_type = return_type_inst,
- .param_types = param_types,
- .cc = cc,
- }, .{});
-
- if (std.builtin.mode == .Debug and self.comp.verbose_ir) {
- zir.dumpZir(self.gpa, "fn_type", decl.name, fn_type_scope.instructions.items) catch {};
- }
-
- // We need the memory for the Type to go into the arena for the Decl
- var decl_arena = std.heap.ArenaAllocator.init(self.gpa);
- errdefer decl_arena.deinit();
- const decl_arena_state = try decl_arena.allocator.create(std.heap.ArenaAllocator.State);
-
- var inst_table = Scope.Block.InstTable.init(self.gpa);
+ var inst_table = Scope.Block.InstTable.init(mod.gpa);
defer inst_table.deinit();
var branch_quota: u32 = default_eval_branch_quota;
@@ -1138,424 +1067,627 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
.owner_decl = decl,
.src_decl = decl,
.instructions = .{},
- .arena = &decl_arena.allocator,
+ .arena = &analysis_arena.allocator,
.inlining = null,
- .is_comptime = false,
+ .is_comptime = true,
.branch_quota = &branch_quota,
};
- defer block_scope.instructions.deinit(self.gpa);
+ defer block_scope.instructions.deinit(mod.gpa);
- const fn_type = try zir_sema.analyzeBodyValueAsType(self, &block_scope, fn_type_inst, .{
- .instructions = fn_type_scope.instructions.items,
+ _ = try zir_sema.analyzeBody(mod, &block_scope, .{
+ .instructions = gen_scope.instructions.items,
});
- const body_node = fn_proto.getBodyNode() orelse {
- // Extern function.
- var type_changed = true;
- if (decl.typedValueManaged()) |tvm| {
- type_changed = !tvm.typed_value.ty.eql(fn_type);
- tvm.deinit(self.gpa);
- }
- const fn_val = try Value.Tag.extern_fn.create(&decl_arena.allocator, decl);
+ decl.analysis = .complete;
+ decl.generation = mod.generation;
+ return true;
+ },
+ .@"usingnamespace" => @panic("TODO usingnamespace decl"),
+ else => unreachable,
+ }
+}
- decl_arena_state.* = decl_arena.state;
- decl.typed_value = .{
- .most_recent = .{
- .typed_value = .{ .ty = fn_type, .val = fn_val },
- .arena = decl_arena_state,
- },
- };
- decl.analysis = .complete;
- decl.generation = self.generation;
+fn astgenAndSemaFn(
+ mod: *Module,
+ decl: *Decl,
+ tree: ast.Tree,
+ body_node: ast.Node.Index,
+ fn_proto: ast.full.FnProto,
+) !bool {
+ const tracy = trace(@src());
+ defer tracy.end();
+
+ decl.analysis = .in_progress;
+
+ const token_starts = tree.tokens.items(.start);
+ const token_tags = tree.tokens.items(.tag);
+
+ // This arena allocator's memory is discarded at the end of this function. It is used
+ // to determine the type of the function, and hence the type of the decl, which is needed
+ // to complete the Decl analysis.
+ var fn_type_scope_arena = std.heap.ArenaAllocator.init(mod.gpa);
+ defer fn_type_scope_arena.deinit();
+ var fn_type_scope: Scope.GenZIR = .{
+ .decl = decl,
+ .arena = &fn_type_scope_arena.allocator,
+ .parent = &decl.container.base,
+ .force_comptime = true,
+ };
+ defer fn_type_scope.instructions.deinit(mod.gpa);
- try self.comp.bin_file.allocateDeclIndexes(decl);
- try self.comp.work_queue.writeItem(.{ .codegen_decl = decl });
+ decl.is_pub = fn_proto.visib_token != null;
- if (type_changed and self.emit_h != null) {
- try self.comp.work_queue.writeItem(.{ .emit_h_decl = decl });
+ // The AST params array does not contain anytype and ... parameters.
+ // We must iterate to count how many param types to allocate.
+ const param_count = blk: {
+ var count: usize = 0;
+ var it = fn_proto.iterate(tree);
+ while (it.next()) |_| {
+ count += 1;
+ }
+ break :blk count;
+ };
+ const param_types = try fn_type_scope.arena.alloc(*zir.Inst, param_count);
+ const fn_src = token_starts[fn_proto.ast.fn_token];
+ const type_type = try astgen.addZIRInstConst(mod, &fn_type_scope.base, fn_src, .{
+ .ty = Type.initTag(.type),
+ .val = Value.initTag(.type_type),
+ });
+ const type_type_rl: astgen.ResultLoc = .{ .ty = type_type };
+
+ {
+ var param_type_i: usize = 0;
+ var it = fn_proto.iterate(tree);
+ while (it.next()) |param| : (param_type_i += 1) {
+ if (param.anytype_ellipsis3) |token| {
+ switch (token_tags[token]) {
+ .keyword_anytype => return mod.failTok(
+ &fn_type_scope.base,
+ token,
+ "TODO implement anytype parameter",
+ .{},
+ ),
+ .ellipsis3 => return mod.failTok(
+ &fn_type_scope.base,
+ token,
+ "TODO implement var args",
+ .{},
+ ),
+ else => unreachable,
}
+ }
+ const param_type_node = param.type_expr;
+ assert(param_type_node != 0);
+ param_types[param_type_i] =
+ try astgen.expr(mod, &fn_type_scope.base, type_type_rl, param_type_node);
+ }
+ assert(param_type_i == param_count);
+ }
+ if (fn_proto.lib_name) |lib_name_token| blk: {
+ // TODO call std.zig.parseStringLiteral
+ const lib_name_str = mem.trim(u8, tree.tokenSlice(lib_name_token), "\"");
+ log.debug("extern fn symbol expected in lib '{s}'", .{lib_name_str});
+ const target = mod.comp.getTarget();
+ if (target_util.is_libc_lib_name(target, lib_name_str)) {
+ if (!mod.comp.bin_file.options.link_libc) {
+ return mod.failTok(
+ &fn_type_scope.base,
+ lib_name_token,
+ "dependency on libc must be explicitly specified in the build command",
+ .{},
+ );
+ }
+ break :blk;
+ }
+ if (target_util.is_libcpp_lib_name(target, lib_name_str)) {
+ if (!mod.comp.bin_file.options.link_libcpp) {
+ return mod.failTok(
+ &fn_type_scope.base,
+ lib_name_token,
+ "dependency on libc++ must be explicitly specified in the build command",
+ .{},
+ );
+ }
+ break :blk;
+ }
+ if (!target.isWasm() and !mod.comp.bin_file.options.pic) {
+ return mod.failTok(
+ &fn_type_scope.base,
+ lib_name_token,
+ "dependency on dynamic library '{s}' requires enabling Position Independent Code. Fixed by `-l{s}` or `-fPIC`.",
+ .{ lib_name_str, lib_name_str },
+ );
+ }
+ mod.comp.stage1AddLinkLib(lib_name_str) catch |err| {
+ return mod.failTok(
+ &fn_type_scope.base,
+ lib_name_token,
+ "unable to add link lib '{s}': {s}",
+ .{ lib_name_str, @errorName(err) },
+ );
+ };
+ }
+ if (fn_proto.ast.align_expr != 0) {
+ return mod.failNode(
+ &fn_type_scope.base,
+ fn_proto.ast.align_expr,
+ "TODO implement function align expression",
+ .{},
+ );
+ }
+ if (fn_proto.ast.section_expr != 0) {
+ return mod.failNode(
+ &fn_type_scope.base,
+ fn_proto.ast.section_expr,
+ "TODO implement function section expression",
+ .{},
+ );
+ }
- return type_changed;
- };
+ const maybe_bang = tree.firstToken(fn_proto.ast.return_type) - 1;
+ if (token_tags[maybe_bang] == .bang) {
+ return mod.failTok(&fn_type_scope.base, maybe_bang, "TODO implement inferred error sets", .{});
+ }
+ const return_type_inst = try astgen.expr(
+ mod,
+ &fn_type_scope.base,
+ type_type_rl,
+ fn_proto.ast.return_type,
+ );
+ const fn_type_inst = if (fn_proto.ast.callconv_expr != 0) cc: {
+ // TODO instead of enum literal type, this needs to be the
+ // std.builtin.CallingConvention enum. We need to implement importing other files
+ // and enums in order to fix this.
+ const src = token_starts[tree.firstToken(fn_proto.ast.callconv_expr)];
+ const enum_lit_ty = try astgen.addZIRInstConst(mod, &fn_type_scope.base, src, .{
+ .ty = Type.initTag(.type),
+ .val = Value.initTag(.enum_literal_type),
+ });
+ const cc = try astgen.comptimeExpr(mod, &fn_type_scope.base, .{
+ .ty = enum_lit_ty,
+ }, fn_proto.ast.callconv_expr);
+ break :cc try astgen.addZirInstTag(mod, &fn_type_scope.base, fn_src, .fn_type_cc, .{
+ .return_type = return_type_inst,
+ .param_types = param_types,
+ .cc = cc,
+ });
+ } else
+ try astgen.addZirInstTag(mod, &fn_type_scope.base, fn_src, .fn_type, .{
+ .return_type = return_type_inst,
+ .param_types = param_types,
+ });
- const new_func = try decl_arena.allocator.create(Fn);
- const fn_payload = try decl_arena.allocator.create(Value.Payload.Function);
+ if (std.builtin.mode == .Debug and mod.comp.verbose_ir) {
+ zir.dumpZir(mod.gpa, "fn_type", decl.name, fn_type_scope.instructions.items) catch {};
+ }
- const fn_zir: zir.Body = blk: {
- // We put the ZIR inside the Decl arena.
- var gen_scope: Scope.GenZIR = .{
- .decl = decl,
- .arena = &decl_arena.allocator,
- .parent = &decl.container.base,
- .force_comptime = false,
- };
- defer gen_scope.instructions.deinit(self.gpa);
-
- // We need an instruction for each parameter, and they must be first in the body.
- try gen_scope.instructions.resize(self.gpa, fn_proto.params_len);
- var params_scope = &gen_scope.base;
- for (fn_proto.params()) |param, i| {
- const name_token = param.name_token.?;
- const src = tree.token_locs[name_token].start;
- const param_name = try self.identifierTokenString(&gen_scope.base, name_token);
- const arg = try decl_arena.allocator.create(zir.Inst.Arg);
- arg.* = .{
- .base = .{
- .tag = .arg,
- .src = src,
- },
- .positionals = .{
- .name = param_name,
- },
- .kw_args = .{},
- };
- gen_scope.instructions.items[i] = &arg.base;
- const sub_scope = try decl_arena.allocator.create(Scope.LocalVal);
- sub_scope.* = .{
- .parent = params_scope,
- .gen_zir = &gen_scope,
- .name = param_name,
- .inst = &arg.base,
- };
- params_scope = &sub_scope.base;
- }
+ // We need the memory for the Type to go into the arena for the Decl
+ var decl_arena = std.heap.ArenaAllocator.init(mod.gpa);
+ errdefer decl_arena.deinit();
+ const decl_arena_state = try decl_arena.allocator.create(std.heap.ArenaAllocator.State);
- const body_block = body_node.cast(ast.Node.Block).?;
+ var inst_table = Scope.Block.InstTable.init(mod.gpa);
+ defer inst_table.deinit();
- try astgen.blockExpr(self, params_scope, body_block);
+ var branch_quota: u32 = default_eval_branch_quota;
- if (gen_scope.instructions.items.len == 0 or
- !gen_scope.instructions.items[gen_scope.instructions.items.len - 1].tag.isNoReturn())
- {
- const src = tree.token_locs[body_block.rbrace].start;
- _ = try astgen.addZIRNoOp(self, &gen_scope.base, src, .return_void);
- }
+ var block_scope: Scope.Block = .{
+ .parent = null,
+ .inst_table = &inst_table,
+ .func = null,
+ .owner_decl = decl,
+ .src_decl = decl,
+ .instructions = .{},
+ .arena = &decl_arena.allocator,
+ .inlining = null,
+ .is_comptime = false,
+ .branch_quota = &branch_quota,
+ };
+ defer block_scope.instructions.deinit(mod.gpa);
- if (std.builtin.mode == .Debug and self.comp.verbose_ir) {
- zir.dumpZir(self.gpa, "fn_body", decl.name, gen_scope.instructions.items) catch {};
- }
+ const fn_type = try zir_sema.analyzeBodyValueAsType(mod, &block_scope, fn_type_inst, .{
+ .instructions = fn_type_scope.instructions.items,
+ });
+ if (body_node == 0) {
+ // Extern function.
+ var type_changed = true;
+ if (decl.typedValueManaged()) |tvm| {
+ type_changed = !tvm.typed_value.ty.eql(fn_type);
- break :blk .{
- .instructions = try gen_scope.arena.dupe(*zir.Inst, gen_scope.instructions.items),
- };
- };
+ tvm.deinit(mod.gpa);
+ }
+ const fn_val = try Value.Tag.extern_fn.create(&decl_arena.allocator, decl);
- const is_inline = fn_type.fnCallingConvention() == .Inline;
- const anal_state = ([2]Fn.Analysis{ .queued, .inline_only })[@boolToInt(is_inline)];
+ decl_arena_state.* = decl_arena.state;
+ decl.typed_value = .{
+ .most_recent = .{
+ .typed_value = .{ .ty = fn_type, .val = fn_val },
+ .arena = decl_arena_state,
+ },
+ };
+ decl.analysis = .complete;
+ decl.generation = mod.generation;
- new_func.* = .{
- .state = anal_state,
- .zir = fn_zir,
- .body = undefined,
- .owner_decl = decl,
- };
- fn_payload.* = .{
- .base = .{ .tag = .function },
- .data = new_func,
- };
+ try mod.comp.bin_file.allocateDeclIndexes(decl);
+ try mod.comp.work_queue.writeItem(.{ .codegen_decl = decl });
- var prev_type_has_bits = false;
- var prev_is_inline = false;
- var type_changed = true;
+ if (type_changed and mod.emit_h != null) {
+ try mod.comp.work_queue.writeItem(.{ .emit_h_decl = decl });
+ }
- if (decl.typedValueManaged()) |tvm| {
- prev_type_has_bits = tvm.typed_value.ty.hasCodeGenBits();
- type_changed = !tvm.typed_value.ty.eql(fn_type);
- if (tvm.typed_value.val.castTag(.function)) |payload| {
- const prev_func = payload.data;
- prev_is_inline = prev_func.state == .inline_only;
- }
+ return type_changed;
+ }
- tvm.deinit(self.gpa);
- }
+ const new_func = try decl_arena.allocator.create(Fn);
+ const fn_payload = try decl_arena.allocator.create(Value.Payload.Function);
- decl_arena_state.* = decl_arena.state;
- decl.typed_value = .{
- .most_recent = .{
- .typed_value = .{
- .ty = fn_type,
- .val = Value.initPayload(&fn_payload.base),
- },
- .arena = decl_arena_state,
+ const fn_zir: zir.Body = blk: {
+ // We put the ZIR inside the Decl arena.
+ var gen_scope: Scope.GenZIR = .{
+ .decl = decl,
+ .arena = &decl_arena.allocator,
+ .parent = &decl.container.base,
+ .force_comptime = false,
+ };
+ defer gen_scope.instructions.deinit(mod.gpa);
+
+ // We need an instruction for each parameter, and they must be first in the body.
+ try gen_scope.instructions.resize(mod.gpa, param_count);
+ var params_scope = &gen_scope.base;
+ var i: usize = 0;
+ var it = fn_proto.iterate(tree);
+ while (it.next()) |param| : (i += 1) {
+ const name_token = param.name_token.?;
+ const src = token_starts[name_token];
+ const param_name = try mod.identifierTokenString(&gen_scope.base, name_token);
+ const arg = try decl_arena.allocator.create(zir.Inst.Arg);
+ arg.* = .{
+ .base = .{
+ .tag = .arg,
+ .src = src,
+ },
+ .positionals = .{
+ .name = param_name,
},
+ .kw_args = .{},
};
- decl.analysis = .complete;
- decl.generation = self.generation;
-
- if (!is_inline and fn_type.hasCodeGenBits()) {
- // We don't fully codegen the decl until later, but we do need to reserve a global
- // offset table index for it. This allows us to codegen decls out of dependency order,
- // increasing how many computations can be done in parallel.
- try self.comp.bin_file.allocateDeclIndexes(decl);
- try self.comp.work_queue.writeItem(.{ .codegen_decl = decl });
- if (type_changed and self.emit_h != null) {
- try self.comp.work_queue.writeItem(.{ .emit_h_decl = decl });
- }
- } else if (!prev_is_inline and prev_type_has_bits) {
- self.comp.bin_file.freeDecl(decl);
- }
+ gen_scope.instructions.items[i] = &arg.base;
+ const sub_scope = try decl_arena.allocator.create(Scope.LocalVal);
+ sub_scope.* = .{
+ .parent = params_scope,
+ .gen_zir = &gen_scope,
+ .name = param_name,
+ .inst = &arg.base,
+ };
+ params_scope = &sub_scope.base;
+ }
- if (fn_proto.getExternExportInlineToken()) |maybe_export_token| {
- if (tree.token_ids[maybe_export_token] == .Keyword_export) {
- if (is_inline) {
- return self.failTok(
- &block_scope.base,
- maybe_export_token,
- "export of inline function",
- .{},
- );
- }
- const export_src = tree.token_locs[maybe_export_token].start;
- const name_loc = tree.token_locs[fn_proto.getNameToken().?];
- const name = tree.tokenSliceLoc(name_loc);
- // The scope needs to have the decl in it.
- try self.analyzeExport(&block_scope.base, export_src, name, decl);
- }
- }
- return type_changed or is_inline != prev_is_inline;
- },
- .VarDecl => {
- const var_decl = @fieldParentPtr(ast.Node.VarDecl, "base", ast_node);
+ _ = try astgen.expr(mod, params_scope, .none, body_node);
- decl.analysis = .in_progress;
+ if (gen_scope.instructions.items.len == 0 or
+ !gen_scope.instructions.items[gen_scope.instructions.items.len - 1].tag.isNoReturn())
+ {
+ const src = token_starts[tree.lastToken(body_node)];
+ _ = try astgen.addZIRNoOp(mod, &gen_scope.base, src, .return_void);
+ }
- // We need the memory for the Type to go into the arena for the Decl
- var decl_arena = std.heap.ArenaAllocator.init(self.gpa);
- errdefer decl_arena.deinit();
- const decl_arena_state = try decl_arena.allocator.create(std.heap.ArenaAllocator.State);
+ if (std.builtin.mode == .Debug and mod.comp.verbose_ir) {
+ zir.dumpZir(mod.gpa, "fn_body", decl.name, gen_scope.instructions.items) catch {};
+ }
- var decl_inst_table = Scope.Block.InstTable.init(self.gpa);
- defer decl_inst_table.deinit();
+ break :blk .{
+ .instructions = try gen_scope.arena.dupe(*zir.Inst, gen_scope.instructions.items),
+ };
+ };
- var branch_quota: u32 = default_eval_branch_quota;
+ const is_inline = fn_type.fnCallingConvention() == .Inline;
+ const anal_state: Fn.Analysis = if (is_inline) .inline_only else .queued;
- var block_scope: Scope.Block = .{
- .parent = null,
- .inst_table = &decl_inst_table,
- .func = null,
- .owner_decl = decl,
- .src_decl = decl,
- .instructions = .{},
- .arena = &decl_arena.allocator,
- .inlining = null,
- .is_comptime = true,
- .branch_quota = &branch_quota,
- };
- defer block_scope.instructions.deinit(self.gpa);
-
- decl.is_pub = var_decl.getVisibToken() != null;
- const is_extern = blk: {
- const maybe_extern_token = var_decl.getExternExportToken() orelse
- break :blk false;
- if (tree.token_ids[maybe_extern_token] != .Keyword_extern) break :blk false;
- if (var_decl.getInitNode()) |some| {
- return self.failNode(&block_scope.base, some, "extern variables have no initializers", .{});
- }
- break :blk true;
- };
- if (var_decl.getLibName()) |lib_name| {
- assert(is_extern);
- return self.failNode(&block_scope.base, lib_name, "TODO implement function library name", .{});
- }
- const is_mutable = tree.token_ids[var_decl.mut_token] == .Keyword_var;
- const is_threadlocal = if (var_decl.getThreadLocalToken()) |some| blk: {
- if (!is_mutable) {
- return self.failTok(&block_scope.base, some, "threadlocal variable cannot be constant", .{});
- }
- break :blk true;
- } else false;
- assert(var_decl.getComptimeToken() == null);
- if (var_decl.getAlignNode()) |align_expr| {
- return self.failNode(&block_scope.base, align_expr, "TODO implement function align expression", .{});
- }
- if (var_decl.getSectionNode()) |sect_expr| {
- return self.failNode(&block_scope.base, sect_expr, "TODO implement function section expression", .{});
- }
+ new_func.* = .{
+ .state = anal_state,
+ .zir = fn_zir,
+ .body = undefined,
+ .owner_decl = decl,
+ };
+ fn_payload.* = .{
+ .base = .{ .tag = .function },
+ .data = new_func,
+ };
- const var_info: struct { ty: Type, val: ?Value } = if (var_decl.getInitNode()) |init_node| vi: {
- var gen_scope_arena = std.heap.ArenaAllocator.init(self.gpa);
- defer gen_scope_arena.deinit();
- var gen_scope: Scope.GenZIR = .{
- .decl = decl,
- .arena = &gen_scope_arena.allocator,
- .parent = &decl.container.base,
- .force_comptime = false,
- };
- defer gen_scope.instructions.deinit(self.gpa);
+ var prev_type_has_bits = false;
+ var prev_is_inline = false;
+ var type_changed = true;
- const init_result_loc: astgen.ResultLoc = if (var_decl.getTypeNode()) |type_node| rl: {
- const src = tree.token_locs[type_node.firstToken()].start;
- const type_type = try astgen.addZIRInstConst(self, &gen_scope.base, src, .{
- .ty = Type.initTag(.type),
- .val = Value.initTag(.type_type),
- });
- const var_type = try astgen.expr(self, &gen_scope.base, .{ .ty = type_type }, type_node);
- break :rl .{ .ty = var_type };
- } else .none;
+ if (decl.typedValueManaged()) |tvm| {
+ prev_type_has_bits = tvm.typed_value.ty.hasCodeGenBits();
+ type_changed = !tvm.typed_value.ty.eql(fn_type);
+ if (tvm.typed_value.val.castTag(.function)) |payload| {
+ const prev_func = payload.data;
+ prev_is_inline = prev_func.state == .inline_only;
+ }
- const init_inst = try astgen.comptimeExpr(self, &gen_scope.base, init_result_loc, init_node);
- if (std.builtin.mode == .Debug and self.comp.verbose_ir) {
- zir.dumpZir(self.gpa, "var_init", decl.name, gen_scope.instructions.items) catch {};
- }
+ tvm.deinit(mod.gpa);
+ }
- var var_inst_table = Scope.Block.InstTable.init(self.gpa);
- defer var_inst_table.deinit();
-
- var branch_quota_vi: u32 = default_eval_branch_quota;
- var inner_block: Scope.Block = .{
- .parent = null,
- .inst_table = &var_inst_table,
- .func = null,
- .owner_decl = decl,
- .src_decl = decl,
- .instructions = .{},
- .arena = &gen_scope_arena.allocator,
- .inlining = null,
- .is_comptime = true,
- .branch_quota = &branch_quota_vi,
- };
- defer inner_block.instructions.deinit(self.gpa);
- try zir_sema.analyzeBody(self, &inner_block, .{
- .instructions = gen_scope.instructions.items,
- });
+ decl_arena_state.* = decl_arena.state;
+ decl.typed_value = .{
+ .most_recent = .{
+ .typed_value = .{
+ .ty = fn_type,
+ .val = Value.initPayload(&fn_payload.base),
+ },
+ .arena = decl_arena_state,
+ },
+ };
+ decl.analysis = .complete;
+ decl.generation = mod.generation;
+
+ if (!is_inline and fn_type.hasCodeGenBits()) {
+ // We don't fully codegen the decl until later, but we do need to reserve a global
+ // offset table index for it. This allows us to codegen decls out of dependency order,
+ // increasing how many computations can be done in parallel.
+ try mod.comp.bin_file.allocateDeclIndexes(decl);
+ try mod.comp.work_queue.writeItem(.{ .codegen_decl = decl });
+ if (type_changed and mod.emit_h != null) {
+ try mod.comp.work_queue.writeItem(.{ .emit_h_decl = decl });
+ }
+ } else if (!prev_is_inline and prev_type_has_bits) {
+ mod.comp.bin_file.freeDecl(decl);
+ }
+
+ if (fn_proto.extern_export_token) |maybe_export_token| {
+ if (token_tags[maybe_export_token] == .keyword_export) {
+ if (is_inline) {
+ return mod.failTok(
+ &block_scope.base,
+ maybe_export_token,
+ "export of inline function",
+ .{},
+ );
+ }
+ const export_src = token_starts[maybe_export_token];
+ const name = tree.tokenSlice(fn_proto.name_token.?); // TODO identifierTokenString
+ // The scope needs to have the decl in it.
+ try mod.analyzeExport(&block_scope.base, export_src, name, decl);
+ }
+ }
+ return type_changed or is_inline != prev_is_inline;
+}
- // The result location guarantees the type coercion.
- const analyzed_init_inst = var_inst_table.get(init_inst).?;
- // The is_comptime in the Scope.Block guarantees the result is comptime-known.
- const val = analyzed_init_inst.value().?;
+fn astgenAndSemaVarDecl(
+ mod: *Module,
+ decl: *Decl,
+ tree: ast.Tree,
+ var_decl: ast.full.VarDecl,
+) !bool {
+ const tracy = trace(@src());
+ defer tracy.end();
- const ty = try analyzed_init_inst.ty.copy(block_scope.arena);
- break :vi .{
- .ty = ty,
- .val = try val.copy(block_scope.arena),
- };
- } else if (!is_extern) {
- return self.failTok(&block_scope.base, var_decl.firstToken(), "variables must be initialized", .{});
- } else if (var_decl.getTypeNode()) |type_node| vi: {
- // Temporary arena for the zir instructions.
- var type_scope_arena = std.heap.ArenaAllocator.init(self.gpa);
- defer type_scope_arena.deinit();
- var type_scope: Scope.GenZIR = .{
- .decl = decl,
- .arena = &type_scope_arena.allocator,
- .parent = &decl.container.base,
- .force_comptime = true,
- };
- defer type_scope.instructions.deinit(self.gpa);
+ decl.analysis = .in_progress;
- const var_type = try astgen.typeExpr(self, &type_scope.base, type_node);
- if (std.builtin.mode == .Debug and self.comp.verbose_ir) {
- zir.dumpZir(self.gpa, "var_type", decl.name, type_scope.instructions.items) catch {};
- }
+ const token_starts = tree.tokens.items(.start);
+ const token_tags = tree.tokens.items(.tag);
- const ty = try zir_sema.analyzeBodyValueAsType(self, &block_scope, var_type, .{
- .instructions = type_scope.instructions.items,
- });
- break :vi .{
- .ty = ty,
- .val = null,
- };
- } else {
- return self.failTok(&block_scope.base, var_decl.firstToken(), "unable to infer variable type", .{});
- };
+ // We need the memory for the Type to go into the arena for the Decl
+ var decl_arena = std.heap.ArenaAllocator.init(mod.gpa);
+ errdefer decl_arena.deinit();
+ const decl_arena_state = try decl_arena.allocator.create(std.heap.ArenaAllocator.State);
- if (is_mutable and !var_info.ty.isValidVarType(is_extern)) {
- return self.failTok(&block_scope.base, var_decl.firstToken(), "variable of type '{}' must be const", .{var_info.ty});
- }
+ var decl_inst_table = Scope.Block.InstTable.init(mod.gpa);
+ defer decl_inst_table.deinit();
- var type_changed = true;
- if (decl.typedValueManaged()) |tvm| {
- type_changed = !tvm.typed_value.ty.eql(var_info.ty);
+ var branch_quota: u32 = default_eval_branch_quota;
- tvm.deinit(self.gpa);
- }
+ var block_scope: Scope.Block = .{
+ .parent = null,
+ .inst_table = &decl_inst_table,
+ .func = null,
+ .owner_decl = decl,
+ .src_decl = decl,
+ .instructions = .{},
+ .arena = &decl_arena.allocator,
+ .inlining = null,
+ .is_comptime = true,
+ .branch_quota = &branch_quota,
+ };
+ defer block_scope.instructions.deinit(mod.gpa);
+
+ decl.is_pub = var_decl.visib_token != null;
+ const is_extern = blk: {
+ const maybe_extern_token = var_decl.extern_export_token orelse break :blk false;
+ if (token_tags[maybe_extern_token] != .keyword_extern) break :blk false;
+ if (var_decl.ast.init_node != 0) {
+ return mod.failNode(
+ &block_scope.base,
+ var_decl.ast.init_node,
+ "extern variables have no initializers",
+ .{},
+ );
+ }
+ break :blk true;
+ };
+ if (var_decl.lib_name) |lib_name| {
+ assert(is_extern);
+        return mod.failTok(&block_scope.base, lib_name, "TODO implement variable library name", .{});
+ }
+ const is_mutable = token_tags[var_decl.ast.mut_token] == .keyword_var;
+ const is_threadlocal = if (var_decl.threadlocal_token) |some| blk: {
+ if (!is_mutable) {
+ return mod.failTok(&block_scope.base, some, "threadlocal variable cannot be constant", .{});
+ }
+ break :blk true;
+ } else false;
+ assert(var_decl.comptime_token == null);
+ if (var_decl.ast.align_node != 0) {
+ return mod.failNode(
+ &block_scope.base,
+ var_decl.ast.align_node,
+            "TODO implement variable align expression",
+ .{},
+ );
+ }
+ if (var_decl.ast.section_node != 0) {
+ return mod.failNode(
+ &block_scope.base,
+ var_decl.ast.section_node,
+            "TODO implement variable section expression",
+ .{},
+ );
+ }
- const new_variable = try decl_arena.allocator.create(Var);
- new_variable.* = .{
- .owner_decl = decl,
- .init = var_info.val orelse undefined,
- .is_extern = is_extern,
- .is_mutable = is_mutable,
- .is_threadlocal = is_threadlocal,
- };
- const var_val = try Value.Tag.variable.create(&decl_arena.allocator, new_variable);
-
- decl_arena_state.* = decl_arena.state;
- decl.typed_value = .{
- .most_recent = .{
- .typed_value = .{
- .ty = var_info.ty,
- .val = var_val,
- },
- .arena = decl_arena_state,
- },
- };
- decl.analysis = .complete;
- decl.generation = self.generation;
-
- if (var_decl.getExternExportToken()) |maybe_export_token| {
- if (tree.token_ids[maybe_export_token] == .Keyword_export) {
- const export_src = tree.token_locs[maybe_export_token].start;
- const name_loc = tree.token_locs[var_decl.name_token];
- const name = tree.tokenSliceLoc(name_loc);
- // The scope needs to have the decl in it.
- try self.analyzeExport(&block_scope.base, export_src, name, decl);
- }
- }
- return type_changed;
- },
- .Comptime => {
- const comptime_decl = @fieldParentPtr(ast.Node.Comptime, "base", ast_node);
+ const var_info: struct { ty: Type, val: ?Value } = if (var_decl.ast.init_node != 0) vi: {
+ var gen_scope_arena = std.heap.ArenaAllocator.init(mod.gpa);
+ defer gen_scope_arena.deinit();
+ var gen_scope: Scope.GenZIR = .{
+ .decl = decl,
+ .arena = &gen_scope_arena.allocator,
+ .parent = &decl.container.base,
+ .force_comptime = true,
+ };
+ defer gen_scope.instructions.deinit(mod.gpa);
- decl.analysis = .in_progress;
+ const init_result_loc: astgen.ResultLoc = if (var_decl.ast.type_node != 0) rl: {
+ const type_node = var_decl.ast.type_node;
+ const src = token_starts[tree.firstToken(type_node)];
+ const type_type = try astgen.addZIRInstConst(mod, &gen_scope.base, src, .{
+ .ty = Type.initTag(.type),
+ .val = Value.initTag(.type_type),
+ });
+ const var_type = try astgen.expr(mod, &gen_scope.base, .{ .ty = type_type }, type_node);
+ break :rl .{ .ty = var_type };
+ } else .none;
+
+ const init_inst = try astgen.comptimeExpr(
+ mod,
+ &gen_scope.base,
+ init_result_loc,
+ var_decl.ast.init_node,
+ );
+ if (std.builtin.mode == .Debug and mod.comp.verbose_ir) {
+ zir.dumpZir(mod.gpa, "var_init", decl.name, gen_scope.instructions.items) catch {};
+ }
- // A comptime decl does not store any value so we can just deinit
- // this arena after analysis is done.
- var analysis_arena = std.heap.ArenaAllocator.init(self.gpa);
- defer analysis_arena.deinit();
- var gen_scope: Scope.GenZIR = .{
- .decl = decl,
- .arena = &analysis_arena.allocator,
- .parent = &decl.container.base,
- .force_comptime = true,
- };
- defer gen_scope.instructions.deinit(self.gpa);
+ var var_inst_table = Scope.Block.InstTable.init(mod.gpa);
+ defer var_inst_table.deinit();
+
+ var branch_quota_vi: u32 = default_eval_branch_quota;
+ var inner_block: Scope.Block = .{
+ .parent = null,
+ .inst_table = &var_inst_table,
+ .func = null,
+ .owner_decl = decl,
+ .src_decl = decl,
+ .instructions = .{},
+ .arena = &gen_scope_arena.allocator,
+ .inlining = null,
+ .is_comptime = true,
+ .branch_quota = &branch_quota_vi,
+ };
+ defer inner_block.instructions.deinit(mod.gpa);
+ try zir_sema.analyzeBody(mod, &inner_block, .{
+ .instructions = gen_scope.instructions.items,
+ });
- _ = try astgen.comptimeExpr(self, &gen_scope.base, .none, comptime_decl.expr);
- if (std.builtin.mode == .Debug and self.comp.verbose_ir) {
- zir.dumpZir(self.gpa, "comptime_block", decl.name, gen_scope.instructions.items) catch {};
- }
+ // The result location guarantees the type coercion.
+ const analyzed_init_inst = var_inst_table.get(init_inst).?;
+ // The is_comptime in the Scope.Block guarantees the result is comptime-known.
+ const val = analyzed_init_inst.value().?;
- var inst_table = Scope.Block.InstTable.init(self.gpa);
- defer inst_table.deinit();
+ const ty = try analyzed_init_inst.ty.copy(block_scope.arena);
+ break :vi .{
+ .ty = ty,
+ .val = try val.copy(block_scope.arena),
+ };
+ } else if (!is_extern) {
+ return mod.failTok(
+ &block_scope.base,
+ var_decl.ast.mut_token,
+ "variables must be initialized",
+ .{},
+ );
+ } else if (var_decl.ast.type_node != 0) vi: {
+ const type_node = var_decl.ast.type_node;
+ // Temporary arena for the zir instructions.
+ var type_scope_arena = std.heap.ArenaAllocator.init(mod.gpa);
+ defer type_scope_arena.deinit();
+ var type_scope: Scope.GenZIR = .{
+ .decl = decl,
+ .arena = &type_scope_arena.allocator,
+ .parent = &decl.container.base,
+ .force_comptime = true,
+ };
+ defer type_scope.instructions.deinit(mod.gpa);
- var branch_quota: u32 = default_eval_branch_quota;
+ const var_type = try astgen.typeExpr(mod, &type_scope.base, type_node);
+ if (std.builtin.mode == .Debug and mod.comp.verbose_ir) {
+ zir.dumpZir(mod.gpa, "var_type", decl.name, type_scope.instructions.items) catch {};
+ }
- var block_scope: Scope.Block = .{
- .parent = null,
- .inst_table = &inst_table,
- .func = null,
- .owner_decl = decl,
- .src_decl = decl,
- .instructions = .{},
- .arena = &analysis_arena.allocator,
- .inlining = null,
- .is_comptime = true,
- .branch_quota = &branch_quota,
- };
- defer block_scope.instructions.deinit(self.gpa);
+ const ty = try zir_sema.analyzeBodyValueAsType(mod, &block_scope, var_type, .{
+ .instructions = type_scope.instructions.items,
+ });
+ break :vi .{
+ .ty = ty,
+ .val = null,
+ };
+ } else {
+ return mod.failTok(
+ &block_scope.base,
+ var_decl.ast.mut_token,
+ "unable to infer variable type",
+ .{},
+ );
+ };
- _ = try zir_sema.analyzeBody(self, &block_scope, .{
- .instructions = gen_scope.instructions.items,
- });
+ if (is_mutable and !var_info.ty.isValidVarType(is_extern)) {
+ return mod.failTok(
+ &block_scope.base,
+ var_decl.ast.mut_token,
+ "variable of type '{}' must be const",
+ .{var_info.ty},
+ );
+ }
- decl.analysis = .complete;
- decl.generation = self.generation;
- return true;
+ var type_changed = true;
+ if (decl.typedValueManaged()) |tvm| {
+ type_changed = !tvm.typed_value.ty.eql(var_info.ty);
+
+ tvm.deinit(mod.gpa);
+ }
+
+ const new_variable = try decl_arena.allocator.create(Var);
+ new_variable.* = .{
+ .owner_decl = decl,
+ .init = var_info.val orelse undefined,
+ .is_extern = is_extern,
+ .is_mutable = is_mutable,
+ .is_threadlocal = is_threadlocal,
+ };
+ const var_val = try Value.Tag.variable.create(&decl_arena.allocator, new_variable);
+
+ decl_arena_state.* = decl_arena.state;
+ decl.typed_value = .{
+ .most_recent = .{
+ .typed_value = .{
+ .ty = var_info.ty,
+ .val = var_val,
+ },
+ .arena = decl_arena_state,
},
- .Use => @panic("TODO usingnamespace decl"),
- else => unreachable,
+ };
+ decl.analysis = .complete;
+ decl.generation = mod.generation;
+
+ if (var_decl.extern_export_token) |maybe_export_token| {
+ if (token_tags[maybe_export_token] == .keyword_export) {
+ const export_src = token_starts[maybe_export_token];
+ const name_token = var_decl.ast.mut_token + 1;
+ const name = tree.tokenSlice(name_token); // TODO identifierTokenString
+ // The scope needs to have the decl in it.
+ try mod.analyzeExport(&block_scope.base, export_src, name, decl);
+ }
}
+ return type_changed;
}
fn declareDeclDependency(self: *Module, depender: *Decl, dependee: *Decl) !void {
@@ -1566,7 +1698,7 @@ fn declareDeclDependency(self: *Module, depender: *Decl, dependee: *Decl) !void
dependee.dependants.putAssumeCapacity(depender, {});
}
-pub fn getAstTree(self: *Module, root_scope: *Scope.File) !*ast.Tree {
+pub fn getAstTree(self: *Module, root_scope: *Scope.File) !*const ast.Tree {
const tracy = trace(@src());
defer tracy.end();
@@ -1577,8 +1709,10 @@ pub fn getAstTree(self: *Module, root_scope: *Scope.File) !*ast.Tree {
const source = try root_scope.getSource(self);
var keep_tree = false;
- const tree = try std.zig.parse(self.gpa, source);
- defer if (!keep_tree) tree.deinit();
+ root_scope.tree = try std.zig.parse(self.gpa, source);
+ defer if (!keep_tree) root_scope.tree.deinit(self.gpa);
+
+ const tree = &root_scope.tree;
if (tree.errors.len != 0) {
const parse_err = tree.errors[0];
@@ -1586,12 +1720,12 @@ pub fn getAstTree(self: *Module, root_scope: *Scope.File) !*ast.Tree {
var msg = std.ArrayList(u8).init(self.gpa);
defer msg.deinit();
- try parse_err.render(tree.token_ids, msg.writer());
+ try tree.renderError(parse_err, msg.writer());
const err_msg = try self.gpa.create(ErrorMsg);
err_msg.* = .{
.src_loc = .{
.file_scope = root_scope,
- .byte_offset = tree.token_locs[parse_err.loc()].start,
+ .byte_offset = tree.tokens.items(.start)[parse_err.token],
},
.msg = msg.toOwnedSlice(),
};
@@ -1602,7 +1736,6 @@ pub fn getAstTree(self: *Module, root_scope: *Scope.File) !*ast.Tree {
}
root_scope.status = .loaded_success;
- root_scope.contents = .{ .tree = tree };
keep_tree = true;
return tree;
@@ -1610,151 +1743,353 @@ pub fn getAstTree(self: *Module, root_scope: *Scope.File) !*ast.Tree {
.unloaded_parse_failure => return error.AnalysisFail,
- .loaded_success => return root_scope.contents.tree,
+ .loaded_success => return &root_scope.tree,
}
}
-pub fn analyzeContainer(self: *Module, container_scope: *Scope.Container) !void {
+pub fn analyzeContainer(mod: *Module, container_scope: *Scope.Container) !void {
const tracy = trace(@src());
defer tracy.end();
// We may be analyzing it for the first time, or this may be
// an incremental update. This code handles both cases.
- const tree = try self.getAstTree(container_scope.file_scope);
- const decls = tree.root_node.decls();
+ const tree = try mod.getAstTree(container_scope.file_scope);
+ const node_tags = tree.nodes.items(.tag);
+ const node_datas = tree.nodes.items(.data);
+ const decls = tree.rootDecls();
- try self.comp.work_queue.ensureUnusedCapacity(decls.len);
- try container_scope.decls.ensureCapacity(self.gpa, decls.len);
+ try mod.comp.work_queue.ensureUnusedCapacity(decls.len);
+ try container_scope.decls.ensureCapacity(mod.gpa, decls.len);
// Keep track of the decls that we expect to see in this file so that
// we know which ones have been deleted.
- var deleted_decls = std.AutoArrayHashMap(*Decl, void).init(self.gpa);
+ var deleted_decls = std.AutoArrayHashMap(*Decl, void).init(mod.gpa);
defer deleted_decls.deinit();
try deleted_decls.ensureCapacity(container_scope.decls.items().len);
for (container_scope.decls.items()) |entry| {
deleted_decls.putAssumeCapacityNoClobber(entry.key, {});
}
- for (decls) |src_decl, decl_i| {
- if (src_decl.cast(ast.Node.FnProto)) |fn_proto| {
- // We will create a Decl for it regardless of analysis status.
- const name_tok = fn_proto.getNameToken() orelse {
- @panic("TODO missing function name");
- };
-
- const name_loc = tree.token_locs[name_tok];
- const name = tree.tokenSliceLoc(name_loc);
- const name_hash = container_scope.fullyQualifiedNameHash(name);
- const contents_hash = std.zig.hashSrc(tree.getNodeSource(src_decl));
- if (self.decl_table.get(name_hash)) |decl| {
- // Update the AST Node index of the decl, even if its contents are unchanged, it may
- // have been re-ordered.
- decl.src_index = decl_i;
- if (deleted_decls.swapRemove(decl) == null) {
- decl.analysis = .sema_failure;
- const msg = try ErrorMsg.create(self.gpa, .{
- .file_scope = container_scope.file_scope,
- .byte_offset = tree.token_locs[name_tok].start,
- }, "redefinition of '{s}'", .{decl.name});
- errdefer msg.destroy(self.gpa);
- try self.failed_decls.putNoClobber(self.gpa, decl, msg);
- } else {
- if (!srcHashEql(decl.contents_hash, contents_hash)) {
- try self.markOutdatedDecl(decl);
- decl.contents_hash = contents_hash;
- } else switch (self.comp.bin_file.tag) {
- .coff => {
- // TODO Implement for COFF
- },
- .elf => if (decl.fn_link.elf.len != 0) {
- // TODO Look into detecting when this would be unnecessary by storing enough state
- // in `Decl` to notice that the line number did not change.
- self.comp.work_queue.writeItemAssumeCapacity(.{ .update_line_number = decl });
- },
- .macho => if (decl.fn_link.macho.len != 0) {
- // TODO Look into detecting when this would be unnecessary by storing enough state
- // in `Decl` to notice that the line number did not change.
- self.comp.work_queue.writeItemAssumeCapacity(.{ .update_line_number = decl });
- },
- .c, .wasm, .spirv => {},
- }
- }
- } else {
- const new_decl = try self.createNewDecl(&container_scope.base, name, decl_i, name_hash, contents_hash);
- container_scope.decls.putAssumeCapacity(new_decl, {});
- if (fn_proto.getExternExportInlineToken()) |maybe_export_token| {
- if (tree.token_ids[maybe_export_token] == .Keyword_export) {
- self.comp.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl });
- }
- }
- }
- } else if (src_decl.castTag(.VarDecl)) |var_decl| {
- const name_loc = tree.token_locs[var_decl.name_token];
- const name = tree.tokenSliceLoc(name_loc);
- const name_hash = container_scope.fullyQualifiedNameHash(name);
- const contents_hash = std.zig.hashSrc(tree.getNodeSource(src_decl));
- if (self.decl_table.get(name_hash)) |decl| {
- // Update the AST Node index of the decl, even if its contents are unchanged, it may
- // have been re-ordered.
- decl.src_index = decl_i;
- if (deleted_decls.swapRemove(decl) == null) {
- decl.analysis = .sema_failure;
- const err_msg = try ErrorMsg.create(self.gpa, .{
- .file_scope = container_scope.file_scope,
- .byte_offset = name_loc.start,
- }, "redefinition of '{s}'", .{decl.name});
- errdefer err_msg.destroy(self.gpa);
- try self.failed_decls.putNoClobber(self.gpa, decl, err_msg);
- } else if (!srcHashEql(decl.contents_hash, contents_hash)) {
- try self.markOutdatedDecl(decl);
- decl.contents_hash = contents_hash;
- }
- } else {
- const new_decl = try self.createNewDecl(&container_scope.base, name, decl_i, name_hash, contents_hash);
- container_scope.decls.putAssumeCapacity(new_decl, {});
- if (var_decl.getExternExportToken()) |maybe_export_token| {
- if (tree.token_ids[maybe_export_token] == .Keyword_export) {
- self.comp.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl });
- }
- }
+ for (decls) |decl_node, decl_i| switch (node_tags[decl_node]) {
+ .fn_decl => {
+ const fn_proto = node_datas[decl_node].lhs;
+ const body = node_datas[decl_node].rhs;
+ switch (node_tags[fn_proto]) {
+ .fn_proto_simple => {
+ var params: [1]ast.Node.Index = undefined;
+ try mod.semaContainerFn(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ body,
+ tree.fnProtoSimple(¶ms, fn_proto),
+ );
+ },
+ .fn_proto_multi => try mod.semaContainerFn(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ body,
+ tree.fnProtoMulti(fn_proto),
+ ),
+ .fn_proto_one => {
+ var params: [1]ast.Node.Index = undefined;
+ try mod.semaContainerFn(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ body,
+ tree.fnProtoOne(¶ms, fn_proto),
+ );
+ },
+ .fn_proto => try mod.semaContainerFn(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ body,
+ tree.fnProto(fn_proto),
+ ),
+ else => unreachable,
}
- } else if (src_decl.castTag(.Comptime)) |comptime_node| {
- const name_index = self.getNextAnonNameIndex();
- const name = try std.fmt.allocPrint(self.gpa, "__comptime_{d}", .{name_index});
- defer self.gpa.free(name);
+ },
+ .fn_proto_simple => {
+ var params: [1]ast.Node.Index = undefined;
+ try mod.semaContainerFn(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ 0,
+ tree.fnProtoSimple(¶ms, decl_node),
+ );
+ },
+ .fn_proto_multi => try mod.semaContainerFn(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ 0,
+ tree.fnProtoMulti(decl_node),
+ ),
+ .fn_proto_one => {
+ var params: [1]ast.Node.Index = undefined;
+ try mod.semaContainerFn(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ 0,
+ tree.fnProtoOne(¶ms, decl_node),
+ );
+ },
+ .fn_proto => try mod.semaContainerFn(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ 0,
+ tree.fnProto(decl_node),
+ ),
+
+ .global_var_decl => try mod.semaContainerVar(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ tree.globalVarDecl(decl_node),
+ ),
+ .local_var_decl => try mod.semaContainerVar(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ tree.localVarDecl(decl_node),
+ ),
+ .simple_var_decl => try mod.semaContainerVar(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ tree.simpleVarDecl(decl_node),
+ ),
+ .aligned_var_decl => try mod.semaContainerVar(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ tree.alignedVarDecl(decl_node),
+ ),
+
+ .@"comptime" => {
+ const name_index = mod.getNextAnonNameIndex();
+ const name = try std.fmt.allocPrint(mod.gpa, "__comptime_{d}", .{name_index});
+ defer mod.gpa.free(name);
const name_hash = container_scope.fullyQualifiedNameHash(name);
- const contents_hash = std.zig.hashSrc(tree.getNodeSource(src_decl));
+ const contents_hash = std.zig.hashSrc(tree.getNodeSource(decl_node));
- const new_decl = try self.createNewDecl(&container_scope.base, name, decl_i, name_hash, contents_hash);
+ const new_decl = try mod.createNewDecl(&container_scope.base, name, decl_i, name_hash, contents_hash);
container_scope.decls.putAssumeCapacity(new_decl, {});
- self.comp.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl });
- } else if (src_decl.castTag(.ContainerField)) |container_field| {
- log.err("TODO: analyze container field", .{});
- } else if (src_decl.castTag(.TestDecl)) |test_decl| {
+ mod.comp.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl });
+ },
+
+ .container_field_init => try mod.semaContainerField(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ tree.containerFieldInit(decl_node),
+ ),
+ .container_field_align => try mod.semaContainerField(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ tree.containerFieldAlign(decl_node),
+ ),
+ .container_field => try mod.semaContainerField(
+ container_scope,
+ &deleted_decls,
+ decl_node,
+ decl_i,
+ tree.*,
+ tree.containerField(decl_node),
+ ),
+
+ .test_decl => {
log.err("TODO: analyze test decl", .{});
- } else if (src_decl.castTag(.Use)) |use_decl| {
+ },
+ .@"usingnamespace" => {
log.err("TODO: analyze usingnamespace decl", .{});
- } else {
- unreachable;
- }
- }
+ },
+ else => unreachable,
+ };
// Handle explicitly deleted decls from the source code. Not to be confused
// with when we delete decls because they are no longer referenced.
for (deleted_decls.items()) |entry| {
- log.debug("noticed '{s}' deleted from source\n", .{entry.key.name});
- try self.deleteDecl(entry.key);
+ log.debug("noticed '{s}' deleted from source", .{entry.key.name});
+ try mod.deleteDecl(entry.key);
+ }
+}
+
+fn semaContainerFn(
+ mod: *Module,
+ container_scope: *Scope.Container,
+ deleted_decls: *std.AutoArrayHashMap(*Decl, void),
+ decl_node: ast.Node.Index,
+ decl_i: usize,
+ tree: ast.Tree,
+ body_node: ast.Node.Index,
+ fn_proto: ast.full.FnProto,
+) !void {
+ const tracy = trace(@src());
+ defer tracy.end();
+
+ const token_starts = tree.tokens.items(.start);
+ const token_tags = tree.tokens.items(.tag);
+
+ // We will create a Decl for it regardless of analysis status.
+ const name_tok = fn_proto.name_token orelse {
+ @panic("TODO missing function name");
+ };
+ const name = tree.tokenSlice(name_tok); // TODO use identifierTokenString
+ const name_hash = container_scope.fullyQualifiedNameHash(name);
+ const contents_hash = std.zig.hashSrc(tree.getNodeSource(decl_node));
+ if (mod.decl_table.get(name_hash)) |decl| {
+ // Update the AST Node index of the decl, even if its contents are unchanged, it may
+ // have been re-ordered.
+ decl.src_index = decl_i;
+ if (deleted_decls.swapRemove(decl) == null) {
+ decl.analysis = .sema_failure;
+ const msg = try ErrorMsg.create(mod.gpa, .{
+ .file_scope = container_scope.file_scope,
+ .byte_offset = token_starts[name_tok],
+ }, "redefinition of '{s}'", .{decl.name});
+ errdefer msg.destroy(mod.gpa);
+ try mod.failed_decls.putNoClobber(mod.gpa, decl, msg);
+ } else {
+ if (!srcHashEql(decl.contents_hash, contents_hash)) {
+ try mod.markOutdatedDecl(decl);
+ decl.contents_hash = contents_hash;
+ } else switch (mod.comp.bin_file.tag) {
+ .coff => {
+ // TODO Implement for COFF
+ },
+ .elf => if (decl.fn_link.elf.len != 0) {
+ // TODO Look into detecting when this would be unnecessary by storing enough state
+ // in `Decl` to notice that the line number did not change.
+ mod.comp.work_queue.writeItemAssumeCapacity(.{ .update_line_number = decl });
+ },
+ .macho => if (decl.fn_link.macho.len != 0) {
+ // TODO Look into detecting when this would be unnecessary by storing enough state
+ // in `Decl` to notice that the line number did not change.
+ mod.comp.work_queue.writeItemAssumeCapacity(.{ .update_line_number = decl });
+ },
+ .c, .wasm, .spirv => {},
+ }
+ }
+ } else {
+ const new_decl = try mod.createNewDecl(&container_scope.base, name, decl_i, name_hash, contents_hash);
+ container_scope.decls.putAssumeCapacity(new_decl, {});
+ if (fn_proto.extern_export_token) |maybe_export_token| {
+ if (token_tags[maybe_export_token] == .keyword_export) {
+ mod.comp.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl });
+ }
+ }
+ }
+}
+
+fn semaContainerVar(
+ mod: *Module,
+ container_scope: *Scope.Container,
+ deleted_decls: *std.AutoArrayHashMap(*Decl, void),
+ decl_node: ast.Node.Index,
+ decl_i: usize,
+ tree: ast.Tree,
+ var_decl: ast.full.VarDecl,
+) !void {
+ const tracy = trace(@src());
+ defer tracy.end();
+
+ const token_starts = tree.tokens.items(.start);
+ const token_tags = tree.tokens.items(.tag);
+
+ const name_token = var_decl.ast.mut_token + 1;
+ const name_src = token_starts[name_token];
+ const name = tree.tokenSlice(name_token); // TODO identifierTokenString
+ const name_hash = container_scope.fullyQualifiedNameHash(name);
+ const contents_hash = std.zig.hashSrc(tree.getNodeSource(decl_node));
+ if (mod.decl_table.get(name_hash)) |decl| {
+ // Update the AST Node index of the decl, even if its contents are unchanged, it may
+ // have been re-ordered.
+ decl.src_index = decl_i;
+ if (deleted_decls.swapRemove(decl) == null) {
+ decl.analysis = .sema_failure;
+ const err_msg = try ErrorMsg.create(mod.gpa, .{
+ .file_scope = container_scope.file_scope,
+ .byte_offset = name_src,
+ }, "redefinition of '{s}'", .{decl.name});
+ errdefer err_msg.destroy(mod.gpa);
+ try mod.failed_decls.putNoClobber(mod.gpa, decl, err_msg);
+ } else if (!srcHashEql(decl.contents_hash, contents_hash)) {
+ try mod.markOutdatedDecl(decl);
+ decl.contents_hash = contents_hash;
+ }
+ } else {
+ const new_decl = try mod.createNewDecl(&container_scope.base, name, decl_i, name_hash, contents_hash);
+ container_scope.decls.putAssumeCapacity(new_decl, {});
+ if (var_decl.extern_export_token) |maybe_export_token| {
+ if (token_tags[maybe_export_token] == .keyword_export) {
+ mod.comp.work_queue.writeItemAssumeCapacity(.{ .analyze_decl = new_decl });
+ }
+ }
}
}
+fn semaContainerField(
+ mod: *Module,
+ container_scope: *Scope.Container,
+ deleted_decls: *std.AutoArrayHashMap(*Decl, void),
+ decl_node: ast.Node.Index,
+ decl_i: usize,
+ tree: ast.Tree,
+ field: ast.full.ContainerField,
+) !void {
+ const tracy = trace(@src());
+ defer tracy.end();
+
+ log.err("TODO: analyze container field", .{});
+}
+
pub fn deleteDecl(self: *Module, decl: *Decl) !void {
+ const tracy = trace(@src());
+ defer tracy.end();
+
try self.deletion_set.ensureCapacity(self.gpa, self.deletion_set.items.len + decl.dependencies.items().len);
// Remove from the namespace it resides in. In the case of an anonymous Decl it will
// not be present in the set, and this does nothing.
decl.container.removeDecl(decl);
- log.debug("deleting decl '{s}'\n", .{decl.name});
+ log.debug("deleting decl '{s}'", .{decl.name});
const name_hash = decl.fullyQualifiedNameHash();
self.decl_table.removeAssertDiscard(name_hash);
// Remove itself from its dependencies, because we are about to destroy the decl pointer.
@@ -1856,18 +2191,18 @@ pub fn analyzeFnBody(self: *Module, decl: *Decl, func: *Fn) !void {
defer inner_block.instructions.deinit(self.gpa);
func.state = .in_progress;
- log.debug("set {s} to in_progress\n", .{decl.name});
+ log.debug("set {s} to in_progress", .{decl.name});
try zir_sema.analyzeBody(self, &inner_block, func.zir);
const instructions = try arena.allocator.dupe(*Inst, inner_block.instructions.items);
func.state = .success;
func.body = .{ .instructions = instructions };
- log.debug("set {s} to success\n", .{decl.name});
+ log.debug("set {s} to success", .{decl.name});
}
fn markOutdatedDecl(self: *Module, decl: *Decl) !void {
- log.debug("mark {s} outdated\n", .{decl.name});
+ log.debug("mark {s} outdated", .{decl.name});
try self.comp.work_queue.writeItem(.{ .analyze_decl = decl });
if (self.failed_decls.swapRemove(decl)) |entry| {
entry.value.destroy(self.gpa);
@@ -2395,15 +2730,16 @@ pub fn createContainerDecl(
fn getAnonTypeName(self: *Module, scope: *Scope, base_token: std.zig.ast.TokenIndex) ![]u8 {
// TODO add namespaces, generic function signatrues
const tree = scope.tree();
- const base_name = switch (tree.token_ids[base_token]) {
- .Keyword_struct => "struct",
- .Keyword_enum => "enum",
- .Keyword_union => "union",
- .Keyword_opaque => "opaque",
+ const token_tags = tree.tokens.items(.tag);
+ const base_name = switch (token_tags[base_token]) {
+ .keyword_struct => "struct",
+ .keyword_enum => "enum",
+ .keyword_union => "union",
+ .keyword_opaque => "opaque",
else => unreachable,
};
- const loc = tree.tokenLocationLoc(0, tree.token_locs[base_token]);
- return std.fmt.allocPrint(self.gpa, "{s}:{}:{}", .{ base_name, loc.line, loc.column });
+ const loc = tree.tokenLocation(0, base_token);
+ return std.fmt.allocPrint(self.gpa, "{s}:{d}:{d}", .{ base_name, loc.line, loc.column });
}
fn getNextAnonNameIndex(self: *Module) usize {
@@ -2639,7 +2975,7 @@ pub fn analyzeImport(self: *Module, scope: *Scope, src: usize, target_string: []
file_scope.* = .{
.sub_file_path = resolved_path,
.source = .{ .unloaded = {} },
- .contents = .{ .not_available = {} },
+ .tree = undefined,
.status = .never_loaded,
.pkg = found_pkg orelse cur_pkg,
.root_container = .{
@@ -3149,18 +3485,19 @@ pub fn failTok(
comptime format: []const u8,
args: anytype,
) InnerError {
- const src = scope.tree().token_locs[token_index].start;
+ const src = scope.tree().tokens.items(.start)[token_index];
return self.fail(scope, src, format, args);
}
pub fn failNode(
self: *Module,
scope: *Scope,
- ast_node: *ast.Node,
+ ast_node: ast.Node.Index,
comptime format: []const u8,
args: anytype,
) InnerError {
- const src = scope.tree().token_locs[ast_node.firstToken()].start;
+ const tree = scope.tree();
+ const src = tree.tokens.items(.start)[tree.firstToken(ast_node)];
return self.fail(scope, src, format, args);
}
@@ -3594,6 +3931,9 @@ pub fn validateVarType(mod: *Module, scope: *Scope, src: usize, ty: Type) !void
/// Identifier token -> String (allocated in scope.arena())
pub fn identifierTokenString(mod: *Module, scope: *Scope, token: ast.TokenIndex) InnerError![]const u8 {
const tree = scope.tree();
+ const token_tags = tree.tokens.items(.tag);
+ const token_starts = tree.tokens.items(.start);
+ assert(token_tags[token] == .identifier);
const ident_name = tree.tokenSlice(token);
if (mem.startsWith(u8, ident_name, "@")) {
@@ -3602,7 +3942,7 @@ pub fn identifierTokenString(mod: *Module, scope: *Scope, token: ast.TokenIndex)
return std.zig.parseStringLiteral(scope.arena(), raw_string, &bad_index) catch |err| switch (err) {
error.InvalidCharacter => {
const bad_byte = raw_string[bad_index];
- const src = tree.token_locs[token].start;
+ const src = token_starts[token];
return mod.fail(scope, src + 1 + bad_index, "invalid string literal character: '{c}'\n", .{bad_byte});
},
else => |e| return e,
diff --git a/src/astgen.zig b/src/astgen.zig
index ece16d70da3e..5b27925a5ffe 100644
--- a/src/astgen.zig
+++ b/src/astgen.zig
@@ -1,16 +1,18 @@
const std = @import("std");
const mem = std.mem;
const Allocator = std.mem.Allocator;
+const assert = std.debug.assert;
+
const Value = @import("value.zig").Value;
const Type = @import("type.zig").Type;
const TypedValue = @import("TypedValue.zig");
-const assert = std.debug.assert;
const zir = @import("zir.zig");
const Module = @import("Module.zig");
const ast = std.zig.ast;
const trace = @import("tracy.zig").trace;
const Scope = Module.Scope;
const InnerError = Module.InnerError;
+const BuiltinFn = @import("BuiltinFn.zig");
pub const ResultLoc = union(enum) {
/// The expression is the right-hand side of assignment to `_`. Only the side-effects of the
@@ -55,8 +57,11 @@ pub const ResultLoc = union(enum) {
};
};
-pub fn typeExpr(mod: *Module, scope: *Scope, type_node: *ast.Node) InnerError!*zir.Inst {
- const type_src = scope.tree().token_locs[type_node.firstToken()].start;
+pub fn typeExpr(mod: *Module, scope: *Scope, type_node: ast.Node.Index) InnerError!*zir.Inst {
+ const tree = scope.tree();
+ const token_starts = tree.tokens.items(.start);
+
+ const type_src = token_starts[tree.firstToken(type_node)];
const type_type = try addZIRInstConst(mod, scope, type_src, .{
.ty = Type.initTag(.type),
.val = Value.initTag(.type_type),
@@ -65,134 +70,191 @@ pub fn typeExpr(mod: *Module, scope: *Scope, type_node: *ast.Node) InnerError!*z
return expr(mod, scope, type_rl, type_node);
}
-fn lvalExpr(mod: *Module, scope: *Scope, node: *ast.Node) InnerError!*zir.Inst {
- switch (node.tag) {
- .Root => unreachable,
- .Use => unreachable,
- .TestDecl => unreachable,
- .DocComment => unreachable,
- .VarDecl => unreachable,
- .SwitchCase => unreachable,
- .SwitchElse => unreachable,
- .Else => unreachable,
- .Payload => unreachable,
- .PointerPayload => unreachable,
- .PointerIndexPayload => unreachable,
- .ErrorTag => unreachable,
- .FieldInitializer => unreachable,
- .ContainerField => unreachable,
-
- .Assign,
- .AssignBitAnd,
- .AssignBitOr,
- .AssignBitShiftLeft,
- .AssignBitShiftRight,
- .AssignBitXor,
- .AssignDiv,
- .AssignSub,
- .AssignSubWrap,
- .AssignMod,
- .AssignAdd,
- .AssignAddWrap,
- .AssignMul,
- .AssignMulWrap,
- .Add,
- .AddWrap,
- .Sub,
- .SubWrap,
- .Mul,
- .MulWrap,
- .Div,
- .Mod,
- .BitAnd,
- .BitOr,
- .BitShiftLeft,
- .BitShiftRight,
- .BitXor,
- .BangEqual,
- .EqualEqual,
- .GreaterThan,
- .GreaterOrEqual,
- .LessThan,
- .LessOrEqual,
- .ArrayCat,
- .ArrayMult,
- .BoolAnd,
- .BoolOr,
- .Asm,
- .StringLiteral,
- .IntegerLiteral,
- .Call,
- .Unreachable,
- .Return,
- .If,
- .While,
- .BoolNot,
- .AddressOf,
- .FloatLiteral,
- .UndefinedLiteral,
- .BoolLiteral,
- .NullLiteral,
- .OptionalType,
- .Block,
- .LabeledBlock,
- .Break,
- .PtrType,
- .ArrayType,
- .ArrayTypeSentinel,
- .EnumLiteral,
- .MultilineStringLiteral,
- .CharLiteral,
- .Defer,
- .Catch,
- .ErrorUnion,
- .MergeErrorSets,
- .Range,
- .Await,
- .BitNot,
- .Negation,
- .NegationWrap,
- .Resume,
- .Try,
- .SliceType,
- .Slice,
- .ArrayInitializer,
- .ArrayInitializerDot,
- .StructInitializer,
- .StructInitializerDot,
- .Switch,
- .For,
- .Suspend,
- .Continue,
- .AnyType,
- .ErrorType,
- .FnProto,
- .AnyFrameType,
- .ErrorSetDecl,
- .ContainerDecl,
- .Comptime,
- .Nosuspend,
+fn lvalExpr(mod: *Module, scope: *Scope, node: ast.Node.Index) InnerError!*zir.Inst {
+ const tree = scope.tree();
+ const node_tags = tree.nodes.items(.tag);
+ const main_tokens = tree.nodes.items(.main_token);
+ switch (node_tags[node]) {
+ .root => unreachable,
+ .@"usingnamespace" => unreachable,
+ .test_decl => unreachable,
+ .global_var_decl => unreachable,
+ .local_var_decl => unreachable,
+ .simple_var_decl => unreachable,
+ .aligned_var_decl => unreachable,
+ .switch_case => unreachable,
+ .switch_case_one => unreachable,
+ .container_field_init => unreachable,
+ .container_field_align => unreachable,
+ .container_field => unreachable,
+ .asm_output => unreachable,
+ .asm_input => unreachable,
+
+ .assign,
+ .assign_bit_and,
+ .assign_bit_or,
+ .assign_bit_shift_left,
+ .assign_bit_shift_right,
+ .assign_bit_xor,
+ .assign_div,
+ .assign_sub,
+ .assign_sub_wrap,
+ .assign_mod,
+ .assign_add,
+ .assign_add_wrap,
+ .assign_mul,
+ .assign_mul_wrap,
+ .add,
+ .add_wrap,
+ .sub,
+ .sub_wrap,
+ .mul,
+ .mul_wrap,
+ .div,
+ .mod,
+ .bit_and,
+ .bit_or,
+ .bit_shift_left,
+ .bit_shift_right,
+ .bit_xor,
+ .bang_equal,
+ .equal_equal,
+ .greater_than,
+ .greater_or_equal,
+ .less_than,
+ .less_or_equal,
+ .array_cat,
+ .array_mult,
+ .bool_and,
+ .bool_or,
+ .@"asm",
+ .asm_simple,
+ .string_literal,
+ .integer_literal,
+ .call,
+ .call_comma,
+ .async_call,
+ .async_call_comma,
+ .call_one,
+ .call_one_comma,
+ .async_call_one,
+ .async_call_one_comma,
+ .unreachable_literal,
+ .@"return",
+ .@"if",
+ .if_simple,
+ .@"while",
+ .while_simple,
+ .while_cont,
+ .bool_not,
+ .address_of,
+ .float_literal,
+ .undefined_literal,
+ .true_literal,
+ .false_literal,
+ .null_literal,
+ .optional_type,
+ .block,
+ .block_semicolon,
+ .block_two,
+ .block_two_semicolon,
+ .@"break",
+ .ptr_type_aligned,
+ .ptr_type_sentinel,
+ .ptr_type,
+ .ptr_type_bit_range,
+ .array_type,
+ .array_type_sentinel,
+ .enum_literal,
+ .multiline_string_literal,
+ .char_literal,
+ .@"defer",
+ .@"errdefer",
+ .@"catch",
+ .error_union,
+ .merge_error_sets,
+ .switch_range,
+ .@"await",
+ .bit_not,
+ .negation,
+ .negation_wrap,
+ .@"resume",
+ .@"try",
+ .slice,
+ .slice_open,
+ .slice_sentinel,
+ .array_init_one,
+ .array_init_one_comma,
+ .array_init_dot_two,
+ .array_init_dot_two_comma,
+ .array_init_dot,
+ .array_init_dot_comma,
+ .array_init,
+ .array_init_comma,
+ .struct_init_one,
+ .struct_init_one_comma,
+ .struct_init_dot_two,
+ .struct_init_dot_two_comma,
+ .struct_init_dot,
+ .struct_init_dot_comma,
+ .struct_init,
+ .struct_init_comma,
+ .@"switch",
+ .switch_comma,
+ .@"for",
+ .for_simple,
+ .@"suspend",
+ .@"continue",
+ .@"anytype",
+ .fn_proto_simple,
+ .fn_proto_multi,
+ .fn_proto_one,
+ .fn_proto,
+ .fn_decl,
+ .anyframe_type,
+ .anyframe_literal,
+ .error_set_decl,
+ .container_decl,
+ .container_decl_trailing,
+ .container_decl_two,
+ .container_decl_two_trailing,
+ .container_decl_arg,
+ .container_decl_arg_trailing,
+ .tagged_union,
+ .tagged_union_trailing,
+ .tagged_union_two,
+ .tagged_union_two_trailing,
+ .tagged_union_enum_tag,
+ .tagged_union_enum_tag_trailing,
+ .@"comptime",
+ .@"nosuspend",
+ .error_value,
=> return mod.failNode(scope, node, "invalid left-hand side to assignment", .{}),
- // @field can be assigned to
- .BuiltinCall => {
- const call = node.castTag(.BuiltinCall).?;
- const tree = scope.tree();
- const builtin_name = tree.tokenSlice(call.builtin_token);
-
- if (!mem.eql(u8, builtin_name, "@field")) {
- return mod.failNode(scope, node, "invalid left-hand side to assignment", .{});
+ .builtin_call,
+ .builtin_call_comma,
+ .builtin_call_two,
+ .builtin_call_two_comma,
+ => {
+ const builtin_token = main_tokens[node];
+ const builtin_name = tree.tokenSlice(builtin_token);
+ // If the builtin is an invalid name, we don't cause an error here; instead
+ // let it pass, and the error will be "invalid builtin function" later.
+ if (BuiltinFn.list.get(builtin_name)) |info| {
+ if (!info.allows_lvalue) {
+ return mod.failNode(scope, node, "invalid left-hand side to assignment", .{});
+ }
}
},
- // can be assigned to
- .UnwrapOptional,
- .Deref,
- .Period,
- .ArrayAccess,
- .Identifier,
- .GroupedExpression,
- .OrElse,
+ // These can be assigned to.
+ .unwrap_optional,
+ .deref,
+ .field_access,
+ .array_access,
+ .identifier,
+ .grouped_expression,
+ .@"orelse",
=> {},
}
return expr(mod, scope, .ref, node);
@@ -202,154 +264,403 @@ fn lvalExpr(mod: *Module, scope: *Scope, node: *ast.Node) InnerError!*zir.Inst {
/// When `rl` is discard, ptr, inferred_ptr, bitcasted_ptr, or inferred_ptr, the
/// result instruction can be used to inspect whether it is isNoReturn() but that is it,
/// it must otherwise not be used.
-pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerError!*zir.Inst {
- switch (node.tag) {
- .Root => unreachable, // Top-level declaration.
- .Use => unreachable, // Top-level declaration.
- .TestDecl => unreachable, // Top-level declaration.
- .DocComment => unreachable, // Top-level declaration.
- .VarDecl => unreachable, // Handled in `blockExpr`.
- .SwitchCase => unreachable, // Handled in `switchExpr`.
- .SwitchElse => unreachable, // Handled in `switchExpr`.
- .Range => unreachable, // Handled in `switchExpr`.
- .Else => unreachable, // Handled explicitly the control flow expression functions.
- .Payload => unreachable, // Handled explicitly.
- .PointerPayload => unreachable, // Handled explicitly.
- .PointerIndexPayload => unreachable, // Handled explicitly.
- .ErrorTag => unreachable, // Handled explicitly.
- .FieldInitializer => unreachable, // Handled explicitly.
- .ContainerField => unreachable, // Handled explicitly.
-
- .Assign => return rvalueVoid(mod, scope, rl, node, try assign(mod, scope, node.castTag(.Assign).?)),
- .AssignBitAnd => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignBitAnd).?, .bit_and)),
- .AssignBitOr => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignBitOr).?, .bit_or)),
- .AssignBitShiftLeft => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignBitShiftLeft).?, .shl)),
- .AssignBitShiftRight => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignBitShiftRight).?, .shr)),
- .AssignBitXor => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignBitXor).?, .xor)),
- .AssignDiv => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignDiv).?, .div)),
- .AssignSub => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignSub).?, .sub)),
- .AssignSubWrap => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignSubWrap).?, .subwrap)),
- .AssignMod => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignMod).?, .mod_rem)),
- .AssignAdd => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignAdd).?, .add)),
- .AssignAddWrap => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignAddWrap).?, .addwrap)),
- .AssignMul => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignMul).?, .mul)),
- .AssignMulWrap => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node.castTag(.AssignMulWrap).?, .mulwrap)),
-
- .Add => return simpleBinOp(mod, scope, rl, node.castTag(.Add).?, .add),
- .AddWrap => return simpleBinOp(mod, scope, rl, node.castTag(.AddWrap).?, .addwrap),
- .Sub => return simpleBinOp(mod, scope, rl, node.castTag(.Sub).?, .sub),
- .SubWrap => return simpleBinOp(mod, scope, rl, node.castTag(.SubWrap).?, .subwrap),
- .Mul => return simpleBinOp(mod, scope, rl, node.castTag(.Mul).?, .mul),
- .MulWrap => return simpleBinOp(mod, scope, rl, node.castTag(.MulWrap).?, .mulwrap),
- .Div => return simpleBinOp(mod, scope, rl, node.castTag(.Div).?, .div),
- .Mod => return simpleBinOp(mod, scope, rl, node.castTag(.Mod).?, .mod_rem),
- .BitAnd => return simpleBinOp(mod, scope, rl, node.castTag(.BitAnd).?, .bit_and),
- .BitOr => return simpleBinOp(mod, scope, rl, node.castTag(.BitOr).?, .bit_or),
- .BitShiftLeft => return simpleBinOp(mod, scope, rl, node.castTag(.BitShiftLeft).?, .shl),
- .BitShiftRight => return simpleBinOp(mod, scope, rl, node.castTag(.BitShiftRight).?, .shr),
- .BitXor => return simpleBinOp(mod, scope, rl, node.castTag(.BitXor).?, .xor),
-
- .BangEqual => return simpleBinOp(mod, scope, rl, node.castTag(.BangEqual).?, .cmp_neq),
- .EqualEqual => return simpleBinOp(mod, scope, rl, node.castTag(.EqualEqual).?, .cmp_eq),
- .GreaterThan => return simpleBinOp(mod, scope, rl, node.castTag(.GreaterThan).?, .cmp_gt),
- .GreaterOrEqual => return simpleBinOp(mod, scope, rl, node.castTag(.GreaterOrEqual).?, .cmp_gte),
- .LessThan => return simpleBinOp(mod, scope, rl, node.castTag(.LessThan).?, .cmp_lt),
- .LessOrEqual => return simpleBinOp(mod, scope, rl, node.castTag(.LessOrEqual).?, .cmp_lte),
-
- .ArrayCat => return simpleBinOp(mod, scope, rl, node.castTag(.ArrayCat).?, .array_cat),
- .ArrayMult => return simpleBinOp(mod, scope, rl, node.castTag(.ArrayMult).?, .array_mul),
-
- .BoolAnd => return boolBinOp(mod, scope, rl, node.castTag(.BoolAnd).?),
- .BoolOr => return boolBinOp(mod, scope, rl, node.castTag(.BoolOr).?),
-
- .BoolNot => return rvalue(mod, scope, rl, try boolNot(mod, scope, node.castTag(.BoolNot).?)),
- .BitNot => return rvalue(mod, scope, rl, try bitNot(mod, scope, node.castTag(.BitNot).?)),
- .Negation => return rvalue(mod, scope, rl, try negation(mod, scope, node.castTag(.Negation).?, .sub)),
- .NegationWrap => return rvalue(mod, scope, rl, try negation(mod, scope, node.castTag(.NegationWrap).?, .subwrap)),
-
- .Identifier => return try identifier(mod, scope, rl, node.castTag(.Identifier).?),
- .Asm => return rvalue(mod, scope, rl, try assembly(mod, scope, node.castTag(.Asm).?)),
- .StringLiteral => return rvalue(mod, scope, rl, try stringLiteral(mod, scope, node.castTag(.StringLiteral).?)),
- .IntegerLiteral => return rvalue(mod, scope, rl, try integerLiteral(mod, scope, node.castTag(.IntegerLiteral).?)),
- .BuiltinCall => return builtinCall(mod, scope, rl, node.castTag(.BuiltinCall).?),
- .Call => return callExpr(mod, scope, rl, node.castTag(.Call).?),
- .Unreachable => return unreach(mod, scope, node.castTag(.Unreachable).?),
- .Return => return ret(mod, scope, node.castTag(.Return).?),
- .If => return ifExpr(mod, scope, rl, node.castTag(.If).?),
- .While => return whileExpr(mod, scope, rl, node.castTag(.While).?),
- .Period => return field(mod, scope, rl, node.castTag(.Period).?),
- .Deref => return rvalue(mod, scope, rl, try deref(mod, scope, node.castTag(.Deref).?)),
- .AddressOf => return rvalue(mod, scope, rl, try addressOf(mod, scope, node.castTag(.AddressOf).?)),
- .FloatLiteral => return rvalue(mod, scope, rl, try floatLiteral(mod, scope, node.castTag(.FloatLiteral).?)),
- .UndefinedLiteral => return rvalue(mod, scope, rl, try undefLiteral(mod, scope, node.castTag(.UndefinedLiteral).?)),
- .BoolLiteral => return rvalue(mod, scope, rl, try boolLiteral(mod, scope, node.castTag(.BoolLiteral).?)),
- .NullLiteral => return rvalue(mod, scope, rl, try nullLiteral(mod, scope, node.castTag(.NullLiteral).?)),
- .OptionalType => return rvalue(mod, scope, rl, try optionalType(mod, scope, node.castTag(.OptionalType).?)),
- .UnwrapOptional => return unwrapOptional(mod, scope, rl, node.castTag(.UnwrapOptional).?),
- .Block => return rvalueVoid(mod, scope, rl, node, try blockExpr(mod, scope, node.castTag(.Block).?)),
- .LabeledBlock => return labeledBlockExpr(mod, scope, rl, node.castTag(.LabeledBlock).?, .block),
- .Break => return rvalue(mod, scope, rl, try breakExpr(mod, scope, node.castTag(.Break).?)),
- .Continue => return rvalue(mod, scope, rl, try continueExpr(mod, scope, node.castTag(.Continue).?)),
- .PtrType => return rvalue(mod, scope, rl, try ptrType(mod, scope, node.castTag(.PtrType).?)),
- .GroupedExpression => return expr(mod, scope, rl, node.castTag(.GroupedExpression).?.expr),
- .ArrayType => return rvalue(mod, scope, rl, try arrayType(mod, scope, node.castTag(.ArrayType).?)),
- .ArrayTypeSentinel => return rvalue(mod, scope, rl, try arrayTypeSentinel(mod, scope, node.castTag(.ArrayTypeSentinel).?)),
- .EnumLiteral => return rvalue(mod, scope, rl, try enumLiteral(mod, scope, node.castTag(.EnumLiteral).?)),
- .MultilineStringLiteral => return rvalue(mod, scope, rl, try multilineStrLiteral(mod, scope, node.castTag(.MultilineStringLiteral).?)),
- .CharLiteral => return rvalue(mod, scope, rl, try charLiteral(mod, scope, node.castTag(.CharLiteral).?)),
- .SliceType => return rvalue(mod, scope, rl, try sliceType(mod, scope, node.castTag(.SliceType).?)),
- .ErrorUnion => return rvalue(mod, scope, rl, try typeInixOp(mod, scope, node.castTag(.ErrorUnion).?, .error_union_type)),
- .MergeErrorSets => return rvalue(mod, scope, rl, try typeInixOp(mod, scope, node.castTag(.MergeErrorSets).?, .merge_error_sets)),
- .AnyFrameType => return rvalue(mod, scope, rl, try anyFrameType(mod, scope, node.castTag(.AnyFrameType).?)),
- .ErrorSetDecl => return rvalue(mod, scope, rl, try errorSetDecl(mod, scope, node.castTag(.ErrorSetDecl).?)),
- .ErrorType => return rvalue(mod, scope, rl, try errorType(mod, scope, node.castTag(.ErrorType).?)),
- .For => return forExpr(mod, scope, rl, node.castTag(.For).?),
- .ArrayAccess => return arrayAccess(mod, scope, rl, node.castTag(.ArrayAccess).?),
- .Slice => return rvalue(mod, scope, rl, try sliceExpr(mod, scope, node.castTag(.Slice).?)),
- .Catch => return catchExpr(mod, scope, rl, node.castTag(.Catch).?),
- .Comptime => return comptimeKeyword(mod, scope, rl, node.castTag(.Comptime).?),
- .OrElse => return orelseExpr(mod, scope, rl, node.castTag(.OrElse).?),
- .Switch => return switchExpr(mod, scope, rl, node.castTag(.Switch).?),
- .ContainerDecl => return containerDecl(mod, scope, rl, node.castTag(.ContainerDecl).?),
-
- .Defer => return mod.failNode(scope, node, "TODO implement astgen.expr for .Defer", .{}),
- .Await => return mod.failNode(scope, node, "TODO implement astgen.expr for .Await", .{}),
- .Resume => return mod.failNode(scope, node, "TODO implement astgen.expr for .Resume", .{}),
- .Try => return mod.failNode(scope, node, "TODO implement astgen.expr for .Try", .{}),
- .ArrayInitializer => return mod.failNode(scope, node, "TODO implement astgen.expr for .ArrayInitializer", .{}),
- .ArrayInitializerDot => return mod.failNode(scope, node, "TODO implement astgen.expr for .ArrayInitializerDot", .{}),
- .StructInitializer => return mod.failNode(scope, node, "TODO implement astgen.expr for .StructInitializer", .{}),
- .StructInitializerDot => return mod.failNode(scope, node, "TODO implement astgen.expr for .StructInitializerDot", .{}),
- .Suspend => return mod.failNode(scope, node, "TODO implement astgen.expr for .Suspend", .{}),
- .AnyType => return mod.failNode(scope, node, "TODO implement astgen.expr for .AnyType", .{}),
- .FnProto => return mod.failNode(scope, node, "TODO implement astgen.expr for .FnProto", .{}),
- .Nosuspend => return mod.failNode(scope, node, "TODO implement astgen.expr for .Nosuspend", .{}),
- }
-}
-
-fn comptimeKeyword(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Comptime) InnerError!*zir.Inst {
- const tracy = trace(@src());
- defer tracy.end();
+pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) InnerError!*zir.Inst {
+ const tree = scope.tree();
+ const main_tokens = tree.nodes.items(.main_token);
+ const token_tags = tree.tokens.items(.tag);
+ const node_datas = tree.nodes.items(.data);
+ const node_tags = tree.nodes.items(.tag);
+ const token_starts = tree.tokens.items(.start);
+
+ switch (node_tags[node]) {
+ .root => unreachable, // Top-level declaration.
+ .@"usingnamespace" => unreachable, // Top-level declaration.
+ .test_decl => unreachable, // Top-level declaration.
+ .container_field_init => unreachable, // Top-level declaration.
+ .container_field_align => unreachable, // Top-level declaration.
+ .container_field => unreachable, // Top-level declaration.
+ .fn_decl => unreachable, // Top-level declaration.
+
+ .global_var_decl => unreachable, // Handled in `blockExpr`.
+ .local_var_decl => unreachable, // Handled in `blockExpr`.
+ .simple_var_decl => unreachable, // Handled in `blockExpr`.
+ .aligned_var_decl => unreachable, // Handled in `blockExpr`.
+
+ .switch_case => unreachable, // Handled in `switchExpr`.
+ .switch_case_one => unreachable, // Handled in `switchExpr`.
+ .switch_range => unreachable, // Handled in `switchExpr`.
+
+ .asm_output => unreachable, // Handled in `asmExpr`.
+ .asm_input => unreachable, // Handled in `asmExpr`.
+
+ .assign => return rvalueVoid(mod, scope, rl, node, try assign(mod, scope, node)),
+ .assign_bit_and => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .bit_and)),
+ .assign_bit_or => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .bit_or)),
+ .assign_bit_shift_left => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .shl)),
+ .assign_bit_shift_right => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .shr)),
+ .assign_bit_xor => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .xor)),
+ .assign_div => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .div)),
+ .assign_sub => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .sub)),
+ .assign_sub_wrap => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .subwrap)),
+ .assign_mod => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .mod_rem)),
+ .assign_add => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .add)),
+ .assign_add_wrap => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .addwrap)),
+ .assign_mul => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .mul)),
+ .assign_mul_wrap => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .mulwrap)),
+
+ .add => return simpleBinOp(mod, scope, rl, node, .add),
+ .add_wrap => return simpleBinOp(mod, scope, rl, node, .addwrap),
+ .sub => return simpleBinOp(mod, scope, rl, node, .sub),
+ .sub_wrap => return simpleBinOp(mod, scope, rl, node, .subwrap),
+ .mul => return simpleBinOp(mod, scope, rl, node, .mul),
+ .mul_wrap => return simpleBinOp(mod, scope, rl, node, .mulwrap),
+ .div => return simpleBinOp(mod, scope, rl, node, .div),
+ .mod => return simpleBinOp(mod, scope, rl, node, .mod_rem),
+ .bit_and => return simpleBinOp(mod, scope, rl, node, .bit_and),
+ .bit_or => return simpleBinOp(mod, scope, rl, node, .bit_or),
+ .bit_shift_left => return simpleBinOp(mod, scope, rl, node, .shl),
+ .bit_shift_right => return simpleBinOp(mod, scope, rl, node, .shr),
+ .bit_xor => return simpleBinOp(mod, scope, rl, node, .xor),
+
+ .bang_equal => return simpleBinOp(mod, scope, rl, node, .cmp_neq),
+ .equal_equal => return simpleBinOp(mod, scope, rl, node, .cmp_eq),
+ .greater_than => return simpleBinOp(mod, scope, rl, node, .cmp_gt),
+ .greater_or_equal => return simpleBinOp(mod, scope, rl, node, .cmp_gte),
+ .less_than => return simpleBinOp(mod, scope, rl, node, .cmp_lt),
+ .less_or_equal => return simpleBinOp(mod, scope, rl, node, .cmp_lte),
+
+ .array_cat => return simpleBinOp(mod, scope, rl, node, .array_cat),
+ .array_mult => return simpleBinOp(mod, scope, rl, node, .array_mul),
+
+ .bool_and => return boolBinOp(mod, scope, rl, node, true),
+ .bool_or => return boolBinOp(mod, scope, rl, node, false),
+
+ .bool_not => return rvalue(mod, scope, rl, try boolNot(mod, scope, node)),
+ .bit_not => return rvalue(mod, scope, rl, try bitNot(mod, scope, node)),
+ .negation => return rvalue(mod, scope, rl, try negation(mod, scope, node, .sub)),
+ .negation_wrap => return rvalue(mod, scope, rl, try negation(mod, scope, node, .subwrap)),
+
+ .identifier => return identifier(mod, scope, rl, node),
+
+ .asm_simple => return asmExpr(mod, scope, rl, tree.asmSimple(node)),
+ .@"asm" => return asmExpr(mod, scope, rl, tree.asmFull(node)),
+
+ .string_literal => return stringLiteral(mod, scope, rl, node),
+ .multiline_string_literal => return multilineStringLiteral(mod, scope, rl, node),
+
+ .integer_literal => return integerLiteral(mod, scope, rl, node),
+
+ .builtin_call_two, .builtin_call_two_comma => {
+ if (node_datas[node].lhs == 0) {
+ const params = [_]ast.Node.Index{};
+                return builtinCall(mod, scope, rl, node, &params);
+ } else if (node_datas[node].rhs == 0) {
+ const params = [_]ast.Node.Index{node_datas[node].lhs};
+                return builtinCall(mod, scope, rl, node, &params);
+ } else {
+ const params = [_]ast.Node.Index{ node_datas[node].lhs, node_datas[node].rhs };
+                return builtinCall(mod, scope, rl, node, &params);
+ }
+ },
+ .builtin_call, .builtin_call_comma => {
+ const params = tree.extra_data[node_datas[node].lhs..node_datas[node].rhs];
+ return builtinCall(mod, scope, rl, node, params);
+ },
+
+ .call_one, .call_one_comma, .async_call_one, .async_call_one_comma => {
+ var params: [1]ast.Node.Index = undefined;
+ return callExpr(mod, scope, rl, tree.callOne(¶ms, node));
+ },
+ .call, .call_comma, .async_call, .async_call_comma => {
+ return callExpr(mod, scope, rl, tree.callFull(node));
+ },
+
+ .unreachable_literal => {
+ const main_token = main_tokens[node];
+ const src = token_starts[main_token];
+ return addZIRNoOp(mod, scope, src, .unreachable_safe);
+ },
+ .@"return" => return ret(mod, scope, node),
+ .field_access => return fieldAccess(mod, scope, rl, node),
+ .float_literal => return floatLiteral(mod, scope, rl, node),
+
+ .if_simple => return ifExpr(mod, scope, rl, tree.ifSimple(node)),
+ .@"if" => return ifExpr(mod, scope, rl, tree.ifFull(node)),
+
+ .while_simple => return whileExpr(mod, scope, rl, tree.whileSimple(node)),
+ .while_cont => return whileExpr(mod, scope, rl, tree.whileCont(node)),
+ .@"while" => return whileExpr(mod, scope, rl, tree.whileFull(node)),
+
+ .for_simple => return forExpr(mod, scope, rl, tree.forSimple(node)),
+ .@"for" => return forExpr(mod, scope, rl, tree.forFull(node)),
+
+ // TODO handling these separately would actually be simpler & have fewer branches
+ // once we have a ZIR instruction for each of these 3 cases.
+ .slice_open => return sliceExpr(mod, scope, rl, tree.sliceOpen(node)),
+ .slice => return sliceExpr(mod, scope, rl, tree.slice(node)),
+ .slice_sentinel => return sliceExpr(mod, scope, rl, tree.sliceSentinel(node)),
+
+ .deref => {
+ const lhs = try expr(mod, scope, .none, node_datas[node].lhs);
+ const src = token_starts[main_tokens[node]];
+ const result = try addZIRUnOp(mod, scope, src, .deref, lhs);
+ return rvalue(mod, scope, rl, result);
+ },
+ .address_of => {
+ const result = try expr(mod, scope, .ref, node_datas[node].lhs);
+ return rvalue(mod, scope, rl, result);
+ },
+ .undefined_literal => {
+ const main_token = main_tokens[node];
+ const src = token_starts[main_token];
+ const result = try addZIRInstConst(mod, scope, src, .{
+ .ty = Type.initTag(.@"undefined"),
+ .val = Value.initTag(.undef),
+ });
+ return rvalue(mod, scope, rl, result);
+ },
+ .true_literal => {
+ const main_token = main_tokens[node];
+ const src = token_starts[main_token];
+ const result = try addZIRInstConst(mod, scope, src, .{
+ .ty = Type.initTag(.bool),
+ .val = Value.initTag(.bool_true),
+ });
+ return rvalue(mod, scope, rl, result);
+ },
+ .false_literal => {
+ const main_token = main_tokens[node];
+ const src = token_starts[main_token];
+ const result = try addZIRInstConst(mod, scope, src, .{
+ .ty = Type.initTag(.bool),
+ .val = Value.initTag(.bool_false),
+ });
+ return rvalue(mod, scope, rl, result);
+ },
+ .null_literal => {
+ const main_token = main_tokens[node];
+ const src = token_starts[main_token];
+ const result = try addZIRInstConst(mod, scope, src, .{
+ .ty = Type.initTag(.@"null"),
+ .val = Value.initTag(.null_value),
+ });
+ return rvalue(mod, scope, rl, result);
+ },
+ .optional_type => {
+ const src = token_starts[main_tokens[node]];
+ const operand = try typeExpr(mod, scope, node_datas[node].lhs);
+ const result = try addZIRUnOp(mod, scope, src, .optional_type, operand);
+ return rvalue(mod, scope, rl, result);
+ },
+ .unwrap_optional => {
+ const operand = try expr(mod, scope, rl, node_datas[node].lhs);
+ const op: zir.Inst.Tag = switch (rl) {
+ .ref => .optional_payload_safe_ptr,
+ else => .optional_payload_safe,
+ };
+ const src = token_starts[main_tokens[node]];
+ return addZIRUnOp(mod, scope, src, op, operand);
+ },
+ .block_two, .block_two_semicolon => {
+ const statements = [2]ast.Node.Index{ node_datas[node].lhs, node_datas[node].rhs };
+ if (node_datas[node].lhs == 0) {
+ return blockExpr(mod, scope, rl, node, statements[0..0]);
+ } else if (node_datas[node].rhs == 0) {
+ return blockExpr(mod, scope, rl, node, statements[0..1]);
+ } else {
+ return blockExpr(mod, scope, rl, node, statements[0..2]);
+ }
+ },
+ .block, .block_semicolon => {
+ const statements = tree.extra_data[node_datas[node].lhs..node_datas[node].rhs];
+ return blockExpr(mod, scope, rl, node, statements);
+ },
+ .enum_literal => {
+ const ident_token = main_tokens[node];
+ const name = try mod.identifierTokenString(scope, ident_token);
+ const src = token_starts[ident_token];
+ const result = try addZIRInst(mod, scope, src, zir.Inst.EnumLiteral, .{ .name = name }, .{});
+ return rvalue(mod, scope, rl, result);
+ },
+ .error_union => {
+ const error_set = try typeExpr(mod, scope, node_datas[node].lhs);
+ const payload = try typeExpr(mod, scope, node_datas[node].rhs);
+ const src = token_starts[main_tokens[node]];
+ const result = try addZIRBinOp(mod, scope, src, .error_union_type, error_set, payload);
+ return rvalue(mod, scope, rl, result);
+ },
+ .merge_error_sets => {
+ const lhs = try typeExpr(mod, scope, node_datas[node].lhs);
+ const rhs = try typeExpr(mod, scope, node_datas[node].rhs);
+ const src = token_starts[main_tokens[node]];
+ const result = try addZIRBinOp(mod, scope, src, .merge_error_sets, lhs, rhs);
+ return rvalue(mod, scope, rl, result);
+ },
+ .anyframe_literal => {
+ const main_token = main_tokens[node];
+ const src = token_starts[main_token];
+ const result = try addZIRInstConst(mod, scope, src, .{
+ .ty = Type.initTag(.type),
+ .val = Value.initTag(.anyframe_type),
+ });
+ return rvalue(mod, scope, rl, result);
+ },
+ .anyframe_type => {
+ const src = token_starts[node_datas[node].lhs];
+ const return_type = try typeExpr(mod, scope, node_datas[node].rhs);
+ const result = try addZIRUnOp(mod, scope, src, .anyframe_type, return_type);
+ return rvalue(mod, scope, rl, result);
+ },
+ .@"catch" => {
+ const catch_token = main_tokens[node];
+ const payload_token: ?ast.TokenIndex = if (token_tags[catch_token + 1] == .pipe)
+ catch_token + 2
+ else
+ null;
+ switch (rl) {
+ .ref => return orelseCatchExpr(
+ mod,
+ scope,
+ rl,
+ node_datas[node].lhs,
+ main_tokens[node],
+ .is_err_ptr,
+ .err_union_payload_unsafe_ptr,
+ .err_union_code_ptr,
+ node_datas[node].rhs,
+ payload_token,
+ ),
+ else => return orelseCatchExpr(
+ mod,
+ scope,
+ rl,
+ node_datas[node].lhs,
+ main_tokens[node],
+ .is_err,
+ .err_union_payload_unsafe,
+ .err_union_code,
+ node_datas[node].rhs,
+ payload_token,
+ ),
+ }
+ },
+ .@"orelse" => switch (rl) {
+ .ref => return orelseCatchExpr(
+ mod,
+ scope,
+ rl,
+ node_datas[node].lhs,
+ main_tokens[node],
+ .is_null_ptr,
+ .optional_payload_unsafe_ptr,
+ undefined,
+ node_datas[node].rhs,
+ null,
+ ),
+ else => return orelseCatchExpr(
+ mod,
+ scope,
+ rl,
+ node_datas[node].lhs,
+ main_tokens[node],
+ .is_null,
+ .optional_payload_unsafe,
+ undefined,
+ node_datas[node].rhs,
+ null,
+ ),
+ },
- return comptimeExpr(mod, scope, rl, node.expr);
+ .ptr_type_aligned => return ptrType(mod, scope, rl, tree.ptrTypeAligned(node)),
+ .ptr_type_sentinel => return ptrType(mod, scope, rl, tree.ptrTypeSentinel(node)),
+ .ptr_type => return ptrType(mod, scope, rl, tree.ptrType(node)),
+ .ptr_type_bit_range => return ptrType(mod, scope, rl, tree.ptrTypeBitRange(node)),
+
+ .container_decl,
+ .container_decl_trailing,
+ => return containerDecl(mod, scope, rl, tree.containerDecl(node)),
+ .container_decl_two, .container_decl_two_trailing => {
+ var buffer: [2]ast.Node.Index = undefined;
+ return containerDecl(mod, scope, rl, tree.containerDeclTwo(&buffer, node));
+ },
+ .container_decl_arg,
+ .container_decl_arg_trailing,
+ => return containerDecl(mod, scope, rl, tree.containerDeclArg(node)),
+
+ .tagged_union,
+ .tagged_union_trailing,
+ => return containerDecl(mod, scope, rl, tree.taggedUnion(node)),
+ .tagged_union_two, .tagged_union_two_trailing => {
+ var buffer: [2]ast.Node.Index = undefined;
+ return containerDecl(mod, scope, rl, tree.taggedUnionTwo(&buffer, node));
+ },
+ .tagged_union_enum_tag,
+ .tagged_union_enum_tag_trailing,
+ => return containerDecl(mod, scope, rl, tree.taggedUnionEnumTag(node)),
+
+ .@"break" => return breakExpr(mod, scope, rl, node),
+ .@"continue" => return continueExpr(mod, scope, rl, node),
+ .grouped_expression => return expr(mod, scope, rl, node_datas[node].lhs),
+ .array_type => return arrayType(mod, scope, rl, node),
+ .array_type_sentinel => return arrayTypeSentinel(mod, scope, rl, node),
+ .char_literal => return charLiteral(mod, scope, rl, node),
+ .error_set_decl => return errorSetDecl(mod, scope, rl, node),
+ .array_access => return arrayAccess(mod, scope, rl, node),
+ .@"comptime" => return comptimeExpr(mod, scope, rl, node_datas[node].lhs),
+ .@"switch", .switch_comma => return switchExpr(mod, scope, rl, node),
+
+ .@"defer" => return mod.failNode(scope, node, "TODO implement astgen.expr for .defer", .{}),
+ .@"errdefer" => return mod.failNode(scope, node, "TODO implement astgen.expr for .errdefer", .{}),
+ .@"await" => return mod.failNode(scope, node, "TODO implement astgen.expr for .await", .{}),
+ .@"resume" => return mod.failNode(scope, node, "TODO implement astgen.expr for .resume", .{}),
+        .@"try" => return mod.failNode(scope, node, "TODO implement astgen.expr for .try", .{}),
+
+ .array_init_one,
+ .array_init_one_comma,
+ .array_init_dot_two,
+ .array_init_dot_two_comma,
+ .array_init_dot,
+ .array_init_dot_comma,
+ .array_init,
+ .array_init_comma,
+ => return mod.failNode(scope, node, "TODO implement astgen.expr for array literals", .{}),
+
+ .struct_init_one,
+ .struct_init_one_comma,
+ .struct_init_dot_two,
+ .struct_init_dot_two_comma,
+ .struct_init_dot,
+ .struct_init_dot_comma,
+ .struct_init,
+ .struct_init_comma,
+ => return mod.failNode(scope, node, "TODO implement astgen.expr for struct literals", .{}),
+
+ .@"suspend" => return mod.failNode(scope, node, "TODO implement astgen.expr for .suspend", .{}),
+ .@"anytype" => return mod.failNode(scope, node, "TODO implement astgen.expr for .anytype", .{}),
+ .fn_proto_simple,
+ .fn_proto_multi,
+ .fn_proto_one,
+ .fn_proto,
+ => return mod.failNode(scope, node, "TODO implement astgen.expr for function prototypes", .{}),
+
+ .@"nosuspend" => return mod.failNode(scope, node, "TODO implement astgen.expr for .nosuspend", .{}),
+ .error_value => return mod.failNode(scope, node, "TODO implement astgen.expr for .error_value", .{}),
+ }
}
pub fn comptimeExpr(
mod: *Module,
parent_scope: *Scope,
rl: ResultLoc,
- node: *ast.Node,
+ node: ast.Node.Index,
) InnerError!*zir.Inst {
// If we are already in a comptime scope, no need to make another one.
if (parent_scope.isComptime()) {
return expr(mod, parent_scope, rl, node);
}
- // Optimization for labeled blocks: don't need to have 2 layers of blocks,
- // we can reuse the existing one.
- if (node.castTag(.LabeledBlock)) |block_node| {
- return labeledBlockExpr(mod, parent_scope, rl, block_node, .block_comptime);
- }
+ const tree = parent_scope.tree();
+ const token_starts = tree.tokens.items(.start);
// Make a scope to collect generated instructions in the sub-expression.
var block_scope: Scope.GenZIR = .{
@@ -365,9 +676,7 @@ pub fn comptimeExpr(
// instruction is the block's result value.
_ = try expr(mod, &block_scope.base, rl, node);
- const tree = parent_scope.tree();
- const src = tree.token_locs[node.firstToken()].start;
-
+ const src = token_starts[tree.firstToken(node)];
const block = try addZIRInstBlock(mod, parent_scope, src, .block_comptime_flat, .{
.instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items),
});
@@ -378,10 +687,17 @@ pub fn comptimeExpr(
fn breakExpr(
mod: *Module,
parent_scope: *Scope,
- node: *ast.Node.ControlFlowExpression,
+ rl: ResultLoc,
+ node: ast.Node.Index,
) InnerError!*zir.Inst {
const tree = parent_scope.tree();
- const src = tree.token_locs[node.ltoken].start;
+ const node_datas = tree.nodes.items(.data);
+ const main_tokens = tree.nodes.items(.main_token);
+ const token_starts = tree.tokens.items(.start);
+
+ const src = token_starts[main_tokens[node]];
+ const break_label = node_datas[node].lhs;
+ const rhs = node_datas[node].rhs;
// Look for the label in the scope.
var scope = parent_scope;
@@ -391,7 +707,7 @@ fn breakExpr(
const gen_zir = scope.cast(Scope.GenZIR).?;
const block_inst = blk: {
- if (node.getLabel()) |break_label| {
+ if (break_label != 0) {
if (gen_zir.label) |*label| {
if (try tokenIdentEql(mod, parent_scope, label.token, break_label)) {
label.used = true;
@@ -405,11 +721,12 @@ fn breakExpr(
continue;
};
- const rhs = node.getRHS() orelse {
- return addZirInstTag(mod, parent_scope, src, .break_void, .{
+ if (rhs == 0) {
+ const result = try addZirInstTag(mod, parent_scope, src, .break_void, .{
.block = block_inst,
});
- };
+ return rvalue(mod, parent_scope, rl, result);
+ }
gen_zir.break_count += 1;
const prev_rvalue_rl_count = gen_zir.rvalue_rl_count;
const operand = try expr(mod, parent_scope, gen_zir.break_result_loc, rhs);
@@ -429,11 +746,11 @@ fn breakExpr(
try gen_zir.labeled_store_to_block_ptr_list.append(mod.gpa, store_inst);
}
}
- return br;
+ return rvalue(mod, parent_scope, rl, br);
},
.local_val => scope = scope.cast(Scope.LocalVal).?.parent,
.local_ptr => scope = scope.cast(Scope.LocalPtr).?.parent,
- else => if (node.getLabel()) |break_label| {
+ else => if (break_label != 0) {
const label_name = try mod.identifierTokenString(parent_scope, break_label);
return mod.failTok(parent_scope, break_label, "label not found: '{s}'", .{label_name});
} else {
@@ -443,9 +760,19 @@ fn breakExpr(
}
}
-fn continueExpr(mod: *Module, parent_scope: *Scope, node: *ast.Node.ControlFlowExpression) InnerError!*zir.Inst {
+fn continueExpr(
+ mod: *Module,
+ parent_scope: *Scope,
+ rl: ResultLoc,
+ node: ast.Node.Index,
+) InnerError!*zir.Inst {
const tree = parent_scope.tree();
- const src = tree.token_locs[node.ltoken].start;
+ const node_datas = tree.nodes.items(.data);
+ const main_tokens = tree.nodes.items(.main_token);
+ const token_starts = tree.tokens.items(.start);
+
+ const src = token_starts[main_tokens[node]];
+ const break_label = node_datas[node].lhs;
// Look for the label in the scope.
var scope = parent_scope;
@@ -457,7 +784,7 @@ fn continueExpr(mod: *Module, parent_scope: *Scope, node: *ast.Node.ControlFlowE
scope = gen_zir.parent;
continue;
};
- if (node.getLabel()) |break_label| blk: {
+ if (break_label != 0) blk: {
if (gen_zir.label) |*label| {
if (try tokenIdentEql(mod, parent_scope, label.token, break_label)) {
label.used = true;
@@ -469,13 +796,14 @@ fn continueExpr(mod: *Module, parent_scope: *Scope, node: *ast.Node.ControlFlowE
continue;
}
- return addZirInstTag(mod, parent_scope, src, .break_void, .{
+ const result = try addZirInstTag(mod, parent_scope, src, .break_void, .{
.block = continue_block,
});
+ return rvalue(mod, parent_scope, rl, result);
},
.local_val => scope = scope.cast(Scope.LocalVal).?.parent,
.local_ptr => scope = scope.cast(Scope.LocalPtr).?.parent,
- else => if (node.getLabel()) |break_label| {
+ else => if (break_label != 0) {
const label_name = try mod.identifierTokenString(parent_scope, break_label);
return mod.failTok(parent_scope, break_label, "label not found: '{s}'", .{label_name});
} else {
@@ -485,11 +813,27 @@ fn continueExpr(mod: *Module, parent_scope: *Scope, node: *ast.Node.ControlFlowE
}
}
-pub fn blockExpr(mod: *Module, parent_scope: *Scope, block_node: *ast.Node.Block) InnerError!void {
+pub fn blockExpr(
+ mod: *Module,
+ scope: *Scope,
+ rl: ResultLoc,
+ block_node: ast.Node.Index,
+ statements: []const ast.Node.Index,
+) InnerError!*zir.Inst {
const tracy = trace(@src());
defer tracy.end();
- try blockExprStmts(mod, parent_scope, &block_node.base, block_node.statements());
+ const tree = scope.tree();
+ const main_tokens = tree.nodes.items(.main_token);
+ const token_tags = tree.tokens.items(.tag);
+
+ const lbrace = main_tokens[block_node];
+ if (token_tags[lbrace - 1] == .colon) {
+ return labeledBlockExpr(mod, scope, rl, block_node, statements, .block);
+ }
+
+ try blockExprStmts(mod, scope, block_node, statements);
+ return rvalueVoid(mod, scope, rl, block_node, {});
}
fn checkLabelRedefinition(mod: *Module, parent_scope: *Scope, label: ast.TokenIndex) !void {
@@ -502,8 +846,11 @@ fn checkLabelRedefinition(mod: *Module, parent_scope: *Scope, label: ast.TokenIn
if (gen_zir.label) |prev_label| {
if (try tokenIdentEql(mod, parent_scope, label, prev_label.token)) {
const tree = parent_scope.tree();
- const label_src = tree.token_locs[label].start;
- const prev_label_src = tree.token_locs[prev_label.token].start;
+ const main_tokens = tree.nodes.items(.main_token);
+ const token_starts = tree.tokens.items(.start);
+
+ const label_src = token_starts[label];
+ const prev_label_src = token_starts[prev_label.token];
const label_name = try mod.identifierTokenString(parent_scope, label);
const msg = msg: {
@@ -539,7 +886,8 @@ fn labeledBlockExpr(
mod: *Module,
parent_scope: *Scope,
rl: ResultLoc,
- block_node: *ast.Node.LabeledBlock,
+ block_node: ast.Node.Index,
+ statements: []const ast.Node.Index,
zir_tag: zir.Inst.Tag,
) InnerError!*zir.Inst {
const tracy = trace(@src());
@@ -548,9 +896,16 @@ fn labeledBlockExpr(
assert(zir_tag == .block or zir_tag == .block_comptime);
const tree = parent_scope.tree();
- const src = tree.token_locs[block_node.lbrace].start;
+ const main_tokens = tree.nodes.items(.main_token);
+ const token_starts = tree.tokens.items(.start);
+ const token_tags = tree.tokens.items(.tag);
- try checkLabelRedefinition(mod, parent_scope, block_node.label);
+ const lbrace = main_tokens[block_node];
+ const label_token = lbrace - 2;
+ assert(token_tags[label_token] == .identifier);
+ const src = token_starts[lbrace];
+
+ try checkLabelRedefinition(mod, parent_scope, label_token);
// Create the Block ZIR instruction so that we can put it into the GenZIR struct
// so that break statements can reference it.
@@ -575,7 +930,7 @@ fn labeledBlockExpr(
.instructions = .{},
// TODO @as here is working around a stage1 miscompilation bug :(
.label = @as(?Scope.GenZIR.Label, Scope.GenZIR.Label{
- .token = block_node.label,
+ .token = label_token,
.block_inst = block_inst,
}),
};
@@ -584,10 +939,10 @@ fn labeledBlockExpr(
defer block_scope.labeled_breaks.deinit(mod.gpa);
defer block_scope.labeled_store_to_block_ptr_list.deinit(mod.gpa);
- try blockExprStmts(mod, &block_scope.base, &block_node.base, block_node.statements());
+ try blockExprStmts(mod, &block_scope.base, block_node, statements);
if (!block_scope.label.?.used) {
- return mod.fail(parent_scope, tree.token_locs[block_node.label].start, "unused block label", .{});
+ return mod.failTok(parent_scope, label_token, "unused block label", .{});
}
try gen_zir.instructions.append(mod.gpa, &block_inst.base);
@@ -627,37 +982,41 @@ fn labeledBlockExpr(
fn blockExprStmts(
mod: *Module,
parent_scope: *Scope,
- node: *ast.Node,
- statements: []*ast.Node,
+ node: ast.Node.Index,
+ statements: []const ast.Node.Index,
) !void {
const tree = parent_scope.tree();
+ const main_tokens = tree.nodes.items(.main_token);
+ const token_starts = tree.tokens.items(.start);
+ const node_tags = tree.nodes.items(.tag);
var block_arena = std.heap.ArenaAllocator.init(mod.gpa);
defer block_arena.deinit();
var scope = parent_scope;
for (statements) |statement| {
- const src = tree.token_locs[statement.firstToken()].start;
+ const src = token_starts[tree.firstToken(statement)];
_ = try addZIRNoOp(mod, scope, src, .dbg_stmt);
- switch (statement.tag) {
- .VarDecl => {
- const var_decl_node = statement.castTag(.VarDecl).?;
- scope = try varDecl(mod, scope, var_decl_node, &block_arena.allocator);
- },
- .Assign => try assign(mod, scope, statement.castTag(.Assign).?),
- .AssignBitAnd => try assignOp(mod, scope, statement.castTag(.AssignBitAnd).?, .bit_and),
- .AssignBitOr => try assignOp(mod, scope, statement.castTag(.AssignBitOr).?, .bit_or),
- .AssignBitShiftLeft => try assignOp(mod, scope, statement.castTag(.AssignBitShiftLeft).?, .shl),
- .AssignBitShiftRight => try assignOp(mod, scope, statement.castTag(.AssignBitShiftRight).?, .shr),
- .AssignBitXor => try assignOp(mod, scope, statement.castTag(.AssignBitXor).?, .xor),
- .AssignDiv => try assignOp(mod, scope, statement.castTag(.AssignDiv).?, .div),
- .AssignSub => try assignOp(mod, scope, statement.castTag(.AssignSub).?, .sub),
- .AssignSubWrap => try assignOp(mod, scope, statement.castTag(.AssignSubWrap).?, .subwrap),
- .AssignMod => try assignOp(mod, scope, statement.castTag(.AssignMod).?, .mod_rem),
- .AssignAdd => try assignOp(mod, scope, statement.castTag(.AssignAdd).?, .add),
- .AssignAddWrap => try assignOp(mod, scope, statement.castTag(.AssignAddWrap).?, .addwrap),
- .AssignMul => try assignOp(mod, scope, statement.castTag(.AssignMul).?, .mul),
- .AssignMulWrap => try assignOp(mod, scope, statement.castTag(.AssignMulWrap).?, .mulwrap),
+ switch (node_tags[statement]) {
+ .global_var_decl => scope = try varDecl(mod, scope, &block_arena.allocator, tree.globalVarDecl(statement)),
+ .local_var_decl => scope = try varDecl(mod, scope, &block_arena.allocator, tree.localVarDecl(statement)),
+ .simple_var_decl => scope = try varDecl(mod, scope, &block_arena.allocator, tree.simpleVarDecl(statement)),
+ .aligned_var_decl => scope = try varDecl(mod, scope, &block_arena.allocator, tree.alignedVarDecl(statement)),
+
+ .assign => try assign(mod, scope, statement),
+ .assign_bit_and => try assignOp(mod, scope, statement, .bit_and),
+ .assign_bit_or => try assignOp(mod, scope, statement, .bit_or),
+ .assign_bit_shift_left => try assignOp(mod, scope, statement, .shl),
+ .assign_bit_shift_right => try assignOp(mod, scope, statement, .shr),
+ .assign_bit_xor => try assignOp(mod, scope, statement, .xor),
+ .assign_div => try assignOp(mod, scope, statement, .div),
+ .assign_sub => try assignOp(mod, scope, statement, .sub),
+ .assign_sub_wrap => try assignOp(mod, scope, statement, .subwrap),
+ .assign_mod => try assignOp(mod, scope, statement, .mod_rem),
+ .assign_add => try assignOp(mod, scope, statement, .add),
+ .assign_add_wrap => try assignOp(mod, scope, statement, .addwrap),
+ .assign_mul => try assignOp(mod, scope, statement, .mul),
+ .assign_mul_wrap => try assignOp(mod, scope, statement, .mulwrap),
else => {
const possibly_unused_result = try expr(mod, scope, .none, statement);
@@ -672,18 +1031,23 @@ fn blockExprStmts(
fn varDecl(
mod: *Module,
scope: *Scope,
- node: *ast.Node.VarDecl,
block_arena: *Allocator,
+ var_decl: ast.full.VarDecl,
) InnerError!*Scope {
- if (node.getComptimeToken()) |comptime_token| {
+ if (var_decl.comptime_token) |comptime_token| {
return mod.failTok(scope, comptime_token, "TODO implement comptime locals", .{});
}
- if (node.getAlignNode()) |align_node| {
- return mod.failNode(scope, align_node, "TODO implement alignment on locals", .{});
+ if (var_decl.ast.align_node != 0) {
+ return mod.failNode(scope, var_decl.ast.align_node, "TODO implement alignment on locals", .{});
}
const tree = scope.tree();
- const name_src = tree.token_locs[node.name_token].start;
- const ident_name = try mod.identifierTokenString(scope, node.name_token);
+ const main_tokens = tree.nodes.items(.main_token);
+ const token_starts = tree.tokens.items(.start);
+ const token_tags = tree.tokens.items(.tag);
+
+ const name_token = var_decl.ast.mut_token + 1;
+ const name_src = token_starts[name_token];
+ const ident_name = try mod.identifierTokenString(scope, name_token);
// Local variables shadowing detection, including function parameters.
{
@@ -729,20 +1093,21 @@ fn varDecl(
// TODO add note for other definition
return mod.fail(scope, name_src, "redefinition of '{s}'", .{ident_name});
}
- const init_node = node.getInitNode() orelse
+ if (var_decl.ast.init_node == 0) {
return mod.fail(scope, name_src, "variables must be initialized", .{});
+ }
- switch (tree.token_ids[node.mut_token]) {
- .Keyword_const => {
+ switch (token_tags[var_decl.ast.mut_token]) {
+ .keyword_const => {
// Depending on the type of AST the initialization expression is, we may need an lvalue
// or an rvalue as a result location. If it is an rvalue, we can use the instruction as
// the variable, no memory location needed.
- if (!nodeMayNeedMemoryLocation(init_node, scope)) {
- const result_loc: ResultLoc = if (node.getTypeNode()) |type_node|
- .{ .ty = try typeExpr(mod, scope, type_node) }
+ if (!nodeMayNeedMemoryLocation(scope, var_decl.ast.init_node)) {
+ const result_loc: ResultLoc = if (var_decl.ast.type_node != 0)
+ .{ .ty = try typeExpr(mod, scope, var_decl.ast.type_node) }
else
.none;
- const init_inst = try expr(mod, scope, result_loc, init_node);
+ const init_inst = try expr(mod, scope, result_loc, var_decl.ast.init_node);
const sub_scope = try block_arena.create(Scope.LocalVal);
sub_scope.* = .{
.parent = scope,
@@ -766,8 +1131,8 @@ fn varDecl(
var resolve_inferred_alloc: ?*zir.Inst = null;
var opt_type_inst: ?*zir.Inst = null;
- if (node.getTypeNode()) |type_node| {
- const type_inst = try typeExpr(mod, &init_scope.base, type_node);
+ if (var_decl.ast.type_node != 0) {
+ const type_inst = try typeExpr(mod, &init_scope.base, var_decl.ast.type_node);
opt_type_inst = type_inst;
init_scope.rl_ptr = try addZIRUnOp(mod, &init_scope.base, name_src, .alloc, type_inst);
} else {
@@ -776,7 +1141,7 @@ fn varDecl(
init_scope.rl_ptr = &alloc.base;
}
const init_result_loc: ResultLoc = .{ .block_ptr = &init_scope };
- const init_inst = try expr(mod, &init_scope.base, init_result_loc, init_node);
+ const init_inst = try expr(mod, &init_scope.base, init_result_loc, var_decl.ast.init_node);
const parent_zir = &scope.getGenZIR().instructions;
if (init_scope.rvalue_rl_count == 1) {
// Result location pointer not used. We don't need an alloc for this
@@ -834,10 +1199,13 @@ fn varDecl(
};
return &sub_scope.base;
},
- .Keyword_var => {
+ .keyword_var => {
var resolve_inferred_alloc: ?*zir.Inst = null;
- const var_data: struct { result_loc: ResultLoc, alloc: *zir.Inst } = if (node.getTypeNode()) |type_node| a: {
- const type_inst = try typeExpr(mod, scope, type_node);
+ const var_data: struct {
+ result_loc: ResultLoc,
+ alloc: *zir.Inst,
+ } = if (var_decl.ast.type_node != 0) a: {
+ const type_inst = try typeExpr(mod, scope, var_decl.ast.type_node);
const alloc = try addZIRUnOp(mod, scope, name_src, .alloc_mut, type_inst);
break :a .{ .alloc = alloc, .result_loc = .{ .ptr = alloc } };
} else a: {
@@ -845,7 +1213,7 @@ fn varDecl(
resolve_inferred_alloc = &alloc.base;
break :a .{ .alloc = &alloc.base, .result_loc = .{ .inferred_ptr = alloc } };
};
- const init_inst = try expr(mod, scope, var_data.result_loc, init_node);
+ const init_inst = try expr(mod, scope, var_data.result_loc, var_decl.ast.init_node);
if (resolve_inferred_alloc) |inst| {
_ = try addZIRUnOp(mod, scope, name_src, .resolve_inferred_alloc, inst);
}
@@ -862,232 +1230,210 @@ fn varDecl(
}
}
-fn assign(mod: *Module, scope: *Scope, infix_node: *ast.Node.SimpleInfixOp) InnerError!void {
- if (infix_node.lhs.castTag(.Identifier)) |ident| {
- // This intentionally does not support @"_" syntax.
- const ident_name = scope.tree().tokenSlice(ident.token);
+fn assign(mod: *Module, scope: *Scope, infix_node: ast.Node.Index) InnerError!void {
+ const tree = scope.tree();
+ const node_datas = tree.nodes.items(.data);
+ const main_tokens = tree.nodes.items(.main_token);
+ const node_tags = tree.nodes.items(.tag);
+
+ const lhs = node_datas[infix_node].lhs;
+ const rhs = node_datas[infix_node].rhs;
+ if (node_tags[lhs] == .identifier) {
+ // This intentionally does not support `@"_"` syntax.
+ const ident_name = tree.tokenSlice(main_tokens[lhs]);
if (mem.eql(u8, ident_name, "_")) {
- _ = try expr(mod, scope, .discard, infix_node.rhs);
+ _ = try expr(mod, scope, .discard, rhs);
return;
}
}
- const lvalue = try lvalExpr(mod, scope, infix_node.lhs);
- _ = try expr(mod, scope, .{ .ptr = lvalue }, infix_node.rhs);
+ const lvalue = try lvalExpr(mod, scope, lhs);
+ _ = try expr(mod, scope, .{ .ptr = lvalue }, rhs);
}
fn assignOp(
mod: *Module,
scope: *Scope,
- infix_node: *ast.Node.SimpleInfixOp,
+ infix_node: ast.Node.Index,
op_inst_tag: zir.Inst.Tag,
) InnerError!void {
- const lhs_ptr = try lvalExpr(mod, scope, infix_node.lhs);
- const lhs = try addZIRUnOp(mod, scope, lhs_ptr.src, .deref, lhs_ptr);
- const lhs_type = try addZIRUnOp(mod, scope, lhs_ptr.src, .typeof, lhs);
- const rhs = try expr(mod, scope, .{ .ty = lhs_type }, infix_node.rhs);
-
const tree = scope.tree();
- const src = tree.token_locs[infix_node.op_token].start;
+ const node_datas = tree.nodes.items(.data);
+ const main_tokens = tree.nodes.items(.main_token);
+ const token_starts = tree.tokens.items(.start);
+ const lhs_ptr = try lvalExpr(mod, scope, node_datas[infix_node].lhs);
+ const lhs = try addZIRUnOp(mod, scope, lhs_ptr.src, .deref, lhs_ptr);
+ const lhs_type = try addZIRUnOp(mod, scope, lhs_ptr.src, .typeof, lhs);
+ const rhs = try expr(mod, scope, .{ .ty = lhs_type }, node_datas[infix_node].rhs);
+ const src = token_starts[main_tokens[infix_node]];
const result = try addZIRBinOp(mod, scope, src, op_inst_tag, lhs, rhs);
_ = try addZIRBinOp(mod, scope, src, .store, lhs_ptr, result);
}
-fn boolNot(mod: *Module, scope: *Scope, node: *ast.Node.SimplePrefixOp) InnerError!*zir.Inst {
+fn boolNot(mod: *Module, scope: *Scope, node: ast.Node.Index) InnerError!*zir.Inst {
const tree = scope.tree();
- const src = tree.token_locs[node.op_token].start;
+ const node_datas = tree.nodes.items(.data);
+ const main_tokens = tree.nodes.items(.main_token);
+ const token_starts = tree.tokens.items(.start);
+
+ const src = token_starts[main_tokens[node]];
const bool_type = try addZIRInstConst(mod, scope, src, .{
.ty = Type.initTag(.type),
.val = Value.initTag(.bool_type),
});
- const operand = try expr(mod, scope, .{ .ty = bool_type }, node.rhs);
+ const operand = try expr(mod, scope, .{ .ty = bool_type }, node_datas[node].lhs);
return addZIRUnOp(mod, scope, src, .bool_not, operand);
}
-fn bitNot(mod: *Module, scope: *Scope, node: *ast.Node.SimplePrefixOp) InnerError!*zir.Inst {
+fn bitNot(mod: *Module, scope: *Scope, node: ast.Node.Index) InnerError!*zir.Inst {
const tree = scope.tree();
- const src = tree.token_locs[node.op_token].start;
- const operand = try expr(mod, scope, .none, node.rhs);
+ const node_datas = tree.nodes.items(.data);
+ const main_tokens = tree.nodes.items(.main_token);
+ const token_starts = tree.tokens.items(.start);
+
+ const src = token_starts[main_tokens[node]];
+ const operand = try expr(mod, scope, .none, node_datas[node].lhs);
return addZIRUnOp(mod, scope, src, .bit_not, operand);
}
-fn negation(mod: *Module, scope: *Scope, node: *ast.Node.SimplePrefixOp, op_inst_tag: zir.Inst.Tag) InnerError!*zir.Inst {
+fn negation(
+ mod: *Module,
+ scope: *Scope,
+ node: ast.Node.Index,
+ op_inst_tag: zir.Inst.Tag,
+) InnerError!*zir.Inst {
const tree = scope.tree();
- const src = tree.token_locs[node.op_token].start;
+ const node_datas = tree.nodes.items(.data);
+ const main_tokens = tree.nodes.items(.main_token);
+ const token_starts = tree.tokens.items(.start);
+ const src = token_starts[main_tokens[node]];
const lhs = try addZIRInstConst(mod, scope, src, .{
.ty = Type.initTag(.comptime_int),
.val = Value.initTag(.zero),
});
- const rhs = try expr(mod, scope, .none, node.rhs);
-
+ const rhs = try expr(mod, scope, .none, node_datas[node].lhs);
return addZIRBinOp(mod, scope, src, op_inst_tag, lhs, rhs);
}
-fn addressOf(mod: *Module, scope: *Scope, node: *ast.Node.SimplePrefixOp) InnerError!*zir.Inst {
- return expr(mod, scope, .ref, node.rhs);
-}
-
-fn optionalType(mod: *Module, scope: *Scope, node: *ast.Node.SimplePrefixOp) InnerError!*zir.Inst {
- const tree = scope.tree();
- const src = tree.token_locs[node.op_token].start;
- const operand = try typeExpr(mod, scope, node.rhs);
- return addZIRUnOp(mod, scope, src, .optional_type, operand);
-}
-
-fn sliceType(mod: *Module, scope: *Scope, node: *ast.Node.SliceType) InnerError!*zir.Inst {
+fn ptrType(
+ mod: *Module,
+ scope: *Scope,
+ rl: ResultLoc,
+ ptr_info: ast.full.PtrType,
+) InnerError!*zir.Inst {
const tree = scope.tree();
- const src = tree.token_locs[node.op_token].start;
- return ptrSliceType(mod, scope, src, &node.ptr_info, node.rhs, .Slice);
-}
+ const token_starts = tree.tokens.items(.start);
-fn ptrType(mod: *Module, scope: *Scope, node: *ast.Node.PtrType) InnerError!*zir.Inst {
- const tree = scope.tree();
- const src = tree.token_locs[node.op_token].start;
- return ptrSliceType(mod, scope, src, &node.ptr_info, node.rhs, switch (tree.token_ids[node.op_token]) {
- .Asterisk, .AsteriskAsterisk => .One,
- // TODO stage1 type inference bug
- .LBracket => @as(std.builtin.TypeInfo.Pointer.Size, switch (tree.token_ids[node.op_token + 2]) {
- .Identifier => .C,
- else => .Many,
- }),
- else => unreachable,
- });
-}
+ const src = token_starts[ptr_info.ast.main_token];
-fn ptrSliceType(mod: *Module, scope: *Scope, src: usize, ptr_info: *ast.PtrInfo, rhs: *ast.Node, size: std.builtin.TypeInfo.Pointer.Size) InnerError!*zir.Inst {
const simple = ptr_info.allowzero_token == null and
- ptr_info.align_info == null and
+ ptr_info.ast.align_node == 0 and
ptr_info.volatile_token == null and
- ptr_info.sentinel == null;
+ ptr_info.ast.sentinel == 0;
if (simple) {
- const child_type = try typeExpr(mod, scope, rhs);
+ const child_type = try typeExpr(mod, scope, ptr_info.ast.child_type);
const mutable = ptr_info.const_token == null;
- // TODO stage1 type inference bug
const T = zir.Inst.Tag;
- return addZIRUnOp(mod, scope, src, switch (size) {
+ const result = try addZIRUnOp(mod, scope, src, switch (ptr_info.size) {
.One => if (mutable) T.single_mut_ptr_type else T.single_const_ptr_type,
.Many => if (mutable) T.many_mut_ptr_type else T.many_const_ptr_type,
.C => if (mutable) T.c_mut_ptr_type else T.c_const_ptr_type,
.Slice => if (mutable) T.mut_slice_type else T.const_slice_type,
}, child_type);
+ return rvalue(mod, scope, rl, result);
}
var kw_args: std.meta.fieldInfo(zir.Inst.PtrType, .kw_args).field_type = .{};
- kw_args.size = size;
+ kw_args.size = ptr_info.size;
kw_args.@"allowzero" = ptr_info.allowzero_token != null;
- if (ptr_info.align_info) |some| {
- kw_args.@"align" = try expr(mod, scope, .none, some.node);
- if (some.bit_range) |bit_range| {
- kw_args.align_bit_start = try expr(mod, scope, .none, bit_range.start);
- kw_args.align_bit_end = try expr(mod, scope, .none, bit_range.end);
+ if (ptr_info.ast.align_node != 0) {
+ kw_args.@"align" = try expr(mod, scope, .none, ptr_info.ast.align_node);
+ if (ptr_info.ast.bit_range_start != 0) {
+ kw_args.align_bit_start = try expr(mod, scope, .none, ptr_info.ast.bit_range_start);
+ kw_args.align_bit_end = try expr(mod, scope, .none, ptr_info.ast.bit_range_end);
}
}
kw_args.mutable = ptr_info.const_token == null;
kw_args.@"volatile" = ptr_info.volatile_token != null;
- if (ptr_info.sentinel) |some| {
- kw_args.sentinel = try expr(mod, scope, .none, some);
- }
-
- const child_type = try typeExpr(mod, scope, rhs);
- if (kw_args.sentinel) |some| {
- kw_args.sentinel = try addZIRBinOp(mod, scope, some.src, .as, child_type, some);
+ const child_type = try typeExpr(mod, scope, ptr_info.ast.child_type);
+ if (ptr_info.ast.sentinel != 0) {
+ kw_args.sentinel = try expr(mod, scope, .{ .ty = child_type }, ptr_info.ast.sentinel);
}
-
- return addZIRInst(mod, scope, src, zir.Inst.PtrType, .{ .child_type = child_type }, kw_args);
+ const result = try addZIRInst(mod, scope, src, zir.Inst.PtrType, .{ .child_type = child_type }, kw_args);
+ return rvalue(mod, scope, rl, result);
}
-fn arrayType(mod: *Module, scope: *Scope, node: *ast.Node.ArrayType) !*zir.Inst {
+fn arrayType(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) !*zir.Inst {
const tree = scope.tree();
- const src = tree.token_locs[node.op_token].start;
+ const main_tokens = tree.nodes.items(.main_token);
+ const node_datas = tree.nodes.items(.data);
+ const token_starts = tree.tokens.items(.start);
+
+ const src = token_starts[main_tokens[node]];
const usize_type = try addZIRInstConst(mod, scope, src, .{
.ty = Type.initTag(.type),
.val = Value.initTag(.usize_type),
});
+ const len_node = node_datas[node].lhs;
+ const elem_node = node_datas[node].rhs;
+ if (len_node == 0) {
+ const elem_type = try typeExpr(mod, scope, elem_node);
+ const result = try addZIRUnOp(mod, scope, src, .mut_slice_type, elem_type);
+ return rvalue(mod, scope, rl, result);
+ } else {
+ // TODO check for [_]T
+ const len = try expr(mod, scope, .{ .ty = usize_type }, len_node);
+ const elem_type = try typeExpr(mod, scope, elem_node);
- // TODO check for [_]T
- const len = try expr(mod, scope, .{ .ty = usize_type }, node.len_expr);
- const elem_type = try typeExpr(mod, scope, node.rhs);
-
- return addZIRBinOp(mod, scope, src, .array_type, len, elem_type);
+ const result = try addZIRBinOp(mod, scope, src, .array_type, len, elem_type);
+ return rvalue(mod, scope, rl, result);
+ }
}
-fn arrayTypeSentinel(mod: *Module, scope: *Scope, node: *ast.Node.ArrayTypeSentinel) !*zir.Inst {
+fn arrayTypeSentinel(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) !*zir.Inst {
const tree = scope.tree();
- const src = tree.token_locs[node.op_token].start;
+ const main_tokens = tree.nodes.items(.main_token);
+ const token_starts = tree.tokens.items(.start);
+ const node_datas = tree.nodes.items(.data);
+
+ const len_node = node_datas[node].lhs;
+ const extra = tree.extraData(node_datas[node].rhs, ast.Node.ArrayTypeSentinel);
+ const src = token_starts[main_tokens[node]];
const usize_type = try addZIRInstConst(mod, scope, src, .{
.ty = Type.initTag(.type),
.val = Value.initTag(.usize_type),
});
// TODO check for [_]T
- const len = try expr(mod, scope, .{ .ty = usize_type }, node.len_expr);
- const sentinel_uncasted = try expr(mod, scope, .none, node.sentinel);
- const elem_type = try typeExpr(mod, scope, node.rhs);
+ const len = try expr(mod, scope, .{ .ty = usize_type }, len_node);
+ const sentinel_uncasted = try expr(mod, scope, .none, extra.sentinel);
+ const elem_type = try typeExpr(mod, scope, extra.elem_type);
const sentinel = try addZIRBinOp(mod, scope, src, .as, elem_type, sentinel_uncasted);
- return addZIRInst(mod, scope, src, zir.Inst.ArrayTypeSentinel, .{
+ const result = try addZIRInst(mod, scope, src, zir.Inst.ArrayTypeSentinel, .{
.len = len,
.sentinel = sentinel,
.elem_type = elem_type,
}, .{});
-}
-
-fn anyFrameType(mod: *Module, scope: *Scope, node: *ast.Node.AnyFrameType) InnerError!*zir.Inst {
- const tree = scope.tree();
- const src = tree.token_locs[node.anyframe_token].start;
- if (node.result) |some| {
- const return_type = try typeExpr(mod, scope, some.return_type);
- return addZIRUnOp(mod, scope, src, .anyframe_type, return_type);
- } else {
- return addZIRInstConst(mod, scope, src, .{
- .ty = Type.initTag(.type),
- .val = Value.initTag(.anyframe_type),
- });
- }
-}
-
-fn typeInixOp(mod: *Module, scope: *Scope, node: *ast.Node.SimpleInfixOp, op_inst_tag: zir.Inst.Tag) InnerError!*zir.Inst {
- const tree = scope.tree();
- const src = tree.token_locs[node.op_token].start;
- const error_set = try typeExpr(mod, scope, node.lhs);
- const payload = try typeExpr(mod, scope, node.rhs);
- return addZIRBinOp(mod, scope, src, op_inst_tag, error_set, payload);
-}
-
-fn enumLiteral(mod: *Module, scope: *Scope, node: *ast.Node.EnumLiteral) !*zir.Inst {
- const tree = scope.tree();
- const src = tree.token_locs[node.name].start;
- const name = try mod.identifierTokenString(scope, node.name);
-
- return addZIRInst(mod, scope, src, zir.Inst.EnumLiteral, .{ .name = name }, .{});
-}
-
-fn unwrapOptional(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.SimpleSuffixOp) InnerError!*zir.Inst {
- const tree = scope.tree();
- const src = tree.token_locs[node.rtoken].start;
-
- const operand = try expr(mod, scope, rl, node.lhs);
- const op: zir.Inst.Tag = switch (rl) {
- .ref => .optional_payload_safe_ptr,
- else => .optional_payload_safe,
- };
- return addZIRUnOp(mod, scope, src, op, operand);
+ return rvalue(mod, scope, rl, result);
}
fn containerField(
mod: *Module,
scope: *Scope,
- node: *ast.Node.ContainerField,
+ field: ast.full.ContainerField,
) InnerError!*zir.Inst {
const tree = scope.tree();
- const src = tree.token_locs[node.firstToken()].start;
- const name = try mod.identifierTokenString(scope, node.name_token);
+ const token_starts = tree.tokens.items(.start);
+
+ const src = token_starts[field.ast.name_token];
+ const name = try mod.identifierTokenString(scope, field.ast.name_token);
- if (node.comptime_token == null and node.value_expr == null and node.align_expr == null) {
- if (node.type_expr) |some| {
- const ty = try typeExpr(mod, scope, some);
+ if (field.comptime_token == null and field.ast.value_expr == 0 and field.ast.align_expr == 0) {
+ if (field.ast.type_expr != 0) {
+ const ty = try typeExpr(mod, scope, field.ast.type_expr);
return addZIRInst(mod, scope, src, zir.Inst.ContainerFieldTyped, .{
.bytes = name,
.ty = ty,
@@ -1099,9 +1445,11 @@ fn containerField(
}
}
- const ty = if (node.type_expr) |some| try typeExpr(mod, scope, some) else null;
- const alignment = if (node.align_expr) |some| try expr(mod, scope, .none, some) else null;
- const init = if (node.value_expr) |some| try expr(mod, scope, .none, some) else null;
+ const ty = if (field.ast.type_expr != 0) try typeExpr(mod, scope, field.ast.type_expr) else null;
+ // TODO result location should be alignment type
+ const alignment = if (field.ast.align_expr != 0) try expr(mod, scope, .none, field.ast.align_expr) else null;
+ // TODO result location should be the field type
+ const init = if (field.ast.value_expr != 0) try expr(mod, scope, .none, field.ast.value_expr) else null;
return addZIRInst(mod, scope, src, zir.Inst.ContainerField, .{
.bytes = name,
@@ -1109,13 +1457,22 @@ fn containerField(
.ty = ty,
.init = init,
.alignment = alignment,
- .is_comptime = node.comptime_token != null,
+ .is_comptime = field.comptime_token != null,
});
}
-fn containerDecl(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.ContainerDecl) InnerError!*zir.Inst {
+fn containerDecl(
+ mod: *Module,
+ scope: *Scope,
+ rl: ResultLoc,
+ container_decl: ast.full.ContainerDecl,
+) InnerError!*zir.Inst {
const tree = scope.tree();
- const src = tree.token_locs[node.kind_token].start;
+ const token_starts = tree.tokens.items(.start);
+ const node_tags = tree.nodes.items(.tag);
+ const token_tags = tree.tokens.items(.tag);
+
+ const src = token_starts[container_decl.ast.main_token];
var gen_scope: Scope.GenZIR = .{
.parent = scope,
@@ -1129,10 +1486,16 @@ fn containerDecl(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Con
var fields = std.ArrayList(*zir.Inst).init(mod.gpa);
defer fields.deinit();
- for (node.fieldsAndDecls()) |fd| {
- if (fd.castTag(.ContainerField)) |f| {
- try fields.append(try containerField(mod, &gen_scope.base, f));
- }
+ for (container_decl.ast.members) |member| {
+ // TODO just handle these cases differently since they end up with different ZIR
+ // instructions anyway. It will be simpler & have fewer branches.
+ const field = switch (node_tags[member]) {
+ .container_field_init => try containerField(mod, &gen_scope.base, tree.containerFieldInit(member)),
+ .container_field_align => try containerField(mod, &gen_scope.base, tree.containerFieldAlign(member)),
+ .container_field => try containerField(mod, &gen_scope.base, tree.containerField(member)),
+ else => continue,
+ };
+ try fields.append(field);
}
var decl_arena = std.heap.ArenaAllocator.init(mod.gpa);
@@ -1140,19 +1503,22 @@ fn containerDecl(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Con
const arena = &decl_arena.allocator;
var layout: std.builtin.TypeInfo.ContainerLayout = .Auto;
- if (node.layout_token) |some| switch (tree.token_ids[some]) {
- .Keyword_extern => layout = .Extern,
- .Keyword_packed => layout = .Packed,
+ if (container_decl.layout_token) |some| switch (token_tags[some]) {
+ .keyword_extern => layout = .Extern,
+ .keyword_packed => layout = .Packed,
else => unreachable,
};
- const container_type = switch (tree.token_ids[node.kind_token]) {
- .Keyword_enum => blk: {
- const tag_type: ?*zir.Inst = switch (node.init_arg_expr) {
- .Type => |t| try typeExpr(mod, &gen_scope.base, t),
- .None => null,
- .Enum => unreachable,
- };
+ // TODO this implementation is incorrect. The types must be created in semantic
+ // analysis, not astgen, because the same ZIR is re-used for multiple inline function calls,
+ // comptime function calls, and generic function instantiations, and these
+ // must result in different instances of container types.
+ const container_type = switch (token_tags[container_decl.ast.main_token]) {
+ .keyword_enum => blk: {
+ const tag_type: ?*zir.Inst = if (container_decl.ast.arg != 0)
+ try typeExpr(mod, &gen_scope.base, container_decl.ast.arg)
+ else
+ null;
const inst = try addZIRInst(mod, &gen_scope.base, src, zir.Inst.EnumType, .{
.fields = try arena.dupe(*zir.Inst, fields.items),
}, .{
@@ -1174,8 +1540,8 @@ fn containerDecl(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Con
};
break :blk Type.initPayload(&enum_type.base);
},
- .Keyword_struct => blk: {
- assert(node.init_arg_expr == .None);
+ .keyword_struct => blk: {
+ assert(container_decl.ast.arg == 0);
const inst = try addZIRInst(mod, &gen_scope.base, src, zir.Inst.StructType, .{
.fields = try arena.dupe(*zir.Inst, fields.items),
}, .{
@@ -1196,22 +1562,17 @@ fn containerDecl(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Con
};
break :blk Type.initPayload(&struct_type.base);
},
- .Keyword_union => blk: {
- const init_inst = switch (node.init_arg_expr) {
- .Enum => |e| if (e) |t| try typeExpr(mod, &gen_scope.base, t) else null,
- .None => null,
- .Type => |t| try typeExpr(mod, &gen_scope.base, t),
- };
- const init_kind: zir.Inst.UnionType.InitKind = switch (node.init_arg_expr) {
- .Enum => .enum_type,
- .None => .none,
- .Type => .tag_type,
- };
+ .keyword_union => blk: {
+ const init_inst: ?*zir.Inst = if (container_decl.ast.arg != 0)
+ try typeExpr(mod, &gen_scope.base, container_decl.ast.arg)
+ else
+ null;
+ const has_enum_token = container_decl.ast.enum_token != null;
const inst = try addZIRInst(mod, &gen_scope.base, src, zir.Inst.UnionType, .{
.fields = try arena.dupe(*zir.Inst, fields.items),
}, .{
.layout = layout,
- .init_kind = init_kind,
+ .has_enum_token = has_enum_token,
.init_inst = init_inst,
});
const union_type = try arena.create(Type.Payload.Union);
@@ -1229,7 +1590,7 @@ fn containerDecl(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Con
};
break :blk Type.initPayload(&union_type.base);
},
- .Keyword_opaque => blk: {
+ .keyword_opaque => blk: {
if (fields.items.len > 0) {
return mod.fail(scope, fields.items[0].src, "opaque types cannot have fields", .{});
}
@@ -1245,7 +1606,7 @@ fn containerDecl(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Con
else => unreachable,
};
const val = try Value.Tag.ty.create(arena, container_type);
- const decl = try mod.createContainerDecl(scope, node.kind_token, &decl_arena, .{
+ const decl = try mod.createContainerDecl(scope, container_decl.ast.main_token, &decl_arena, .{
.ty = Type.initTag(.type),
.val = val,
});
@@ -1258,101 +1619,69 @@ fn containerDecl(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Con
}
}
-fn errorSetDecl(mod: *Module, scope: *Scope, node: *ast.Node.ErrorSetDecl) InnerError!*zir.Inst {
- const tree = scope.tree();
- const src = tree.token_locs[node.error_token].start;
- const decls = node.decls();
- const fields = try scope.arena().alloc([]const u8, decls.len);
-
- for (decls) |decl, i| {
- const tag = decl.castTag(.ErrorTag).?;
- fields[i] = try mod.identifierTokenString(scope, tag.name_token);
- }
-
- return addZIRInst(mod, scope, src, zir.Inst.ErrorSet, .{ .fields = fields }, .{});
-}
-
-fn errorType(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) InnerError!*zir.Inst {
+fn errorSetDecl(
+ mod: *Module,
+ scope: *Scope,
+ rl: ResultLoc,
+ node: ast.Node.Index,
+) InnerError!*zir.Inst {
const tree = scope.tree();
- const src = tree.token_locs[node.token].start;
- return addZIRInstConst(mod, scope, src, .{
- .ty = Type.initTag(.type),
- .val = Value.initTag(.anyerror_type),
- });
-}
+ const main_tokens = tree.nodes.items(.main_token);
+ const token_tags = tree.tokens.items(.tag);
+ const token_starts = tree.tokens.items(.start);
+
+ // Count how many fields there are.
+ const error_token = main_tokens[node];
+ const count: usize = count: {
+ var tok_i = error_token + 2;
+ var count: usize = 0;
+ while (true) : (tok_i += 1) {
+ switch (token_tags[tok_i]) {
+ .doc_comment, .comma => {},
+ .identifier => count += 1,
+ .r_paren => break :count count,
+ else => unreachable,
+ }
+ } else unreachable; // TODO should not need else unreachable here
+ };
-fn catchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Catch) InnerError!*zir.Inst {
- switch (rl) {
- .ref => return orelseCatchExpr(
- mod,
- scope,
- rl,
- node.lhs,
- node.op_token,
- .is_err_ptr,
- .err_union_payload_unsafe_ptr,
- .err_union_code_ptr,
- node.rhs,
- node.payload,
- ),
- else => return orelseCatchExpr(
- mod,
- scope,
- rl,
- node.lhs,
- node.op_token,
- .is_err,
- .err_union_payload_unsafe,
- .err_union_code,
- node.rhs,
- node.payload,
- ),
- }
-}
-
-fn orelseExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.SimpleInfixOp) InnerError!*zir.Inst {
- switch (rl) {
- .ref => return orelseCatchExpr(
- mod,
- scope,
- rl,
- node.lhs,
- node.op_token,
- .is_null_ptr,
- .optional_payload_unsafe_ptr,
- undefined,
- node.rhs,
- null,
- ),
- else => return orelseCatchExpr(
- mod,
- scope,
- rl,
- node.lhs,
- node.op_token,
- .is_null,
- .optional_payload_unsafe,
- undefined,
- node.rhs,
- null,
- ),
+ const fields = try scope.arena().alloc([]const u8, count);
+ {
+ var tok_i = error_token + 2;
+ var field_i: usize = 0;
+ while (true) : (tok_i += 1) {
+ switch (token_tags[tok_i]) {
+ .doc_comment, .comma => {},
+ .identifier => {
+ fields[field_i] = try mod.identifierTokenString(scope, tok_i);
+ field_i += 1;
+ },
+ .r_paren => break,
+ else => unreachable,
+ }
+ }
}
+ const src = token_starts[error_token];
+ const result = try addZIRInst(mod, scope, src, zir.Inst.ErrorSet, .{ .fields = fields }, .{});
+ return rvalue(mod, scope, rl, result);
}
fn orelseCatchExpr(
mod: *Module,
scope: *Scope,
rl: ResultLoc,
- lhs: *ast.Node,
+ lhs: ast.Node.Index,
op_token: ast.TokenIndex,
cond_op: zir.Inst.Tag,
unwrap_op: zir.Inst.Tag,
unwrap_code_op: zir.Inst.Tag,
- rhs: *ast.Node,
- payload_node: ?*ast.Node,
+ rhs: ast.Node.Index,
+ payload_token: ?ast.TokenIndex,
) InnerError!*zir.Inst {
const tree = scope.tree();
- const src = tree.token_locs[op_token].start;
+ const token_starts = tree.tokens.items(.start);
+
+ const src = token_starts[op_token];
var block_scope: Scope.GenZIR = .{
.parent = scope,
@@ -1390,12 +1719,11 @@ fn orelseCatchExpr(
var err_val_scope: Scope.LocalVal = undefined;
const then_sub_scope = blk: {
- const payload = payload_node orelse break :blk &then_scope.base;
-
- const err_name = tree.tokenSlice(payload.castTag(.Payload).?.error_symbol.firstToken());
- if (mem.eql(u8, err_name, "_"))
- break :blk &then_scope.base;
-
+ const payload = payload_token orelse break :blk &then_scope.base;
+ if (mem.eql(u8, tree.tokenSlice(payload), "_")) {
+ return mod.failTok(&then_scope.base, payload, "discard of error capture; omit it instead", .{});
+ }
+ const err_name = try mod.identifierTokenString(scope, payload);
err_val_scope = .{
.parent = &then_scope.base,
.gen_zir = &then_scope,
@@ -1524,124 +1852,121 @@ fn tokenIdentEql(mod: *Module, scope: *Scope, token1: ast.TokenIndex, token2: as
return mem.eql(u8, ident_name_1, ident_name_2);
}
-pub fn field(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.SimpleInfixOp) InnerError!*zir.Inst {
+pub fn fieldAccess(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) InnerError!*zir.Inst {
const tree = scope.tree();
- const src = tree.token_locs[node.op_token].start;
- // TODO custom AST node for field access so that we don't have to go through a node cast here
- const field_name = try mod.identifierTokenString(scope, node.rhs.castTag(.Identifier).?.token);
+ const token_starts = tree.tokens.items(.start);
+ const main_tokens = tree.nodes.items(.main_token);
+ const node_datas = tree.nodes.items(.data);
+
+ const dot_token = main_tokens[node];
+ const src = token_starts[dot_token];
+ const field_ident = dot_token + 1;
+ const field_name = try mod.identifierTokenString(scope, field_ident);
if (rl == .ref) {
return addZirInstTag(mod, scope, src, .field_ptr, .{
- .object = try expr(mod, scope, .ref, node.lhs),
+ .object = try expr(mod, scope, .ref, node_datas[node].lhs),
.field_name = field_name,
});
+ } else {
+ return rvalue(mod, scope, rl, try addZirInstTag(mod, scope, src, .field_val, .{
+ .object = try expr(mod, scope, .none, node_datas[node].lhs),
+ .field_name = field_name,
+ }));
}
- return rvalue(mod, scope, rl, try addZirInstTag(mod, scope, src, .field_val, .{
- .object = try expr(mod, scope, .none, node.lhs),
- .field_name = field_name,
- }));
}
-fn namedField(
+fn arrayAccess(
mod: *Module,
scope: *Scope,
rl: ResultLoc,
- call: *ast.Node.BuiltinCall,
+ node: ast.Node.Index,
) InnerError!*zir.Inst {
- try ensureBuiltinParamCount(mod, scope, call, 2);
-
const tree = scope.tree();
- const src = tree.token_locs[call.builtin_token].start;
- const params = call.params();
-
- const string_type = try addZIRInstConst(mod, scope, src, .{
- .ty = Type.initTag(.type),
- .val = Value.initTag(.const_slice_u8_type),
- });
- const string_rl: ResultLoc = .{ .ty = string_type };
-
- if (rl == .ref) {
- return addZirInstTag(mod, scope, src, .field_ptr_named, .{
- .object = try expr(mod, scope, .ref, params[0]),
- .field_name = try comptimeExpr(mod, scope, string_rl, params[1]),
- });
- }
- return rvalue(mod, scope, rl, try addZirInstTag(mod, scope, src, .field_val_named, .{
- .object = try expr(mod, scope, .none, params[0]),
- .field_name = try comptimeExpr(mod, scope, string_rl, params[1]),
- }));
-}
+ const main_tokens = tree.nodes.items(.main_token);
+ const token_starts = tree.tokens.items(.start);
+ const node_datas = tree.nodes.items(.data);
-fn arrayAccess(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.ArrayAccess) InnerError!*zir.Inst {
- const tree = scope.tree();
- const src = tree.token_locs[node.rtoken].start;
+ const src = token_starts[main_tokens[node]];
const usize_type = try addZIRInstConst(mod, scope, src, .{
.ty = Type.initTag(.type),
.val = Value.initTag(.usize_type),
});
const index_rl: ResultLoc = .{ .ty = usize_type };
-
- if (rl == .ref) {
- return addZirInstTag(mod, scope, src, .elem_ptr, .{
- .array = try expr(mod, scope, .ref, node.lhs),
- .index = try expr(mod, scope, index_rl, node.index_expr),
- });
+ switch (rl) {
+ .ref => return addZirInstTag(mod, scope, src, .elem_ptr, .{
+ .array = try expr(mod, scope, .ref, node_datas[node].lhs),
+ .index = try expr(mod, scope, index_rl, node_datas[node].rhs),
+ }),
+ else => return rvalue(mod, scope, rl, try addZirInstTag(mod, scope, src, .elem_val, .{
+ .array = try expr(mod, scope, .none, node_datas[node].lhs),
+ .index = try expr(mod, scope, index_rl, node_datas[node].rhs),
+ })),
}
- return rvalue(mod, scope, rl, try addZirInstTag(mod, scope, src, .elem_val, .{
- .array = try expr(mod, scope, .none, node.lhs),
- .index = try expr(mod, scope, index_rl, node.index_expr),
- }));
}
-fn sliceExpr(mod: *Module, scope: *Scope, node: *ast.Node.Slice) InnerError!*zir.Inst {
+fn sliceExpr(
+ mod: *Module,
+ scope: *Scope,
+ rl: ResultLoc,
+ slice: ast.full.Slice,
+) InnerError!*zir.Inst {
const tree = scope.tree();
- const src = tree.token_locs[node.rtoken].start;
+ const token_starts = tree.tokens.items(.start);
+
+ const src = token_starts[slice.ast.lbracket];
const usize_type = try addZIRInstConst(mod, scope, src, .{
.ty = Type.initTag(.type),
.val = Value.initTag(.usize_type),
});
- const array_ptr = try expr(mod, scope, .ref, node.lhs);
- const start = try expr(mod, scope, .{ .ty = usize_type }, node.start);
+ const array_ptr = try expr(mod, scope, .ref, slice.ast.sliced);
+ const start = try expr(mod, scope, .{ .ty = usize_type }, slice.ast.start);
- if (node.end == null and node.sentinel == null) {
- return try addZIRBinOp(mod, scope, src, .slice_start, array_ptr, start);
+ if (slice.ast.sentinel == 0) {
+ if (slice.ast.end == 0) {
+ const result = try addZIRBinOp(mod, scope, src, .slice_start, array_ptr, start);
+ return rvalue(mod, scope, rl, result);
+ } else {
+ const end = try expr(mod, scope, .{ .ty = usize_type }, slice.ast.end);
+ // TODO a ZIR slice_open instruction
+ const result = try addZIRInst(mod, scope, src, zir.Inst.Slice, .{
+ .array_ptr = array_ptr,
+ .start = start,
+ }, .{ .end = end });
+ return rvalue(mod, scope, rl, result);
+ }
}
- const end = if (node.end) |end| try expr(mod, scope, .{ .ty = usize_type }, end) else null;
- // we could get the child type here, but it is easier to just do it in semantic analysis.
- const sentinel = if (node.sentinel) |sentinel| try expr(mod, scope, .none, sentinel) else null;
-
- return try addZIRInst(
- mod,
- scope,
- src,
- zir.Inst.Slice,
- .{ .array_ptr = array_ptr, .start = start },
- .{ .end = end, .sentinel = sentinel },
- );
-}
-
-fn deref(mod: *Module, scope: *Scope, node: *ast.Node.SimpleSuffixOp) InnerError!*zir.Inst {
- const tree = scope.tree();
- const src = tree.token_locs[node.rtoken].start;
- const lhs = try expr(mod, scope, .none, node.lhs);
- return addZIRUnOp(mod, scope, src, .deref, lhs);
+ const end = try expr(mod, scope, .{ .ty = usize_type }, slice.ast.end);
+ // TODO pass the proper result loc to this expression using a ZIR instruction
+ // "get the child element type for a slice target".
+ const sentinel = try expr(mod, scope, .none, slice.ast.sentinel);
+ const result = try addZIRInst(mod, scope, src, zir.Inst.Slice, .{
+ .array_ptr = array_ptr,
+ .start = start,
+ }, .{
+ .end = end,
+ .sentinel = sentinel,
+ });
+ return rvalue(mod, scope, rl, result);
}
fn simpleBinOp(
mod: *Module,
scope: *Scope,
rl: ResultLoc,
- infix_node: *ast.Node.SimpleInfixOp,
+ infix_node: ast.Node.Index,
op_inst_tag: zir.Inst.Tag,
) InnerError!*zir.Inst {
const tree = scope.tree();
- const src = tree.token_locs[infix_node.op_token].start;
-
- const lhs = try expr(mod, scope, .none, infix_node.lhs);
- const rhs = try expr(mod, scope, .none, infix_node.rhs);
+ const node_datas = tree.nodes.items(.data);
+ const main_tokens = tree.nodes.items(.main_token);
+ const token_starts = tree.tokens.items(.start);
+ const lhs = try expr(mod, scope, .none, node_datas[infix_node].lhs);
+ const rhs = try expr(mod, scope, .none, node_datas[infix_node].rhs);
+ const src = token_starts[main_tokens[infix_node]];
const result = try addZIRBinOp(mod, scope, src, op_inst_tag, lhs, rhs);
return rvalue(mod, scope, rl, result);
}
@@ -1650,10 +1975,15 @@ fn boolBinOp(
mod: *Module,
scope: *Scope,
rl: ResultLoc,
- infix_node: *ast.Node.SimpleInfixOp,
+ infix_node: ast.Node.Index,
+ is_bool_and: bool,
) InnerError!*zir.Inst {
const tree = scope.tree();
- const src = tree.token_locs[infix_node.op_token].start;
+ const node_datas = tree.nodes.items(.data);
+ const main_tokens = tree.nodes.items(.main_token);
+ const token_starts = tree.tokens.items(.start);
+
+ const src = token_starts[main_tokens[infix_node]];
const bool_type = try addZIRInstConst(mod, scope, src, .{
.ty = Type.initTag(.type),
.val = Value.initTag(.bool_type),
@@ -1668,7 +1998,7 @@ fn boolBinOp(
};
defer block_scope.instructions.deinit(mod.gpa);
- const lhs = try expr(mod, scope, .{ .ty = bool_type }, infix_node.lhs);
+ const lhs = try expr(mod, scope, .{ .ty = bool_type }, node_datas[infix_node].lhs);
const condbr = try addZIRInstSpecial(mod, &block_scope.base, src, zir.Inst.CondBr, .{
.condition = lhs,
.then_body = undefined, // populated below
@@ -1688,7 +2018,7 @@ fn boolBinOp(
};
defer rhs_scope.instructions.deinit(mod.gpa);
- const rhs = try expr(mod, &rhs_scope.base, .{ .ty = bool_type }, infix_node.rhs);
+ const rhs = try expr(mod, &rhs_scope.base, .{ .ty = bool_type }, node_datas[infix_node].rhs);
_ = try addZIRInst(mod, &rhs_scope.base, src, zir.Inst.Break, .{
.block = block,
.operand = rhs,
@@ -1703,7 +2033,6 @@ fn boolBinOp(
};
defer const_scope.instructions.deinit(mod.gpa);
- const is_bool_and = infix_node.base.tag == .BoolAnd;
_ = try addZIRInst(mod, &const_scope.base, src, zir.Inst.Break, .{
.block = block,
.operand = try addZIRInstConst(mod, &const_scope.base, src, .{
@@ -1731,96 +2060,42 @@ fn boolBinOp(
return rvalue(mod, scope, rl, &block.base);
}
-const CondKind = union(enum) {
- bool,
- optional: ?*zir.Inst,
- err_union: ?*zir.Inst,
+fn ifExpr(
+ mod: *Module,
+ scope: *Scope,
+ rl: ResultLoc,
+ if_full: ast.full.If,
+) InnerError!*zir.Inst {
+ var block_scope: Scope.GenZIR = .{
+ .parent = scope,
+ .decl = scope.ownerDecl().?,
+ .arena = scope.arena(),
+ .force_comptime = scope.isComptime(),
+ .instructions = .{},
+ };
+ setBlockResultLoc(&block_scope, rl);
+ defer block_scope.instructions.deinit(mod.gpa);
- fn cond(self: *CondKind, mod: *Module, block_scope: *Scope.GenZIR, src: usize, cond_node: *ast.Node) !*zir.Inst {
- switch (self.*) {
- .bool => {
- const bool_type = try addZIRInstConst(mod, &block_scope.base, src, .{
- .ty = Type.initTag(.type),
- .val = Value.initTag(.bool_type),
- });
- return try expr(mod, &block_scope.base, .{ .ty = bool_type }, cond_node);
- },
- .optional => {
- const cond_ptr = try expr(mod, &block_scope.base, .ref, cond_node);
- self.* = .{ .optional = cond_ptr };
- const result = try addZIRUnOp(mod, &block_scope.base, src, .deref, cond_ptr);
- return try addZIRUnOp(mod, &block_scope.base, src, .is_non_null, result);
- },
- .err_union => {
- const err_ptr = try expr(mod, &block_scope.base, .ref, cond_node);
- self.* = .{ .err_union = err_ptr };
- const result = try addZIRUnOp(mod, &block_scope.base, src, .deref, err_ptr);
- return try addZIRUnOp(mod, &block_scope.base, src, .is_err, result);
- },
- }
- }
-
- fn thenSubScope(self: CondKind, mod: *Module, then_scope: *Scope.GenZIR, src: usize, payload_node: ?*ast.Node) !*Scope {
- if (self == .bool) return &then_scope.base;
-
- const payload = payload_node.?.castTag(.PointerPayload) orelse {
- // condition is error union and payload is not explicitly ignored
- _ = try addZIRUnOp(mod, &then_scope.base, src, .ensure_err_payload_void, self.err_union.?);
- return &then_scope.base;
- };
- const is_ptr = payload.ptr_token != null;
- const ident_node = payload.value_symbol.castTag(.Identifier).?;
-
- // This intentionally does not support @"_" syntax.
- const ident_name = then_scope.base.tree().tokenSlice(ident_node.token);
- if (mem.eql(u8, ident_name, "_")) {
- if (is_ptr)
- return mod.failTok(&then_scope.base, payload.ptr_token.?, "pointer modifier invalid on discard", .{});
- return &then_scope.base;
- }
-
- return mod.failNode(&then_scope.base, payload.value_symbol, "TODO implement payload symbols", .{});
- }
-
- fn elseSubScope(self: CondKind, mod: *Module, else_scope: *Scope.GenZIR, src: usize, payload_node: ?*ast.Node) !*Scope {
- if (self != .err_union) return &else_scope.base;
-
- const payload_ptr = try addZIRUnOp(mod, &else_scope.base, src, .err_union_payload_unsafe_ptr, self.err_union.?);
-
- const payload = payload_node.?.castTag(.Payload).?;
- const ident_node = payload.error_symbol.castTag(.Identifier).?;
-
- // This intentionally does not support @"_" syntax.
- const ident_name = else_scope.base.tree().tokenSlice(ident_node.token);
- if (mem.eql(u8, ident_name, "_")) {
- return &else_scope.base;
- }
+ const tree = scope.tree();
+ const main_tokens = tree.nodes.items(.main_token);
+ const token_starts = tree.tokens.items(.start);
- return mod.failNode(&else_scope.base, payload.error_symbol, "TODO implement payload symbols", .{});
- }
-};
+ const if_src = token_starts[if_full.ast.if_token];
-fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.If) InnerError!*zir.Inst {
- var cond_kind: CondKind = .bool;
- if (if_node.payload) |_| cond_kind = .{ .optional = null };
- if (if_node.@"else") |else_node| {
- if (else_node.payload) |payload| {
- cond_kind = .{ .err_union = null };
+ const cond = c: {
+ // TODO https://github.com/ziglang/zig/issues/7929
+ if (if_full.error_token) |error_token| {
+ return mod.failTok(scope, error_token, "TODO implement if error union", .{});
+ } else if (if_full.payload_token) |payload_token| {
+ return mod.failTok(scope, payload_token, "TODO implement if optional", .{});
+ } else {
+ const bool_type = try addZIRInstConst(mod, &block_scope.base, if_src, .{
+ .ty = Type.initTag(.type),
+ .val = Value.initTag(.bool_type),
+ });
+ break :c try expr(mod, &block_scope.base, .{ .ty = bool_type }, if_full.ast.cond_expr);
}
- }
- var block_scope: Scope.GenZIR = .{
- .parent = scope,
- .decl = scope.ownerDecl().?,
- .arena = scope.arena(),
- .force_comptime = scope.isComptime(),
- .instructions = .{},
};
- setBlockResultLoc(&block_scope, rl);
- defer block_scope.instructions.deinit(mod.gpa);
-
- const tree = scope.tree();
- const if_src = tree.token_locs[if_node.if_token].start;
- const cond = try cond_kind.cond(mod, &block_scope, if_src, if_node.condition);
const condbr = try addZIRInstSpecial(mod, &block_scope.base, if_src, zir.Inst.CondBr, .{
.condition = cond,
@@ -1832,7 +2107,7 @@ fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.If) Inn
.instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items),
});
- const then_src = tree.token_locs[if_node.body.lastToken()].start;
+ const then_src = token_starts[tree.lastToken(if_full.ast.then_expr)];
var then_scope: Scope.GenZIR = .{
.parent = scope,
.decl = block_scope.decl,
@@ -1843,10 +2118,10 @@ fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.If) Inn
defer then_scope.instructions.deinit(mod.gpa);
// declare payload to the then_scope
- const then_sub_scope = try cond_kind.thenSubScope(mod, &then_scope, then_src, if_node.payload);
+ const then_sub_scope = &then_scope.base;
block_scope.break_count += 1;
- const then_result = try expr(mod, then_sub_scope, block_scope.break_result_loc, if_node.body);
+ const then_result = try expr(mod, then_sub_scope, block_scope.break_result_loc, if_full.ast.then_expr);
// We hold off on the break instructions as well as copying the then/else
// instructions into place until we know whether to keep store_to_block_ptr
// instructions or not.
@@ -1860,20 +2135,19 @@ fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.If) Inn
};
defer else_scope.instructions.deinit(mod.gpa);
- var else_src: usize = undefined;
- var else_sub_scope: *Module.Scope = undefined;
- const else_result: ?*zir.Inst = if (if_node.@"else") |else_node| blk: {
- else_src = tree.token_locs[else_node.body.lastToken()].start;
- // declare payload to the then_scope
- else_sub_scope = try cond_kind.elseSubScope(mod, &else_scope, else_src, else_node.payload);
-
+ const else_node = if_full.ast.else_expr;
+ const else_info: struct { src: usize, result: ?*zir.Inst } = if (else_node != 0) blk: {
block_scope.break_count += 1;
- break :blk try expr(mod, else_sub_scope, block_scope.break_result_loc, else_node.body);
- } else blk: {
- else_src = tree.token_locs[if_node.lastToken()].start;
- else_sub_scope = &else_scope.base;
- break :blk null;
- };
+ const sub_scope = &else_scope.base;
+ break :blk .{
+ .src = token_starts[tree.lastToken(else_node)],
+ .result = try expr(mod, sub_scope, block_scope.break_result_loc, else_node),
+ };
+ } else
+ .{
+ .src = token_starts[tree.lastToken(if_full.ast.then_expr)],
+ .result = null,
+ };
return finishThenElseBlock(
mod,
@@ -1885,9 +2159,9 @@ fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.If) Inn
&condbr.positionals.then_body,
&condbr.positionals.else_body,
then_src,
- else_src,
+ else_info.src,
then_result,
- else_result,
+ else_info.result,
block,
block,
);
@@ -1918,23 +2192,15 @@ fn whileExpr(
mod: *Module,
scope: *Scope,
rl: ResultLoc,
- while_node: *ast.Node.While,
+ while_full: ast.full.While,
) InnerError!*zir.Inst {
- var cond_kind: CondKind = .bool;
- if (while_node.payload) |_| cond_kind = .{ .optional = null };
- if (while_node.@"else") |else_node| {
- if (else_node.payload) |payload| {
- cond_kind = .{ .err_union = null };
- }
+ if (while_full.label_token) |label_token| {
+ try checkLabelRedefinition(mod, scope, label_token);
}
-
- if (while_node.label) |label| {
- try checkLabelRedefinition(mod, scope, label);
+ if (while_full.inline_token) |inline_token| {
+ return mod.failTok(scope, inline_token, "TODO inline while", .{});
}
- if (while_node.inline_token) |tok|
- return mod.failTok(scope, tok, "TODO inline while", .{});
-
var loop_scope: Scope.GenZIR = .{
.parent = scope,
.decl = scope.ownerDecl().?,
@@ -1955,12 +2221,28 @@ fn whileExpr(
defer continue_scope.instructions.deinit(mod.gpa);
const tree = scope.tree();
- const while_src = tree.token_locs[while_node.while_token].start;
+ const main_tokens = tree.nodes.items(.main_token);
+ const token_starts = tree.tokens.items(.start);
+
+ const while_src = token_starts[while_full.ast.while_token];
const void_type = try addZIRInstConst(mod, scope, while_src, .{
.ty = Type.initTag(.type),
.val = Value.initTag(.void_type),
});
- const cond = try cond_kind.cond(mod, &continue_scope, while_src, while_node.condition);
+ const cond = c: {
+ // TODO https://github.com/ziglang/zig/issues/7929
+ if (while_full.error_token) |error_token| {
+ return mod.failTok(scope, error_token, "TODO implement while error union", .{});
+ } else if (while_full.payload_token) |payload_token| {
+ return mod.failTok(scope, payload_token, "TODO implement while optional", .{});
+ } else {
+ const bool_type = try addZIRInstConst(mod, &continue_scope.base, while_src, .{
+ .ty = Type.initTag(.type),
+ .val = Value.initTag(.bool_type),
+ });
+ break :c try expr(mod, &continue_scope.base, .{ .ty = bool_type }, while_full.ast.cond_expr);
+ }
+ };
const condbr = try addZIRInstSpecial(mod, &continue_scope.base, while_src, zir.Inst.CondBr, .{
.condition = cond,
@@ -1974,8 +2256,8 @@ fn whileExpr(
// are no jumps to it. This happens when the last statement of a while body is noreturn
// and there are no `continue` statements.
// The "repeat" at the end of a loop body is implied.
- if (while_node.continue_expr) |cont_expr| {
- _ = try expr(mod, &loop_scope.base, .{ .ty = void_type }, cont_expr);
+ if (while_full.ast.cont_expr != 0) {
+ _ = try expr(mod, &loop_scope.base, .{ .ty = void_type }, while_full.ast.cont_expr);
}
const loop = try scope.arena().create(zir.Inst.Loop);
loop.* = .{
@@ -1995,14 +2277,14 @@ fn whileExpr(
});
loop_scope.break_block = while_block;
loop_scope.continue_block = cond_block;
- if (while_node.label) |some| {
+ if (while_full.label_token) |label_token| {
loop_scope.label = @as(?Scope.GenZIR.Label, Scope.GenZIR.Label{
- .token = some,
+ .token = label_token,
.block_inst = while_block,
});
}
- const then_src = tree.token_locs[while_node.body.lastToken()].start;
+ const then_src = token_starts[tree.lastToken(while_full.ast.then_expr)];
var then_scope: Scope.GenZIR = .{
.parent = &continue_scope.base,
.decl = continue_scope.decl,
@@ -2012,11 +2294,10 @@ fn whileExpr(
};
defer then_scope.instructions.deinit(mod.gpa);
- // declare payload to the then_scope
- const then_sub_scope = try cond_kind.thenSubScope(mod, &then_scope, then_src, while_node.payload);
+ const then_sub_scope = &then_scope.base;
loop_scope.break_count += 1;
- const then_result = try expr(mod, then_sub_scope, loop_scope.break_result_loc, while_node.body);
+ const then_result = try expr(mod, then_sub_scope, loop_scope.break_result_loc, while_full.ast.then_expr);
var else_scope: Scope.GenZIR = .{
.parent = &continue_scope.base,
@@ -2027,21 +2308,23 @@ fn whileExpr(
};
defer else_scope.instructions.deinit(mod.gpa);
- var else_src: usize = undefined;
- const else_result: ?*zir.Inst = if (while_node.@"else") |else_node| blk: {
- else_src = tree.token_locs[else_node.body.lastToken()].start;
- // declare payload to the then_scope
- const else_sub_scope = try cond_kind.elseSubScope(mod, &else_scope, else_src, else_node.payload);
-
+ const else_node = while_full.ast.else_expr;
+ const else_info: struct { src: usize, result: ?*zir.Inst } = if (else_node != 0) blk: {
loop_scope.break_count += 1;
- break :blk try expr(mod, else_sub_scope, loop_scope.break_result_loc, else_node.body);
- } else blk: {
- else_src = tree.token_locs[while_node.lastToken()].start;
- break :blk null;
- };
+ const sub_scope = &else_scope.base;
+ break :blk .{
+ .src = token_starts[tree.lastToken(else_node)],
+ .result = try expr(mod, sub_scope, loop_scope.break_result_loc, else_node),
+ };
+ } else
+ .{
+ .src = token_starts[tree.lastToken(while_full.ast.then_expr)],
+ .result = null,
+ };
+
if (loop_scope.label) |some| {
if (!some.used) {
- return mod.fail(scope, tree.token_locs[some.token].start, "unused while label", .{});
+ return mod.fail(scope, token_starts[some.token], "unused while loop label", .{});
}
}
return finishThenElseBlock(
@@ -2054,9 +2337,9 @@ fn whileExpr(
&condbr.positionals.then_body,
&condbr.positionals.else_body,
then_src,
- else_src,
+ else_info.src,
then_result,
- else_result,
+ else_info.result,
while_block,
cond_block,
);
@@ -2066,18 +2349,23 @@ fn forExpr(
mod: *Module,
scope: *Scope,
rl: ResultLoc,
- for_node: *ast.Node.For,
+ for_full: ast.full.While,
) InnerError!*zir.Inst {
- if (for_node.label) |label| {
- try checkLabelRedefinition(mod, scope, label);
+ if (for_full.label_token) |label_token| {
+ try checkLabelRedefinition(mod, scope, label_token);
}
- if (for_node.inline_token) |tok|
- return mod.failTok(scope, tok, "TODO inline for", .{});
+ if (for_full.inline_token) |inline_token| {
+ return mod.failTok(scope, inline_token, "TODO inline for", .{});
+ }
- // setup variables and constants
+ // Set up variables and constants.
const tree = scope.tree();
- const for_src = tree.token_locs[for_node.for_token].start;
+ const main_tokens = tree.nodes.items(.main_token);
+ const token_starts = tree.tokens.items(.start);
+ const token_tags = tree.tokens.items(.tag);
+
+ const for_src = token_starts[for_full.ast.while_token];
const index_ptr = blk: {
const usize_type = try addZIRInstConst(mod, scope, for_src, .{
.ty = Type.initTag(.type),
@@ -2092,8 +2380,8 @@ fn forExpr(
_ = try addZIRBinOp(mod, scope, for_src, .store, index_ptr, zero);
break :blk index_ptr;
};
- const array_ptr = try expr(mod, scope, .ref, for_node.array_expr);
- const cond_src = tree.token_locs[for_node.array_expr.firstToken()].start;
+ const array_ptr = try expr(mod, scope, .ref, for_full.ast.cond_expr);
+ const cond_src = token_starts[tree.firstToken(for_full.ast.cond_expr)];
const len = try addZIRUnOp(mod, scope, cond_src, .indexable_ptr_len, array_ptr);
var loop_scope: Scope.GenZIR = .{
@@ -2155,15 +2443,15 @@ fn forExpr(
});
loop_scope.break_block = for_block;
loop_scope.continue_block = cond_block;
- if (for_node.label) |some| {
+ if (for_full.label_token) |label_token| {
loop_scope.label = @as(?Scope.GenZIR.Label, Scope.GenZIR.Label{
- .token = some,
+ .token = label_token,
.block_inst = for_block,
});
}
// while body
- const then_src = tree.token_locs[for_node.body.lastToken()].start;
+ const then_src = token_starts[tree.lastToken(for_full.ast.then_expr)];
var then_scope: Scope.GenZIR = .{
.parent = &cond_scope.base,
.decl = cond_scope.decl,
@@ -2175,23 +2463,27 @@ fn forExpr(
var index_scope: Scope.LocalPtr = undefined;
const then_sub_scope = blk: {
- const payload = for_node.payload.castTag(.PointerIndexPayload).?;
- const is_ptr = payload.ptr_token != null;
- const value_name = tree.tokenSlice(payload.value_symbol.firstToken());
+ const payload_token = for_full.payload_token.?;
+ const ident = if (token_tags[payload_token] == .asterisk)
+ payload_token + 1
+ else
+ payload_token;
+ const is_ptr = ident != payload_token;
+ const value_name = tree.tokenSlice(ident);
if (!mem.eql(u8, value_name, "_")) {
- return mod.failNode(&then_scope.base, payload.value_symbol, "TODO implement for value payload", .{});
+ return mod.failNode(&then_scope.base, ident, "TODO implement for loop value payload", .{});
} else if (is_ptr) {
- return mod.failTok(&then_scope.base, payload.ptr_token.?, "pointer modifier invalid on discard", .{});
+ return mod.failTok(&then_scope.base, payload_token, "pointer modifier invalid on discard", .{});
}
- const index_symbol_node = payload.index_symbol orelse
- break :blk &then_scope.base;
-
- const index_name = tree.tokenSlice(index_symbol_node.firstToken());
- if (mem.eql(u8, index_name, "_")) {
+ const index_token = if (token_tags[ident + 1] == .comma)
+ ident + 2
+ else
break :blk &then_scope.base;
+ if (mem.eql(u8, tree.tokenSlice(index_token), "_")) {
+ return mod.failTok(&then_scope.base, index_token, "discard of index capture; omit it instead", .{});
}
- // TODO make this const without an extra copy?
+ const index_name = try mod.identifierTokenString(&then_scope.base, index_token);
index_scope = .{
.parent = &then_scope.base,
.gen_zir = &then_scope,
@@ -2202,7 +2494,7 @@ fn forExpr(
};
loop_scope.break_count += 1;
- const then_result = try expr(mod, then_sub_scope, loop_scope.break_result_loc, for_node.body);
+ const then_result = try expr(mod, then_sub_scope, loop_scope.break_result_loc, for_full.ast.then_expr);
// else branch
var else_scope: Scope.GenZIR = .{
@@ -2214,18 +2506,23 @@ fn forExpr(
};
defer else_scope.instructions.deinit(mod.gpa);
- var else_src: usize = undefined;
- const else_result: ?*zir.Inst = if (for_node.@"else") |else_node| blk: {
- else_src = tree.token_locs[else_node.body.lastToken()].start;
+ const else_node = for_full.ast.else_expr;
+ const else_info: struct { src: usize, result: ?*zir.Inst } = if (else_node != 0) blk: {
loop_scope.break_count += 1;
- break :blk try expr(mod, &else_scope.base, loop_scope.break_result_loc, else_node.body);
- } else blk: {
- else_src = tree.token_locs[for_node.lastToken()].start;
- break :blk null;
- };
+ const sub_scope = &else_scope.base;
+ break :blk .{
+ .src = token_starts[tree.lastToken(else_node)],
+ .result = try expr(mod, sub_scope, loop_scope.break_result_loc, else_node),
+ };
+ } else
+ .{
+ .src = token_starts[tree.lastToken(for_full.ast.then_expr)],
+ .result = null,
+ };
+
if (loop_scope.label) |some| {
if (!some.used) {
- return mod.fail(scope, tree.token_locs[some.token].start, "unused for label", .{});
+ return mod.fail(scope, token_starts[some.token], "unused for loop label", .{});
}
}
return finishThenElseBlock(
@@ -2238,39 +2535,48 @@ fn forExpr(
&condbr.positionals.then_body,
&condbr.positionals.else_body,
then_src,
- else_src,
+ else_info.src,
then_result,
- else_result,
+ else_info.result,
for_block,
cond_block,
);
}
-fn switchCaseUsesRef(node: *ast.Node.Switch) bool {
- for (node.cases()) |uncasted_case| {
- const case = uncasted_case.castTag(.SwitchCase).?;
- const uncasted_payload = case.payload orelse continue;
- const payload = uncasted_payload.castTag(.PointerPayload).?;
- if (payload.ptr_token) |_| return true;
- }
- return false;
-}
-
-fn getRangeNode(node: *ast.Node) ?*ast.Node.SimpleInfixOp {
- var cur = node;
+fn getRangeNode(
+ node_tags: []const ast.Node.Tag,
+ node_datas: []const ast.Node.Data,
+ start_node: ast.Node.Index,
+) ?ast.Node.Index {
+ var node = start_node;
while (true) {
- switch (cur.tag) {
- .Range => return @fieldParentPtr(ast.Node.SimpleInfixOp, "base", cur),
- .GroupedExpression => cur = @fieldParentPtr(ast.Node.GroupedExpression, "base", cur).expr,
+ switch (node_tags[node]) {
+ .switch_range => return node,
+ .grouped_expression => node = node_datas[node].lhs,
else => return null,
}
}
}
-fn switchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, switch_node: *ast.Node.Switch) InnerError!*zir.Inst {
+fn switchExpr(
+ mod: *Module,
+ scope: *Scope,
+ rl: ResultLoc,
+ switch_node: ast.Node.Index,
+) InnerError!*zir.Inst {
const tree = scope.tree();
- const switch_src = tree.token_locs[switch_node.switch_token].start;
- const use_ref = switchCaseUsesRef(switch_node);
+ const node_datas = tree.nodes.items(.data);
+ const main_tokens = tree.nodes.items(.main_token);
+ const token_tags = tree.tokens.items(.tag);
+ const token_starts = tree.tokens.items(.start);
+ const node_tags = tree.nodes.items(.tag);
+
+ const switch_token = main_tokens[switch_node];
+ const target_node = node_datas[switch_node].lhs;
+ const extra = tree.extraData(node_datas[switch_node].rhs, ast.Node.SubRange);
+ const case_nodes = tree.extra_data[extra.start..extra.end];
+
+ const switch_src = token_starts[switch_token];
var block_scope: Scope.GenZIR = .{
.parent = scope,
@@ -2285,18 +2591,26 @@ fn switchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, switch_node: *ast.Node
var items = std.ArrayList(*zir.Inst).init(mod.gpa);
defer items.deinit();
- // first we gather all the switch items and check else/'_' prongs
+ // First we gather all the switch items and check else/'_' prongs.
var else_src: ?usize = null;
var underscore_src: ?usize = null;
var first_range: ?*zir.Inst = null;
var simple_case_count: usize = 0;
- for (switch_node.cases()) |uncasted_case| {
- const case = uncasted_case.castTag(.SwitchCase).?;
- const case_src = tree.token_locs[case.firstToken()].start;
- assert(case.items_len != 0);
-
+ var any_payload_is_ref = false;
+ for (case_nodes) |case_node| {
+ const case = switch (node_tags[case_node]) {
+ .switch_case_one => tree.switchCaseOne(case_node),
+ .switch_case => tree.switchCase(case_node),
+ else => unreachable,
+ };
+ if (case.payload_token) |payload_token| {
+ if (token_tags[payload_token] == .asterisk) {
+ any_payload_is_ref = true;
+ }
+ }
// Check for else/_ prong, those are handled last.
- if (case.items_len == 1 and case.items()[0].tag == .SwitchElse) {
+ if (case.ast.values.len == 0) {
+ const case_src = token_starts[case.ast.arrow_token - 1];
if (else_src) |src| {
const msg = msg: {
const msg = try mod.errMsg(
@@ -2313,9 +2627,11 @@ fn switchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, switch_node: *ast.Node
}
else_src = case_src;
continue;
- } else if (case.items_len == 1 and case.items()[0].tag == .Identifier and
- mem.eql(u8, tree.tokenSlice(case.items()[0].firstToken()), "_"))
+ } else if (case.ast.values.len == 1 and
+ node_tags[case.ast.values[0]] == .identifier and
+ mem.eql(u8, tree.tokenSlice(main_tokens[case.ast.values[0]]), "_"))
{
+ const case_src = token_starts[case.ast.arrow_token - 1];
if (underscore_src) |src| {
const msg = msg: {
const msg = try mod.errMsg(
@@ -2352,14 +2668,18 @@ fn switchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, switch_node: *ast.Node
}
}
- if (case.items_len == 1 and getRangeNode(case.items()[0]) == null) simple_case_count += 1;
+ if (case.ast.values.len == 1 and
+ getRangeNode(node_tags, node_datas, case.ast.values[0]) == null)
+ {
+ simple_case_count += 1;
+ }
- // generate all the switch items as comptime expressions
- for (case.items()) |item| {
- if (getRangeNode(item)) |range| {
- const start = try comptimeExpr(mod, &block_scope.base, .none, range.lhs);
- const end = try comptimeExpr(mod, &block_scope.base, .none, range.rhs);
- const range_src = tree.token_locs[range.op_token].start;
+ // Generate all the switch items as comptime expressions.
+ for (case.ast.values) |item| {
+ if (getRangeNode(node_tags, node_datas, item)) |range| {
+ const start = try comptimeExpr(mod, &block_scope.base, .none, node_datas[range].lhs);
+ const end = try comptimeExpr(mod, &block_scope.base, .none, node_datas[range].rhs);
+ const range_src = token_starts[main_tokens[range]];
const range_inst = try addZIRBinOp(mod, &block_scope.base, range_src, .switch_range, start, end);
try items.append(range_inst);
} else {
@@ -2374,21 +2694,25 @@ fn switchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, switch_node: *ast.Node
if (underscore_src != null) special_prong = .underscore;
var cases = try block_scope.arena.alloc(zir.Inst.SwitchBr.Case, simple_case_count);
- const target_ptr = if (use_ref) try expr(mod, &block_scope.base, .ref, switch_node.expr) else null;
- const target = if (target_ptr) |some|
- try addZIRUnOp(mod, &block_scope.base, some.src, .deref, some)
+ const rl_and_tag: struct { rl: ResultLoc, tag: zir.Inst.Tag } = if (any_payload_is_ref)
+ .{
+ .rl = .ref,
+ .tag = .switchbr_ref,
+ }
else
- try expr(mod, &block_scope.base, .none, switch_node.expr);
- const switch_inst = try addZIRInst(mod, &block_scope.base, switch_src, zir.Inst.SwitchBr, .{
+ .{
+ .rl = .none,
+ .tag = .switchbr,
+ };
+ const target = try expr(mod, &block_scope.base, rl_and_tag.rl, target_node);
+ const switch_inst = try addZirInstT(mod, &block_scope.base, switch_src, zir.Inst.SwitchBr, rl_and_tag.tag, .{
.target = target,
.cases = cases,
.items = try block_scope.arena.dupe(*zir.Inst, items.items),
.else_body = undefined, // populated below
- }, .{
.range = first_range,
.special_prong = special_prong,
});
-
const block = try addZIRInstBlock(mod, scope, switch_src, .block, .{
.instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items),
});
@@ -2411,32 +2735,38 @@ fn switchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, switch_node: *ast.Node
};
defer else_scope.instructions.deinit(mod.gpa);
- // Now generate all but the special cases
- var special_case: ?*ast.Node.SwitchCase = null;
+ // Now generate all but the special cases.
+ var special_case: ?ast.full.SwitchCase = null;
var items_index: usize = 0;
var case_index: usize = 0;
- for (switch_node.cases()) |uncasted_case| {
- const case = uncasted_case.castTag(.SwitchCase).?;
- const case_src = tree.token_locs[case.firstToken()].start;
- // reset without freeing to reduce allocations.
- case_scope.instructions.items.len = 0;
+ for (case_nodes) |case_node| {
+ const case = switch (node_tags[case_node]) {
+ .switch_case_one => tree.switchCaseOne(case_node),
+ .switch_case => tree.switchCase(case_node),
+ else => unreachable,
+ };
+ const case_src = token_starts[main_tokens[case_node]];
+ case_scope.instructions.shrinkRetainingCapacity(0);
// Check for else/_ prong, those are handled last.
- if (case.items_len == 1 and case.items()[0].tag == .SwitchElse) {
+ if (case.ast.values.len == 0) {
special_case = case;
continue;
- } else if (case.items_len == 1 and case.items()[0].tag == .Identifier and
- mem.eql(u8, tree.tokenSlice(case.items()[0].firstToken()), "_"))
+ } else if (case.ast.values.len == 1 and
+ node_tags[case.ast.values[0]] == .identifier and
+ mem.eql(u8, tree.tokenSlice(main_tokens[case.ast.values[0]]), "_"))
{
special_case = case;
continue;
}
// If this is a simple one item prong then it is handled by the switchbr.
- if (case.items_len == 1 and getRangeNode(case.items()[0]) == null) {
+ if (case.ast.values.len == 1 and
+ getRangeNode(node_tags, node_datas, case.ast.values[0]) == null)
+ {
const item = items.items[items_index];
items_index += 1;
- try switchCaseExpr(mod, &case_scope.base, block_scope.break_result_loc, block, case, target, target_ptr);
+ try switchCaseExpr(mod, &case_scope.base, block_scope.break_result_loc, block, case, target);
cases[case_index] = .{
.item = item,
@@ -2446,16 +2776,14 @@ fn switchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, switch_node: *ast.Node
continue;
}
- // TODO if the case has few items and no ranges it might be better
- // to just handle them as switch prongs.
-
// Check if the target matches any of the items.
// 1, 2, 3..6 will result in
// target == 1 or target == 2 or (target >= 3 and target <= 6)
+ // TODO handle multiple items as switch prongs rather than along with ranges.
var any_ok: ?*zir.Inst = null;
- for (case.items()) |item| {
- if (getRangeNode(item)) |range| {
- const range_src = tree.token_locs[range.op_token].start;
+ for (case.ast.values) |item| {
+ if (getRangeNode(node_tags, node_datas, item)) |range| {
+ const range_src = token_starts[main_tokens[range]];
const range_inst = items.items[items_index].castTag(.switch_range).?;
items_index += 1;
@@ -2494,7 +2822,7 @@ fn switchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, switch_node: *ast.Node
// reset cond_scope for then_body
case_scope.instructions.items.len = 0;
- try switchCaseExpr(mod, &case_scope.base, block_scope.break_result_loc, block, case, target, target_ptr);
+ try switchCaseExpr(mod, &case_scope.base, block_scope.break_result_loc, block, case, target);
condbr.positionals.then_body = .{
.instructions = try scope.arena().dupe(*zir.Inst, case_scope.instructions.items),
};
@@ -2511,12 +2839,12 @@ fn switchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, switch_node: *ast.Node
// Finally generate else block or a break.
if (special_case) |case| {
- try switchCaseExpr(mod, &else_scope.base, block_scope.break_result_loc, block, case, target, target_ptr);
+ try switchCaseExpr(mod, &else_scope.base, block_scope.break_result_loc, block, case, target);
} else {
// Not handling all possible cases is a compile error.
_ = try addZIRNoOp(mod, &else_scope.base, switch_src, .unreachable_unsafe);
}
- switch_inst.castTag(.switchbr).?.positionals.else_body = .{
+ switch_inst.positionals.else_body = .{
.instructions = try block_scope.arena.dupe(*zir.Inst, else_scope.instructions.items),
};
@@ -2528,27 +2856,34 @@ fn switchCaseExpr(
scope: *Scope,
rl: ResultLoc,
block: *zir.Inst.Block,
- case: *ast.Node.SwitchCase,
+ case: ast.full.SwitchCase,
target: *zir.Inst,
- target_ptr: ?*zir.Inst,
) !void {
const tree = scope.tree();
- const case_src = tree.token_locs[case.firstToken()].start;
+ const node_datas = tree.nodes.items(.data);
+ const main_tokens = tree.nodes.items(.main_token);
+ const token_starts = tree.tokens.items(.start);
+ const token_tags = tree.tokens.items(.tag);
+
+ const case_src = token_starts[case.ast.arrow_token];
const sub_scope = blk: {
- const uncasted_payload = case.payload orelse break :blk scope;
- const payload = uncasted_payload.castTag(.PointerPayload).?;
- const is_ptr = payload.ptr_token != null;
- const value_name = tree.tokenSlice(payload.value_symbol.firstToken());
+ const payload_token = case.payload_token orelse break :blk scope;
+ const ident = if (token_tags[payload_token] == .asterisk)
+ payload_token + 1
+ else
+ payload_token;
+ const is_ptr = ident != payload_token;
+ const value_name = tree.tokenSlice(ident);
if (mem.eql(u8, value_name, "_")) {
if (is_ptr) {
- return mod.failTok(scope, payload.ptr_token.?, "pointer modifier invalid on discard", .{});
+ return mod.failTok(scope, payload_token, "pointer modifier invalid on discard", .{});
}
break :blk scope;
}
- return mod.failNode(scope, payload.value_symbol, "TODO implement switch value payload", .{});
+ return mod.failTok(scope, ident, "TODO implement switch value payload", .{});
};
- const case_body = try expr(mod, sub_scope, rl, case.expr);
+ const case_body = try expr(mod, sub_scope, rl, case.ast.target_expr);
if (!case_body.tag.isNoReturn()) {
_ = try addZIRInst(mod, sub_scope, case_src, zir.Inst.Break, .{
.block = block,
@@ -2557,11 +2892,16 @@ fn switchCaseExpr(
}
}
-fn ret(mod: *Module, scope: *Scope, cfe: *ast.Node.ControlFlowExpression) InnerError!*zir.Inst {
+fn ret(mod: *Module, scope: *Scope, node: ast.Node.Index) InnerError!*zir.Inst {
const tree = scope.tree();
- const src = tree.token_locs[cfe.ltoken].start;
- if (cfe.getRHS()) |rhs_node| {
- if (nodeMayNeedMemoryLocation(rhs_node, scope)) {
+ const node_datas = tree.nodes.items(.data);
+ const main_tokens = tree.nodes.items(.main_token);
+ const token_starts = tree.tokens.items(.start);
+
+ const src = token_starts[main_tokens[node]];
+ const rhs_node = node_datas[node].lhs;
+ if (rhs_node != 0) {
+ if (nodeMayNeedMemoryLocation(scope, rhs_node)) {
const ret_ptr = try addZIRNoOp(mod, scope, src, .ret_ptr);
const operand = try expr(mod, scope, .{ .ptr = ret_ptr }, rhs_node);
return addZIRUnOp(mod, scope, src, .@"return", operand);
@@ -2575,19 +2915,31 @@ fn ret(mod: *Module, scope: *Scope, cfe: *ast.Node.ControlFlowExpression) InnerE
}
}
-fn identifier(mod: *Module, scope: *Scope, rl: ResultLoc, ident: *ast.Node.OneToken) InnerError!*zir.Inst {
+fn identifier(
+ mod: *Module,
+ scope: *Scope,
+ rl: ResultLoc,
+ ident: ast.Node.Index,
+) InnerError!*zir.Inst {
const tracy = trace(@src());
defer tracy.end();
const tree = scope.tree();
- const ident_name = try mod.identifierTokenString(scope, ident.token);
- const src = tree.token_locs[ident.token].start;
+ const main_tokens = tree.nodes.items(.main_token);
+ const token_starts = tree.tokens.items(.start);
+
+ const ident_token = main_tokens[ident];
+ const ident_name = try mod.identifierTokenString(scope, ident_token);
+ const src = token_starts[ident_token];
if (mem.eql(u8, ident_name, "_")) {
- return mod.failNode(scope, &ident.base, "TODO implement '_' identifier", .{});
+ return mod.failNode(scope, ident, "TODO implement '_' identifier", .{});
}
- if (getSimplePrimitiveValue(ident_name)) |typed_value| {
- const result = try addZIRInstConst(mod, scope, src, typed_value);
+ if (simple_types.get(ident_name)) |val_tag| {
+ const result = try addZIRInstConst(mod, scope, src, TypedValue{
+ .ty = Type.initTag(.type),
+ .val = Value.initTag(val_tag),
+ });
return rvalue(mod, scope, rl, result);
}
@@ -2598,7 +2950,7 @@ fn identifier(mod: *Module, scope: *Scope, rl: ResultLoc, ident: *ast.Node.OneTo
const bit_count = std.fmt.parseInt(u16, ident_name[1..], 10) catch |err| switch (err) {
error.Overflow => return mod.failNode(
scope,
- &ident.base,
+ ident,
"primitive integer type '{s}' exceeds maximum bit width of 65535",
.{ident_name},
),
@@ -2662,59 +3014,104 @@ fn identifier(mod: *Module, scope: *Scope, rl: ResultLoc, ident: *ast.Node.OneTo
}
}
- return mod.failNode(scope, &ident.base, "use of undeclared identifier '{s}'", .{ident_name});
+ return mod.failNode(scope, ident, "use of undeclared identifier '{s}'", .{ident_name});
}
-fn stringLiteral(mod: *Module, scope: *Scope, str_lit: *ast.Node.OneToken) InnerError!*zir.Inst {
+fn parseStringLiteral(mod: *Module, scope: *Scope, token: ast.TokenIndex) ![]u8 {
const tree = scope.tree();
- const unparsed_bytes = tree.tokenSlice(str_lit.token);
+ const token_tags = tree.tokens.items(.tag);
+ const token_starts = tree.tokens.items(.start);
+ assert(token_tags[token] == .string_literal);
+ const unparsed = tree.tokenSlice(token);
const arena = scope.arena();
-
var bad_index: usize = undefined;
- const bytes = std.zig.parseStringLiteral(arena, unparsed_bytes, &bad_index) catch |err| switch (err) {
+ const bytes = std.zig.parseStringLiteral(arena, unparsed, &bad_index) catch |err| switch (err) {
error.InvalidCharacter => {
- const bad_byte = unparsed_bytes[bad_index];
- const src = tree.token_locs[str_lit.token].start;
- return mod.fail(scope, src + bad_index, "invalid string literal character: '{c}'\n", .{bad_byte});
+ const bad_byte = unparsed[bad_index];
+ const src = token_starts[token];
+ return mod.fail(scope, src + bad_index, "invalid string literal character: '{c}'", .{
+ bad_byte,
+ });
},
else => |e| return e,
};
-
- const src = tree.token_locs[str_lit.token].start;
- return addZIRInst(mod, scope, src, zir.Inst.Str, .{ .bytes = bytes }, .{});
+ return bytes;
}
-fn multilineStrLiteral(mod: *Module, scope: *Scope, node: *ast.Node.MultilineStringLiteral) !*zir.Inst {
+fn stringLiteral(
+ mod: *Module,
+ scope: *Scope,
+ rl: ResultLoc,
+ str_lit: ast.Node.Index,
+) InnerError!*zir.Inst {
const tree = scope.tree();
- const lines = node.linesConst();
- const src = tree.token_locs[lines[0]].start;
-
- // line lengths and new lines
- var len = lines.len - 1;
- for (lines) |line| {
- // 2 for the '//' + 1 for '\n'
- len += tree.tokenSlice(line).len - 3;
- }
+ const main_tokens = tree.nodes.items(.main_token);
+ const token_starts = tree.tokens.items(.start);
+
+ const str_lit_token = main_tokens[str_lit];
+ const bytes = try parseStringLiteral(mod, scope, str_lit_token);
+ const src = token_starts[str_lit_token];
+ const str_inst = try addZIRInst(mod, scope, src, zir.Inst.Str, .{ .bytes = bytes }, .{});
+ return rvalue(mod, scope, rl, str_inst);
+}
- const bytes = try scope.arena().alloc(u8, len);
- var i: usize = 0;
- for (lines) |line, line_i| {
- if (line_i != 0) {
- bytes[i] = '\n';
- i += 1;
+fn multilineStringLiteral(
+ mod: *Module,
+ scope: *Scope,
+ rl: ResultLoc,
+ str_lit: ast.Node.Index,
+) InnerError!*zir.Inst {
+ const tree = scope.tree();
+ const node_datas = tree.nodes.items(.data);
+ const main_tokens = tree.nodes.items(.main_token);
+ const token_starts = tree.tokens.items(.start);
+
+ const start = node_datas[str_lit].lhs;
+ const end = node_datas[str_lit].rhs;
+
+ // Count the number of bytes to allocate.
+ const len: usize = len: {
+ var tok_i = start;
+ var len: usize = end - start + 1;
+ while (tok_i <= end) : (tok_i += 1) {
+            // 2 for the '\\' + 1 for '\n'
+ len += tree.tokenSlice(tok_i).len - 3;
}
- const slice = tree.tokenSlice(line);
- mem.copy(u8, bytes[i..], slice[2 .. slice.len - 1]);
- i += slice.len - 3;
+ break :len len;
+ };
+ const bytes = try scope.arena().alloc(u8, len);
+ // First line: do not append a newline.
+ var byte_i: usize = 0;
+ var tok_i = start;
+ {
+ const slice = tree.tokenSlice(tok_i);
+ const line_bytes = slice[2 .. slice.len - 1];
+ mem.copy(u8, bytes[byte_i..], line_bytes);
+ byte_i += line_bytes.len;
+ tok_i += 1;
}
-
- return addZIRInst(mod, scope, src, zir.Inst.Str, .{ .bytes = bytes }, .{});
+ // Following lines: each line prepends a newline.
+ while (tok_i <= end) : (tok_i += 1) {
+ bytes[byte_i] = '\n';
+ byte_i += 1;
+ const slice = tree.tokenSlice(tok_i);
+ const line_bytes = slice[2 .. slice.len - 1];
+ mem.copy(u8, bytes[byte_i..], line_bytes);
+ byte_i += line_bytes.len;
+ }
+ const src = token_starts[start];
+ const str_inst = try addZIRInst(mod, scope, src, zir.Inst.Str, .{ .bytes = bytes }, .{});
+ return rvalue(mod, scope, rl, str_inst);
}
-fn charLiteral(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) !*zir.Inst {
+fn charLiteral(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) !*zir.Inst {
const tree = scope.tree();
- const src = tree.token_locs[node.token].start;
- const slice = tree.tokenSlice(node.token);
+ const main_tokens = tree.nodes.items(.main_token);
+ const main_token = main_tokens[node];
+ const token_starts = tree.tokens.items(.start);
+
+ const src = token_starts[main_token];
+ const slice = tree.tokenSlice(main_token);
var bad_index: usize = undefined;
const value = std.zig.parseCharLiteral(slice, &bad_index) catch |err| switch (err) {
@@ -2723,18 +3120,27 @@ fn charLiteral(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) !*zir.Inst
return mod.fail(scope, src + bad_index, "invalid character: '{c}'\n", .{bad_byte});
},
};
-
- return addZIRInstConst(mod, scope, src, .{
+ const result = try addZIRInstConst(mod, scope, src, .{
.ty = Type.initTag(.comptime_int),
.val = try Value.Tag.int_u64.create(scope.arena(), value),
});
+ return rvalue(mod, scope, rl, result);
}
-fn integerLiteral(mod: *Module, scope: *Scope, int_lit: *ast.Node.OneToken) InnerError!*zir.Inst {
+fn integerLiteral(
+ mod: *Module,
+ scope: *Scope,
+ rl: ResultLoc,
+ int_lit: ast.Node.Index,
+) InnerError!*zir.Inst {
const arena = scope.arena();
const tree = scope.tree();
- const prefixed_bytes = tree.tokenSlice(int_lit.token);
- const base = if (mem.startsWith(u8, prefixed_bytes, "0x"))
+ const main_tokens = tree.nodes.items(.main_token);
+ const token_starts = tree.tokens.items(.start);
+
+ const int_token = main_tokens[int_lit];
+ const prefixed_bytes = tree.tokenSlice(int_token);
+ const base: u8 = if (mem.startsWith(u8, prefixed_bytes, "0x"))
16
else if (mem.startsWith(u8, prefixed_bytes, "0o"))
8
@@ -2749,90 +3155,70 @@ fn integerLiteral(mod: *Module, scope: *Scope, int_lit: *ast.Node.OneToken) Inne
prefixed_bytes[2..];
if (std.fmt.parseInt(u64, bytes, base)) |small_int| {
- const src = tree.token_locs[int_lit.token].start;
- return addZIRInstConst(mod, scope, src, .{
+ const src = token_starts[int_token];
+ const result = try addZIRInstConst(mod, scope, src, .{
.ty = Type.initTag(.comptime_int),
.val = try Value.Tag.int_u64.create(arena, small_int),
});
+ return rvalue(mod, scope, rl, result);
} else |err| {
- return mod.failTok(scope, int_lit.token, "TODO implement int literals that don't fit in a u64", .{});
+ return mod.failTok(scope, int_token, "TODO implement int literals that don't fit in a u64", .{});
}
}
-fn floatLiteral(mod: *Module, scope: *Scope, float_lit: *ast.Node.OneToken) InnerError!*zir.Inst {
+fn floatLiteral(
+ mod: *Module,
+ scope: *Scope,
+ rl: ResultLoc,
+ float_lit: ast.Node.Index,
+) InnerError!*zir.Inst {
const arena = scope.arena();
const tree = scope.tree();
- const bytes = tree.tokenSlice(float_lit.token);
+ const main_tokens = tree.nodes.items(.main_token);
+ const token_starts = tree.tokens.items(.start);
+
+ const main_token = main_tokens[float_lit];
+ const bytes = tree.tokenSlice(main_token);
if (bytes.len > 2 and bytes[1] == 'x') {
- return mod.failTok(scope, float_lit.token, "TODO hex floats", .{});
+ return mod.failTok(scope, main_token, "TODO implement hex floats", .{});
}
-
const float_number = std.fmt.parseFloat(f128, bytes) catch |e| switch (e) {
error.InvalidCharacter => unreachable, // validated by tokenizer
};
- const src = tree.token_locs[float_lit.token].start;
- return addZIRInstConst(mod, scope, src, .{
+ const src = token_starts[main_token];
+ const result = try addZIRInstConst(mod, scope, src, .{
.ty = Type.initTag(.comptime_float),
.val = try Value.Tag.float_128.create(arena, float_number),
});
+ return rvalue(mod, scope, rl, result);
}
-fn undefLiteral(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) InnerError!*zir.Inst {
- const arena = scope.arena();
- const tree = scope.tree();
- const src = tree.token_locs[node.token].start;
- return addZIRInstConst(mod, scope, src, .{
- .ty = Type.initTag(.@"undefined"),
- .val = Value.initTag(.undef),
- });
-}
-
-fn boolLiteral(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) InnerError!*zir.Inst {
- const arena = scope.arena();
- const tree = scope.tree();
- const src = tree.token_locs[node.token].start;
- return addZIRInstConst(mod, scope, src, .{
- .ty = Type.initTag(.bool),
- .val = switch (tree.token_ids[node.token]) {
- .Keyword_true => Value.initTag(.bool_true),
- .Keyword_false => Value.initTag(.bool_false),
- else => unreachable,
- },
- });
-}
-
-fn nullLiteral(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) InnerError!*zir.Inst {
+fn asmExpr(mod: *Module, scope: *Scope, rl: ResultLoc, full: ast.full.Asm) InnerError!*zir.Inst {
const arena = scope.arena();
const tree = scope.tree();
- const src = tree.token_locs[node.token].start;
- return addZIRInstConst(mod, scope, src, .{
- .ty = Type.initTag(.@"null"),
- .val = Value.initTag(.null_value),
- });
-}
+ const main_tokens = tree.nodes.items(.main_token);
+ const token_starts = tree.tokens.items(.start);
+ const node_datas = tree.nodes.items(.data);
-fn assembly(mod: *Module, scope: *Scope, asm_node: *ast.Node.Asm) InnerError!*zir.Inst {
- if (asm_node.outputs.len != 0) {
- return mod.failNode(scope, &asm_node.base, "TODO implement asm with an output", .{});
+ if (full.outputs.len != 0) {
+ return mod.failTok(scope, full.ast.asm_token, "TODO implement asm with an output", .{});
}
- const arena = scope.arena();
- const tree = scope.tree();
- const inputs = try arena.alloc(*zir.Inst, asm_node.inputs.len);
- const args = try arena.alloc(*zir.Inst, asm_node.inputs.len);
-
- const src = tree.token_locs[asm_node.asm_token].start;
+ const inputs = try arena.alloc([]const u8, full.inputs.len);
+ const args = try arena.alloc(*zir.Inst, full.inputs.len);
+ const src = token_starts[full.ast.asm_token];
const str_type = try addZIRInstConst(mod, scope, src, .{
.ty = Type.initTag(.type),
.val = Value.initTag(.const_slice_u8_type),
});
const str_type_rl: ResultLoc = .{ .ty = str_type };
- for (asm_node.inputs) |input, i| {
+ for (full.inputs) |input, i| {
// TODO semantically analyze constraints
- inputs[i] = try expr(mod, scope, str_type_rl, input.constraint);
- args[i] = try expr(mod, scope, .none, input.expr);
+ const constraint_token = main_tokens[input] + 2;
+ inputs[i] = try parseStringLiteral(mod, scope, constraint_token);
+ args[i] = try expr(mod, scope, .none, node_datas[input].lhs);
}
const return_type = try addZIRInstConst(mod, scope, src, .{
@@ -2840,81 +3226,47 @@ fn assembly(mod: *Module, scope: *Scope, asm_node: *ast.Node.Asm) InnerError!*zi
.val = Value.initTag(.void_type),
});
const asm_inst = try addZIRInst(mod, scope, src, zir.Inst.Asm, .{
- .asm_source = try expr(mod, scope, str_type_rl, asm_node.template),
+ .asm_source = try expr(mod, scope, str_type_rl, full.ast.template),
.return_type = return_type,
}, .{
- .@"volatile" = asm_node.volatile_token != null,
+ .@"volatile" = full.volatile_token != null,
//.clobbers = TODO handle clobbers
.inputs = inputs,
.args = args,
});
- return asm_inst;
-}
-
-fn ensureBuiltinParamCount(mod: *Module, scope: *Scope, call: *ast.Node.BuiltinCall, count: u32) !void {
- if (call.params_len == count)
- return;
-
- const s = if (count == 1) "" else "s";
- return mod.failTok(scope, call.builtin_token, "expected {d} parameter{s}, found {d}", .{ count, s, call.params_len });
-}
-
-fn simpleCast(
- mod: *Module,
- scope: *Scope,
- rl: ResultLoc,
- call: *ast.Node.BuiltinCall,
- inst_tag: zir.Inst.Tag,
-) InnerError!*zir.Inst {
- try ensureBuiltinParamCount(mod, scope, call, 2);
- const tree = scope.tree();
- const src = tree.token_locs[call.builtin_token].start;
- const params = call.params();
- const dest_type = try typeExpr(mod, scope, params[0]);
- const rhs = try expr(mod, scope, .none, params[1]);
- const result = try addZIRBinOp(mod, scope, src, inst_tag, dest_type, rhs);
- return rvalue(mod, scope, rl, result);
-}
-
-fn ptrToInt(mod: *Module, scope: *Scope, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst {
- try ensureBuiltinParamCount(mod, scope, call, 1);
- const operand = try expr(mod, scope, .none, call.params()[0]);
- const tree = scope.tree();
- const src = tree.token_locs[call.builtin_token].start;
- return addZIRUnOp(mod, scope, src, .ptrtoint, operand);
+ return rvalue(mod, scope, rl, asm_inst);
}
fn as(
mod: *Module,
scope: *Scope,
rl: ResultLoc,
- call: *ast.Node.BuiltinCall,
+ builtin_token: ast.TokenIndex,
+ src: usize,
+ lhs: ast.Node.Index,
+ rhs: ast.Node.Index,
) InnerError!*zir.Inst {
- try ensureBuiltinParamCount(mod, scope, call, 2);
- const tree = scope.tree();
- const src = tree.token_locs[call.builtin_token].start;
- const params = call.params();
- const dest_type = try typeExpr(mod, scope, params[0]);
+ const dest_type = try typeExpr(mod, scope, lhs);
switch (rl) {
.none, .discard, .ref, .ty => {
- const result = try expr(mod, scope, .{ .ty = dest_type }, params[1]);
+ const result = try expr(mod, scope, .{ .ty = dest_type }, rhs);
return rvalue(mod, scope, rl, result);
},
.ptr => |result_ptr| {
- return asRlPtr(mod, scope, rl, src, result_ptr, params[1], dest_type);
+ return asRlPtr(mod, scope, rl, src, result_ptr, rhs, dest_type);
},
.block_ptr => |block_scope| {
- return asRlPtr(mod, scope, rl, src, block_scope.rl_ptr.?, params[1], dest_type);
+ return asRlPtr(mod, scope, rl, src, block_scope.rl_ptr.?, rhs, dest_type);
},
.bitcasted_ptr => |bitcasted_ptr| {
// TODO here we should be able to resolve the inference; we now have a type for the result.
- return mod.failTok(scope, call.builtin_token, "TODO implement @as with result location @bitCast", .{});
+ return mod.failTok(scope, builtin_token, "TODO implement @as with result location @bitCast", .{});
},
.inferred_ptr => |result_alloc| {
// TODO here we should be able to resolve the inference; we now have a type for the result.
- return mod.failTok(scope, call.builtin_token, "TODO implement @as with inferred-type result location pointer", .{});
+ return mod.failTok(scope, builtin_token, "TODO implement @as with inferred-type result location pointer", .{});
},
}
}
@@ -2925,7 +3277,7 @@ fn asRlPtr(
rl: ResultLoc,
src: usize,
result_ptr: *zir.Inst,
- operand_node: *ast.Node,
+ operand_node: ast.Node.Index,
dest_type: *zir.Inst,
) InnerError!*zir.Inst {
// Detect whether this expr() call goes into rvalue() to store the result into the
@@ -2963,155 +3315,295 @@ fn asRlPtr(
}
}
-fn bitCast(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst {
- try ensureBuiltinParamCount(mod, scope, call, 2);
- const tree = scope.tree();
- const src = tree.token_locs[call.builtin_token].start;
- const params = call.params();
- const dest_type = try typeExpr(mod, scope, params[0]);
+fn bitCast(
+ mod: *Module,
+ scope: *Scope,
+ rl: ResultLoc,
+ builtin_token: ast.TokenIndex,
+ src: usize,
+ lhs: ast.Node.Index,
+ rhs: ast.Node.Index,
+) InnerError!*zir.Inst {
+ const dest_type = try typeExpr(mod, scope, lhs);
switch (rl) {
.none => {
- const operand = try expr(mod, scope, .none, params[1]);
+ const operand = try expr(mod, scope, .none, rhs);
return addZIRBinOp(mod, scope, src, .bitcast, dest_type, operand);
},
.discard => {
- const operand = try expr(mod, scope, .none, params[1]);
+ const operand = try expr(mod, scope, .none, rhs);
const result = try addZIRBinOp(mod, scope, src, .bitcast, dest_type, operand);
_ = try addZIRUnOp(mod, scope, result.src, .ensure_result_non_error, result);
return result;
},
.ref => {
- const operand = try expr(mod, scope, .ref, params[1]);
+ const operand = try expr(mod, scope, .ref, rhs);
const result = try addZIRBinOp(mod, scope, src, .bitcast_ref, dest_type, operand);
return result;
},
.ty => |result_ty| {
- const result = try expr(mod, scope, .none, params[1]);
+ const result = try expr(mod, scope, .none, rhs);
const bitcasted = try addZIRBinOp(mod, scope, src, .bitcast, dest_type, result);
return addZIRBinOp(mod, scope, src, .as, result_ty, bitcasted);
},
.ptr => |result_ptr| {
const casted_result_ptr = try addZIRUnOp(mod, scope, src, .bitcast_result_ptr, result_ptr);
- return expr(mod, scope, .{ .bitcasted_ptr = casted_result_ptr.castTag(.bitcast_result_ptr).? }, params[1]);
+ return expr(mod, scope, .{ .bitcasted_ptr = casted_result_ptr.castTag(.bitcast_result_ptr).? }, rhs);
},
.bitcasted_ptr => |bitcasted_ptr| {
- return mod.failTok(scope, call.builtin_token, "TODO implement @bitCast with result location another @bitCast", .{});
+ return mod.failTok(scope, builtin_token, "TODO implement @bitCast with result location another @bitCast", .{});
},
.block_ptr => |block_ptr| {
- return mod.failTok(scope, call.builtin_token, "TODO implement @bitCast with result location inferred peer types", .{});
+ return mod.failTok(scope, builtin_token, "TODO implement @bitCast with result location inferred peer types", .{});
},
.inferred_ptr => |result_alloc| {
// TODO here we should be able to resolve the inference; we now have a type for the result.
- return mod.failTok(scope, call.builtin_token, "TODO implement @bitCast with inferred-type result location pointer", .{});
+ return mod.failTok(scope, builtin_token, "TODO implement @bitCast with inferred-type result location pointer", .{});
},
}
}
-fn import(mod: *Module, scope: *Scope, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst {
- try ensureBuiltinParamCount(mod, scope, call, 1);
- const tree = scope.tree();
- const src = tree.token_locs[call.builtin_token].start;
- const params = call.params();
- const target = try expr(mod, scope, .none, params[0]);
- return addZIRUnOp(mod, scope, src, .import, target);
-}
-
-fn compileError(mod: *Module, scope: *Scope, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst {
- try ensureBuiltinParamCount(mod, scope, call, 1);
- const tree = scope.tree();
- const src = tree.token_locs[call.builtin_token].start;
- const params = call.params();
- const target = try expr(mod, scope, .none, params[0]);
- return addZIRUnOp(mod, scope, src, .compile_error, target);
-}
-
-fn setEvalBranchQuota(mod: *Module, scope: *Scope, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst {
- try ensureBuiltinParamCount(mod, scope, call, 1);
- const tree = scope.tree();
- const src = tree.token_locs[call.builtin_token].start;
- const params = call.params();
- const u32_type = try addZIRInstConst(mod, scope, src, .{
- .ty = Type.initTag(.type),
- .val = Value.initTag(.u32_type),
- });
- const quota = try expr(mod, scope, .{ .ty = u32_type }, params[0]);
- return addZIRUnOp(mod, scope, src, .set_eval_branch_quota, quota);
-}
-
-fn typeOf(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst {
- const tree = scope.tree();
- const arena = scope.arena();
- const src = tree.token_locs[call.builtin_token].start;
- const params = call.params();
+fn typeOf(
+ mod: *Module,
+ scope: *Scope,
+ rl: ResultLoc,
+ builtin_token: ast.TokenIndex,
+ src: usize,
+ params: []const ast.Node.Index,
+) InnerError!*zir.Inst {
if (params.len < 1) {
- return mod.failTok(scope, call.builtin_token, "expected at least 1 argument, found 0", .{});
+ return mod.failTok(scope, builtin_token, "expected at least 1 argument, found 0", .{});
}
if (params.len == 1) {
return rvalue(mod, scope, rl, try addZIRUnOp(mod, scope, src, .typeof, try expr(mod, scope, .none, params[0])));
}
+ const arena = scope.arena();
var items = try arena.alloc(*zir.Inst, params.len);
for (params) |param, param_i|
items[param_i] = try expr(mod, scope, .none, param);
return rvalue(mod, scope, rl, try addZIRInst(mod, scope, src, zir.Inst.TypeOfPeer, .{ .items = items }, .{}));
}
-fn compileLog(mod: *Module, scope: *Scope, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst {
- const tree = scope.tree();
- const arena = scope.arena();
- const src = tree.token_locs[call.builtin_token].start;
- const params = call.params();
- var targets = try arena.alloc(*zir.Inst, params.len);
- for (params) |param, param_i|
- targets[param_i] = try expr(mod, scope, .none, param);
- return addZIRInst(mod, scope, src, zir.Inst.CompileLog, .{ .to_log = targets }, .{});
-}
-fn builtinCall(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.BuiltinCall) InnerError!*zir.Inst {
+fn builtinCall(
+ mod: *Module,
+ scope: *Scope,
+ rl: ResultLoc,
+ call: ast.Node.Index,
+ params: []const ast.Node.Index,
+) InnerError!*zir.Inst {
const tree = scope.tree();
- const builtin_name = tree.tokenSlice(call.builtin_token);
+ const main_tokens = tree.nodes.items(.main_token);
+ const token_starts = tree.tokens.items(.start);
+
+ const builtin_token = main_tokens[call];
+ const builtin_name = tree.tokenSlice(builtin_token);
// We handle the different builtins manually because they have different semantics depending
// on the function. For example, `@as` and others participate in result location semantics,
// and `@cImport` creates a special scope that collects a .c source code text buffer.
// Also, some builtins have a variable number of parameters.
- if (mem.eql(u8, builtin_name, "@ptrToInt")) {
- return rvalue(mod, scope, rl, try ptrToInt(mod, scope, call));
- } else if (mem.eql(u8, builtin_name, "@as")) {
- return as(mod, scope, rl, call);
- } else if (mem.eql(u8, builtin_name, "@floatCast")) {
- return simpleCast(mod, scope, rl, call, .floatcast);
- } else if (mem.eql(u8, builtin_name, "@intCast")) {
- return simpleCast(mod, scope, rl, call, .intcast);
- } else if (mem.eql(u8, builtin_name, "@bitCast")) {
- return bitCast(mod, scope, rl, call);
- } else if (mem.eql(u8, builtin_name, "@TypeOf")) {
- return typeOf(mod, scope, rl, call);
- } else if (mem.eql(u8, builtin_name, "@breakpoint")) {
- const src = tree.token_locs[call.builtin_token].start;
- return rvalue(mod, scope, rl, try addZIRNoOp(mod, scope, src, .breakpoint));
- } else if (mem.eql(u8, builtin_name, "@import")) {
- return rvalue(mod, scope, rl, try import(mod, scope, call));
- } else if (mem.eql(u8, builtin_name, "@compileError")) {
- return compileError(mod, scope, call);
- } else if (mem.eql(u8, builtin_name, "@setEvalBranchQuota")) {
- return setEvalBranchQuota(mod, scope, call);
- } else if (mem.eql(u8, builtin_name, "@compileLog")) {
- return compileLog(mod, scope, call);
- } else if (mem.eql(u8, builtin_name, "@field")) {
- return namedField(mod, scope, rl, call);
- } else {
- return mod.failTok(scope, call.builtin_token, "invalid builtin function: '{s}'", .{builtin_name});
+ const info = BuiltinFn.list.get(builtin_name) orelse {
+ return mod.failTok(scope, builtin_token, "invalid builtin function: '{s}'", .{
+ builtin_name,
+ });
+ };
+ if (info.param_count) |expected| {
+ if (expected != params.len) {
+ const s = if (expected == 1) "" else "s";
+ return mod.failTok(scope, builtin_token, "expected {d} parameter{s}, found {d}", .{
+ expected, s, params.len,
+ });
+ }
+ }
+ const src = token_starts[builtin_token];
+
+ switch (info.tag) {
+ .ptr_to_int => {
+ const operand = try expr(mod, scope, .none, params[0]);
+ const result = try addZIRUnOp(mod, scope, src, .ptrtoint, operand);
+ return rvalue(mod, scope, rl, result);
+ },
+ .float_cast => {
+ const dest_type = try typeExpr(mod, scope, params[0]);
+ const rhs = try expr(mod, scope, .none, params[1]);
+ const result = try addZIRBinOp(mod, scope, src, .floatcast, dest_type, rhs);
+ return rvalue(mod, scope, rl, result);
+ },
+ .int_cast => {
+ const dest_type = try typeExpr(mod, scope, params[0]);
+ const rhs = try expr(mod, scope, .none, params[1]);
+ const result = try addZIRBinOp(mod, scope, src, .intcast, dest_type, rhs);
+ return rvalue(mod, scope, rl, result);
+ },
+ .breakpoint => {
+ const result = try addZIRNoOp(mod, scope, src, .breakpoint);
+ return rvalue(mod, scope, rl, result);
+ },
+ .import => {
+ const target = try expr(mod, scope, .none, params[0]);
+ const result = try addZIRUnOp(mod, scope, src, .import, target);
+ return rvalue(mod, scope, rl, result);
+ },
+ .compile_error => {
+ const target = try expr(mod, scope, .none, params[0]);
+ const result = try addZIRUnOp(mod, scope, src, .compile_error, target);
+ return rvalue(mod, scope, rl, result);
+ },
+ .set_eval_branch_quota => {
+ const u32_type = try addZIRInstConst(mod, scope, src, .{
+ .ty = Type.initTag(.type),
+ .val = Value.initTag(.u32_type),
+ });
+ const quota = try expr(mod, scope, .{ .ty = u32_type }, params[0]);
+ const result = try addZIRUnOp(mod, scope, src, .set_eval_branch_quota, quota);
+ return rvalue(mod, scope, rl, result);
+ },
+ .compile_log => {
+ const arena = scope.arena();
+ var targets = try arena.alloc(*zir.Inst, params.len);
+ for (params) |param, param_i|
+ targets[param_i] = try expr(mod, scope, .none, param);
+ const result = try addZIRInst(mod, scope, src, zir.Inst.CompileLog, .{ .to_log = targets }, .{});
+ return rvalue(mod, scope, rl, result);
+ },
+ .field => {
+ const string_type = try addZIRInstConst(mod, scope, src, .{
+ .ty = Type.initTag(.type),
+ .val = Value.initTag(.const_slice_u8_type),
+ });
+ const string_rl: ResultLoc = .{ .ty = string_type };
+
+ if (rl == .ref) {
+ return addZirInstTag(mod, scope, src, .field_ptr_named, .{
+ .object = try expr(mod, scope, .ref, params[0]),
+ .field_name = try comptimeExpr(mod, scope, string_rl, params[1]),
+ });
+ }
+ return rvalue(mod, scope, rl, try addZirInstTag(mod, scope, src, .field_val_named, .{
+ .object = try expr(mod, scope, .none, params[0]),
+ .field_name = try comptimeExpr(mod, scope, string_rl, params[1]),
+ }));
+ },
+ .as => return as(mod, scope, rl, builtin_token, src, params[0], params[1]),
+ .bit_cast => return bitCast(mod, scope, rl, builtin_token, src, params[0], params[1]),
+ .TypeOf => return typeOf(mod, scope, rl, builtin_token, src, params),
+
+ .add_with_overflow,
+ .align_cast,
+ .align_of,
+ .async_call,
+ .atomic_load,
+ .atomic_rmw,
+ .atomic_store,
+ .bit_offset_of,
+ .bool_to_int,
+ .bit_size_of,
+ .mul_add,
+ .byte_swap,
+ .bit_reverse,
+ .byte_offset_of,
+ .call,
+ .c_define,
+ .c_import,
+ .c_include,
+ .clz,
+ .cmpxchg_strong,
+ .cmpxchg_weak,
+ .ctz,
+ .c_undef,
+ .div_exact,
+ .div_floor,
+ .div_trunc,
+ .embed_file,
+ .enum_to_int,
+ .error_name,
+ .error_return_trace,
+ .error_to_int,
+ .err_set_cast,
+ .@"export",
+ .fence,
+ .field_parent_ptr,
+ .float_to_int,
+ .frame,
+ .Frame,
+ .frame_address,
+ .frame_size,
+ .has_decl,
+ .has_field,
+ .int_to_enum,
+ .int_to_error,
+ .int_to_float,
+ .int_to_ptr,
+ .memcpy,
+ .memset,
+ .wasm_memory_size,
+ .wasm_memory_grow,
+ .mod,
+ .mul_with_overflow,
+ .panic,
+ .pop_count,
+ .ptr_cast,
+ .rem,
+ .return_address,
+ .set_align_stack,
+ .set_cold,
+ .set_float_mode,
+ .set_runtime_safety,
+ .shl_exact,
+ .shl_with_overflow,
+ .shr_exact,
+ .shuffle,
+ .size_of,
+ .splat,
+ .reduce,
+ .src,
+ .sqrt,
+ .sin,
+ .cos,
+ .exp,
+ .exp2,
+ .log,
+ .log2,
+ .log10,
+ .fabs,
+ .floor,
+ .ceil,
+ .trunc,
+ .round,
+ .sub_with_overflow,
+ .tag_name,
+ .This,
+ .truncate,
+ .Type,
+ .type_info,
+ .type_name,
+ .union_init,
+ => return mod.failTok(scope, builtin_token, "TODO: implement builtin function {s}", .{
+ builtin_name,
+ }),
}
}
-fn callExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Call) InnerError!*zir.Inst {
+fn callExpr(
+ mod: *Module,
+ scope: *Scope,
+ rl: ResultLoc,
+ call: ast.full.Call,
+) InnerError!*zir.Inst {
+ if (call.async_token) |async_token| {
+ return mod.failTok(scope, async_token, "TODO implement async fn call", .{});
+ }
+
const tree = scope.tree();
- const lhs = try expr(mod, scope, .none, node.lhs);
+ const main_tokens = tree.nodes.items(.main_token);
+ const token_starts = tree.tokens.items(.start);
- const param_nodes = node.params();
- const args = try scope.getGenZIR().arena.alloc(*zir.Inst, param_nodes.len);
- for (param_nodes) |param_node, i| {
- const param_src = tree.token_locs[param_node.firstToken()].start;
+ const lhs = try expr(mod, scope, .none, call.ast.fn_expr);
+
+ const args = try scope.getGenZIR().arena.alloc(*zir.Inst, call.ast.params.len);
+ for (call.ast.params) |param_node, i| {
+ const param_src = token_starts[tree.firstToken(param_node)];
const param_type = try addZIRInst(mod, scope, param_src, zir.Inst.ParamType, .{
.func = lhs,
.arg_index = i,
@@ -3119,7 +3611,7 @@ fn callExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Call) In
args[i] = try expr(mod, scope, .{ .ty = param_type }, param_node);
}
- const src = tree.token_locs[node.lhs.firstToken()].start;
+ const src = token_starts[call.ast.lparen];
const result = try addZIRInst(mod, scope, src, zir.Inst.Call, .{
.func = lhs,
.args = args,
@@ -3128,288 +3620,244 @@ fn callExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Call) In
return rvalue(mod, scope, rl, result);
}
-fn unreach(mod: *Module, scope: *Scope, unreach_node: *ast.Node.OneToken) InnerError!*zir.Inst {
+pub const simple_types = std.ComptimeStringMap(Value.Tag, .{
+ .{ "u8", .u8_type },
+ .{ "i8", .i8_type },
+ .{ "isize", .isize_type },
+ .{ "usize", .usize_type },
+ .{ "c_short", .c_short_type },
+ .{ "c_ushort", .c_ushort_type },
+ .{ "c_int", .c_int_type },
+ .{ "c_uint", .c_uint_type },
+ .{ "c_long", .c_long_type },
+ .{ "c_ulong", .c_ulong_type },
+ .{ "c_longlong", .c_longlong_type },
+ .{ "c_ulonglong", .c_ulonglong_type },
+ .{ "c_longdouble", .c_longdouble_type },
+ .{ "f16", .f16_type },
+ .{ "f32", .f32_type },
+ .{ "f64", .f64_type },
+ .{ "f128", .f128_type },
+ .{ "c_void", .c_void_type },
+ .{ "bool", .bool_type },
+ .{ "void", .void_type },
+ .{ "type", .type_type },
+ .{ "anyerror", .anyerror_type },
+ .{ "comptime_int", .comptime_int_type },
+ .{ "comptime_float", .comptime_float_type },
+ .{ "noreturn", .noreturn_type },
+});
+
+fn nodeMayNeedMemoryLocation(scope: *Scope, start_node: ast.Node.Index) bool {
const tree = scope.tree();
- const src = tree.token_locs[unreach_node.token].start;
- return addZIRNoOp(mod, scope, src, .unreachable_safe);
-}
-
-fn getSimplePrimitiveValue(name: []const u8) ?TypedValue {
- const simple_types = std.ComptimeStringMap(Value.Tag, .{
- .{ "u8", .u8_type },
- .{ "i8", .i8_type },
- .{ "isize", .isize_type },
- .{ "usize", .usize_type },
- .{ "c_short", .c_short_type },
- .{ "c_ushort", .c_ushort_type },
- .{ "c_int", .c_int_type },
- .{ "c_uint", .c_uint_type },
- .{ "c_long", .c_long_type },
- .{ "c_ulong", .c_ulong_type },
- .{ "c_longlong", .c_longlong_type },
- .{ "c_ulonglong", .c_ulonglong_type },
- .{ "c_longdouble", .c_longdouble_type },
- .{ "f16", .f16_type },
- .{ "f32", .f32_type },
- .{ "f64", .f64_type },
- .{ "f128", .f128_type },
- .{ "c_void", .c_void_type },
- .{ "bool", .bool_type },
- .{ "void", .void_type },
- .{ "type", .type_type },
- .{ "anyerror", .anyerror_type },
- .{ "comptime_int", .comptime_int_type },
- .{ "comptime_float", .comptime_float_type },
- .{ "noreturn", .noreturn_type },
- });
- if (simple_types.get(name)) |tag| {
- return TypedValue{
- .ty = Type.initTag(.type),
- .val = Value.initTag(tag),
- };
- }
- return null;
-}
+ const node_tags = tree.nodes.items(.tag);
+ const node_datas = tree.nodes.items(.data);
+ const main_tokens = tree.nodes.items(.main_token);
+ const token_tags = tree.tokens.items(.tag);
-fn nodeMayNeedMemoryLocation(start_node: *ast.Node, scope: *Scope) bool {
var node = start_node;
while (true) {
- switch (node.tag) {
- .Root,
- .Use,
- .TestDecl,
- .DocComment,
- .SwitchCase,
- .SwitchElse,
- .Else,
- .Payload,
- .PointerPayload,
- .PointerIndexPayload,
- .ContainerField,
- .ErrorTag,
- .FieldInitializer,
+ switch (node_tags[node]) {
+ .root,
+ .@"usingnamespace",
+ .test_decl,
+ .switch_case,
+ .switch_case_one,
+ .container_field_init,
+ .container_field_align,
+ .container_field,
+ .asm_output,
+ .asm_input,
=> unreachable,
- .Return,
- .Break,
- .Continue,
- .BitNot,
- .BoolNot,
- .VarDecl,
- .Defer,
- .AddressOf,
- .OptionalType,
- .Negation,
- .NegationWrap,
- .Resume,
- .ArrayType,
- .ArrayTypeSentinel,
- .PtrType,
- .SliceType,
- .Suspend,
- .AnyType,
- .ErrorType,
- .FnProto,
- .AnyFrameType,
- .IntegerLiteral,
- .FloatLiteral,
- .EnumLiteral,
- .StringLiteral,
- .MultilineStringLiteral,
- .CharLiteral,
- .BoolLiteral,
- .NullLiteral,
- .UndefinedLiteral,
- .Unreachable,
- .Identifier,
- .ErrorSetDecl,
- .ContainerDecl,
- .Asm,
- .Add,
- .AddWrap,
- .ArrayCat,
- .ArrayMult,
- .Assign,
- .AssignBitAnd,
- .AssignBitOr,
- .AssignBitShiftLeft,
- .AssignBitShiftRight,
- .AssignBitXor,
- .AssignDiv,
- .AssignSub,
- .AssignSubWrap,
- .AssignMod,
- .AssignAdd,
- .AssignAddWrap,
- .AssignMul,
- .AssignMulWrap,
- .BangEqual,
- .BitAnd,
- .BitOr,
- .BitShiftLeft,
- .BitShiftRight,
- .BitXor,
- .BoolAnd,
- .BoolOr,
- .Div,
- .EqualEqual,
- .ErrorUnion,
- .GreaterOrEqual,
- .GreaterThan,
- .LessOrEqual,
- .LessThan,
- .MergeErrorSets,
- .Mod,
- .Mul,
- .MulWrap,
- .Range,
- .Period,
- .Sub,
- .SubWrap,
- .Slice,
- .Deref,
- .ArrayAccess,
- .Block,
+ .@"return",
+ .@"break",
+ .@"continue",
+ .bit_not,
+ .bool_not,
+ .global_var_decl,
+ .local_var_decl,
+ .simple_var_decl,
+ .aligned_var_decl,
+ .@"defer",
+ .@"errdefer",
+ .address_of,
+ .optional_type,
+ .negation,
+ .negation_wrap,
+ .@"resume",
+ .array_type,
+ .array_type_sentinel,
+ .ptr_type_aligned,
+ .ptr_type_sentinel,
+ .ptr_type,
+ .ptr_type_bit_range,
+ .@"suspend",
+ .@"anytype",
+ .fn_proto_simple,
+ .fn_proto_multi,
+ .fn_proto_one,
+ .fn_proto,
+ .fn_decl,
+ .anyframe_type,
+ .anyframe_literal,
+ .integer_literal,
+ .float_literal,
+ .enum_literal,
+ .string_literal,
+ .multiline_string_literal,
+ .char_literal,
+ .true_literal,
+ .false_literal,
+ .null_literal,
+ .undefined_literal,
+ .unreachable_literal,
+ .identifier,
+ .error_set_decl,
+ .container_decl,
+ .container_decl_trailing,
+ .container_decl_two,
+ .container_decl_two_trailing,
+ .container_decl_arg,
+ .container_decl_arg_trailing,
+ .tagged_union,
+ .tagged_union_trailing,
+ .tagged_union_two,
+ .tagged_union_two_trailing,
+ .tagged_union_enum_tag,
+ .tagged_union_enum_tag_trailing,
+ .@"asm",
+ .asm_simple,
+ .add,
+ .add_wrap,
+ .array_cat,
+ .array_mult,
+ .assign,
+ .assign_bit_and,
+ .assign_bit_or,
+ .assign_bit_shift_left,
+ .assign_bit_shift_right,
+ .assign_bit_xor,
+ .assign_div,
+ .assign_sub,
+ .assign_sub_wrap,
+ .assign_mod,
+ .assign_add,
+ .assign_add_wrap,
+ .assign_mul,
+ .assign_mul_wrap,
+ .bang_equal,
+ .bit_and,
+ .bit_or,
+ .bit_shift_left,
+ .bit_shift_right,
+ .bit_xor,
+ .bool_and,
+ .bool_or,
+ .div,
+ .equal_equal,
+ .error_union,
+ .greater_or_equal,
+ .greater_than,
+ .less_or_equal,
+ .less_than,
+ .merge_error_sets,
+ .mod,
+ .mul,
+ .mul_wrap,
+ .switch_range,
+ .field_access,
+ .sub,
+ .sub_wrap,
+ .slice,
+ .slice_open,
+ .slice_sentinel,
+ .deref,
+ .array_access,
+ .error_value,
+ .while_simple, // This variant cannot have an else expression.
+ .while_cont, // This variant cannot have an else expression.
+ .for_simple, // This variant cannot have an else expression.
+ .if_simple, // This variant cannot have an else expression.
=> return false,
- // Forward the question to a sub-expression.
- .GroupedExpression => node = node.castTag(.GroupedExpression).?.expr,
- .Try => node = node.castTag(.Try).?.rhs,
- .Await => node = node.castTag(.Await).?.rhs,
- .Catch => node = node.castTag(.Catch).?.rhs,
- .OrElse => node = node.castTag(.OrElse).?.rhs,
- .Comptime => node = node.castTag(.Comptime).?.expr,
- .Nosuspend => node = node.castTag(.Nosuspend).?.expr,
- .UnwrapOptional => node = node.castTag(.UnwrapOptional).?.lhs,
+ // Forward the question to the LHS sub-expression.
+ .grouped_expression,
+ .@"try",
+ .@"await",
+ .@"comptime",
+ .@"nosuspend",
+ .unwrap_optional,
+ => node = node_datas[node].lhs,
+
+ // Forward the question to the RHS sub-expression.
+ .@"catch",
+ .@"orelse",
+ => node = node_datas[node].rhs,
// True because these are exactly the expressions we need memory locations for.
- .ArrayInitializer,
- .ArrayInitializerDot,
- .StructInitializer,
- .StructInitializerDot,
+ .array_init_one,
+ .array_init_one_comma,
+ .array_init_dot_two,
+ .array_init_dot_two_comma,
+ .array_init_dot,
+ .array_init_dot_comma,
+ .array_init,
+ .array_init_comma,
+ .struct_init_one,
+ .struct_init_one_comma,
+ .struct_init_dot_two,
+ .struct_init_dot_two_comma,
+ .struct_init_dot,
+ .struct_init_dot_comma,
+ .struct_init,
+ .struct_init_comma,
=> return true,
// True because depending on comptime conditions, sub-expressions
// may be the kind that need memory locations.
- .While,
- .For,
- .Switch,
- .Call,
- .LabeledBlock,
+ .@"while", // This variant always has an else expression.
+ .@"if", // This variant always has an else expression.
+ .@"for", // This variant always has an else expression.
+ .@"switch",
+ .switch_comma,
+ .call_one,
+ .call_one_comma,
+ .async_call_one,
+ .async_call_one_comma,
+ .call,
+ .call_comma,
+ .async_call,
+ .async_call_comma,
=> return true,
- .BuiltinCall => {
- @setEvalBranchQuota(5000);
- const builtin_needs_mem_loc = std.ComptimeStringMap(bool, .{
- .{ "@addWithOverflow", false },
- .{ "@alignCast", false },
- .{ "@alignOf", false },
- .{ "@as", true },
- .{ "@asyncCall", false },
- .{ "@atomicLoad", false },
- .{ "@atomicRmw", false },
- .{ "@atomicStore", false },
- .{ "@bitCast", true },
- .{ "@bitOffsetOf", false },
- .{ "@boolToInt", false },
- .{ "@bitSizeOf", false },
- .{ "@breakpoint", false },
- .{ "@mulAdd", false },
- .{ "@byteSwap", false },
- .{ "@bitReverse", false },
- .{ "@byteOffsetOf", false },
- .{ "@call", true },
- .{ "@cDefine", false },
- .{ "@cImport", false },
- .{ "@cInclude", false },
- .{ "@clz", false },
- .{ "@cmpxchgStrong", false },
- .{ "@cmpxchgWeak", false },
- .{ "@compileError", false },
- .{ "@compileLog", false },
- .{ "@ctz", false },
- .{ "@cUndef", false },
- .{ "@divExact", false },
- .{ "@divFloor", false },
- .{ "@divTrunc", false },
- .{ "@embedFile", false },
- .{ "@enumToInt", false },
- .{ "@errorName", false },
- .{ "@errorReturnTrace", false },
- .{ "@errorToInt", false },
- .{ "@errSetCast", false },
- .{ "@export", false },
- .{ "@fence", false },
- .{ "@field", true },
- .{ "@fieldParentPtr", false },
- .{ "@floatCast", false },
- .{ "@floatToInt", false },
- .{ "@frame", false },
- .{ "@Frame", false },
- .{ "@frameAddress", false },
- .{ "@frameSize", false },
- .{ "@hasDecl", false },
- .{ "@hasField", false },
- .{ "@import", false },
- .{ "@intCast", false },
- .{ "@intToEnum", false },
- .{ "@intToError", false },
- .{ "@intToFloat", false },
- .{ "@intToPtr", false },
- .{ "@memcpy", false },
- .{ "@memset", false },
- .{ "@wasmMemorySize", false },
- .{ "@wasmMemoryGrow", false },
- .{ "@mod", false },
- .{ "@mulWithOverflow", false },
- .{ "@panic", false },
- .{ "@popCount", false },
- .{ "@ptrCast", false },
- .{ "@ptrToInt", false },
- .{ "@rem", false },
- .{ "@returnAddress", false },
- .{ "@setAlignStack", false },
- .{ "@setCold", false },
- .{ "@setEvalBranchQuota", false },
- .{ "@setFloatMode", false },
- .{ "@setRuntimeSafety", false },
- .{ "@shlExact", false },
- .{ "@shlWithOverflow", false },
- .{ "@shrExact", false },
- .{ "@shuffle", false },
- .{ "@sizeOf", false },
- .{ "@splat", true },
- .{ "@reduce", false },
- .{ "@src", true },
- .{ "@sqrt", false },
- .{ "@sin", false },
- .{ "@cos", false },
- .{ "@exp", false },
- .{ "@exp2", false },
- .{ "@log", false },
- .{ "@log2", false },
- .{ "@log10", false },
- .{ "@fabs", false },
- .{ "@floor", false },
- .{ "@ceil", false },
- .{ "@trunc", false },
- .{ "@round", false },
- .{ "@subWithOverflow", false },
- .{ "@tagName", false },
- .{ "@This", false },
- .{ "@truncate", false },
- .{ "@Type", false },
- .{ "@typeInfo", false },
- .{ "@typeName", false },
- .{ "@TypeOf", false },
- .{ "@unionInit", true },
- });
- const name = scope.tree().tokenSlice(node.castTag(.BuiltinCall).?.builtin_token);
- return builtin_needs_mem_loc.get(name).?;
+ .block_two,
+ .block_two_semicolon,
+ .block,
+ .block_semicolon,
+ => {
+ const lbrace = main_tokens[node];
+ if (token_tags[lbrace - 1] == .colon) {
+ // Labeled blocks may need a memory location to forward
+ // to their break statements.
+ return true;
+ } else {
+ return false;
+ }
},
- // Depending on AST properties, they may need memory locations.
- .If => return node.castTag(.If).?.@"else" != null,
+ .builtin_call,
+ .builtin_call_comma,
+ .builtin_call_two,
+ .builtin_call_two_comma,
+ => {
+ const builtin_token = main_tokens[node];
+ const builtin_name = tree.tokenSlice(builtin_token);
+ // If the builtin is an invalid name, we don't cause an error here; instead
+ // let it pass, and the error will be "invalid builtin function" later.
+ const builtin_info = BuiltinFn.list.get(builtin_name) orelse return false;
+ return builtin_info.needs_mem_loc;
+ },
}
}
}
@@ -3450,8 +3898,18 @@ fn rvalue(mod: *Module, scope: *Scope, rl: ResultLoc, result: *zir.Inst) InnerEr
}
}
-fn rvalueVoid(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node, result: void) InnerError!*zir.Inst {
- const src = scope.tree().token_locs[node.firstToken()].start;
+/// TODO when reworking ZIR memory layout, make the void value correspond to a hard coded
+/// index; that way this does not actually need to allocate anything.
+fn rvalueVoid(
+ mod: *Module,
+ scope: *Scope,
+ rl: ResultLoc,
+ node: ast.Node.Index,
+ result: void,
+) InnerError!*zir.Inst {
+ const tree = scope.tree();
+ const main_tokens = tree.nodes.items(.main_token);
+ const src = tree.tokens.items(.start)[tree.firstToken(node)];
const void_inst = try addZIRInstConst(mod, scope, src, .{
.ty = Type.initTag(.void),
.val = Value.initTag(.void_value),
@@ -3547,6 +4005,29 @@ pub fn addZirInstTag(
return &inst.base;
}
+pub fn addZirInstT(
+ mod: *Module,
+ scope: *Scope,
+ src: usize,
+ comptime T: type,
+ tag: zir.Inst.Tag,
+ positionals: std.meta.fieldInfo(T, .positionals).field_type,
+) !*T {
+ const gen_zir = scope.getGenZIR();
+ try gen_zir.instructions.ensureCapacity(mod.gpa, gen_zir.instructions.items.len + 1);
+ const inst = try gen_zir.arena.create(T);
+ inst.* = .{
+ .base = .{
+ .tag = tag,
+ .src = src,
+ },
+ .positionals = positionals,
+ .kw_args = .{},
+ };
+ gen_zir.instructions.appendAssumeCapacity(&inst.base);
+ return inst;
+}
+
pub fn addZIRInstSpecial(
mod: *Module,
scope: *Scope,
diff --git a/src/clang.zig b/src/clang.zig
index 954cfee6b289..270f477ddb17 100644
--- a/src/clang.zig
+++ b/src/clang.zig
@@ -127,6 +127,9 @@ pub const APSInt = opaque {
pub const getNumWords = ZigClangAPSInt_getNumWords;
extern fn ZigClangAPSInt_getNumWords(*const APSInt) c_uint;
+
+ pub const lessThanEqual = ZigClangAPSInt_lessThanEqual;
+ extern fn ZigClangAPSInt_lessThanEqual(*const APSInt, rhs: u64) bool;
};
pub const ASTContext = opaque {
@@ -270,12 +273,12 @@ pub const CompoundAssignOperator = opaque {
pub const CompoundStmt = opaque {
pub const body_begin = ZigClangCompoundStmt_body_begin;
- extern fn ZigClangCompoundStmt_body_begin(*const CompoundStmt) const_body_iterator;
+ extern fn ZigClangCompoundStmt_body_begin(*const CompoundStmt) ConstBodyIterator;
pub const body_end = ZigClangCompoundStmt_body_end;
- extern fn ZigClangCompoundStmt_body_end(*const CompoundStmt) const_body_iterator;
+ extern fn ZigClangCompoundStmt_body_end(*const CompoundStmt) ConstBodyIterator;
- pub const const_body_iterator = [*]const *Stmt;
+ pub const ConstBodyIterator = [*]const *Stmt;
};
pub const ConditionalOperator = opaque {};
@@ -407,7 +410,7 @@ pub const Expr = opaque {
pub const getBeginLoc = ZigClangExpr_getBeginLoc;
extern fn ZigClangExpr_getBeginLoc(*const Expr) SourceLocation;
- pub const EvaluateAsConstantExpr = ZigClangExpr_EvaluateAsConstantExpr;
+ pub const evaluateAsConstantExpr = ZigClangExpr_EvaluateAsConstantExpr;
extern fn ZigClangExpr_EvaluateAsConstantExpr(*const Expr, *ExprEvalResult, Expr_ConstExprUsage, *const ASTContext) bool;
};
@@ -694,8 +697,6 @@ pub const ReturnStmt = opaque {
extern fn ZigClangReturnStmt_getRetValue(*const ReturnStmt) ?*const Expr;
};
-pub const SkipFunctionBodiesScope = opaque {};
-
pub const SourceManager = opaque {
pub const getSpellingLoc = ZigClangSourceManager_getSpellingLoc;
extern fn ZigClangSourceManager_getSpellingLoc(*const SourceManager, Loc: SourceLocation) SourceLocation;
diff --git a/src/codegen.zig b/src/codegen.zig
index ea08b80092ee..779366cc23cb 100644
--- a/src/codegen.zig
+++ b/src/codegen.zig
@@ -451,11 +451,16 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
const src_data: struct { lbrace_src: usize, rbrace_src: usize, source: []const u8 } = blk: {
const container_scope = module_fn.owner_decl.container;
- const tree = container_scope.file_scope.contents.tree;
- const fn_proto = tree.root_node.decls()[module_fn.owner_decl.src_index].castTag(.FnProto).?;
- const block = fn_proto.getBodyNode().?.castTag(.Block).?;
- const lbrace_src = tree.token_locs[block.lbrace].start;
- const rbrace_src = tree.token_locs[block.rbrace].start;
+ const tree = container_scope.file_scope.tree;
+ const node_tags = tree.nodes.items(.tag);
+ const node_datas = tree.nodes.items(.data);
+ const token_starts = tree.tokens.items(.start);
+
+ const fn_decl = tree.rootDecls()[module_fn.owner_decl.src_index];
+ assert(node_tags[fn_decl] == .fn_decl);
+ const block = node_datas[fn_decl].rhs;
+ const lbrace_src = token_starts[tree.firstToken(block)];
+ const rbrace_src = token_starts[tree.lastToken(block)];
break :blk .{
.lbrace_src = lbrace_src,
.rbrace_src = rbrace_src,
diff --git a/src/codegen/c.zig b/src/codegen/c.zig
index cb3271a57fb9..d8c81ad0e4a5 100644
--- a/src/codegen/c.zig
+++ b/src/codegen/c.zig
@@ -1,4 +1,5 @@
const std = @import("std");
+const assert = std.debug.assert;
const mem = std.mem;
const log = std.log.scoped(.c);
@@ -42,7 +43,7 @@ pub const Object = struct {
next_arg_index: usize = 0,
next_local_index: usize = 0,
next_block_index: usize = 0,
- indent_writer: std.io.AutoIndentingStream(std.ArrayList(u8).Writer),
+ indent_writer: IndentWriter(std.ArrayList(u8).Writer),
fn resolveInst(o: *Object, inst: *Inst) !CValue {
if (inst.value()) |_| {
@@ -63,7 +64,7 @@ pub const Object = struct {
return local_value;
}
- fn writer(o: *Object) std.io.AutoIndentingStream(std.ArrayList(u8).Writer).Writer {
+ fn writer(o: *Object) IndentWriter(std.ArrayList(u8).Writer).Writer {
return o.indent_writer.writer();
}
@@ -796,3 +797,56 @@ fn genAsm(o: *Object, as: *Inst.Assembly) !CValue {
return o.dg.fail(o.dg.decl.src(), "TODO: C backend: inline asm expression result used", .{});
}
+
+fn IndentWriter(comptime UnderlyingWriter: type) type {
+ return struct {
+ const Self = @This();
+ pub const Error = UnderlyingWriter.Error;
+ pub const Writer = std.io.Writer(*Self, Error, write);
+
+ pub const indent_delta = 4;
+
+ underlying_writer: UnderlyingWriter,
+ indent_count: usize = 0,
+ current_line_empty: bool = true,
+
+ pub fn writer(self: *Self) Writer {
+ return .{ .context = self };
+ }
+
+ pub fn write(self: *Self, bytes: []const u8) Error!usize {
+ if (bytes.len == 0) return @as(usize, 0);
+
+ const current_indent = self.indent_count * Self.indent_delta;
+ if (self.current_line_empty and current_indent > 0) {
+ try self.underlying_writer.writeByteNTimes(' ', current_indent);
+ }
+ self.current_line_empty = false;
+
+ return self.writeNoIndent(bytes);
+ }
+
+ pub fn insertNewline(self: *Self) Error!void {
+ _ = try self.writeNoIndent("\n");
+ }
+
+ pub fn pushIndent(self: *Self) void {
+ self.indent_count += 1;
+ }
+
+ pub fn popIndent(self: *Self) void {
+ assert(self.indent_count != 0);
+ self.indent_count -= 1;
+ }
+
+ fn writeNoIndent(self: *Self, bytes: []const u8) Error!usize {
+ if (bytes.len == 0) return @as(usize, 0);
+
+ try self.underlying_writer.writeAll(bytes);
+ if (bytes[bytes.len - 1] == '\n') {
+ self.current_line_empty = true;
+ }
+ return bytes.len;
+ }
+ };
+}
diff --git a/src/ir.zig b/src/ir.zig
index 0e83dbfd5608..a0b33fba73e1 100644
--- a/src/ir.zig
+++ b/src/ir.zig
@@ -317,6 +317,7 @@ pub const Inst = struct {
pub const base_tag = Tag.arg;
base: Inst,
+ /// This exists to be emitted into debug info.
name: [*:0]const u8,
pub fn operandCount(self: *const Arg) usize {
diff --git a/src/link.zig b/src/link.zig
index 3d0a54f41699..0a4cde0284a3 100644
--- a/src/link.zig
+++ b/src/link.zig
@@ -550,11 +550,11 @@ pub const File = struct {
id_symlink_basename,
&prev_digest_buf,
) catch |err| b: {
- log.debug("archive new_digest={} readFile error: {s}", .{ digest, @errorName(err) });
+ log.debug("archive new_digest={x} readFile error: {s}", .{ digest, @errorName(err) });
break :b prev_digest_buf[0..0];
};
if (mem.eql(u8, prev_digest, &digest)) {
- log.debug("archive digest={} match - skipping invocation", .{digest});
+ log.debug("archive digest={x} match - skipping invocation", .{digest});
base.lock = man.toOwnedLock();
return;
}
diff --git a/src/link/C.zig b/src/link/C.zig
index 765249cd7d45..8fb3637cbee1 100644
--- a/src/link/C.zig
+++ b/src/link/C.zig
@@ -97,7 +97,7 @@ pub fn updateDecl(self: *C, module: *Module, decl: *Module.Decl) !void {
.value_map = codegen.CValueMap.init(module.gpa),
.indent_writer = undefined, // set later so we can get a pointer to object.code
};
- object.indent_writer = std.io.autoIndentingStream(4, object.code.writer());
+ object.indent_writer = .{ .underlying_writer = object.code.writer() };
defer object.value_map.deinit();
defer object.code.deinit();
defer object.dg.fwd_decl.deinit();
diff --git a/src/link/Coff.zig b/src/link/Coff.zig
index 0dfa3d54a1a7..2eee19b4f6d4 100644
--- a/src/link/Coff.zig
+++ b/src/link/Coff.zig
@@ -892,17 +892,17 @@ fn linkWithLLD(self: *Coff, comp: *Compilation) !void {
id_symlink_basename,
&prev_digest_buf,
) catch |err| blk: {
- log.debug("COFF LLD new_digest={} error: {s}", .{ digest, @errorName(err) });
+ log.debug("COFF LLD new_digest={x} error: {s}", .{ digest, @errorName(err) });
// Handle this as a cache miss.
break :blk prev_digest_buf[0..0];
};
if (mem.eql(u8, prev_digest, &digest)) {
- log.debug("COFF LLD digest={} match - skipping invocation", .{digest});
+ log.debug("COFF LLD digest={x} match - skipping invocation", .{digest});
// Hot diggity dog! The output binary is already there.
self.base.lock = man.toOwnedLock();
return;
}
- log.debug("COFF LLD prev_digest={} new_digest={}", .{ prev_digest, digest });
+ log.debug("COFF LLD prev_digest={x} new_digest={x}", .{ prev_digest, digest });
// We are about to change the output file to be different, so we invalidate the build hash now.
directory.handle.deleteFile(id_symlink_basename) catch |err| switch (err) {
diff --git a/src/link/Elf.zig b/src/link/Elf.zig
index 18f3f5771251..e1a6a1dff1c5 100644
--- a/src/link/Elf.zig
+++ b/src/link/Elf.zig
@@ -1365,17 +1365,17 @@ fn linkWithLLD(self: *Elf, comp: *Compilation) !void {
id_symlink_basename,
&prev_digest_buf,
) catch |err| blk: {
- log.debug("ELF LLD new_digest={} error: {s}", .{ digest, @errorName(err) });
+ log.debug("ELF LLD new_digest={x} error: {s}", .{ digest, @errorName(err) });
// Handle this as a cache miss.
break :blk prev_digest_buf[0..0];
};
if (mem.eql(u8, prev_digest, &digest)) {
- log.debug("ELF LLD digest={} match - skipping invocation", .{digest});
+ log.debug("ELF LLD digest={x} match - skipping invocation", .{digest});
// Hot diggity dog! The output binary is already there.
self.base.lock = man.toOwnedLock();
return;
}
- log.debug("ELF LLD prev_digest={} new_digest={}", .{ prev_digest, digest });
+ log.debug("ELF LLD prev_digest={x} new_digest={x}", .{ prev_digest, digest });
// We are about to change the output file to be different, so we invalidate the build hash now.
directory.handle.deleteFile(id_symlink_basename) catch |err| switch (err) {
@@ -2223,13 +2223,19 @@ pub fn updateDecl(self: *Elf, module: *Module, decl: *Module.Decl) !void {
try dbg_line_buffer.ensureCapacity(26);
const line_off: u28 = blk: {
- const tree = decl.container.file_scope.contents.tree;
- const file_ast_decls = tree.root_node.decls();
+ const tree = decl.container.file_scope.tree;
+ const node_tags = tree.nodes.items(.tag);
+ const node_datas = tree.nodes.items(.data);
+ const token_starts = tree.tokens.items(.start);
+
+ const file_ast_decls = tree.rootDecls();
// TODO Look into improving the performance here by adding a token-index-to-line
// lookup table. Currently this involves scanning over the source code for newlines.
- const fn_proto = file_ast_decls[decl.src_index].castTag(.FnProto).?;
- const block = fn_proto.getBodyNode().?.castTag(.Block).?;
- const line_delta = std.zig.lineDelta(tree.source, 0, tree.token_locs[block.lbrace].start);
+ const fn_decl = file_ast_decls[decl.src_index];
+ assert(node_tags[fn_decl] == .fn_decl);
+ const block = node_datas[fn_decl].rhs;
+ const lbrace = tree.firstToken(block);
+ const line_delta = std.zig.lineDelta(tree.source, 0, token_starts[lbrace]);
break :blk @intCast(u28, line_delta);
};
@@ -2744,13 +2750,19 @@ pub fn updateDeclLineNumber(self: *Elf, module: *Module, decl: *const Module.Dec
if (self.llvm_ir_module) |_| return;
- const tree = decl.container.file_scope.contents.tree;
- const file_ast_decls = tree.root_node.decls();
+ const tree = decl.container.file_scope.tree;
+ const node_tags = tree.nodes.items(.tag);
+ const node_datas = tree.nodes.items(.data);
+ const token_starts = tree.tokens.items(.start);
+
+ const file_ast_decls = tree.rootDecls();
// TODO Look into improving the performance here by adding a token-index-to-line
// lookup table. Currently this involves scanning over the source code for newlines.
- const fn_proto = file_ast_decls[decl.src_index].castTag(.FnProto).?;
- const block = fn_proto.getBodyNode().?.castTag(.Block).?;
- const line_delta = std.zig.lineDelta(tree.source, 0, tree.token_locs[block.lbrace].start);
+ const fn_decl = file_ast_decls[decl.src_index];
+ assert(node_tags[fn_decl] == .fn_decl);
+ const block = node_datas[fn_decl].rhs;
+ const lbrace = tree.firstToken(block);
+ const line_delta = std.zig.lineDelta(tree.source, 0, token_starts[lbrace]);
const casted_line_off = @intCast(u28, line_delta);
const shdr = &self.sections.items[self.debug_line_section_index.?];
@@ -3025,7 +3037,7 @@ const min_nop_size = 2;
/// Writes to the file a buffer, prefixed and suffixed by the specified number of
/// bytes of NOPs. Asserts each padding size is at least `min_nop_size` and total padding bytes
-/// are less than 126,976 bytes (if this limit is ever reached, this function can be
+/// are less than 1044480 bytes (if this limit is ever reached, this function can be
/// improved to make more than one pwritev call, or the limit can be raised by a fixed
/// amount by increasing the length of `vecs`).
fn pwriteDbgLineNops(
@@ -3040,7 +3052,7 @@ fn pwriteDbgLineNops(
const page_of_nops = [1]u8{DW.LNS_negate_stmt} ** 4096;
const three_byte_nop = [3]u8{ DW.LNS_advance_pc, 0b1000_0000, 0 };
- var vecs: [32]std.os.iovec_const = undefined;
+ var vecs: [256]std.os.iovec_const = undefined;
var vec_index: usize = 0;
{
var padding_left = prev_padding_size;
diff --git a/src/link/MachO.zig b/src/link/MachO.zig
index fd1c53cb67f8..139a9b894007 100644
--- a/src/link/MachO.zig
+++ b/src/link/MachO.zig
@@ -556,17 +556,17 @@ fn linkWithLLD(self: *MachO, comp: *Compilation) !void {
id_symlink_basename,
&prev_digest_buf,
) catch |err| blk: {
- log.debug("MachO LLD new_digest={} error: {s}", .{ digest, @errorName(err) });
+ log.debug("MachO LLD new_digest={x} error: {s}", .{ digest, @errorName(err) });
// Handle this as a cache miss.
break :blk prev_digest_buf[0..0];
};
if (mem.eql(u8, prev_digest, &digest)) {
- log.debug("MachO LLD digest={} match - skipping invocation", .{digest});
+ log.debug("MachO LLD digest={x} match - skipping invocation", .{digest});
// Hot diggity dog! The output binary is already there.
self.base.lock = man.toOwnedLock();
return;
}
- log.debug("MachO LLD prev_digest={} new_digest={}", .{ prev_digest, digest });
+ log.debug("MachO LLD prev_digest={x} new_digest={x}", .{ prev_digest, digest });
// We are about to change the output file to be different, so we invalidate the build hash now.
directory.handle.deleteFile(id_symlink_basename) catch |err| switch (err) {
diff --git a/src/link/MachO/DebugSymbols.zig b/src/link/MachO/DebugSymbols.zig
index 15aa86be5137..645e17068b2a 100644
--- a/src/link/MachO/DebugSymbols.zig
+++ b/src/link/MachO/DebugSymbols.zig
@@ -904,13 +904,19 @@ pub fn updateDeclLineNumber(self: *DebugSymbols, module: *Module, decl: *const M
const tracy = trace(@src());
defer tracy.end();
- const tree = decl.container.file_scope.contents.tree;
- const file_ast_decls = tree.root_node.decls();
+ const tree = decl.container.file_scope.tree;
+ const node_tags = tree.nodes.items(.tag);
+ const node_datas = tree.nodes.items(.data);
+ const token_starts = tree.tokens.items(.start);
+
+ const file_ast_decls = tree.rootDecls();
// TODO Look into improving the performance here by adding a token-index-to-line
// lookup table. Currently this involves scanning over the source code for newlines.
- const fn_proto = file_ast_decls[decl.src_index].castTag(.FnProto).?;
- const block = fn_proto.getBodyNode().?.castTag(.Block).?;
- const line_delta = std.zig.lineDelta(tree.source, 0, tree.token_locs[block.lbrace].start);
+ const fn_decl = file_ast_decls[decl.src_index];
+ assert(node_tags[fn_decl] == .fn_decl);
+ const block = node_datas[fn_decl].rhs;
+ const lbrace = tree.firstToken(block);
+ const line_delta = std.zig.lineDelta(tree.source, 0, token_starts[lbrace]);
const casted_line_off = @intCast(u28, line_delta);
const dwarf_segment = &self.load_commands.items[self.dwarf_segment_cmd_index.?].Segment;
@@ -948,13 +954,19 @@ pub fn initDeclDebugBuffers(
try dbg_line_buffer.ensureCapacity(26);
const line_off: u28 = blk: {
- const tree = decl.container.file_scope.contents.tree;
- const file_ast_decls = tree.root_node.decls();
+ const tree = decl.container.file_scope.tree;
+ const node_tags = tree.nodes.items(.tag);
+ const node_datas = tree.nodes.items(.data);
+ const token_starts = tree.tokens.items(.start);
+
+ const file_ast_decls = tree.rootDecls();
// TODO Look into improving the performance here by adding a token-index-to-line
// lookup table. Currently this involves scanning over the source code for newlines.
- const fn_proto = file_ast_decls[decl.src_index].castTag(.FnProto).?;
- const block = fn_proto.getBodyNode().?.castTag(.Block).?;
- const line_delta = std.zig.lineDelta(tree.source, 0, tree.token_locs[block.lbrace].start);
+ const fn_decl = file_ast_decls[decl.src_index];
+ assert(node_tags[fn_decl] == .fn_decl);
+ const block = node_datas[fn_decl].rhs;
+ const lbrace = tree.firstToken(block);
+ const line_delta = std.zig.lineDelta(tree.source, 0, token_starts[lbrace]);
break :blk @intCast(u28, line_delta);
};
diff --git a/src/link/Wasm.zig b/src/link/Wasm.zig
index bd89ce3345dc..e0e10ad88dc0 100644
--- a/src/link/Wasm.zig
+++ b/src/link/Wasm.zig
@@ -391,17 +391,17 @@ fn linkWithLLD(self: *Wasm, comp: *Compilation) !void {
id_symlink_basename,
&prev_digest_buf,
) catch |err| blk: {
- log.debug("WASM LLD new_digest={} error: {s}", .{ digest, @errorName(err) });
+ log.debug("WASM LLD new_digest={x} error: {s}", .{ digest, @errorName(err) });
// Handle this as a cache miss.
break :blk prev_digest_buf[0..0];
};
if (mem.eql(u8, prev_digest, &digest)) {
- log.debug("WASM LLD digest={} match - skipping invocation", .{digest});
+ log.debug("WASM LLD digest={x} match - skipping invocation", .{digest});
// Hot diggity dog! The output binary is already there.
self.base.lock = man.toOwnedLock();
return;
}
- log.debug("WASM LLD prev_digest={} new_digest={}", .{ prev_digest, digest });
+ log.debug("WASM LLD prev_digest={x} new_digest={x}", .{ prev_digest, digest });
// We are about to change the output file to be different, so we invalidate the build hash now.
directory.handle.deleteFile(id_symlink_basename) catch |err| switch (err) {
diff --git a/src/main.zig b/src/main.zig
index c31252a96aca..38da3d5a3b2f 100644
--- a/src/main.zig
+++ b/src/main.zig
@@ -2158,7 +2158,7 @@ fn cmdTranslateC(comp: *Compilation, arena: *Allocator, enable_cache: bool) !voi
const c_headers_dir_path = try comp.zig_lib_directory.join(arena, &[_][]const u8{"include"});
const c_headers_dir_path_z = try arena.dupeZ(u8, c_headers_dir_path);
var clang_errors: []translate_c.ClangErrMsg = &[0]translate_c.ClangErrMsg{};
- const tree = translate_c.translate(
+ var tree = translate_c.translate(
comp.gpa,
new_argv.ptr,
new_argv.ptr + new_argv.len,
@@ -2179,7 +2179,7 @@ fn cmdTranslateC(comp: *Compilation, arena: *Allocator, enable_cache: bool) !voi
process.exit(1);
},
};
- defer tree.deinit();
+ defer tree.deinit(comp.gpa);
if (out_dep_path) |dep_file_path| {
const dep_basename = std.fs.path.basename(dep_file_path);
@@ -2193,16 +2193,21 @@ fn cmdTranslateC(comp: *Compilation, arena: *Allocator, enable_cache: bool) !voi
const digest = man.final();
const o_sub_path = try fs.path.join(arena, &[_][]const u8{ "o", &digest });
+
var o_dir = try comp.local_cache_directory.handle.makeOpenPath(o_sub_path, .{});
defer o_dir.close();
+
var zig_file = try o_dir.createFile(translated_zig_basename, .{});
defer zig_file.close();
- var bw = io.bufferedWriter(zig_file.writer());
- _ = try std.zig.render(comp.gpa, bw.writer(), tree);
- try bw.flush();
+ const formatted = try tree.render(comp.gpa);
+ defer comp.gpa.free(formatted);
- man.writeManifest() catch |err| warn("failed to write cache manifest: {s}", .{@errorName(err)});
+ try zig_file.writeAll(formatted);
+
+ man.writeManifest() catch |err| warn("failed to write cache manifest: {s}", .{
+ @errorName(err),
+ });
break :digest digest;
};
@@ -2689,10 +2694,10 @@ pub fn cmdFmt(gpa: *Allocator, args: []const []const u8) !void {
const source_code = try stdin.readAllAlloc(gpa, max_src_size);
defer gpa.free(source_code);
- const tree = std.zig.parse(gpa, source_code) catch |err| {
+ var tree = std.zig.parse(gpa, source_code) catch |err| {
fatal("error parsing stdin: {s}", .{err});
};
- defer tree.deinit();
+ defer tree.deinit(gpa);
for (tree.errors) |parse_error| {
try printErrMsgToFile(gpa, parse_error, tree, "", stderr_file, color);
@@ -2700,16 +2705,15 @@ pub fn cmdFmt(gpa: *Allocator, args: []const []const u8) !void {
if (tree.errors.len != 0) {
process.exit(1);
}
+ const formatted = try tree.render(gpa);
+ defer gpa.free(formatted);
+
if (check_flag) {
- const anything_changed = try std.zig.render(gpa, io.null_writer, tree);
- const code = if (anything_changed) @as(u8, 1) else @as(u8, 0);
+ const code: u8 = @boolToInt(mem.eql(u8, formatted, source_code));
process.exit(code);
}
- var bw = io.bufferedWriter(io.getStdOut().writer());
- _ = try std.zig.render(gpa, bw.writer(), tree);
- try bw.flush();
- return;
+ return io.getStdOut().writeAll(formatted);
}
if (input_files.items.len == 0) {
@@ -2846,8 +2850,8 @@ fn fmtPathFile(
// Add to set after no longer possible to get error.IsDir.
if (try fmt.seen.fetchPut(stat.inode, {})) |_| return;
- const tree = try std.zig.parse(fmt.gpa, source_code);
- defer tree.deinit();
+ var tree = try std.zig.parse(fmt.gpa, source_code);
+ defer tree.deinit(fmt.gpa);
for (tree.errors) |parse_error| {
try printErrMsgToFile(fmt.gpa, parse_error, tree, file_path, std.io.getStdErr(), fmt.color);
@@ -2857,22 +2861,19 @@ fn fmtPathFile(
return;
}
+ // As a heuristic, we make enough capacity for the same as the input source.
+ fmt.out_buffer.shrinkRetainingCapacity(0);
+ try fmt.out_buffer.ensureCapacity(source_code.len);
+
+ try tree.renderToArrayList(&fmt.out_buffer);
+ if (mem.eql(u8, fmt.out_buffer.items, source_code))
+ return;
+
if (check_mode) {
- const anything_changed = try std.zig.render(fmt.gpa, io.null_writer, tree);
- if (anything_changed) {
- const stdout = io.getStdOut().writer();
- try stdout.print("{s}\n", .{file_path});
- fmt.any_error = true;
- }
+ const stdout = io.getStdOut().writer();
+ try stdout.print("{s}\n", .{file_path});
+ fmt.any_error = true;
} else {
- // As a heuristic, we make enough capacity for the same as the input source.
- try fmt.out_buffer.ensureCapacity(source_code.len);
- fmt.out_buffer.items.len = 0;
- const writer = fmt.out_buffer.writer();
- const anything_changed = try std.zig.render(fmt.gpa, writer, tree);
- if (!anything_changed)
- return; // Good thing we didn't waste any file system access on this.
-
var af = try dir.atomicFile(sub_path, .{ .mode = stat.mode });
defer af.deinit();
@@ -2886,7 +2887,7 @@ fn fmtPathFile(
fn printErrMsgToFile(
gpa: *mem.Allocator,
parse_error: ast.Error,
- tree: *ast.Tree,
+ tree: ast.Tree,
path: []const u8,
file: fs.File,
color: Color,
@@ -2896,19 +2897,17 @@ fn printErrMsgToFile(
.on => true,
.off => false,
};
- const lok_token = parse_error.loc();
- const span_first = lok_token;
- const span_last = lok_token;
+ const lok_token = parse_error.token;
- const first_token = tree.token_locs[span_first];
- const last_token = tree.token_locs[span_last];
- const start_loc = tree.tokenLocationLoc(0, first_token);
- const end_loc = tree.tokenLocationLoc(first_token.end, last_token);
+ const token_starts = tree.tokens.items(.start);
+ const token_tags = tree.tokens.items(.tag);
+ const first_token_start = token_starts[lok_token];
+ const start_loc = tree.tokenLocation(0, lok_token);
var text_buf = std.ArrayList(u8).init(gpa);
defer text_buf.deinit();
const writer = text_buf.writer();
- try parse_error.render(tree.token_ids, writer);
+ try tree.renderError(parse_error, writer);
const text = text_buf.items;
const stream = file.writer();
@@ -2925,8 +2924,12 @@ fn printErrMsgToFile(
}
try stream.writeByte('\n');
try stream.writeByteNTimes(' ', start_loc.column);
- try stream.writeByteNTimes('~', last_token.end - first_token.start);
- try stream.writeByte('\n');
+ if (token_tags[lok_token].lexeme()) |lexeme| {
+ try stream.writeByteNTimes('~', lexeme.len);
+ try stream.writeByte('\n');
+ } else {
+ try stream.writeAll("^\n");
+ }
}
pub const info_zen =
diff --git a/src/test.zig b/src/test.zig
index 07eb001e14b6..a28787e9521d 100644
--- a/src/test.zig
+++ b/src/test.zig
@@ -155,7 +155,7 @@ pub const TestContext = struct {
self.updates.append(.{
.src = src,
.case = .{ .Header = result },
- }) catch unreachable;
+ }) catch @panic("out of memory");
}
/// Adds a subcase in which the module is updated with `src`, compiled,
@@ -164,7 +164,7 @@ pub const TestContext = struct {
self.updates.append(.{
.src = src,
.case = .{ .Execution = result },
- }) catch unreachable;
+ }) catch @panic("out of memory");
}
/// Adds a subcase in which the module is updated with `src`, compiled,
@@ -173,7 +173,7 @@ pub const TestContext = struct {
self.updates.append(.{
.src = src,
.case = .{ .CompareObjectFile = result },
- }) catch unreachable;
+ }) catch @panic("out of memory");
}
/// Adds a subcase in which the module is updated with `src`, which
@@ -181,7 +181,7 @@ pub const TestContext = struct {
/// for the expected reasons, given in sequential order in `errors` in
/// the form `:line:column: error: message`.
pub fn addError(self: *Case, src: [:0]const u8, errors: []const []const u8) void {
- var array = self.updates.allocator.alloc(ErrorMsg, errors.len) catch unreachable;
+ var array = self.updates.allocator.alloc(ErrorMsg, errors.len) catch @panic("out of memory");
for (errors) |err_msg_line, i| {
if (std.mem.startsWith(u8, err_msg_line, "error: ")) {
array[i] = .{
@@ -224,7 +224,7 @@ pub const TestContext = struct {
},
};
}
- self.updates.append(.{ .src = src, .case = .{ .Error = array } }) catch unreachable;
+ self.updates.append(.{ .src = src, .case = .{ .Error = array } }) catch @panic("out of memory");
}
/// Adds a subcase in which the module is updated with `src`, and
@@ -247,7 +247,7 @@ pub const TestContext = struct {
.output_mode = .Exe,
.extension = extension,
.files = std.ArrayList(File).init(ctx.cases.allocator),
- }) catch unreachable;
+ }) catch @panic("out of memory");
return &ctx.cases.items[ctx.cases.items.len - 1];
}
@@ -262,15 +262,17 @@ pub const TestContext = struct {
}
pub fn exeFromCompiledC(ctx: *TestContext, name: []const u8, target: CrossTarget) *Case {
+ const prefixed_name = std.fmt.allocPrint(ctx.cases.allocator, "CBE: {s}", .{name}) catch
+ @panic("out of memory");
ctx.cases.append(Case{
- .name = name,
+ .name = prefixed_name,
.target = target,
.updates = std.ArrayList(Update).init(ctx.cases.allocator),
.output_mode = .Exe,
.extension = .Zig,
.object_format = .c,
.files = std.ArrayList(File).init(ctx.cases.allocator),
- }) catch unreachable;
+ }) catch @panic("out of memory");
return &ctx.cases.items[ctx.cases.items.len - 1];
}
@@ -285,7 +287,7 @@ pub const TestContext = struct {
.extension = .Zig,
.files = std.ArrayList(File).init(ctx.cases.allocator),
.llvm_backend = true,
- }) catch unreachable;
+ }) catch @panic("out of memory");
return &ctx.cases.items[ctx.cases.items.len - 1];
}
@@ -302,7 +304,7 @@ pub const TestContext = struct {
.output_mode = .Obj,
.extension = extension,
.files = std.ArrayList(File).init(ctx.cases.allocator),
- }) catch unreachable;
+ }) catch @panic("out of memory");
return &ctx.cases.items[ctx.cases.items.len - 1];
}
@@ -326,7 +328,7 @@ pub const TestContext = struct {
.extension = ext,
.object_format = .c,
.files = std.ArrayList(File).init(ctx.cases.allocator),
- }) catch unreachable;
+ }) catch @panic("out of memory");
return &ctx.cases.items[ctx.cases.items.len - 1];
}
diff --git a/src/translate_c.zig b/src/translate_c.zig
index b6eaec428656..f2d2f5305086 100644
--- a/src/translate_c.zig
+++ b/src/translate_c.zig
@@ -3,23 +3,24 @@
const std = @import("std");
const assert = std.debug.assert;
-const ast = std.zig.ast;
-const Token = std.zig.Token;
const clang = @import("clang.zig");
const ctok = std.c.tokenizer;
const CToken = std.c.Token;
const mem = std.mem;
const math = std.math;
+const ast = @import("translate_c/ast.zig");
+const Node = ast.Node;
+const Tag = Node.Tag;
const CallingConvention = std.builtin.CallingConvention;
pub const ClangErrMsg = clang.Stage2ErrorMsg;
-pub const Error = error{OutOfMemory};
+pub const Error = std.mem.Allocator.Error;
const TypeError = Error || error{UnsupportedType};
const TransError = TypeError || error{UnsupportedTranslation};
-const SymbolTable = std.StringArrayHashMap(*ast.Node);
+const SymbolTable = std.StringArrayHashMap(Node);
const AliasList = std.ArrayList(struct {
alias: []const u8,
name: []const u8,
@@ -30,23 +31,11 @@ const Scope = struct {
parent: ?*Scope,
const Id = enum {
- Switch,
- Block,
- Root,
- Condition,
- Loop,
- };
-
- /// Represents an in-progress ast.Node.Switch. This struct is stack-allocated.
- /// When it is deinitialized, it produces an ast.Node.Switch which is allocated
- /// into the main arena.
- const Switch = struct {
- base: Scope,
- pending_block: Block,
- cases: []*ast.Node,
- case_index: usize,
- switch_label: ?[]const u8,
- default_label: ?[]const u8,
+ block,
+ root,
+ condition,
+ loop,
+ do_loop,
};
/// Used for the scope of condition expressions, for example `if (cond)`.
@@ -67,16 +56,15 @@ const Scope = struct {
}
};
- /// Represents an in-progress ast.Node.Block. This struct is stack-allocated.
- /// When it is deinitialized, it produces an ast.Node.Block which is allocated
+ /// Represents an in-progress Node.Block. This struct is stack-allocated.
+ /// When it is deinitialized, it produces an Node.Block which is allocated
/// into the main arena.
const Block = struct {
base: Scope,
- statements: std.ArrayList(*ast.Node),
+ statements: std.ArrayList(Node),
variables: AliasList,
- label: ?ast.TokenIndex,
mangle_count: u32 = 0,
- lbrace: ast.TokenIndex,
+ label: ?[]const u8 = null,
/// When the block corresponds to a function, keep track of the return type
/// so that the return expression can be cast, if necessary
@@ -85,17 +73,14 @@ const Scope = struct {
fn init(c: *Context, parent: *Scope, labeled: bool) !Block {
var blk = Block{
.base = .{
- .id = .Block,
+ .id = .block,
.parent = parent,
},
- .statements = std.ArrayList(*ast.Node).init(c.gpa),
+ .statements = std.ArrayList(Node).init(c.gpa),
.variables = AliasList.init(c.gpa),
- .label = null,
- .lbrace = try appendToken(c, .LBrace, "{"),
};
if (labeled) {
- blk.label = try appendIdentifier(c, try blk.makeMangledName(c, "blk"));
- _ = try appendToken(c, .Colon, ":");
+ blk.label = try blk.makeMangledName(c, "blk");
}
return blk;
}
@@ -106,31 +91,24 @@ const Scope = struct {
self.* = undefined;
}
- fn complete(self: *Block, c: *Context) !*ast.Node {
- // We reserve 1 extra statement if the parent is a Loop. This is in case of
- // do while, we want to put `if (cond) break;` at the end.
- const alloc_len = self.statements.items.len + @boolToInt(self.base.parent.?.id == .Loop);
- const rbrace = try appendToken(c, .RBrace, "}");
- if (self.label) |label| {
- const node = try ast.Node.LabeledBlock.alloc(c.arena, alloc_len);
- node.* = .{
- .statements_len = self.statements.items.len,
- .lbrace = self.lbrace,
- .rbrace = rbrace,
- .label = label,
- };
- mem.copy(*ast.Node, node.statements(), self.statements.items);
- return &node.base;
- } else {
- const node = try ast.Node.Block.alloc(c.arena, alloc_len);
- node.* = .{
- .statements_len = self.statements.items.len,
- .lbrace = self.lbrace,
- .rbrace = rbrace,
- };
- mem.copy(*ast.Node, node.statements(), self.statements.items);
- return &node.base;
+ fn complete(self: *Block, c: *Context) !Node {
+ if (self.base.parent.?.id == .do_loop) {
+ // We reserve 1 extra statement if the parent is a do_loop. This is in case of
+ // do while, we want to put `if (cond) break;` at the end.
+ const alloc_len = self.statements.items.len + @boolToInt(self.base.parent.?.id == .do_loop);
+ var stmts = try c.arena.alloc(Node, alloc_len);
+ stmts.len = self.statements.items.len;
+ mem.copy(Node, stmts, self.statements.items);
+ return Tag.block.create(c.arena, .{
+ .label = self.label,
+ .stmts = stmts,
+ });
}
+ if (self.statements.items.len == 0) return Tag.empty_block.init();
+ return Tag.block.create(c.arena, .{
+ .label = self.label,
+ .stmts = try c.arena.dupe(Node, self.statements.items),
+ });
}
/// Given the desired name, return a name that does not shadow anything from outer scopes.
@@ -174,19 +152,27 @@ const Scope = struct {
sym_table: SymbolTable,
macro_table: SymbolTable,
context: *Context,
+ nodes: std.ArrayList(Node),
fn init(c: *Context) Root {
return .{
.base = .{
- .id = .Root,
+ .id = .root,
.parent = null,
},
- .sym_table = SymbolTable.init(c.arena),
- .macro_table = SymbolTable.init(c.arena),
+ .sym_table = SymbolTable.init(c.gpa),
+ .macro_table = SymbolTable.init(c.gpa),
.context = c,
+ .nodes = std.ArrayList(Node).init(c.gpa),
};
}
+ fn deinit(scope: *Root) void {
+ scope.sym_table.deinit();
+ scope.macro_table.deinit();
+ scope.nodes.deinit();
+ }
+
/// Check if the global scope contains this name, without looking into the "future", e.g.
/// ignore the preprocessed decl and macro names.
fn containsNow(scope: *Root, name: []const u8) bool {
@@ -205,20 +191,20 @@ const Scope = struct {
var scope = inner;
while (true) {
switch (scope.id) {
- .Root => unreachable,
- .Block => return @fieldParentPtr(Block, "base", scope),
- .Condition => return @fieldParentPtr(Condition, "base", scope).getBlockScope(c),
+ .root => unreachable,
+ .block => return @fieldParentPtr(Block, "base", scope),
+ .condition => return @fieldParentPtr(Condition, "base", scope).getBlockScope(c),
else => scope = scope.parent.?,
}
}
}
- fn findBlockReturnType(inner: *Scope, c: *Context) ?clang.QualType {
+ fn findBlockReturnType(inner: *Scope, c: *Context) clang.QualType {
var scope = inner;
while (true) {
switch (scope.id) {
- .Root => return null,
- .Block => {
+ .root => unreachable,
+ .block => {
const block = @fieldParentPtr(Block, "base", scope);
if (block.return_type) |qt| return qt;
scope = scope.parent.?;
@@ -230,17 +216,17 @@ const Scope = struct {
fn getAlias(scope: *Scope, name: []const u8) []const u8 {
return switch (scope.id) {
- .Root => return name,
- .Block => @fieldParentPtr(Block, "base", scope).getAlias(name),
- .Switch, .Loop, .Condition => scope.parent.?.getAlias(name),
+ .root => return name,
+ .block => @fieldParentPtr(Block, "base", scope).getAlias(name),
+ .loop, .do_loop, .condition => scope.parent.?.getAlias(name),
};
}
fn contains(scope: *Scope, name: []const u8) bool {
return switch (scope.id) {
- .Root => @fieldParentPtr(Root, "base", scope).contains(name),
- .Block => @fieldParentPtr(Block, "base", scope).contains(name),
- .Switch, .Loop, .Condition => scope.parent.?.contains(name),
+ .root => @fieldParentPtr(Root, "base", scope).contains(name),
+ .block => @fieldParentPtr(Block, "base", scope).contains(name),
+ .loop, .do_loop, .condition => scope.parent.?.contains(name),
};
}
@@ -248,20 +234,26 @@ const Scope = struct {
var scope = inner;
while (true) {
switch (scope.id) {
- .Root => unreachable,
- .Switch => return scope,
- .Loop => return scope,
+ .root => unreachable,
+ .loop, .do_loop => return scope,
else => scope = scope.parent.?,
}
}
}
- fn getSwitch(inner: *Scope) *Scope.Switch {
+ /// Appends a node to the first block scope if inside a function, or to the root tree if not.
+ fn appendNode(inner: *Scope, node: Node) !void {
var scope = inner;
while (true) {
switch (scope.id) {
- .Root => unreachable,
- .Switch => return @fieldParentPtr(Switch, "base", scope),
+ .root => {
+ const root = @fieldParentPtr(Root, "base", scope);
+ return root.nodes.append(node);
+ },
+ .block => {
+ const block = @fieldParentPtr(Block, "base", scope);
+ return block.statements.append(node);
+ },
else => scope = scope.parent.?,
}
}
@@ -271,18 +263,12 @@ const Scope = struct {
pub const Context = struct {
gpa: *mem.Allocator,
arena: *mem.Allocator,
- token_ids: std.ArrayListUnmanaged(Token.Id) = .{},
- token_locs: std.ArrayListUnmanaged(Token.Loc) = .{},
- errors: std.ArrayListUnmanaged(ast.Error) = .{},
- source_buffer: *std.ArrayList(u8),
- err: Error,
source_manager: *clang.SourceManager,
decl_table: std.AutoArrayHashMapUnmanaged(usize, []const u8) = .{},
alias_list: AliasList,
global_scope: *Scope.Root,
clang_context: *clang.ASTContext,
mangle_count: u32 = 0,
- root_decls: std.ArrayListUnmanaged(*ast.Node) = .{},
opaque_demotes: std.AutoHashMapUnmanaged(usize, void) = .{},
/// This one is different than the root scope's name table. This contains
@@ -311,90 +297,15 @@ pub const Context = struct {
const column = c.source_manager.getSpellingColumnNumber(spelling_loc);
return std.fmt.allocPrint(c.arena, "{s}:{d}:{d}", .{ filename, line, column });
}
-
- fn createCall(c: *Context, fn_expr: *ast.Node, params_len: ast.NodeIndex) !*ast.Node.Call {
- _ = try appendToken(c, .LParen, "(");
- const node = try ast.Node.Call.alloc(c.arena, params_len);
- node.* = .{
- .lhs = fn_expr,
- .params_len = params_len,
- .async_token = null,
- .rtoken = undefined, // set after appending args
- };
- return node;
- }
-
- fn createBuiltinCall(c: *Context, name: []const u8, params_len: ast.NodeIndex) !*ast.Node.BuiltinCall {
- const builtin_token = try appendToken(c, .Builtin, name);
- _ = try appendToken(c, .LParen, "(");
- const node = try ast.Node.BuiltinCall.alloc(c.arena, params_len);
- node.* = .{
- .builtin_token = builtin_token,
- .params_len = params_len,
- .rparen_token = undefined, // set after appending args
- };
- return node;
- }
-
- fn createBlock(c: *Context, statements_len: ast.NodeIndex) !*ast.Node.Block {
- const block_node = try ast.Node.Block.alloc(c.arena, statements_len);
- block_node.* = .{
- .lbrace = try appendToken(c, .LBrace, "{"),
- .statements_len = statements_len,
- .rbrace = undefined,
- };
- return block_node;
- }
};
-fn addCBuiltinsNamespace(c: *Context) Error!void {
- // pub usingnamespace @import("std").c.builtins;
- const pub_tok = try appendToken(c, .Keyword_pub, "pub");
- const use_tok = try appendToken(c, .Keyword_usingnamespace, "usingnamespace");
- const import_tok = try appendToken(c, .Builtin, "@import");
- const lparen_tok = try appendToken(c, .LParen, "(");
- const std_tok = try appendToken(c, .StringLiteral, "\"std\"");
- const rparen_tok = try appendToken(c, .RParen, ")");
-
- const std_node = try c.arena.create(ast.Node.OneToken);
- std_node.* = .{
- .base = .{ .tag = .StringLiteral },
- .token = std_tok,
- };
-
- const call_node = try ast.Node.BuiltinCall.alloc(c.arena, 1);
- call_node.* = .{
- .builtin_token = import_tok,
- .params_len = 1,
- .rparen_token = rparen_tok,
- };
- call_node.params()[0] = &std_node.base;
-
- var access_chain = &call_node.base;
- access_chain = try transCreateNodeFieldAccess(c, access_chain, "c");
- access_chain = try transCreateNodeFieldAccess(c, access_chain, "builtins");
-
- const semi_tok = try appendToken(c, .Semicolon, ";");
-
- const bytes = try c.gpa.alignedAlloc(u8, @alignOf(ast.Node.Use), @sizeOf(ast.Node.Use));
- const using_node = @ptrCast(*ast.Node.Use, bytes.ptr);
- using_node.* = .{
- .doc_comments = null,
- .visib_token = pub_tok,
- .use_token = use_tok,
- .expr = access_chain,
- .semicolon_token = semi_tok,
- };
- try c.root_decls.append(c.gpa, &using_node.base);
-}
-
pub fn translate(
gpa: *mem.Allocator,
args_begin: [*]?[*]const u8,
args_end: [*]?[*]const u8,
errors: *[]ClangErrMsg,
resources_path: [*:0]const u8,
-) !*ast.Tree {
+) !std.zig.ast.Tree {
const ast_unit = clang.LoadFromCommandLine(
args_begin,
args_end,
@@ -407,9 +318,6 @@ pub fn translate(
};
defer ast_unit.delete();
- var source_buffer = std.ArrayList(u8).init(gpa);
- defer source_buffer.deinit();
-
// For memory that has the same lifetime as the Tree that we return
// from this function.
var arena = std.heap.ArenaAllocator.init(gpa);
@@ -418,9 +326,7 @@ pub fn translate(
var context = Context{
.gpa = gpa,
.arena = &arena.allocator,
- .source_buffer = &source_buffer,
.source_manager = ast_unit.getSourceManager(),
- .err = undefined,
.alias_list = AliasList.init(gpa),
.global_scope = try arena.allocator.create(Scope.Root),
.clang_context = ast_unit.getASTContext(),
@@ -429,20 +335,17 @@ pub fn translate(
defer {
context.decl_table.deinit(gpa);
context.alias_list.deinit();
- context.token_ids.deinit(gpa);
- context.token_locs.deinit(gpa);
- context.errors.deinit(gpa);
context.global_names.deinit(gpa);
- context.root_decls.deinit(gpa);
context.opaque_demotes.deinit(gpa);
+ context.global_scope.deinit();
}
- try addCBuiltinsNamespace(&context);
+ try context.global_scope.nodes.append(Tag.usingnamespace_builtins.init());
try prepopulateGlobalNameTable(ast_unit, &context);
if (!ast_unit.visitLocalTopLevelDecls(&context, declVisitorC)) {
- return context.err;
+ return error.OutOfMemory;
}
try transPreprocessorEntities(&context, ast_unit);
@@ -450,38 +353,17 @@ pub fn translate(
try addMacros(&context);
for (context.alias_list.items) |alias| {
if (!context.global_scope.sym_table.contains(alias.alias)) {
- try createAlias(&context, alias);
- }
- }
-
- const eof_token = try appendToken(&context, .Eof, "");
- const root_node = try ast.Node.Root.create(&arena.allocator, context.root_decls.items.len, eof_token);
- mem.copy(*ast.Node, root_node.decls(), context.root_decls.items);
-
- if (false) {
- std.debug.warn("debug source:\n{s}\n==EOF==\ntokens:\n", .{source_buffer.items});
- for (context.token_ids.items) |token| {
- std.debug.warn("{}\n", .{token});
+ const node = try Tag.alias.create(context.arena, .{ .actual = alias.alias, .mangled = alias.name });
+ try addTopLevelDecl(&context, alias.alias, node);
}
}
- const tree = try arena.allocator.create(ast.Tree);
- tree.* = .{
- .gpa = gpa,
- .source = try arena.allocator.dupe(u8, source_buffer.items),
- .token_ids = context.token_ids.toOwnedSlice(gpa),
- .token_locs = context.token_locs.toOwnedSlice(gpa),
- .errors = context.errors.toOwnedSlice(gpa),
- .root_node = root_node,
- .arena = arena.state,
- .generated = true,
- };
- return tree;
+ return ast.render(gpa, context.global_scope.nodes.items);
}
fn prepopulateGlobalNameTable(ast_unit: *clang.ASTUnit, c: *Context) !void {
if (!ast_unit.visitLocalTopLevelDecls(c, declVisitorNamesOnlyC)) {
- return c.err;
+ return error.OutOfMemory;
}
// TODO if we see #undef, delete it from the table
@@ -504,19 +386,13 @@ fn prepopulateGlobalNameTable(ast_unit: *clang.ASTUnit, c: *Context) !void {
fn declVisitorNamesOnlyC(context: ?*c_void, decl: *const clang.Decl) callconv(.C) bool {
const c = @ptrCast(*Context, @alignCast(@alignOf(Context), context));
- declVisitorNamesOnly(c, decl) catch |err| {
- c.err = err;
- return false;
- };
+ declVisitorNamesOnly(c, decl) catch return false;
return true;
}
fn declVisitorC(context: ?*c_void, decl: *const clang.Decl) callconv(.C) bool {
const c = @ptrCast(*Context, @alignCast(@alignOf(Context), context));
- declVisitor(c, decl) catch |err| {
- c.err = err;
- return false;
- };
+ declVisitor(c, decl) catch return false;
return true;
}
@@ -533,13 +409,13 @@ fn declVisitor(c: *Context, decl: *const clang.Decl) Error!void {
return visitFnDecl(c, @ptrCast(*const clang.FunctionDecl, decl));
},
.Typedef => {
- _ = try transTypeDef(c, @ptrCast(*const clang.TypedefNameDecl, decl), true);
+ try transTypeDef(c, &c.global_scope.base, @ptrCast(*const clang.TypedefNameDecl, decl));
},
.Enum => {
- _ = try transEnumDecl(c, @ptrCast(*const clang.EnumDecl, decl));
+ try transEnumDecl(c, &c.global_scope.base, @ptrCast(*const clang.EnumDecl, decl));
},
.Record => {
- _ = try transRecordDecl(c, @ptrCast(*const clang.RecordDecl, decl));
+ try transRecordDecl(c, &c.global_scope.base, @ptrCast(*const clang.RecordDecl, decl));
},
.Var => {
return visitVarDecl(c, @ptrCast(*const clang.VarDecl, decl), null);
@@ -549,7 +425,7 @@ fn declVisitor(c: *Context, decl: *const clang.Decl) Error!void {
},
else => {
const decl_name = try c.str(decl.getDeclKindName());
- try emitWarning(c, decl.getLocation(), "ignoring {s} declaration", .{decl_name});
+ try warn(c, &c.global_scope.base, decl.getLocation(), "ignoring {s} declaration", .{decl_name});
},
}
}
@@ -565,7 +441,6 @@ fn visitFnDecl(c: *Context, fn_decl: *const clang.FunctionDecl) Error!void {
return visitFnDecl(c, def);
}
- const rp = makeRestorePoint(c);
const fn_decl_loc = fn_decl.getLocation();
const has_body = fn_decl.hasBody();
const storage_class = fn_decl.getStorageClass();
@@ -609,9 +484,9 @@ fn visitFnDecl(c: *Context, fn_decl: *const clang.FunctionDecl) Error!void {
decl_ctx.has_body = false;
decl_ctx.storage_class = .Extern;
decl_ctx.is_export = false;
- try emitWarning(c, fn_decl_loc, "TODO unable to translate variadic function, demoted to declaration", .{});
+ try warn(c, &c.global_scope.base, fn_decl_loc, "TODO unable to translate variadic function, demoted to extern", .{});
}
- break :blk transFnProto(rp, fn_decl, fn_proto_type, fn_decl_loc, decl_ctx, true) catch |err| switch (err) {
+ break :blk transFnProto(c, fn_decl, fn_proto_type, fn_decl_loc, decl_ctx, true) catch |err| switch (err) {
error.UnsupportedType => {
return failDecl(c, fn_decl_loc, fn_name, "unable to resolve prototype of function", .{});
},
@@ -620,7 +495,7 @@ fn visitFnDecl(c: *Context, fn_decl: *const clang.FunctionDecl) Error!void {
},
.FunctionNoProto => blk: {
const fn_no_proto_type = @ptrCast(*const clang.FunctionType, fn_type);
- break :blk transFnNoProto(rp, fn_no_proto_type, fn_decl_loc, decl_ctx, true) catch |err| switch (err) {
+ break :blk transFnNoProto(c, fn_no_proto_type, fn_decl_loc, decl_ctx, true) catch |err| switch (err) {
error.UnsupportedType => {
return failDecl(c, fn_decl_loc, fn_name, "unable to resolve prototype of function", .{});
},
@@ -631,124 +506,99 @@ fn visitFnDecl(c: *Context, fn_decl: *const clang.FunctionDecl) Error!void {
};
if (!decl_ctx.has_body) {
- const semi_tok = try appendToken(c, .Semicolon, ";");
- return addTopLevelDecl(c, fn_name, &proto_node.base);
+ return addTopLevelDecl(c, fn_name, Node.initPayload(&proto_node.base));
}
// actual function definition with body
const body_stmt = fn_decl.getBody();
- var block_scope = try Scope.Block.init(rp.c, &c.global_scope.base, false);
+ var block_scope = try Scope.Block.init(c, &c.global_scope.base, false);
block_scope.return_type = return_qt;
defer block_scope.deinit();
var scope = &block_scope.base;
var param_id: c_uint = 0;
- for (proto_node.params()) |*param, i| {
- const param_name = if (param.name_token) |name_tok|
- tokenSlice(c, name_tok)
- else
- return failDecl(c, fn_decl_loc, fn_name, "function {s} parameter has no name", .{fn_name});
+ for (proto_node.data.params) |*param, i| {
+ const param_name = param.name orelse {
+ proto_node.data.is_extern = true;
+ proto_node.data.is_export = false;
+ try warn(c, &c.global_scope.base, fn_decl_loc, "function {s} parameter has no name, demoted to extern", .{fn_name});
+ return addTopLevelDecl(c, fn_name, Node.initPayload(&proto_node.base));
+ };
const c_param = fn_decl.getParamDecl(param_id);
const qual_type = c_param.getOriginalType();
const is_const = qual_type.isConstQualified();
const mangled_param_name = try block_scope.makeMangledName(c, param_name);
+ param.name = mangled_param_name;
if (!is_const) {
const bare_arg_name = try std.fmt.allocPrint(c.arena, "arg_{s}", .{mangled_param_name});
const arg_name = try block_scope.makeMangledName(c, bare_arg_name);
+ param.name = arg_name;
- const mut_tok = try appendToken(c, .Keyword_var, "var");
- const name_tok = try appendIdentifier(c, mangled_param_name);
- const eq_token = try appendToken(c, .Equal, "=");
- const init_node = try transCreateNodeIdentifier(c, arg_name);
- const semicolon_token = try appendToken(c, .Semicolon, ";");
- const node = try ast.Node.VarDecl.create(c.arena, .{
- .mut_token = mut_tok,
- .name_token = name_tok,
- .semicolon_token = semicolon_token,
- }, .{
- .eq_token = eq_token,
- .init_node = init_node,
- });
- try block_scope.statements.append(&node.base);
- param.name_token = try appendIdentifier(c, arg_name);
- _ = try appendToken(c, .Colon, ":");
+ const redecl_node = try Tag.arg_redecl.create(c.arena, .{ .actual = mangled_param_name, .mangled = arg_name });
+ try block_scope.statements.append(redecl_node);
}
param_id += 1;
}
const casted_body = @ptrCast(*const clang.CompoundStmt, body_stmt);
- transCompoundStmtInline(rp, &block_scope.base, casted_body, &block_scope) catch |err| switch (err) {
+ transCompoundStmtInline(c, casted_body, &block_scope) catch |err| switch (err) {
error.OutOfMemory => |e| return e,
error.UnsupportedTranslation,
error.UnsupportedType,
- => return failDecl(c, fn_decl_loc, fn_name, "unable to translate function", .{}),
+ => {
+ proto_node.data.is_extern = true;
+ proto_node.data.is_export = false;
+ try warn(c, &c.global_scope.base, fn_decl_loc, "unable to translate function, demoted to extern", .{});
+ return addTopLevelDecl(c, fn_name, Node.initPayload(&proto_node.base));
+ },
};
// add return statement if the function didn't have one
blk: {
- if (fn_ty.getNoReturnAttr()) break :blk;
- if (isCVoid(return_qt)) break :blk;
-
- if (block_scope.statements.items.len > 0) {
- var last = block_scope.statements.items[block_scope.statements.items.len - 1];
- while (true) {
- switch (last.tag) {
- .Block, .LabeledBlock => {
- const stmts = last.blockStatements();
- if (stmts.len == 0) break;
-
- last = stmts[stmts.len - 1];
- },
- // no extra return needed
- .Return => break :blk,
- else => break,
- }
- }
+ const maybe_body = try block_scope.complete(c);
+ if (fn_ty.getNoReturnAttr() or isCVoid(return_qt) or maybe_body.isNoreturn(false)) {
+ proto_node.data.body = maybe_body;
+ break :blk;
}
- const return_expr = try ast.Node.ControlFlowExpression.create(rp.c.arena, .{
- .ltoken = try appendToken(rp.c, .Keyword_return, "return"),
- .tag = .Return,
- }, .{
- .rhs = transZeroInitExpr(rp, scope, fn_decl_loc, return_qt.getTypePtr()) catch |err| switch (err) {
- error.OutOfMemory => |e| return e,
- error.UnsupportedTranslation,
- error.UnsupportedType,
- => return failDecl(c, fn_decl_loc, fn_name, "unable to create a return value for function", .{}),
+ const rhs = transZeroInitExpr(c, scope, fn_decl_loc, return_qt.getTypePtr()) catch |err| switch (err) {
+ error.OutOfMemory => |e| return e,
+ error.UnsupportedTranslation,
+ error.UnsupportedType,
+ => {
+ proto_node.data.is_extern = true;
+ proto_node.data.is_export = false;
+ try warn(c, &c.global_scope.base, fn_decl_loc, "unable to create a return value for function, demoted to extern", .{});
+ return addTopLevelDecl(c, fn_name, Node.initPayload(&proto_node.base));
},
- });
- _ = try appendToken(rp.c, .Semicolon, ";");
- try block_scope.statements.append(&return_expr.base);
+ };
+ const ret = try Tag.@"return".create(c.arena, rhs);
+ try block_scope.statements.append(ret);
+ proto_node.data.body = try block_scope.complete(c);
}
- const body_node = try block_scope.complete(rp.c);
- proto_node.setBodyNode(body_node);
- return addTopLevelDecl(c, fn_name, &proto_node.base);
+ return addTopLevelDecl(c, fn_name, Node.initPayload(&proto_node.base));
}
-fn transQualTypeMaybeInitialized(rp: RestorePoint, qt: clang.QualType, decl_init: ?*const clang.Expr, loc: clang.SourceLocation) TransError!*ast.Node {
+fn transQualTypeMaybeInitialized(c: *Context, scope: *Scope, qt: clang.QualType, decl_init: ?*const clang.Expr, loc: clang.SourceLocation) TransError!Node {
return if (decl_init) |init_expr|
- transQualTypeInitialized(rp, qt, init_expr, loc)
+ transQualTypeInitialized(c, scope, qt, init_expr, loc)
else
- transQualType(rp, qt, loc);
+ transQualType(c, scope, qt, loc);
}
+
/// if mangled_name is not null, this var decl was declared in a block scope.
fn visitVarDecl(c: *Context, var_decl: *const clang.VarDecl, mangled_name: ?[]const u8) Error!void {
const var_name = mangled_name orelse try c.str(@ptrCast(*const clang.NamedDecl, var_decl).getName_bytes_begin());
if (c.global_scope.sym_table.contains(var_name))
return; // Avoid processing this decl twice
- const rp = makeRestorePoint(c);
- const visib_tok = if (mangled_name) |_| null else try appendToken(c, .Keyword_pub, "pub");
-
- const thread_local_token = if (var_decl.getTLSKind() == .None)
- null
- else
- try appendToken(c, .Keyword_threadlocal, "threadlocal");
+ const is_pub = mangled_name == null;
+ const is_threadlocal = var_decl.getTLSKind() != .None;
const scope = &c.global_scope.base;
// TODO https://github.com/ziglang/zig/issues/3756
@@ -767,211 +617,148 @@ fn visitVarDecl(c: *Context, var_decl: *const clang.VarDecl, mangled_name: ?[]co
// does the same as:
// extern int foo;
// int foo = 2;
- const extern_tok = if (storage_class == .Extern and !has_init)
- try appendToken(c, .Keyword_extern, "extern")
- else if (storage_class != .Static)
- try appendToken(c, .Keyword_export, "export")
- else
- null;
-
- const mut_tok = if (is_const)
- try appendToken(c, .Keyword_const, "const")
- else
- try appendToken(c, .Keyword_var, "var");
+ var is_extern = storage_class == .Extern and !has_init;
+ var is_export = !is_extern and storage_class != .Static;
- const name_tok = try appendIdentifier(c, checked_name);
-
- _ = try appendToken(c, .Colon, ":");
-
- const type_node = transQualTypeMaybeInitialized(rp, qual_type, decl_init, var_decl_loc) catch |err| switch (err) {
+ const type_node = transQualTypeMaybeInitialized(c, scope, qual_type, decl_init, var_decl_loc) catch |err| switch (err) {
error.UnsupportedTranslation, error.UnsupportedType => {
return failDecl(c, var_decl_loc, checked_name, "unable to resolve variable type", .{});
},
error.OutOfMemory => |e| return e,
};
- var eq_tok: ast.TokenIndex = undefined;
- var init_node: ?*ast.Node = null;
+ var init_node: ?Node = null;
// If the initialization expression is not present, initialize with undefined.
// If it is an integer literal, we can skip the @as since it will be redundant
// with the variable type.
- if (has_init) {
- eq_tok = try appendToken(c, .Equal, "=");
+ if (has_init) trans_init: {
if (decl_init) |expr| {
const node_or_error = if (expr.getStmtClass() == .StringLiteralClass)
- transStringLiteralAsArray(rp, &c.global_scope.base, @ptrCast(*const clang.StringLiteral, expr), zigArraySize(rp.c, type_node) catch 0)
+ transStringLiteralAsArray(c, scope, @ptrCast(*const clang.StringLiteral, expr), zigArraySize(c, type_node) catch 0)
else
- transExprCoercing(rp, scope, expr, .used, .r_value);
+ transExprCoercing(c, scope, expr, .used);
init_node = node_or_error catch |err| switch (err) {
error.UnsupportedTranslation,
error.UnsupportedType,
=> {
- return failDecl(c, var_decl_loc, checked_name, "unable to translate initializer", .{});
+ is_extern = true;
+ is_export = false;
+ try warn(c, scope, var_decl_loc, "unable to translate variable initializer, demoted to extern", .{});
+ break :trans_init;
},
error.OutOfMemory => |e| return e,
};
+ if (!qualTypeIsBoolean(qual_type) and isBoolRes(init_node.?)) {
+ init_node = try Tag.bool_to_int.create(c.arena, init_node.?);
+ }
} else {
- init_node = try transCreateNodeUndefinedLiteral(c);
+ init_node = Tag.undefined_literal.init();
}
} else if (storage_class != .Extern) {
- eq_tok = try appendToken(c, .Equal, "=");
// The C language specification states that variables with static or threadlocal
// storage without an initializer are initialized to a zero value.
// @import("std").mem.zeroes(T)
- const import_fn_call = try c.createBuiltinCall("@import", 1);
- const std_node = try transCreateNodeStringLiteral(c, "\"std\"");
- import_fn_call.params()[0] = std_node;
- import_fn_call.rparen_token = try appendToken(c, .RParen, ")");
- const inner_field_access = try transCreateNodeFieldAccess(c, &import_fn_call.base, "mem");
- const outer_field_access = try transCreateNodeFieldAccess(c, inner_field_access, "zeroes");
-
- const zero_init_call = try c.createCall(outer_field_access, 1);
- zero_init_call.params()[0] = type_node;
- zero_init_call.rtoken = try appendToken(c, .RParen, ")");
-
- init_node = &zero_init_call.base;
+ init_node = try Tag.std_mem_zeroes.create(c.arena, type_node);
}
- const linksection_expr = blk: {
+ const linksection_string = blk: {
var str_len: usize = undefined;
if (var_decl.getSectionAttribute(&str_len)) |str_ptr| {
- _ = try appendToken(rp.c, .Keyword_linksection, "linksection");
- _ = try appendToken(rp.c, .LParen, "(");
- const expr = try transCreateNodeStringLiteral(
- rp.c,
- try std.fmt.allocPrint(rp.c.arena, "\"{s}\"", .{str_ptr[0..str_len]}),
- );
- _ = try appendToken(rp.c, .RParen, ")");
-
- break :blk expr;
+ break :blk str_ptr[0..str_len];
}
break :blk null;
};
- const align_expr = blk: {
- const alignment = var_decl.getAlignedAttribute(rp.c.clang_context);
+ const alignment = blk: {
+ const alignment = var_decl.getAlignedAttribute(c.clang_context);
if (alignment != 0) {
- _ = try appendToken(rp.c, .Keyword_align, "align");
- _ = try appendToken(rp.c, .LParen, "(");
// Clang reports the alignment in bits
- const expr = try transCreateNodeInt(rp.c, alignment / 8);
- _ = try appendToken(rp.c, .RParen, ")");
-
- break :blk expr;
+ break :blk alignment / 8;
}
break :blk null;
};
- const node = try ast.Node.VarDecl.create(c.arena, .{
- .name_token = name_tok,
- .mut_token = mut_tok,
- .semicolon_token = try appendToken(c, .Semicolon, ";"),
- }, .{
- .visib_token = visib_tok,
- .thread_local_token = thread_local_token,
- .eq_token = eq_tok,
- .extern_export_token = extern_tok,
- .type_node = type_node,
- .align_node = align_expr,
- .section_node = linksection_expr,
- .init_node = init_node,
+ const node = try Tag.var_decl.create(c.arena, .{
+ .is_pub = is_pub,
+ .is_const = is_const,
+ .is_extern = is_extern,
+ .is_export = is_export,
+ .is_threadlocal = is_threadlocal,
+ .linksection_string = linksection_string,
+ .alignment = alignment,
+ .name = checked_name,
+ .type = type_node,
+ .init = init_node,
});
- return addTopLevelDecl(c, checked_name, &node.base);
-}
-
-fn transTypeDefAsBuiltin(c: *Context, typedef_decl: *const clang.TypedefNameDecl, builtin_name: []const u8) !*ast.Node {
- _ = try c.decl_table.put(c.gpa, @ptrToInt(typedef_decl.getCanonicalDecl()), builtin_name);
- return transCreateNodeIdentifier(c, builtin_name);
-}
-
-fn checkForBuiltinTypedef(checked_name: []const u8) ?[]const u8 {
- const table = [_][2][]const u8{
- .{ "uint8_t", "u8" },
- .{ "int8_t", "i8" },
- .{ "uint16_t", "u16" },
- .{ "int16_t", "i16" },
- .{ "uint32_t", "u32" },
- .{ "int32_t", "i32" },
- .{ "uint64_t", "u64" },
- .{ "int64_t", "i64" },
- .{ "intptr_t", "isize" },
- .{ "uintptr_t", "usize" },
- .{ "ssize_t", "isize" },
- .{ "size_t", "usize" },
- };
-
- for (table) |entry| {
- if (mem.eql(u8, checked_name, entry[0])) {
- return entry[1];
- }
- }
-
- return null;
-}
+ return addTopLevelDecl(c, checked_name, node);
+}
+
+const builtin_typedef_map = std.ComptimeStringMap([]const u8, .{
+ .{ "uint8_t", "u8" },
+ .{ "int8_t", "i8" },
+ .{ "uint16_t", "u16" },
+ .{ "int16_t", "i16" },
+ .{ "uint32_t", "u32" },
+ .{ "int32_t", "i32" },
+ .{ "uint64_t", "u64" },
+ .{ "int64_t", "i64" },
+ .{ "intptr_t", "isize" },
+ .{ "uintptr_t", "usize" },
+ .{ "ssize_t", "isize" },
+ .{ "size_t", "usize" },
+});
-fn transTypeDef(c: *Context, typedef_decl: *const clang.TypedefNameDecl, top_level_visit: bool) Error!?*ast.Node {
+fn transTypeDef(c: *Context, scope: *Scope, typedef_decl: *const clang.TypedefNameDecl) Error!void {
if (c.decl_table.get(@ptrToInt(typedef_decl.getCanonicalDecl()))) |name|
- return transCreateNodeIdentifier(c, name); // Avoid processing this decl twice
- const rp = makeRestorePoint(c);
+ return; // Avoid processing this decl twice
+ const toplevel = scope.id == .root;
+ const bs: *Scope.Block = if (!toplevel) try scope.findBlockScope(c) else undefined;
- const typedef_name = try c.str(@ptrCast(*const clang.NamedDecl, typedef_decl).getName_bytes_begin());
+ const bare_name = try c.str(@ptrCast(*const clang.NamedDecl, typedef_decl).getName_bytes_begin());
// TODO https://github.com/ziglang/zig/issues/3756
// TODO https://github.com/ziglang/zig/issues/1802
- const checked_name = if (isZigPrimitiveType(typedef_name)) try std.fmt.allocPrint(c.arena, "{s}_{d}", .{ typedef_name, c.getMangle() }) else typedef_name;
- if (checkForBuiltinTypedef(checked_name)) |builtin| {
- return transTypeDefAsBuiltin(c, typedef_decl, builtin);
- }
-
- if (!top_level_visit) {
- return transCreateNodeIdentifier(c, checked_name);
+ var name: []const u8 = if (isZigPrimitiveType(bare_name)) try std.fmt.allocPrint(c.arena, "{s}_{d}", .{ bare_name, c.getMangle() }) else bare_name;
+ if (builtin_typedef_map.get(name)) |builtin| {
+ return c.decl_table.putNoClobber(c.gpa, @ptrToInt(typedef_decl.getCanonicalDecl()), builtin);
}
+ if (!toplevel) name = try bs.makeMangledName(c, name);
+ try c.decl_table.putNoClobber(c.gpa, @ptrToInt(typedef_decl.getCanonicalDecl()), name);
- _ = try c.decl_table.put(c.gpa, @ptrToInt(typedef_decl.getCanonicalDecl()), checked_name);
- const node = (try transCreateNodeTypedef(rp, typedef_decl, true, checked_name)) orelse return null;
- try addTopLevelDecl(c, checked_name, node);
- return transCreateNodeIdentifier(c, checked_name);
-}
-
-fn transCreateNodeTypedef(
- rp: RestorePoint,
- typedef_decl: *const clang.TypedefNameDecl,
- toplevel: bool,
- checked_name: []const u8,
-) Error!?*ast.Node {
- const visib_tok = if (toplevel) try appendToken(rp.c, .Keyword_pub, "pub") else null;
- const mut_tok = try appendToken(rp.c, .Keyword_const, "const");
- const name_tok = try appendIdentifier(rp.c, checked_name);
- const eq_token = try appendToken(rp.c, .Equal, "=");
const child_qt = typedef_decl.getUnderlyingType();
const typedef_loc = typedef_decl.getLocation();
- const init_node = transQualType(rp, child_qt, typedef_loc) catch |err| switch (err) {
+ const init_node = transQualType(c, scope, child_qt, typedef_loc) catch |err| switch (err) {
error.UnsupportedType => {
- try failDecl(rp.c, typedef_loc, checked_name, "unable to resolve typedef child type", .{});
- return null;
+ return failDecl(c, typedef_loc, name, "unable to resolve typedef child type", .{});
},
error.OutOfMemory => |e| return e,
};
- const semicolon_token = try appendToken(rp.c, .Semicolon, ";");
-
- const node = try ast.Node.VarDecl.create(rp.c.arena, .{
- .name_token = name_tok,
- .mut_token = mut_tok,
- .semicolon_token = semicolon_token,
- }, .{
- .visib_token = visib_tok,
- .eq_token = eq_token,
- .init_node = init_node,
- });
- return &node.base;
+
+ const payload = try c.arena.create(ast.Payload.SimpleVarDecl);
+ payload.* = .{
+ .base = .{ .tag = ([2]Tag{ .var_simple, .pub_var_simple })[@boolToInt(toplevel)] },
+ .data = .{
+ .name = name,
+ .init = init_node,
+ },
+ };
+ const node = Node.initPayload(&payload.base);
+
+ if (toplevel) {
+ try addTopLevelDecl(c, name, node);
+ } else {
+ try scope.appendNode(node);
+ }
}
-fn transRecordDecl(c: *Context, record_decl: *const clang.RecordDecl) Error!?*ast.Node {
+fn transRecordDecl(c: *Context, scope: *Scope, record_decl: *const clang.RecordDecl) Error!void {
if (c.decl_table.get(@ptrToInt(record_decl.getCanonicalDecl()))) |name|
- return try transCreateNodeIdentifier(c, name); // Avoid processing this decl twice
+ return; // Avoid processing this decl twice
const record_loc = record_decl.getLocation();
+ const toplevel = scope.id == .root;
+ const bs: *Scope.Block = if (!toplevel) try scope.findBlockScope(c) else undefined;
var bare_name = try c.str(@ptrCast(*const clang.NamedDecl, record_decl).getName_bytes_begin());
var is_unnamed = false;
@@ -983,46 +770,31 @@ fn transRecordDecl(c: *Context, record_decl: *const clang.RecordDecl) Error!?*as
}
var container_kind_name: []const u8 = undefined;
- var container_kind: std.zig.Token.Id = undefined;
+ var is_union = false;
if (record_decl.isUnion()) {
container_kind_name = "union";
- container_kind = .Keyword_union;
+ is_union = true;
} else if (record_decl.isStruct()) {
container_kind_name = "struct";
- container_kind = .Keyword_struct;
} else {
- try emitWarning(c, record_loc, "record {s} is not a struct or union", .{bare_name});
- return null;
+ try c.decl_table.putNoClobber(c.gpa, @ptrToInt(record_decl.getCanonicalDecl()), bare_name);
+ return failDecl(c, record_loc, bare_name, "record {s} is not a struct or union", .{bare_name});
}
- const name = try std.fmt.allocPrint(c.arena, "{s}_{s}", .{ container_kind_name, bare_name });
- _ = try c.decl_table.put(c.gpa, @ptrToInt(record_decl.getCanonicalDecl()), name);
-
- const visib_tok = if (!is_unnamed) try appendToken(c, .Keyword_pub, "pub") else null;
- const mut_tok = try appendToken(c, .Keyword_const, "const");
- const name_tok = try appendIdentifier(c, name);
-
- const eq_token = try appendToken(c, .Equal, "=");
+ var name: []const u8 = try std.fmt.allocPrint(c.arena, "{s}_{s}", .{ container_kind_name, bare_name });
+ if (!toplevel) name = try bs.makeMangledName(c, name);
+ try c.decl_table.putNoClobber(c.gpa, @ptrToInt(record_decl.getCanonicalDecl()), name);
- var semicolon: ast.TokenIndex = undefined;
+ const is_pub = toplevel and !is_unnamed;
const init_node = blk: {
- const rp = makeRestorePoint(c);
const record_def = record_decl.getDefinition() orelse {
_ = try c.opaque_demotes.put(c.gpa, @ptrToInt(record_decl.getCanonicalDecl()), {});
- const opaque_type = try transCreateNodeOpaqueType(c);
- semicolon = try appendToken(c, .Semicolon, ";");
- break :blk opaque_type;
+ break :blk Tag.opaque_literal.init();
};
- const layout_tok = try if (record_decl.getPackedAttribute())
- appendToken(c, .Keyword_packed, "packed")
- else
- appendToken(c, .Keyword_extern, "extern");
- const container_tok = try appendToken(c, container_kind, container_kind_name);
- const lbrace_token = try appendToken(c, .LBrace, "{");
-
- var fields_and_decls = std.ArrayList(*ast.Node).init(c.gpa);
- defer fields_and_decls.deinit();
+ const is_packed = record_decl.getPackedAttribute();
+ var fields = std.ArrayList(ast.Payload.Record.Field).init(c.gpa);
+ defer fields.deinit();
var unnamed_field_count: u32 = 0;
var it = record_def.field_begin();
@@ -1034,111 +806,88 @@ fn transRecordDecl(c: *Context, record_decl: *const clang.RecordDecl) Error!?*as
if (field_decl.isBitField()) {
_ = try c.opaque_demotes.put(c.gpa, @ptrToInt(record_decl.getCanonicalDecl()), {});
- const opaque_type = try transCreateNodeOpaqueType(c);
- semicolon = try appendToken(c, .Semicolon, ";");
- try emitWarning(c, field_loc, "{s} demoted to opaque type - has bitfield", .{container_kind_name});
- break :blk opaque_type;
+ try warn(c, scope, field_loc, "{s} demoted to opaque type - has bitfield", .{container_kind_name});
+ break :blk Tag.opaque_literal.init();
}
if (qualTypeCanon(field_qt).isIncompleteOrZeroLengthArrayType(c.clang_context)) {
_ = try c.opaque_demotes.put(c.gpa, @ptrToInt(record_decl.getCanonicalDecl()), {});
- const opaque_type = try transCreateNodeOpaqueType(c);
- semicolon = try appendToken(c, .Semicolon, ";");
- try emitWarning(c, field_loc, "{s} demoted to opaque type - has variable length array", .{container_kind_name});
- break :blk opaque_type;
+ try warn(c, scope, field_loc, "{s} demoted to opaque type - has variable length array", .{container_kind_name});
+ break :blk Tag.opaque_literal.init();
}
var is_anon = false;
- var raw_name = try c.str(@ptrCast(*const clang.NamedDecl, field_decl).getName_bytes_begin());
- if (field_decl.isAnonymousStructOrUnion() or raw_name.len == 0) {
+ var field_name = try c.str(@ptrCast(*const clang.NamedDecl, field_decl).getName_bytes_begin());
+ if (field_decl.isAnonymousStructOrUnion() or field_name.len == 0) {
// Context.getMangle() is not used here because doing so causes unpredictable field names for anonymous fields.
- raw_name = try std.fmt.allocPrint(c.arena, "unnamed_{d}", .{unnamed_field_count});
+ field_name = try std.fmt.allocPrint(c.arena, "unnamed_{d}", .{unnamed_field_count});
unnamed_field_count += 1;
is_anon = true;
}
- const field_name = try appendIdentifier(c, raw_name);
- _ = try appendToken(c, .Colon, ":");
- const field_type = transQualType(rp, field_qt, field_loc) catch |err| switch (err) {
+ const field_type = transQualType(c, scope, field_qt, field_loc) catch |err| switch (err) {
error.UnsupportedType => {
_ = try c.opaque_demotes.put(c.gpa, @ptrToInt(record_decl.getCanonicalDecl()), {});
- const opaque_type = try transCreateNodeOpaqueType(c);
- semicolon = try appendToken(c, .Semicolon, ";");
- try emitWarning(c, record_loc, "{s} demoted to opaque type - unable to translate type of field {s}", .{ container_kind_name, raw_name });
- break :blk opaque_type;
+ try warn(c, scope, record_loc, "{s} demoted to opaque type - unable to translate type of field {s}", .{ container_kind_name, field_name });
+ break :blk Tag.opaque_literal.init();
},
else => |e| return e,
};
- const align_expr = blk_2: {
+ const alignment = blk_2: {
const alignment = field_decl.getAlignedAttribute(c.clang_context);
if (alignment != 0) {
- _ = try appendToken(c, .Keyword_align, "align");
- _ = try appendToken(c, .LParen, "(");
// Clang reports the alignment in bits
- const expr = try transCreateNodeInt(c, alignment / 8);
- _ = try appendToken(c, .RParen, ")");
-
- break :blk_2 expr;
+ break :blk_2 alignment / 8;
}
break :blk_2 null;
};
- const field_node = try c.arena.create(ast.Node.ContainerField);
- field_node.* = .{
- .doc_comments = null,
- .comptime_token = null,
- .name_token = field_name,
- .type_expr = field_type,
- .value_expr = null,
- .align_expr = align_expr,
- };
-
if (is_anon) {
- _ = try c.decl_table.put(
- c.gpa,
- @ptrToInt(field_decl.getCanonicalDecl()),
- raw_name,
- );
+ try c.decl_table.putNoClobber(c.gpa, @ptrToInt(field_decl.getCanonicalDecl()), field_name);
}
- try fields_and_decls.append(&field_node.base);
- _ = try appendToken(c, .Comma, ",");
+ try fields.append(.{
+ .name = field_name,
+ .type = field_type,
+ .alignment = alignment,
+ });
}
- const container_node = try ast.Node.ContainerDecl.alloc(c.arena, fields_and_decls.items.len);
- container_node.* = .{
- .layout_token = layout_tok,
- .kind_token = container_tok,
- .init_arg_expr = .None,
- .fields_and_decls_len = fields_and_decls.items.len,
- .lbrace_token = lbrace_token,
- .rbrace_token = try appendToken(c, .RBrace, "}"),
+
+ const record_payload = try c.arena.create(ast.Payload.Record);
+ record_payload.* = .{
+ .base = .{ .tag = ([2]Tag{ .@"struct", .@"union" })[@boolToInt(is_union)] },
+ .data = .{
+ .is_packed = is_packed,
+ .fields = try c.arena.dupe(ast.Payload.Record.Field, fields.items),
+ },
};
- mem.copy(*ast.Node, container_node.fieldsAndDecls(), fields_and_decls.items);
- semicolon = try appendToken(c, .Semicolon, ";");
- break :blk &container_node.base;
+ break :blk Node.initPayload(&record_payload.base);
};
- const node = try ast.Node.VarDecl.create(c.arena, .{
- .name_token = name_tok,
- .mut_token = mut_tok,
- .semicolon_token = semicolon,
- }, .{
- .visib_token = visib_tok,
- .eq_token = eq_token,
- .init_node = init_node,
- });
+ const payload = try c.arena.create(ast.Payload.SimpleVarDecl);
+ payload.* = .{
+ .base = .{ .tag = ([2]Tag{ .var_simple, .pub_var_simple })[@boolToInt(is_pub)] },
+ .data = .{
+ .name = name,
+ .init = init_node,
+ },
+ };
- try addTopLevelDecl(c, name, &node.base);
- if (!is_unnamed)
- try c.alias_list.append(.{ .alias = bare_name, .name = name });
- return transCreateNodeIdentifier(c, name);
+ if (toplevel) {
+ try addTopLevelDecl(c, name, Node.initPayload(&payload.base));
+ if (!is_unnamed)
+ try c.alias_list.append(.{ .alias = bare_name, .name = name });
+ } else {
+ try scope.appendNode(Node.initPayload(&payload.base));
+ }
}
-fn transEnumDecl(c: *Context, enum_decl: *const clang.EnumDecl) Error!?*ast.Node {
+fn transEnumDecl(c: *Context, scope: *Scope, enum_decl: *const clang.EnumDecl) Error!void {
if (c.decl_table.get(@ptrToInt(enum_decl.getCanonicalDecl()))) |name|
- return try transCreateNodeIdentifier(c, name); // Avoid processing this decl twice
- const rp = makeRestorePoint(c);
+ return; // Avoid processing this decl twice
const enum_loc = enum_decl.getLocation();
+ const toplevel = scope.id == .root;
+ const bs: *Scope.Block = if (!toplevel) try scope.findBlockScope(c) else undefined;
var bare_name = try c.str(@ptrCast(*const clang.NamedDecl, enum_decl).getName_bytes_begin());
var is_unnamed = false;
@@ -1147,13 +896,13 @@ fn transEnumDecl(c: *Context, enum_decl: *const clang.EnumDecl) Error!?*ast.Node
is_unnamed = true;
}
- const name = try std.fmt.allocPrint(c.arena, "enum_{s}", .{bare_name});
- _ = try c.decl_table.put(c.gpa, @ptrToInt(enum_decl.getCanonicalDecl()), name);
+ var name: []const u8 = try std.fmt.allocPrint(c.arena, "enum_{s}", .{bare_name});
+ if (!toplevel) _ = try bs.makeMangledName(c, name);
+ try c.decl_table.putNoClobber(c.gpa, @ptrToInt(enum_decl.getCanonicalDecl()), name);
- const visib_tok = if (!is_unnamed) try appendToken(c, .Keyword_pub, "pub") else null;
- const mut_tok = try appendToken(c, .Keyword_const, "const");
- const name_tok = try appendIdentifier(c, name);
- const eq_token = try appendToken(c, .Equal, "=");
+ const is_pub = toplevel and !is_unnamed;
+ var redecls = std.ArrayList(Tag.enum_redecl.Data()).init(c.gpa);
+ defer redecls.deinit();
const init_node = if (enum_decl.getDefinition()) |enum_def| blk: {
var pure_enum = true;
@@ -1167,11 +916,8 @@ fn transEnumDecl(c: *Context, enum_decl: *const clang.EnumDecl) Error!?*ast.Node
}
}
- const extern_tok = try appendToken(c, .Keyword_extern, "extern");
- const container_tok = try appendToken(c, .Keyword_enum, "enum");
-
- var fields_and_decls = std.ArrayList(*ast.Node).init(c.gpa);
- defer fields_and_decls.deinit();
+ var fields = std.ArrayList(ast.Payload.Enum.Field).init(c.gpa);
+ defer fields.deinit();
const int_type = enum_decl.getIntegerType();
// The underlying type may be null in case of forward-declared enum
@@ -1179,30 +925,22 @@ fn transEnumDecl(c: *Context, enum_decl: *const clang.EnumDecl) Error!?*ast.Node
// default to the usual integer type used for all the enums.
// default to c_int since msvc and gcc default to different types
- _ = try appendToken(c, .LParen, "(");
- const init_arg_expr = ast.Node.ContainerDecl.InitArg{
- .Type = if (int_type.ptr != null and
- !isCBuiltinType(int_type, .UInt) and
- !isCBuiltinType(int_type, .Int))
- transQualType(rp, int_type, enum_loc) catch |err| switch (err) {
- error.UnsupportedType => {
- try failDecl(c, enum_loc, name, "unable to translate enum tag type", .{});
- return null;
- },
- else => |e| return e,
- }
- else
- try transCreateNodeIdentifier(c, "c_int"),
- };
- _ = try appendToken(c, .RParen, ")");
-
- const lbrace_token = try appendToken(c, .LBrace, "{");
+ const init_arg_expr = if (int_type.ptr != null and
+ !isCBuiltinType(int_type, .UInt) and
+ !isCBuiltinType(int_type, .Int))
+ transQualType(c, scope, int_type, enum_loc) catch |err| switch (err) {
+ error.UnsupportedType => {
+ return failDecl(c, enum_loc, name, "unable to translate enum tag type", .{});
+ },
+ else => |e| return e,
+ }
+ else
+ try Tag.type.create(c.arena, "c_int");
it = enum_def.enumerator_begin();
end_it = enum_def.enumerator_end();
while (it.neq(end_it)) : (it = it.next()) {
const enum_const = it.deref();
-
const enum_val_name = try c.str(@ptrCast(*const clang.NamedDecl, enum_const).getName_bytes_begin());
const field_name = if (!is_unnamed and mem.startsWith(u8, enum_val_name, bare_name))
@@ -1210,123 +948,62 @@ fn transEnumDecl(c: *Context, enum_decl: *const clang.EnumDecl) Error!?*ast.Node
else
enum_val_name;
- const field_name_tok = try appendIdentifier(c, field_name);
-
- const int_node = if (!pure_enum) blk_2: {
- _ = try appendToken(c, .Colon, "=");
- break :blk_2 try transCreateNodeAPInt(c, enum_const.getInitVal());
- } else
+ const int_node = if (!pure_enum)
+ try transCreateNodeAPInt(c, enum_const.getInitVal())
+ else
null;
- const field_node = try c.arena.create(ast.Node.ContainerField);
- field_node.* = .{
- .doc_comments = null,
- .comptime_token = null,
- .name_token = field_name_tok,
- .type_expr = null,
- .value_expr = int_node,
- .align_expr = null,
- };
-
- try fields_and_decls.append(&field_node.base);
- _ = try appendToken(c, .Comma, ",");
+ try fields.append(.{
+ .name = field_name,
+ .value = int_node,
+ });
// In C each enum value is in the global namespace. So we put them there too.
// At this point we can rely on the enum emitting successfully.
- const tld_visib_tok = try appendToken(c, .Keyword_pub, "pub");
- const tld_mut_tok = try appendToken(c, .Keyword_const, "const");
- const tld_name_tok = try appendIdentifier(c, enum_val_name);
- const tld_eq_token = try appendToken(c, .Equal, "=");
- const cast_node = try rp.c.createBuiltinCall("@enumToInt", 1);
- const enum_ident = try transCreateNodeIdentifier(c, name);
- const period_tok = try appendToken(c, .Period, ".");
- const field_ident = try transCreateNodeIdentifier(c, field_name);
- const field_access_node = try c.arena.create(ast.Node.SimpleInfixOp);
- field_access_node.* = .{
- .base = .{ .tag = .Period },
- .op_token = period_tok,
- .lhs = enum_ident,
- .rhs = field_ident,
- };
- cast_node.params()[0] = &field_access_node.base;
- cast_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- const tld_init_node = &cast_node.base;
- const tld_semicolon_token = try appendToken(c, .Semicolon, ";");
- const tld_node = try ast.Node.VarDecl.create(c.arena, .{
- .name_token = tld_name_tok,
- .mut_token = tld_mut_tok,
- .semicolon_token = tld_semicolon_token,
- }, .{
- .visib_token = tld_visib_tok,
- .eq_token = tld_eq_token,
- .init_node = tld_init_node,
+ try redecls.append(.{
+ .enum_val_name = enum_val_name,
+ .field_name = field_name,
+ .enum_name = name,
});
- try addTopLevelDecl(c, field_name, &tld_node.base);
}
- // make non exhaustive
- const field_node = try c.arena.create(ast.Node.ContainerField);
- field_node.* = .{
- .doc_comments = null,
- .comptime_token = null,
- .name_token = try appendIdentifier(c, "_"),
- .type_expr = null,
- .value_expr = null,
- .align_expr = null,
- };
- try fields_and_decls.append(&field_node.base);
- _ = try appendToken(c, .Comma, ",");
- const container_node = try ast.Node.ContainerDecl.alloc(c.arena, fields_and_decls.items.len);
- container_node.* = .{
- .layout_token = extern_tok,
- .kind_token = container_tok,
- .init_arg_expr = init_arg_expr,
- .fields_and_decls_len = fields_and_decls.items.len,
- .lbrace_token = lbrace_token,
- .rbrace_token = try appendToken(c, .RBrace, "}"),
- };
- mem.copy(*ast.Node, container_node.fieldsAndDecls(), fields_and_decls.items);
- break :blk &container_node.base;
+ break :blk try Tag.@"enum".create(c.arena, .{
+ .int_type = init_arg_expr,
+ .fields = try c.arena.dupe(ast.Payload.Enum.Field, fields.items),
+ });
} else blk: {
_ = try c.opaque_demotes.put(c.gpa, @ptrToInt(enum_decl.getCanonicalDecl()), {});
- break :blk try transCreateNodeOpaqueType(c);
+ break :blk Tag.opaque_literal.init();
};
- const semicolon_token = try appendToken(c, .Semicolon, ";");
- const node = try ast.Node.VarDecl.create(c.arena, .{
- .name_token = name_tok,
- .mut_token = mut_tok,
- .semicolon_token = semicolon_token,
- }, .{
- .visib_token = visib_tok,
- .eq_token = eq_token,
- .init_node = init_node,
- });
+ const payload = try c.arena.create(ast.Payload.SimpleVarDecl);
+ payload.* = .{
+ .base = .{ .tag = ([2]Tag{ .var_simple, .pub_var_simple })[@boolToInt(is_pub)] },
+ .data = .{
+ .name = name,
+ .init = init_node,
+ },
+ };
- try addTopLevelDecl(c, name, &node.base);
- if (!is_unnamed)
- try c.alias_list.append(.{ .alias = bare_name, .name = name });
- return transCreateNodeIdentifier(c, name);
-}
-
-fn createAlias(c: *Context, alias: anytype) !void {
- const visib_tok = try appendToken(c, .Keyword_pub, "pub");
- const mut_tok = try appendToken(c, .Keyword_const, "const");
- const name_tok = try appendIdentifier(c, alias.alias);
- const eq_token = try appendToken(c, .Equal, "=");
- const init_node = try transCreateNodeIdentifier(c, alias.name);
- const semicolon_token = try appendToken(c, .Semicolon, ";");
-
- const node = try ast.Node.VarDecl.create(c.arena, .{
- .name_token = name_tok,
- .mut_token = mut_tok,
- .semicolon_token = semicolon_token,
- }, .{
- .visib_token = visib_tok,
- .eq_token = eq_token,
- .init_node = init_node,
- });
- return addTopLevelDecl(c, alias.alias, &node.base);
+ if (toplevel) {
+ try addTopLevelDecl(c, name, Node.initPayload(&payload.base));
+ if (!is_unnamed)
+ try c.alias_list.append(.{ .alias = bare_name, .name = name });
+ } else {
+ try scope.appendNode(Node.initPayload(&payload.base));
+ }
+
+ for (redecls.items) |redecl| {
+ if (toplevel) {
+ try addTopLevelDecl(c, redecl.field_name, try Tag.pub_enum_redecl.create(c.arena, redecl));
+ } else {
+ try scope.appendNode(try Tag.enum_redecl.create(c.arena, .{
+ .enum_val_name = try bs.makeMangledName(c, redecl.enum_val_name),
+ .field_name = redecl.field_name,
+ .enum_name = redecl.enum_name,
+ }));
+ }
+ }
}
const ResultUsed = enum {
@@ -1334,317 +1011,252 @@ const ResultUsed = enum {
unused,
};
-const LRValue = enum {
- l_value,
- r_value,
-};
-
fn transStmt(
- rp: RestorePoint,
+ c: *Context,
scope: *Scope,
stmt: *const clang.Stmt,
result_used: ResultUsed,
- lrvalue: LRValue,
-) TransError!*ast.Node {
+) TransError!Node {
const sc = stmt.getStmtClass();
switch (sc) {
- .BinaryOperatorClass => return transBinaryOperator(rp, scope, @ptrCast(*const clang.BinaryOperator, stmt), result_used),
- .CompoundStmtClass => return transCompoundStmt(rp, scope, @ptrCast(*const clang.CompoundStmt, stmt)),
- .CStyleCastExprClass => return transCStyleCastExprClass(rp, scope, @ptrCast(*const clang.CStyleCastExpr, stmt), result_used, lrvalue),
- .DeclStmtClass => return transDeclStmt(rp, scope, @ptrCast(*const clang.DeclStmt, stmt)),
- .DeclRefExprClass => return transDeclRefExpr(rp, scope, @ptrCast(*const clang.DeclRefExpr, stmt), lrvalue),
- .ImplicitCastExprClass => return transImplicitCastExpr(rp, scope, @ptrCast(*const clang.ImplicitCastExpr, stmt), result_used),
- .IntegerLiteralClass => return transIntegerLiteral(rp, scope, @ptrCast(*const clang.IntegerLiteral, stmt), result_used, .with_as),
- .ReturnStmtClass => return transReturnStmt(rp, scope, @ptrCast(*const clang.ReturnStmt, stmt)),
- .StringLiteralClass => return transStringLiteral(rp, scope, @ptrCast(*const clang.StringLiteral, stmt), result_used),
+ .BinaryOperatorClass => return transBinaryOperator(c, scope, @ptrCast(*const clang.BinaryOperator, stmt), result_used),
+ .CompoundStmtClass => return transCompoundStmt(c, scope, @ptrCast(*const clang.CompoundStmt, stmt)),
+ .CStyleCastExprClass => return transCStyleCastExprClass(c, scope, @ptrCast(*const clang.CStyleCastExpr, stmt), result_used),
+ .DeclStmtClass => return transDeclStmt(c, scope, @ptrCast(*const clang.DeclStmt, stmt)),
+ .DeclRefExprClass => return transDeclRefExpr(c, scope, @ptrCast(*const clang.DeclRefExpr, stmt)),
+ .ImplicitCastExprClass => return transImplicitCastExpr(c, scope, @ptrCast(*const clang.ImplicitCastExpr, stmt), result_used),
+ .IntegerLiteralClass => return transIntegerLiteral(c, scope, @ptrCast(*const clang.IntegerLiteral, stmt), result_used, .with_as),
+ .ReturnStmtClass => return transReturnStmt(c, scope, @ptrCast(*const clang.ReturnStmt, stmt)),
+ .StringLiteralClass => return transStringLiteral(c, scope, @ptrCast(*const clang.StringLiteral, stmt), result_used),
.ParenExprClass => {
- const expr = try transExpr(rp, scope, @ptrCast(*const clang.ParenExpr, stmt).getSubExpr(), .used, lrvalue);
- if (expr.tag == .GroupedExpression) return maybeSuppressResult(rp, scope, result_used, expr);
- const node = try rp.c.arena.create(ast.Node.GroupedExpression);
- node.* = .{
- .lparen = try appendToken(rp.c, .LParen, "("),
- .expr = expr,
- .rparen = try appendToken(rp.c, .RParen, ")"),
- };
- return maybeSuppressResult(rp, scope, result_used, &node.base);
- },
- .InitListExprClass => return transInitListExpr(rp, scope, @ptrCast(*const clang.InitListExpr, stmt), result_used),
- .ImplicitValueInitExprClass => return transImplicitValueInitExpr(rp, scope, @ptrCast(*const clang.Expr, stmt), result_used),
- .IfStmtClass => return transIfStmt(rp, scope, @ptrCast(*const clang.IfStmt, stmt)),
- .WhileStmtClass => return transWhileLoop(rp, scope, @ptrCast(*const clang.WhileStmt, stmt)),
- .DoStmtClass => return transDoWhileLoop(rp, scope, @ptrCast(*const clang.DoStmt, stmt)),
+ const expr = try transExpr(c, scope, @ptrCast(*const clang.ParenExpr, stmt).getSubExpr(), .used);
+ return maybeSuppressResult(c, scope, result_used, expr);
+ },
+ .InitListExprClass => return transInitListExpr(c, scope, @ptrCast(*const clang.InitListExpr, stmt), result_used),
+ .ImplicitValueInitExprClass => return transImplicitValueInitExpr(c, scope, @ptrCast(*const clang.Expr, stmt), result_used),
+ .IfStmtClass => return transIfStmt(c, scope, @ptrCast(*const clang.IfStmt, stmt)),
+ .WhileStmtClass => return transWhileLoop(c, scope, @ptrCast(*const clang.WhileStmt, stmt)),
+ .DoStmtClass => return transDoWhileLoop(c, scope, @ptrCast(*const clang.DoStmt, stmt)),
.NullStmtClass => {
- const block = try rp.c.createBlock(0);
- block.rbrace = try appendToken(rp.c, .RBrace, "}");
- return &block.base;
- },
- .ContinueStmtClass => return try transCreateNodeContinue(rp.c),
- .BreakStmtClass => return transBreak(rp, scope),
- .ForStmtClass => return transForLoop(rp, scope, @ptrCast(*const clang.ForStmt, stmt)),
- .FloatingLiteralClass => return transFloatingLiteral(rp, scope, @ptrCast(*const clang.FloatingLiteral, stmt), result_used),
+ return Tag.empty_block.init();
+ },
+ .ContinueStmtClass => return Tag.@"continue".init(),
+ .BreakStmtClass => return Tag.@"break".init(),
+ .ForStmtClass => return transForLoop(c, scope, @ptrCast(*const clang.ForStmt, stmt)),
+ .FloatingLiteralClass => return transFloatingLiteral(c, scope, @ptrCast(*const clang.FloatingLiteral, stmt), result_used),
.ConditionalOperatorClass => {
- return transConditionalOperator(rp, scope, @ptrCast(*const clang.ConditionalOperator, stmt), result_used);
+ return transConditionalOperator(c, scope, @ptrCast(*const clang.ConditionalOperator, stmt), result_used);
},
.BinaryConditionalOperatorClass => {
- return transBinaryConditionalOperator(rp, scope, @ptrCast(*const clang.BinaryConditionalOperator, stmt), result_used);
- },
- .SwitchStmtClass => return transSwitch(rp, scope, @ptrCast(*const clang.SwitchStmt, stmt)),
- .CaseStmtClass => return transCase(rp, scope, @ptrCast(*const clang.CaseStmt, stmt)),
- .DefaultStmtClass => return transDefault(rp, scope, @ptrCast(*const clang.DefaultStmt, stmt)),
- .ConstantExprClass => return transConstantExpr(rp, scope, @ptrCast(*const clang.Expr, stmt), result_used),
- .PredefinedExprClass => return transPredefinedExpr(rp, scope, @ptrCast(*const clang.PredefinedExpr, stmt), result_used),
- .CharacterLiteralClass => return transCharLiteral(rp, scope, @ptrCast(*const clang.CharacterLiteral, stmt), result_used, .with_as),
- .StmtExprClass => return transStmtExpr(rp, scope, @ptrCast(*const clang.StmtExpr, stmt), result_used),
- .MemberExprClass => return transMemberExpr(rp, scope, @ptrCast(*const clang.MemberExpr, stmt), result_used),
- .ArraySubscriptExprClass => return transArrayAccess(rp, scope, @ptrCast(*const clang.ArraySubscriptExpr, stmt), result_used),
- .CallExprClass => return transCallExpr(rp, scope, @ptrCast(*const clang.CallExpr, stmt), result_used),
- .UnaryExprOrTypeTraitExprClass => return transUnaryExprOrTypeTraitExpr(rp, scope, @ptrCast(*const clang.UnaryExprOrTypeTraitExpr, stmt), result_used),
- .UnaryOperatorClass => return transUnaryOperator(rp, scope, @ptrCast(*const clang.UnaryOperator, stmt), result_used),
- .CompoundAssignOperatorClass => return transCompoundAssignOperator(rp, scope, @ptrCast(*const clang.CompoundAssignOperator, stmt), result_used),
+ return transBinaryConditionalOperator(c, scope, @ptrCast(*const clang.BinaryConditionalOperator, stmt), result_used);
+ },
+ .SwitchStmtClass => return transSwitch(c, scope, @ptrCast(*const clang.SwitchStmt, stmt)),
+ .CaseStmtClass, .DefaultStmtClass => {
+ return fail(c, error.UnsupportedTranslation, stmt.getBeginLoc(), "TODO complex switch", .{});
+ },
+ .ConstantExprClass => return transConstantExpr(c, scope, @ptrCast(*const clang.Expr, stmt), result_used),
+ .PredefinedExprClass => return transPredefinedExpr(c, scope, @ptrCast(*const clang.PredefinedExpr, stmt), result_used),
+ .CharacterLiteralClass => return transCharLiteral(c, scope, @ptrCast(*const clang.CharacterLiteral, stmt), result_used, .with_as),
+ .StmtExprClass => return transStmtExpr(c, scope, @ptrCast(*const clang.StmtExpr, stmt), result_used),
+ .MemberExprClass => return transMemberExpr(c, scope, @ptrCast(*const clang.MemberExpr, stmt), result_used),
+ .ArraySubscriptExprClass => return transArrayAccess(c, scope, @ptrCast(*const clang.ArraySubscriptExpr, stmt), result_used),
+ .CallExprClass => return transCallExpr(c, scope, @ptrCast(*const clang.CallExpr, stmt), result_used),
+ .UnaryExprOrTypeTraitExprClass => return transUnaryExprOrTypeTraitExpr(c, scope, @ptrCast(*const clang.UnaryExprOrTypeTraitExpr, stmt), result_used),
+ .UnaryOperatorClass => return transUnaryOperator(c, scope, @ptrCast(*const clang.UnaryOperator, stmt), result_used),
+ .CompoundAssignOperatorClass => return transCompoundAssignOperator(c, scope, @ptrCast(*const clang.CompoundAssignOperator, stmt), result_used),
.OpaqueValueExprClass => {
const source_expr = @ptrCast(*const clang.OpaqueValueExpr, stmt).getSourceExpr().?;
- const expr = try transExpr(rp, scope, source_expr, .used, lrvalue);
- if (expr.tag == .GroupedExpression) return maybeSuppressResult(rp, scope, result_used, expr);
- const node = try rp.c.arena.create(ast.Node.GroupedExpression);
- node.* = .{
- .lparen = try appendToken(rp.c, .LParen, "("),
- .expr = expr,
- .rparen = try appendToken(rp.c, .RParen, ")"),
- };
- return maybeSuppressResult(rp, scope, result_used, &node.base);
+ const expr = try transExpr(c, scope, source_expr, .used);
+ return maybeSuppressResult(c, scope, result_used, expr);
},
else => {
- return revertAndWarn(
- rp,
- error.UnsupportedTranslation,
- stmt.getBeginLoc(),
- "TODO implement translation of stmt class {s}",
- .{@tagName(sc)},
- );
+ return fail(c, error.UnsupportedTranslation, stmt.getBeginLoc(), "TODO implement translation of stmt class {s}", .{@tagName(sc)});
},
}
}
fn transBinaryOperator(
- rp: RestorePoint,
+ c: *Context,
scope: *Scope,
stmt: *const clang.BinaryOperator,
result_used: ResultUsed,
-) TransError!*ast.Node {
+) TransError!Node {
const op = stmt.getOpcode();
const qt = stmt.getType();
- var op_token: ast.TokenIndex = undefined;
- var op_id: ast.Node.Tag = undefined;
switch (op) {
- .Assign => return try transCreateNodeAssign(rp, scope, result_used, stmt.getLHS(), stmt.getRHS()),
+ .Assign => return try transCreateNodeAssign(c, scope, result_used, stmt.getLHS(), stmt.getRHS()),
.Comma => {
- var block_scope = try Scope.Block.init(rp.c, scope, true);
- const lparen = try appendToken(rp.c, .LParen, "(");
+ var block_scope = try Scope.Block.init(c, scope, true);
+ defer block_scope.deinit();
- const lhs = try transExpr(rp, &block_scope.base, stmt.getLHS(), .unused, .r_value);
+ const lhs = try transExpr(c, &block_scope.base, stmt.getLHS(), .unused);
try block_scope.statements.append(lhs);
- const rhs = try transExpr(rp, &block_scope.base, stmt.getRHS(), .used, .r_value);
- _ = try appendToken(rp.c, .Semicolon, ";");
- const break_node = try transCreateNodeBreak(rp.c, block_scope.label, rhs);
- try block_scope.statements.append(&break_node.base);
- const block_node = try block_scope.complete(rp.c);
- const rparen = try appendToken(rp.c, .RParen, ")");
- const grouped_expr = try rp.c.arena.create(ast.Node.GroupedExpression);
- grouped_expr.* = .{
- .lparen = lparen,
- .expr = block_node,
- .rparen = rparen,
- };
- return maybeSuppressResult(rp, scope, result_used, &grouped_expr.base);
+ const rhs = try transExpr(c, &block_scope.base, stmt.getRHS(), .used);
+ const break_node = try Tag.break_val.create(c.arena, .{
+ .label = block_scope.label,
+ .val = rhs,
+ });
+ try block_scope.statements.append(break_node);
+ const block_node = try block_scope.complete(c);
+ return maybeSuppressResult(c, scope, result_used, block_node);
},
.Div => {
if (cIsSignedInteger(qt)) {
// signed integer division uses @divTrunc
- const div_trunc_node = try rp.c.createBuiltinCall("@divTrunc", 2);
- div_trunc_node.params()[0] = try transExpr(rp, scope, stmt.getLHS(), .used, .l_value);
- _ = try appendToken(rp.c, .Comma, ",");
- const rhs = try transExpr(rp, scope, stmt.getRHS(), .used, .r_value);
- div_trunc_node.params()[1] = rhs;
- div_trunc_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- return maybeSuppressResult(rp, scope, result_used, &div_trunc_node.base);
+ const lhs = try transExpr(c, scope, stmt.getLHS(), .used);
+ const rhs = try transExpr(c, scope, stmt.getRHS(), .used);
+ const div_trunc = try Tag.div_trunc.create(c.arena, .{ .lhs = lhs, .rhs = rhs });
+ return maybeSuppressResult(c, scope, result_used, div_trunc);
}
},
.Rem => {
if (cIsSignedInteger(qt)) {
// signed integer division uses @rem
- const rem_node = try rp.c.createBuiltinCall("@rem", 2);
- rem_node.params()[0] = try transExpr(rp, scope, stmt.getLHS(), .used, .l_value);
- _ = try appendToken(rp.c, .Comma, ",");
- const rhs = try transExpr(rp, scope, stmt.getRHS(), .used, .r_value);
- rem_node.params()[1] = rhs;
- rem_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- return maybeSuppressResult(rp, scope, result_used, &rem_node.base);
+ const lhs = try transExpr(c, scope, stmt.getLHS(), .used);
+ const rhs = try transExpr(c, scope, stmt.getRHS(), .used);
+ const rem = try Tag.rem.create(c.arena, .{ .lhs = lhs, .rhs = rhs });
+ return maybeSuppressResult(c, scope, result_used, rem);
}
},
.Shl => {
- const node = try transCreateNodeShiftOp(rp, scope, stmt, .BitShiftLeft, .AngleBracketAngleBracketLeft, "<<");
- return maybeSuppressResult(rp, scope, result_used, node);
+ return transCreateNodeShiftOp(c, scope, stmt, .shl, result_used);
},
.Shr => {
- const node = try transCreateNodeShiftOp(rp, scope, stmt, .BitShiftRight, .AngleBracketAngleBracketRight, ">>");
- return maybeSuppressResult(rp, scope, result_used, node);
+ return transCreateNodeShiftOp(c, scope, stmt, .shr, result_used);
},
.LAnd => {
- const node = try transCreateNodeBoolInfixOp(rp, scope, stmt, .BoolAnd, result_used, true);
- return maybeSuppressResult(rp, scope, result_used, node);
+ return transCreateNodeBoolInfixOp(c, scope, stmt, .@"and", result_used);
},
.LOr => {
- const node = try transCreateNodeBoolInfixOp(rp, scope, stmt, .BoolOr, result_used, true);
- return maybeSuppressResult(rp, scope, result_used, node);
+ return transCreateNodeBoolInfixOp(c, scope, stmt, .@"or", result_used);
},
else => {},
}
- const lhs_node = try transExpr(rp, scope, stmt.getLHS(), .used, .l_value);
+ var op_id: Tag = undefined;
switch (op) {
.Add => {
if (cIsUnsignedInteger(qt)) {
- op_token = try appendToken(rp.c, .PlusPercent, "+%");
- op_id = .AddWrap;
+ op_id = .add_wrap;
} else {
- op_token = try appendToken(rp.c, .Plus, "+");
- op_id = .Add;
+ op_id = .add;
}
},
.Sub => {
if (cIsUnsignedInteger(qt)) {
- op_token = try appendToken(rp.c, .MinusPercent, "-%");
- op_id = .SubWrap;
+ op_id = .sub_wrap;
} else {
- op_token = try appendToken(rp.c, .Minus, "-");
- op_id = .Sub;
+ op_id = .sub;
}
},
.Mul => {
if (cIsUnsignedInteger(qt)) {
- op_token = try appendToken(rp.c, .AsteriskPercent, "*%");
- op_id = .MulWrap;
+ op_id = .mul_wrap;
} else {
- op_token = try appendToken(rp.c, .Asterisk, "*");
- op_id = .Mul;
+ op_id = .mul;
}
},
.Div => {
// unsigned/float division uses the operator
- op_id = .Div;
- op_token = try appendToken(rp.c, .Slash, "/");
+ op_id = .div;
},
.Rem => {
// unsigned/float division uses the operator
- op_id = .Mod;
- op_token = try appendToken(rp.c, .Percent, "%");
+ op_id = .mod;
},
.LT => {
- op_id = .LessThan;
- op_token = try appendToken(rp.c, .AngleBracketLeft, "<");
+ op_id = .less_than;
},
.GT => {
- op_id = .GreaterThan;
- op_token = try appendToken(rp.c, .AngleBracketRight, ">");
+ op_id = .greater_than;
},
.LE => {
- op_id = .LessOrEqual;
- op_token = try appendToken(rp.c, .AngleBracketLeftEqual, "<=");
+ op_id = .less_than_equal;
},
.GE => {
- op_id = .GreaterOrEqual;
- op_token = try appendToken(rp.c, .AngleBracketRightEqual, ">=");
+ op_id = .greater_than_equal;
},
.EQ => {
- op_id = .EqualEqual;
- op_token = try appendToken(rp.c, .EqualEqual, "==");
+ op_id = .equal;
},
.NE => {
- op_id = .BangEqual;
- op_token = try appendToken(rp.c, .BangEqual, "!=");
+ op_id = .not_equal;
},
.And => {
- op_id = .BitAnd;
- op_token = try appendToken(rp.c, .Ampersand, "&");
+ op_id = .bit_and;
},
.Xor => {
- op_id = .BitXor;
- op_token = try appendToken(rp.c, .Caret, "^");
+ op_id = .bit_xor;
},
.Or => {
- op_id = .BitOr;
- op_token = try appendToken(rp.c, .Pipe, "|");
+ op_id = .bit_or;
},
else => unreachable,
}
- const rhs_node = try transExpr(rp, scope, stmt.getRHS(), .used, .r_value);
+ const lhs_uncasted = try transExpr(c, scope, stmt.getLHS(), .used);
+ const rhs_uncasted = try transExpr(c, scope, stmt.getRHS(), .used);
- const lhs = if (isBoolRes(lhs_node)) init: {
- const cast_node = try rp.c.createBuiltinCall("@boolToInt", 1);
- cast_node.params()[0] = lhs_node;
- cast_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- break :init &cast_node.base;
- } else lhs_node;
+ const lhs = if (isBoolRes(lhs_uncasted))
+ try Tag.bool_to_int.create(c.arena, lhs_uncasted)
+ else
+ lhs_uncasted;
- const rhs = if (isBoolRes(rhs_node)) init: {
- const cast_node = try rp.c.createBuiltinCall("@boolToInt", 1);
- cast_node.params()[0] = rhs_node;
- cast_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- break :init &cast_node.base;
- } else rhs_node;
+ const rhs = if (isBoolRes(rhs_uncasted))
+ try Tag.bool_to_int.create(c.arena, rhs_uncasted)
+ else
+ rhs_uncasted;
- return transCreateNodeInfixOp(rp, scope, lhs, op_id, op_token, rhs, result_used, true);
+ return transCreateNodeInfixOp(c, scope, op_id, lhs, rhs, result_used);
}
fn transCompoundStmtInline(
- rp: RestorePoint,
- parent_scope: *Scope,
+ c: *Context,
stmt: *const clang.CompoundStmt,
block: *Scope.Block,
) TransError!void {
var it = stmt.body_begin();
const end_it = stmt.body_end();
while (it != end_it) : (it += 1) {
- const result = try transStmt(rp, parent_scope, it[0], .unused, .r_value);
- try block.statements.append(result);
+ const result = try transStmt(c, &block.base, it[0], .unused);
+ switch (result.tag()) {
+ .declaration, .empty_block => {},
+ else => try block.statements.append(result),
+ }
}
}
-fn transCompoundStmt(rp: RestorePoint, scope: *Scope, stmt: *const clang.CompoundStmt) TransError!*ast.Node {
- var block_scope = try Scope.Block.init(rp.c, scope, false);
+fn transCompoundStmt(c: *Context, scope: *Scope, stmt: *const clang.CompoundStmt) TransError!Node {
+ var block_scope = try Scope.Block.init(c, scope, false);
defer block_scope.deinit();
- try transCompoundStmtInline(rp, &block_scope.base, stmt, &block_scope);
- return try block_scope.complete(rp.c);
+ try transCompoundStmtInline(c, stmt, &block_scope);
+ return try block_scope.complete(c);
}
fn transCStyleCastExprClass(
- rp: RestorePoint,
+ c: *Context,
scope: *Scope,
stmt: *const clang.CStyleCastExpr,
result_used: ResultUsed,
- lrvalue: LRValue,
-) TransError!*ast.Node {
+) TransError!Node {
const sub_expr = stmt.getSubExpr();
const cast_node = (try transCCast(
- rp,
+ c,
scope,
stmt.getBeginLoc(),
stmt.getType(),
sub_expr.getType(),
- try transExpr(rp, scope, sub_expr, .used, lrvalue),
+ try transExpr(c, scope, sub_expr, .used),
));
- return maybeSuppressResult(rp, scope, result_used, cast_node);
+ return maybeSuppressResult(c, scope, result_used, cast_node);
}
fn transDeclStmtOne(
- rp: RestorePoint,
+ c: *Context,
scope: *Scope,
decl: *const clang.Decl,
block_scope: *Scope.Block,
-) TransError!*ast.Node {
- const c = rp.c;
-
+) TransError!void {
switch (decl.getKind()) {
.Var => {
const var_decl = @ptrCast(*const clang.VarDecl, decl);
@@ -1658,62 +1270,51 @@ fn transDeclStmtOne(
.Extern, .Static => {
// This is actually a global variable, put it in the global scope and reference it.
// `_ = mangled_name;`
- try visitVarDecl(rp.c, var_decl, mangled_name);
- return try maybeSuppressResult(rp, scope, .unused, try transCreateNodeIdentifier(rp.c, mangled_name));
+ return visitVarDecl(c, var_decl, mangled_name);
},
else => {},
}
- const mut_tok = if (qual_type.isConstQualified())
- try appendToken(c, .Keyword_const, "const")
- else
- try appendToken(c, .Keyword_var, "var");
- const name_tok = try appendIdentifier(c, mangled_name);
+ const is_const = qual_type.isConstQualified();
- _ = try appendToken(c, .Colon, ":");
const loc = decl.getLocation();
- const type_node = try transQualTypeMaybeInitialized(rp, qual_type, decl_init, loc);
+ const type_node = try transQualTypeMaybeInitialized(c, scope, qual_type, decl_init, loc);
- const eq_token = try appendToken(c, .Equal, "=");
var init_node = if (decl_init) |expr|
if (expr.getStmtClass() == .StringLiteralClass)
- try transStringLiteralAsArray(rp, scope, @ptrCast(*const clang.StringLiteral, expr), try zigArraySize(rp.c, type_node))
+ try transStringLiteralAsArray(c, scope, @ptrCast(*const clang.StringLiteral, expr), try zigArraySize(c, type_node))
else
- try transExprCoercing(rp, scope, expr, .used, .r_value)
+ try transExprCoercing(c, scope, expr, .used)
else
- try transCreateNodeUndefinedLiteral(c);
+ Tag.undefined_literal.init();
if (!qualTypeIsBoolean(qual_type) and isBoolRes(init_node)) {
- const builtin_node = try rp.c.createBuiltinCall("@boolToInt", 1);
- builtin_node.params()[0] = init_node;
- builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- init_node = &builtin_node.base;
+ init_node = try Tag.bool_to_int.create(c.arena, init_node);
}
- const semicolon_token = try appendToken(c, .Semicolon, ";");
- const node = try ast.Node.VarDecl.create(c.arena, .{
- .name_token = name_tok,
- .mut_token = mut_tok,
- .semicolon_token = semicolon_token,
- }, .{
- .eq_token = eq_token,
- .type_node = type_node,
- .init_node = init_node,
+ const node = try Tag.var_decl.create(c.arena, .{
+ .is_pub = false,
+ .is_const = is_const,
+ .is_extern = false,
+ .is_export = false,
+ .is_threadlocal = false,
+ .linksection_string = null,
+ .alignment = null,
+ .name = mangled_name,
+ .type = type_node,
+ .init = init_node,
});
- return &node.base;
+ try block_scope.statements.append(node);
},
.Typedef => {
- const typedef_decl = @ptrCast(*const clang.TypedefNameDecl, decl);
- const name = try c.str(@ptrCast(*const clang.NamedDecl, typedef_decl).getName_bytes_begin());
-
- const underlying_qual = typedef_decl.getUnderlyingType();
- const underlying_type = underlying_qual.getTypePtr();
-
- const mangled_name = try block_scope.makeMangledName(c, name);
- const node = (try transCreateNodeTypedef(rp, typedef_decl, false, mangled_name)) orelse
- return error.UnsupportedTranslation;
- return node;
+ try transTypeDef(c, scope, @ptrCast(*const clang.TypedefNameDecl, decl));
+ },
+ .Record => {
+ try transRecordDecl(c, scope, @ptrCast(*const clang.RecordDecl, decl));
},
- else => |kind| return revertAndWarn(
- rp,
+ .Enum => {
+ try transEnumDecl(c, scope, @ptrCast(*const clang.EnumDecl, decl));
+ },
+ else => |kind| return fail(
+ c,
error.UnsupportedTranslation,
decl.getLocation(),
"TODO implement translation of DeclStmt kind {s}",
@@ -1722,96 +1323,86 @@ fn transDeclStmtOne(
}
}
-fn transDeclStmt(rp: RestorePoint, scope: *Scope, stmt: *const clang.DeclStmt) TransError!*ast.Node {
- const block_scope = scope.findBlockScope(rp.c) catch unreachable;
+fn transDeclStmt(c: *Context, scope: *Scope, stmt: *const clang.DeclStmt) TransError!Node {
+ const block_scope = try scope.findBlockScope(c);
var it = stmt.decl_begin();
const end_it = stmt.decl_end();
- assert(it != end_it);
- while (true) : (it += 1) {
- const node = try transDeclStmtOne(rp, scope, it[0], block_scope);
-
- if (it + 1 == end_it) {
- return node;
- } else {
- try block_scope.statements.append(node);
- }
+ while (it != end_it) : (it += 1) {
+ try transDeclStmtOne(c, scope, it[0], block_scope);
}
- unreachable;
+ return Tag.declaration.init();
}
fn transDeclRefExpr(
- rp: RestorePoint,
+ c: *Context,
scope: *Scope,
expr: *const clang.DeclRefExpr,
- lrvalue: LRValue,
-) TransError!*ast.Node {
+) TransError!Node {
const value_decl = expr.getDecl();
- const name = try rp.c.str(@ptrCast(*const clang.NamedDecl, value_decl).getName_bytes_begin());
+ const name = try c.str(@ptrCast(*const clang.NamedDecl, value_decl).getName_bytes_begin());
const mangled_name = scope.getAlias(name);
- return transCreateNodeIdentifier(rp.c, mangled_name);
+ return Tag.identifier.create(c.arena, mangled_name);
}
fn transImplicitCastExpr(
- rp: RestorePoint,
+ c: *Context,
scope: *Scope,
expr: *const clang.ImplicitCastExpr,
result_used: ResultUsed,
-) TransError!*ast.Node {
- const c = rp.c;
+) TransError!Node {
const sub_expr = expr.getSubExpr();
const dest_type = getExprQualType(c, @ptrCast(*const clang.Expr, expr));
const src_type = getExprQualType(c, sub_expr);
switch (expr.getCastKind()) {
.BitCast, .FloatingCast, .FloatingToIntegral, .IntegralToFloating, .IntegralCast, .PointerToIntegral, .IntegralToPointer => {
- const sub_expr_node = try transExpr(rp, scope, sub_expr, .used, .r_value);
- return try transCCast(rp, scope, expr.getBeginLoc(), dest_type, src_type, sub_expr_node);
+ const sub_expr_node = try transExpr(c, scope, sub_expr, .used);
+ const casted = try transCCast(c, scope, expr.getBeginLoc(), dest_type, src_type, sub_expr_node);
+ return maybeSuppressResult(c, scope, result_used, casted);
},
.LValueToRValue, .NoOp, .FunctionToPointerDecay => {
- const sub_expr_node = try transExpr(rp, scope, sub_expr, .used, .r_value);
- return maybeSuppressResult(rp, scope, result_used, sub_expr_node);
+ const sub_expr_node = try transExpr(c, scope, sub_expr, .used);
+ return maybeSuppressResult(c, scope, result_used, sub_expr_node);
},
.ArrayToPointerDecay => {
if (exprIsNarrowStringLiteral(sub_expr)) {
- const sub_expr_node = try transExpr(rp, scope, sub_expr, .used, .r_value);
- return maybeSuppressResult(rp, scope, result_used, sub_expr_node);
+ const sub_expr_node = try transExpr(c, scope, sub_expr, .used);
+ return maybeSuppressResult(c, scope, result_used, sub_expr_node);
}
- const prefix_op = try transCreateNodeSimplePrefixOp(rp.c, .AddressOf, .Ampersand, "&");
- prefix_op.rhs = try transExpr(rp, scope, sub_expr, .used, .r_value);
-
- return maybeSuppressResult(rp, scope, result_used, &prefix_op.base);
+ const addr = try Tag.address_of.create(c.arena, try transExpr(c, scope, sub_expr, .used));
+ return maybeSuppressResult(c, scope, result_used, addr);
},
.NullToPointer => {
- return try transCreateNodeNullLiteral(rp.c);
+ return Tag.null_literal.init();
},
.PointerToBoolean => {
// @ptrToInt(val) != 0
- const ptr_to_int = try rp.c.createBuiltinCall("@ptrToInt", 1);
- ptr_to_int.params()[0] = try transExpr(rp, scope, sub_expr, .used, .r_value);
- ptr_to_int.rparen_token = try appendToken(rp.c, .RParen, ")");
+ const ptr_to_int = try Tag.ptr_to_int.create(c.arena, try transExpr(c, scope, sub_expr, .used));
- const op_token = try appendToken(rp.c, .BangEqual, "!=");
- const rhs_node = try transCreateNodeInt(rp.c, 0);
- return transCreateNodeInfixOp(rp, scope, &ptr_to_int.base, .BangEqual, op_token, rhs_node, result_used, false);
+ const ne = try Tag.not_equal.create(c.arena, .{ .lhs = ptr_to_int, .rhs = Tag.zero_literal.init() });
+ return maybeSuppressResult(c, scope, result_used, ne);
},
.IntegralToBoolean => {
- const sub_expr_node = try transExpr(rp, scope, sub_expr, .used, .r_value);
+ const sub_expr_node = try transExpr(c, scope, sub_expr, .used);
// The expression is already a boolean one, return it as-is
if (isBoolRes(sub_expr_node))
- return sub_expr_node;
+ return maybeSuppressResult(c, scope, result_used, sub_expr_node);
// val != 0
- const op_token = try appendToken(rp.c, .BangEqual, "!=");
- const rhs_node = try transCreateNodeInt(rp.c, 0);
- return transCreateNodeInfixOp(rp, scope, sub_expr_node, .BangEqual, op_token, rhs_node, result_used, false);
+ const ne = try Tag.not_equal.create(c.arena, .{ .lhs = sub_expr_node, .rhs = Tag.zero_literal.init() });
+ return maybeSuppressResult(c, scope, result_used, ne);
},
.BuiltinFnToFnPtr => {
- return transExpr(rp, scope, sub_expr, .used, .r_value);
+ return transExpr(c, scope, sub_expr, result_used);
+ },
+ .ToVoid => {
+ // Should only appear in the rhs and lhs of a ConditionalOperator
+ return transExpr(c, scope, sub_expr, .unused);
},
- else => |kind| return revertAndWarn(
- rp,
+ else => |kind| return fail(
+ c,
error.UnsupportedTranslation,
@ptrCast(*const clang.Stmt, expr).getBeginLoc(),
"TODO implement translation of CastKind {s}",
@@ -1821,52 +1412,28 @@ fn transImplicitCastExpr(
}
fn transBoolExpr(
- rp: RestorePoint,
+ c: *Context,
scope: *Scope,
expr: *const clang.Expr,
used: ResultUsed,
- lrvalue: LRValue,
- grouped: bool,
-) TransError!*ast.Node {
+) TransError!Node {
if (@ptrCast(*const clang.Stmt, expr).getStmtClass() == .IntegerLiteralClass) {
var is_zero: bool = undefined;
- if (!(@ptrCast(*const clang.IntegerLiteral, expr).isZero(&is_zero, rp.c.clang_context))) {
- return revertAndWarn(rp, error.UnsupportedTranslation, expr.getBeginLoc(), "invalid integer literal", .{});
+ if (!(@ptrCast(*const clang.IntegerLiteral, expr).isZero(&is_zero, c.clang_context))) {
+ return fail(c, error.UnsupportedTranslation, expr.getBeginLoc(), "invalid integer literal", .{});
}
- return try transCreateNodeBoolLiteral(rp.c, !is_zero);
+ return Node{ .tag_if_small_enough = @enumToInt(([2]Tag{ .true_literal, .false_literal })[@boolToInt(is_zero)]) };
}
- const lparen = if (grouped)
- try appendToken(rp.c, .LParen, "(")
- else
- undefined;
- var res = try transExpr(rp, scope, expr, used, lrvalue);
-
+ var res = try transExpr(c, scope, expr, used);
if (isBoolRes(res)) {
- if (!grouped and res.tag == .GroupedExpression) {
- const group = @fieldParentPtr(ast.Node.GroupedExpression, "base", res);
- res = group.expr;
- // get zig fmt to work properly
- tokenSlice(rp.c, group.lparen)[0] = ')';
- }
- return res;
+ return maybeSuppressResult(c, scope, used, res);
}
- const ty = getExprQualType(rp.c, expr).getTypePtr();
- const node = try finishBoolExpr(rp, scope, expr.getBeginLoc(), ty, res, used);
+ const ty = getExprQualType(c, expr).getTypePtr();
+ const node = try finishBoolExpr(c, scope, expr.getBeginLoc(), ty, res, used);
- if (grouped) {
- const rparen = try appendToken(rp.c, .RParen, ")");
- const grouped_expr = try rp.c.arena.create(ast.Node.GroupedExpression);
- grouped_expr.* = .{
- .lparen = lparen,
- .expr = node,
- .rparen = rparen,
- };
- return maybeSuppressResult(rp, scope, used, &grouped_expr.base);
- } else {
- return maybeSuppressResult(rp, scope, used, node);
- }
+ return maybeSuppressResult(c, scope, used, node);
}
fn exprIsBooleanType(expr: *const clang.Expr) bool {
@@ -1892,34 +1459,32 @@ fn exprIsNarrowStringLiteral(expr: *const clang.Expr) bool {
}
}
-fn isBoolRes(res: *ast.Node) bool {
- switch (res.tag) {
- .BoolOr,
- .BoolAnd,
- .EqualEqual,
- .BangEqual,
- .LessThan,
- .GreaterThan,
- .LessOrEqual,
- .GreaterOrEqual,
- .BoolNot,
- .BoolLiteral,
+fn isBoolRes(res: Node) bool {
+ switch (res.tag()) {
+ .@"or",
+ .@"and",
+ .equal,
+ .not_equal,
+ .less_than,
+ .less_than_equal,
+ .greater_than,
+ .greater_than_equal,
+ .not,
+ .false_literal,
+ .true_literal,
=> return true,
-
- .GroupedExpression => return isBoolRes(@fieldParentPtr(ast.Node.GroupedExpression, "base", res).expr),
-
else => return false,
}
}
fn finishBoolExpr(
- rp: RestorePoint,
+ c: *Context,
scope: *Scope,
loc: clang.SourceLocation,
ty: *const clang.Type,
- node: *ast.Node,
+ node: Node,
used: ResultUsed,
-) TransError!*ast.Node {
+) TransError!Node {
switch (ty.getTypeClass()) {
.Builtin => {
const builtin_ty = @ptrCast(*const clang.BuiltinType, ty);
@@ -1951,42 +1516,38 @@ fn finishBoolExpr(
.WChar_S,
.Float16,
=> {
- const op_token = try appendToken(rp.c, .BangEqual, "!=");
- const rhs_node = try transCreateNodeInt(rp.c, 0);
- return transCreateNodeInfixOp(rp, scope, node, .BangEqual, op_token, rhs_node, used, false);
+ // node != 0
+ return Tag.not_equal.create(c.arena, .{ .lhs = node, .rhs = Tag.zero_literal.init() });
},
.NullPtr => {
- const op_token = try appendToken(rp.c, .EqualEqual, "==");
- const rhs_node = try transCreateNodeNullLiteral(rp.c);
- return transCreateNodeInfixOp(rp, scope, node, .EqualEqual, op_token, rhs_node, used, false);
+ // node == null
+ return Tag.equal.create(c.arena, .{ .lhs = node, .rhs = Tag.null_literal.init() });
},
else => {},
}
},
.Pointer => {
- const op_token = try appendToken(rp.c, .BangEqual, "!=");
- const rhs_node = try transCreateNodeNullLiteral(rp.c);
- return transCreateNodeInfixOp(rp, scope, node, .BangEqual, op_token, rhs_node, used, false);
+ // node != null
+ return Tag.not_equal.create(c.arena, .{ .lhs = node, .rhs = Tag.null_literal.init() });
},
.Typedef => {
const typedef_ty = @ptrCast(*const clang.TypedefType, ty);
const typedef_decl = typedef_ty.getDecl();
const underlying_type = typedef_decl.getUnderlyingType();
- return finishBoolExpr(rp, scope, loc, underlying_type.getTypePtr(), node, used);
+ return finishBoolExpr(c, scope, loc, underlying_type.getTypePtr(), node, used);
},
.Enum => {
- const op_token = try appendToken(rp.c, .BangEqual, "!=");
- const rhs_node = try transCreateNodeInt(rp.c, 0);
- return transCreateNodeInfixOp(rp, scope, node, .BangEqual, op_token, rhs_node, used, false);
+ // node != 0
+ return Tag.not_equal.create(c.arena, .{ .lhs = node, .rhs = Tag.zero_literal.init() });
},
.Elaborated => {
const elaborated_ty = @ptrCast(*const clang.ElaboratedType, ty);
const named_type = elaborated_ty.getNamedType();
- return finishBoolExpr(rp, scope, loc, named_type.getTypePtr(), node, used);
+ return finishBoolExpr(c, scope, loc, named_type.getTypePtr(), node, used);
},
else => {},
}
- return revertAndWarn(rp, error.UnsupportedType, loc, "unsupported bool expression type", .{});
+ return fail(c, error.UnsupportedType, loc, "unsupported bool expression type", .{});
}
const SuppressCast = enum {
@@ -1994,21 +1555,21 @@ const SuppressCast = enum {
no_as,
};
fn transIntegerLiteral(
- rp: RestorePoint,
+ c: *Context,
scope: *Scope,
expr: *const clang.IntegerLiteral,
result_used: ResultUsed,
suppress_as: SuppressCast,
-) TransError!*ast.Node {
+) TransError!Node {
var eval_result: clang.ExprEvalResult = undefined;
- if (!expr.EvaluateAsInt(&eval_result, rp.c.clang_context)) {
+ if (!expr.EvaluateAsInt(&eval_result, c.clang_context)) {
const loc = expr.getBeginLoc();
- return revertAndWarn(rp, error.UnsupportedTranslation, loc, "invalid integer literal", .{});
+ return fail(c, error.UnsupportedTranslation, loc, "invalid integer literal", .{});
}
if (suppress_as == .no_as) {
- const int_lit_node = try transCreateNodeAPInt(rp.c, eval_result.Val.getInt());
- return maybeSuppressResult(rp, scope, result_used, int_lit_node);
+ const int_lit_node = try transCreateNodeAPInt(c, eval_result.Val.getInt());
+ return maybeSuppressResult(c, scope, result_used, int_lit_node);
}
// Integer literals in C have types, and this can matter for several reasons.
@@ -2023,115 +1584,61 @@ fn transIntegerLiteral(
// @as(T, x)
const expr_base = @ptrCast(*const clang.Expr, expr);
- const as_node = try rp.c.createBuiltinCall("@as", 2);
- const ty_node = try transQualType(rp, expr_base.getType(), expr_base.getBeginLoc());
- as_node.params()[0] = ty_node;
- _ = try appendToken(rp.c, .Comma, ",");
- as_node.params()[1] = try transCreateNodeAPInt(rp.c, eval_result.Val.getInt());
-
- as_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- return maybeSuppressResult(rp, scope, result_used, &as_node.base);
-}
-
-/// In C if a function has return type `int` and the return value is a boolean
-/// expression, there is no implicit cast. So the translated Zig will need to
-/// call @boolToInt
-fn zigShouldCastBooleanReturnToInt(node: ?*ast.Node, qt: ?clang.QualType) bool {
- if (node == null or qt == null) return false;
- return isBoolRes(node.?) and cIsNativeInt(qt.?);
+ const ty_node = try transQualType(c, scope, expr_base.getType(), expr_base.getBeginLoc());
+ const rhs = try transCreateNodeAPInt(c, eval_result.Val.getInt());
+ const as = try Tag.as.create(c.arena, .{ .lhs = ty_node, .rhs = rhs });
+ return maybeSuppressResult(c, scope, result_used, as);
}
fn transReturnStmt(
- rp: RestorePoint,
+ c: *Context,
scope: *Scope,
expr: *const clang.ReturnStmt,
-) TransError!*ast.Node {
- const return_kw = try appendToken(rp.c, .Keyword_return, "return");
- var rhs: ?*ast.Node = if (expr.getRetValue()) |val_expr|
- try transExprCoercing(rp, scope, val_expr, .used, .r_value)
- else
- null;
- const return_qt = scope.findBlockReturnType(rp.c);
- if (zigShouldCastBooleanReturnToInt(rhs, return_qt)) {
- const bool_to_int_node = try rp.c.createBuiltinCall("@boolToInt", 1);
- bool_to_int_node.params()[0] = rhs.?;
- bool_to_int_node.rparen_token = try appendToken(rp.c, .RParen, ")");
-
- rhs = &bool_to_int_node.base;
- }
- const return_expr = try ast.Node.ControlFlowExpression.create(rp.c.arena, .{
- .ltoken = return_kw,
- .tag = .Return,
- }, .{
- .rhs = rhs,
- });
- _ = try appendToken(rp.c, .Semicolon, ";");
- return &return_expr.base;
+) TransError!Node {
+ const val_expr = expr.getRetValue() orelse
+ return Tag.return_void.init();
+
+ var rhs = try transExprCoercing(c, scope, val_expr, .used);
+ const return_qt = scope.findBlockReturnType(c);
+ if (isBoolRes(rhs) and !qualTypeIsBoolean(return_qt)) {
+ rhs = try Tag.bool_to_int.create(c.arena, rhs);
+ }
+ return Tag.@"return".create(c.arena, rhs);
}
fn transStringLiteral(
- rp: RestorePoint,
+ c: *Context,
scope: *Scope,
stmt: *const clang.StringLiteral,
result_used: ResultUsed,
-) TransError!*ast.Node {
+) TransError!Node {
const kind = stmt.getKind();
switch (kind) {
.Ascii, .UTF8 => {
var len: usize = undefined;
const bytes_ptr = stmt.getString_bytes_begin_size(&len);
- const str = bytes_ptr[0..len];
- const token = try appendTokenFmt(rp.c, .StringLiteral, "\"{}\"", .{std.zig.fmtEscapes(str)});
- const node = try rp.c.arena.create(ast.Node.OneToken);
- node.* = .{
- .base = .{ .tag = .StringLiteral },
- .token = token,
- };
- return maybeSuppressResult(rp, scope, result_used, &node.base);
+ const str = try std.fmt.allocPrint(c.arena, "\"{}\"", .{std.zig.fmtEscapes(bytes_ptr[0..len])});
+ const node = try Tag.string_literal.create(c.arena, str);
+ return maybeSuppressResult(c, scope, result_used, node);
},
.UTF16, .UTF32, .Wide => {
- const node = try transWideStringLiteral(rp, scope, stmt);
- return maybeSuppressResult(rp, scope, result_used, node);
+ const str_type = @tagName(stmt.getKind());
+ const name = try std.fmt.allocPrint(c.arena, "zig.{s}_string_{d}", .{ str_type, c.getMangle() });
+ const lit_array = try transStringLiteralAsArray(c, scope, stmt, stmt.getLength() + 1);
+
+ const decl = try Tag.var_simple.create(c.arena, .{ .name = name, .init = lit_array });
+ try scope.appendNode(decl);
+ const node = try Tag.identifier.create(c.arena, name);
+ return maybeSuppressResult(c, scope, result_used, node);
},
}
}
-/// Translates a wide string literal as a global "anonymous" array of the relevant-sized
-/// integer type + null terminator, and returns an identifier node for it
-fn transWideStringLiteral(rp: RestorePoint, scope: *Scope, stmt: *const clang.StringLiteral) TransError!*ast.Node {
- const str_type = @tagName(stmt.getKind());
- const mangle = rp.c.getMangle();
- const name = try std.fmt.allocPrint(rp.c.arena, "zig.{s}_string_{d}", .{ str_type, mangle });
-
- const const_tok = try appendToken(rp.c, .Keyword_const, "const");
- const name_tok = try appendIdentifier(rp.c, name);
- const eq_tok = try appendToken(rp.c, .Equal, "=");
- var semi_tok: ast.TokenIndex = undefined;
-
- const lit_array = try transStringLiteralAsArray(rp, scope, stmt, stmt.getLength() + 1);
-
- semi_tok = try appendToken(rp.c, .Semicolon, ";");
- const var_decl_node = try ast.Node.VarDecl.create(rp.c.arena, .{
- .name_token = name_tok,
- .mut_token = const_tok,
- .semicolon_token = semi_tok,
- }, .{
- .visib_token = null,
- .eq_token = eq_tok,
- .init_node = lit_array,
- });
- try addTopLevelDecl(rp.c, name, &var_decl_node.base);
- return transCreateNodeIdentifier(rp.c, name);
-}
-
/// Parse the size of an array back out from an ast Node.
-fn zigArraySize(c: *Context, node: *ast.Node) TransError!usize {
- if (node.castTag(.ArrayType)) |array| {
- if (array.len_expr.castTag(.IntegerLiteral)) |int_lit| {
- const tok = tokenSlice(c, int_lit.token);
- return std.fmt.parseUnsigned(usize, tok, 10) catch error.UnsupportedTranslation;
- }
+fn zigArraySize(c: *Context, node: Node) TransError!usize {
+ if (node.castTag(.array_type)) |array| {
+ return array.data.len;
}
return error.UnsupportedTranslation;
}
@@ -2142,11 +1649,11 @@ fn zigArraySize(c: *Context, node: *ast.Node) TransError!usize {
/// than the array, truncate the string. If the array is larger than the
/// string literal, pad the array with 0's
fn transStringLiteralAsArray(
- rp: RestorePoint,
+ c: *Context,
scope: *Scope,
stmt: *const clang.StringLiteral,
array_size: usize,
-) TransError!*ast.Node {
+) TransError!Node {
if (array_size == 0) return error.UnsupportedType;
const str_length = stmt.getLength();
@@ -2155,40 +1662,25 @@ fn transStringLiteralAsArray(
const ty = expr_base.getType().getTypePtr();
const const_arr_ty = @ptrCast(*const clang.ConstantArrayType, ty);
- const ty_node = try rp.c.arena.create(ast.Node.ArrayType);
- const op_token = try appendToken(rp.c, .LBracket, "[");
- const len_expr = try transCreateNodeInt(rp.c, array_size);
- _ = try appendToken(rp.c, .RBracket, "]");
-
- ty_node.* = .{
- .op_token = op_token,
- .rhs = try transQualType(rp, const_arr_ty.getElementType(), expr_base.getBeginLoc()),
- .len_expr = len_expr,
- };
- _ = try appendToken(rp.c, .LBrace, "{");
- var init_node = try ast.Node.ArrayInitializer.alloc(rp.c.arena, array_size);
- init_node.* = .{
- .lhs = &ty_node.base,
- .rtoken = undefined,
- .list_len = array_size,
- };
- const init_list = init_node.list();
+ const elem_type = try transQualType(c, scope, const_arr_ty.getElementType(), expr_base.getBeginLoc());
+ const arr_type = try Tag.array_type.create(c.arena, .{ .len = array_size, .elem_type = elem_type });
+ const init_list = try c.arena.alloc(Node, array_size);
var i: c_uint = 0;
const kind = stmt.getKind();
const narrow = kind == .Ascii or kind == .UTF8;
while (i < str_length and i < array_size) : (i += 1) {
const code_unit = stmt.getCodeUnit(i);
- init_list[i] = try transCreateCharLitNode(rp.c, narrow, code_unit);
- _ = try appendToken(rp.c, .Comma, ",");
+ init_list[i] = try transCreateCharLitNode(c, narrow, code_unit);
}
while (i < array_size) : (i += 1) {
- init_list[i] = try transCreateNodeInt(rp.c, 0);
- _ = try appendToken(rp.c, .Comma, ",");
+ init_list[i] = try transCreateNodeNumber(c, 0, .int);
}
- init_node.rtoken = try appendToken(rp.c, .RBrace, "}");
- return &init_node.base;
+ return Tag.array_init.create(c.arena, .{
+ .cond = arr_type,
+ .cases = init_list,
+ });
}
fn cIsEnum(qt: clang.QualType) bool {
@@ -2207,199 +1699,164 @@ fn cIntTypeForEnum(enum_qt: clang.QualType) clang.QualType {
}
fn transCCast(
- rp: RestorePoint,
+ c: *Context,
scope: *Scope,
loc: clang.SourceLocation,
dst_type: clang.QualType,
src_type: clang.QualType,
- expr: *ast.Node,
-) !*ast.Node {
+ expr: Node,
+) !Node {
if (qualTypeCanon(dst_type).isVoidType()) return expr;
if (dst_type.eq(src_type)) return expr;
if (qualTypeIsPtr(dst_type) and qualTypeIsPtr(src_type))
- return transCPtrCast(rp, loc, dst_type, src_type, expr);
+ return transCPtrCast(c, scope, loc, dst_type, src_type, expr);
+
+ const dst_node = try transQualType(c, scope, dst_type, loc);
if (cIsInteger(dst_type) and (cIsInteger(src_type) or cIsEnum(src_type))) {
// 1. If src_type is an enum, determine the underlying signed int type
// 2. Extend or truncate without changing signed-ness.
// 3. Bit-cast to correct signed-ness
const src_type_is_signed = cIsSignedInteger(src_type) or cIsEnum(src_type);
const src_int_type = if (cIsInteger(src_type)) src_type else cIntTypeForEnum(src_type);
- var src_int_expr = if (cIsInteger(src_type)) expr else try transEnumToInt(rp.c, expr);
-
- // @bitCast(dest_type, intermediate_value)
- const cast_node = try rp.c.createBuiltinCall("@bitCast", 2);
- cast_node.params()[0] = try transQualType(rp, dst_type, loc);
- _ = try appendToken(rp.c, .Comma, ",");
+ var src_int_expr = if (cIsInteger(src_type)) expr else try Tag.enum_to_int.create(c.arena, expr);
if (isBoolRes(src_int_expr)) {
- const bool_to_int_node = try rp.c.createBuiltinCall("@boolToInt", 1);
- bool_to_int_node.params()[0] = src_int_expr;
- bool_to_int_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- src_int_expr = &bool_to_int_node.base;
+ src_int_expr = try Tag.bool_to_int.create(c.arena, src_int_expr);
}
switch (cIntTypeCmp(dst_type, src_int_type)) {
.lt => {
// @truncate(SameSignSmallerInt, src_int_expr)
- const trunc_node = try rp.c.createBuiltinCall("@truncate", 2);
- const ty_node = try transQualTypeIntWidthOf(rp.c, dst_type, src_type_is_signed);
- trunc_node.params()[0] = ty_node;
- _ = try appendToken(rp.c, .Comma, ",");
- trunc_node.params()[1] = src_int_expr;
- trunc_node.rparen_token = try appendToken(rp.c, .RParen, ")");
-
- cast_node.params()[1] = &trunc_node.base;
+ const ty_node = try transQualTypeIntWidthOf(c, dst_type, src_type_is_signed);
+ src_int_expr = try Tag.truncate.create(c.arena, .{ .lhs = ty_node, .rhs = src_int_expr });
},
.gt => {
// @as(SameSignBiggerInt, src_int_expr)
- const as_node = try rp.c.createBuiltinCall("@as", 2);
- const ty_node = try transQualTypeIntWidthOf(rp.c, dst_type, src_type_is_signed);
- as_node.params()[0] = ty_node;
- _ = try appendToken(rp.c, .Comma, ",");
- as_node.params()[1] = src_int_expr;
- as_node.rparen_token = try appendToken(rp.c, .RParen, ")");
-
- cast_node.params()[1] = &as_node.base;
+ const ty_node = try transQualTypeIntWidthOf(c, dst_type, src_type_is_signed);
+ src_int_expr = try Tag.as.create(c.arena, .{ .lhs = ty_node, .rhs = src_int_expr });
},
.eq => {
- cast_node.params()[1] = src_int_expr;
+ // src_int_expr = src_int_expr
},
}
- cast_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- return &cast_node.base;
+ // @bitCast(dest_type, intermediate_value)
+ return Tag.bit_cast.create(c.arena, .{ .lhs = dst_node, .rhs = src_int_expr });
}
if (cIsInteger(dst_type) and qualTypeIsPtr(src_type)) {
// @intCast(dest_type, @ptrToInt(val))
- const cast_node = try rp.c.createBuiltinCall("@intCast", 2);
- cast_node.params()[0] = try transQualType(rp, dst_type, loc);
- _ = try appendToken(rp.c, .Comma, ",");
- const builtin_node = try rp.c.createBuiltinCall("@ptrToInt", 1);
- builtin_node.params()[0] = expr;
- builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- cast_node.params()[1] = &builtin_node.base;
- cast_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- return &cast_node.base;
+ const ptr_to_int = try Tag.ptr_to_int.create(c.arena, expr);
+ return Tag.int_cast.create(c.arena, .{ .lhs = dst_node, .rhs = ptr_to_int });
}
if (cIsInteger(src_type) and qualTypeIsPtr(dst_type)) {
// @intToPtr(dest_type, val)
- const builtin_node = try rp.c.createBuiltinCall("@intToPtr", 2);
- builtin_node.params()[0] = try transQualType(rp, dst_type, loc);
- _ = try appendToken(rp.c, .Comma, ",");
- builtin_node.params()[1] = expr;
- builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- return &builtin_node.base;
+ return Tag.int_to_ptr.create(c.arena, .{ .lhs = dst_node, .rhs = expr });
}
if (cIsFloating(src_type) and cIsFloating(dst_type)) {
- const builtin_node = try rp.c.createBuiltinCall("@floatCast", 2);
- builtin_node.params()[0] = try transQualType(rp, dst_type, loc);
- _ = try appendToken(rp.c, .Comma, ",");
- builtin_node.params()[1] = expr;
- builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- return &builtin_node.base;
+ // @floatCast(dest_type, val)
+ return Tag.float_cast.create(c.arena, .{ .lhs = dst_node, .rhs = expr });
}
if (cIsFloating(src_type) and !cIsFloating(dst_type)) {
- const builtin_node = try rp.c.createBuiltinCall("@floatToInt", 2);
- builtin_node.params()[0] = try transQualType(rp, dst_type, loc);
- _ = try appendToken(rp.c, .Comma, ",");
- builtin_node.params()[1] = expr;
- builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- return &builtin_node.base;
+ // @floatToInt(dest_type, val)
+ return Tag.float_to_int.create(c.arena, .{ .lhs = dst_node, .rhs = expr });
}
if (!cIsFloating(src_type) and cIsFloating(dst_type)) {
- const builtin_node = try rp.c.createBuiltinCall("@intToFloat", 2);
- builtin_node.params()[0] = try transQualType(rp, dst_type, loc);
- _ = try appendToken(rp.c, .Comma, ",");
- builtin_node.params()[1] = expr;
- builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- return &builtin_node.base;
+ // @intToFloat(dest_type, val)
+ return Tag.int_to_float.create(c.arena, .{ .lhs = dst_node, .rhs = expr });
}
if (qualTypeIsBoolean(src_type) and !qualTypeIsBoolean(dst_type)) {
// @boolToInt returns either a comptime_int or a u1
// TODO: if dst_type is 1 bit & signed (bitfield) we need @bitCast
// instead of @as
- const builtin_node = try rp.c.createBuiltinCall("@boolToInt", 1);
- builtin_node.params()[0] = expr;
- builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
-
- const as_node = try rp.c.createBuiltinCall("@as", 2);
- as_node.params()[0] = try transQualType(rp, dst_type, loc);
- _ = try appendToken(rp.c, .Comma, ",");
- as_node.params()[1] = &builtin_node.base;
- as_node.rparen_token = try appendToken(rp.c, .RParen, ")");
-
- return &as_node.base;
+ const bool_to_int = try Tag.bool_to_int.create(c.arena, expr);
+ return Tag.as.create(c.arena, .{ .lhs = dst_node, .rhs = bool_to_int });
}
if (cIsEnum(dst_type)) {
- const builtin_node = try rp.c.createBuiltinCall("@intToEnum", 2);
- builtin_node.params()[0] = try transQualType(rp, dst_type, loc);
- _ = try appendToken(rp.c, .Comma, ",");
- builtin_node.params()[1] = expr;
- builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- return &builtin_node.base;
+ // @intToEnum(dest_type, val)
+ return Tag.int_to_enum.create(c.arena, .{ .lhs = dst_node, .rhs = expr });
}
if (cIsEnum(src_type) and !cIsEnum(dst_type)) {
- return transEnumToInt(rp.c, expr);
+ // @enumToInt(val)
+ return Tag.enum_to_int.create(c.arena, expr);
}
- const cast_node = try rp.c.createBuiltinCall("@as", 2);
- cast_node.params()[0] = try transQualType(rp, dst_type, loc);
- _ = try appendToken(rp.c, .Comma, ",");
- cast_node.params()[1] = expr;
- cast_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- return &cast_node.base;
-}
-
-fn transEnumToInt(c: *Context, enum_expr: *ast.Node) TypeError!*ast.Node {
- const builtin_node = try c.createBuiltinCall("@enumToInt", 1);
- builtin_node.params()[0] = enum_expr;
- builtin_node.rparen_token = try appendToken(c, .RParen, ")");
- return &builtin_node.base;
+ // @as(dest_type, val)
+ return Tag.as.create(c.arena, .{ .lhs = dst_node, .rhs = expr });
}
-fn transExpr(
- rp: RestorePoint,
- scope: *Scope,
- expr: *const clang.Expr,
- used: ResultUsed,
- lrvalue: LRValue,
-) TransError!*ast.Node {
- return transStmt(rp, scope, @ptrCast(*const clang.Stmt, expr), used, lrvalue);
+fn transExpr(c: *Context, scope: *Scope, expr: *const clang.Expr, used: ResultUsed) TransError!Node {
+ return transStmt(c, scope, @ptrCast(*const clang.Stmt, expr), used);
}
/// Same as `transExpr` but with the knowledge that the operand will be type coerced, and therefore
/// an `@as` would be redundant. This is used to prevent redundant `@as` in integer literals.
-fn transExprCoercing(
- rp: RestorePoint,
- scope: *Scope,
- expr: *const clang.Expr,
- used: ResultUsed,
- lrvalue: LRValue,
-) TransError!*ast.Node {
+fn transExprCoercing(c: *Context, scope: *Scope, expr: *const clang.Expr, used: ResultUsed) TransError!Node {
switch (@ptrCast(*const clang.Stmt, expr).getStmtClass()) {
.IntegerLiteralClass => {
- return transIntegerLiteral(rp, scope, @ptrCast(*const clang.IntegerLiteral, expr), .used, .no_as);
+ return transIntegerLiteral(c, scope, @ptrCast(*const clang.IntegerLiteral, expr), .used, .no_as);
},
.CharacterLiteralClass => {
- return transCharLiteral(rp, scope, @ptrCast(*const clang.CharacterLiteral, expr), .used, .no_as);
+ return transCharLiteral(c, scope, @ptrCast(*const clang.CharacterLiteral, expr), .used, .no_as);
},
.UnaryOperatorClass => {
const un_expr = @ptrCast(*const clang.UnaryOperator, expr);
if (un_expr.getOpcode() == .Extension) {
- return transExprCoercing(rp, scope, un_expr.getSubExpr(), used, lrvalue);
+ return transExprCoercing(c, scope, un_expr.getSubExpr(), used);
+ }
+ },
+ .ImplicitCastExprClass => {
+ const cast_expr = @ptrCast(*const clang.ImplicitCastExpr, expr);
+ const sub_expr = cast_expr.getSubExpr();
+ switch (@ptrCast(*const clang.Stmt, sub_expr).getStmtClass()) {
+ .IntegerLiteralClass, .CharacterLiteralClass => switch (cast_expr.getCastKind()) {
+ .IntegralToFloating => return transExprCoercing(c, scope, sub_expr, used),
+ .IntegralCast => {
+ const dest_type = getExprQualType(c, expr);
+ if (literalFitsInType(c, sub_expr, dest_type))
+ return transExprCoercing(c, scope, sub_expr, used);
+ },
+ else => {},
+ },
+ else => {},
}
},
else => {},
}
- return transExpr(rp, scope, expr, .used, .r_value);
+ return transExpr(c, scope, expr, .used);
+}
+
+fn literalFitsInType(c: *Context, expr: *const clang.Expr, qt: clang.QualType) bool {
+ var width = qualTypeIntBitWidth(c, qt) catch 8;
+ if (width == 0) width = 8; // Byte is the smallest type.
+ const is_signed = cIsSignedInteger(qt);
+ const width_max_int = (@as(u64, 1) << math.lossyCast(u6, width - @boolToInt(is_signed))) - 1;
+
+ switch (@ptrCast(*const clang.Stmt, expr).getStmtClass()) {
+ .CharacterLiteralClass => {
+ const char_lit = @ptrCast(*const clang.CharacterLiteral, expr);
+ const val = char_lit.getValue();
+ // If the val is less than the max int then it fits.
+ return val <= width_max_int;
+ },
+ .IntegerLiteralClass => {
+ const int_lit = @ptrCast(*const clang.IntegerLiteral, expr);
+ var eval_result: clang.ExprEvalResult = undefined;
+ if (!int_lit.EvaluateAsInt(&eval_result, c.clang_context)) {
+ return false;
+ }
+
+ const int = eval_result.Val.getInt();
+ return int.lessThanEqual(width_max_int);
+ },
+ else => unreachable,
+ }
}
fn transInitListExprRecord(
- rp: RestorePoint,
+ c: *Context,
scope: *Scope,
loc: clang.SourceLocation,
expr: *const clang.InitListExpr,
ty: *const clang.Type,
- used: ResultUsed,
-) TransError!*ast.Node {
+) TransError!Node {
var is_union_type = false;
// Unions and Structs are both represented as RecordDecl
const record_ty = ty.getAsRecordType() orelse
@@ -2411,13 +1868,11 @@ fn transInitListExprRecord(
const record_def = record_decl.getDefinition() orelse
unreachable;
- const ty_node = try transType(rp, ty, loc);
+ const ty_node = try transType(c, scope, ty, loc);
const init_count = expr.getNumInits();
- var field_inits = std.ArrayList(*ast.Node).init(rp.c.gpa);
+ var field_inits = std.ArrayList(ast.Payload.ContainerInit.Initializer).init(c.gpa);
defer field_inits.deinit();
- _ = try appendToken(rp.c, .LBrace, "{");
-
var init_i: c_uint = 0;
var it = record_def.field_begin();
const end_it = record_def.field_end();
@@ -2435,78 +1890,34 @@ fn transInitListExprRecord(
// Generate the field assignment expression:
// .field_name = expr
- const period_tok = try appendToken(rp.c, .Period, ".");
-
- var raw_name = try rp.c.str(@ptrCast(*const clang.NamedDecl, field_decl).getName_bytes_begin());
+ var raw_name = try c.str(@ptrCast(*const clang.NamedDecl, field_decl).getName_bytes_begin());
if (field_decl.isAnonymousStructOrUnion()) {
- const name = rp.c.decl_table.get(@ptrToInt(field_decl.getCanonicalDecl())).?;
- raw_name = try mem.dupe(rp.c.arena, u8, name);
+ const name = c.decl_table.get(@ptrToInt(field_decl.getCanonicalDecl())).?;
+ raw_name = try mem.dupe(c.arena, u8, name);
}
- const field_name_tok = try appendIdentifier(rp.c, raw_name);
-
- _ = try appendToken(rp.c, .Equal, "=");
- const field_init_node = try rp.c.arena.create(ast.Node.FieldInitializer);
- field_init_node.* = .{
- .period_token = period_tok,
- .name_token = field_name_tok,
- .expr = try transExpr(rp, scope, elem_expr, .used, .r_value),
- };
-
- try field_inits.append(&field_init_node.base);
- _ = try appendToken(rp.c, .Comma, ",");
+ try field_inits.append(.{
+ .name = raw_name,
+ .value = try transExpr(c, scope, elem_expr, .used),
+ });
}
- const node = try ast.Node.StructInitializer.alloc(rp.c.arena, field_inits.items.len);
- node.* = .{
- .lhs = ty_node,
- .rtoken = try appendToken(rp.c, .RBrace, "}"),
- .list_len = field_inits.items.len,
- };
- mem.copy(*ast.Node, node.list(), field_inits.items);
- return &node.base;
-}
-
-fn transCreateNodeArrayType(
- rp: RestorePoint,
- source_loc: clang.SourceLocation,
- ty: *const clang.Type,
- len: anytype,
-) !*ast.Node {
- const node = try rp.c.arena.create(ast.Node.ArrayType);
- const op_token = try appendToken(rp.c, .LBracket, "[");
- const len_expr = try transCreateNodeInt(rp.c, len);
- _ = try appendToken(rp.c, .RBracket, "]");
- node.* = .{
- .op_token = op_token,
- .rhs = try transType(rp, ty, source_loc),
- .len_expr = len_expr,
- };
- return &node.base;
-}
-
-fn transCreateEmptyArray(rp: RestorePoint, loc: clang.SourceLocation, ty: *const clang.Type) TransError!*ast.Node {
- const ty_node = try transCreateNodeArrayType(rp, loc, ty, 0);
- _ = try appendToken(rp.c, .LBrace, "{");
- const filler_init_node = try ast.Node.ArrayInitializer.alloc(rp.c.arena, 0);
- filler_init_node.* = .{
+ return Tag.container_init.create(c.arena, .{
.lhs = ty_node,
- .rtoken = try appendToken(rp.c, .RBrace, "}"),
- .list_len = 0,
- };
- return &filler_init_node.base;
+ .inits = try c.arena.dupe(ast.Payload.ContainerInit.Initializer, field_inits.items),
+ });
}
fn transInitListExprArray(
- rp: RestorePoint,
+ c: *Context,
scope: *Scope,
loc: clang.SourceLocation,
expr: *const clang.InitListExpr,
ty: *const clang.Type,
- used: ResultUsed,
-) TransError!*ast.Node {
+) TransError!Node {
const arr_type = ty.getAsArrayTypeUnsafe();
const child_qt = arr_type.getElementType();
+ const child_type = try transQualType(c, scope, child_qt, loc);
const init_count = expr.getNumInits();
assert(@ptrCast(*const clang.Type, arr_type).isConstantArrayType());
const const_arr_ty = @ptrCast(*const clang.ConstantArrayType, arr_type);
@@ -2515,125 +1926,83 @@ fn transInitListExprArray(
const leftover_count = all_count - init_count;
if (all_count == 0) {
- return transCreateEmptyArray(rp, loc, child_qt.getTypePtr());
+ return Tag.empty_array.create(c.arena, child_type);
}
- var init_node: *ast.Node.ArrayInitializer = undefined;
- var cat_tok: ast.TokenIndex = undefined;
- if (init_count != 0) {
- const ty_node = try transCreateNodeArrayType(
- rp,
- loc,
- child_qt.getTypePtr(),
- init_count,
- );
- _ = try appendToken(rp.c, .LBrace, "{");
- init_node = try ast.Node.ArrayInitializer.alloc(rp.c.arena, init_count);
- init_node.* = .{
- .lhs = ty_node,
- .rtoken = undefined,
- .list_len = init_count,
- };
- const init_list = init_node.list();
+ const init_node = if (init_count != 0) blk: {
+ const init_list = try c.arena.alloc(Node, init_count);
- var i: c_uint = 0;
- while (i < init_count) : (i += 1) {
- const elem_expr = expr.getInit(i);
- init_list[i] = try transExpr(rp, scope, elem_expr, .used, .r_value);
- _ = try appendToken(rp.c, .Comma, ",");
+ for (init_list) |*init, i| {
+ const elem_expr = expr.getInit(@intCast(c_uint, i));
+ init.* = try transExprCoercing(c, scope, elem_expr, .used);
}
- init_node.rtoken = try appendToken(rp.c, .RBrace, "}");
+ const init_node = try Tag.array_init.create(c.arena, .{
+ .cond = try Tag.array_type.create(c.arena, .{ .len = init_count, .elem_type = child_type }),
+ .cases = init_list,
+ });
if (leftover_count == 0) {
- return &init_node.base;
+ return init_node;
}
- cat_tok = try appendToken(rp.c, .PlusPlus, "++");
- }
+ break :blk init_node;
+ } else null;
- const ty_node = try transCreateNodeArrayType(rp, loc, child_qt.getTypePtr(), 1);
- _ = try appendToken(rp.c, .LBrace, "{");
- const filler_init_node = try ast.Node.ArrayInitializer.alloc(rp.c.arena, 1);
- filler_init_node.* = .{
- .lhs = ty_node,
- .rtoken = undefined,
- .list_len = 1,
- };
const filler_val_expr = expr.getArrayFiller();
- filler_init_node.list()[0] = try transExpr(rp, scope, filler_val_expr, .used, .r_value);
- filler_init_node.rtoken = try appendToken(rp.c, .RBrace, "}");
-
- const rhs_node = if (leftover_count == 1)
- &filler_init_node.base
- else blk: {
- const mul_tok = try appendToken(rp.c, .AsteriskAsterisk, "**");
- const mul_node = try rp.c.arena.create(ast.Node.SimpleInfixOp);
- mul_node.* = .{
- .base = .{ .tag = .ArrayMult },
- .op_token = mul_tok,
- .lhs = &filler_init_node.base,
- .rhs = try transCreateNodeInt(rp.c, leftover_count),
- };
- break :blk &mul_node.base;
- };
+ const filler_node = try Tag.array_filler.create(c.arena, .{
+ .type = child_type,
+ .filler = try transExprCoercing(c, scope, filler_val_expr, .used),
+ .count = leftover_count,
+ });
- if (init_count == 0) {
- return rhs_node;
+ if (init_node) |some| {
+ return Tag.array_cat.create(c.arena, .{ .lhs = some, .rhs = filler_node });
+ } else {
+ return filler_node;
}
-
- const cat_node = try rp.c.arena.create(ast.Node.SimpleInfixOp);
- cat_node.* = .{
- .base = .{ .tag = .ArrayCat },
- .op_token = cat_tok,
- .lhs = &init_node.base,
- .rhs = rhs_node,
- };
- return &cat_node.base;
}
fn transInitListExpr(
- rp: RestorePoint,
+ c: *Context,
scope: *Scope,
expr: *const clang.InitListExpr,
used: ResultUsed,
-) TransError!*ast.Node {
- const qt = getExprQualType(rp.c, @ptrCast(*const clang.Expr, expr));
+) TransError!Node {
+ const qt = getExprQualType(c, @ptrCast(*const clang.Expr, expr));
var qual_type = qt.getTypePtr();
const source_loc = @ptrCast(*const clang.Expr, expr).getBeginLoc();
if (qual_type.isRecordType()) {
- return transInitListExprRecord(
- rp,
+ return maybeSuppressResult(c, scope, used, try transInitListExprRecord(
+ c,
scope,
source_loc,
expr,
qual_type,
- used,
- );
+ ));
} else if (qual_type.isArrayType()) {
- return transInitListExprArray(
- rp,
+ return maybeSuppressResult(c, scope, used, try transInitListExprArray(
+ c,
scope,
source_loc,
expr,
qual_type,
- used,
- );
+ ));
} else {
- const type_name = rp.c.str(qual_type.getTypeClassName());
- return revertAndWarn(rp, error.UnsupportedType, source_loc, "unsupported initlist type: '{s}'", .{type_name});
+ const type_name = c.str(qual_type.getTypeClassName());
+ return fail(c, error.UnsupportedType, source_loc, "unsupported initlist type: '{s}'", .{type_name});
}
}
fn transZeroInitExpr(
- rp: RestorePoint,
+ c: *Context,
scope: *Scope,
source_loc: clang.SourceLocation,
ty: *const clang.Type,
-) TransError!*ast.Node {
+) TransError!Node {
switch (ty.getTypeClass()) {
.Builtin => {
const builtin_ty = @ptrCast(*const clang.BuiltinType, ty);
switch (builtin_ty.getKind()) {
- .Bool => return try transCreateNodeBoolLiteral(rp.c, false),
+ .Bool => return Tag.false_literal.init(),
.Char_U,
.UChar,
.Char_S,
@@ -2654,126 +2023,112 @@ fn transZeroInitExpr(
.Float128,
.Float16,
.LongDouble,
- => return transCreateNodeInt(rp.c, 0),
- else => return revertAndWarn(rp, error.UnsupportedType, source_loc, "unsupported builtin type", .{}),
+ => return Tag.zero_literal.init(),
+ else => return fail(c, error.UnsupportedType, source_loc, "unsupported builtin type", .{}),
}
},
- .Pointer => return transCreateNodeNullLiteral(rp.c),
+ .Pointer => return Tag.null_literal.init(),
.Typedef => {
const typedef_ty = @ptrCast(*const clang.TypedefType, ty);
const typedef_decl = typedef_ty.getDecl();
return transZeroInitExpr(
- rp,
+ c,
scope,
source_loc,
typedef_decl.getUnderlyingType().getTypePtr(),
);
},
- else => {},
+ else => return Tag.std_mem_zeroes.create(c.arena, try transType(c, scope, ty, source_loc)),
}
-
- return revertAndWarn(rp, error.UnsupportedType, source_loc, "type does not have an implicit init value", .{});
}
fn transImplicitValueInitExpr(
- rp: RestorePoint,
+ c: *Context,
scope: *Scope,
expr: *const clang.Expr,
used: ResultUsed,
-) TransError!*ast.Node {
+) TransError!Node {
const source_loc = expr.getBeginLoc();
- const qt = getExprQualType(rp.c, expr);
+ const qt = getExprQualType(c, expr);
const ty = qt.getTypePtr();
- return transZeroInitExpr(rp, scope, source_loc, ty);
+ return transZeroInitExpr(c, scope, source_loc, ty);
}
fn transIfStmt(
- rp: RestorePoint,
+ c: *Context,
scope: *Scope,
stmt: *const clang.IfStmt,
-) TransError!*ast.Node {
+) TransError!Node {
// if (c) t
// if (c) t else e
- const if_node = try transCreateNodeIf(rp.c);
-
var cond_scope = Scope.Condition{
.base = .{
.parent = scope,
- .id = .Condition,
+ .id = .condition,
},
};
defer cond_scope.deinit();
const cond_expr = @ptrCast(*const clang.Expr, stmt.getCond());
- if_node.condition = try transBoolExpr(rp, &cond_scope.base, cond_expr, .used, .r_value, false);
- _ = try appendToken(rp.c, .RParen, ")");
+ const cond = try transBoolExpr(c, &cond_scope.base, cond_expr, .used);
- if_node.body = try transStmt(rp, scope, stmt.getThen(), .unused, .r_value);
-
- if (stmt.getElse()) |expr| {
- if_node.@"else" = try transCreateNodeElse(rp.c);
- if_node.@"else".?.body = try transStmt(rp, scope, expr, .unused, .r_value);
- }
- _ = try appendToken(rp.c, .Semicolon, ";");
- return &if_node.base;
+ const then_body = try transStmt(c, scope, stmt.getThen(), .unused);
+ const else_body = if (stmt.getElse()) |expr|
+ try transStmt(c, scope, expr, .unused)
+ else
+ null;
+ return Tag.@"if".create(c.arena, .{ .cond = cond, .then = then_body, .@"else" = else_body });
}
fn transWhileLoop(
- rp: RestorePoint,
+ c: *Context,
scope: *Scope,
stmt: *const clang.WhileStmt,
-) TransError!*ast.Node {
- const while_node = try transCreateNodeWhile(rp.c);
-
+) TransError!Node {
var cond_scope = Scope.Condition{
.base = .{
.parent = scope,
- .id = .Condition,
+ .id = .condition,
},
};
defer cond_scope.deinit();
const cond_expr = @ptrCast(*const clang.Expr, stmt.getCond());
- while_node.condition = try transBoolExpr(rp, &cond_scope.base, cond_expr, .used, .r_value, false);
- _ = try appendToken(rp.c, .RParen, ")");
+ const cond = try transBoolExpr(c, &cond_scope.base, cond_expr, .used);
var loop_scope = Scope{
.parent = scope,
- .id = .Loop,
+ .id = .loop,
};
- while_node.body = try transStmt(rp, &loop_scope, stmt.getBody(), .unused, .r_value);
- _ = try appendToken(rp.c, .Semicolon, ";");
- return &while_node.base;
+ const body = try transStmt(c, &loop_scope, stmt.getBody(), .unused);
+ return Tag.@"while".create(c.arena, .{ .cond = cond, .body = body, .cont_expr = null });
}
fn transDoWhileLoop(
- rp: RestorePoint,
+ c: *Context,
scope: *Scope,
stmt: *const clang.DoStmt,
-) TransError!*ast.Node {
- const while_node = try transCreateNodeWhile(rp.c);
-
- while_node.condition = try transCreateNodeBoolLiteral(rp.c, true);
- _ = try appendToken(rp.c, .RParen, ")");
- var new = false;
+) TransError!Node {
var loop_scope = Scope{
.parent = scope,
- .id = .Loop,
+ .id = .do_loop,
};
// if (!cond) break;
- const if_node = try transCreateNodeIf(rp.c);
var cond_scope = Scope.Condition{
.base = .{
.parent = scope,
- .id = .Condition,
+ .id = .condition,
},
};
defer cond_scope.deinit();
- const prefix_op = try transCreateNodeSimplePrefixOp(rp.c, .BoolNot, .Bang, "!");
- prefix_op.rhs = try transBoolExpr(rp, &cond_scope.base, @ptrCast(*const clang.Expr, stmt.getCond()), .used, .r_value, true);
- _ = try appendToken(rp.c, .RParen, ")");
- if_node.condition = &prefix_op.base;
- if_node.body = &(try transCreateNodeBreak(rp.c, null, null)).base;
- _ = try appendToken(rp.c, .Semicolon, ";");
+ const cond = try transBoolExpr(c, &cond_scope.base, @ptrCast(*const clang.Expr, stmt.getCond()), .used);
+ const if_not_break = switch (cond.tag()) {
+ .false_literal => return transStmt(c, scope, stmt.getBody(), .unused),
+ .true_literal => {
+ const body_node = try transStmt(c, scope, stmt.getBody(), .unused);
+ return Tag.while_true.create(c.arena, body_node);
+ },
+ else => try Tag.if_not_break.create(c.arena, cond),
+ };
const body_node = if (stmt.getBody().getStmtClass() == .CompoundStmtClass) blk: {
// there's already a block in C, so we'll append our condition to it.
@@ -2786,8 +2141,11 @@ fn transDoWhileLoop(
// zig: b;
// zig: if (!cond) break;
// zig: }
- const node = try transStmt(rp, &loop_scope, stmt.getBody(), .unused, .r_value);
- break :blk node.castTag(.Block).?;
+ const node = try transStmt(c, &loop_scope, stmt.getBody(), .unused);
+ const block = node.castTag(.block).?;
+ block.data.stmts.len += 1; // This is safe since we reserve one extra space in Scope.Block.complete.
+ block.data.stmts[block.data.stmts.len - 1] = if_not_break;
+ break :blk node;
} else blk: {
// the C statement is without a block, so we need to create a block to contain it.
// c: do
@@ -2797,387 +2155,356 @@ fn transDoWhileLoop(
// zig: a;
// zig: if (!cond) break;
// zig: }
- new = true;
- const block = try rp.c.createBlock(2);
- block.statements_len = 1; // over-allocated so we can add another below
- block.statements()[0] = try transStmt(rp, &loop_scope, stmt.getBody(), .unused, .r_value);
- break :blk block;
+ const statements = try c.arena.alloc(Node, 2);
+ statements[0] = try transStmt(c, &loop_scope, stmt.getBody(), .unused);
+ statements[1] = if_not_break;
+ break :blk try Tag.block.create(c.arena, .{ .label = null, .stmts = statements });
};
-
- // In both cases above, we reserved 1 extra statement.
- body_node.statements_len += 1;
- body_node.statements()[body_node.statements_len - 1] = &if_node.base;
- if (new)
- body_node.rbrace = try appendToken(rp.c, .RBrace, "}");
- while_node.body = &body_node.base;
- return &while_node.base;
+ return Tag.while_true.create(c.arena, body_node);
}
fn transForLoop(
- rp: RestorePoint,
+ c: *Context,
scope: *Scope,
stmt: *const clang.ForStmt,
-) TransError!*ast.Node {
+) TransError!Node {
var loop_scope = Scope{
.parent = scope,
- .id = .Loop,
+ .id = .loop,
};
var block_scope: ?Scope.Block = null;
defer if (block_scope) |*bs| bs.deinit();
if (stmt.getInit()) |init| {
- block_scope = try Scope.Block.init(rp.c, scope, false);
+ block_scope = try Scope.Block.init(c, scope, false);
loop_scope.parent = &block_scope.?.base;
- const init_node = try transStmt(rp, &block_scope.?.base, init, .unused, .r_value);
- try block_scope.?.statements.append(init_node);
+ const init_node = try transStmt(c, &block_scope.?.base, init, .unused);
+ if (init_node.tag() != .declaration) try block_scope.?.statements.append(init_node);
}
var cond_scope = Scope.Condition{
.base = .{
.parent = &loop_scope,
- .id = .Condition,
+ .id = .condition,
},
};
defer cond_scope.deinit();
- const while_node = try transCreateNodeWhile(rp.c);
- while_node.condition = if (stmt.getCond()) |cond|
- try transBoolExpr(rp, &cond_scope.base, cond, .used, .r_value, false)
+ const cond = if (stmt.getCond()) |cond|
+ try transBoolExpr(c, &cond_scope.base, cond, .used)
else
- try transCreateNodeBoolLiteral(rp.c, true);
- _ = try appendToken(rp.c, .RParen, ")");
+ Tag.true_literal.init();
- if (stmt.getInc()) |incr| {
- _ = try appendToken(rp.c, .Colon, ":");
- _ = try appendToken(rp.c, .LParen, "(");
- while_node.continue_expr = try transExpr(rp, &cond_scope.base, incr, .unused, .r_value);
- _ = try appendToken(rp.c, .RParen, ")");
- }
+ const cont_expr = if (stmt.getInc()) |incr|
+ try transExpr(c, &cond_scope.base, incr, .unused)
+ else
+ null;
- while_node.body = try transStmt(rp, &loop_scope, stmt.getBody(), .unused, .r_value);
+ const body = try transStmt(c, &loop_scope, stmt.getBody(), .unused);
+ const while_node = try Tag.@"while".create(c.arena, .{ .cond = cond, .body = body, .cont_expr = cont_expr });
if (block_scope) |*bs| {
- try bs.statements.append(&while_node.base);
- return try bs.complete(rp.c);
+ try bs.statements.append(while_node);
+ return try bs.complete(c);
} else {
- _ = try appendToken(rp.c, .Semicolon, ";");
- return &while_node.base;
+ return while_node;
}
}
-fn getSwitchCaseCount(stmt: *const clang.SwitchStmt) usize {
- const body = stmt.getBody();
- assert(body.getStmtClass() == .CompoundStmtClass);
- const comp = @ptrCast(*const clang.CompoundStmt, body);
- // TODO https://github.com/ziglang/zig/issues/1738
- // return comp.body_end() - comp.body_begin();
- const start_addr = @ptrToInt(comp.body_begin());
- const end_addr = @ptrToInt(comp.body_end());
- return (end_addr - start_addr) / @sizeOf(*clang.Stmt);
-}
-
fn transSwitch(
- rp: RestorePoint,
+ c: *Context,
scope: *Scope,
stmt: *const clang.SwitchStmt,
-) TransError!*ast.Node {
- const switch_tok = try appendToken(rp.c, .Keyword_switch, "switch");
- _ = try appendToken(rp.c, .LParen, "(");
-
- const cases_len = getSwitchCaseCount(stmt);
-
+) TransError!Node {
var cond_scope = Scope.Condition{
.base = .{
.parent = scope,
- .id = .Condition,
+ .id = .condition,
},
};
defer cond_scope.deinit();
- const switch_expr = try transExpr(rp, &cond_scope.base, stmt.getCond(), .used, .r_value);
- _ = try appendToken(rp.c, .RParen, ")");
- _ = try appendToken(rp.c, .LBrace, "{");
- // reserve +1 case in case there is no default case
- const switch_node = try ast.Node.Switch.alloc(rp.c.arena, cases_len + 1);
- switch_node.* = .{
- .switch_token = switch_tok,
- .expr = switch_expr,
- .cases_len = cases_len + 1,
- .rbrace = try appendToken(rp.c, .RBrace, "}"),
- };
+ const switch_expr = try transExpr(c, &cond_scope.base, stmt.getCond(), .used);
- var switch_scope = Scope.Switch{
- .base = .{
- .id = .Switch,
- .parent = scope,
- },
- .cases = switch_node.cases(),
- .case_index = 0,
- .pending_block = undefined,
- .default_label = null,
- .switch_label = null,
- };
+ var cases = std.ArrayList(Node).init(c.gpa);
+ defer cases.deinit();
+ var has_default = false;
- // tmp block that all statements will go before being picked up by a case or default
- var block_scope = try Scope.Block.init(rp.c, &switch_scope.base, false);
- defer block_scope.deinit();
+ const body = stmt.getBody();
+ assert(body.getStmtClass() == .CompoundStmtClass);
+ const compound_stmt = @ptrCast(*const clang.CompoundStmt, body);
+ var it = compound_stmt.body_begin();
+ const end_it = compound_stmt.body_end();
+ // Iterate over switch body and collect all cases.
+ // Fallthrough is handled by duplicating statements.
+ while (it != end_it) : (it += 1) {
+ switch (it[0].getStmtClass()) {
+ .CaseStmtClass => {
+ var items = std.ArrayList(Node).init(c.gpa);
+ defer items.deinit();
+ const sub = try transCaseStmt(c, scope, it[0], &items);
+ const res = try transSwitchProngStmt(c, scope, sub, it, end_it);
+
+ if (items.items.len == 0) {
+ has_default = true;
+ const switch_else = try Tag.switch_else.create(c.arena, res);
+ try cases.append(switch_else);
+ } else {
+ const switch_prong = try Tag.switch_prong.create(c.arena, .{
+ .cases = try c.arena.dupe(Node, items.items),
+ .cond = res,
+ });
+ try cases.append(switch_prong);
+ }
+ },
+ .DefaultStmtClass => {
+ has_default = true;
+ const default_stmt = @ptrCast(*const clang.DefaultStmt, it[0]);
+
+ var sub = default_stmt.getSubStmt();
+ while (true) switch (sub.getStmtClass()) {
+ .CaseStmtClass => sub = @ptrCast(*const clang.CaseStmt, sub).getSubStmt(),
+ .DefaultStmtClass => sub = @ptrCast(*const clang.DefaultStmt, sub).getSubStmt(),
+ else => break,
+ };
- // Note that we do not defer a deinit here; the switch_scope.pending_block field
- // has its own memory management. This resource is freed inside `transCase` and
- // then the final pending_block is freed at the bottom of this function with
- // pending_block.deinit().
- switch_scope.pending_block = try Scope.Block.init(rp.c, scope, false);
- try switch_scope.pending_block.statements.append(&switch_node.base);
-
- const last = try transStmt(rp, &block_scope.base, stmt.getBody(), .unused, .r_value);
- _ = try appendToken(rp.c, .Semicolon, ";");
-
- // take all pending statements
- const last_block_stmts = last.cast(ast.Node.Block).?.statements();
- try switch_scope.pending_block.statements.ensureCapacity(
- switch_scope.pending_block.statements.items.len + last_block_stmts.len,
- );
- for (last_block_stmts) |n| {
- switch_scope.pending_block.statements.appendAssumeCapacity(n);
- }
-
- if (switch_scope.default_label == null) {
- switch_scope.switch_label = try block_scope.makeMangledName(rp.c, "switch");
- }
- if (switch_scope.switch_label) |l| {
- switch_scope.pending_block.label = try appendIdentifier(rp.c, l);
- _ = try appendToken(rp.c, .Colon, ":");
- }
- if (switch_scope.default_label == null) {
- const else_prong = try transCreateNodeSwitchCase(rp.c, try transCreateNodeSwitchElse(rp.c));
- else_prong.expr = blk: {
- var br = try CtrlFlow.init(rp.c, .Break, switch_scope.switch_label.?);
- break :blk &(try br.finish(null)).base;
- };
- _ = try appendToken(rp.c, .Comma, ",");
+ const res = try transSwitchProngStmt(c, scope, sub, it, end_it);
- if (switch_scope.case_index >= switch_scope.cases.len)
- return revertAndWarn(rp, error.UnsupportedTranslation, @ptrCast(*const clang.Stmt, stmt).getBeginLoc(), "TODO complex switch cases", .{});
- switch_scope.cases[switch_scope.case_index] = &else_prong.base;
- switch_scope.case_index += 1;
+ const switch_else = try Tag.switch_else.create(c.arena, res);
+ try cases.append(switch_else);
+ },
+ else => {}, // collected in transSwitchProngStmt
+ }
}
- // We overallocated in case there was no default, so now we correct
- // the number of cases in the AST node.
- switch_node.cases_len = switch_scope.case_index;
-
- const result_node = try switch_scope.pending_block.complete(rp.c);
- switch_scope.pending_block.deinit();
- return result_node;
-}
-
-fn transCase(
- rp: RestorePoint,
- scope: *Scope,
- stmt: *const clang.CaseStmt,
-) TransError!*ast.Node {
- const block_scope = scope.findBlockScope(rp.c) catch unreachable;
- const switch_scope = scope.getSwitch();
- const label = try block_scope.makeMangledName(rp.c, "case");
- _ = try appendToken(rp.c, .Semicolon, ";");
-
- const expr = if (stmt.getRHS()) |rhs| blk: {
- const lhs_node = try transExpr(rp, scope, stmt.getLHS(), .used, .r_value);
- const ellips = try appendToken(rp.c, .Ellipsis3, "...");
- const rhs_node = try transExpr(rp, scope, rhs, .used, .r_value);
-
- const node = try rp.c.arena.create(ast.Node.SimpleInfixOp);
- node.* = .{
- .base = .{ .tag = .Range },
- .op_token = ellips,
- .lhs = lhs_node,
- .rhs = rhs_node,
- };
- break :blk &node.base;
- } else
- try transExpr(rp, scope, stmt.getLHS(), .used, .r_value);
- const switch_prong = try transCreateNodeSwitchCase(rp.c, expr);
- switch_prong.expr = blk: {
- var br = try CtrlFlow.init(rp.c, .Break, label);
- break :blk &(try br.finish(null)).base;
- };
- _ = try appendToken(rp.c, .Comma, ",");
+ if (!has_default) {
+ const else_prong = try Tag.switch_else.create(c.arena, Tag.empty_block.init());
+ try cases.append(else_prong);
+ }
- if (switch_scope.case_index >= switch_scope.cases.len)
- return revertAndWarn(rp, error.UnsupportedTranslation, @ptrCast(*const clang.Stmt, stmt).getBeginLoc(), "TODO complex switch cases", .{});
- switch_scope.cases[switch_scope.case_index] = &switch_prong.base;
- switch_scope.case_index += 1;
+ return Tag.@"switch".create(c.arena, .{
+ .cond = switch_expr,
+ .cases = try c.arena.dupe(Node, cases.items),
+ });
+}
- switch_scope.pending_block.label = try appendIdentifier(rp.c, label);
- _ = try appendToken(rp.c, .Colon, ":");
+/// Collects all items for this case, returns the first statement after the labels.
+/// If items ends up empty, the prong should be translated as an else.
+fn transCaseStmt(c: *Context, scope: *Scope, stmt: *const clang.Stmt, items: *std.ArrayList(Node)) TransError!*const clang.Stmt {
+ var sub = stmt;
+ var seen_default = false;
+ while (true) {
+ switch (sub.getStmtClass()) {
+ .DefaultStmtClass => {
+ seen_default = true;
+ items.items.len = 0;
+ const default_stmt = @ptrCast(*const clang.DefaultStmt, sub);
+ sub = default_stmt.getSubStmt();
+ },
+ .CaseStmtClass => {
+ const case_stmt = @ptrCast(*const clang.CaseStmt, sub);
- // take all pending statements
- try switch_scope.pending_block.statements.appendSlice(block_scope.statements.items);
- block_scope.statements.shrinkAndFree(0);
+ if (seen_default) {
+ items.items.len = 0;
+ sub = case_stmt.getSubStmt();
+ continue;
+ }
- const pending_node = try switch_scope.pending_block.complete(rp.c);
- switch_scope.pending_block.deinit();
- switch_scope.pending_block = try Scope.Block.init(rp.c, scope, false);
+ const expr = if (case_stmt.getRHS()) |rhs| blk: {
+ const lhs_node = try transExprCoercing(c, scope, case_stmt.getLHS(), .used);
+ const rhs_node = try transExprCoercing(c, scope, rhs, .used);
- try switch_scope.pending_block.statements.append(pending_node);
+ break :blk try Tag.ellipsis3.create(c.arena, .{ .lhs = lhs_node, .rhs = rhs_node });
+ } else
+ try transExprCoercing(c, scope, case_stmt.getLHS(), .used);
- return transStmt(rp, scope, stmt.getSubStmt(), .unused, .r_value);
+ try items.append(expr);
+ sub = case_stmt.getSubStmt();
+ },
+ else => return sub,
+ }
+ }
}
-fn transDefault(
- rp: RestorePoint,
+/// Collects all statements seen by this case into a block.
+/// Avoids creating a block if the first statement is a break or return.
+fn transSwitchProngStmt(
+ c: *Context,
scope: *Scope,
- stmt: *const clang.DefaultStmt,
-) TransError!*ast.Node {
- const block_scope = scope.findBlockScope(rp.c) catch unreachable;
- const switch_scope = scope.getSwitch();
- switch_scope.default_label = try block_scope.makeMangledName(rp.c, "default");
- _ = try appendToken(rp.c, .Semicolon, ";");
-
- const else_prong = try transCreateNodeSwitchCase(rp.c, try transCreateNodeSwitchElse(rp.c));
- else_prong.expr = blk: {
- var br = try CtrlFlow.init(rp.c, .Break, switch_scope.default_label.?);
- break :blk &(try br.finish(null)).base;
- };
- _ = try appendToken(rp.c, .Comma, ",");
-
- if (switch_scope.case_index >= switch_scope.cases.len)
- return revertAndWarn(rp, error.UnsupportedTranslation, @ptrCast(*const clang.Stmt, stmt).getBeginLoc(), "TODO complex switch cases", .{});
- switch_scope.cases[switch_scope.case_index] = &else_prong.base;
- switch_scope.case_index += 1;
-
- switch_scope.pending_block.label = try appendIdentifier(rp.c, switch_scope.default_label.?);
- _ = try appendToken(rp.c, .Colon, ":");
-
- // take all pending statements
- try switch_scope.pending_block.statements.appendSlice(block_scope.statements.items);
- block_scope.statements.shrinkAndFree(0);
+ stmt: *const clang.Stmt,
+ parent_it: clang.CompoundStmt.ConstBodyIterator,
+ parent_end_it: clang.CompoundStmt.ConstBodyIterator,
+) TransError!Node {
+ switch (stmt.getStmtClass()) {
+ .BreakStmtClass => return Tag.empty_block.init(),
+ .ReturnStmtClass => return transStmt(c, scope, stmt, .unused),
+ .CaseStmtClass, .DefaultStmtClass => unreachable,
+ else => {
+ var block_scope = try Scope.Block.init(c, scope, false);
+ defer block_scope.deinit();
- const pending_node = try switch_scope.pending_block.complete(rp.c);
- switch_scope.pending_block.deinit();
- switch_scope.pending_block = try Scope.Block.init(rp.c, scope, false);
- try switch_scope.pending_block.statements.append(pending_node);
+ // we do not need to translate `stmt` since it is the first stmt of `parent_it`
+ try transSwitchProngStmtInline(c, &block_scope, parent_it, parent_end_it);
+ return try block_scope.complete(c);
+ },
+ }
+}
- return transStmt(rp, scope, stmt.getSubStmt(), .unused, .r_value);
+/// Collects all statements seen by this case into a block.
+fn transSwitchProngStmtInline(
+ c: *Context,
+ block: *Scope.Block,
+ start_it: clang.CompoundStmt.ConstBodyIterator,
+ end_it: clang.CompoundStmt.ConstBodyIterator,
+) TransError!void {
+ var it = start_it;
+ while (it != end_it) : (it += 1) {
+ switch (it[0].getStmtClass()) {
+ .ReturnStmtClass => {
+ const result = try transStmt(c, &block.base, it[0], .unused);
+ try block.statements.append(result);
+ return;
+ },
+ .BreakStmtClass => return,
+ .CaseStmtClass => {
+ var sub = @ptrCast(*const clang.CaseStmt, it[0]).getSubStmt();
+ while (true) switch (sub.getStmtClass()) {
+ .CaseStmtClass => sub = @ptrCast(*const clang.CaseStmt, sub).getSubStmt(),
+ .DefaultStmtClass => sub = @ptrCast(*const clang.DefaultStmt, sub).getSubStmt(),
+ else => break,
+ };
+ const result = try transStmt(c, &block.base, sub, .unused);
+ assert(result.tag() != .declaration);
+ try block.statements.append(result);
+ if (result.isNoreturn(true)) {
+ return;
+ }
+ },
+ .DefaultStmtClass => {
+ var sub = @ptrCast(*const clang.DefaultStmt, it[0]).getSubStmt();
+ while (true) switch (sub.getStmtClass()) {
+ .CaseStmtClass => sub = @ptrCast(*const clang.CaseStmt, sub).getSubStmt(),
+ .DefaultStmtClass => sub = @ptrCast(*const clang.DefaultStmt, sub).getSubStmt(),
+ else => break,
+ };
+ const result = try transStmt(c, &block.base, sub, .unused);
+ assert(result.tag() != .declaration);
+ try block.statements.append(result);
+ if (result.isNoreturn(true)) {
+ return;
+ }
+ },
+ .CompoundStmtClass => {
+ const result = try transCompoundStmt(c, &block.base, @ptrCast(*const clang.CompoundStmt, it[0]));
+ try block.statements.append(result);
+ if (result.isNoreturn(true)) {
+ return;
+ }
+ },
+ else => {
+ const result = try transStmt(c, &block.base, it[0], .unused);
+ switch (result.tag()) {
+ .declaration, .empty_block => {},
+ else => try block.statements.append(result),
+ }
+ },
+ }
+ }
+ return;
}
-fn transConstantExpr(rp: RestorePoint, scope: *Scope, expr: *const clang.Expr, used: ResultUsed) TransError!*ast.Node {
+fn transConstantExpr(c: *Context, scope: *Scope, expr: *const clang.Expr, used: ResultUsed) TransError!Node {
var result: clang.ExprEvalResult = undefined;
- if (!expr.EvaluateAsConstantExpr(&result, .EvaluateForCodeGen, rp.c.clang_context))
- return revertAndWarn(rp, error.UnsupportedTranslation, expr.getBeginLoc(), "invalid constant expression", .{});
+ if (!expr.evaluateAsConstantExpr(&result, .EvaluateForCodeGen, c.clang_context))
+ return fail(c, error.UnsupportedTranslation, expr.getBeginLoc(), "invalid constant expression", .{});
- var val_node: ?*ast.Node = null;
switch (result.Val.getKind()) {
.Int => {
// See comment in `transIntegerLiteral` for why this code is here.
// @as(T, x)
const expr_base = @ptrCast(*const clang.Expr, expr);
- const as_node = try rp.c.createBuiltinCall("@as", 2);
- const ty_node = try transQualType(rp, expr_base.getType(), expr_base.getBeginLoc());
- as_node.params()[0] = ty_node;
- _ = try appendToken(rp.c, .Comma, ",");
-
- const int_lit_node = try transCreateNodeAPInt(rp.c, result.Val.getInt());
- as_node.params()[1] = int_lit_node;
-
- as_node.rparen_token = try appendToken(rp.c, .RParen, ")");
-
- return maybeSuppressResult(rp, scope, used, &as_node.base);
+ const as_node = try Tag.as.create(c.arena, .{
+ .lhs = try transQualType(c, scope, expr_base.getType(), expr_base.getBeginLoc()),
+ .rhs = try transCreateNodeAPInt(c, result.Val.getInt()),
+ });
+ return maybeSuppressResult(c, scope, used, as_node);
},
else => {
- return revertAndWarn(rp, error.UnsupportedTranslation, expr.getBeginLoc(), "unsupported constant expression kind", .{});
+ return fail(c, error.UnsupportedTranslation, expr.getBeginLoc(), "unsupported constant expression kind", .{});
},
}
}
-fn transPredefinedExpr(rp: RestorePoint, scope: *Scope, expr: *const clang.PredefinedExpr, used: ResultUsed) TransError!*ast.Node {
- return transStringLiteral(rp, scope, expr.getFunctionName(), used);
+fn transPredefinedExpr(c: *Context, scope: *Scope, expr: *const clang.PredefinedExpr, used: ResultUsed) TransError!Node {
+ return transStringLiteral(c, scope, expr.getFunctionName(), used);
}
-fn transCreateCharLitNode(c: *Context, narrow: bool, val: u32) TransError!*ast.Node {
- const node = try c.arena.create(ast.Node.OneToken);
- node.* = .{
- .base = .{ .tag = .CharLiteral },
- .token = undefined,
- };
- if (narrow) {
- const val_array = [_]u8{@intCast(u8, val)};
- node.token = try appendTokenFmt(c, .CharLiteral, "'{}'", .{std.zig.fmtEscapes(&val_array)});
- } else {
- node.token = try appendTokenFmt(c, .CharLiteral, "'\\u{{{x}}}'", .{val});
- }
- return &node.base;
+fn transCreateCharLitNode(c: *Context, narrow: bool, val: u32) TransError!Node {
+ return Tag.char_literal.create(c.arena, if (narrow)
+ try std.fmt.allocPrint(c.arena, "'{s}'", .{std.zig.fmtEscapes(&.{@intCast(u8, val)})})
+ else
+ try std.fmt.allocPrint(c.arena, "'\\u{{{x}}}'", .{val}));
}
fn transCharLiteral(
- rp: RestorePoint,
+ c: *Context,
scope: *Scope,
stmt: *const clang.CharacterLiteral,
result_used: ResultUsed,
suppress_as: SuppressCast,
-) TransError!*ast.Node {
+) TransError!Node {
const kind = stmt.getKind();
const val = stmt.getValue();
const narrow = kind == .Ascii or kind == .UTF8;
// C has a somewhat obscure feature called multi-character character constant
// e.g. 'abcd'
const int_lit_node = if (kind == .Ascii and val > 255)
- try transCreateNodeInt(rp.c, val)
+ try transCreateNodeNumber(c, val, .int)
else
- try transCreateCharLitNode(rp.c, narrow, val);
+ try transCreateCharLitNode(c, narrow, val);
if (suppress_as == .no_as) {
- return maybeSuppressResult(rp, scope, result_used, int_lit_node);
+ return maybeSuppressResult(c, scope, result_used, int_lit_node);
}
// See comment in `transIntegerLiteral` for why this code is here.
// @as(T, x)
const expr_base = @ptrCast(*const clang.Expr, stmt);
- const as_node = try rp.c.createBuiltinCall("@as", 2);
- const ty_node = try transQualType(rp, expr_base.getType(), expr_base.getBeginLoc());
- as_node.params()[0] = ty_node;
- _ = try appendToken(rp.c, .Comma, ",");
- as_node.params()[1] = int_lit_node;
-
- as_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- return maybeSuppressResult(rp, scope, result_used, &as_node.base);
+ const as_node = try Tag.as.create(c.arena, .{
+ .lhs = try transQualType(c, scope, expr_base.getType(), expr_base.getBeginLoc()),
+ .rhs = int_lit_node,
+ });
+ return maybeSuppressResult(c, scope, result_used, as_node);
}
-fn transStmtExpr(rp: RestorePoint, scope: *Scope, stmt: *const clang.StmtExpr, used: ResultUsed) TransError!*ast.Node {
+fn transStmtExpr(c: *Context, scope: *Scope, stmt: *const clang.StmtExpr, used: ResultUsed) TransError!Node {
const comp = stmt.getSubStmt();
if (used == .unused) {
- return transCompoundStmt(rp, scope, comp);
+ return transCompoundStmt(c, scope, comp);
}
- const lparen = try appendToken(rp.c, .LParen, "(");
- var block_scope = try Scope.Block.init(rp.c, scope, true);
+ var block_scope = try Scope.Block.init(c, scope, true);
defer block_scope.deinit();
var it = comp.body_begin();
const end_it = comp.body_end();
while (it != end_it - 1) : (it += 1) {
- const result = try transStmt(rp, &block_scope.base, it[0], .unused, .r_value);
- try block_scope.statements.append(result);
+ const result = try transStmt(c, &block_scope.base, it[0], .unused);
+ switch (result.tag()) {
+ .declaration, .empty_block => {},
+ else => try block_scope.statements.append(result),
+ }
}
- const break_node = blk: {
- var tmp = try CtrlFlow.init(rp.c, .Break, "blk");
- const rhs = try transStmt(rp, &block_scope.base, it[0], .used, .r_value);
- break :blk try tmp.finish(rhs);
- };
- _ = try appendToken(rp.c, .Semicolon, ";");
- try block_scope.statements.append(&break_node.base);
- const block_node = try block_scope.complete(rp.c);
- const rparen = try appendToken(rp.c, .RParen, ")");
- const grouped_expr = try rp.c.arena.create(ast.Node.GroupedExpression);
- grouped_expr.* = .{
- .lparen = lparen,
- .expr = block_node,
- .rparen = rparen,
- };
- return maybeSuppressResult(rp, scope, used, &grouped_expr.base);
+ const break_node = try Tag.break_val.create(c.arena, .{
+ .label = block_scope.label,
+ .val = try transStmt(c, &block_scope.base, it[0], .used),
+ });
+ try block_scope.statements.append(break_node);
+ const res = try block_scope.complete(c);
+ return maybeSuppressResult(c, scope, used, res);
}
-fn transMemberExpr(rp: RestorePoint, scope: *Scope, stmt: *const clang.MemberExpr, result_used: ResultUsed) TransError!*ast.Node {
- var container_node = try transExpr(rp, scope, stmt.getBase(), .used, .r_value);
+fn transMemberExpr(c: *Context, scope: *Scope, stmt: *const clang.MemberExpr, result_used: ResultUsed) TransError!Node {
+ var container_node = try transExpr(c, scope, stmt.getBase(), .used);
if (stmt.isArrow()) {
- container_node = try transCreateNodePtrDeref(rp.c, container_node);
+ container_node = try Tag.deref.create(c.arena, container_node);
}
const member_decl = stmt.getMemberDecl();
@@ -3188,19 +2515,18 @@ fn transMemberExpr(rp: RestorePoint, scope: *Scope, stmt: *const clang.MemberExp
if (decl_kind == .Field) {
const field_decl = @ptrCast(*const clang.FieldDecl, member_decl);
if (field_decl.isAnonymousStructOrUnion()) {
- const name = rp.c.decl_table.get(@ptrToInt(field_decl.getCanonicalDecl())).?;
- break :blk try mem.dupe(rp.c.arena, u8, name);
+ const name = c.decl_table.get(@ptrToInt(field_decl.getCanonicalDecl())).?;
+ break :blk try mem.dupe(c.arena, u8, name);
}
}
const decl = @ptrCast(*const clang.NamedDecl, member_decl);
- break :blk try rp.c.str(decl.getName_bytes_begin());
+ break :blk try c.str(decl.getName_bytes_begin());
};
-
- const node = try transCreateNodeFieldAccess(rp.c, container_node, name);
- return maybeSuppressResult(rp, scope, result_used, node);
+ const node = try Tag.field_access.create(c.arena, .{ .lhs = container_node, .field_name = name });
+ return maybeSuppressResult(c, scope, result_used, node);
}
-fn transArrayAccess(rp: RestorePoint, scope: *Scope, stmt: *const clang.ArraySubscriptExpr, result_used: ResultUsed) TransError!*ast.Node {
+fn transArrayAccess(c: *Context, scope: *Scope, stmt: *const clang.ArraySubscriptExpr, result_used: ResultUsed) TransError!Node {
var base_stmt = stmt.getBase();
// Unwrap the base statement if it's an array decayed to a bare pointer type
@@ -3213,30 +2539,26 @@ fn transArrayAccess(rp: RestorePoint, scope: *Scope, stmt: *const clang.ArraySub
}
}
- const container_node = try transExpr(rp, scope, base_stmt, .used, .r_value);
- const node = try transCreateNodeArrayAccess(rp.c, container_node);
+ const container_node = try transExpr(c, scope, base_stmt, .used);
// cast if the index is long long or signed
const subscr_expr = stmt.getIdx();
- const qt = getExprQualType(rp.c, subscr_expr);
+ const qt = getExprQualType(c, subscr_expr);
const is_longlong = cIsLongLongInteger(qt);
const is_signed = cIsSignedInteger(qt);
- if (is_longlong or is_signed) {
- const cast_node = try rp.c.createBuiltinCall("@intCast", 2);
+ const rhs = if (is_longlong or is_signed) blk: {
// check if long long first so that signed long long doesn't just become unsigned long long
- var typeid_node = if (is_longlong) try transCreateNodeIdentifier(rp.c, "usize") else try transQualTypeIntWidthOf(rp.c, qt, false);
- cast_node.params()[0] = typeid_node;
- _ = try appendToken(rp.c, .Comma, ",");
- cast_node.params()[1] = try transExpr(rp, scope, subscr_expr, .used, .r_value);
- cast_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- node.rtoken = try appendToken(rp.c, .RBrace, "]");
- node.index_expr = &cast_node.base;
- } else {
- node.index_expr = try transExpr(rp, scope, subscr_expr, .used, .r_value);
- node.rtoken = try appendToken(rp.c, .RBrace, "]");
- }
- return maybeSuppressResult(rp, scope, result_used, &node.base);
+ var typeid_node = if (is_longlong) try Tag.identifier.create(c.arena, "usize") else try transQualTypeIntWidthOf(c, qt, false);
+ break :blk try Tag.int_cast.create(c.arena, .{ .lhs = typeid_node, .rhs = try transExpr(c, scope, subscr_expr, .used) });
+ } else
+ try transExpr(c, scope, subscr_expr, .used);
+
+ const node = try Tag.array_access.create(c.arena, .{
+ .lhs = container_node,
+ .rhs = rhs,
+ });
+ return maybeSuppressResult(c, scope, result_used, node);
}
/// Check if an expression is ultimately a reference to a function declaration
@@ -3271,29 +2593,25 @@ fn cIsFunctionDeclRef(expr: *const clang.Expr) bool {
}
}
-fn transCallExpr(rp: RestorePoint, scope: *Scope, stmt: *const clang.CallExpr, result_used: ResultUsed) TransError!*ast.Node {
+fn transCallExpr(c: *Context, scope: *Scope, stmt: *const clang.CallExpr, result_used: ResultUsed) TransError!Node {
const callee = stmt.getCallee();
- var raw_fn_expr = try transExpr(rp, scope, callee, .used, .r_value);
+ var raw_fn_expr = try transExpr(c, scope, callee, .used);
var is_ptr = false;
const fn_ty = qualTypeGetFnProto(callee.getType(), &is_ptr);
const fn_expr = if (is_ptr and fn_ty != null and !cIsFunctionDeclRef(callee))
- try transCreateNodeUnwrapNull(rp.c, raw_fn_expr)
+ try Tag.unwrap.create(c.arena, raw_fn_expr)
else
raw_fn_expr;
const num_args = stmt.getNumArgs();
- const node = try rp.c.createCall(fn_expr, num_args);
- const call_params = node.params();
+ const args = try c.arena.alloc(Node, num_args);
- const args = stmt.getArgs();
+ const c_args = stmt.getArgs();
var i: usize = 0;
while (i < num_args) : (i += 1) {
- if (i != 0) {
- _ = try appendToken(rp.c, .Comma, ",");
- }
- var call_param = try transExpr(rp, scope, args[i], .used, .r_value);
+ var arg = try transExpr(c, scope, c_args[i], .used);
// In C the result type of a boolean expression is int. If this result is passed as
// an argument to a function whose parameter is also int, there is no cast. Therefore
@@ -3304,31 +2622,26 @@ fn transCallExpr(rp: RestorePoint, scope: *Scope, stmt: *const clang.CallExpr, r
const param_count = fn_proto.getNumParams();
if (i < param_count) {
const param_qt = fn_proto.getParamType(@intCast(c_uint, i));
- if (isBoolRes(call_param) and cIsNativeInt(param_qt)) {
- const builtin_node = try rp.c.createBuiltinCall("@boolToInt", 1);
- builtin_node.params()[0] = call_param;
- builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- call_param = &builtin_node.base;
+ if (isBoolRes(arg) and cIsNativeInt(param_qt)) {
+ arg = try Tag.bool_to_int.create(c.arena, arg);
}
}
},
else => {},
}
}
- call_params[i] = call_param;
+ args[i] = arg;
}
- node.rtoken = try appendToken(rp.c, .RParen, ")");
-
+ const node = try Tag.call.create(c.arena, .{ .lhs = fn_expr, .args = args });
if (fn_ty) |ty| {
const canon = ty.getReturnType().getCanonicalType();
const ret_ty = canon.getTypePtr();
if (ret_ty.isVoidType()) {
- _ = try appendToken(rp.c, .Semicolon, ";");
- return &node.base;
+ return node;
}
}
- return maybeSuppressResult(rp, scope, result_used, &node.base);
+ return maybeSuppressResult(c, scope, result_used, node);
}
const ClangFunctionType = union(enum) {
@@ -3363,38 +2676,29 @@ fn qualTypeGetFnProto(qt: clang.QualType, is_ptr: *bool) ?ClangFunctionType {
}
fn transUnaryExprOrTypeTraitExpr(
- rp: RestorePoint,
+ c: *Context,
scope: *Scope,
stmt: *const clang.UnaryExprOrTypeTraitExpr,
result_used: ResultUsed,
-) TransError!*ast.Node {
+) TransError!Node {
const loc = stmt.getBeginLoc();
- const type_node = try transQualType(
- rp,
- stmt.getTypeOfArgument(),
- loc,
- );
+ const type_node = try transQualType(c, scope, stmt.getTypeOfArgument(), loc);
const kind = stmt.getKind();
- const kind_str = switch (kind) {
- .SizeOf => "@sizeOf",
- .AlignOf => "@alignOf",
+ switch (kind) {
+ .SizeOf => return Tag.sizeof.create(c.arena, type_node),
+ .AlignOf => return Tag.alignof.create(c.arena, type_node),
.PreferredAlignOf,
.VecStep,
.OpenMPRequiredSimdAlign,
- => return revertAndWarn(
- rp,
+ => return fail(
+ c,
error.UnsupportedTranslation,
loc,
"Unsupported type trait kind {}",
.{kind},
),
- };
-
- const builtin_node = try rp.c.createBuiltinCall(kind_str, 1);
- builtin_node.params()[0] = type_node;
- builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- return maybeSuppressResult(rp, scope, result_used, &builtin_node.base);
+ }
}
fn qualTypeHasWrappingOverflow(qt: clang.QualType) bool {
@@ -3407,95 +2711,79 @@ fn qualTypeHasWrappingOverflow(qt: clang.QualType) bool {
}
}
-fn transUnaryOperator(rp: RestorePoint, scope: *Scope, stmt: *const clang.UnaryOperator, used: ResultUsed) TransError!*ast.Node {
+fn transUnaryOperator(c: *Context, scope: *Scope, stmt: *const clang.UnaryOperator, used: ResultUsed) TransError!Node {
const op_expr = stmt.getSubExpr();
switch (stmt.getOpcode()) {
.PostInc => if (qualTypeHasWrappingOverflow(stmt.getType()))
- return transCreatePostCrement(rp, scope, stmt, .AssignAddWrap, .PlusPercentEqual, "+%=", used)
+ return transCreatePostCrement(c, scope, stmt, .add_wrap_assign, used)
else
- return transCreatePostCrement(rp, scope, stmt, .AssignAdd, .PlusEqual, "+=", used),
+ return transCreatePostCrement(c, scope, stmt, .add_assign, used),
.PostDec => if (qualTypeHasWrappingOverflow(stmt.getType()))
- return transCreatePostCrement(rp, scope, stmt, .AssignSubWrap, .MinusPercentEqual, "-%=", used)
+ return transCreatePostCrement(c, scope, stmt, .sub_wrap_assign, used)
else
- return transCreatePostCrement(rp, scope, stmt, .AssignSub, .MinusEqual, "-=", used),
+ return transCreatePostCrement(c, scope, stmt, .sub_assign, used),
.PreInc => if (qualTypeHasWrappingOverflow(stmt.getType()))
- return transCreatePreCrement(rp, scope, stmt, .AssignAddWrap, .PlusPercentEqual, "+%=", used)
+ return transCreatePreCrement(c, scope, stmt, .add_wrap_assign, used)
else
- return transCreatePreCrement(rp, scope, stmt, .AssignAdd, .PlusEqual, "+=", used),
+ return transCreatePreCrement(c, scope, stmt, .add_assign, used),
.PreDec => if (qualTypeHasWrappingOverflow(stmt.getType()))
- return transCreatePreCrement(rp, scope, stmt, .AssignSubWrap, .MinusPercentEqual, "-%=", used)
+ return transCreatePreCrement(c, scope, stmt, .sub_wrap_assign, used)
else
- return transCreatePreCrement(rp, scope, stmt, .AssignSub, .MinusEqual, "-=", used),
+ return transCreatePreCrement(c, scope, stmt, .sub_assign, used),
.AddrOf => {
if (cIsFunctionDeclRef(op_expr)) {
- return transExpr(rp, scope, op_expr, used, .r_value);
+ return transExpr(c, scope, op_expr, used);
}
- const op_node = try transCreateNodeSimplePrefixOp(rp.c, .AddressOf, .Ampersand, "&");
- op_node.rhs = try transExpr(rp, scope, op_expr, used, .r_value);
- return &op_node.base;
+ return Tag.address_of.create(c.arena, try transExpr(c, scope, op_expr, used));
},
.Deref => {
- const value_node = try transExpr(rp, scope, op_expr, used, .r_value);
+ const node = try transExpr(c, scope, op_expr, used);
var is_ptr = false;
const fn_ty = qualTypeGetFnProto(op_expr.getType(), &is_ptr);
if (fn_ty != null and is_ptr)
- return value_node;
- const unwrapped = try transCreateNodeUnwrapNull(rp.c, value_node);
- return transCreateNodePtrDeref(rp.c, unwrapped);
+ return node;
+ const unwrapped = try Tag.unwrap.create(c.arena, node);
+ return Tag.deref.create(c.arena, unwrapped);
},
- .Plus => return transExpr(rp, scope, op_expr, used, .r_value),
+ .Plus => return transExpr(c, scope, op_expr, used),
.Minus => {
if (!qualTypeHasWrappingOverflow(op_expr.getType())) {
- const op_node = try transCreateNodeSimplePrefixOp(rp.c, .Negation, .Minus, "-");
- op_node.rhs = try transExpr(rp, scope, op_expr, .used, .r_value);
- return &op_node.base;
+ return Tag.negate.create(c.arena, try transExpr(c, scope, op_expr, .used));
} else if (cIsUnsignedInteger(op_expr.getType())) {
- // we gotta emit 0 -% x
- const zero = try transCreateNodeInt(rp.c, 0);
- const token = try appendToken(rp.c, .MinusPercent, "-%");
- const expr = try transExpr(rp, scope, op_expr, .used, .r_value);
- return transCreateNodeInfixOp(rp, scope, zero, .SubWrap, token, expr, used, true);
+ // use -% x for unsigned integers
+ return Tag.negate_wrap.create(c.arena, try transExpr(c, scope, op_expr, .used));
} else
- return revertAndWarn(rp, error.UnsupportedTranslation, stmt.getBeginLoc(), "C negation with non float non integer", .{});
+ return fail(c, error.UnsupportedTranslation, stmt.getBeginLoc(), "C negation with non float non integer", .{});
},
.Not => {
- const op_node = try transCreateNodeSimplePrefixOp(rp.c, .BitNot, .Tilde, "~");
- op_node.rhs = try transExpr(rp, scope, op_expr, .used, .r_value);
- return &op_node.base;
+ return Tag.bit_not.create(c.arena, try transExpr(c, scope, op_expr, .used));
},
.LNot => {
- const op_node = try transCreateNodeSimplePrefixOp(rp.c, .BoolNot, .Bang, "!");
- op_node.rhs = try transBoolExpr(rp, scope, op_expr, .used, .r_value, true);
- return &op_node.base;
+ return Tag.not.create(c.arena, try transBoolExpr(c, scope, op_expr, .used));
},
.Extension => {
- return transExpr(rp, scope, stmt.getSubExpr(), used, .l_value);
+ return transExpr(c, scope, stmt.getSubExpr(), used);
},
- else => return revertAndWarn(rp, error.UnsupportedTranslation, stmt.getBeginLoc(), "unsupported C translation {}", .{stmt.getOpcode()}),
+ else => return fail(c, error.UnsupportedTranslation, stmt.getBeginLoc(), "unsupported C translation {}", .{stmt.getOpcode()}),
}
}
fn transCreatePreCrement(
- rp: RestorePoint,
+ c: *Context,
scope: *Scope,
stmt: *const clang.UnaryOperator,
- op: ast.Node.Tag,
- op_tok_id: std.zig.Token.Id,
- bytes: []const u8,
+ op: Tag,
used: ResultUsed,
-) TransError!*ast.Node {
+) TransError!Node {
const op_expr = stmt.getSubExpr();
if (used == .unused) {
// common case
// c: ++expr
// zig: expr += 1
- const expr = try transExpr(rp, scope, op_expr, .used, .r_value);
- const token = try appendToken(rp.c, op_tok_id, bytes);
- const one = try transCreateNodeInt(rp.c, 1);
- if (scope.id != .Condition)
- _ = try appendToken(rp.c, .Semicolon, ";");
- return transCreateNodeInfixOp(rp, scope, expr, op, token, one, .used, false);
+ const lhs = try transExpr(c, scope, op_expr, .used);
+ const rhs = Tag.one_literal.init();
+ return transCreateNodeInfixOp(c, scope, op, lhs, rhs, .used);
}
// worst case
// c: ++expr
@@ -3504,71 +2792,44 @@ fn transCreatePreCrement(
// zig: _ref.* += 1;
// zig: break :blk _ref.*
// zig: })
- var block_scope = try Scope.Block.init(rp.c, scope, true);
+ var block_scope = try Scope.Block.init(c, scope, true);
defer block_scope.deinit();
- const ref = try block_scope.makeMangledName(rp.c, "ref");
-
- const mut_tok = try appendToken(rp.c, .Keyword_const, "const");
- const name_tok = try appendIdentifier(rp.c, ref);
- const eq_token = try appendToken(rp.c, .Equal, "=");
- const rhs_node = try transCreateNodeSimplePrefixOp(rp.c, .AddressOf, .Ampersand, "&");
- rhs_node.rhs = try transExpr(rp, scope, op_expr, .used, .r_value);
- const init_node = &rhs_node.base;
- const semicolon_token = try appendToken(rp.c, .Semicolon, ";");
- const node = try ast.Node.VarDecl.create(rp.c.arena, .{
- .name_token = name_tok,
- .mut_token = mut_tok,
- .semicolon_token = semicolon_token,
- }, .{
- .eq_token = eq_token,
- .init_node = init_node,
- });
- try block_scope.statements.append(&node.base);
-
- const lhs_node = try transCreateNodeIdentifier(rp.c, ref);
- const ref_node = try transCreateNodePtrDeref(rp.c, lhs_node);
- _ = try appendToken(rp.c, .Semicolon, ";");
- const token = try appendToken(rp.c, op_tok_id, bytes);
- const one = try transCreateNodeInt(rp.c, 1);
- _ = try appendToken(rp.c, .Semicolon, ";");
- const assign = try transCreateNodeInfixOp(rp, scope, ref_node, op, token, one, .used, false);
- try block_scope.statements.append(assign);
+ const ref = try block_scope.makeMangledName(c, "ref");
- const break_node = try transCreateNodeBreak(rp.c, block_scope.label, ref_node);
- try block_scope.statements.append(&break_node.base);
- const block_node = try block_scope.complete(rp.c);
- // semicolon must immediately follow rbrace because it is the last token in a block
- _ = try appendToken(rp.c, .Semicolon, ";");
- const grouped_expr = try rp.c.arena.create(ast.Node.GroupedExpression);
- grouped_expr.* = .{
- .lparen = try appendToken(rp.c, .LParen, "("),
- .expr = block_node,
- .rparen = try appendToken(rp.c, .RParen, ")"),
- };
- return &grouped_expr.base;
+ const expr = try transExpr(c, &block_scope.base, op_expr, .used);
+ const addr_of = try Tag.address_of.create(c.arena, expr);
+ const ref_decl = try Tag.var_simple.create(c.arena, .{ .name = ref, .init = addr_of });
+ try block_scope.statements.append(ref_decl);
+
+ const lhs_node = try Tag.identifier.create(c.arena, ref);
+ const ref_node = try Tag.deref.create(c.arena, lhs_node);
+ const node = try transCreateNodeInfixOp(c, &block_scope.base, op, ref_node, Tag.one_literal.init(), .used);
+ try block_scope.statements.append(node);
+
+ const break_node = try Tag.break_val.create(c.arena, .{
+ .label = block_scope.label,
+ .val = ref_node,
+ });
+ try block_scope.statements.append(break_node);
+ return block_scope.complete(c);
}
fn transCreatePostCrement(
- rp: RestorePoint,
+ c: *Context,
scope: *Scope,
stmt: *const clang.UnaryOperator,
- op: ast.Node.Tag,
- op_tok_id: std.zig.Token.Id,
- bytes: []const u8,
+ op: Tag,
used: ResultUsed,
-) TransError!*ast.Node {
+) TransError!Node {
const op_expr = stmt.getSubExpr();
if (used == .unused) {
// common case
- // c: ++expr
+ // c: expr++
// zig: expr += 1
- const expr = try transExpr(rp, scope, op_expr, .used, .r_value);
- const token = try appendToken(rp.c, op_tok_id, bytes);
- const one = try transCreateNodeInt(rp.c, 1);
- if (scope.id != .Condition)
- _ = try appendToken(rp.c, .Semicolon, ";");
- return transCreateNodeInfixOp(rp, scope, expr, op, token, one, .used, false);
+ const lhs = try transExpr(c, scope, op_expr, .used);
+ const rhs = Tag.one_literal.init();
+ return transCreateNodeInfixOp(c, scope, op, lhs, rhs, .used);
}
// worst case
// c: expr++
@@ -3578,93 +2839,56 @@ fn transCreatePostCrement(
// zig: _ref.* += 1;
// zig: break :blk _tmp
// zig: })
- var block_scope = try Scope.Block.init(rp.c, scope, true);
+ var block_scope = try Scope.Block.init(c, scope, true);
defer block_scope.deinit();
- const ref = try block_scope.makeMangledName(rp.c, "ref");
-
- const mut_tok = try appendToken(rp.c, .Keyword_const, "const");
- const name_tok = try appendIdentifier(rp.c, ref);
- const eq_token = try appendToken(rp.c, .Equal, "=");
- const rhs_node = try transCreateNodeSimplePrefixOp(rp.c, .AddressOf, .Ampersand, "&");
- rhs_node.rhs = try transExpr(rp, scope, op_expr, .used, .r_value);
- const init_node = &rhs_node.base;
- const semicolon_token = try appendToken(rp.c, .Semicolon, ";");
- const node = try ast.Node.VarDecl.create(rp.c.arena, .{
- .name_token = name_tok,
- .mut_token = mut_tok,
- .semicolon_token = semicolon_token,
- }, .{
- .eq_token = eq_token,
- .init_node = init_node,
- });
- try block_scope.statements.append(&node.base);
-
- const lhs_node = try transCreateNodeIdentifier(rp.c, ref);
- const ref_node = try transCreateNodePtrDeref(rp.c, lhs_node);
- _ = try appendToken(rp.c, .Semicolon, ";");
-
- const tmp = try block_scope.makeMangledName(rp.c, "tmp");
- const tmp_mut_tok = try appendToken(rp.c, .Keyword_const, "const");
- const tmp_name_tok = try appendIdentifier(rp.c, tmp);
- const tmp_eq_token = try appendToken(rp.c, .Equal, "=");
- const tmp_init_node = ref_node;
- const tmp_semicolon_token = try appendToken(rp.c, .Semicolon, ";");
- const tmp_node = try ast.Node.VarDecl.create(rp.c.arena, .{
- .name_token = tmp_name_tok,
- .mut_token = tmp_mut_tok,
- .semicolon_token = semicolon_token,
- }, .{
- .eq_token = tmp_eq_token,
- .init_node = tmp_init_node,
- });
- try block_scope.statements.append(&tmp_node.base);
+ const ref = try block_scope.makeMangledName(c, "ref");
- const token = try appendToken(rp.c, op_tok_id, bytes);
- const one = try transCreateNodeInt(rp.c, 1);
- _ = try appendToken(rp.c, .Semicolon, ";");
- const assign = try transCreateNodeInfixOp(rp, scope, ref_node, op, token, one, .used, false);
- try block_scope.statements.append(assign);
+ const expr = try transExpr(c, &block_scope.base, op_expr, .used);
+ const addr_of = try Tag.address_of.create(c.arena, expr);
+ const ref_decl = try Tag.var_simple.create(c.arena, .{ .name = ref, .init = addr_of });
+ try block_scope.statements.append(ref_decl);
- const break_node = blk: {
- var tmp_ctrl_flow = try CtrlFlow.initToken(rp.c, .Break, block_scope.label);
- const rhs = try transCreateNodeIdentifier(rp.c, tmp);
- break :blk try tmp_ctrl_flow.finish(rhs);
- };
- try block_scope.statements.append(&break_node.base);
- _ = try appendToken(rp.c, .Semicolon, ";");
- const block_node = try block_scope.complete(rp.c);
- const grouped_expr = try rp.c.arena.create(ast.Node.GroupedExpression);
- grouped_expr.* = .{
- .lparen = try appendToken(rp.c, .LParen, "("),
- .expr = block_node,
- .rparen = try appendToken(rp.c, .RParen, ")"),
- };
- return &grouped_expr.base;
+ const lhs_node = try Tag.identifier.create(c.arena, ref);
+ const ref_node = try Tag.deref.create(c.arena, lhs_node);
+
+ const tmp = try block_scope.makeMangledName(c, "tmp");
+ const tmp_decl = try Tag.var_simple.create(c.arena, .{ .name = tmp, .init = ref_node });
+ try block_scope.statements.append(tmp_decl);
+
+ const node = try transCreateNodeInfixOp(c, &block_scope.base, op, ref_node, Tag.one_literal.init(), .used);
+ try block_scope.statements.append(node);
+
+ const break_node = try Tag.break_val.create(c.arena, .{
+ .label = block_scope.label,
+ .val = try Tag.identifier.create(c.arena, tmp),
+ });
+ try block_scope.statements.append(break_node);
+ return block_scope.complete(c);
}
-fn transCompoundAssignOperator(rp: RestorePoint, scope: *Scope, stmt: *const clang.CompoundAssignOperator, used: ResultUsed) TransError!*ast.Node {
+fn transCompoundAssignOperator(c: *Context, scope: *Scope, stmt: *const clang.CompoundAssignOperator, used: ResultUsed) TransError!Node {
switch (stmt.getOpcode()) {
.MulAssign => if (qualTypeHasWrappingOverflow(stmt.getType()))
- return transCreateCompoundAssign(rp, scope, stmt, .AssignMulWrap, .AsteriskPercentEqual, "*%=", .MulWrap, .AsteriskPercent, "*%", used)
+ return transCreateCompoundAssign(c, scope, stmt, .mul_wrap_assign, used)
else
- return transCreateCompoundAssign(rp, scope, stmt, .AssignMul, .AsteriskEqual, "*=", .Mul, .Asterisk, "*", used),
+ return transCreateCompoundAssign(c, scope, stmt, .mul_assign, used),
.AddAssign => if (qualTypeHasWrappingOverflow(stmt.getType()))
- return transCreateCompoundAssign(rp, scope, stmt, .AssignAddWrap, .PlusPercentEqual, "+%=", .AddWrap, .PlusPercent, "+%", used)
+ return transCreateCompoundAssign(c, scope, stmt, .add_wrap_assign, used)
else
- return transCreateCompoundAssign(rp, scope, stmt, .AssignAdd, .PlusEqual, "+=", .Add, .Plus, "+", used),
+ return transCreateCompoundAssign(c, scope, stmt, .add_assign, used),
.SubAssign => if (qualTypeHasWrappingOverflow(stmt.getType()))
- return transCreateCompoundAssign(rp, scope, stmt, .AssignSubWrap, .MinusPercentEqual, "-%=", .SubWrap, .MinusPercent, "-%", used)
+ return transCreateCompoundAssign(c, scope, stmt, .sub_wrap_assign, used)
else
- return transCreateCompoundAssign(rp, scope, stmt, .AssignSub, .MinusPercentEqual, "-=", .Sub, .Minus, "-", used),
- .DivAssign => return transCreateCompoundAssign(rp, scope, stmt, .AssignDiv, .SlashEqual, "/=", .Div, .Slash, "/", used),
- .RemAssign => return transCreateCompoundAssign(rp, scope, stmt, .AssignMod, .PercentEqual, "%=", .Mod, .Percent, "%", used),
- .ShlAssign => return transCreateCompoundAssign(rp, scope, stmt, .AssignBitShiftLeft, .AngleBracketAngleBracketLeftEqual, "<<=", .BitShiftLeft, .AngleBracketAngleBracketLeft, "<<", used),
- .ShrAssign => return transCreateCompoundAssign(rp, scope, stmt, .AssignBitShiftRight, .AngleBracketAngleBracketRightEqual, ">>=", .BitShiftRight, .AngleBracketAngleBracketRight, ">>", used),
- .AndAssign => return transCreateCompoundAssign(rp, scope, stmt, .AssignBitAnd, .AmpersandEqual, "&=", .BitAnd, .Ampersand, "&", used),
- .XorAssign => return transCreateCompoundAssign(rp, scope, stmt, .AssignBitXor, .CaretEqual, "^=", .BitXor, .Caret, "^", used),
- .OrAssign => return transCreateCompoundAssign(rp, scope, stmt, .AssignBitOr, .PipeEqual, "|=", .BitOr, .Pipe, "|", used),
- else => return revertAndWarn(
- rp,
+ return transCreateCompoundAssign(c, scope, stmt, .sub_assign, used),
+ .DivAssign => return transCreateCompoundAssign(c, scope, stmt, .div_assign, used),
+ .RemAssign => return transCreateCompoundAssign(c, scope, stmt, .mod_assign, used),
+ .ShlAssign => return transCreateCompoundAssign(c, scope, stmt, .shl_assign, used),
+ .ShrAssign => return transCreateCompoundAssign(c, scope, stmt, .shr_assign, used),
+ .AndAssign => return transCreateCompoundAssign(c, scope, stmt, .bit_and_assign, used),
+ .XorAssign => return transCreateCompoundAssign(c, scope, stmt, .bit_xor_assign, used),
+ .OrAssign => return transCreateCompoundAssign(c, scope, stmt, .bit_or_assign, used),
+ else => return fail(
+ c,
error.UnsupportedTranslation,
stmt.getBeginLoc(),
"unsupported C translation {}",
@@ -3674,25 +2898,20 @@ fn transCompoundAssignOperator(rp: RestorePoint, scope: *Scope, stmt: *const cla
}
fn transCreateCompoundAssign(
- rp: RestorePoint,
+ c: *Context,
scope: *Scope,
stmt: *const clang.CompoundAssignOperator,
- assign_op: ast.Node.Tag,
- assign_tok_id: std.zig.Token.Id,
- assign_bytes: []const u8,
- bin_op: ast.Node.Tag,
- bin_tok_id: std.zig.Token.Id,
- bin_bytes: []const u8,
+ op: Tag,
used: ResultUsed,
-) TransError!*ast.Node {
- const is_shift = bin_op == .BitShiftLeft or bin_op == .BitShiftRight;
- const is_div = bin_op == .Div;
- const is_mod = bin_op == .Mod;
+) TransError!Node {
+ const is_shift = op == .shl_assign or op == .shr_assign;
+ const is_div = op == .div_assign;
+ const is_mod = op == .mod_assign;
const lhs = stmt.getLHS();
const rhs = stmt.getRHS();
const loc = stmt.getBeginLoc();
- const lhs_qt = getExprQualType(rp.c, lhs);
- const rhs_qt = getExprQualType(rp.c, rhs);
+ const lhs_qt = getExprQualType(c, lhs);
+ const rhs_qt = getExprQualType(c, rhs);
const is_signed = cIsSignedInteger(lhs_qt);
const requires_int_cast = blk: {
const are_integers = cIsInteger(lhs_qt) and cIsInteger(rhs_qt);
@@ -3704,146 +2923,100 @@ fn transCreateCompoundAssign(
// c: lhs += rhs
// zig: lhs += rhs
if ((is_mod or is_div) and is_signed) {
- const op_token = try appendToken(rp.c, .Equal, "=");
- const op_node = try rp.c.arena.create(ast.Node.SimpleInfixOp);
- const builtin = if (is_mod) "@rem" else "@divTrunc";
- const builtin_node = try rp.c.createBuiltinCall(builtin, 2);
- const lhs_node = try transExpr(rp, scope, lhs, .used, .l_value);
- builtin_node.params()[0] = lhs_node;
- _ = try appendToken(rp.c, .Comma, ",");
- builtin_node.params()[1] = try transExpr(rp, scope, rhs, .used, .r_value);
- builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- op_node.* = .{
- .base = .{ .tag = .Assign },
- .op_token = op_token,
- .lhs = lhs_node,
- .rhs = &builtin_node.base,
- };
- _ = try appendToken(rp.c, .Semicolon, ";");
- return &op_node.base;
+ const lhs_node = try transExpr(c, scope, lhs, .used);
+ const rhs_node = try transExpr(c, scope, rhs, .used);
+ const builtin = if (is_mod)
+ try Tag.rem.create(c.arena, .{ .lhs = lhs_node, .rhs = rhs_node })
+ else
+ try Tag.div_trunc.create(c.arena, .{ .lhs = lhs_node, .rhs = rhs_node });
+
+ return transCreateNodeInfixOp(c, scope, .assign, lhs_node, builtin, .used);
}
- const lhs_node = try transExpr(rp, scope, lhs, .used, .l_value);
- const eq_token = try appendToken(rp.c, assign_tok_id, assign_bytes);
+ const lhs_node = try transExpr(c, scope, lhs, .used);
var rhs_node = if (is_shift or requires_int_cast)
- try transExprCoercing(rp, scope, rhs, .used, .r_value)
+ try transExprCoercing(c, scope, rhs, .used)
else
- try transExpr(rp, scope, rhs, .used, .r_value);
+ try transExpr(c, scope, rhs, .used);
if (is_shift or requires_int_cast) {
- const cast_node = try rp.c.createBuiltinCall("@intCast", 2);
+ // @intCast(rhs)
const cast_to_type = if (is_shift)
- try qualTypeToLog2IntRef(rp, getExprQualType(rp.c, rhs), loc)
+ try qualTypeToLog2IntRef(c, scope, getExprQualType(c, rhs), loc)
else
- try transQualType(rp, getExprQualType(rp.c, lhs), loc);
- cast_node.params()[0] = cast_to_type;
- _ = try appendToken(rp.c, .Comma, ",");
- cast_node.params()[1] = rhs_node;
- cast_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- rhs_node = &cast_node.base;
+ try transQualType(c, scope, getExprQualType(c, lhs), loc);
+
+ rhs_node = try Tag.int_cast.create(c.arena, .{ .lhs = cast_to_type, .rhs = rhs_node });
}
- if (scope.id != .Condition)
- _ = try appendToken(rp.c, .Semicolon, ";");
- return transCreateNodeInfixOp(rp, scope, lhs_node, assign_op, eq_token, rhs_node, .used, false);
+
+ return transCreateNodeInfixOp(c, scope, op, lhs_node, rhs_node, .used);
}
// worst case
// c: lhs += rhs
// zig: (blk: {
// zig: const _ref = &lhs;
- // zig: _ref.* = _ref.* + rhs;
+ // zig: _ref.* += rhs;
// zig: break :blk _ref.*
// zig: })
- var block_scope = try Scope.Block.init(rp.c, scope, true);
+ var block_scope = try Scope.Block.init(c, scope, true);
defer block_scope.deinit();
- const ref = try block_scope.makeMangledName(rp.c, "ref");
-
- const mut_tok = try appendToken(rp.c, .Keyword_const, "const");
- const name_tok = try appendIdentifier(rp.c, ref);
- const eq_token = try appendToken(rp.c, .Equal, "=");
- const addr_node = try transCreateNodeSimplePrefixOp(rp.c, .AddressOf, .Ampersand, "&");
- addr_node.rhs = try transExpr(rp, scope, lhs, .used, .l_value);
- const init_node = &addr_node.base;
- const semicolon_token = try appendToken(rp.c, .Semicolon, ";");
- const node = try ast.Node.VarDecl.create(rp.c.arena, .{
- .name_token = name_tok,
- .mut_token = mut_tok,
- .semicolon_token = semicolon_token,
- }, .{
- .eq_token = eq_token,
- .init_node = init_node,
- });
- try block_scope.statements.append(&node.base);
+ const ref = try block_scope.makeMangledName(c, "ref");
+
+ const expr = try transExpr(c, &block_scope.base, lhs, .used);
+ const addr_of = try Tag.address_of.create(c.arena, expr);
+ const ref_decl = try Tag.var_simple.create(c.arena, .{ .name = ref, .init = addr_of });
+ try block_scope.statements.append(ref_decl);
- const lhs_node = try transCreateNodeIdentifier(rp.c, ref);
- const ref_node = try transCreateNodePtrDeref(rp.c, lhs_node);
- _ = try appendToken(rp.c, .Semicolon, ";");
+ const lhs_node = try Tag.identifier.create(c.arena, ref);
+ const ref_node = try Tag.deref.create(c.arena, lhs_node);
if ((is_mod or is_div) and is_signed) {
- const op_token = try appendToken(rp.c, .Equal, "=");
- const op_node = try rp.c.arena.create(ast.Node.SimpleInfixOp);
- const builtin = if (is_mod) "@rem" else "@divTrunc";
- const builtin_node = try rp.c.createBuiltinCall(builtin, 2);
- builtin_node.params()[0] = try transCreateNodePtrDeref(rp.c, lhs_node);
- _ = try appendToken(rp.c, .Comma, ",");
- builtin_node.params()[1] = try transExpr(rp, scope, rhs, .used, .r_value);
- builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- _ = try appendToken(rp.c, .Semicolon, ";");
- op_node.* = .{
- .base = .{ .tag = .Assign },
- .op_token = op_token,
- .lhs = ref_node,
- .rhs = &builtin_node.base,
- };
- _ = try appendToken(rp.c, .Semicolon, ";");
- try block_scope.statements.append(&op_node.base);
+ const rhs_node = try transExpr(c, &block_scope.base, rhs, .used);
+ const builtin = if (is_mod)
+ try Tag.rem.create(c.arena, .{ .lhs = ref_node, .rhs = rhs_node })
+ else
+ try Tag.div_trunc.create(c.arena, .{ .lhs = ref_node, .rhs = rhs_node });
+
+ const assign = try transCreateNodeInfixOp(c, &block_scope.base, .assign, ref_node, builtin, .used);
+ try block_scope.statements.append(assign);
} else {
- const bin_token = try appendToken(rp.c, bin_tok_id, bin_bytes);
- var rhs_node = try transExpr(rp, scope, rhs, .used, .r_value);
+ var rhs_node = try transExpr(c, &block_scope.base, rhs, .used);
if (is_shift or requires_int_cast) {
- const cast_node = try rp.c.createBuiltinCall("@intCast", 2);
+ // @intCast(rhs)
const cast_to_type = if (is_shift)
- try qualTypeToLog2IntRef(rp, getExprQualType(rp.c, rhs), loc)
+ try qualTypeToLog2IntRef(c, scope, getExprQualType(c, rhs), loc)
else
- try transQualType(rp, getExprQualType(rp.c, lhs), loc);
- cast_node.params()[0] = cast_to_type;
- _ = try appendToken(rp.c, .Comma, ",");
- cast_node.params()[1] = rhs_node;
- cast_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- rhs_node = &cast_node.base;
- }
+ try transQualType(c, scope, getExprQualType(c, lhs), loc);
- const rhs_bin = try transCreateNodeInfixOp(rp, scope, ref_node, bin_op, bin_token, rhs_node, .used, false);
- _ = try appendToken(rp.c, .Semicolon, ";");
+ rhs_node = try Tag.int_cast.create(c.arena, .{ .lhs = cast_to_type, .rhs = rhs_node });
+ }
- const ass_eq_token = try appendToken(rp.c, .Equal, "=");
- const assign = try transCreateNodeInfixOp(rp, scope, ref_node, .Assign, ass_eq_token, rhs_bin, .used, false);
+ const assign = try transCreateNodeInfixOp(c, &block_scope.base, op, ref_node, rhs_node, .used);
try block_scope.statements.append(assign);
}
- const break_node = try transCreateNodeBreak(rp.c, block_scope.label, ref_node);
- try block_scope.statements.append(&break_node.base);
- const block_node = try block_scope.complete(rp.c);
- const grouped_expr = try rp.c.arena.create(ast.Node.GroupedExpression);
- grouped_expr.* = .{
- .lparen = try appendToken(rp.c, .LParen, "("),
- .expr = block_node,
- .rparen = try appendToken(rp.c, .RParen, ")"),
- };
- return &grouped_expr.base;
+ const break_node = try Tag.break_val.create(c.arena, .{
+ .label = block_scope.label,
+ .val = ref_node,
+ });
+ try block_scope.statements.append(break_node);
+ return block_scope.complete(c);
}
fn transCPtrCast(
- rp: RestorePoint,
+ c: *Context,
+ scope: *Scope,
loc: clang.SourceLocation,
dst_type: clang.QualType,
src_type: clang.QualType,
- expr: *ast.Node,
-) !*ast.Node {
+ expr: Node,
+) !Node {
const ty = dst_type.getTypePtr();
const child_type = ty.getPointeeType();
const src_ty = src_type.getTypePtr();
const src_child_type = src_ty.getPointeeType();
+ const dst_type_node = try transType(c, scope, ty, loc);
if ((src_child_type.isConstQualified() and
!child_type.isConstQualified()) or
@@ -3851,80 +3024,47 @@ fn transCPtrCast(
!child_type.isVolatileQualified()))
{
// Casting away const or volatile requires us to use @intToPtr
- const inttoptr_node = try rp.c.createBuiltinCall("@intToPtr", 2);
- const dst_type_node = try transType(rp, ty, loc);
- inttoptr_node.params()[0] = dst_type_node;
- _ = try appendToken(rp.c, .Comma, ",");
-
- const ptrtoint_node = try rp.c.createBuiltinCall("@ptrToInt", 1);
- ptrtoint_node.params()[0] = expr;
- ptrtoint_node.rparen_token = try appendToken(rp.c, .RParen, ")");
-
- inttoptr_node.params()[1] = &ptrtoint_node.base;
- inttoptr_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- return &inttoptr_node.base;
+ const ptr_to_int = try Tag.ptr_to_int.create(c.arena, expr);
+ const int_to_ptr = try Tag.int_to_ptr.create(c.arena, .{ .lhs = dst_type_node, .rhs = ptr_to_int });
+ return int_to_ptr;
} else {
// Implicit downcasting from higher to lower alignment values is forbidden,
// use @alignCast to side-step this problem
- const ptrcast_node = try rp.c.createBuiltinCall("@ptrCast", 2);
- const dst_type_node = try transType(rp, ty, loc);
- ptrcast_node.params()[0] = dst_type_node;
- _ = try appendToken(rp.c, .Comma, ",");
-
- if (qualTypeCanon(child_type).isVoidType()) {
+ const rhs = if (qualTypeCanon(child_type).isVoidType())
// void has 1-byte alignment, so @alignCast is not needed
- ptrcast_node.params()[1] = expr;
- } else if (typeIsOpaque(rp.c, qualTypeCanon(child_type), loc)) {
+ expr
+ else if (typeIsOpaque(c, qualTypeCanon(child_type), loc))
// For opaque types a ptrCast is enough
- ptrcast_node.params()[1] = expr;
- } else {
- const aligncast_node = try rp.c.createBuiltinCall("@alignCast", 2);
- const alignof_node = try rp.c.createBuiltinCall("@alignOf", 1);
- const child_type_node = try transQualType(rp, child_type, loc);
- alignof_node.params()[0] = child_type_node;
- alignof_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- aligncast_node.params()[0] = &alignof_node.base;
- _ = try appendToken(rp.c, .Comma, ",");
- aligncast_node.params()[1] = expr;
- aligncast_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- ptrcast_node.params()[1] = &aligncast_node.base;
- }
- ptrcast_node.rparen_token = try appendToken(rp.c, .RParen, ")");
-
- return &ptrcast_node.base;
+ expr
+ else blk: {
+ const child_type_node = try transQualType(c, scope, child_type, loc);
+ const alignof = try Tag.alignof.create(c.arena, child_type_node);
+ const align_cast = try Tag.align_cast.create(c.arena, .{ .lhs = alignof, .rhs = expr });
+ break :blk align_cast;
+ };
+ return Tag.ptr_cast.create(c.arena, .{ .lhs = dst_type_node, .rhs = rhs });
}
}
-fn transBreak(rp: RestorePoint, scope: *Scope) TransError!*ast.Node {
- const break_scope = scope.getBreakableScope();
- const label_text: ?[]const u8 = if (break_scope.id == .Switch) blk: {
- const swtch = @fieldParentPtr(Scope.Switch, "base", break_scope);
- const block_scope = try scope.findBlockScope(rp.c);
- swtch.switch_label = try block_scope.makeMangledName(rp.c, "switch");
- break :blk swtch.switch_label;
- } else
- null;
-
- var cf = try CtrlFlow.init(rp.c, .Break, label_text);
- const br = try cf.finish(null);
- _ = try appendToken(rp.c, .Semicolon, ";");
- return &br.base;
-}
-
-fn transFloatingLiteral(rp: RestorePoint, scope: *Scope, stmt: *const clang.FloatingLiteral, used: ResultUsed) TransError!*ast.Node {
+fn transFloatingLiteral(c: *Context, scope: *Scope, stmt: *const clang.FloatingLiteral, used: ResultUsed) TransError!Node {
// TODO use something more accurate
- const dbl = stmt.getValueAsApproximateDouble();
- const node = try rp.c.arena.create(ast.Node.OneToken);
- node.* = .{
- .base = .{ .tag = .FloatLiteral },
- .token = try appendTokenFmt(rp.c, .FloatLiteral, "{d}", .{dbl}),
- };
- return maybeSuppressResult(rp, scope, used, &node.base);
+ var dbl = stmt.getValueAsApproximateDouble();
+ const is_negative = dbl < 0;
+ if (is_negative) dbl = -dbl;
+ const str = try std.fmt.allocPrint(c.arena, "{d}", .{dbl});
+ var node = if (dbl == std.math.floor(dbl))
+ try Tag.integer_literal.create(c.arena, str)
+ else
+ try Tag.float_literal.create(c.arena, str);
+ if (is_negative) node = try Tag.negate.create(c.arena, node);
+ return maybeSuppressResult(c, scope, used, node);
}
-fn transBinaryConditionalOperator(rp: RestorePoint, scope: *Scope, stmt: *const clang.BinaryConditionalOperator, used: ResultUsed) TransError!*ast.Node {
+fn transBinaryConditionalOperator(c: *Context, scope: *Scope, stmt: *const clang.BinaryConditionalOperator, used: ResultUsed) TransError!Node {
// GNU extension of the ternary operator where the middle expression is
// omitted, the conditition itself is returned if it evaluates to true
+ const qt = @ptrCast(*const clang.Expr, stmt).getType();
+ const res_is_bool = qualTypeIsBoolean(qt);
const casted_stmt = @ptrCast(*const clang.AbstractConditionalOperator, stmt);
const cond_expr = casted_stmt.getCond();
const true_expr = casted_stmt.getTrueExpr();
@@ -3935,184 +3075,149 @@ fn transBinaryConditionalOperator(rp: RestorePoint, scope: *Scope, stmt: *const
// const _cond_temp = (cond_expr);
// break :blk if (_cond_temp) _cond_temp else (false_expr);
// })
- const lparen = try appendToken(rp.c, .LParen, "(");
-
- var block_scope = try Scope.Block.init(rp.c, scope, true);
+ var block_scope = try Scope.Block.init(c, scope, true);
defer block_scope.deinit();
- const mangled_name = try block_scope.makeMangledName(rp.c, "cond_temp");
- const mut_tok = try appendToken(rp.c, .Keyword_const, "const");
- const name_tok = try appendIdentifier(rp.c, mangled_name);
- const eq_token = try appendToken(rp.c, .Equal, "=");
- const init_node = try transExpr(rp, &block_scope.base, cond_expr, .used, .r_value);
- const semicolon_token = try appendToken(rp.c, .Semicolon, ";");
- const tmp_var = try ast.Node.VarDecl.create(rp.c.arena, .{
- .name_token = name_tok,
- .mut_token = mut_tok,
- .semicolon_token = semicolon_token,
- }, .{
- .eq_token = eq_token,
- .init_node = init_node,
- });
- try block_scope.statements.append(&tmp_var.base);
-
- var break_node_tmp = try CtrlFlow.initToken(rp.c, .Break, block_scope.label);
+ const mangled_name = try block_scope.makeMangledName(c, "cond_temp");
+ const init_node = try transExpr(c, &block_scope.base, cond_expr, .used);
+ const ref_decl = try Tag.var_simple.create(c.arena, .{ .name = mangled_name, .init = init_node });
+ try block_scope.statements.append(ref_decl);
- const if_node = try transCreateNodeIf(rp.c);
var cond_scope = Scope.Condition{
.base = .{
.parent = &block_scope.base,
- .id = .Condition,
+ .id = .condition,
},
};
defer cond_scope.deinit();
- const tmp_var_node = try transCreateNodeIdentifier(rp.c, mangled_name);
-
- const ty = getExprQualType(rp.c, cond_expr).getTypePtr();
- const cond_node = try finishBoolExpr(rp, &cond_scope.base, cond_expr.getBeginLoc(), ty, tmp_var_node, used);
- if_node.condition = cond_node;
- _ = try appendToken(rp.c, .RParen, ")");
-
- if_node.body = try transCreateNodeIdentifier(rp.c, mangled_name);
- if_node.@"else" = try transCreateNodeElse(rp.c);
- if_node.@"else".?.body = try transExpr(rp, &block_scope.base, false_expr, .used, .r_value);
- _ = try appendToken(rp.c, .Semicolon, ";");
-
- const break_node = try break_node_tmp.finish(&if_node.base);
- _ = try appendToken(rp.c, .Semicolon, ";");
- try block_scope.statements.append(&break_node.base);
- const block_node = try block_scope.complete(rp.c);
-
- const grouped_expr = try rp.c.arena.create(ast.Node.GroupedExpression);
- grouped_expr.* = .{
- .lparen = lparen,
- .expr = block_node,
- .rparen = try appendToken(rp.c, .RParen, ")"),
- };
- return maybeSuppressResult(rp, scope, used, &grouped_expr.base);
+
+ const cond_ident = try Tag.identifier.create(c.arena, mangled_name);
+ const ty = getExprQualType(c, cond_expr).getTypePtr();
+ const cond_node = try finishBoolExpr(c, &cond_scope.base, cond_expr.getBeginLoc(), ty, cond_ident, .used);
+ var then_body = cond_ident;
+ if (!res_is_bool and isBoolRes(init_node)) {
+ then_body = try Tag.bool_to_int.create(c.arena, then_body);
+ }
+
+ var else_body = try transExpr(c, &block_scope.base, false_expr, .used);
+ if (!res_is_bool and isBoolRes(else_body)) {
+ else_body = try Tag.bool_to_int.create(c.arena, else_body);
+ }
+ const if_node = try Tag.@"if".create(c.arena, .{
+ .cond = cond_node,
+ .then = then_body,
+ .@"else" = else_body,
+ });
+ const break_node = try Tag.break_val.create(c.arena, .{
+ .label = block_scope.label,
+ .val = if_node,
+ });
+ try block_scope.statements.append(break_node);
+ const res = try block_scope.complete(c);
+ return maybeSuppressResult(c, scope, used, res);
}
-fn transConditionalOperator(rp: RestorePoint, scope: *Scope, stmt: *const clang.ConditionalOperator, used: ResultUsed) TransError!*ast.Node {
- const grouped = scope.id == .Condition;
- const lparen = if (grouped) try appendToken(rp.c, .LParen, "(") else undefined;
- const if_node = try transCreateNodeIf(rp.c);
+fn transConditionalOperator(c: *Context, scope: *Scope, stmt: *const clang.ConditionalOperator, used: ResultUsed) TransError!Node {
var cond_scope = Scope.Condition{
.base = .{
.parent = scope,
- .id = .Condition,
+ .id = .condition,
},
};
defer cond_scope.deinit();
+ const qt = @ptrCast(*const clang.Expr, stmt).getType();
+ const res_is_bool = qualTypeIsBoolean(qt);
const casted_stmt = @ptrCast(*const clang.AbstractConditionalOperator, stmt);
const cond_expr = casted_stmt.getCond();
const true_expr = casted_stmt.getTrueExpr();
const false_expr = casted_stmt.getFalseExpr();
- if_node.condition = try transBoolExpr(rp, &cond_scope.base, cond_expr, .used, .r_value, false);
- _ = try appendToken(rp.c, .RParen, ")");
-
- if_node.body = try transExpr(rp, scope, true_expr, .used, .r_value);
+ const cond = try transBoolExpr(c, &cond_scope.base, cond_expr, .used);
- if_node.@"else" = try transCreateNodeElse(rp.c);
- if_node.@"else".?.body = try transExpr(rp, scope, false_expr, .used, .r_value);
+ var then_body = try transExpr(c, scope, true_expr, used);
+ if (!res_is_bool and isBoolRes(then_body)) {
+ then_body = try Tag.bool_to_int.create(c.arena, then_body);
+ }
- if (grouped) {
- const rparen = try appendToken(rp.c, .RParen, ")");
- const grouped_expr = try rp.c.arena.create(ast.Node.GroupedExpression);
- grouped_expr.* = .{
- .lparen = lparen,
- .expr = &if_node.base,
- .rparen = rparen,
- };
- return maybeSuppressResult(rp, scope, used, &grouped_expr.base);
- } else {
- return maybeSuppressResult(rp, scope, used, &if_node.base);
+ var else_body = try transExpr(c, scope, false_expr, used);
+ if (!res_is_bool and isBoolRes(else_body)) {
+ else_body = try Tag.bool_to_int.create(c.arena, else_body);
}
+
+ const if_node = try Tag.@"if".create(c.arena, .{
+ .cond = cond,
+ .then = then_body,
+ .@"else" = else_body,
+ });
+ // Clang inserts ImplicitCast(ToVoid)'s to both rhs and lhs so we don't need to suppress the result here.
+ return if_node;
}
fn maybeSuppressResult(
- rp: RestorePoint,
+ c: *Context,
scope: *Scope,
used: ResultUsed,
- result: *ast.Node,
-) TransError!*ast.Node {
+ result: Node,
+) TransError!Node {
if (used == .used) return result;
- if (scope.id != .Condition) {
- // NOTE: This is backwards, but the semicolon must immediately follow the node.
- _ = try appendToken(rp.c, .Semicolon, ";");
- } else { // TODO is there a way to avoid this hack?
- // this parenthesis must come immediately following the node
- _ = try appendToken(rp.c, .RParen, ")");
- // these need to come before _
- _ = try appendToken(rp.c, .Colon, ":");
- _ = try appendToken(rp.c, .LParen, "(");
- }
- const lhs = try transCreateNodeIdentifier(rp.c, "_");
- const op_token = try appendToken(rp.c, .Equal, "=");
- const op_node = try rp.c.arena.create(ast.Node.SimpleInfixOp);
- op_node.* = .{
- .base = .{ .tag = .Assign },
- .op_token = op_token,
- .lhs = lhs,
- .rhs = result,
- };
- return &op_node.base;
+ return Tag.discard.create(c.arena, result);
}
-fn addTopLevelDecl(c: *Context, name: []const u8, decl_node: *ast.Node) !void {
- try c.root_decls.append(c.gpa, decl_node);
+fn addTopLevelDecl(c: *Context, name: []const u8, decl_node: Node) !void {
_ = try c.global_scope.sym_table.put(name, decl_node);
+ try c.global_scope.nodes.append(decl_node);
}
/// Translate a qual type for a variable with an initializer. The initializer
/// only matters for incomplete arrays, since the size of the array is determined
/// by the size of the initializer
fn transQualTypeInitialized(
- rp: RestorePoint,
+ c: *Context,
+ scope: *Scope,
qt: clang.QualType,
decl_init: *const clang.Expr,
source_loc: clang.SourceLocation,
-) TypeError!*ast.Node {
+) TypeError!Node {
const ty = qt.getTypePtr();
if (ty.getTypeClass() == .IncompleteArray) {
const incomplete_array_ty = @ptrCast(*const clang.IncompleteArrayType, ty);
- const elem_ty = incomplete_array_ty.getElementType().getTypePtr();
+ const elem_ty = try transType(c, scope, incomplete_array_ty.getElementType().getTypePtr(), source_loc);
switch (decl_init.getStmtClass()) {
.StringLiteralClass => {
const string_lit = @ptrCast(*const clang.StringLiteral, decl_init);
const string_lit_size = string_lit.getLength() + 1; // +1 for null terminator
const array_size = @intCast(usize, string_lit_size);
- return transCreateNodeArrayType(rp, source_loc, elem_ty, array_size);
+ return Tag.array_type.create(c.arena, .{ .len = array_size, .elem_type = elem_ty });
},
.InitListExprClass => {
const init_expr = @ptrCast(*const clang.InitListExpr, decl_init);
const size = init_expr.getNumInits();
- return transCreateNodeArrayType(rp, source_loc, elem_ty, size);
+ return Tag.array_type.create(c.arena, .{ .len = size, .elem_type = elem_ty });
},
else => {},
}
}
- return transQualType(rp, qt, source_loc);
+ return transQualType(c, scope, qt, source_loc);
}
-fn transQualType(rp: RestorePoint, qt: clang.QualType, source_loc: clang.SourceLocation) TypeError!*ast.Node {
- return transType(rp, qt.getTypePtr(), source_loc);
+fn transQualType(c: *Context, scope: *Scope, qt: clang.QualType, source_loc: clang.SourceLocation) TypeError!Node {
+ return transType(c, scope, qt.getTypePtr(), source_loc);
}
/// Produces a Zig AST node by translating a Clang QualType, respecting the width, but modifying the signed-ness.
/// Asserts the type is an integer.
-fn transQualTypeIntWidthOf(c: *Context, ty: clang.QualType, is_signed: bool) TypeError!*ast.Node {
+fn transQualTypeIntWidthOf(c: *Context, ty: clang.QualType, is_signed: bool) TypeError!Node {
return transTypeIntWidthOf(c, qualTypeCanon(ty), is_signed);
}
/// Produces a Zig AST node by translating a Clang Type, respecting the width, but modifying the signed-ness.
/// Asserts the type is an integer.
-fn transTypeIntWidthOf(c: *Context, ty: *const clang.Type, is_signed: bool) TypeError!*ast.Node {
+fn transTypeIntWidthOf(c: *Context, ty: *const clang.Type, is_signed: bool) TypeError!Node {
assert(ty.getTypeClass() == .Builtin);
const builtin_ty = @ptrCast(*const clang.BuiltinType, ty);
- return transCreateNodeIdentifier(c, switch (builtin_ty.getKind()) {
+ return Tag.type.create(c.arena, switch (builtin_ty.getKind()) {
.Char_U, .Char_S, .UChar, .SChar, .Char8 => if (is_signed) "i8" else "u8",
.UShort, .Short => if (is_signed) "c_short" else "c_ushort",
.UInt, .Int => if (is_signed) "c_int" else "c_uint",
@@ -4141,7 +3246,7 @@ fn qualTypeIsBoolean(qt: clang.QualType) bool {
return qualTypeCanon(qt).isBooleanType();
}
-fn qualTypeIntBitWidth(rp: RestorePoint, qt: clang.QualType, source_loc: clang.SourceLocation) !u32 {
+fn qualTypeIntBitWidth(c: *Context, qt: clang.QualType) !u32 {
const ty = qt.getTypePtr();
switch (ty.getTypeClass()) {
@@ -4165,7 +3270,7 @@ fn qualTypeIntBitWidth(rp: RestorePoint, qt: clang.QualType, source_loc: clang.S
.Typedef => {
const typedef_ty = @ptrCast(*const clang.TypedefType, ty);
const typedef_decl = typedef_ty.getDecl();
- const type_name = try rp.c.str(@ptrCast(*const clang.NamedDecl, typedef_decl).getName_bytes_begin());
+ const type_name = try c.str(@ptrCast(*const clang.NamedDecl, typedef_decl).getName_bytes_begin());
if (mem.eql(u8, type_name, "uint8_t") or mem.eql(u8, type_name, "int8_t")) {
return 8;
@@ -4181,55 +3286,19 @@ fn qualTypeIntBitWidth(rp: RestorePoint, qt: clang.QualType, source_loc: clang.S
},
else => return 0,
}
-
- unreachable;
}
-fn qualTypeToLog2IntRef(rp: RestorePoint, qt: clang.QualType, source_loc: clang.SourceLocation) !*ast.Node {
- const int_bit_width = try qualTypeIntBitWidth(rp, qt, source_loc);
+fn qualTypeToLog2IntRef(c: *Context, scope: *Scope, qt: clang.QualType, source_loc: clang.SourceLocation) !Node {
+ const int_bit_width = try qualTypeIntBitWidth(c, qt);
if (int_bit_width != 0) {
// we can perform the log2 now.
const cast_bit_width = math.log2_int(u64, int_bit_width);
- const node = try rp.c.arena.create(ast.Node.OneToken);
- node.* = .{
- .base = .{ .tag = .IntegerLiteral },
- .token = try appendTokenFmt(rp.c, .Identifier, "u{d}", .{cast_bit_width}),
- };
- return &node.base;
- }
-
- const zig_type_node = try transQualType(rp, qt, source_loc);
-
- // @import("std").math.Log2Int(c_long);
- //
- // FnCall
- // FieldAccess
- // FieldAccess
- // FnCall (.builtin = true)
- // Symbol "import"
- // StringLiteral "std"
- // Symbol "math"
- // Symbol "Log2Int"
- // Symbol (var from above)
-
- const import_fn_call = try rp.c.createBuiltinCall("@import", 1);
- const std_token = try appendToken(rp.c, .StringLiteral, "\"std\"");
- const std_node = try rp.c.arena.create(ast.Node.OneToken);
- std_node.* = .{
- .base = .{ .tag = .StringLiteral },
- .token = std_token,
- };
- import_fn_call.params()[0] = &std_node.base;
- import_fn_call.rparen_token = try appendToken(rp.c, .RParen, ")");
-
- const inner_field_access = try transCreateNodeFieldAccess(rp.c, &import_fn_call.base, "math");
- const outer_field_access = try transCreateNodeFieldAccess(rp.c, inner_field_access, "Log2Int");
- const log2int_fn_call = try rp.c.createCall(outer_field_access, 1);
- log2int_fn_call.params()[0] = zig_type_node;
- log2int_fn_call.rtoken = try appendToken(rp.c, .RParen, ")");
+ return Tag.log2_int_type.create(c.arena, cast_bit_width);
+ }
- return &log2int_fn_call.base;
+ const zig_type = try transQualType(c, scope, qt, source_loc);
+ return Tag.std_math_Log2Int.create(c.arena, zig_type);
}
fn qualTypeChildIsFnProto(qt: clang.QualType) bool {
@@ -4393,28 +3462,22 @@ fn cIsLongLongInteger(qt: clang.QualType) bool {
};
}
fn transCreateNodeAssign(
- rp: RestorePoint,
+ c: *Context,
scope: *Scope,
result_used: ResultUsed,
lhs: *const clang.Expr,
rhs: *const clang.Expr,
-) !*ast.Node {
+) !Node {
// common case
// c: lhs = rhs
// zig: lhs = rhs
if (result_used == .unused) {
- const lhs_node = try transExpr(rp, scope, lhs, .used, .l_value);
- const eq_token = try appendToken(rp.c, .Equal, "=");
- var rhs_node = try transExprCoercing(rp, scope, rhs, .used, .r_value);
+ const lhs_node = try transExpr(c, scope, lhs, .used);
+ var rhs_node = try transExprCoercing(c, scope, rhs, .used);
if (!exprIsBooleanType(lhs) and isBoolRes(rhs_node)) {
- const builtin_node = try rp.c.createBuiltinCall("@boolToInt", 1);
- builtin_node.params()[0] = rhs_node;
- builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- rhs_node = &builtin_node.base;
+ rhs_node = try Tag.bool_to_int.create(c.arena, rhs_node);
}
- if (scope.id != .Condition)
- _ = try appendToken(rp.c, .Semicolon, ";");
- return transCreateNodeInfixOp(rp, scope, lhs_node, .Assign, eq_token, rhs_node, .used, false);
+ return transCreateNodeInfixOp(c, scope, .assign, lhs_node, rhs_node, .used);
}
// worst case
@@ -4424,176 +3487,62 @@ fn transCreateNodeAssign(
// zig: lhs = _tmp;
// zig: break :blk _tmp
// zig: })
- var block_scope = try Scope.Block.init(rp.c, scope, true);
+ var block_scope = try Scope.Block.init(c, scope, true);
defer block_scope.deinit();
- const tmp = try block_scope.makeMangledName(rp.c, "tmp");
- const mut_tok = try appendToken(rp.c, .Keyword_const, "const");
- const name_tok = try appendIdentifier(rp.c, tmp);
- const eq_token = try appendToken(rp.c, .Equal, "=");
- var rhs_node = try transExpr(rp, &block_scope.base, rhs, .used, .r_value);
- if (!exprIsBooleanType(lhs) and isBoolRes(rhs_node)) {
- const builtin_node = try rp.c.createBuiltinCall("@boolToInt", 1);
- builtin_node.params()[0] = rhs_node;
- builtin_node.rparen_token = try appendToken(rp.c, .RParen, ")");
- rhs_node = &builtin_node.base;
- }
- const init_node = rhs_node;
- const semicolon_token = try appendToken(rp.c, .Semicolon, ";");
- const node = try ast.Node.VarDecl.create(rp.c.arena, .{
- .name_token = name_tok,
- .mut_token = mut_tok,
- .semicolon_token = semicolon_token,
- }, .{
- .eq_token = eq_token,
- .init_node = init_node,
- });
- try block_scope.statements.append(&node.base);
-
- const lhs_node = try transExpr(rp, &block_scope.base, lhs, .used, .l_value);
- const lhs_eq_token = try appendToken(rp.c, .Equal, "=");
- const ident = try transCreateNodeIdentifier(rp.c, tmp);
- _ = try appendToken(rp.c, .Semicolon, ";");
+ const tmp = try block_scope.makeMangledName(c, "tmp");
+ const rhs_node = try transExpr(c, &block_scope.base, rhs, .used);
+ const tmp_decl = try Tag.var_simple.create(c.arena, .{ .name = tmp, .init = rhs_node });
+ try block_scope.statements.append(tmp_decl);
- const assign = try transCreateNodeInfixOp(rp, &block_scope.base, lhs_node, .Assign, lhs_eq_token, ident, .used, false);
+ const lhs_node = try transExpr(c, &block_scope.base, lhs, .used);
+ const tmp_ident = try Tag.identifier.create(c.arena, tmp);
+ const assign = try transCreateNodeInfixOp(c, &block_scope.base, .assign, lhs_node, tmp_ident, .used);
try block_scope.statements.append(assign);
- const break_node = blk: {
- var tmp_ctrl_flow = try CtrlFlow.init(rp.c, .Break, tokenSlice(rp.c, block_scope.label.?));
- const rhs_expr = try transCreateNodeIdentifier(rp.c, tmp);
- break :blk try tmp_ctrl_flow.finish(rhs_expr);
- };
- _ = try appendToken(rp.c, .Semicolon, ";");
- try block_scope.statements.append(&break_node.base);
- const block_node = try block_scope.complete(rp.c);
- // semicolon must immediately follow rbrace because it is the last token in a block
- _ = try appendToken(rp.c, .Semicolon, ";");
- return block_node;
-}
-
-fn transCreateNodeFieldAccess(c: *Context, container: *ast.Node, field_name: []const u8) !*ast.Node {
- const field_access_node = try c.arena.create(ast.Node.SimpleInfixOp);
- field_access_node.* = .{
- .base = .{ .tag = .Period },
- .op_token = try appendToken(c, .Period, "."),
- .lhs = container,
- .rhs = try transCreateNodeIdentifier(c, field_name),
- };
- return &field_access_node.base;
-}
-
-fn transCreateNodeSimplePrefixOp(
- c: *Context,
- comptime tag: ast.Node.Tag,
- op_tok_id: std.zig.Token.Id,
- bytes: []const u8,
-) !*ast.Node.SimplePrefixOp {
- const node = try c.arena.create(ast.Node.SimplePrefixOp);
- node.* = .{
- .base = .{ .tag = tag },
- .op_token = try appendToken(c, op_tok_id, bytes),
- .rhs = undefined, // translate and set afterward
- };
- return node;
+ const break_node = try Tag.break_val.create(c.arena, .{
+ .label = block_scope.label,
+ .val = tmp_ident,
+ });
+ try block_scope.statements.append(break_node);
+ return block_scope.complete(c);
}
fn transCreateNodeInfixOp(
- rp: RestorePoint,
+ c: *Context,
scope: *Scope,
- lhs_node: *ast.Node,
- op: ast.Node.Tag,
- op_token: ast.TokenIndex,
- rhs_node: *ast.Node,
+ op: Tag,
+ lhs: Node,
+ rhs: Node,
used: ResultUsed,
- grouped: bool,
-) !*ast.Node {
- var lparen = if (grouped)
- try appendToken(rp.c, .LParen, "(")
- else
- null;
- const node = try rp.c.arena.create(ast.Node.SimpleInfixOp);
- node.* = .{
+) !Node {
+ const payload = try c.arena.create(ast.Payload.BinOp);
+ payload.* = .{
.base = .{ .tag = op },
- .op_token = op_token,
- .lhs = lhs_node,
- .rhs = rhs_node,
- };
- if (!grouped) return maybeSuppressResult(rp, scope, used, &node.base);
- const rparen = try appendToken(rp.c, .RParen, ")");
- const grouped_expr = try rp.c.arena.create(ast.Node.GroupedExpression);
- grouped_expr.* = .{
- .lparen = lparen.?,
- .expr = &node.base,
- .rparen = rparen,
+ .data = .{
+ .lhs = lhs,
+ .rhs = rhs,
+ },
};
- return maybeSuppressResult(rp, scope, used, &grouped_expr.base);
+ return maybeSuppressResult(c, scope, used, Node.initPayload(&payload.base));
}
fn transCreateNodeBoolInfixOp(
- rp: RestorePoint,
+ c: *Context,
scope: *Scope,
stmt: *const clang.BinaryOperator,
- op: ast.Node.Tag,
+ op: Tag,
used: ResultUsed,
- grouped: bool,
-) !*ast.Node {
- std.debug.assert(op == .BoolAnd or op == .BoolOr);
-
- const lhs_hode = try transBoolExpr(rp, scope, stmt.getLHS(), .used, .l_value, true);
- const op_token = if (op == .BoolAnd)
- try appendToken(rp.c, .Keyword_and, "and")
- else
- try appendToken(rp.c, .Keyword_or, "or");
- const rhs = try transBoolExpr(rp, scope, stmt.getRHS(), .used, .r_value, true);
+) !Node {
+ std.debug.assert(op == .@"and" or op == .@"or");
- return transCreateNodeInfixOp(
- rp,
- scope,
- lhs_hode,
- op,
- op_token,
- rhs,
- used,
- grouped,
- );
-}
+ const lhs = try transBoolExpr(c, scope, stmt.getLHS(), .used);
+ const rhs = try transBoolExpr(c, scope, stmt.getRHS(), .used);
-fn transCreateNodePtrType(
- c: *Context,
- is_const: bool,
- is_volatile: bool,
- op_tok_id: std.zig.Token.Id,
-) !*ast.Node.PtrType {
- const node = try c.arena.create(ast.Node.PtrType);
- const op_token = switch (op_tok_id) {
- .LBracket => blk: {
- const lbracket = try appendToken(c, .LBracket, "[");
- _ = try appendToken(c, .Asterisk, "*");
- _ = try appendToken(c, .RBracket, "]");
- break :blk lbracket;
- },
- .Identifier => blk: {
- const lbracket = try appendToken(c, .LBracket, "["); // Rendering checks if this token + 2 == .Identifier, so needs to return this token
- _ = try appendToken(c, .Asterisk, "*");
- _ = try appendIdentifier(c, "c");
- _ = try appendToken(c, .RBracket, "]");
- break :blk lbracket;
- },
- .Asterisk => try appendToken(c, .Asterisk, "*"),
- else => unreachable,
- };
- node.* = .{
- .op_token = op_token,
- .ptr_info = .{
- .const_token = if (is_const) try appendToken(c, .Keyword_const, "const") else null,
- .volatile_token = if (is_volatile) try appendToken(c, .Keyword_volatile, "volatile") else null,
- },
- .rhs = undefined, // translate and set afterward
- };
- return node;
+ return transCreateNodeInfixOp(c, scope, op, lhs, rhs, used);
}
-fn transCreateNodeAPInt(c: *Context, int: *const clang.APSInt) !*ast.Node {
+fn transCreateNodeAPInt(c: *Context, int: *const clang.APSInt) !Node {
const num_limbs = math.cast(usize, int.getNumWords()) catch |err| switch (err) {
error.Overflow => return error.OutOfMemory,
};
@@ -4629,418 +3578,96 @@ fn transCreateNodeAPInt(c: *Context, int: *const clang.APSInt) !*ast.Node {
else => @compileError("unimplemented"),
}
- const big: math.big.int.Const = .{ .limbs = limbs, .positive = !is_negative };
+ const big: math.big.int.Const = .{ .limbs = limbs, .positive = true };
const str = big.toStringAlloc(c.arena, 10, false) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
};
- defer c.arena.free(str);
- const token = try appendToken(c, .IntegerLiteral, str);
- const node = try c.arena.create(ast.Node.OneToken);
- node.* = .{
- .base = .{ .tag = .IntegerLiteral },
- .token = token,
- };
- return &node.base;
-}
-
-fn transCreateNodeUndefinedLiteral(c: *Context) !*ast.Node {
- const token = try appendToken(c, .Keyword_undefined, "undefined");
- const node = try c.arena.create(ast.Node.OneToken);
- node.* = .{
- .base = .{ .tag = .UndefinedLiteral },
- .token = token,
- };
- return &node.base;
-}
-
-fn transCreateNodeNullLiteral(c: *Context) !*ast.Node {
- const token = try appendToken(c, .Keyword_null, "null");
- const node = try c.arena.create(ast.Node.OneToken);
- node.* = .{
- .base = .{ .tag = .NullLiteral },
- .token = token,
- };
- return &node.base;
+ const res = try Tag.integer_literal.create(c.arena, str);
+ if (is_negative) return Tag.negate.create(c.arena, res);
+ return res;
}
-fn transCreateNodeBoolLiteral(c: *Context, value: bool) !*ast.Node {
- const token = if (value)
- try appendToken(c, .Keyword_true, "true")
+fn transCreateNodeNumber(c: *Context, num: anytype, num_kind: enum { int, float }) !Node {
+ const fmt_s = if (comptime std.meta.trait.isNumber(@TypeOf(num))) "{d}" else "{s}";
+ const str = try std.fmt.allocPrint(c.arena, fmt_s, .{num});
+ if (num_kind == .float)
+ return Tag.float_literal.create(c.arena, str)
else
- try appendToken(c, .Keyword_false, "false");
- const node = try c.arena.create(ast.Node.OneToken);
- node.* = .{
- .base = .{ .tag = .BoolLiteral },
- .token = token,
- };
- return &node.base;
-}
-
-fn transCreateNodeInt(c: *Context, int: anytype) !*ast.Node {
- const fmt_s = if (comptime std.meta.trait.isIntegerNumber(@TypeOf(int))) "{d}" else "{s}";
- const token = try appendTokenFmt(c, .IntegerLiteral, fmt_s, .{int});
- const node = try c.arena.create(ast.Node.OneToken);
- node.* = .{
- .base = .{ .tag = .IntegerLiteral },
- .token = token,
- };
- return &node.base;
+ return Tag.integer_literal.create(c.arena, str);
}
-fn transCreateNodeFloat(c: *Context, str: []const u8) !*ast.Node {
- const token = try appendTokenFmt(c, .FloatLiteral, "{s}", .{str});
- const node = try c.arena.create(ast.Node.OneToken);
- node.* = .{
- .base = .{ .tag = .FloatLiteral },
- .token = token,
- };
- return &node.base;
-}
-
-fn transCreateNodeOpaqueType(c: *Context) !*ast.Node {
- const container_tok = try appendToken(c, .Keyword_opaque, "opaque");
- const lbrace_token = try appendToken(c, .LBrace, "{");
- const container_node = try ast.Node.ContainerDecl.alloc(c.arena, 0);
- container_node.* = .{
- .kind_token = container_tok,
- .layout_token = null,
- .lbrace_token = lbrace_token,
- .rbrace_token = try appendToken(c, .RBrace, "}"),
- .fields_and_decls_len = 0,
- .init_arg_expr = .None,
- };
- return &container_node.base;
-}
-
-fn transCreateNodeMacroFn(c: *Context, name: []const u8, ref: *ast.Node, proto_alias: *ast.Node.FnProto) !*ast.Node {
+fn transCreateNodeMacroFn(c: *Context, name: []const u8, ref: Node, proto_alias: *ast.Payload.Func) !Node {
const scope = &c.global_scope.base;
- const pub_tok = try appendToken(c, .Keyword_pub, "pub");
- const fn_tok = try appendToken(c, .Keyword_fn, "fn");
- const name_tok = try appendIdentifier(c, name);
- _ = try appendToken(c, .LParen, "(");
-
- var fn_params = std.ArrayList(ast.Node.FnProto.ParamDecl).init(c.gpa);
+ var fn_params = std.ArrayList(ast.Payload.Param).init(c.gpa);
defer fn_params.deinit();
- for (proto_alias.params()) |param, i| {
- if (i != 0) {
- _ = try appendToken(c, .Comma, ",");
- }
- const param_name_tok = param.name_token orelse
- try appendTokenFmt(c, .Identifier, "arg_{d}", .{c.getMangle()});
-
- _ = try appendToken(c, .Colon, ":");
+ for (proto_alias.data.params) |param, i| {
+ const param_name = param.name orelse
+ try std.fmt.allocPrint(c.arena, "arg_{d}", .{c.getMangle()});
- (try fn_params.addOne()).* = .{
- .doc_comments = null,
- .comptime_token = null,
- .noalias_token = param.noalias_token,
- .name_token = param_name_tok,
- .param_type = param.param_type,
- };
+ try fn_params.append(.{
+ .name = param_name,
+ .type = param.type,
+ .is_noalias = param.is_noalias,
+ });
}
- _ = try appendToken(c, .RParen, ")");
-
- _ = try appendToken(c, .Keyword_callconv, "callconv");
- _ = try appendToken(c, .LParen, "(");
- const callconv_expr = try transCreateNodeEnumLiteral(c, "Inline");
- _ = try appendToken(c, .RParen, ")");
-
- const block_lbrace = try appendToken(c, .LBrace, "{");
-
- const return_kw = try appendToken(c, .Keyword_return, "return");
- const unwrap_expr = try transCreateNodeUnwrapNull(c, ref.cast(ast.Node.VarDecl).?.getInitNode().?);
-
- const call_expr = try c.createCall(unwrap_expr, fn_params.items.len);
- const call_params = call_expr.params();
+ const init = if (ref.castTag(.var_decl)) |v|
+ v.data.init.?
+ else if (ref.castTag(.var_simple) orelse ref.castTag(.pub_var_simple)) |v|
+ v.data.init
+ else
+ unreachable;
+ const unwrap_expr = try Tag.unwrap.create(c.arena, init);
+ const args = try c.arena.alloc(Node, fn_params.items.len);
for (fn_params.items) |param, i| {
- if (i != 0) {
- _ = try appendToken(c, .Comma, ",");
- }
- call_params[i] = try transCreateNodeIdentifier(c, tokenSlice(c, param.name_token.?));
+ args[i] = try Tag.identifier.create(c.arena, param.name.?);
}
- call_expr.rtoken = try appendToken(c, .RParen, ")");
-
- const return_expr = try ast.Node.ControlFlowExpression.create(c.arena, .{
- .ltoken = return_kw,
- .tag = .Return,
- }, .{
- .rhs = &call_expr.base,
+ const call_expr = try Tag.call.create(c.arena, .{
+ .lhs = unwrap_expr,
+ .args = args,
});
- _ = try appendToken(c, .Semicolon, ";");
-
- const block = try ast.Node.Block.alloc(c.arena, 1);
- block.* = .{
- .lbrace = block_lbrace,
- .statements_len = 1,
- .rbrace = try appendToken(c, .RBrace, "}"),
- };
- block.statements()[0] = &return_expr.base;
-
- const fn_proto = try ast.Node.FnProto.create(c.arena, .{
- .params_len = fn_params.items.len,
- .fn_token = fn_tok,
- .return_type = proto_alias.return_type,
- }, .{
- .visib_token = pub_tok,
- .name_token = name_tok,
- .body_node = &block.base,
- .callconv_expr = callconv_expr,
+ const return_expr = try Tag.@"return".create(c.arena, call_expr);
+ const block = try Tag.block_single.create(c.arena, return_expr);
+
+ return Tag.pub_inline_fn.create(c.arena, .{
+ .name = name,
+ .params = try c.arena.dupe(ast.Payload.Param, fn_params.items),
+ .return_type = proto_alias.data.return_type,
+ .body = block,
});
- mem.copy(ast.Node.FnProto.ParamDecl, fn_proto.params(), fn_params.items);
- return &fn_proto.base;
-}
-
-fn transCreateNodeUnwrapNull(c: *Context, wrapped: *ast.Node) !*ast.Node {
- _ = try appendToken(c, .Period, ".");
- const qm = try appendToken(c, .QuestionMark, "?");
- const node = try c.arena.create(ast.Node.SimpleSuffixOp);
- node.* = .{
- .base = .{ .tag = .UnwrapOptional },
- .lhs = wrapped,
- .rtoken = qm,
- };
- return &node.base;
-}
-
-fn transCreateNodeEnumLiteral(c: *Context, name: []const u8) !*ast.Node {
- const node = try c.arena.create(ast.Node.EnumLiteral);
- node.* = .{
- .dot = try appendToken(c, .Period, "."),
- .name = try appendIdentifier(c, name),
- };
- return &node.base;
-}
-
-fn transCreateNodeStringLiteral(c: *Context, str: []const u8) !*ast.Node {
- const node = try c.arena.create(ast.Node.OneToken);
- node.* = .{
- .base = .{ .tag = .StringLiteral },
- .token = try appendToken(c, .StringLiteral, str),
- };
- return &node.base;
-}
-
-fn transCreateNodeIf(c: *Context) !*ast.Node.If {
- const if_tok = try appendToken(c, .Keyword_if, "if");
- _ = try appendToken(c, .LParen, "(");
- const node = try c.arena.create(ast.Node.If);
- node.* = .{
- .if_token = if_tok,
- .condition = undefined,
- .payload = null,
- .body = undefined,
- .@"else" = null,
- };
- return node;
-}
-
-fn transCreateNodeElse(c: *Context) !*ast.Node.Else {
- const node = try c.arena.create(ast.Node.Else);
- node.* = .{
- .else_token = try appendToken(c, .Keyword_else, "else"),
- .payload = null,
- .body = undefined,
- };
- return node;
-}
-
-fn transCreateNodeBreak(
- c: *Context,
- label: ?ast.TokenIndex,
- rhs: ?*ast.Node,
-) !*ast.Node.ControlFlowExpression {
- var ctrl_flow = try CtrlFlow.init(c, .Break, if (label) |l| tokenSlice(c, l) else null);
- return ctrl_flow.finish(rhs);
-}
-
-const CtrlFlow = struct {
- c: *Context,
- ltoken: ast.TokenIndex,
- label_token: ?ast.TokenIndex,
- tag: ast.Node.Tag,
-
- /// Does everything except the RHS.
- fn init(c: *Context, tag: ast.Node.Tag, label: ?[]const u8) !CtrlFlow {
- const kw: Token.Id = switch (tag) {
- .Break => .Keyword_break,
- .Continue => .Keyword_continue,
- .Return => .Keyword_return,
- else => unreachable,
- };
- const kw_text = switch (tag) {
- .Break => "break",
- .Continue => "continue",
- .Return => "return",
- else => unreachable,
- };
- const ltoken = try appendToken(c, kw, kw_text);
- const label_token = if (label) |l| blk: {
- _ = try appendToken(c, .Colon, ":");
- break :blk try appendIdentifier(c, l);
- } else null;
- return CtrlFlow{
- .c = c,
- .ltoken = ltoken,
- .label_token = label_token,
- .tag = tag,
- };
- }
-
- fn initToken(c: *Context, tag: ast.Node.Tag, label: ?ast.TokenIndex) !CtrlFlow {
- const other_token = label orelse return init(c, tag, null);
- const loc = c.token_locs.items[other_token];
- const label_name = c.source_buffer.items[loc.start..loc.end];
- return init(c, tag, label_name);
- }
-
- fn finish(self: *CtrlFlow, rhs: ?*ast.Node) !*ast.Node.ControlFlowExpression {
- return ast.Node.ControlFlowExpression.create(self.c.arena, .{
- .ltoken = self.ltoken,
- .tag = self.tag,
- }, .{
- .label = self.label_token,
- .rhs = rhs,
- });
- }
-};
-
-fn transCreateNodeWhile(c: *Context) !*ast.Node.While {
- const while_tok = try appendToken(c, .Keyword_while, "while");
- _ = try appendToken(c, .LParen, "(");
-
- const node = try c.arena.create(ast.Node.While);
- node.* = .{
- .label = null,
- .inline_token = null,
- .while_token = while_tok,
- .condition = undefined,
- .payload = null,
- .continue_expr = null,
- .body = undefined,
- .@"else" = null,
- };
- return node;
-}
-
-fn transCreateNodeContinue(c: *Context) !*ast.Node {
- const ltoken = try appendToken(c, .Keyword_continue, "continue");
- const node = try ast.Node.ControlFlowExpression.create(c.arena, .{
- .ltoken = ltoken,
- .tag = .Continue,
- }, .{});
- _ = try appendToken(c, .Semicolon, ";");
- return &node.base;
-}
-
-fn transCreateNodeSwitchCase(c: *Context, lhs: *ast.Node) !*ast.Node.SwitchCase {
- const arrow_tok = try appendToken(c, .EqualAngleBracketRight, "=>");
-
- const node = try ast.Node.SwitchCase.alloc(c.arena, 1);
- node.* = .{
- .items_len = 1,
- .arrow_token = arrow_tok,
- .payload = null,
- .expr = undefined,
- };
- node.items()[0] = lhs;
- return node;
-}
-
-fn transCreateNodeSwitchElse(c: *Context) !*ast.Node {
- const node = try c.arena.create(ast.Node.SwitchElse);
- node.* = .{
- .token = try appendToken(c, .Keyword_else, "else"),
- };
- return &node.base;
}
fn transCreateNodeShiftOp(
- rp: RestorePoint,
+ c: *Context,
scope: *Scope,
stmt: *const clang.BinaryOperator,
- op: ast.Node.Tag,
- op_tok_id: std.zig.Token.Id,
- bytes: []const u8,
-) !*ast.Node {
- std.debug.assert(op == .BitShiftLeft or op == .BitShiftRight);
+ op: Tag,
+ used: ResultUsed,
+) !Node {
+ std.debug.assert(op == .shl or op == .shr);
const lhs_expr = stmt.getLHS();
const rhs_expr = stmt.getRHS();
const rhs_location = rhs_expr.getBeginLoc();
// lhs >> @as(u5, rh)
- const lhs = try transExpr(rp, scope, lhs_expr, .used, .l_value);
- const op_token = try appendToken(rp.c, op_tok_id, bytes);
-
- const cast_node = try rp.c.createBuiltinCall("@intCast", 2);
- const rhs_type = try qualTypeToLog2IntRef(rp, stmt.getType(), rhs_location);
- cast_node.params()[0] = rhs_type;
- _ = try appendToken(rp.c, .Comma, ",");
- const rhs = try transExprCoercing(rp, scope, rhs_expr, .used, .r_value);
- cast_node.params()[1] = rhs;
- cast_node.rparen_token = try appendToken(rp.c, .RParen, ")");
-
- const node = try rp.c.arena.create(ast.Node.SimpleInfixOp);
- node.* = .{
- .base = .{ .tag = op },
- .op_token = op_token,
- .lhs = lhs,
- .rhs = &cast_node.base,
- };
+ const lhs = try transExpr(c, scope, lhs_expr, .used);
- return &node.base;
-}
+ const rhs_type = try qualTypeToLog2IntRef(c, scope, stmt.getType(), rhs_location);
+ const rhs = try transExprCoercing(c, scope, rhs_expr, .used);
+ const rhs_casted = try Tag.int_cast.create(c.arena, .{ .lhs = rhs_type, .rhs = rhs });
-fn transCreateNodePtrDeref(c: *Context, lhs: *ast.Node) !*ast.Node {
- const node = try c.arena.create(ast.Node.SimpleSuffixOp);
- node.* = .{
- .base = .{ .tag = .Deref },
- .lhs = lhs,
- .rtoken = try appendToken(c, .PeriodAsterisk, ".*"),
- };
- return &node.base;
+ return transCreateNodeInfixOp(c, scope, op, lhs, rhs_casted, used);
}
-fn transCreateNodeArrayAccess(c: *Context, lhs: *ast.Node) !*ast.Node.ArrayAccess {
- _ = try appendToken(c, .LBrace, "[");
- const node = try c.arena.create(ast.Node.ArrayAccess);
- node.* = .{
- .lhs = lhs,
- .index_expr = undefined,
- .rtoken = undefined,
- };
- return node;
-}
-
-const RestorePoint = struct {
- c: *Context,
- token_index: ast.TokenIndex,
- src_buf_index: usize,
-
- fn activate(self: RestorePoint) void {
- self.c.token_ids.shrinkAndFree(self.c.gpa, self.token_index);
- self.c.token_locs.shrinkAndFree(self.c.gpa, self.token_index);
- self.c.source_buffer.shrinkAndFree(self.src_buf_index);
- }
-};
-
-fn makeRestorePoint(c: *Context) RestorePoint {
- return RestorePoint{
- .c = c,
- .token_index = c.token_ids.items.len,
- .src_buf_index = c.source_buffer.items.len,
- };
-}
-
-fn transType(rp: RestorePoint, ty: *const clang.Type, source_loc: clang.SourceLocation) TypeError!*ast.Node {
+fn transType(c: *Context, scope: *Scope, ty: *const clang.Type, source_loc: clang.SourceLocation) TypeError!Node {
switch (ty.getTypeClass()) {
.Builtin => {
const builtin_ty = @ptrCast(*const clang.BuiltinType, ty);
- return transCreateNodeIdentifier(rp.c, switch (builtin_ty.getKind()) {
+ return Tag.type.create(c.arena, switch (builtin_ty.getKind()) {
.Void => "c_void",
.Bool => "bool",
.Char_U, .UChar, .Char_S, .Char8 => "u8",
@@ -5060,112 +3687,115 @@ fn transType(rp: RestorePoint, ty: *const clang.Type, source_loc: clang.SourceLo
.Float128 => "f128",
.Float16 => "f16",
.LongDouble => "c_longdouble",
- else => return revertAndWarn(rp, error.UnsupportedType, source_loc, "unsupported builtin type", .{}),
+ else => return fail(c, error.UnsupportedType, source_loc, "unsupported builtin type", .{}),
});
},
.FunctionProto => {
const fn_proto_ty = @ptrCast(*const clang.FunctionProtoType, ty);
- const fn_proto = try transFnProto(rp, null, fn_proto_ty, source_loc, null, false);
- return &fn_proto.base;
+ const fn_proto = try transFnProto(c, null, fn_proto_ty, source_loc, null, false);
+ return Node.initPayload(&fn_proto.base);
},
.FunctionNoProto => {
const fn_no_proto_ty = @ptrCast(*const clang.FunctionType, ty);
- const fn_proto = try transFnNoProto(rp, fn_no_proto_ty, source_loc, null, false);
- return &fn_proto.base;
+ const fn_proto = try transFnNoProto(c, fn_no_proto_ty, source_loc, null, false);
+ return Node.initPayload(&fn_proto.base);
},
.Paren => {
const paren_ty = @ptrCast(*const clang.ParenType, ty);
- return transQualType(rp, paren_ty.getInnerType(), source_loc);
+ return transQualType(c, scope, paren_ty.getInnerType(), source_loc);
},
.Pointer => {
const child_qt = ty.getPointeeType();
if (qualTypeChildIsFnProto(child_qt)) {
- const optional_node = try transCreateNodeSimplePrefixOp(rp.c, .OptionalType, .QuestionMark, "?");
- optional_node.rhs = try transQualType(rp, child_qt, source_loc);
- return &optional_node.base;
+ return Tag.optional_type.create(c.arena, try transQualType(c, scope, child_qt, source_loc));
}
- if (typeIsOpaque(rp.c, child_qt.getTypePtr(), source_loc) or qualTypeWasDemotedToOpaque(rp.c, child_qt)) {
- const optional_node = try transCreateNodeSimplePrefixOp(rp.c, .OptionalType, .QuestionMark, "?");
- const pointer_node = try transCreateNodePtrType(
- rp.c,
- child_qt.isConstQualified(),
- child_qt.isVolatileQualified(),
- .Asterisk,
- );
- optional_node.rhs = &pointer_node.base;
- pointer_node.rhs = try transQualType(rp, child_qt, source_loc);
- return &optional_node.base;
+ const is_const = child_qt.isConstQualified();
+ const is_volatile = child_qt.isVolatileQualified();
+ const elem_type = try transQualType(c, scope, child_qt, source_loc);
+ if (typeIsOpaque(c, child_qt.getTypePtr(), source_loc) or qualTypeWasDemotedToOpaque(c, child_qt)) {
+ const ptr = try Tag.single_pointer.create(c.arena, .{ .is_const = is_const, .is_volatile = is_volatile, .elem_type = elem_type });
+ return Tag.optional_type.create(c.arena, ptr);
}
- const pointer_node = try transCreateNodePtrType(
- rp.c,
- child_qt.isConstQualified(),
- child_qt.isVolatileQualified(),
- .Identifier,
- );
- pointer_node.rhs = try transQualType(rp, child_qt, source_loc);
- return &pointer_node.base;
+
+ return Tag.c_pointer.create(c.arena, .{ .is_const = is_const, .is_volatile = is_volatile, .elem_type = elem_type });
},
.ConstantArray => {
const const_arr_ty = @ptrCast(*const clang.ConstantArrayType, ty);
const size_ap_int = const_arr_ty.getSize();
const size = size_ap_int.getLimitedValue(math.maxInt(usize));
- const elem_ty = const_arr_ty.getElementType().getTypePtr();
- return try transCreateNodeArrayType(rp, source_loc, elem_ty, size);
+ const elem_type = try transType(c, scope, const_arr_ty.getElementType().getTypePtr(), source_loc);
+
+ return Tag.array_type.create(c.arena, .{ .len = size, .elem_type = elem_type });
},
.IncompleteArray => {
const incomplete_array_ty = @ptrCast(*const clang.IncompleteArrayType, ty);
const child_qt = incomplete_array_ty.getElementType();
- var node = try transCreateNodePtrType(
- rp.c,
- child_qt.isConstQualified(),
- child_qt.isVolatileQualified(),
- .Identifier,
- );
- node.rhs = try transQualType(rp, child_qt, source_loc);
- return &node.base;
+ const is_const = child_qt.isConstQualified();
+ const is_volatile = child_qt.isVolatileQualified();
+ const elem_type = try transQualType(c, scope, child_qt, source_loc);
+
+ return Tag.c_pointer.create(c.arena, .{ .is_const = is_const, .is_volatile = is_volatile, .elem_type = elem_type });
},
.Typedef => {
const typedef_ty = @ptrCast(*const clang.TypedefType, ty);
const typedef_decl = typedef_ty.getDecl();
- return (try transTypeDef(rp.c, typedef_decl, false)) orelse
- revertAndWarn(rp, error.UnsupportedType, source_loc, "unable to translate typedef declaration", .{});
+ var trans_scope = scope;
+ if (@ptrCast(*const clang.Decl, typedef_decl).castToNamedDecl()) |named_decl| {
+ const decl_name = try c.str(named_decl.getName_bytes_begin());
+ if (c.global_names.get(decl_name)) |_| trans_scope = &c.global_scope.base;
+ }
+ try transTypeDef(c, trans_scope, typedef_decl);
+ const name = c.decl_table.get(@ptrToInt(typedef_decl.getCanonicalDecl())).?;
+ return Tag.identifier.create(c.arena, name);
},
.Record => {
const record_ty = @ptrCast(*const clang.RecordType, ty);
const record_decl = record_ty.getDecl();
- return (try transRecordDecl(rp.c, record_decl)) orelse
- revertAndWarn(rp, error.UnsupportedType, source_loc, "unable to resolve record declaration", .{});
+ var trans_scope = scope;
+ if (@ptrCast(*const clang.Decl, record_decl).castToNamedDecl()) |named_decl| {
+ const decl_name = try c.str(named_decl.getName_bytes_begin());
+ if (c.global_names.get(decl_name)) |_| trans_scope = &c.global_scope.base;
+ }
+ try transRecordDecl(c, trans_scope, record_decl);
+ const name = c.decl_table.get(@ptrToInt(record_decl.getCanonicalDecl())).?;
+ return Tag.identifier.create(c.arena, name);
},
.Enum => {
const enum_ty = @ptrCast(*const clang.EnumType, ty);
const enum_decl = enum_ty.getDecl();
- return (try transEnumDecl(rp.c, enum_decl)) orelse
- revertAndWarn(rp, error.UnsupportedType, source_loc, "unable to translate enum declaration", .{});
+ var trans_scope = scope;
+ if (@ptrCast(*const clang.Decl, enum_decl).castToNamedDecl()) |named_decl| {
+ const decl_name = try c.str(named_decl.getName_bytes_begin());
+ if (c.global_names.get(decl_name)) |_| trans_scope = &c.global_scope.base;
+ }
+ try transEnumDecl(c, trans_scope, enum_decl);
+ const name = c.decl_table.get(@ptrToInt(enum_decl.getCanonicalDecl())).?;
+ return Tag.identifier.create(c.arena, name);
},
.Elaborated => {
const elaborated_ty = @ptrCast(*const clang.ElaboratedType, ty);
- return transQualType(rp, elaborated_ty.getNamedType(), source_loc);
+ return transQualType(c, scope, elaborated_ty.getNamedType(), source_loc);
},
.Decayed => {
const decayed_ty = @ptrCast(*const clang.DecayedType, ty);
- return transQualType(rp, decayed_ty.getDecayedType(), source_loc);
+ return transQualType(c, scope, decayed_ty.getDecayedType(), source_loc);
},
.Attributed => {
const attributed_ty = @ptrCast(*const clang.AttributedType, ty);
- return transQualType(rp, attributed_ty.getEquivalentType(), source_loc);
+ return transQualType(c, scope, attributed_ty.getEquivalentType(), source_loc);
},
.MacroQualified => {
const macroqualified_ty = @ptrCast(*const clang.MacroQualifiedType, ty);
- return transQualType(rp, macroqualified_ty.getModifiedType(), source_loc);
+ return transQualType(c, scope, macroqualified_ty.getModifiedType(), source_loc);
},
else => {
- const type_name = rp.c.str(ty.getTypeClassName());
- return revertAndWarn(rp, error.UnsupportedType, source_loc, "unsupported type: '{s}'", .{type_name});
+ const type_name = c.str(ty.getTypeClassName());
+ return fail(c, error.UnsupportedType, source_loc, "unsupported type: '{s}'", .{type_name});
},
}
}
@@ -5231,7 +3861,7 @@ const FnDeclContext = struct {
};
fn transCC(
- rp: RestorePoint,
+ c: *Context,
fn_ty: *const clang.FunctionType,
source_loc: clang.SourceLocation,
) !CallingConvention {
@@ -5244,8 +3874,8 @@ fn transCC(
.X86ThisCall => return CallingConvention.Thiscall,
.AAPCS => return CallingConvention.AAPCS,
.AAPCS_VFP => return CallingConvention.AAPCSVFP,
- else => return revertAndWarn(
- rp,
+ else => return fail(
+ c,
error.UnsupportedType,
source_loc,
"unsupported calling convention: {s}",
@@ -5255,33 +3885,33 @@ fn transCC(
}
fn transFnProto(
- rp: RestorePoint,
+ c: *Context,
fn_decl: ?*const clang.FunctionDecl,
fn_proto_ty: *const clang.FunctionProtoType,
source_loc: clang.SourceLocation,
fn_decl_context: ?FnDeclContext,
is_pub: bool,
-) !*ast.Node.FnProto {
+) !*ast.Payload.Func {
const fn_ty = @ptrCast(*const clang.FunctionType, fn_proto_ty);
- const cc = try transCC(rp, fn_ty, source_loc);
+ const cc = try transCC(c, fn_ty, source_loc);
const is_var_args = fn_proto_ty.isVariadic();
- return finishTransFnProto(rp, fn_decl, fn_proto_ty, fn_ty, source_loc, fn_decl_context, is_var_args, cc, is_pub);
+ return finishTransFnProto(c, fn_decl, fn_proto_ty, fn_ty, source_loc, fn_decl_context, is_var_args, cc, is_pub);
}
fn transFnNoProto(
- rp: RestorePoint,
+ c: *Context,
fn_ty: *const clang.FunctionType,
source_loc: clang.SourceLocation,
fn_decl_context: ?FnDeclContext,
is_pub: bool,
-) !*ast.Node.FnProto {
- const cc = try transCC(rp, fn_ty, source_loc);
+) !*ast.Payload.Func {
+ const cc = try transCC(c, fn_ty, source_loc);
const is_var_args = if (fn_decl_context) |ctx| (!ctx.is_export and ctx.storage_class != .Static) else true;
- return finishTransFnProto(rp, null, null, fn_ty, source_loc, fn_decl_context, is_var_args, cc, is_pub);
+ return finishTransFnProto(c, null, null, fn_ty, source_loc, fn_decl_context, is_var_args, cc, is_pub);
}
fn finishTransFnProto(
- rp: RestorePoint,
+ c: *Context,
fn_decl: ?*const clang.FunctionDecl,
fn_proto_ty: ?*const clang.FunctionProtoType,
fn_ty: *const clang.FunctionType,
@@ -5290,128 +3920,78 @@ fn finishTransFnProto(
is_var_args: bool,
cc: CallingConvention,
is_pub: bool,
-) !*ast.Node.FnProto {
+) !*ast.Payload.Func {
const is_export = if (fn_decl_context) |ctx| ctx.is_export else false;
const is_extern = if (fn_decl_context) |ctx| !ctx.has_body else false;
+ const scope = &c.global_scope.base;
// TODO check for always_inline attribute
// TODO check for align attribute
- // pub extern fn name(...) T
- const pub_tok = if (is_pub) try appendToken(rp.c, .Keyword_pub, "pub") else null;
- const extern_export_inline_tok = if (is_export)
- try appendToken(rp.c, .Keyword_export, "export")
- else if (is_extern)
- try appendToken(rp.c, .Keyword_extern, "extern")
- else
- null;
- const fn_tok = try appendToken(rp.c, .Keyword_fn, "fn");
- const name_tok = if (fn_decl_context) |ctx| try appendIdentifier(rp.c, ctx.fn_name) else null;
- const lparen_tok = try appendToken(rp.c, .LParen, "(");
-
- var fn_params = std.ArrayList(ast.Node.FnProto.ParamDecl).init(rp.c.gpa);
+ var fn_params = std.ArrayList(ast.Payload.Param).init(c.gpa);
defer fn_params.deinit();
const param_count: usize = if (fn_proto_ty != null) fn_proto_ty.?.getNumParams() else 0;
- try fn_params.ensureCapacity(param_count + 1); // +1 for possible var args node
+ try fn_params.ensureCapacity(param_count);
var i: usize = 0;
while (i < param_count) : (i += 1) {
const param_qt = fn_proto_ty.?.getParamType(@intCast(c_uint, i));
+ const is_noalias = param_qt.isRestrictQualified();
- const noalias_tok = if (param_qt.isRestrictQualified()) try appendToken(rp.c, .Keyword_noalias, "noalias") else null;
-
- const param_name_tok: ?ast.TokenIndex = blk: {
- if (fn_decl) |decl| {
- const param = decl.getParamDecl(@intCast(c_uint, i));
- const param_name: []const u8 = try rp.c.str(@ptrCast(*const clang.NamedDecl, param).getName_bytes_begin());
- if (param_name.len < 1)
- break :blk null;
-
- const result = try appendIdentifier(rp.c, param_name);
- _ = try appendToken(rp.c, .Colon, ":");
- break :blk result;
- }
- break :blk null;
- };
+ const param_name: ?[]const u8 =
+ if (fn_decl) |decl|
+ blk: {
+ const param = decl.getParamDecl(@intCast(c_uint, i));
+ const param_name: []const u8 = try c.str(@ptrCast(*const clang.NamedDecl, param).getName_bytes_begin());
+ if (param_name.len < 1)
+ break :blk null;
- const type_node = try transQualType(rp, param_qt, source_loc);
+ break :blk param_name;
+ } else null;
+ const type_node = try transQualType(c, scope, param_qt, source_loc);
fn_params.addOneAssumeCapacity().* = .{
- .doc_comments = null,
- .comptime_token = null,
- .noalias_token = noalias_tok,
- .name_token = param_name_tok,
- .param_type = .{ .type_expr = type_node },
+ .is_noalias = is_noalias,
+ .name = param_name,
+ .type = type_node,
};
-
- if (i + 1 < param_count) {
- _ = try appendToken(rp.c, .Comma, ",");
- }
}
- const var_args_token: ?ast.TokenIndex = if (is_var_args) blk: {
- if (param_count > 0) {
- _ = try appendToken(rp.c, .Comma, ",");
- }
- break :blk try appendToken(rp.c, .Ellipsis3, "...");
- } else null;
-
- const rparen_tok = try appendToken(rp.c, .RParen, ")");
-
- const linksection_expr = blk: {
+ const linksection_string = blk: {
if (fn_decl) |decl| {
var str_len: usize = undefined;
if (decl.getSectionAttribute(&str_len)) |str_ptr| {
- _ = try appendToken(rp.c, .Keyword_linksection, "linksection");
- _ = try appendToken(rp.c, .LParen, "(");
- const expr = try transCreateNodeStringLiteral(
- rp.c,
- try std.fmt.allocPrint(rp.c.arena, "\"{s}\"", .{str_ptr[0..str_len]}),
- );
- _ = try appendToken(rp.c, .RParen, ")");
-
- break :blk expr;
+ break :blk str_ptr[0..str_len];
}
}
break :blk null;
};
- const align_expr = blk: {
+ const alignment = blk: {
if (fn_decl) |decl| {
- const alignment = decl.getAlignedAttribute(rp.c.clang_context);
+ const alignment = decl.getAlignedAttribute(c.clang_context);
if (alignment != 0) {
- _ = try appendToken(rp.c, .Keyword_align, "align");
- _ = try appendToken(rp.c, .LParen, "(");
// Clang reports the alignment in bits
- const expr = try transCreateNodeInt(rp.c, alignment / 8);
- _ = try appendToken(rp.c, .RParen, ")");
-
- break :blk expr;
+ break :blk alignment / 8;
}
}
break :blk null;
};
- const callconv_expr = if ((is_export or is_extern) and cc == .C) null else blk: {
- _ = try appendToken(rp.c, .Keyword_callconv, "callconv");
- _ = try appendToken(rp.c, .LParen, "(");
- const expr = try transCreateNodeEnumLiteral(rp.c, @tagName(cc));
- _ = try appendToken(rp.c, .RParen, ")");
- break :blk expr;
- };
+ const explicit_callconv = if ((is_export or is_extern) and cc == .C) null else cc;
const return_type_node = blk: {
if (fn_ty.getNoReturnAttr()) {
- break :blk try transCreateNodeIdentifier(rp.c, "noreturn");
+ break :blk Tag.noreturn_type.init();
} else {
const return_qt = fn_ty.getReturnType();
if (isCVoid(return_qt)) {
// convert primitive c_void to actual void (only for return type)
- break :blk try transCreateNodeIdentifier(rp.c, "void");
+ break :blk Tag.void_type.init();
} else {
- break :blk transQualType(rp, return_qt, source_loc) catch |err| switch (err) {
+ break :blk transQualType(c, scope, return_qt, source_loc) catch |err| switch (err) {
error.UnsupportedType => {
- try emitWarning(rp.c, source_loc, "unsupported function proto return type", .{});
+ try warn(c, scope, source_loc, "unsupported function proto return type", .{});
return err;
},
error.OutOfMemory => |e| return e,
@@ -5419,116 +3999,57 @@ fn finishTransFnProto(
}
}
};
+ const name: ?[]const u8 = if (fn_decl_context) |ctx| ctx.fn_name else null;
+ const payload = try c.arena.create(ast.Payload.Func);
+ payload.* = .{
+ .base = .{ .tag = .func },
+ .data = .{
+ .is_pub = is_pub,
+ .is_extern = is_extern,
+ .is_export = is_export,
+ .is_var_args = is_var_args,
+ .name = name,
+ .linksection_string = linksection_string,
+ .explicit_callconv = explicit_callconv,
+ .params = try c.arena.dupe(ast.Payload.Param, fn_params.items),
+ .return_type = return_type_node,
+ .body = null,
+ .alignment = alignment,
+ },
+ };
+ return payload;
+}
- // We need to reserve an undefined (but non-null) body node to set later.
- var body_node: ?*ast.Node = null;
- if (fn_decl_context) |ctx| {
- if (ctx.has_body) {
- // TODO: we should be able to use undefined here but
- // it causes a bug. This is undefined without zig language
- // being aware of it.
- body_node = @intToPtr(*ast.Node, 0x08);
- }
- }
-
- const fn_proto = try ast.Node.FnProto.create(rp.c.arena, .{
- .params_len = fn_params.items.len,
- .return_type = .{ .Explicit = return_type_node },
- .fn_token = fn_tok,
- }, .{
- .visib_token = pub_tok,
- .name_token = name_tok,
- .extern_export_inline_token = extern_export_inline_tok,
- .align_expr = align_expr,
- .section_expr = linksection_expr,
- .callconv_expr = callconv_expr,
- .body_node = body_node,
- .var_args_token = var_args_token,
- });
- mem.copy(ast.Node.FnProto.ParamDecl, fn_proto.params(), fn_params.items);
- return fn_proto;
+fn warn(c: *Context, scope: *Scope, loc: clang.SourceLocation, comptime format: []const u8, args: anytype) !void {
+ const args_prefix = .{c.locStr(loc)};
+ const value = try std.fmt.allocPrint(c.arena, "// {s}: warning: " ++ format, args_prefix ++ args);
+ try scope.appendNode(try Tag.warning.create(c.arena, value));
}
-fn revertAndWarn(
- rp: RestorePoint,
+fn fail(
+ c: *Context,
err: anytype,
source_loc: clang.SourceLocation,
comptime format: []const u8,
args: anytype,
) (@TypeOf(err) || error{OutOfMemory}) {
- rp.activate();
- try emitWarning(rp.c, source_loc, format, args);
+ try warn(c, &c.global_scope.base, source_loc, format, args);
return err;
}
-fn emitWarning(c: *Context, loc: clang.SourceLocation, comptime format: []const u8, args: anytype) !void {
- const args_prefix = .{c.locStr(loc)};
- _ = try appendTokenFmt(c, .LineComment, "// {s}: warning: " ++ format, args_prefix ++ args);
-}
-
-pub fn failDecl(c: *Context, loc: clang.SourceLocation, name: []const u8, comptime format: []const u8, args: anytype) !void {
+pub fn failDecl(c: *Context, loc: clang.SourceLocation, name: []const u8, comptime format: []const u8, args: anytype) Error!void {
+ // location
// pub const name = @compileError(msg);
- const pub_tok = try appendToken(c, .Keyword_pub, "pub");
- const const_tok = try appendToken(c, .Keyword_const, "const");
- const name_tok = try appendIdentifier(c, name);
- const eq_tok = try appendToken(c, .Equal, "=");
- const builtin_tok = try appendToken(c, .Builtin, "@compileError");
- const lparen_tok = try appendToken(c, .LParen, "(");
- const msg_tok = try appendTokenFmt(c, .StringLiteral, "\"" ++ format ++ "\"", args);
- const rparen_tok = try appendToken(c, .RParen, ")");
- const semi_tok = try appendToken(c, .Semicolon, ";");
- _ = try appendTokenFmt(c, .LineComment, "// {s}", .{c.locStr(loc)});
-
- const msg_node = try c.arena.create(ast.Node.OneToken);
- msg_node.* = .{
- .base = .{ .tag = .StringLiteral },
- .token = msg_tok,
- };
-
- const call_node = try ast.Node.BuiltinCall.alloc(c.arena, 1);
- call_node.* = .{
- .builtin_token = builtin_tok,
- .params_len = 1,
- .rparen_token = rparen_tok,
- };
- call_node.params()[0] = &msg_node.base;
-
- const var_decl_node = try ast.Node.VarDecl.create(c.arena, .{
- .name_token = name_tok,
- .mut_token = const_tok,
- .semicolon_token = semi_tok,
- }, .{
- .visib_token = pub_tok,
- .eq_token = eq_tok,
- .init_node = &call_node.base,
- });
- try addTopLevelDecl(c, name, &var_decl_node.base);
-}
-
-fn appendToken(c: *Context, token_id: Token.Id, bytes: []const u8) !ast.TokenIndex {
- std.debug.assert(token_id != .Identifier); // use appendIdentifier
- return appendTokenFmt(c, token_id, "{s}", .{bytes});
+ const fail_msg = try std.fmt.allocPrint(c.arena, format, args);
+ try addTopLevelDecl(c, name, try Tag.fail_decl.create(c.arena, .{ .actual = name, .mangled = fail_msg }));
+ const location_comment = try std.fmt.allocPrint(c.arena, "// {s}", .{c.locStr(loc)});
+ try c.global_scope.nodes.append(try Tag.warning.create(c.arena, location_comment));
}
-fn appendTokenFmt(c: *Context, token_id: Token.Id, comptime format: []const u8, args: anytype) !ast.TokenIndex {
- assert(token_id != .Invalid);
-
- try c.token_ids.ensureCapacity(c.gpa, c.token_ids.items.len + 1);
- try c.token_locs.ensureCapacity(c.gpa, c.token_locs.items.len + 1);
-
- const start_index = c.source_buffer.items.len;
- try c.source_buffer.writer().print(format ++ " ", args);
-
- c.token_ids.appendAssumeCapacity(token_id);
- c.token_locs.appendAssumeCapacity(.{
- .start = start_index,
- .end = c.source_buffer.items.len - 1, // back up before the space
- });
-
- return c.token_ids.items.len - 1;
+pub fn freeErrors(errors: []ClangErrMsg) void {
+ errors.ptr.delete(errors.len);
}
-// TODO hook up with codegen
fn isZigPrimitiveType(name: []const u8) bool {
if (name.len > 1 and (name[0] == 'u' or name[0] == 'i')) {
for (name[1..]) |c| {
@@ -5539,56 +4060,7 @@ fn isZigPrimitiveType(name: []const u8) bool {
}
return true;
}
- // void is invalid in c so it doesn't need to be checked.
- return mem.eql(u8, name, "comptime_float") or
- mem.eql(u8, name, "comptime_int") or
- mem.eql(u8, name, "bool") or
- mem.eql(u8, name, "isize") or
- mem.eql(u8, name, "usize") or
- mem.eql(u8, name, "f16") or
- mem.eql(u8, name, "f32") or
- mem.eql(u8, name, "f64") or
- mem.eql(u8, name, "f128") or
- mem.eql(u8, name, "c_longdouble") or
- mem.eql(u8, name, "noreturn") or
- mem.eql(u8, name, "type") or
- mem.eql(u8, name, "anyerror") or
- mem.eql(u8, name, "c_short") or
- mem.eql(u8, name, "c_ushort") or
- mem.eql(u8, name, "c_int") or
- mem.eql(u8, name, "c_uint") or
- mem.eql(u8, name, "c_long") or
- mem.eql(u8, name, "c_ulong") or
- mem.eql(u8, name, "c_longlong") or
- mem.eql(u8, name, "c_ulonglong");
-}
-
-fn appendIdentifier(c: *Context, name: []const u8) !ast.TokenIndex {
- return appendTokenFmt(c, .Identifier, "{}", .{std.zig.fmtId(name)});
-}
-
-fn transCreateNodeIdentifier(c: *Context, name: []const u8) !*ast.Node {
- const token_index = try appendIdentifier(c, name);
- const identifier = try c.arena.create(ast.Node.OneToken);
- identifier.* = .{
- .base = .{ .tag = .Identifier },
- .token = token_index,
- };
- return &identifier.base;
-}
-
-fn transCreateNodeIdentifierUnchecked(c: *Context, name: []const u8) !*ast.Node {
- const token_index = try appendTokenFmt(c, .Identifier, "{s}", .{name});
- const identifier = try c.arena.create(ast.Node.OneToken);
- identifier.* = .{
- .base = .{ .tag = .Identifier },
- .token = token_index,
- };
- return &identifier.base;
-}
-
-pub fn freeErrors(errors: []ClangErrMsg) void {
- errors.ptr.delete(errors.len);
+ return @import("astgen.zig").simple_types.has(name);
}
const MacroCtx = struct {
@@ -5709,27 +4181,13 @@ fn transPreprocessorEntities(c: *Context, unit: *clang.ASTUnit) Error!void {
fn transMacroDefine(c: *Context, m: *MacroCtx) ParseError!void {
const scope = &c.global_scope.base;
- const visib_tok = try appendToken(c, .Keyword_pub, "pub");
- const mut_tok = try appendToken(c, .Keyword_const, "const");
- const name_tok = try appendIdentifier(c, m.name);
- const eq_token = try appendToken(c, .Equal, "=");
-
const init_node = try parseCExpr(c, m, scope);
const last = m.next().?;
if (last != .Eof and last != .Nl)
return m.fail(c, "unable to translate C expr: unexpected token .{s}", .{@tagName(last)});
- const semicolon_token = try appendToken(c, .Semicolon, ";");
- const node = try ast.Node.VarDecl.create(c.arena, .{
- .name_token = name_tok,
- .mut_token = mut_tok,
- .semicolon_token = semicolon_token,
- }, .{
- .visib_token = visib_tok,
- .eq_token = eq_token,
- .init_node = init_node,
- });
- _ = try c.global_scope.macro_table.put(m.name, &node.base);
+ const var_decl = try Tag.pub_var_simple.create(c.arena, .{ .name = m.name, .init = init_node });
+ _ = try c.global_scope.macro_table.put(m.name, var_decl);
}
fn transMacroFnDefine(c: *Context, m: *MacroCtx) ParseError!void {
@@ -5737,16 +4195,11 @@ fn transMacroFnDefine(c: *Context, m: *MacroCtx) ParseError!void {
defer block_scope.deinit();
const scope = &block_scope.base;
- const pub_tok = try appendToken(c, .Keyword_pub, "pub");
- const fn_tok = try appendToken(c, .Keyword_fn, "fn");
- const name_tok = try appendIdentifier(c, m.name);
- _ = try appendToken(c, .LParen, "(");
-
if (m.next().? != .LParen) {
return m.fail(c, "unable to translate C expr: expected '('", .{});
}
- var fn_params = std.ArrayList(ast.Node.FnProto.ParamDecl).init(c.gpa);
+ var fn_params = std.ArrayList(ast.Payload.Param).init(c.gpa);
defer fn_params.deinit();
while (true) {
@@ -5754,120 +4207,82 @@ fn transMacroFnDefine(c: *Context, m: *MacroCtx) ParseError!void {
_ = m.next();
const mangled_name = try block_scope.makeMangledName(c, m.slice());
- const param_name_tok = try appendIdentifier(c, mangled_name);
- _ = try appendToken(c, .Colon, ":");
-
- const any_type = try c.arena.create(ast.Node.OneToken);
- any_type.* = .{
- .base = .{ .tag = .AnyType },
- .token = try appendToken(c, .Keyword_anytype, "anytype"),
- };
-
- (try fn_params.addOne()).* = .{
- .doc_comments = null,
- .comptime_token = null,
- .noalias_token = null,
- .name_token = param_name_tok,
- .param_type = .{ .any_type = &any_type.base },
- };
+ try fn_params.append(.{
+ .is_noalias = false,
+ .name = mangled_name,
+ .type = Tag.@"anytype".init(),
+ });
if (m.peek().? != .Comma) break;
_ = m.next();
- _ = try appendToken(c, .Comma, ",");
}
if (m.next().? != .RParen) {
return m.fail(c, "unable to translate C expr: expected ')'", .{});
}
- _ = try appendToken(c, .RParen, ")");
-
- _ = try appendToken(c, .Keyword_callconv, "callconv");
- _ = try appendToken(c, .LParen, "(");
- const callconv_expr = try transCreateNodeEnumLiteral(c, "Inline");
- _ = try appendToken(c, .RParen, ")");
-
- const type_of = try c.createBuiltinCall("@TypeOf", 1);
-
- const return_kw = try appendToken(c, .Keyword_return, "return");
const expr = try parseCExpr(c, m, scope);
const last = m.next().?;
if (last != .Eof and last != .Nl)
return m.fail(c, "unable to translate C expr: unexpected token .{s}", .{@tagName(last)});
- _ = try appendToken(c, .Semicolon, ";");
- const type_of_arg = if (!expr.tag.isBlock()) expr else blk: {
- const stmts = expr.blockStatements();
+
+ const typeof_arg = if (expr.castTag(.block)) |some| blk: {
+ const stmts = some.data.stmts;
const blk_last = stmts[stmts.len - 1];
- const br = blk_last.cast(ast.Node.ControlFlowExpression).?;
- break :blk br.getRHS().?;
- };
- type_of.params()[0] = type_of_arg;
- type_of.rparen_token = try appendToken(c, .RParen, ")");
- const return_expr = try ast.Node.ControlFlowExpression.create(c.arena, .{
- .ltoken = return_kw,
- .tag = .Return,
- }, .{
- .rhs = expr,
- });
+ const br = blk_last.castTag(.break_val).?;
+ break :blk br.data.val;
+ } else expr;
+ const return_type = if (typeof_arg.castTag(.std_meta_cast)) |some|
+ some.data.lhs
+ else
+ try Tag.typeof.create(c.arena, typeof_arg);
- try block_scope.statements.append(&return_expr.base);
- const block_node = try block_scope.complete(c);
- const fn_proto = try ast.Node.FnProto.create(c.arena, .{
- .fn_token = fn_tok,
- .params_len = fn_params.items.len,
- .return_type = .{ .Explicit = &type_of.base },
- }, .{
- .visib_token = pub_tok,
- .name_token = name_tok,
- .body_node = block_node,
- .callconv_expr = callconv_expr,
- });
- mem.copy(ast.Node.FnProto.ParamDecl, fn_proto.params(), fn_params.items);
+ const return_expr = try Tag.@"return".create(c.arena, expr);
+ try block_scope.statements.append(return_expr);
- _ = try c.global_scope.macro_table.put(m.name, &fn_proto.base);
+ const fn_decl = try Tag.pub_inline_fn.create(c.arena, .{
+ .name = m.name,
+ .params = try c.arena.dupe(ast.Payload.Param, fn_params.items),
+ .return_type = return_type,
+ .body = try block_scope.complete(c),
+ });
+ _ = try c.global_scope.macro_table.put(m.name, fn_decl);
}
const ParseError = Error || error{ParseError};
-fn parseCExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node {
+fn parseCExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node {
// TODO parseCAssignExpr here
const node = try parseCCondExpr(c, m, scope);
if (m.next().? != .Comma) {
m.i -= 1;
return node;
}
- _ = try appendToken(c, .Semicolon, ";");
var block_scope = try Scope.Block.init(c, scope, true);
defer block_scope.deinit();
var last = node;
while (true) {
// suppress result
- const lhs = try transCreateNodeIdentifier(c, "_");
- const op_token = try appendToken(c, .Equal, "=");
- const op_node = try c.arena.create(ast.Node.SimpleInfixOp);
- op_node.* = .{
- .base = .{ .tag = .Assign },
- .op_token = op_token,
- .lhs = lhs,
- .rhs = last,
- };
- try block_scope.statements.append(&op_node.base);
+ const ignore = try Tag.discard.create(c.arena, last);
+ try block_scope.statements.append(ignore);
last = try parseCCondExpr(c, m, scope);
- _ = try appendToken(c, .Semicolon, ";");
if (m.next().? != .Comma) {
m.i -= 1;
break;
}
}
- const break_node = try transCreateNodeBreak(c, block_scope.label, last);
- try block_scope.statements.append(&break_node.base);
+ const break_node = try Tag.break_val.create(c.arena, .{
+ .label = block_scope.label,
+ .val = last,
+ });
+ try block_scope.statements.append(break_node);
return try block_scope.complete(c);
}
-fn parseCNumLit(c: *Context, m: *MacroCtx) ParseError!*ast.Node {
+fn parseCNumLit(c: *Context, m: *MacroCtx) ParseError!Node {
var lit_bytes = m.slice();
switch (m.list[m.i].id) {
@@ -5887,11 +4302,10 @@ fn parseCNumLit(c: *Context, m: *MacroCtx) ParseError!*ast.Node {
}
if (suffix == .none) {
- return transCreateNodeInt(c, lit_bytes);
+ return transCreateNodeNumber(c, lit_bytes, .int);
}
- const cast_node = try c.createBuiltinCall("@as", 2);
- cast_node.params()[0] = try transCreateNodeIdentifier(c, switch (suffix) {
+ const type_node = try Tag.type.create(c.arena, switch (suffix) {
.u => "c_uint",
.l => "c_long",
.lu => "c_ulong",
@@ -5905,27 +4319,22 @@ fn parseCNumLit(c: *Context, m: *MacroCtx) ParseError!*ast.Node {
.llu => 3,
else => unreachable,
}];
- _ = try appendToken(c, .Comma, ",");
- cast_node.params()[1] = try transCreateNodeInt(c, lit_bytes);
- cast_node.rparen_token = try appendToken(c, .RParen, ")");
- return &cast_node.base;
+ const rhs = try transCreateNodeNumber(c, lit_bytes, .int);
+ return Tag.as.create(c.arena, .{ .lhs = type_node, .rhs = rhs });
},
.FloatLiteral => |suffix| {
if (lit_bytes[0] == '.')
lit_bytes = try std.fmt.allocPrint(c.arena, "0{s}", .{lit_bytes});
if (suffix == .none) {
- return transCreateNodeFloat(c, lit_bytes);
+ return transCreateNodeNumber(c, lit_bytes, .float);
}
- const cast_node = try c.createBuiltinCall("@as", 2);
- cast_node.params()[0] = try transCreateNodeIdentifier(c, switch (suffix) {
+ const type_node = try Tag.type.create(c.arena, switch (suffix) {
.f => "f32",
.l => "c_longdouble",
else => unreachable,
});
- _ = try appendToken(c, .Comma, ",");
- cast_node.params()[1] = try transCreateNodeFloat(c, lit_bytes[0 .. lit_bytes.len - 1]);
- cast_node.rparen_token = try appendToken(c, .RParen, ")");
- return &cast_node.base;
+ const rhs = try transCreateNodeNumber(c, lit_bytes[0 .. lit_bytes.len - 1], .float);
+ return Tag.as.create(c.arena, .{ .lhs = type_node, .rhs = rhs });
},
else => unreachable,
}
@@ -6091,79 +4500,62 @@ fn zigifyEscapeSequences(ctx: *Context, m: *MacroCtx) ![]const u8 {
return bytes[0..i];
}
-fn parseCPrimaryExprInner(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node {
+fn parseCPrimaryExprInner(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node {
const tok = m.next().?;
const slice = m.slice();
switch (tok) {
.CharLiteral => {
if (slice[0] != '\'' or slice[1] == '\\' or slice.len == 3) {
- const token = try appendToken(c, .CharLiteral, try zigifyEscapeSequences(c, m));
- const node = try c.arena.create(ast.Node.OneToken);
- node.* = .{
- .base = .{ .tag = .CharLiteral },
- .token = token,
- };
- return &node.base;
+ return Tag.char_literal.create(c.arena, try zigifyEscapeSequences(c, m));
} else {
- const token = try appendTokenFmt(c, .IntegerLiteral, "0x{x}", .{slice[1 .. slice.len - 1]});
- const node = try c.arena.create(ast.Node.OneToken);
- node.* = .{
- .base = .{ .tag = .IntegerLiteral },
- .token = token,
- };
- return &node.base;
+ const str = try std.fmt.allocPrint(c.arena, "0x{x}", .{slice[1 .. slice.len - 1]});
+ return Tag.integer_literal.create(c.arena, str);
}
},
.StringLiteral => {
- const token = try appendToken(c, .StringLiteral, try zigifyEscapeSequences(c, m));
- const node = try c.arena.create(ast.Node.OneToken);
- node.* = .{
- .base = .{ .tag = .StringLiteral },
- .token = token,
- };
- return &node.base;
+ return Tag.string_literal.create(c.arena, try zigifyEscapeSequences(c, m));
},
.IntegerLiteral, .FloatLiteral => {
return parseCNumLit(c, m);
},
// eventually this will be replaced by std.c.parse which will handle these correctly
- .Keyword_void => return transCreateNodeIdentifierUnchecked(c, "c_void"),
- .Keyword_bool => return transCreateNodeIdentifierUnchecked(c, "bool"),
- .Keyword_double => return transCreateNodeIdentifierUnchecked(c, "f64"),
- .Keyword_long => return transCreateNodeIdentifierUnchecked(c, "c_long"),
- .Keyword_int => return transCreateNodeIdentifierUnchecked(c, "c_int"),
- .Keyword_float => return transCreateNodeIdentifierUnchecked(c, "f32"),
- .Keyword_short => return transCreateNodeIdentifierUnchecked(c, "c_short"),
- .Keyword_char => return transCreateNodeIdentifierUnchecked(c, "u8"),
+ .Keyword_void => return Tag.type.create(c.arena, "c_void"),
+ .Keyword_bool => return Tag.type.create(c.arena, "bool"),
+ .Keyword_double => return Tag.type.create(c.arena, "f64"),
+ .Keyword_long => return Tag.type.create(c.arena, "c_long"),
+ .Keyword_int => return Tag.type.create(c.arena, "c_int"),
+ .Keyword_float => return Tag.type.create(c.arena, "f32"),
+ .Keyword_short => return Tag.type.create(c.arena, "c_short"),
+ .Keyword_char => return Tag.type.create(c.arena, "u8"),
.Keyword_unsigned => if (m.next()) |t| switch (t) {
- .Keyword_char => return transCreateNodeIdentifierUnchecked(c, "u8"),
- .Keyword_short => return transCreateNodeIdentifierUnchecked(c, "c_ushort"),
- .Keyword_int => return transCreateNodeIdentifierUnchecked(c, "c_uint"),
+ .Keyword_char => return Tag.type.create(c.arena, "u8"),
+ .Keyword_short => return Tag.type.create(c.arena, "c_ushort"),
+ .Keyword_int => return Tag.type.create(c.arena, "c_uint"),
.Keyword_long => if (m.peek() != null and m.peek().? == .Keyword_long) {
_ = m.next();
- return transCreateNodeIdentifierUnchecked(c, "c_ulonglong");
- } else return transCreateNodeIdentifierUnchecked(c, "c_ulong"),
+ return Tag.type.create(c.arena, "c_ulonglong");
+ } else return Tag.type.create(c.arena, "c_ulong"),
else => {
m.i -= 1;
- return transCreateNodeIdentifierUnchecked(c, "c_uint");
+ return Tag.type.create(c.arena, "c_uint");
},
} else {
- return transCreateNodeIdentifierUnchecked(c, "c_uint");
+ return Tag.type.create(c.arena, "c_uint");
},
.Keyword_signed => if (m.next()) |t| switch (t) {
- .Keyword_char => return transCreateNodeIdentifierUnchecked(c, "i8"),
- .Keyword_short => return transCreateNodeIdentifierUnchecked(c, "c_short"),
- .Keyword_int => return transCreateNodeIdentifierUnchecked(c, "c_int"),
+ .Keyword_char => return Tag.type.create(c.arena, "i8"),
+ .Keyword_short => return Tag.type.create(c.arena, "c_short"),
+ .Keyword_int => return Tag.type.create(c.arena, "c_int"),
.Keyword_long => if (m.peek() != null and m.peek().? == .Keyword_long) {
_ = m.next();
- return transCreateNodeIdentifierUnchecked(c, "c_longlong");
- } else return transCreateNodeIdentifierUnchecked(c, "c_long"),
+ return Tag.type.create(c.arena, "c_longlong");
+ } else return Tag.type.create(c.arena, "c_long"),
else => {
m.i -= 1;
- return transCreateNodeIdentifierUnchecked(c, "c_int");
+ return Tag.type.create(c.arena, "c_int");
},
} else {
- return transCreateNodeIdentifierUnchecked(c, "c_int");
+ return Tag.type.create(c.arena, "c_int");
},
.Keyword_enum, .Keyword_struct, .Keyword_union => {
// struct Foo will be declared as struct_Foo by transRecordDecl
@@ -6173,17 +4565,12 @@ fn parseCPrimaryExprInner(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*
return error.ParseError;
}
- const ident_token = try appendTokenFmt(c, .Identifier, "{s}_{s}", .{ slice, m.slice() });
- const identifier = try c.arena.create(ast.Node.OneToken);
- identifier.* = .{
- .base = .{ .tag = .Identifier },
- .token = ident_token,
- };
- return &identifier.base;
+ const name = try std.fmt.allocPrint(c.arena, "{s}_{s}", .{ slice, m.slice() });
+ return Tag.identifier.create(c.arena, name);
},
.Identifier => {
const mangled_name = scope.getAlias(slice);
- return transCreateNodeIdentifier(c, checkForBuiltinTypedef(mangled_name) orelse mangled_name);
+ return Tag.identifier.create(c.arena, builtin_typedef_map.get(mangled_name) orelse mangled_name);
},
.LParen => {
const inner_node = try parseCExpr(c, m, scope);
@@ -6213,10 +4600,6 @@ fn parseCPrimaryExprInner(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*
},
else => return inner_node,
}
-
- // hack to get zig fmt to render a comma in builtin calls
- _ = try appendToken(c, .Comma, ",");
-
const node_to_cast = try parseCExpr(c, m, scope);
if (saw_l_paren and m.next().? != .RParen) {
@@ -6224,28 +4607,7 @@ fn parseCPrimaryExprInner(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*
return error.ParseError;
}
- const lparen = try appendToken(c, .LParen, "(");
-
- //(@import("std").meta.cast(dest, x))
- const import_fn_call = try c.createBuiltinCall("@import", 1);
- const std_node = try transCreateNodeStringLiteral(c, "\"std\"");
- import_fn_call.params()[0] = std_node;
- import_fn_call.rparen_token = try appendToken(c, .RParen, ")");
- const inner_field_access = try transCreateNodeFieldAccess(c, &import_fn_call.base, "meta");
- const outer_field_access = try transCreateNodeFieldAccess(c, inner_field_access, "cast");
-
- const cast_fn_call = try c.createCall(outer_field_access, 2);
- cast_fn_call.params()[0] = inner_node;
- cast_fn_call.params()[1] = node_to_cast;
- cast_fn_call.rtoken = try appendToken(c, .RParen, ")");
-
- const group_node = try c.arena.create(ast.Node.GroupedExpression);
- group_node.* = .{
- .lparen = lparen,
- .expr = &cast_fn_call.base,
- .rparen = try appendToken(c, .RParen, ")"),
- };
- return &group_node.base;
+ return Tag.std_meta_cast.create(c.arena, .{ .lhs = inner_node, .rhs = node_to_cast });
},
else => {
try m.fail(c, "unable to translate C expr: unexpected token .{s}", .{@tagName(tok)});
@@ -6254,447 +4616,256 @@ fn parseCPrimaryExprInner(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*
}
}
-fn parseCPrimaryExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node {
+fn parseCPrimaryExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node {
var node = try parseCPrimaryExprInner(c, m, scope);
// In C the preprocessor would handle concatting strings while expanding macros.
// This should do approximately the same by concatting any strings and identifiers
// after a primary expression.
while (true) {
- var op_token: ast.TokenIndex = undefined;
- var op_id: ast.Node.Tag = undefined;
switch (m.peek().?) {
.StringLiteral, .Identifier => {},
else => break,
}
- const op_node = try c.arena.create(ast.Node.SimpleInfixOp);
- op_node.* = .{
- .base = .{ .tag = .ArrayCat },
- .op_token = try appendToken(c, .PlusPlus, "++"),
- .lhs = node,
- .rhs = try parseCPrimaryExprInner(c, m, scope),
- };
- node = &op_node.base;
+ node = try Tag.array_cat.create(c.arena, .{ .lhs = node, .rhs = try parseCPrimaryExprInner(c, m, scope) });
}
return node;
}
-fn nodeIsInfixOp(tag: ast.Node.Tag) bool {
- return switch (tag) {
- .Add,
- .AddWrap,
- .ArrayCat,
- .ArrayMult,
- .Assign,
- .AssignBitAnd,
- .AssignBitOr,
- .AssignBitShiftLeft,
- .AssignBitShiftRight,
- .AssignBitXor,
- .AssignDiv,
- .AssignSub,
- .AssignSubWrap,
- .AssignMod,
- .AssignAdd,
- .AssignAddWrap,
- .AssignMul,
- .AssignMulWrap,
- .BangEqual,
- .BitAnd,
- .BitOr,
- .BitShiftLeft,
- .BitShiftRight,
- .BitXor,
- .BoolAnd,
- .BoolOr,
- .Div,
- .EqualEqual,
- .ErrorUnion,
- .GreaterOrEqual,
- .GreaterThan,
- .LessOrEqual,
- .LessThan,
- .MergeErrorSets,
- .Mod,
- .Mul,
- .MulWrap,
- .Period,
- .Range,
- .Sub,
- .SubWrap,
- .UnwrapOptional,
- .Catch,
- => true,
-
- else => false,
- };
-}
-
-fn macroBoolToInt(c: *Context, node: *ast.Node) !*ast.Node {
+fn macroBoolToInt(c: *Context, node: Node) !Node {
if (!isBoolRes(node)) {
- if (!nodeIsInfixOp(node.tag)) return node;
-
- const group_node = try c.arena.create(ast.Node.GroupedExpression);
- group_node.* = .{
- .lparen = try appendToken(c, .LParen, "("),
- .expr = node,
- .rparen = try appendToken(c, .RParen, ")"),
- };
- return &group_node.base;
+ return node;
}
- const builtin_node = try c.createBuiltinCall("@boolToInt", 1);
- builtin_node.params()[0] = node;
- builtin_node.rparen_token = try appendToken(c, .RParen, ")");
- return &builtin_node.base;
+ return Tag.bool_to_int.create(c.arena, node);
}
-fn macroIntToBool(c: *Context, node: *ast.Node) !*ast.Node {
+fn macroIntToBool(c: *Context, node: Node) !Node {
if (isBoolRes(node)) {
- if (!nodeIsInfixOp(node.tag)) return node;
-
- const group_node = try c.arena.create(ast.Node.GroupedExpression);
- group_node.* = .{
- .lparen = try appendToken(c, .LParen, "("),
- .expr = node,
- .rparen = try appendToken(c, .RParen, ")"),
- };
- return &group_node.base;
+ return node;
}
- const op_token = try appendToken(c, .BangEqual, "!=");
- const zero = try transCreateNodeInt(c, 0);
- const res = try c.arena.create(ast.Node.SimpleInfixOp);
- res.* = .{
- .base = .{ .tag = .BangEqual },
- .op_token = op_token,
- .lhs = node,
- .rhs = zero,
- };
- const group_node = try c.arena.create(ast.Node.GroupedExpression);
- group_node.* = .{
- .lparen = try appendToken(c, .LParen, "("),
- .expr = &res.base,
- .rparen = try appendToken(c, .RParen, ")"),
- };
- return &group_node.base;
+ return Tag.not_equal.create(c.arena, .{ .lhs = node, .rhs = Tag.zero_literal.init() });
}
-fn macroGroup(c: *Context, node: *ast.Node) !*ast.Node {
- if (!nodeIsInfixOp(node.tag)) return node;
-
- const group_node = try c.arena.create(ast.Node.GroupedExpression);
- group_node.* = .{
- .lparen = try appendToken(c, .LParen, "("),
- .expr = node,
- .rparen = try appendToken(c, .RParen, ")"),
- };
- return &group_node.base;
-}
-
-fn parseCCondExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node {
+fn parseCCondExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node {
const node = try parseCOrExpr(c, m, scope);
if (m.peek().? != .QuestionMark) {
return node;
}
_ = m.next();
- // must come immediately after expr
- _ = try appendToken(c, .RParen, ")");
- const if_node = try transCreateNodeIf(c);
- if_node.condition = node;
- if_node.body = try parseCOrExpr(c, m, scope);
+ const then_body = try parseCOrExpr(c, m, scope);
if (m.next().? != .Colon) {
try m.fail(c, "unable to translate C expr: expected ':'", .{});
return error.ParseError;
}
- if_node.@"else" = try transCreateNodeElse(c);
- if_node.@"else".?.body = try parseCCondExpr(c, m, scope);
- return &if_node.base;
+ const else_body = try parseCCondExpr(c, m, scope);
+ return Tag.@"if".create(c.arena, .{ .cond = node, .then = then_body, .@"else" = else_body });
}
-fn parseCOrExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node {
+fn parseCOrExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node {
var node = try parseCAndExpr(c, m, scope);
while (m.next().? == .PipePipe) {
- const lhs_node = try macroIntToBool(c, node);
- const op_token = try appendToken(c, .Keyword_or, "or");
- const rhs_node = try parseCAndExpr(c, m, scope);
- const op_node = try c.arena.create(ast.Node.SimpleInfixOp);
- op_node.* = .{
- .base = .{ .tag = .BoolOr },
- .op_token = op_token,
- .lhs = lhs_node,
- .rhs = try macroIntToBool(c, rhs_node),
- };
- node = &op_node.base;
+ const lhs = try macroIntToBool(c, node);
+ const rhs = try macroIntToBool(c, try parseCAndExpr(c, m, scope));
+ node = try Tag.@"or".create(c.arena, .{ .lhs = lhs, .rhs = rhs });
}
m.i -= 1;
return node;
}
-fn parseCAndExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node {
+fn parseCAndExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node {
var node = try parseCBitOrExpr(c, m, scope);
while (m.next().? == .AmpersandAmpersand) {
- const lhs_node = try macroIntToBool(c, node);
- const op_token = try appendToken(c, .Keyword_and, "and");
- const rhs_node = try parseCBitOrExpr(c, m, scope);
- const op_node = try c.arena.create(ast.Node.SimpleInfixOp);
- op_node.* = .{
- .base = .{ .tag = .BoolAnd },
- .op_token = op_token,
- .lhs = lhs_node,
- .rhs = try macroIntToBool(c, rhs_node),
- };
- node = &op_node.base;
+ const lhs = try macroIntToBool(c, node);
+ const rhs = try macroIntToBool(c, try parseCBitOrExpr(c, m, scope));
+ node = try Tag.@"and".create(c.arena, .{ .lhs = lhs, .rhs = rhs });
}
m.i -= 1;
return node;
}
-fn parseCBitOrExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node {
+fn parseCBitOrExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node {
var node = try parseCBitXorExpr(c, m, scope);
while (m.next().? == .Pipe) {
- const lhs_node = try macroBoolToInt(c, node);
- const op_token = try appendToken(c, .Pipe, "|");
- const rhs_node = try parseCBitXorExpr(c, m, scope);
- const op_node = try c.arena.create(ast.Node.SimpleInfixOp);
- op_node.* = .{
- .base = .{ .tag = .BitOr },
- .op_token = op_token,
- .lhs = lhs_node,
- .rhs = try macroBoolToInt(c, rhs_node),
- };
- node = &op_node.base;
+ const lhs = try macroBoolToInt(c, node);
+ const rhs = try macroBoolToInt(c, try parseCBitXorExpr(c, m, scope));
+ node = try Tag.bit_or.create(c.arena, .{ .lhs = lhs, .rhs = rhs });
}
m.i -= 1;
return node;
}
-fn parseCBitXorExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node {
+fn parseCBitXorExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node {
var node = try parseCBitAndExpr(c, m, scope);
while (m.next().? == .Caret) {
- const lhs_node = try macroBoolToInt(c, node);
- const op_token = try appendToken(c, .Caret, "^");
- const rhs_node = try parseCBitAndExpr(c, m, scope);
- const op_node = try c.arena.create(ast.Node.SimpleInfixOp);
- op_node.* = .{
- .base = .{ .tag = .BitXor },
- .op_token = op_token,
- .lhs = lhs_node,
- .rhs = try macroBoolToInt(c, rhs_node),
- };
- node = &op_node.base;
+ const lhs = try macroBoolToInt(c, node);
+ const rhs = try macroBoolToInt(c, try parseCBitAndExpr(c, m, scope));
+ node = try Tag.bit_xor.create(c.arena, .{ .lhs = lhs, .rhs = rhs });
}
m.i -= 1;
return node;
}
-fn parseCBitAndExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node {
+fn parseCBitAndExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node {
var node = try parseCEqExpr(c, m, scope);
while (m.next().? == .Ampersand) {
- const lhs_node = try macroBoolToInt(c, node);
- const op_token = try appendToken(c, .Ampersand, "&");
- const rhs_node = try parseCEqExpr(c, m, scope);
- const op_node = try c.arena.create(ast.Node.SimpleInfixOp);
- op_node.* = .{
- .base = .{ .tag = .BitAnd },
- .op_token = op_token,
- .lhs = lhs_node,
- .rhs = try macroBoolToInt(c, rhs_node),
- };
- node = &op_node.base;
+ const lhs = try macroBoolToInt(c, node);
+ const rhs = try macroBoolToInt(c, try parseCEqExpr(c, m, scope));
+ node = try Tag.bit_and.create(c.arena, .{ .lhs = lhs, .rhs = rhs });
}
m.i -= 1;
return node;
}
-fn parseCEqExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node {
+fn parseCEqExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node {
var node = try parseCRelExpr(c, m, scope);
while (true) {
- var op_token: ast.TokenIndex = undefined;
- var op_id: ast.Node.Tag = undefined;
switch (m.peek().?) {
.BangEqual => {
- op_token = try appendToken(c, .BangEqual, "!=");
- op_id = .BangEqual;
+ _ = m.next();
+ const lhs = try macroBoolToInt(c, node);
+ const rhs = try macroBoolToInt(c, try parseCRelExpr(c, m, scope));
+ node = try Tag.not_equal.create(c.arena, .{ .lhs = lhs, .rhs = rhs });
},
.EqualEqual => {
- op_token = try appendToken(c, .EqualEqual, "==");
- op_id = .EqualEqual;
+ _ = m.next();
+ const lhs = try macroBoolToInt(c, node);
+ const rhs = try macroBoolToInt(c, try parseCRelExpr(c, m, scope));
+ node = try Tag.equal.create(c.arena, .{ .lhs = lhs, .rhs = rhs });
},
else => return node,
}
- _ = m.next();
- const lhs_node = try macroBoolToInt(c, node);
- const rhs_node = try parseCRelExpr(c, m, scope);
- const op_node = try c.arena.create(ast.Node.SimpleInfixOp);
- op_node.* = .{
- .base = .{ .tag = op_id },
- .op_token = op_token,
- .lhs = lhs_node,
- .rhs = try macroBoolToInt(c, rhs_node),
- };
- node = &op_node.base;
}
}
-fn parseCRelExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node {
+fn parseCRelExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node {
var node = try parseCShiftExpr(c, m, scope);
while (true) {
- var op_token: ast.TokenIndex = undefined;
- var op_id: ast.Node.Tag = undefined;
switch (m.peek().?) {
.AngleBracketRight => {
- op_token = try appendToken(c, .AngleBracketRight, ">");
- op_id = .GreaterThan;
+ _ = m.next();
+ const lhs = try macroBoolToInt(c, node);
+ const rhs = try macroBoolToInt(c, try parseCShiftExpr(c, m, scope));
+ node = try Tag.greater_than.create(c.arena, .{ .lhs = lhs, .rhs = rhs });
},
.AngleBracketRightEqual => {
- op_token = try appendToken(c, .AngleBracketRightEqual, ">=");
- op_id = .GreaterOrEqual;
+ _ = m.next();
+ const lhs = try macroBoolToInt(c, node);
+ const rhs = try macroBoolToInt(c, try parseCShiftExpr(c, m, scope));
+ node = try Tag.greater_than_equal.create(c.arena, .{ .lhs = lhs, .rhs = rhs });
},
.AngleBracketLeft => {
- op_token = try appendToken(c, .AngleBracketLeft, "<");
- op_id = .LessThan;
+ _ = m.next();
+ const lhs = try macroBoolToInt(c, node);
+ const rhs = try macroBoolToInt(c, try parseCShiftExpr(c, m, scope));
+ node = try Tag.less_than.create(c.arena, .{ .lhs = lhs, .rhs = rhs });
},
.AngleBracketLeftEqual => {
- op_token = try appendToken(c, .AngleBracketLeftEqual, "<=");
- op_id = .LessOrEqual;
+ _ = m.next();
+ const lhs = try macroBoolToInt(c, node);
+ const rhs = try macroBoolToInt(c, try parseCShiftExpr(c, m, scope));
+ node = try Tag.less_than_equal.create(c.arena, .{ .lhs = lhs, .rhs = rhs });
},
else => return node,
}
- _ = m.next();
- const lhs_node = try macroBoolToInt(c, node);
- const rhs_node = try parseCShiftExpr(c, m, scope);
- const op_node = try c.arena.create(ast.Node.SimpleInfixOp);
- op_node.* = .{
- .base = .{ .tag = op_id },
- .op_token = op_token,
- .lhs = lhs_node,
- .rhs = try macroBoolToInt(c, rhs_node),
- };
- node = &op_node.base;
}
}
-fn parseCShiftExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node {
+fn parseCShiftExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node {
var node = try parseCAddSubExpr(c, m, scope);
while (true) {
- var op_token: ast.TokenIndex = undefined;
- var op_id: ast.Node.Tag = undefined;
switch (m.peek().?) {
.AngleBracketAngleBracketLeft => {
- op_token = try appendToken(c, .AngleBracketAngleBracketLeft, "<<");
- op_id = .BitShiftLeft;
+ _ = m.next();
+ const lhs = try macroBoolToInt(c, node);
+ const rhs = try macroBoolToInt(c, try parseCAddSubExpr(c, m, scope));
+ node = try Tag.shl.create(c.arena, .{ .lhs = lhs, .rhs = rhs });
},
.AngleBracketAngleBracketRight => {
- op_token = try appendToken(c, .AngleBracketAngleBracketRight, ">>");
- op_id = .BitShiftRight;
+ _ = m.next();
+ const lhs = try macroBoolToInt(c, node);
+ const rhs = try macroBoolToInt(c, try parseCAddSubExpr(c, m, scope));
+ node = try Tag.shr.create(c.arena, .{ .lhs = lhs, .rhs = rhs });
},
else => return node,
}
- _ = m.next();
- const lhs_node = try macroBoolToInt(c, node);
- const rhs_node = try parseCAddSubExpr(c, m, scope);
- const op_node = try c.arena.create(ast.Node.SimpleInfixOp);
- op_node.* = .{
- .base = .{ .tag = op_id },
- .op_token = op_token,
- .lhs = lhs_node,
- .rhs = try macroBoolToInt(c, rhs_node),
- };
- node = &op_node.base;
}
}
-fn parseCAddSubExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node {
+fn parseCAddSubExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node {
var node = try parseCMulExpr(c, m, scope);
while (true) {
- var op_token: ast.TokenIndex = undefined;
- var op_id: ast.Node.Tag = undefined;
switch (m.peek().?) {
.Plus => {
- op_token = try appendToken(c, .Plus, "+");
- op_id = .Add;
+ _ = m.next();
+ const lhs = try macroBoolToInt(c, node);
+ const rhs = try macroBoolToInt(c, try parseCMulExpr(c, m, scope));
+ node = try Tag.add.create(c.arena, .{ .lhs = lhs, .rhs = rhs });
},
.Minus => {
- op_token = try appendToken(c, .Minus, "-");
- op_id = .Sub;
+ _ = m.next();
+ const lhs = try macroBoolToInt(c, node);
+ const rhs = try macroBoolToInt(c, try parseCMulExpr(c, m, scope));
+ node = try Tag.sub.create(c.arena, .{ .lhs = lhs, .rhs = rhs });
},
else => return node,
}
- _ = m.next();
- const lhs_node = try macroBoolToInt(c, node);
- const rhs_node = try parseCMulExpr(c, m, scope);
- const op_node = try c.arena.create(ast.Node.SimpleInfixOp);
- op_node.* = .{
- .base = .{ .tag = op_id },
- .op_token = op_token,
- .lhs = lhs_node,
- .rhs = try macroBoolToInt(c, rhs_node),
- };
- node = &op_node.base;
}
}
-fn parseCMulExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node {
+fn parseCMulExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node {
var node = try parseCUnaryExpr(c, m, scope);
while (true) {
- var op_token: ast.TokenIndex = undefined;
- var op_id: ast.Node.Tag = undefined;
switch (m.next().?) {
.Asterisk => {
- if (m.peek().? == .RParen) {
+ const next = m.peek().?;
+ if (next == .RParen or next == .Nl or next == .Eof) {
// type *)
- // hack to get zig fmt to render a comma in builtin calls
- _ = try appendToken(c, .Comma, ",");
-
// last token of `node`
const prev_id = m.list[m.i - 1].id;
if (prev_id == .Keyword_void) {
- const ptr = try transCreateNodePtrType(c, false, false, .Asterisk);
- ptr.rhs = node;
- const optional_node = try transCreateNodeSimplePrefixOp(c, .OptionalType, .QuestionMark, "?");
- optional_node.rhs = &ptr.base;
- return &optional_node.base;
+ const ptr = try Tag.single_pointer.create(c.arena, .{
+ .is_const = false,
+ .is_volatile = false,
+ .elem_type = node,
+ });
+ return Tag.optional_type.create(c.arena, ptr);
} else {
- const ptr = try transCreateNodePtrType(c, false, false, Token.Id.Identifier);
- ptr.rhs = node;
- return &ptr.base;
+ return Tag.c_pointer.create(c.arena, .{
+ .is_const = false,
+ .is_volatile = false,
+ .elem_type = node,
+ });
}
} else {
// expr * expr
- op_token = try appendToken(c, .Asterisk, "*");
- op_id = .BitShiftLeft;
+ const lhs = try macroBoolToInt(c, node);
+ const rhs = try macroBoolToInt(c, try parseCUnaryExpr(c, m, scope));
+ node = try Tag.mul.create(c.arena, .{ .lhs = lhs, .rhs = rhs });
}
},
.Slash => {
- op_id = .Div;
- op_token = try appendToken(c, .Slash, "/");
+ const lhs = try macroBoolToInt(c, node);
+ const rhs = try macroBoolToInt(c, try parseCUnaryExpr(c, m, scope));
+ node = try Tag.div.create(c.arena, .{ .lhs = lhs, .rhs = rhs });
},
.Percent => {
- op_id = .Mod;
- op_token = try appendToken(c, .Percent, "%");
+ const lhs = try macroBoolToInt(c, node);
+ const rhs = try macroBoolToInt(c, try parseCUnaryExpr(c, m, scope));
+ node = try Tag.mod.create(c.arena, .{ .lhs = lhs, .rhs = rhs });
},
else => {
m.i -= 1;
return node;
},
}
- const lhs_node = try macroBoolToInt(c, node);
- const rhs_node = try parseCUnaryExpr(c, m, scope);
- const op_node = try c.arena.create(ast.Node.SimpleInfixOp);
- op_node.* = .{
- .base = .{ .tag = op_id },
- .op_token = op_token,
- .lhs = lhs_node,
- .rhs = try macroBoolToInt(c, rhs_node),
- };
- node = &op_node.base;
}
}
-fn parseCPostfixExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node {
+fn parseCPostfixExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node {
var node = try parseCPrimaryExpr(c, m, scope);
while (true) {
switch (m.next().?) {
@@ -6704,38 +4875,33 @@ fn parseCPostfixExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.N
return error.ParseError;
}
- node = try transCreateNodeFieldAccess(c, node, m.slice());
- continue;
+ node = try Tag.field_access.create(c.arena, .{ .lhs = node, .field_name = m.slice() });
},
.Arrow => {
if (m.next().? != .Identifier) {
try m.fail(c, "unable to translate C expr: expected identifier", .{});
return error.ParseError;
}
- const deref = try transCreateNodePtrDeref(c, node);
- node = try transCreateNodeFieldAccess(c, deref, m.slice());
- continue;
+
+ const deref = try Tag.deref.create(c.arena, node);
+ node = try Tag.field_access.create(c.arena, .{ .lhs = deref, .field_name = m.slice() });
},
.LBracket => {
- const arr_node = try transCreateNodeArrayAccess(c, node);
- arr_node.index_expr = try parseCExpr(c, m, scope);
- arr_node.rtoken = try appendToken(c, .RBracket, "]");
- node = &arr_node.base;
+ const index = try macroBoolToInt(c, try parseCExpr(c, m, scope));
+ node = try Tag.array_access.create(c.arena, .{ .lhs = node, .rhs = index });
if (m.next().? != .RBracket) {
try m.fail(c, "unable to translate C expr: expected ']'", .{});
return error.ParseError;
}
- continue;
},
.LParen => {
- _ = try appendToken(c, .LParen, "(");
- var call_params = std.ArrayList(*ast.Node).init(c.gpa);
- defer call_params.deinit();
+ var args = std.ArrayList(Node).init(c.gpa);
+ defer args.deinit();
while (true) {
const arg = try parseCCondExpr(c, m, scope);
- try call_params.append(arg);
+ try args.append(arg);
switch (m.next().?) {
- .Comma => _ = try appendToken(c, .Comma, ","),
+ .Comma => {},
.RParen => break,
else => {
try m.fail(c, "unable to translate C expr: expected ',' or ')'", .{});
@@ -6743,32 +4909,17 @@ fn parseCPostfixExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.N
},
}
}
- const call_node = try ast.Node.Call.alloc(c.arena, call_params.items.len);
- call_node.* = .{
- .lhs = node,
- .params_len = call_params.items.len,
- .async_token = null,
- .rtoken = try appendToken(c, .RParen, ")"),
- };
- mem.copy(*ast.Node, call_node.params(), call_params.items);
- node = &call_node.base;
- continue;
+ node = try Tag.call.create(c.arena, .{ .lhs = node, .args = try c.arena.dupe(Node, args.items) });
},
.LBrace => {
- // must come immediately after `node`
- _ = try appendToken(c, .Comma, ",");
-
- const dot = try appendToken(c, .Period, ".");
- _ = try appendToken(c, .LBrace, "{");
-
- var init_vals = std.ArrayList(*ast.Node).init(c.gpa);
+ var init_vals = std.ArrayList(Node).init(c.gpa);
defer init_vals.deinit();
while (true) {
const val = try parseCCondExpr(c, m, scope);
try init_vals.append(val);
switch (m.next().?) {
- .Comma => _ = try appendToken(c, .Comma, ","),
+ .Comma => {},
.RBrace => break,
else => {
try m.fail(c, "unable to translate C expr: expected ',' or '}}'", .{});
@@ -6776,29 +4927,8 @@ fn parseCPostfixExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.N
},
}
}
- const tuple_node = try ast.Node.StructInitializerDot.alloc(c.arena, init_vals.items.len);
- tuple_node.* = .{
- .dot = dot,
- .list_len = init_vals.items.len,
- .rtoken = try appendToken(c, .RBrace, "}"),
- };
- mem.copy(*ast.Node, tuple_node.list(), init_vals.items);
-
- //(@import("std").mem.zeroInit(T, .{x}))
- const import_fn_call = try c.createBuiltinCall("@import", 1);
- const std_node = try transCreateNodeStringLiteral(c, "\"std\"");
- import_fn_call.params()[0] = std_node;
- import_fn_call.rparen_token = try appendToken(c, .RParen, ")");
- const inner_field_access = try transCreateNodeFieldAccess(c, &import_fn_call.base, "mem");
- const outer_field_access = try transCreateNodeFieldAccess(c, inner_field_access, "zeroInit");
-
- const zero_init_call = try c.createCall(outer_field_access, 2);
- zero_init_call.params()[0] = node;
- zero_init_call.params()[1] = &tuple_node.base;
- zero_init_call.rtoken = try appendToken(c, .RParen, ")");
-
- node = &zero_init_call.base;
- continue;
+ const tuple_node = try Tag.tuple.create(c.arena, try c.arena.dupe(Node, init_vals.items));
+ node = try Tag.std_mem_zeroinit.create(c.arena, .{ .lhs = node, .rhs = tuple_node });
},
.PlusPlus, .MinusMinus => {
try m.fail(c, "TODO postfix inc/dec expr", .{});
@@ -6812,35 +4942,31 @@ fn parseCPostfixExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.N
}
}
-fn parseCUnaryExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Node {
+fn parseCUnaryExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node {
switch (m.next().?) {
.Bang => {
- const node = try transCreateNodeSimplePrefixOp(c, .BoolNot, .Bang, "!");
- node.rhs = try macroIntToBool(c, try parseCUnaryExpr(c, m, scope));
- return &node.base;
+ const operand = try macroIntToBool(c, try parseCUnaryExpr(c, m, scope));
+ return Tag.not.create(c.arena, operand);
},
.Minus => {
- const node = try transCreateNodeSimplePrefixOp(c, .Negation, .Minus, "-");
- node.rhs = try macroBoolToInt(c, try parseCUnaryExpr(c, m, scope));
- return &node.base;
+ const operand = try macroBoolToInt(c, try parseCUnaryExpr(c, m, scope));
+ return Tag.negate.create(c.arena, operand);
},
.Plus => return try parseCUnaryExpr(c, m, scope),
.Tilde => {
- const node = try transCreateNodeSimplePrefixOp(c, .BitNot, .Tilde, "~");
- node.rhs = try macroBoolToInt(c, try parseCUnaryExpr(c, m, scope));
- return &node.base;
+ const operand = try macroBoolToInt(c, try parseCUnaryExpr(c, m, scope));
+ return Tag.bit_not.create(c.arena, operand);
},
.Asterisk => {
- const node = try macroGroup(c, try parseCUnaryExpr(c, m, scope));
- return try transCreateNodePtrDeref(c, node);
+ const operand = try parseCUnaryExpr(c, m, scope);
+ return Tag.deref.create(c.arena, operand);
},
.Ampersand => {
- const node = try transCreateNodeSimplePrefixOp(c, .AddressOf, .Ampersand, "&");
- node.rhs = try macroGroup(c, try parseCUnaryExpr(c, m, scope));
- return &node.base;
+ const operand = try parseCUnaryExpr(c, m, scope);
+ return Tag.address_of.create(c.arena, operand);
},
.Keyword_sizeof => {
- const inner = if (m.peek().? == .LParen) blk: {
+ const operand = if (m.peek().? == .LParen) blk: {
_ = m.next();
// C grammar says this should be 'type-name' but we have to
// use parseCMulExpr to correctly handle pointer types.
@@ -6852,18 +4978,7 @@ fn parseCUnaryExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Nod
break :blk inner;
} else try parseCUnaryExpr(c, m, scope);
- //(@import("std").meta.sizeof(dest, x))
- const import_fn_call = try c.createBuiltinCall("@import", 1);
- const std_node = try transCreateNodeStringLiteral(c, "\"std\"");
- import_fn_call.params()[0] = std_node;
- import_fn_call.rparen_token = try appendToken(c, .RParen, ")");
- const inner_field_access = try transCreateNodeFieldAccess(c, &import_fn_call.base, "meta");
- const outer_field_access = try transCreateNodeFieldAccess(c, inner_field_access, "sizeof");
-
- const sizeof_call = try c.createCall(outer_field_access, 1);
- sizeof_call.params()[0] = inner;
- sizeof_call.rtoken = try appendToken(c, .RParen, ")");
- return &sizeof_call.base;
+ return Tag.std_meta_sizeof.create(c.arena, operand);
},
.Keyword_alignof => {
            // TODO this won't work if using <stdalign.h>'s
@@ -6874,16 +4989,13 @@ fn parseCUnaryExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Nod
}
// C grammar says this should be 'type-name' but we have to
// use parseCMulExpr to correctly handle pointer types.
- const inner = try parseCMulExpr(c, m, scope);
+ const operand = try parseCMulExpr(c, m, scope);
if (m.next().? != .RParen) {
try m.fail(c, "unable to translate C expr: expected ')'", .{});
return error.ParseError;
}
- const builtin_call = try c.createBuiltinCall("@alignOf", 1);
- builtin_call.params()[0] = inner;
- builtin_call.rparen_token = try appendToken(c, .RParen, ")");
- return &builtin_call.base;
+ return Tag.alignof.create(c.arena, operand);
},
.PlusPlus, .MinusMinus => {
try m.fail(c, "TODO unary inc/dec expr", .{});
@@ -6896,51 +5008,40 @@ fn parseCUnaryExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!*ast.Nod
}
}
-fn tokenSlice(c: *Context, token: ast.TokenIndex) []u8 {
- const tok = c.token_locs.items[token];
- const slice = c.source_buffer.items[tok.start..tok.end];
- return if (mem.startsWith(u8, slice, "@\""))
- slice[2 .. slice.len - 1]
- else
- slice;
-}
-
-fn getContainer(c: *Context, node: *ast.Node) ?*ast.Node {
- switch (node.tag) {
- .ContainerDecl,
- .AddressOf,
- .Await,
- .BitNot,
- .BoolNot,
- .OptionalType,
- .Negation,
- .NegationWrap,
- .Resume,
- .Try,
- .ArrayType,
- .ArrayTypeSentinel,
- .PtrType,
- .SliceType,
+fn getContainer(c: *Context, node: Node) ?Node {
+ switch (node.tag()) {
+ .@"union",
+ .@"struct",
+ .@"enum",
+ .address_of,
+ .bit_not,
+ .not,
+ .optional_type,
+ .negate,
+ .negate_wrap,
+ .array_type,
+ .c_pointer,
+ .single_pointer,
=> return node,
- .Identifier => {
- const ident = node.castTag(.Identifier).?;
- if (c.global_scope.sym_table.get(tokenSlice(c, ident.token))) |value| {
- if (value.cast(ast.Node.VarDecl)) |var_decl|
- return getContainer(c, var_decl.getInitNode().?);
+ .identifier => {
+ const ident = node.castTag(.identifier).?;
+ if (c.global_scope.sym_table.get(ident.data)) |value| {
+ if (value.castTag(.var_decl)) |var_decl|
+ return getContainer(c, var_decl.data.init.?);
+ if (value.castTag(.var_simple) orelse value.castTag(.pub_var_simple)) |var_decl|
+ return getContainer(c, var_decl.data.init);
}
},
- .Period => {
- const infix = node.castTag(.Period).?;
+ .field_access => {
+ const field_access = node.castTag(.field_access).?;
- if (getContainerTypeOf(c, infix.lhs)) |ty_node| {
- if (ty_node.cast(ast.Node.ContainerDecl)) |container| {
- for (container.fieldsAndDecls()) |field_ref| {
- const field = field_ref.cast(ast.Node.ContainerField).?;
- const ident = infix.rhs.castTag(.Identifier).?;
- if (mem.eql(u8, tokenSlice(c, field.name_token), tokenSlice(c, ident.token))) {
- return getContainer(c, field.type_expr.?);
+ if (getContainerTypeOf(c, field_access.data.lhs)) |ty_node| {
+ if (ty_node.castTag(.@"struct") orelse ty_node.castTag(.@"union")) |container| {
+ for (container.data.fields) |field| {
+ if (mem.eql(u8, field.name, field_access.data.field_name)) {
+ return getContainer(c, field.type);
}
}
}
@@ -6952,22 +5053,19 @@ fn getContainer(c: *Context, node: *ast.Node) ?*ast.Node {
return null;
}
-fn getContainerTypeOf(c: *Context, ref: *ast.Node) ?*ast.Node {
- if (ref.castTag(.Identifier)) |ident| {
- if (c.global_scope.sym_table.get(tokenSlice(c, ident.token))) |value| {
- if (value.cast(ast.Node.VarDecl)) |var_decl| {
- if (var_decl.getTypeNode()) |ty|
- return getContainer(c, ty);
+fn getContainerTypeOf(c: *Context, ref: Node) ?Node {
+ if (ref.castTag(.identifier)) |ident| {
+ if (c.global_scope.sym_table.get(ident.data)) |value| {
+ if (value.castTag(.var_decl)) |var_decl| {
+ return getContainer(c, var_decl.data.type);
}
}
- } else if (ref.castTag(.Period)) |infix| {
- if (getContainerTypeOf(c, infix.lhs)) |ty_node| {
- if (ty_node.cast(ast.Node.ContainerDecl)) |container| {
- for (container.fieldsAndDecls()) |field_ref| {
- const field = field_ref.cast(ast.Node.ContainerField).?;
- const ident = infix.rhs.castTag(.Identifier).?;
- if (mem.eql(u8, tokenSlice(c, field.name_token), tokenSlice(c, ident.token))) {
- return getContainer(c, field.type_expr.?);
+ } else if (ref.castTag(.field_access)) |field_access| {
+ if (getContainerTypeOf(c, field_access.data.lhs)) |ty_node| {
+ if (ty_node.castTag(.@"struct") orelse ty_node.castTag(.@"union")) |container| {
+ for (container.data.fields) |field| {
+ if (mem.eql(u8, field.name, field_access.data.field_name)) {
+ return getContainer(c, field.type);
}
}
} else
@@ -6977,11 +5075,16 @@ fn getContainerTypeOf(c: *Context, ref: *ast.Node) ?*ast.Node {
return null;
}
-fn getFnProto(c: *Context, ref: *ast.Node) ?*ast.Node.FnProto {
- const init = if (ref.cast(ast.Node.VarDecl)) |v| v.getInitNode().? else return null;
+fn getFnProto(c: *Context, ref: Node) ?*ast.Payload.Func {
+ const init = if (ref.castTag(.var_decl)) |v|
+ v.data.init orelse return null
+ else if (ref.castTag(.var_simple) orelse ref.castTag(.pub_var_simple)) |v|
+ v.data.init
+ else
+ return null;
if (getContainerTypeOf(c, init)) |ty_node| {
- if (ty_node.castTag(.OptionalType)) |prefix| {
- if (prefix.rhs.cast(ast.Node.FnProto)) |fn_proto| {
+ if (ty_node.castTag(.optional_type)) |prefix| {
+ if (prefix.data.castTag(.func)) |fn_proto| {
return fn_proto;
}
}
diff --git a/src/translate_c/ast.zig b/src/translate_c/ast.zig
new file mode 100644
index 000000000000..3bc20271cc2c
--- /dev/null
+++ b/src/translate_c/ast.zig
@@ -0,0 +1,2529 @@
+// SPDX-License-Identifier: MIT
+// Copyright (c) 2021 Zig Contributors
+// This file is part of [zig](https://ziglang.org/), which is MIT licensed.
+// The MIT license requires this copyright notice to be included in all copies
+// and substantial portions of the software.
+const std = @import("std");
+const Type = @import("../type.zig").Type;
+const Allocator = std.mem.Allocator;
+
+pub const Node = extern union {
+ /// If the tag value is less than Tag.no_payload_count, then no pointer
+ /// dereference is needed.
+ tag_if_small_enough: usize,
+ ptr_otherwise: *Payload,
+
+ pub const Tag = enum {
+ /// Declarations add themselves to the correct scopes and should not be emitted as this tag.
+ declaration,
+ null_literal,
+ undefined_literal,
+ /// opaque {}
+ opaque_literal,
+ true_literal,
+ false_literal,
+ empty_block,
+ return_void,
+ zero_literal,
+ one_literal,
+ void_type,
+ noreturn_type,
+ @"anytype",
+ @"continue",
+ @"break",
+ /// pub usingnamespace @import("std").c.builtins;
+ usingnamespace_builtins,
+ // After this, the tag requires a payload.
+
+ integer_literal,
+ float_literal,
+ string_literal,
+ char_literal,
+ identifier,
+ @"if",
+ /// if (!operand) break;
+ if_not_break,
+ @"while",
+ /// while (true) operand
+ while_true,
+ @"switch",
+ /// else => operand,
+ switch_else,
+ /// items => body,
+ switch_prong,
+ break_val,
+ @"return",
+ field_access,
+ array_access,
+ call,
+ var_decl,
+ func,
+ warning,
+ /// All enums are non-exhaustive
+ @"enum",
+ @"struct",
+ @"union",
+ array_init,
+ tuple,
+ container_init,
+ std_meta_cast,
+ /// _ = operand;
+ discard,
+
+ // a + b
+ add,
+        // a += b
+ add_assign,
+        // a +% b
+ add_wrap,
+ add_wrap_assign,
+ sub,
+ sub_assign,
+ sub_wrap,
+ sub_wrap_assign,
+ mul,
+ mul_assign,
+ mul_wrap,
+ mul_wrap_assign,
+ div,
+ div_assign,
+ shl,
+ shl_assign,
+ shr,
+ shr_assign,
+ mod,
+ mod_assign,
+ @"and",
+ @"or",
+ less_than,
+ less_than_equal,
+ greater_than,
+ greater_than_equal,
+ equal,
+ not_equal,
+ bit_and,
+ bit_and_assign,
+ bit_or,
+ bit_or_assign,
+ bit_xor,
+ bit_xor_assign,
+ array_cat,
+ ellipsis3,
+ assign,
+
+ log2_int_type,
+ /// @import("std").math.Log2Int(operand)
+ std_math_Log2Int,
+ /// @intCast(lhs, rhs)
+ int_cast,
+ /// @rem(lhs, rhs)
+ rem,
+ /// @divTrunc(lhs, rhs)
+ div_trunc,
+ /// @boolToInt(operand)
+ bool_to_int,
+ /// @as(lhs, rhs)
+ as,
+ /// @truncate(lhs, rhs)
+ truncate,
+ /// @bitCast(lhs, rhs)
+ bit_cast,
+ /// @floatCast(lhs, rhs)
+ float_cast,
+ /// @floatToInt(lhs, rhs)
+ float_to_int,
+ /// @intToFloat(lhs, rhs)
+ int_to_float,
+ /// @intToEnum(lhs, rhs)
+ int_to_enum,
+ /// @enumToInt(operand)
+ enum_to_int,
+ /// @intToPtr(lhs, rhs)
+ int_to_ptr,
+ /// @ptrToInt(operand)
+ ptr_to_int,
+ /// @alignCast(lhs, rhs)
+ align_cast,
+ /// @ptrCast(lhs, rhs)
+ ptr_cast,
+
+ negate,
+ negate_wrap,
+ bit_not,
+ not,
+ address_of,
+ /// .?
+ unwrap,
+ /// .*
+ deref,
+
+ block,
+ /// { operand }
+ block_single,
+
+ sizeof,
+ alignof,
+ typeof,
+ type,
+
+ optional_type,
+ c_pointer,
+ single_pointer,
+ array_type,
+
+ /// @import("std").meta.sizeof(operand)
+ std_meta_sizeof,
+ /// @import("std").mem.zeroes(operand)
+ std_mem_zeroes,
+ /// @import("std").mem.zeroInit(lhs, rhs)
+ std_mem_zeroinit,
+ // pub const name = @compileError(msg);
+ fail_decl,
+ // var actual = mangled;
+ arg_redecl,
+ /// pub const alias = actual;
+ alias,
+ /// const name = init;
+ var_simple,
+ /// pub const name = init;
+ pub_var_simple,
+ /// pub const enum_field_name = @enumToInt(enum_name.field_name);
+ pub_enum_redecl,
+ enum_redecl,
+
+ /// pub inline fn name(params) return_type body
+ pub_inline_fn,
+
+ /// [0]type{}
+ empty_array,
+ /// [1]type{val} ** count
+ array_filler,
+
+ pub const last_no_payload_tag = Tag.usingnamespace_builtins;
+ pub const no_payload_count = @enumToInt(last_no_payload_tag) + 1;
+
+ pub fn Type(comptime t: Tag) type {
+ return switch (t) {
+ .declaration,
+ .null_literal,
+ .undefined_literal,
+ .opaque_literal,
+ .true_literal,
+ .false_literal,
+ .empty_block,
+ .usingnamespace_builtins,
+ .return_void,
+ .zero_literal,
+ .one_literal,
+ .void_type,
+ .noreturn_type,
+ .@"anytype",
+ .@"continue",
+ .@"break",
+ => @compileError("Type Tag " ++ @tagName(t) ++ " has no payload"),
+
+ .std_mem_zeroes,
+ .@"return",
+ .discard,
+ .std_math_Log2Int,
+ .negate,
+ .negate_wrap,
+ .bit_not,
+ .not,
+ .optional_type,
+ .address_of,
+ .unwrap,
+ .deref,
+ .ptr_to_int,
+ .enum_to_int,
+ .empty_array,
+ .while_true,
+ .if_not_break,
+ .switch_else,
+ .block_single,
+ .std_meta_sizeof,
+ .bool_to_int,
+ .sizeof,
+ .alignof,
+ .typeof,
+ => Payload.UnOp,
+
+ .add,
+ .add_assign,
+ .add_wrap,
+ .add_wrap_assign,
+ .sub,
+ .sub_assign,
+ .sub_wrap,
+ .sub_wrap_assign,
+ .mul,
+ .mul_assign,
+ .mul_wrap,
+ .mul_wrap_assign,
+ .div,
+ .div_assign,
+ .shl,
+ .shl_assign,
+ .shr,
+ .shr_assign,
+ .mod,
+ .mod_assign,
+ .@"and",
+ .@"or",
+ .less_than,
+ .less_than_equal,
+ .greater_than,
+ .greater_than_equal,
+ .equal,
+ .not_equal,
+ .bit_and,
+ .bit_and_assign,
+ .bit_or,
+ .bit_or_assign,
+ .bit_xor,
+ .bit_xor_assign,
+ .div_trunc,
+ .rem,
+ .int_cast,
+ .as,
+ .truncate,
+ .bit_cast,
+ .float_cast,
+ .float_to_int,
+ .int_to_float,
+ .int_to_enum,
+ .int_to_ptr,
+ .array_cat,
+ .ellipsis3,
+ .assign,
+ .align_cast,
+ .array_access,
+ .std_mem_zeroinit,
+ .ptr_cast,
+ => Payload.BinOp,
+
+ .integer_literal,
+ .float_literal,
+ .string_literal,
+ .char_literal,
+ .identifier,
+ .warning,
+ .type,
+ => Payload.Value,
+ .@"if" => Payload.If,
+ .@"while" => Payload.While,
+            .@"switch", .array_init, .switch_prong => Payload.Switch,
+ .break_val => Payload.BreakVal,
+ .call => Payload.Call,
+ .var_decl => Payload.VarDecl,
+ .func => Payload.Func,
+ .@"enum" => Payload.Enum,
+ .@"struct", .@"union" => Payload.Record,
+ .tuple => Payload.TupleInit,
+ .container_init => Payload.ContainerInit,
+ .std_meta_cast => Payload.Infix,
+ .block => Payload.Block,
+ .c_pointer, .single_pointer => Payload.Pointer,
+ .array_type => Payload.Array,
+ .arg_redecl, .alias, .fail_decl => Payload.ArgRedecl,
+ .log2_int_type => Payload.Log2IntType,
+ .var_simple, .pub_var_simple => Payload.SimpleVarDecl,
+ .pub_enum_redecl, .enum_redecl => Payload.EnumRedecl,
+ .array_filler => Payload.ArrayFiller,
+ .pub_inline_fn => Payload.PubInlineFn,
+ .field_access => Payload.FieldAccess,
+ };
+ }
+
+ pub fn init(comptime t: Tag) Node {
+ comptime std.debug.assert(@enumToInt(t) < Tag.no_payload_count);
+ return .{ .tag_if_small_enough = @enumToInt(t) };
+ }
+
+ pub fn create(comptime t: Tag, ally: *Allocator, data: Data(t)) error{OutOfMemory}!Node {
+ const ptr = try ally.create(t.Type());
+ ptr.* = .{
+ .base = .{ .tag = t },
+ .data = data,
+ };
+ return Node{ .ptr_otherwise = &ptr.base };
+ }
+
+ pub fn Data(comptime t: Tag) type {
+ return std.meta.fieldInfo(t.Type(), .data).field_type;
+ }
+ };
+
+ pub fn tag(self: Node) Tag {
+ if (self.tag_if_small_enough < Tag.no_payload_count) {
+ return @intToEnum(Tag, @intCast(std.meta.Tag(Tag), self.tag_if_small_enough));
+ } else {
+ return self.ptr_otherwise.tag;
+ }
+ }
+
+ pub fn castTag(self: Node, comptime t: Tag) ?*t.Type() {
+ if (self.tag_if_small_enough < Tag.no_payload_count)
+ return null;
+
+ if (self.ptr_otherwise.tag == t)
+ return @fieldParentPtr(t.Type(), "base", self.ptr_otherwise);
+
+ return null;
+ }
+
+ pub fn initPayload(payload: *Payload) Node {
+ std.debug.assert(@enumToInt(payload.tag) >= Tag.no_payload_count);
+ return .{ .ptr_otherwise = payload };
+ }
+
+ pub fn isNoreturn(node: Node, break_counts: bool) bool {
+ switch (node.tag()) {
+ .block => {
+ const block_node = node.castTag(.block).?;
+ if (block_node.data.stmts.len == 0) return false;
+
+ const last = block_node.data.stmts[block_node.data.stmts.len - 1];
+ return last.isNoreturn(break_counts);
+ },
+ .@"switch" => {
+ const switch_node = node.castTag(.@"switch").?;
+
+ for (switch_node.data.cases) |case| {
+ const body = if (case.castTag(.switch_else)) |some|
+ some.data
+ else if (case.castTag(.switch_prong)) |some|
+ some.data.cond
+ else unreachable;
+
+ if (!body.isNoreturn(break_counts)) return false;
+ }
+ return true;
+ },
+ .@"return", .return_void => return true,
+ .@"break" => if (break_counts) return true,
+ else => {},
+ }
+ return false;
+ }
+
+};
+
+pub const Payload = struct {
+ tag: Node.Tag,
+
+ pub const Infix = struct {
+ base: Payload,
+ data: struct {
+ lhs: Node,
+ rhs: Node,
+ },
+ };
+
+ pub const Value = struct {
+ base: Payload,
+ data: []const u8,
+ };
+
+ pub const UnOp = struct {
+ base: Payload,
+ data: Node,
+ };
+
+ pub const BinOp = struct {
+ base: Payload,
+ data: struct {
+ lhs: Node,
+ rhs: Node,
+ },
+ };
+
+ pub const If = struct {
+ base: Payload,
+ data: struct {
+ cond: Node,
+ then: Node,
+ @"else": ?Node,
+ },
+ };
+
+ pub const While = struct {
+ base: Payload,
+ data: struct {
+ cond: Node,
+ body: Node,
+ cont_expr: ?Node,
+ },
+ };
+
+ pub const Switch = struct {
+ base: Payload,
+ data: struct {
+ cond: Node,
+ cases: []Node,
+ },
+ };
+
+ pub const BreakVal = struct {
+ base: Payload,
+ data: struct {
+ label: ?[]const u8,
+ val: Node,
+ },
+ };
+
+ pub const Call = struct {
+ base: Payload,
+ data: struct {
+ lhs: Node,
+ args: []Node,
+ },
+ };
+
+ pub const VarDecl = struct {
+ base: Payload,
+ data: struct {
+ is_pub: bool,
+ is_const: bool,
+ is_extern: bool,
+ is_export: bool,
+ is_threadlocal: bool,
+ alignment: ?c_uint,
+ linksection_string: ?[]const u8,
+ name: []const u8,
+ type: Node,
+ init: ?Node,
+ },
+ };
+
+ pub const Func = struct {
+ base: Payload,
+ data: struct {
+ is_pub: bool,
+ is_extern: bool,
+ is_export: bool,
+ is_var_args: bool,
+ name: ?[]const u8,
+ linksection_string: ?[]const u8,
+ explicit_callconv: ?std.builtin.CallingConvention,
+ params: []Param,
+ return_type: Node,
+ body: ?Node,
+ alignment: ?c_uint,
+ },
+ };
+
+ pub const Param = struct {
+ is_noalias: bool,
+ name: ?[]const u8,
+ type: Node,
+ };
+
+ pub const Enum = struct {
+ base: Payload,
+ data: struct {
+ int_type: Node,
+ fields: []Field,
+ },
+
+ pub const Field = struct {
+ name: []const u8,
+ value: ?Node,
+ };
+ };
+
+ pub const Record = struct {
+ base: Payload,
+ data: struct {
+ is_packed: bool,
+ fields: []Field,
+ },
+
+ pub const Field = struct {
+ name: []const u8,
+ type: Node,
+ alignment: ?c_uint,
+ };
+ };
+
+ pub const TupleInit = struct {
+ base: Payload,
+ data: []Node,
+ };
+
+ pub const ContainerInit = struct {
+ base: Payload,
+ data: struct {
+ lhs: Node,
+ inits: []Initializer,
+ },
+
+ pub const Initializer = struct {
+ name: []const u8,
+ value: Node,
+ };
+ };
+
+ pub const Block = struct {
+ base: Payload,
+ data: struct {
+ label: ?[]const u8,
+ stmts: []Node,
+ },
+ };
+
+ pub const Array = struct {
+ base: Payload,
+ data: struct {
+ elem_type: Node,
+ len: usize,
+ },
+ };
+
+ pub const Pointer = struct {
+ base: Payload,
+ data: struct {
+ elem_type: Node,
+ is_const: bool,
+ is_volatile: bool,
+ },
+ };
+
+ pub const ArgRedecl = struct {
+ base: Payload,
+ data: struct {
+ actual: []const u8,
+ mangled: []const u8,
+ },
+ };
+
+ pub const Log2IntType = struct {
+ base: Payload,
+ data: std.math.Log2Int(u64),
+ };
+
+ pub const SimpleVarDecl = struct {
+ base: Payload,
+ data: struct {
+ name: []const u8,
+ init: Node,
+ },
+ };
+
+ pub const EnumRedecl = struct {
+ base: Payload,
+ data: struct {
+ enum_val_name: []const u8,
+ field_name: []const u8,
+ enum_name: []const u8,
+ },
+ };
+
+ pub const ArrayFiller = struct {
+ base: Payload,
+ data: struct {
+ type: Node,
+ filler: Node,
+ count: usize,
+ },
+ };
+
+ pub const PubInlineFn = struct {
+ base: Payload,
+ data: struct {
+ name: []const u8,
+ params: []Param,
+ return_type: Node,
+ body: Node,
+ },
+ };
+
+ pub const FieldAccess = struct {
+ base: Payload,
+ data: struct {
+ lhs: Node,
+ field_name: []const u8,
+ },
+ };
+};
+
+/// Converts the nodes into a Zig ast.
+/// Caller must free the source slice.
+pub fn render(gpa: *Allocator, nodes: []const Node) !std.zig.ast.Tree {
+ var ctx = Context{
+ .gpa = gpa,
+ .buf = std.ArrayList(u8).init(gpa),
+ };
+ defer ctx.buf.deinit();
+ defer ctx.nodes.deinit(gpa);
+ defer ctx.extra_data.deinit(gpa);
+ defer ctx.tokens.deinit(gpa);
+
+ // Estimate that each top level node has 10 child nodes.
+ const estimated_node_count = nodes.len * 10;
+ try ctx.nodes.ensureCapacity(gpa, estimated_node_count);
+    // Estimate that each node has 2 tokens.
+ const estimated_tokens_count = estimated_node_count * 2;
+ try ctx.tokens.ensureCapacity(gpa, estimated_tokens_count);
+    // Estimate that each token is 3 bytes long.
+ const estimated_buf_len = estimated_tokens_count * 3;
+ try ctx.buf.ensureCapacity(estimated_buf_len);
+
+ ctx.nodes.appendAssumeCapacity(.{
+ .tag = .root,
+ .main_token = 0,
+ .data = .{
+ .lhs = undefined,
+ .rhs = undefined,
+ },
+ });
+
+ const root_members = blk: {
+ var result = std.ArrayList(NodeIndex).init(gpa);
+ defer result.deinit();
+
+ for (nodes) |node| {
+ const res = try renderNode(&ctx, node);
+ if (node.tag() == .warning) continue;
+ try result.append(res);
+ }
+ break :blk try ctx.listToSpan(result.items);
+ };
+
+ ctx.nodes.items(.data)[0] = .{
+ .lhs = root_members.start,
+ .rhs = root_members.end,
+ };
+
+ try ctx.tokens.append(gpa, .{
+ .tag = .eof,
+ .start = @intCast(u32, ctx.buf.items.len),
+ });
+
+ return std.zig.ast.Tree{
+ .source = ctx.buf.toOwnedSlice(),
+ .tokens = ctx.tokens.toOwnedSlice(),
+ .nodes = ctx.nodes.toOwnedSlice(),
+ .extra_data = ctx.extra_data.toOwnedSlice(gpa),
+ .errors = &.{},
+ };
+}
+
+const NodeIndex = std.zig.ast.Node.Index;
+const NodeSubRange = std.zig.ast.Node.SubRange;
+const TokenIndex = std.zig.ast.TokenIndex;
+const TokenTag = std.zig.Token.Tag;
+
+/// Shared state while converting the translate-c AST into a `std.zig.ast.Tree`.
+/// Rendering appends source text to `buf` and, in lockstep, tokens to `tokens`
+/// and nodes to `nodes`; `extra_data` holds variable-length node payloads.
+const Context = struct {
+    gpa: *Allocator,
+    // NOTE(review): `std.ArrayList` is allocator-managed; a `.{}` default
+    // supplies no allocator, so this relies on the caller overwriting `buf`
+    // before use (or the default never being analyzed) — verify against the
+    // targeted Zig version.
+    buf: std.ArrayList(u8) = .{},
+    nodes: std.zig.ast.NodeList = .{},
+    extra_data: std.ArrayListUnmanaged(std.zig.ast.Node.Index) = .{},
+    tokens: std.zig.ast.TokenList = .{},
+
+    /// Appends the formatted token text to `buf` and records a token of `tag`
+    /// starting at it; returns the new token's index. A trailing space is
+    /// always emitted so adjacent tokens can never fuse in the source buffer.
+    fn addTokenFmt(c: *Context, tag: TokenTag, comptime format: []const u8, args: anytype) Allocator.Error!TokenIndex {
+        const start_index = c.buf.items.len;
+        try c.buf.writer().print(format ++ " ", args);
+
+        try c.tokens.append(c.gpa, .{
+            .tag = tag,
+            .start = @intCast(u32, start_index),
+        });
+
+        return @intCast(u32, c.tokens.len - 1);
+    }
+
+    /// Emits `bytes` verbatim as a token of `tag`.
+    fn addToken(c: *Context, tag: TokenTag, bytes: []const u8) Allocator.Error!TokenIndex {
+        return addTokenFmt(c, tag, "{s}", .{bytes});
+    }
+
+    /// Emits `bytes` as an identifier token, @"..."-escaping it if needed.
+    fn addIdentifier(c: *Context, bytes: []const u8) Allocator.Error!TokenIndex {
+        return addTokenFmt(c, .identifier, "{s}", .{std.zig.fmtId(bytes)});
+    }
+
+    /// Copies `list` into `extra_data` and returns the [start, end) range of
+    /// the copied indices.
+    fn listToSpan(c: *Context, list: []const NodeIndex) Allocator.Error!NodeSubRange {
+        try c.extra_data.appendSlice(c.gpa, list);
+        return NodeSubRange{
+            .start = @intCast(NodeIndex, c.extra_data.items.len - list.len),
+            .end = @intCast(NodeIndex, c.extra_data.items.len),
+        };
+    }
+
+    /// Appends `elem` to the node list and returns its index.
+    fn addNode(c: *Context, elem: std.zig.ast.NodeList.Elem) Allocator.Error!NodeIndex {
+        const result = @intCast(NodeIndex, c.nodes.len);
+        try c.nodes.append(c.gpa, elem);
+        return result;
+    }
+
+    /// Serializes a std.zig.ast extra-data struct (all of whose fields must be
+    /// `NodeIndex`, enforced at comptime) into `extra_data`; returns the index
+    /// of the first written element.
+    fn addExtra(c: *Context, extra: anytype) Allocator.Error!NodeIndex {
+        const fields = std.meta.fields(@TypeOf(extra));
+        try c.extra_data.ensureCapacity(c.gpa, c.extra_data.items.len + fields.len);
+        const result = @intCast(u32, c.extra_data.items.len);
+        inline for (fields) |field| {
+            comptime std.debug.assert(field.field_type == NodeIndex);
+            c.extra_data.appendAssumeCapacity(@field(extra, field.name));
+        }
+        return result;
+    }
+};
+
+/// Renders every node in `nodes` and collects the resulting node indices into
+/// an `extra_data` span. Warnings are emitted as comment text only and are
+/// excluded from the span.
+fn renderNodes(c: *Context, nodes: []const Node) Allocator.Error!NodeSubRange {
+    var rendered = std.ArrayList(NodeIndex).init(c.gpa);
+    defer rendered.deinit();
+
+    for (nodes) |node| {
+        const index = try renderNode(c, node);
+        // A warning produces no AST node, only a comment line in the buffer.
+        if (node.tag() != .warning) try rendered.append(index);
+    }
+
+    return c.listToSpan(rendered.items);
+}
+
+/// Renders one translate-c AST `Node` into the in-progress `std.zig.ast` tree:
+/// emits the node's tokens into `c.buf`/`c.tokens` and returns the index of
+/// the newly added node in `c.nodes`. Index 0 (the root node) doubles as a
+/// "no result" sentinel and is returned only for `.warning`, which is emitted
+/// as a comment line of source text.
+fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
+    switch (node.tag()) {
+        .declaration => unreachable,
+        .warning => {
+            const payload = node.castTag(.warning).?.data;
+            try c.buf.appendSlice(payload);
+            try c.buf.append('\n');
+            // `@as` pins the payload type; a bare `0` does not coerce to this
+            // function's error-union result type.
+            return @as(NodeIndex, 0);
+        },
+        .usingnamespace_builtins => {
+            // pub usingnamespace @import("std").c.builtins;
+            _ = try c.addToken(.keyword_pub, "pub");
+            const usingnamespace_token = try c.addToken(.keyword_usingnamespace, "usingnamespace");
+            const import_node = try renderStdImport(c, "c", "builtins");
+            _ = try c.addToken(.semicolon, ";");
+
+            return c.addNode(.{
+                .tag = .@"usingnamespace",
+                .main_token = usingnamespace_token,
+                .data = .{
+                    .lhs = import_node,
+                    .rhs = undefined,
+                },
+            });
+        },
+        .std_math_Log2Int => {
+            const payload = node.castTag(.std_math_Log2Int).?.data;
+            const import_node = try renderStdImport(c, "math", "Log2Int");
+            return renderCall(c, import_node, &.{payload});
+        },
+        .std_meta_cast => {
+            const payload = node.castTag(.std_meta_cast).?.data;
+            const import_node = try renderStdImport(c, "meta", "cast");
+            return renderCall(c, import_node, &.{ payload.lhs, payload.rhs });
+        },
+        .std_meta_sizeof => {
+            const payload = node.castTag(.std_meta_sizeof).?.data;
+            const import_node = try renderStdImport(c, "meta", "sizeof");
+            return renderCall(c, import_node, &.{payload});
+        },
+        .std_mem_zeroes => {
+            const payload = node.castTag(.std_mem_zeroes).?.data;
+            const import_node = try renderStdImport(c, "mem", "zeroes");
+            return renderCall(c, import_node, &.{payload});
+        },
+        .std_mem_zeroinit => {
+            const payload = node.castTag(.std_mem_zeroinit).?.data;
+            const import_node = try renderStdImport(c, "mem", "zeroInit");
+            return renderCall(c, import_node, &.{ payload.lhs, payload.rhs });
+        },
+        .call => {
+            const payload = node.castTag(.call).?.data;
+            const lhs = try renderNode(c, payload.lhs);
+            return renderCall(c, lhs, payload.args);
+        },
+        .null_literal => return c.addNode(.{
+            .tag = .null_literal,
+            .main_token = try c.addToken(.keyword_null, "null"),
+            .data = undefined,
+        }),
+        .undefined_literal => return c.addNode(.{
+            .tag = .undefined_literal,
+            .main_token = try c.addToken(.keyword_undefined, "undefined"),
+            .data = undefined,
+        }),
+        .true_literal => return c.addNode(.{
+            .tag = .true_literal,
+            .main_token = try c.addToken(.keyword_true, "true"),
+            .data = undefined,
+        }),
+        .false_literal => return c.addNode(.{
+            .tag = .false_literal,
+            .main_token = try c.addToken(.keyword_false, "false"),
+            .data = undefined,
+        }),
+        .zero_literal => return c.addNode(.{
+            .tag = .integer_literal,
+            .main_token = try c.addToken(.integer_literal, "0"),
+            .data = undefined,
+        }),
+        .one_literal => return c.addNode(.{
+            .tag = .integer_literal,
+            .main_token = try c.addToken(.integer_literal, "1"),
+            .data = undefined,
+        }),
+        .void_type => return c.addNode(.{
+            .tag = .identifier,
+            .main_token = try c.addToken(.identifier, "void"),
+            .data = undefined,
+        }),
+        .noreturn_type => return c.addNode(.{
+            .tag = .identifier,
+            .main_token = try c.addToken(.identifier, "noreturn"),
+            .data = undefined,
+        }),
+        .@"continue" => return c.addNode(.{
+            .tag = .@"continue",
+            .main_token = try c.addToken(.keyword_continue, "continue"),
+            .data = .{
+                .lhs = 0,
+                .rhs = undefined,
+            },
+        }),
+        .return_void => return c.addNode(.{
+            .tag = .@"return",
+            .main_token = try c.addToken(.keyword_return, "return"),
+            .data = .{
+                .lhs = 0,
+                .rhs = undefined,
+            },
+        }),
+        .@"break" => return c.addNode(.{
+            .tag = .@"break",
+            .main_token = try c.addToken(.keyword_break, "break"),
+            .data = .{
+                .lhs = 0,
+                .rhs = 0,
+            },
+        }),
+        .break_val => {
+            const payload = node.castTag(.break_val).?.data;
+            const tok = try c.addToken(.keyword_break, "break");
+            // 0 means "no label"; real tokens can never be index 0.
+            const break_label = if (payload.label) |some| blk: {
+                _ = try c.addToken(.colon, ":");
+                break :blk try c.addIdentifier(some);
+            } else 0;
+            return c.addNode(.{
+                .tag = .@"break",
+                .main_token = tok,
+                .data = .{
+                    .lhs = break_label,
+                    .rhs = try renderNode(c, payload.val),
+                },
+            });
+        },
+        .@"return" => {
+            const payload = node.castTag(.@"return").?.data;
+            return c.addNode(.{
+                .tag = .@"return",
+                .main_token = try c.addToken(.keyword_return, "return"),
+                .data = .{
+                    .lhs = try renderNode(c, payload),
+                    .rhs = undefined,
+                },
+            });
+        },
+        .type => {
+            const payload = node.castTag(.type).?.data;
+            return c.addNode(.{
+                .tag = .identifier,
+                .main_token = try c.addToken(.identifier, payload),
+                .data = undefined,
+            });
+        },
+        .log2_int_type => {
+            const payload = node.castTag(.log2_int_type).?.data;
+            return c.addNode(.{
+                .tag = .identifier,
+                .main_token = try c.addTokenFmt(.identifier, "u{d}", .{payload}),
+                .data = undefined,
+            });
+        },
+        .identifier => {
+            const payload = node.castTag(.identifier).?.data;
+            return c.addNode(.{
+                .tag = .identifier,
+                .main_token = try c.addIdentifier(payload),
+                .data = undefined,
+            });
+        },
+        .float_literal => {
+            const payload = node.castTag(.float_literal).?.data;
+            return c.addNode(.{
+                .tag = .float_literal,
+                .main_token = try c.addToken(.float_literal, payload),
+                .data = undefined,
+            });
+        },
+        .integer_literal => {
+            const payload = node.castTag(.integer_literal).?.data;
+            return c.addNode(.{
+                .tag = .integer_literal,
+                .main_token = try c.addToken(.integer_literal, payload),
+                .data = undefined,
+            });
+        },
+        .string_literal => {
+            const payload = node.castTag(.string_literal).?.data;
+            return c.addNode(.{
+                // Fixed: was `.identifier`; use the tag matching the token,
+                // consistent with the `fail_decl` arm below.
+                .tag = .string_literal,
+                .main_token = try c.addToken(.string_literal, payload),
+                .data = undefined,
+            });
+        },
+        .char_literal => {
+            const payload = node.castTag(.char_literal).?.data;
+            return c.addNode(.{
+                // Fixed: was `.identifier`; use the tag matching the token.
+                .tag = .char_literal,
+                .main_token = try c.addToken(.char_literal, payload),
+                .data = undefined,
+            });
+        },
+        .fail_decl => {
+            const payload = node.castTag(.fail_decl).?.data;
+            // pub const name = @compileError(msg);
+            _ = try c.addToken(.keyword_pub, "pub");
+            const const_tok = try c.addToken(.keyword_const, "const");
+            _ = try c.addIdentifier(payload.actual);
+            _ = try c.addToken(.equal, "=");
+
+            const compile_error_tok = try c.addToken(.builtin, "@compileError");
+            _ = try c.addToken(.l_paren, "(");
+            const err_msg_tok = try c.addTokenFmt(.string_literal, "\"{s}\"", .{std.zig.fmtEscapes(payload.mangled)});
+            const err_msg = try c.addNode(.{
+                .tag = .string_literal,
+                .main_token = err_msg_tok,
+                .data = undefined,
+            });
+            _ = try c.addToken(.r_paren, ")");
+            const compile_error = try c.addNode(.{
+                .tag = .builtin_call_two,
+                .main_token = compile_error_tok,
+                .data = .{
+                    .lhs = err_msg,
+                    .rhs = 0,
+                },
+            });
+            _ = try c.addToken(.semicolon, ";");
+
+            return c.addNode(.{
+                .tag = .simple_var_decl,
+                .main_token = const_tok,
+                .data = .{
+                    .lhs = 0,
+                    .rhs = compile_error,
+                },
+            });
+        },
+        .pub_var_simple, .var_simple => {
+            const payload = @fieldParentPtr(Payload.SimpleVarDecl, "base", node.ptr_otherwise).data;
+            if (node.tag() == .pub_var_simple) _ = try c.addToken(.keyword_pub, "pub");
+            const const_tok = try c.addToken(.keyword_const, "const");
+            _ = try c.addIdentifier(payload.name);
+            _ = try c.addToken(.equal, "=");
+
+            const init = try renderNode(c, payload.init);
+            _ = try c.addToken(.semicolon, ";");
+
+            return c.addNode(.{
+                .tag = .simple_var_decl,
+                .main_token = const_tok,
+                .data = .{
+                    .lhs = 0,
+                    .rhs = init,
+                },
+            });
+        },
+        .var_decl => return renderVar(c, node),
+        .arg_redecl, .alias => {
+            const payload = @fieldParentPtr(Payload.ArgRedecl, "base", node.ptr_otherwise).data;
+            if (node.tag() == .alias) _ = try c.addToken(.keyword_pub, "pub");
+            const mut_tok = if (node.tag() == .alias)
+                try c.addToken(.keyword_const, "const")
+            else
+                try c.addToken(.keyword_var, "var");
+            _ = try c.addIdentifier(payload.actual);
+            _ = try c.addToken(.equal, "=");
+
+            const init = try c.addNode(.{
+                .tag = .identifier,
+                .main_token = try c.addIdentifier(payload.mangled),
+                .data = undefined,
+            });
+            _ = try c.addToken(.semicolon, ";");
+
+            return c.addNode(.{
+                .tag = .simple_var_decl,
+                .main_token = mut_tok,
+                .data = .{
+                    .lhs = 0,
+                    .rhs = init,
+                },
+            });
+        },
+        .int_cast => {
+            const payload = node.castTag(.int_cast).?.data;
+            return renderBuiltinCall(c, "@intCast", &.{ payload.lhs, payload.rhs });
+        },
+        .rem => {
+            const payload = node.castTag(.rem).?.data;
+            return renderBuiltinCall(c, "@rem", &.{ payload.lhs, payload.rhs });
+        },
+        .div_trunc => {
+            const payload = node.castTag(.div_trunc).?.data;
+            return renderBuiltinCall(c, "@divTrunc", &.{ payload.lhs, payload.rhs });
+        },
+        .bool_to_int => {
+            const payload = node.castTag(.bool_to_int).?.data;
+            return renderBuiltinCall(c, "@boolToInt", &.{payload});
+        },
+        .as => {
+            const payload = node.castTag(.as).?.data;
+            return renderBuiltinCall(c, "@as", &.{ payload.lhs, payload.rhs });
+        },
+        .truncate => {
+            const payload = node.castTag(.truncate).?.data;
+            return renderBuiltinCall(c, "@truncate", &.{ payload.lhs, payload.rhs });
+        },
+        .bit_cast => {
+            const payload = node.castTag(.bit_cast).?.data;
+            return renderBuiltinCall(c, "@bitCast", &.{ payload.lhs, payload.rhs });
+        },
+        .float_cast => {
+            const payload = node.castTag(.float_cast).?.data;
+            return renderBuiltinCall(c, "@floatCast", &.{ payload.lhs, payload.rhs });
+        },
+        .float_to_int => {
+            const payload = node.castTag(.float_to_int).?.data;
+            return renderBuiltinCall(c, "@floatToInt", &.{ payload.lhs, payload.rhs });
+        },
+        .int_to_float => {
+            const payload = node.castTag(.int_to_float).?.data;
+            return renderBuiltinCall(c, "@intToFloat", &.{ payload.lhs, payload.rhs });
+        },
+        .int_to_enum => {
+            const payload = node.castTag(.int_to_enum).?.data;
+            return renderBuiltinCall(c, "@intToEnum", &.{ payload.lhs, payload.rhs });
+        },
+        .enum_to_int => {
+            const payload = node.castTag(.enum_to_int).?.data;
+            return renderBuiltinCall(c, "@enumToInt", &.{payload});
+        },
+        .int_to_ptr => {
+            const payload = node.castTag(.int_to_ptr).?.data;
+            return renderBuiltinCall(c, "@intToPtr", &.{ payload.lhs, payload.rhs });
+        },
+        .ptr_to_int => {
+            const payload = node.castTag(.ptr_to_int).?.data;
+            return renderBuiltinCall(c, "@ptrToInt", &.{payload});
+        },
+        .align_cast => {
+            const payload = node.castTag(.align_cast).?.data;
+            return renderBuiltinCall(c, "@alignCast", &.{ payload.lhs, payload.rhs });
+        },
+        .ptr_cast => {
+            const payload = node.castTag(.ptr_cast).?.data;
+            return renderBuiltinCall(c, "@ptrCast", &.{ payload.lhs, payload.rhs });
+        },
+        .sizeof => {
+            const payload = node.castTag(.sizeof).?.data;
+            return renderBuiltinCall(c, "@sizeOf", &.{payload});
+        },
+        .alignof => {
+            const payload = node.castTag(.alignof).?.data;
+            return renderBuiltinCall(c, "@alignOf", &.{payload});
+        },
+        .typeof => {
+            const payload = node.castTag(.typeof).?.data;
+            return renderBuiltinCall(c, "@TypeOf", &.{payload});
+        },
+        .negate => return renderPrefixOp(c, node, .negation, .minus, "-"),
+        .negate_wrap => return renderPrefixOp(c, node, .negation_wrap, .minus_percent, "-%"),
+        .bit_not => return renderPrefixOp(c, node, .bit_not, .tilde, "~"),
+        .not => return renderPrefixOp(c, node, .bool_not, .bang, "!"),
+        .optional_type => return renderPrefixOp(c, node, .optional_type, .question_mark, "?"),
+        .address_of => return renderPrefixOp(c, node, .address_of, .ampersand, "&"),
+        .deref => {
+            const payload = node.castTag(.deref).?.data;
+            const operand = try renderNodeGrouped(c, payload);
+            const deref_tok = try c.addToken(.period_asterisk, ".*");
+            return c.addNode(.{
+                .tag = .deref,
+                .main_token = deref_tok,
+                .data = .{
+                    .lhs = operand,
+                    .rhs = undefined,
+                },
+            });
+        },
+        .unwrap => {
+            const payload = node.castTag(.unwrap).?.data;
+            const operand = try renderNodeGrouped(c, payload);
+            const period = try c.addToken(.period, ".");
+            const question_mark = try c.addToken(.question_mark, "?");
+            return c.addNode(.{
+                .tag = .unwrap_optional,
+                .main_token = period,
+                .data = .{
+                    .lhs = operand,
+                    .rhs = question_mark,
+                },
+            });
+        },
+        .c_pointer, .single_pointer => {
+            const payload = @fieldParentPtr(Payload.Pointer, "base", node.ptr_otherwise).data;
+
+            const asterisk = if (node.tag() == .single_pointer)
+                try c.addToken(.asterisk, "*")
+            else blk: {
+                _ = try c.addToken(.l_bracket, "[");
+                const res = try c.addToken(.asterisk, "*");
+                _ = try c.addIdentifier("c");
+                _ = try c.addToken(.r_bracket, "]");
+                break :blk res;
+            };
+            if (payload.is_const) _ = try c.addToken(.keyword_const, "const");
+            if (payload.is_volatile) _ = try c.addToken(.keyword_volatile, "volatile");
+            const elem_type = try renderNodeGrouped(c, payload.elem_type);
+
+            return c.addNode(.{
+                .tag = .ptr_type_aligned,
+                .main_token = asterisk,
+                .data = .{
+                    .lhs = 0,
+                    .rhs = elem_type,
+                },
+            });
+        },
+        .add => return renderBinOpGrouped(c, node, .add, .plus, "+"),
+        .add_assign => return renderBinOp(c, node, .assign_add, .plus_equal, "+="),
+        .add_wrap => return renderBinOpGrouped(c, node, .add_wrap, .plus_percent, "+%"),
+        .add_wrap_assign => return renderBinOp(c, node, .assign_add_wrap, .plus_percent_equal, "+%="),
+        .sub => return renderBinOpGrouped(c, node, .sub, .minus, "-"),
+        .sub_assign => return renderBinOp(c, node, .assign_sub, .minus_equal, "-="),
+        .sub_wrap => return renderBinOpGrouped(c, node, .sub_wrap, .minus_percent, "-%"),
+        .sub_wrap_assign => return renderBinOp(c, node, .assign_sub_wrap, .minus_percent_equal, "-%="),
+        .mul => return renderBinOpGrouped(c, node, .mul, .asterisk, "*"),
+        .mul_assign => return renderBinOp(c, node, .assign_mul, .asterisk_equal, "*="),
+        .mul_wrap => return renderBinOpGrouped(c, node, .mul_wrap, .asterisk_percent, "*%"),
+        .mul_wrap_assign => return renderBinOp(c, node, .assign_mul_wrap, .asterisk_percent_equal, "*%="),
+        .div => return renderBinOpGrouped(c, node, .div, .slash, "/"),
+        .div_assign => return renderBinOp(c, node, .assign_div, .slash_equal, "/="),
+        .shl => return renderBinOpGrouped(c, node, .bit_shift_left, .angle_bracket_angle_bracket_left, "<<"),
+        .shl_assign => return renderBinOp(c, node, .assign_bit_shift_left, .angle_bracket_angle_bracket_left_equal, "<<="),
+        .shr => return renderBinOpGrouped(c, node, .bit_shift_right, .angle_bracket_angle_bracket_right, ">>"),
+        .shr_assign => return renderBinOp(c, node, .assign_bit_shift_right, .angle_bracket_angle_bracket_right_equal, ">>="),
+        .mod => return renderBinOpGrouped(c, node, .mod, .percent, "%"),
+        .mod_assign => return renderBinOp(c, node, .assign_mod, .percent_equal, "%="),
+        .@"and" => return renderBinOpGrouped(c, node, .bool_and, .keyword_and, "and"),
+        .@"or" => return renderBinOpGrouped(c, node, .bool_or, .keyword_or, "or"),
+        .less_than => return renderBinOpGrouped(c, node, .less_than, .angle_bracket_left, "<"),
+        .less_than_equal => return renderBinOpGrouped(c, node, .less_or_equal, .angle_bracket_left_equal, "<="),
+        // Fixed: the `>` operator was previously emitted with the text ">=".
+        .greater_than => return renderBinOpGrouped(c, node, .greater_than, .angle_bracket_right, ">"),
+        .greater_than_equal => return renderBinOpGrouped(c, node, .greater_or_equal, .angle_bracket_right_equal, ">="),
+        .equal => return renderBinOpGrouped(c, node, .equal_equal, .equal_equal, "=="),
+        .not_equal => return renderBinOpGrouped(c, node, .bang_equal, .bang_equal, "!="),
+        .bit_and => return renderBinOpGrouped(c, node, .bit_and, .ampersand, "&"),
+        .bit_and_assign => return renderBinOp(c, node, .assign_bit_and, .ampersand_equal, "&="),
+        .bit_or => return renderBinOpGrouped(c, node, .bit_or, .pipe, "|"),
+        .bit_or_assign => return renderBinOp(c, node, .assign_bit_or, .pipe_equal, "|="),
+        .bit_xor => return renderBinOpGrouped(c, node, .bit_xor, .caret, "^"),
+        .bit_xor_assign => return renderBinOp(c, node, .assign_bit_xor, .caret_equal, "^="),
+        .array_cat => return renderBinOp(c, node, .array_cat, .plus_plus, "++"),
+        .ellipsis3 => return renderBinOpGrouped(c, node, .switch_range, .ellipsis3, "..."),
+        .assign => return renderBinOp(c, node, .assign, .equal, "="),
+        .empty_block => {
+            const l_brace = try c.addToken(.l_brace, "{");
+            _ = try c.addToken(.r_brace, "}");
+            return c.addNode(.{
+                .tag = .block_two,
+                .main_token = l_brace,
+                .data = .{
+                    .lhs = 0,
+                    .rhs = 0,
+                },
+            });
+        },
+        .block_single => {
+            const payload = node.castTag(.block_single).?.data;
+            const l_brace = try c.addToken(.l_brace, "{");
+
+            const stmt = try renderNode(c, payload);
+            try addSemicolonIfNeeded(c, payload);
+
+            _ = try c.addToken(.r_brace, "}");
+            return c.addNode(.{
+                .tag = .block_two_semicolon,
+                .main_token = l_brace,
+                .data = .{
+                    .lhs = stmt,
+                    .rhs = 0,
+                },
+            });
+        },
+        .block => {
+            const payload = node.castTag(.block).?.data;
+            if (payload.label) |some| {
+                _ = try c.addIdentifier(some);
+                _ = try c.addToken(.colon, ":");
+            }
+            const l_brace = try c.addToken(.l_brace, "{");
+
+            var stmts = std.ArrayList(NodeIndex).init(c.gpa);
+            defer stmts.deinit();
+            for (payload.stmts) |stmt| {
+                const res = try renderNode(c, stmt);
+                if (res == 0) continue;
+                try addSemicolonIfNeeded(c, stmt);
+                try stmts.append(res);
+            }
+            const span = try c.listToSpan(stmts.items);
+            _ = try c.addToken(.r_brace, "}");
+
+            // Peek past the just-added `}` to see whether the final statement
+            // ended with a `;`, which selects the *_semicolon node variant.
+            const semicolon = c.tokens.items(.tag)[c.tokens.len - 2] == .semicolon;
+            return c.addNode(.{
+                .tag = if (semicolon) .block_semicolon else .block,
+                .main_token = l_brace,
+                .data = .{
+                    .lhs = span.start,
+                    .rhs = span.end,
+                },
+            });
+        },
+        .func => return renderFunc(c, node),
+        .pub_inline_fn => return renderMacroFunc(c, node),
+        .discard => {
+            const payload = node.castTag(.discard).?.data;
+            const lhs = try c.addNode(.{
+                .tag = .identifier,
+                .main_token = try c.addToken(.identifier, "_"),
+                .data = undefined,
+            });
+            return c.addNode(.{
+                .tag = .assign,
+                .main_token = try c.addToken(.equal, "="),
+                .data = .{
+                    .lhs = lhs,
+                    .rhs = try renderNode(c, payload),
+                },
+            });
+        },
+        .@"while" => {
+            const payload = node.castTag(.@"while").?.data;
+            const while_tok = try c.addToken(.keyword_while, "while");
+            _ = try c.addToken(.l_paren, "(");
+            const cond = try renderNode(c, payload.cond);
+            _ = try c.addToken(.r_paren, ")");
+
+            // 0 means "no continue expression"; node 0 is the root and can
+            // never be a continue expression.
+            const cont_expr = if (payload.cont_expr) |some| blk: {
+                _ = try c.addToken(.colon, ":");
+                _ = try c.addToken(.l_paren, "(");
+                const res = try renderNode(c, some);
+                _ = try c.addToken(.r_paren, ")");
+                break :blk res;
+            } else 0;
+            const body = try renderNode(c, payload.body);
+
+            if (cont_expr == 0) {
+                return c.addNode(.{
+                    .tag = .while_simple,
+                    .main_token = while_tok,
+                    .data = .{
+                        .lhs = cond,
+                        .rhs = body,
+                    },
+                });
+            } else {
+                return c.addNode(.{
+                    .tag = .while_cont,
+                    .main_token = while_tok,
+                    .data = .{
+                        .lhs = cond,
+                        .rhs = try c.addExtra(std.zig.ast.Node.WhileCont{
+                            .cont_expr = cont_expr,
+                            .then_expr = body,
+                        }),
+                    },
+                });
+            }
+        },
+        .while_true => {
+            const payload = node.castTag(.while_true).?.data;
+            const while_tok = try c.addToken(.keyword_while, "while");
+            _ = try c.addToken(.l_paren, "(");
+            const cond = try c.addNode(.{
+                .tag = .true_literal,
+                .main_token = try c.addToken(.keyword_true, "true"),
+                .data = undefined,
+            });
+            _ = try c.addToken(.r_paren, ")");
+            const body = try renderNode(c, payload);
+
+            return c.addNode(.{
+                .tag = .while_simple,
+                .main_token = while_tok,
+                .data = .{
+                    .lhs = cond,
+                    .rhs = body,
+                },
+            });
+        },
+        .@"if" => {
+            const payload = node.castTag(.@"if").?.data;
+            const if_tok = try c.addToken(.keyword_if, "if");
+            _ = try c.addToken(.l_paren, "(");
+            const cond = try renderNode(c, payload.cond);
+            _ = try c.addToken(.r_paren, ")");
+
+            const then_expr = try renderNode(c, payload.then);
+            const else_node = payload.@"else" orelse return c.addNode(.{
+                .tag = .if_simple,
+                .main_token = if_tok,
+                .data = .{
+                    .lhs = cond,
+                    .rhs = then_expr,
+                },
+            });
+            _ = try c.addToken(.keyword_else, "else");
+            const else_expr = try renderNode(c, else_node);
+
+            return c.addNode(.{
+                .tag = .@"if",
+                .main_token = if_tok,
+                .data = .{
+                    .lhs = cond,
+                    .rhs = try c.addExtra(std.zig.ast.Node.If{
+                        .then_expr = then_expr,
+                        .else_expr = else_expr,
+                    }),
+                },
+            });
+        },
+        .if_not_break => {
+            const payload = node.castTag(.if_not_break).?.data;
+            const if_tok = try c.addToken(.keyword_if, "if");
+            _ = try c.addToken(.l_paren, "(");
+            const cond = try c.addNode(.{
+                .tag = .bool_not,
+                .main_token = try c.addToken(.bang, "!"),
+                .data = .{
+                    .lhs = try renderNodeGrouped(c, payload),
+                    .rhs = undefined,
+                },
+            });
+            _ = try c.addToken(.r_paren, ")");
+            const then_expr = try c.addNode(.{
+                .tag = .@"break",
+                .main_token = try c.addToken(.keyword_break, "break"),
+                .data = .{
+                    .lhs = 0,
+                    .rhs = 0,
+                },
+            });
+
+            return c.addNode(.{
+                .tag = .if_simple,
+                .main_token = if_tok,
+                .data = .{
+                    .lhs = cond,
+                    .rhs = then_expr,
+                },
+            });
+        },
+        .@"switch" => {
+            const payload = node.castTag(.@"switch").?.data;
+            const switch_tok = try c.addToken(.keyword_switch, "switch");
+            _ = try c.addToken(.l_paren, "(");
+            const cond = try renderNode(c, payload.cond);
+            _ = try c.addToken(.r_paren, ")");
+
+            _ = try c.addToken(.l_brace, "{");
+            var cases = try c.gpa.alloc(NodeIndex, payload.cases.len);
+            defer c.gpa.free(cases);
+            for (payload.cases) |case, i| {
+                cases[i] = try renderNode(c, case);
+                _ = try c.addToken(.comma, ",");
+            }
+            const span = try c.listToSpan(cases);
+            _ = try c.addToken(.r_brace, "}");
+            return c.addNode(.{
+                .tag = .switch_comma,
+                .main_token = switch_tok,
+                .data = .{
+                    .lhs = cond,
+                    .rhs = try c.addExtra(NodeSubRange{
+                        .start = span.start,
+                        .end = span.end,
+                    }),
+                },
+            });
+        },
+        .switch_else => {
+            const payload = node.castTag(.switch_else).?.data;
+            _ = try c.addToken(.keyword_else, "else");
+            return c.addNode(.{
+                .tag = .switch_case_one,
+                .main_token = try c.addToken(.equal_angle_bracket_right, "=>"),
+                .data = .{
+                    .lhs = 0,
+                    .rhs = try renderNode(c, payload),
+                },
+            });
+        },
+        .switch_prong => {
+            const payload = node.castTag(.switch_prong).?.data;
+            var items = try c.gpa.alloc(NodeIndex, std.math.max(payload.cases.len, 1));
+            defer c.gpa.free(items);
+            items[0] = 0;
+            for (payload.cases) |item, i| {
+                if (i != 0) _ = try c.addToken(.comma, ",");
+                items[i] = try renderNode(c, item);
+            }
+            // Fixed: removed a stray unmatched `}` token that was emitted
+            // here; the switch's closing brace is produced by the
+            // `.@"switch"` arm above.
+            if (items.len < 2) {
+                return c.addNode(.{
+                    .tag = .switch_case_one,
+                    .main_token = try c.addToken(.equal_angle_bracket_right, "=>"),
+                    .data = .{
+                        .lhs = items[0],
+                        .rhs = try renderNode(c, payload.cond),
+                    },
+                });
+            } else {
+                const span = try c.listToSpan(items);
+                return c.addNode(.{
+                    .tag = .switch_case,
+                    .main_token = try c.addToken(.equal_angle_bracket_right, "=>"),
+                    .data = .{
+                        .lhs = try c.addExtra(NodeSubRange{
+                            .start = span.start,
+                            .end = span.end,
+                        }),
+                        .rhs = try renderNode(c, payload.cond),
+                    },
+                });
+            }
+        },
+        .opaque_literal => {
+            const opaque_tok = try c.addToken(.keyword_opaque, "opaque");
+            _ = try c.addToken(.l_brace, "{");
+            _ = try c.addToken(.r_brace, "}");
+
+            return c.addNode(.{
+                .tag = .container_decl_two,
+                .main_token = opaque_tok,
+                .data = .{
+                    .lhs = 0,
+                    .rhs = 0,
+                },
+            });
+        },
+        .array_access => {
+            const payload = node.castTag(.array_access).?.data;
+            const lhs = try renderNode(c, payload.lhs);
+            const l_bracket = try c.addToken(.l_bracket, "[");
+            const index_expr = try renderNode(c, payload.rhs);
+            _ = try c.addToken(.r_bracket, "]");
+            return c.addNode(.{
+                .tag = .array_access,
+                .main_token = l_bracket,
+                .data = .{
+                    .lhs = lhs,
+                    .rhs = index_expr,
+                },
+            });
+        },
+        .array_type => {
+            const payload = node.castTag(.array_type).?.data;
+            return renderArrayType(c, payload.len, payload.elem_type);
+        },
+        .array_filler => {
+            const payload = node.castTag(.array_filler).?.data;
+
+            const type_expr = try renderArrayType(c, 1, payload.type);
+            const l_brace = try c.addToken(.l_brace, "{");
+            const val = try renderNode(c, payload.filler);
+            _ = try c.addToken(.r_brace, "}");
+
+            const init = try c.addNode(.{
+                .tag = .array_init_one,
+                .main_token = l_brace,
+                .data = .{
+                    .lhs = type_expr,
+                    .rhs = val,
+                },
+            });
+            return c.addNode(.{
+                // Fixed: `**` is array multiplication (`.array_mult`), not
+                // concatenation (`.array_cat`).
+                .tag = .array_mult,
+                .main_token = try c.addToken(.asterisk_asterisk, "**"),
+                .data = .{
+                    .lhs = init,
+                    .rhs = try c.addNode(.{
+                        .tag = .integer_literal,
+                        .main_token = try c.addTokenFmt(.integer_literal, "{d}", .{payload.count}),
+                        .data = undefined,
+                    }),
+                },
+            });
+        },
+        .empty_array => {
+            const payload = node.castTag(.empty_array).?.data;
+
+            const type_expr = try renderArrayType(c, 0, payload);
+            return renderArrayInit(c, type_expr, &.{});
+        },
+        .array_init => {
+            const payload = node.castTag(.array_init).?.data;
+            const type_expr = try renderNode(c, payload.cond);
+            return renderArrayInit(c, type_expr, payload.cases);
+        },
+        .field_access => {
+            const payload = node.castTag(.field_access).?.data;
+            const lhs = try renderNode(c, payload.lhs);
+            return renderFieldAccess(c, lhs, payload.field_name);
+        },
+        .@"struct", .@"union" => return renderRecord(c, node),
+        .@"enum" => {
+            const payload = node.castTag(.@"enum").?.data;
+            _ = try c.addToken(.keyword_extern, "extern");
+            const enum_tok = try c.addToken(.keyword_enum, "enum");
+            _ = try c.addToken(.l_paren, "(");
+            const arg_expr = try renderNode(c, payload.int_type);
+            _ = try c.addToken(.r_paren, ")");
+            _ = try c.addToken(.l_brace, "{");
+            const members = try c.gpa.alloc(NodeIndex, std.math.max(payload.fields.len + 1, 1));
+            defer c.gpa.free(members);
+            members[0] = 0;
+
+            for (payload.fields) |field, i| {
+                const name_tok = try c.addIdentifier(field.name);
+                const value_expr = if (field.value) |some| blk: {
+                    _ = try c.addToken(.equal, "=");
+                    break :blk try renderNode(c, some);
+                } else 0;
+
+                members[i] = try c.addNode(.{
+                    .tag = .container_field_init,
+                    .main_token = name_tok,
+                    .data = .{
+                        .lhs = 0,
+                        .rhs = value_expr,
+                    },
+                });
+                _ = try c.addToken(.comma, ",");
+            }
+            // Append a trailing `_` field to make the enum non-exhaustive.
+            members[payload.fields.len] = try c.addNode(.{
+                .tag = .container_field_init,
+                .main_token = try c.addIdentifier("_"),
+                .data = .{
+                    .lhs = 0,
+                    .rhs = 0,
+                },
+            });
+            _ = try c.addToken(.comma, ",");
+            _ = try c.addToken(.r_brace, "}");
+
+            const span = try c.listToSpan(members);
+            return c.addNode(.{
+                .tag = .container_decl_arg_trailing,
+                .main_token = enum_tok,
+                .data = .{
+                    .lhs = arg_expr,
+                    .rhs = try c.addExtra(NodeSubRange{
+                        .start = span.start,
+                        .end = span.end,
+                    }),
+                },
+            });
+        },
+        .pub_enum_redecl, .enum_redecl => {
+            const payload = @fieldParentPtr(Payload.EnumRedecl, "base", node.ptr_otherwise).data;
+            if (node.tag() == .pub_enum_redecl) _ = try c.addToken(.keyword_pub, "pub");
+            const const_tok = try c.addToken(.keyword_const, "const");
+            _ = try c.addIdentifier(payload.enum_val_name);
+            _ = try c.addToken(.equal, "=");
+
+            const enum_to_int_tok = try c.addToken(.builtin, "@enumToInt");
+            _ = try c.addToken(.l_paren, "(");
+            const enum_name = try c.addNode(.{
+                .tag = .identifier,
+                .main_token = try c.addIdentifier(payload.enum_name),
+                .data = undefined,
+            });
+            const field_access = try renderFieldAccess(c, enum_name, payload.field_name);
+            const init_node = try c.addNode(.{
+                .tag = .builtin_call_two,
+                .main_token = enum_to_int_tok,
+                .data = .{
+                    .lhs = field_access,
+                    .rhs = 0,
+                },
+            });
+            _ = try c.addToken(.r_paren, ")");
+            _ = try c.addToken(.semicolon, ";");
+
+            return c.addNode(.{
+                .tag = .simple_var_decl,
+                .main_token = const_tok,
+                .data = .{
+                    .lhs = 0,
+                    .rhs = init_node,
+                },
+            });
+        },
+        .tuple => {
+            const payload = node.castTag(.tuple).?.data;
+            _ = try c.addToken(.period, ".");
+            const l_brace = try c.addToken(.l_brace, "{");
+            // Over-allocate to at least 2 entries and zero them so the
+            // `array_init_dot_two` case can read both unconditionally.
+            var inits = try c.gpa.alloc(NodeIndex, std.math.max(payload.len, 2));
+            defer c.gpa.free(inits);
+            inits[0] = 0;
+            inits[1] = 0;
+            for (payload) |init, i| {
+                if (i != 0) _ = try c.addToken(.comma, ",");
+                inits[i] = try renderNode(c, init);
+            }
+            _ = try c.addToken(.r_brace, "}");
+            if (payload.len < 3) {
+                return c.addNode(.{
+                    .tag = .array_init_dot_two,
+                    .main_token = l_brace,
+                    .data = .{
+                        .lhs = inits[0],
+                        .rhs = inits[1],
+                    },
+                });
+            } else {
+                const span = try c.listToSpan(inits);
+                return c.addNode(.{
+                    .tag = .array_init_dot,
+                    .main_token = l_brace,
+                    .data = .{
+                        .lhs = span.start,
+                        .rhs = span.end,
+                    },
+                });
+            }
+        },
+        .container_init => {
+            const payload = node.castTag(.container_init).?.data;
+            const lhs = try renderNode(c, payload.lhs);
+
+            const l_brace = try c.addToken(.l_brace, "{");
+            // Over-allocate to at least 1 entry and zero it so the
+            // `struct_init_one_comma` case can read it when there are no inits.
+            var inits = try c.gpa.alloc(NodeIndex, std.math.max(payload.inits.len, 1));
+            defer c.gpa.free(inits);
+            inits[0] = 0;
+            for (payload.inits) |init, i| {
+                _ = try c.addToken(.period, ".");
+                _ = try c.addIdentifier(init.name);
+                _ = try c.addToken(.equal, "=");
+                inits[i] = try renderNode(c, init.value);
+                _ = try c.addToken(.comma, ",");
+            }
+            _ = try c.addToken(.r_brace, "}");
+
+            if (payload.inits.len < 2) {
+                return c.addNode(.{
+                    .tag = .struct_init_one_comma,
+                    .main_token = l_brace,
+                    .data = .{
+                        .lhs = lhs,
+                        .rhs = inits[0],
+                    },
+                });
+            } else {
+                const span = try c.listToSpan(inits);
+                return c.addNode(.{
+                    .tag = .struct_init_comma,
+                    .main_token = l_brace,
+                    .data = .{
+                        .lhs = lhs,
+                        .rhs = try c.addExtra(NodeSubRange{
+                            .start = span.start,
+                            .end = span.end,
+                        }),
+                    },
+                });
+            }
+        },
+        .@"anytype" => unreachable, // Handled in renderParams
+    }
+}
+
+/// Renders a translate-c record (`struct` or `union`) as an `extern` (or, if
+/// `is_packed`, `packed`) container declaration, emitting one container field
+/// per C field, each with an optional explicit `align(N)`.
+fn renderRecord(c: *Context, node: Node) !NodeIndex {
+    const payload = @fieldParentPtr(Payload.Record, "base", node.ptr_otherwise).data;
+    if (payload.is_packed)
+        _ = try c.addToken(.keyword_packed, "packed")
+    else
+        _ = try c.addToken(.keyword_extern, "extern");
+    const kind_tok = if (node.tag() == .@"struct")
+        try c.addToken(.keyword_struct, "struct")
+    else
+        try c.addToken(.keyword_union, "union");
+
+    _ = try c.addToken(.l_brace, "{");
+    // Over-allocate to at least 2 entries and zero the first two so the
+    // `container_decl_two_trailing` case below can read them unconditionally.
+    const members = try c.gpa.alloc(NodeIndex, std.math.max(payload.fields.len, 2));
+    defer c.gpa.free(members);
+    members[0] = 0;
+    members[1] = 0;
+
+    for (payload.fields) |field, i| {
+        const name_tok = try c.addIdentifier(field.name);
+        _ = try c.addToken(.colon, ":");
+        const type_expr = try renderNode(c, field.type);
+
+        // No explicit alignment: emit a plain `name: T,` field.
+        const alignment = field.alignment orelse {
+            members[i] = try c.addNode(.{
+                .tag = .container_field_init,
+                .main_token = name_tok,
+                .data = .{
+                    .lhs = type_expr,
+                    .rhs = 0,
+                },
+            });
+            _ = try c.addToken(.comma, ",");
+            continue;
+        };
+        // Explicit alignment: emit `name: T align(N),`.
+        _ = try c.addToken(.keyword_align, "align");
+        _ = try c.addToken(.l_paren, "(");
+        const align_expr = try c.addNode(.{
+            .tag = .integer_literal,
+            .main_token = try c.addTokenFmt(.integer_literal, "{d}", .{alignment}),
+            .data = undefined,
+        });
+        _ = try c.addToken(.r_paren, ")");
+
+        members[i] = try c.addNode(.{
+            .tag = .container_field_align,
+            .main_token = name_tok,
+            .data = .{
+                .lhs = type_expr,
+                .rhs = align_expr,
+            },
+        });
+        _ = try c.addToken(.comma, ",");
+    }
+    _ = try c.addToken(.r_brace, "}");
+
+    // Pick the most compact node encoding for the member count; every field
+    // above was followed by a comma, hence the *_trailing variants.
+    if (payload.fields.len == 0) {
+        return c.addNode(.{
+            .tag = .container_decl_two,
+            .main_token = kind_tok,
+            .data = .{
+                .lhs = 0,
+                .rhs = 0,
+            },
+        });
+    } else if (payload.fields.len <= 2) {
+        return c.addNode(.{
+            .tag = .container_decl_two_trailing,
+            .main_token = kind_tok,
+            .data = .{
+                .lhs = members[0],
+                .rhs = members[1],
+            },
+        });
+    } else {
+        const span = try c.listToSpan(members);
+        return c.addNode(.{
+            .tag = .container_decl_trailing,
+            .main_token = kind_tok,
+            .data = .{
+                .lhs = span.start,
+                .rhs = span.end,
+            },
+        });
+    }
+}
+
+/// Renders `lhs.field_name` as a `field_access` node; the `.` token is the
+/// main token and the identifier token index is stored in `rhs`.
+fn renderFieldAccess(c: *Context, lhs: NodeIndex, field_name: []const u8) !NodeIndex {
+ return c.addNode(.{
+ .tag = .field_access,
+ .main_token = try c.addToken(.period, "."),
+ .data = .{
+ .lhs = lhs,
+ .rhs = try c.addIdentifier(field_name),
+ },
+ });
+}
+
+/// Renders an array initializer `lhs{ a, b, ... }` with a trailing comma
+/// after every element. Uses `array_init_one_comma` for 0 or 1 elements
+/// (the zero case stores a 0 placeholder in `rhs`), otherwise
+/// `array_init_comma` with a sub-range in extra data.
+fn renderArrayInit(c: *Context, lhs: NodeIndex, inits: []const Node) !NodeIndex {
+ const l_brace = try c.addToken(.l_brace, "{");
+ // Allocate at least one slot and pre-zero it so the len < 2 case can
+ // read rendered[0] even when there are no initializers.
+ var rendered = try c.gpa.alloc(NodeIndex, std.math.max(inits.len, 1));
+ defer c.gpa.free(rendered);
+ rendered[0] = 0;
+ for (inits) |init, i| {
+ rendered[i] = try renderNode(c, init);
+ _ = try c.addToken(.comma, ",");
+ }
+ _ = try c.addToken(.r_brace, "}");
+ if (inits.len < 2) {
+ return c.addNode(.{
+ .tag = .array_init_one_comma,
+ .main_token = l_brace,
+ .data = .{
+ .lhs = lhs,
+ .rhs = rendered[0],
+ },
+ });
+ } else {
+ const span = try c.listToSpan(rendered);
+ return c.addNode(.{
+ .tag = .array_init_comma,
+ .main_token = l_brace,
+ .data = .{
+ .lhs = lhs,
+ .rhs = try c.addExtra(NodeSubRange{
+ .start = span.start,
+ .end = span.end,
+ }),
+ },
+ });
+ }
+}
+
+/// Renders a fixed-length array type `[len]elem_type` as an `array_type`
+/// node whose main token is the `[` and whose lhs/rhs are the length
+/// literal and element type expressions.
+fn renderArrayType(c: *Context, len: usize, elem_type: Node) !NodeIndex {
+ const l_bracket = try c.addToken(.l_bracket, "[");
+ const len_expr = try c.addNode(.{
+ .tag = .integer_literal,
+ .main_token = try c.addTokenFmt(.integer_literal, "{d}", .{len}),
+ .data = undefined,
+ });
+ _ = try c.addToken(.r_bracket, "]");
+ const elem_type_expr = try renderNode(c, elem_type);
+ return c.addNode(.{
+ .tag = .array_type,
+ .main_token = l_bracket,
+ .data = .{
+ .lhs = len_expr,
+ .rhs = elem_type_expr,
+ },
+ });
+}
+
+/// Appends a `;` token after a rendered statement unless the statement kind
+/// terminates itself (declarations, blocks, switch). For while/if, recurses
+/// into the body (or else-branch) to decide.
+fn addSemicolonIfNeeded(c: *Context, node: Node) !void {
+ switch (node.tag()) {
+ .warning => unreachable,
+ .var_decl, .var_simple, .arg_redecl, .alias, .enum_redecl, .block, .empty_block, .block_single, .@"switch" => {},
+ .while_true => {
+ const payload = node.castTag(.while_true).?.data;
+ return addSemicolonIfNotBlock(c, payload);
+ },
+ .@"while" => {
+ const payload = node.castTag(.@"while").?.data;
+ return addSemicolonIfNotBlock(c, payload.body);
+ },
+ .@"if" => {
+ const payload = node.castTag(.@"if").?.data;
+ // The else-branch is the last thing rendered, so it decides
+ // whether a semicolon follows.
+ if (payload.@"else") |some|
+ return addSemicolonIfNeeded(c, some);
+ return addSemicolonIfNotBlock(c, payload.then);
+ },
+ else => _ = try c.addToken(.semicolon, ";"),
+ }
+}
+
+/// Appends a `;` token unless `node` is a block (blocks already end with `}`).
+fn addSemicolonIfNotBlock(c: *Context, node: Node) !void {
+ switch (node.tag()) {
+ .block, .empty_block, .block_single => {},
+ else => _ = try c.addToken(.semicolon, ";"),
+ }
+}
+
+/// Renders `node` in a context where it is used as a sub-expression,
+/// wrapping it in parentheses (`grouped_expression`) when the node kind
+/// could otherwise parse ambiguously. Three cases:
+///   1. atoms / postfix-style expressions: rendered as-is;
+///   2. binary ops, containers, blocks, etc.: wrapped in `( ... )`;
+///   3. statement-level nodes: unreachable in expression position.
+fn renderNodeGrouped(c: *Context, node: Node) !NodeIndex {
+ switch (node.tag()) {
+ .declaration => unreachable,
+ .null_literal,
+ .undefined_literal,
+ .true_literal,
+ .false_literal,
+ .return_void,
+ .zero_literal,
+ .one_literal,
+ .void_type,
+ .noreturn_type,
+ .@"anytype",
+ .div_trunc,
+ .rem,
+ .int_cast,
+ .as,
+ .truncate,
+ .bit_cast,
+ .float_cast,
+ .float_to_int,
+ .int_to_float,
+ .int_to_enum,
+ .int_to_ptr,
+ .std_mem_zeroes,
+ .std_math_Log2Int,
+ .log2_int_type,
+ .ptr_to_int,
+ .enum_to_int,
+ .sizeof,
+ .alignof,
+ .typeof,
+ .std_meta_sizeof,
+ .std_meta_cast,
+ .std_mem_zeroinit,
+ .integer_literal,
+ .float_literal,
+ .string_literal,
+ .char_literal,
+ .identifier,
+ .field_access,
+ .ptr_cast,
+ .type,
+ .array_access,
+ .align_cast,
+ .optional_type,
+ .c_pointer,
+ .single_pointer,
+ .unwrap,
+ .deref,
+ .address_of,
+ .not,
+ .negate,
+ .negate_wrap,
+ .bit_not,
+ .func,
+ .call,
+ .array_type,
+ .bool_to_int,
+ => {
+ // no grouping needed
+ return renderNode(c, node);
+ },
+
+ .opaque_literal,
+ .empty_array,
+ .block_single,
+ .add,
+ .add_wrap,
+ .sub,
+ .sub_wrap,
+ .mul,
+ .mul_wrap,
+ .div,
+ .shl,
+ .shr,
+ .mod,
+ .@"and",
+ .@"or",
+ .less_than,
+ .less_than_equal,
+ .greater_than,
+ .greater_than_equal,
+ .equal,
+ .not_equal,
+ .bit_and,
+ .bit_or,
+ .bit_xor,
+ .empty_block,
+ .array_cat,
+ .array_filler,
+ .@"if",
+ .@"enum",
+ .@"struct",
+ .@"union",
+ .array_init,
+ .tuple,
+ .container_init,
+ .block,
+ => return c.addNode(.{
+ .tag = .grouped_expression,
+ .main_token = try c.addToken(.l_paren, "("),
+ .data = .{
+ .lhs = try renderNode(c, node),
+ .rhs = try c.addToken(.r_paren, ")"),
+ },
+ }),
+ .ellipsis3,
+ .switch_prong,
+ .warning,
+ .var_decl,
+ .fail_decl,
+ .arg_redecl,
+ .alias,
+ .var_simple,
+ .pub_var_simple,
+ .pub_enum_redecl,
+ .enum_redecl,
+ .@"while",
+ .@"switch",
+ .@"break",
+ .break_val,
+ .pub_inline_fn,
+ .discard,
+ .@"continue",
+ .@"return",
+ .usingnamespace_builtins,
+ .while_true,
+ .if_not_break,
+ .switch_else,
+ .add_assign,
+ .add_wrap_assign,
+ .sub_assign,
+ .sub_wrap_assign,
+ .mul_assign,
+ .mul_wrap_assign,
+ .div_assign,
+ .shl_assign,
+ .shr_assign,
+ .mod_assign,
+ .bit_and_assign,
+ .bit_or_assign,
+ .bit_xor_assign,
+ .assign,
+ => {
+ // these should never appear in places where grouping might be needed.
+ unreachable;
+ },
+ }
+}
+
+/// Renders a unary prefix operator: emits the operator token (`tok_tag`,
+/// spelled `bytes`) followed by the grouped operand; `rhs` is unused for
+/// prefix-op nodes.
+fn renderPrefixOp(c: *Context, node: Node, tag: std.zig.ast.Node.Tag, tok_tag: TokenTag, bytes: []const u8) !NodeIndex {
+ const payload = @fieldParentPtr(Payload.UnOp, "base", node.ptr_otherwise).data;
+ return c.addNode(.{
+ .tag = tag,
+ .main_token = try c.addToken(tok_tag, bytes),
+ .data = .{
+ .lhs = try renderNodeGrouped(c, payload),
+ .rhs = undefined,
+ },
+ });
+}
+
+/// Renders a binary operator with BOTH operands parenthesized as needed
+/// (via renderNodeGrouped); compare renderBinOp, which renders operands
+/// ungrouped. The lhs must be rendered before the operator token so the
+/// token stream stays in source order.
+fn renderBinOpGrouped(c: *Context, node: Node, tag: std.zig.ast.Node.Tag, tok_tag: TokenTag, bytes: []const u8) !NodeIndex {
+ const payload = @fieldParentPtr(Payload.BinOp, "base", node.ptr_otherwise).data;
+ const lhs = try renderNodeGrouped(c, payload.lhs);
+ return c.addNode(.{
+ .tag = tag,
+ .main_token = try c.addToken(tok_tag, bytes),
+ .data = .{
+ .lhs = lhs,
+ .rhs = try renderNodeGrouped(c, payload.rhs),
+ },
+ });
+}
+
+/// Renders a binary operator with operands rendered as-is (no added
+/// parentheses); see renderBinOpGrouped for the parenthesizing variant.
+fn renderBinOp(c: *Context, node: Node, tag: std.zig.ast.Node.Tag, tok_tag: TokenTag, bytes: []const u8) !NodeIndex {
+ const payload = @fieldParentPtr(Payload.BinOp, "base", node.ptr_otherwise).data;
+ const lhs = try renderNode(c, payload.lhs);
+ return c.addNode(.{
+ .tag = tag,
+ .main_token = try c.addToken(tok_tag, bytes),
+ .data = .{
+ .lhs = lhs,
+ .rhs = try renderNode(c, payload.rhs),
+ },
+ });
+}
+
+/// Renders `@import("std").first.second` — a builtin import call followed
+/// by two chained field accesses — and returns the final field-access node.
+fn renderStdImport(c: *Context, first: []const u8, second: []const u8) !NodeIndex {
+ const import_tok = try c.addToken(.builtin, "@import");
+ _ = try c.addToken(.l_paren, "(");
+ const std_tok = try c.addToken(.string_literal, "\"std\"");
+ const std_node = try c.addNode(.{
+ .tag = .string_literal,
+ .main_token = std_tok,
+ .data = undefined,
+ });
+ _ = try c.addToken(.r_paren, ")");
+
+ const import_node = try c.addNode(.{
+ .tag = .builtin_call_two,
+ .main_token = import_tok,
+ .data = .{
+ .lhs = std_node,
+ .rhs = 0,
+ },
+ });
+
+ var access_chain = import_node;
+ access_chain = try renderFieldAccess(c, access_chain, first);
+ access_chain = try renderFieldAccess(c, access_chain, second);
+ return access_chain;
+}
+
+/// Renders a call expression `lhs(args...)`. Uses the compact `call_one`
+/// encoding for 0 or 1 arguments (0 args store a 0 placeholder in `rhs`),
+/// and the general `call` encoding with a sub-range in extra data otherwise.
+fn renderCall(c: *Context, lhs: NodeIndex, args: []const Node) !NodeIndex {
+ const lparen = try c.addToken(.l_paren, "(");
+ const res = switch (args.len) {
+ 0 => try c.addNode(.{
+ .tag = .call_one,
+ .main_token = lparen,
+ .data = .{
+ .lhs = lhs,
+ .rhs = 0,
+ },
+ }),
+ 1 => blk: {
+ const arg = try renderNode(c, args[0]);
+ break :blk try c.addNode(.{
+ .tag = .call_one,
+ .main_token = lparen,
+ .data = .{
+ .lhs = lhs,
+ .rhs = arg,
+ },
+ });
+ },
+ else => blk: {
+ var rendered = try c.gpa.alloc(NodeIndex, args.len);
+ defer c.gpa.free(rendered);
+
+ for (args) |arg, i| {
+ if (i != 0) _ = try c.addToken(.comma, ",");
+ rendered[i] = try renderNode(c, arg);
+ }
+ const span = try c.listToSpan(rendered);
+ break :blk try c.addNode(.{
+ .tag = .call,
+ .main_token = lparen,
+ .data = .{
+ .lhs = lhs,
+ .rhs = try c.addExtra(NodeSubRange{
+ .start = span.start,
+ .end = span.end,
+ }),
+ },
+ });
+ },
+ };
+ _ = try c.addToken(.r_paren, ")");
+ return res;
+}
+
+/// Renders a builtin call `@builtin(arg1, arg2)` with at most two arguments
+/// (the `builtin_call_two` encoding); unused argument slots stay 0.
+/// Asserts if given more than two args — extend as needed.
+fn renderBuiltinCall(c: *Context, builtin: []const u8, args: []const Node) !NodeIndex {
+ const builtin_tok = try c.addToken(.builtin, builtin);
+ _ = try c.addToken(.l_paren, "(");
+ var arg_1: NodeIndex = 0;
+ var arg_2: NodeIndex = 0;
+ switch (args.len) {
+ 0 => {},
+ 1 => {
+ arg_1 = try renderNode(c, args[0]);
+ },
+ 2 => {
+ arg_1 = try renderNode(c, args[0]);
+ _ = try c.addToken(.comma, ",");
+ arg_2 = try renderNode(c, args[1]);
+ },
+ else => unreachable, // expand this function as needed.
+ }
+
+ _ = try c.addToken(.r_paren, ")");
+ return c.addNode(.{
+ .tag = .builtin_call_two,
+ .main_token = builtin_tok,
+ .data = .{
+ .lhs = arg_1,
+ .rhs = arg_2,
+ },
+ });
+}
+
+/// Renders a variable declaration, including pub/extern/export/threadlocal
+/// qualifiers, const-ness, type, and optional align/linksection/init clauses.
+/// Picks the AST encoding by which optional clauses are present:
+/// `simple_var_decl` (neither), `local_var_decl` (align only), or
+/// `global_var_decl` (linksection, with or without align).
+fn renderVar(c: *Context, node: Node) !NodeIndex {
+ const payload = node.castTag(.var_decl).?.data;
+ if (payload.is_pub) _ = try c.addToken(.keyword_pub, "pub");
+ if (payload.is_extern) _ = try c.addToken(.keyword_extern, "extern");
+ if (payload.is_export) _ = try c.addToken(.keyword_export, "export");
+ if (payload.is_threadlocal) _ = try c.addToken(.keyword_threadlocal, "threadlocal");
+ const mut_tok = if (payload.is_const)
+ try c.addToken(.keyword_const, "const")
+ else
+ try c.addToken(.keyword_var, "var");
+ _ = try c.addIdentifier(payload.name);
+ _ = try c.addToken(.colon, ":");
+ const type_node = try renderNode(c, payload.type);
+
+ // 0 is used as the "absent" sentinel node index for each optional clause.
+ const align_node = if (payload.alignment) |some| blk: {
+ _ = try c.addToken(.keyword_align, "align");
+ _ = try c.addToken(.l_paren, "(");
+ const res = try c.addNode(.{
+ .tag = .integer_literal,
+ .main_token = try c.addTokenFmt(.integer_literal, "{d}", .{some}),
+ .data = undefined,
+ });
+ _ = try c.addToken(.r_paren, ")");
+ break :blk res;
+ } else 0;
+
+ const section_node = if (payload.linksection_string) |some| blk: {
+ _ = try c.addToken(.keyword_linksection, "linksection");
+ _ = try c.addToken(.l_paren, "(");
+ const res = try c.addNode(.{
+ .tag = .string_literal,
+ .main_token = try c.addTokenFmt(.string_literal, "\"{s}\"", .{std.zig.fmtEscapes(some)}),
+ .data = undefined,
+ });
+ _ = try c.addToken(.r_paren, ")");
+ break :blk res;
+ } else 0;
+
+ const init_node = if (payload.init) |some| blk: {
+ _ = try c.addToken(.equal, "=");
+ break :blk try renderNode(c, some);
+ } else 0;
+ _ = try c.addToken(.semicolon, ";");
+
+ if (section_node == 0) {
+ if (align_node == 0) {
+ return c.addNode(.{
+ .tag = .simple_var_decl,
+ .main_token = mut_tok,
+ .data = .{
+ .lhs = type_node,
+ .rhs = init_node,
+ },
+ });
+ } else {
+ return c.addNode(.{
+ .tag = .local_var_decl,
+ .main_token = mut_tok,
+ .data = .{
+ .lhs = try c.addExtra(std.zig.ast.Node.LocalVarDecl{
+ .type_node = type_node,
+ .align_node = align_node,
+ }),
+ .rhs = init_node,
+ },
+ });
+ }
+ } else {
+ return c.addNode(.{
+ .tag = .global_var_decl,
+ .main_token = mut_tok,
+ .data = .{
+ .lhs = try c.addExtra(std.zig.ast.Node.GlobalVarDecl{
+ .type_node = type_node,
+ .align_node = align_node,
+ .section_node = section_node,
+ }),
+ .rhs = init_node,
+ },
+ });
+ }
+}
+
+/// Renders a function declaration or prototype. Chooses among the four
+/// fn-proto AST encodings based on (a) whether align/linksection/callconv
+/// clauses are present and (b) the parameter count, then wraps the proto in
+/// a `fn_decl` if the function has a body.
+fn renderFunc(c: *Context, node: Node) !NodeIndex {
+ const payload = node.castTag(.func).?.data;
+ if (payload.is_pub) _ = try c.addToken(.keyword_pub, "pub");
+ if (payload.is_extern) _ = try c.addToken(.keyword_extern, "extern");
+ if (payload.is_export) _ = try c.addToken(.keyword_export, "export");
+ const fn_token = try c.addToken(.keyword_fn, "fn");
+ if (payload.name) |some| _ = try c.addIdentifier(some);
+
+ const params = try renderParams(c, payload.params, payload.is_var_args);
+ defer params.deinit();
+ // `span` is only initialized (and only read) when there are 2+ params.
+ var span: NodeSubRange = undefined;
+ if (params.items.len > 1) span = try c.listToSpan(params.items);
+
+ // 0 is the "absent" sentinel for each optional clause below.
+ const align_expr = if (payload.alignment) |some| blk: {
+ _ = try c.addToken(.keyword_align, "align");
+ _ = try c.addToken(.l_paren, "(");
+ const res = try c.addNode(.{
+ .tag = .integer_literal,
+ .main_token = try c.addTokenFmt(.integer_literal, "{d}", .{some}),
+ .data = undefined,
+ });
+ _ = try c.addToken(.r_paren, ")");
+ break :blk res;
+ } else 0;
+
+ const section_expr = if (payload.linksection_string) |some| blk: {
+ _ = try c.addToken(.keyword_linksection, "linksection");
+ _ = try c.addToken(.l_paren, "(");
+ const res = try c.addNode(.{
+ .tag = .string_literal,
+ .main_token = try c.addTokenFmt(.string_literal, "\"{s}\"", .{std.zig.fmtEscapes(some)}),
+ .data = undefined,
+ });
+ _ = try c.addToken(.r_paren, ")");
+ break :blk res;
+ } else 0;
+
+ const callconv_expr = if (payload.explicit_callconv) |some| blk: {
+ _ = try c.addToken(.keyword_callconv, "callconv");
+ _ = try c.addToken(.l_paren, "(");
+ _ = try c.addToken(.period, ".");
+ const res = try c.addNode(.{
+ .tag = .enum_literal,
+ .main_token = try c.addTokenFmt(.identifier, "{s}", .{@tagName(some)}),
+ .data = undefined,
+ });
+ _ = try c.addToken(.r_paren, ")");
+ break :blk res;
+ } else 0;
+
+ const return_type_expr = try renderNode(c, payload.return_type);
+
+ const fn_proto = try blk: {
+ if (align_expr == 0 and section_expr == 0 and callconv_expr == 0) {
+ // No extra clauses: use the "simple"/"multi" encodings.
+ if (params.items.len < 2)
+ break :blk c.addNode(.{
+ .tag = .fn_proto_simple,
+ .main_token = fn_token,
+ .data = .{
+ .lhs = params.items[0],
+ .rhs = return_type_expr,
+ },
+ })
+ else
+ break :blk c.addNode(.{
+ .tag = .fn_proto_multi,
+ .main_token = fn_token,
+ .data = .{
+ .lhs = try c.addExtra(NodeSubRange{
+ .start = span.start,
+ .end = span.end,
+ }),
+ .rhs = return_type_expr,
+ },
+ });
+ }
+ if (params.items.len < 2)
+ break :blk c.addNode(.{
+ .tag = .fn_proto_one,
+ .main_token = fn_token,
+ .data = .{
+ .lhs = try c.addExtra(std.zig.ast.Node.FnProtoOne{
+ .param = params.items[0],
+ .align_expr = align_expr,
+ .section_expr = section_expr,
+ .callconv_expr = callconv_expr,
+ }),
+ .rhs = return_type_expr,
+ },
+ })
+ else
+ break :blk c.addNode(.{
+ .tag = .fn_proto,
+ .main_token = fn_token,
+ .data = .{
+ .lhs = try c.addExtra(std.zig.ast.Node.FnProto{
+ .params_start = span.start,
+ .params_end = span.end,
+ .align_expr = align_expr,
+ .section_expr = section_expr,
+ .callconv_expr = callconv_expr,
+ }),
+ .rhs = return_type_expr,
+ },
+ });
+ };
+
+ // No body: this is a prototype; extern prototypes need a trailing `;`.
+ const payload_body = payload.body orelse {
+ if (payload.is_extern) {
+ _ = try c.addToken(.semicolon, ";");
+ }
+ return fn_proto;
+ };
+ const body = try renderNode(c, payload_body);
+ return c.addNode(.{
+ .tag = .fn_decl,
+ .main_token = fn_token,
+ .data = .{
+ .lhs = fn_proto,
+ .rhs = body,
+ },
+ });
+}
+
+/// Renders a translated C macro as a `pub fn` with `callconv(.Inline)`.
+/// Always produces a `fn_proto_one`/`fn_proto` (the callconv clause is
+/// always present) wrapped in a `fn_decl` with the macro body.
+fn renderMacroFunc(c: *Context, node: Node) !NodeIndex {
+ const payload = node.castTag(.pub_inline_fn).?.data;
+ _ = try c.addToken(.keyword_pub, "pub");
+ const fn_token = try c.addToken(.keyword_fn, "fn");
+ _ = try c.addIdentifier(payload.name);
+
+ const params = try renderParams(c, payload.params, false);
+ defer params.deinit();
+ // `span` is only initialized (and only read) when there are 2+ params.
+ var span: NodeSubRange = undefined;
+ if (params.items.len > 1) span = try c.listToSpan(params.items);
+
+ const callconv_expr = blk: {
+ _ = try c.addToken(.keyword_callconv, "callconv");
+ _ = try c.addToken(.l_paren, "(");
+ _ = try c.addToken(.period, ".");
+ const res = try c.addNode(.{
+ .tag = .enum_literal,
+ .main_token = try c.addToken(.identifier, "Inline"),
+ .data = undefined,
+ });
+ _ = try c.addToken(.r_paren, ")");
+ break :blk res;
+ };
+ const return_type_expr = try renderNodeGrouped(c, payload.return_type);
+
+ const fn_proto = try blk: {
+ if (params.items.len < 2)
+ break :blk c.addNode(.{
+ .tag = .fn_proto_one,
+ .main_token = fn_token,
+ .data = .{
+ .lhs = try c.addExtra(std.zig.ast.Node.FnProtoOne{
+ .param = params.items[0],
+ .align_expr = 0,
+ .section_expr = 0,
+ .callconv_expr = callconv_expr,
+ }),
+ .rhs = return_type_expr,
+ },
+ })
+ else
+ break :blk c.addNode(.{
+ .tag = .fn_proto,
+ .main_token = fn_token,
+ .data = .{
+ .lhs = try c.addExtra(std.zig.ast.Node.FnProto{
+ .params_start = span.start,
+ .params_end = span.end,
+ .align_expr = 0,
+ .section_expr = 0,
+ .callconv_expr = callconv_expr,
+ }),
+ .rhs = return_type_expr,
+ },
+ });
+ };
+ return c.addNode(.{
+ .tag = .fn_decl,
+ .main_token = fn_token,
+ .data = .{
+ .lhs = fn_proto,
+ .rhs = try renderNode(c, payload.body),
+ },
+ });
+}
+
+/// Renders a parenthesized parameter list and returns the rendered param
+/// type node indexes. Caller owns the returned list and must deinit it.
+/// `anytype` parameters are emitted as tokens only and are NOT appended to
+/// the result. An empty result gets a single 0 placeholder so callers can
+/// always read items[0].
+fn renderParams(c: *Context, params: []Payload.Param, is_var_args: bool) !std.ArrayList(NodeIndex) {
+ _ = try c.addToken(.l_paren, "(");
+ var rendered = std.ArrayList(NodeIndex).init(c.gpa);
+ errdefer rendered.deinit();
+ try rendered.ensureCapacity(std.math.max(params.len, 1));
+
+ for (params) |param, i| {
+ if (i != 0) _ = try c.addToken(.comma, ",");
+ if (param.is_noalias) _ = try c.addToken(.keyword_noalias, "noalias");
+ if (param.name) |some| {
+ _ = try c.addIdentifier(some);
+ _ = try c.addToken(.colon, ":");
+ }
+ if (param.type.tag() == .@"anytype") {
+ _ = try c.addToken(.keyword_anytype, "anytype");
+ continue;
+ }
+ rendered.appendAssumeCapacity(try renderNode(c, param.type));
+ }
+ if (is_var_args) {
+ if (params.len != 0) _ = try c.addToken(.comma, ",");
+ _ = try c.addToken(.ellipsis3, "...");
+ }
+ _ = try c.addToken(.r_paren, ")");
+
+ if (rendered.items.len == 0) rendered.appendAssumeCapacity(0);
+ return rendered;
+}
diff --git a/src/type.zig b/src/type.zig
index e1006e554cd5..38fe6dd3e6af 100644
--- a/src/type.zig
+++ b/src/type.zig
@@ -28,6 +28,8 @@ pub const Type = extern union {
.i32,
.u64,
.i64,
+ .u128,
+ .i128,
.usize,
.isize,
.c_short,
@@ -357,6 +359,8 @@ pub const Type = extern union {
.i32,
.u64,
.i64,
+ .u128,
+ .i128,
.usize,
.isize,
.c_short,
@@ -506,6 +510,8 @@ pub const Type = extern union {
.i32,
.u64,
.i64,
+ .u128,
+ .i128,
.usize,
.isize,
.c_short,
@@ -772,6 +778,8 @@ pub const Type = extern union {
.i32,
.u64,
.i64,
+ .u128,
+ .i128,
.usize,
.isize,
.c_short,
@@ -868,6 +876,7 @@ pub const Type = extern union {
.i16, .u16 => return 2,
.i32, .u32 => return 4,
.i64, .u64 => return 8,
+ .u128, .i128 => return 16,
.isize,
.usize,
@@ -1010,6 +1019,7 @@ pub const Type = extern union {
.i16, .u16 => return 2,
.i32, .u32 => return 4,
.i64, .u64 => return 8,
+ .u128, .i128 => return 16,
.@"anyframe", .anyframe_T, .isize, .usize => return @divExact(target.cpu.arch.ptrBitWidth(), 8),
@@ -1109,6 +1119,8 @@ pub const Type = extern union {
.i32,
.u64,
.i64,
+ .u128,
+ .i128,
.usize,
.isize,
.c_short,
@@ -1191,6 +1203,8 @@ pub const Type = extern union {
.i32,
.u64,
.i64,
+ .u128,
+ .i128,
.usize,
.isize,
.c_short,
@@ -1278,6 +1292,8 @@ pub const Type = extern union {
.i32,
.u64,
.i64,
+ .u128,
+ .i128,
.usize,
.isize,
.c_short,
@@ -1359,6 +1375,8 @@ pub const Type = extern union {
.i32,
.u64,
.i64,
+ .u128,
+ .i128,
.usize,
.isize,
.c_short,
@@ -1440,6 +1458,8 @@ pub const Type = extern union {
.i32,
.u64,
.i64,
+ .u128,
+ .i128,
.usize,
.isize,
.c_short,
@@ -1522,6 +1542,8 @@ pub const Type = extern union {
.i32,
.u64,
.i64,
+ .u128,
+ .i128,
.usize,
.isize,
.c_short,
@@ -1660,6 +1682,8 @@ pub const Type = extern union {
.i32 => unreachable,
.u64 => unreachable,
.i64 => unreachable,
+ .u128 => unreachable,
+ .i128 => unreachable,
.usize => unreachable,
.isize => unreachable,
.c_short => unreachable,
@@ -1776,6 +1800,8 @@ pub const Type = extern union {
.i32,
.u64,
.i64,
+ .u128,
+ .i128,
.usize,
.isize,
.c_short,
@@ -1856,6 +1882,8 @@ pub const Type = extern union {
.i32,
.u64,
.i64,
+ .u128,
+ .i128,
.usize,
.isize,
.c_short,
@@ -2009,6 +2037,8 @@ pub const Type = extern union {
.i16,
.i32,
.i64,
+ .u128,
+ .i128,
=> true,
};
}
@@ -2061,6 +2091,8 @@ pub const Type = extern union {
.i16,
.i32,
.i64,
+ .u128,
+ .i128,
.optional,
.optional_single_mut_pointer,
.optional_single_const_pointer,
@@ -2167,6 +2199,8 @@ pub const Type = extern union {
.i32 => .{ .signedness = .signed, .bits = 32 },
.u64 => .{ .signedness = .unsigned, .bits = 64 },
.i64 => .{ .signedness = .signed, .bits = 64 },
+ .u128 => .{ .signedness = .unsigned, .bits = 128 },
+ .i128 => .{ .signedness = .signed, .bits = 128 },
.usize => .{ .signedness = .unsigned, .bits = target.cpu.arch.ptrBitWidth() },
.isize => .{ .signedness = .signed, .bits = target.cpu.arch.ptrBitWidth() },
.c_short => .{ .signedness = .signed, .bits = CType.short.sizeInBits(target) },
@@ -2227,6 +2261,8 @@ pub const Type = extern union {
.i32,
.u64,
.i64,
+ .u128,
+ .i128,
.optional,
.optional_single_mut_pointer,
.optional_single_const_pointer,
@@ -2333,6 +2369,8 @@ pub const Type = extern union {
.i32,
.u64,
.i64,
+ .u128,
+ .i128,
.usize,
.isize,
.c_short,
@@ -2417,6 +2455,8 @@ pub const Type = extern union {
.i32,
.u64,
.i64,
+ .u128,
+ .i128,
.usize,
.isize,
.c_short,
@@ -2500,6 +2540,8 @@ pub const Type = extern union {
.i32,
.u64,
.i64,
+ .u128,
+ .i128,
.usize,
.isize,
.c_short,
@@ -2583,6 +2625,8 @@ pub const Type = extern union {
.i32,
.u64,
.i64,
+ .u128,
+ .i128,
.usize,
.isize,
.c_short,
@@ -2663,6 +2707,8 @@ pub const Type = extern union {
.i32,
.u64,
.i64,
+ .u128,
+ .i128,
.usize,
.isize,
.c_short,
@@ -2743,6 +2789,8 @@ pub const Type = extern union {
.i32,
.u64,
.i64,
+ .u128,
+ .i128,
.usize,
.isize,
.c_short,
@@ -2793,6 +2841,8 @@ pub const Type = extern union {
.i32,
.u64,
.i64,
+ .u128,
+ .i128,
.usize,
.isize,
.c_short,
@@ -2874,6 +2924,8 @@ pub const Type = extern union {
.i32,
.u64,
.i64,
+ .u128,
+ .i128,
.usize,
.isize,
.c_short,
@@ -2971,6 +3023,8 @@ pub const Type = extern union {
.i32,
.u64,
.i64,
+ .u128,
+ .i128,
.usize,
.isize,
.c_short,
@@ -3060,6 +3114,8 @@ pub const Type = extern union {
.i32,
.u64,
.i64,
+ .u128,
+ .i128,
.usize,
.isize,
.c_short,
@@ -3193,6 +3249,8 @@ pub const Type = extern union {
i32,
u64,
i64,
+ u128,
+ i128,
usize,
isize,
c_short,
@@ -3277,6 +3335,8 @@ pub const Type = extern union {
.i32,
.u64,
.i64,
+ .u128,
+ .i128,
.usize,
.isize,
.c_short,
@@ -3352,6 +3412,11 @@ pub const Type = extern union {
};
}
+ pub fn init(comptime t: Tag) Type {
+ comptime std.debug.assert(@enumToInt(t) < Tag.no_payload_count);
+ return .{ .tag_if_small_enough = @enumToInt(t) };
+ }
+
pub fn create(comptime t: Tag, ally: *Allocator, data: Data(t)) error{OutOfMemory}!Type {
const ptr = try ally.create(t.Type());
ptr.* = .{
diff --git a/src/zig_clang.cpp b/src/zig_clang.cpp
index 8dc6a0823be3..d9e5e527ac75 100644
--- a/src/zig_clang.cpp
+++ b/src/zig_clang.cpp
@@ -2244,6 +2244,11 @@ unsigned ZigClangAPSInt_getNumWords(const ZigClangAPSInt *self) {
return casted->getNumWords();
}
+bool ZigClangAPSInt_lessThanEqual(const ZigClangAPSInt *self, uint64_t rhs) {
+ auto casted = reinterpret_cast(self);
+ return casted->ule(rhs);
+}
+
uint64_t ZigClangAPInt_getLimitedValue(const ZigClangAPInt *self, uint64_t limit) {
auto casted = reinterpret_cast(self);
return casted->getLimitedValue(limit);
diff --git a/src/zig_clang.h b/src/zig_clang.h
index 6fe1da0bc1a5..a697c58b4f62 100644
--- a/src/zig_clang.h
+++ b/src/zig_clang.h
@@ -1097,6 +1097,7 @@ ZIG_EXTERN_C const struct ZigClangAPSInt *ZigClangAPSInt_negate(const struct Zig
ZIG_EXTERN_C void ZigClangAPSInt_free(const struct ZigClangAPSInt *self);
ZIG_EXTERN_C const uint64_t *ZigClangAPSInt_getRawData(const struct ZigClangAPSInt *self);
ZIG_EXTERN_C unsigned ZigClangAPSInt_getNumWords(const struct ZigClangAPSInt *self);
+ZIG_EXTERN_C bool ZigClangAPSInt_lessThanEqual(const struct ZigClangAPSInt *self, uint64_t rhs);
ZIG_EXTERN_C uint64_t ZigClangAPInt_getLimitedValue(const struct ZigClangAPInt *self, uint64_t limit);
diff --git a/src/zir.zig b/src/zir.zig
index eefded0c6f6e..ee0fd3dc3d75 100644
--- a/src/zir.zig
+++ b/src/zir.zig
@@ -53,6 +53,9 @@ pub const Inst = struct {
indexable_ptr_len,
/// Function parameter value. These must be first in a function's main block,
/// in respective order with the parameters.
+ /// TODO make this instruction implicit; after we transition to having ZIR
+ /// instructions be same sized and referenced by index, the first N indexes
+ /// will implicitly be references to the parameters of the function.
arg,
/// Type coercion.
as,
@@ -169,8 +172,10 @@ pub const Inst = struct {
floatcast,
/// Declare a function body.
@"fn",
- /// Returns a function type.
- fntype,
+ /// Returns a function type, assuming unspecified calling convention.
+ fn_type,
+ /// Returns a function type, with a calling convention instruction operand.
+ fn_type_cc,
/// @import(operand)
import,
/// Integer literal.
@@ -340,6 +345,8 @@ pub const Inst = struct {
void_value,
/// A switch expression.
switchbr,
+ /// Same as `switchbr` but the target is a pointer to the value being switched on.
+ switchbr_ref,
/// A range in a switch case, `lhs...rhs`.
/// Only checks that `lhs >= rhs` if they are ints, everything else is
/// validated by the .switch instruction.
@@ -450,6 +457,8 @@ pub const Inst = struct {
.block_comptime_flat,
=> Block,
+ .switchbr, .switchbr_ref => SwitchBr,
+
.arg => Arg,
.array_type_sentinel => ArrayTypeSentinel,
.@"break" => Break,
@@ -471,7 +480,8 @@ pub const Inst = struct {
.@"export" => Export,
.param_type => ParamType,
.primitive => Primitive,
- .fntype => FnType,
+ .fn_type => FnType,
+ .fn_type_cc => FnTypeCc,
.elem_ptr, .elem_val => Elem,
.condbr => CondBr,
.ptr_type => PtrType,
@@ -485,7 +495,6 @@ pub const Inst = struct {
.enum_type => EnumType,
.union_type => UnionType,
.struct_type => StructType,
- .switchbr => SwitchBr,
};
}
@@ -546,7 +555,8 @@ pub const Inst = struct {
.field_ptr_named,
.field_val_named,
.@"fn",
- .fntype,
+ .fn_type,
+ .fn_type_cc,
.int,
.intcast,
.int_type,
@@ -614,7 +624,6 @@ pub const Inst = struct {
.struct_type,
.void_value,
.switch_range,
- .switchbr,
=> false,
.@"break",
@@ -629,6 +638,8 @@ pub const Inst = struct {
.container_field_named,
.container_field_typed,
.container_field,
+ .switchbr,
+ .switchbr_ref,
=> true,
};
}
@@ -689,6 +700,8 @@ pub const Inst = struct {
base: Inst,
positionals: struct {
+ /// This exists to be passed to the arg TZIR instruction, which
+ /// needs it for debug info.
name: []const u8,
},
kw_args: struct {},
@@ -725,6 +738,8 @@ pub const Inst = struct {
kw_args: struct {},
};
+ // TODO break this into multiple call instructions to avoid paying the cost
+ // of the calling convention field most of the time.
pub const Call = struct {
pub const base_tag = Tag.call;
base: Inst,
@@ -732,10 +747,9 @@ pub const Inst = struct {
positionals: struct {
func: *Inst,
args: []*Inst,
- },
- kw_args: struct {
modifier: std.builtin.CallOptions.Modifier = .auto,
},
+ kw_args: struct {},
};
pub const DeclRef = struct {
@@ -849,8 +863,8 @@ pub const Inst = struct {
kw_args: struct {
@"volatile": bool = false,
output: ?*Inst = null,
- inputs: []*Inst = &[0]*Inst{},
- clobbers: []*Inst = &[0]*Inst{},
+ inputs: []const []const u8 = &.{},
+ clobbers: []const []const u8 = &.{},
args: []*Inst = &[0]*Inst{},
},
};
@@ -867,7 +881,18 @@ pub const Inst = struct {
};
pub const FnType = struct {
- pub const base_tag = Tag.fntype;
+ pub const base_tag = Tag.fn_type;
+ base: Inst,
+
+ positionals: struct {
+ param_types: []*Inst,
+ return_type: *Inst,
+ },
+ kw_args: struct {},
+ };
+
+ pub const FnTypeCc = struct {
+ pub const base_tag = Tag.fn_type_cc;
base: Inst,
positionals: struct {
@@ -1167,20 +1192,12 @@ pub const Inst = struct {
},
kw_args: struct {
init_inst: ?*Inst = null,
- init_kind: InitKind = .none,
+ has_enum_token: bool,
layout: std.builtin.TypeInfo.ContainerLayout = .Auto,
},
-
- // TODO error: values of type '(enum literal)' must be comptime known
- pub const InitKind = enum {
- enum_type,
- tag_type,
- none,
- };
};
pub const SwitchBr = struct {
- pub const base_tag = Tag.switchbr;
base: Inst,
positionals: struct {
@@ -1189,14 +1206,12 @@ pub const Inst = struct {
items: []*Inst,
cases: []Case,
else_body: Body,
- },
- kw_args: struct {
/// Pointer to first range if such exists.
range: ?*Inst = null,
special_prong: SpecialProng = .none,
},
+ kw_args: struct {},
- // Not anonymous due to stage1 limitations
pub const SpecialProng = enum {
none,
@"else",
@@ -1391,6 +1406,7 @@ const Writer = struct {
}
switch (@TypeOf(param)) {
*Inst => return self.writeInstParamToStream(stream, param),
+ ?*Inst => return self.writeInstParamToStream(stream, param.?),
[]*Inst => {
try stream.writeByte('[');
for (param) |inst, i| {
@@ -1458,7 +1474,7 @@ const Writer = struct {
const name = self.loop_table.get(param).?;
return stream.print("\"{}\"", .{std.zig.fmtEscapes(name)});
},
- [][]const u8 => {
+ [][]const u8, []const []const u8 => {
try stream.writeByte('[');
for (param) |str, i| {
if (i != 0) {
@@ -1586,6 +1602,7 @@ const DumpTzir = struct {
.unreach,
.breakpoint,
.dbg_stmt,
+ .arg,
=> {},
.ref,
@@ -1630,8 +1647,6 @@ const DumpTzir = struct {
try dtz.findConst(bin_op.rhs);
},
- .arg => {},
-
.br => {
const br = inst.castTag(.br).?;
try dtz.findConst(&br.block.base);
diff --git a/src/zir_sema.zig b/src/zir_sema.zig
index 480e0b4c337a..b20e78d448a1 100644
--- a/src/zir_sema.zig
+++ b/src/zir_sema.zig
@@ -91,7 +91,8 @@ pub fn analyzeInst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!
.@"fn" => return zirFn(mod, scope, old_inst.castTag(.@"fn").?),
.@"export" => return zirExport(mod, scope, old_inst.castTag(.@"export").?),
.primitive => return zirPrimitive(mod, scope, old_inst.castTag(.primitive).?),
- .fntype => return zirFnType(mod, scope, old_inst.castTag(.fntype).?),
+ .fn_type => return zirFnType(mod, scope, old_inst.castTag(.fn_type).?),
+ .fn_type_cc => return zirFnTypeCc(mod, scope, old_inst.castTag(.fn_type_cc).?),
.intcast => return zirIntcast(mod, scope, old_inst.castTag(.intcast).?),
.bitcast => return zirBitcast(mod, scope, old_inst.castTag(.bitcast).?),
.floatcast => return zirFloatcast(mod, scope, old_inst.castTag(.floatcast).?),
@@ -154,7 +155,8 @@ pub fn analyzeInst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!
.bool_and => return zirBoolOp(mod, scope, old_inst.castTag(.bool_and).?),
.bool_or => return zirBoolOp(mod, scope, old_inst.castTag(.bool_or).?),
.void_value => return mod.constVoid(scope, old_inst.src),
- .switchbr => return zirSwitchBr(mod, scope, old_inst.castTag(.switchbr).?),
+ .switchbr => return zirSwitchBr(mod, scope, old_inst.castTag(.switchbr).?, false),
+ .switchbr_ref => return zirSwitchBr(mod, scope, old_inst.castTag(.switchbr_ref).?, true),
.switch_range => return zirSwitchRange(mod, scope, old_inst.castTag(.switch_range).?),
.container_field_named,
@@ -957,11 +959,11 @@ fn zirCall(mod: *Module, scope: *Scope, inst: *zir.Inst.Call) InnerError!*Inst {
);
}
- if (inst.kw_args.modifier == .compile_time) {
+ if (inst.positionals.modifier == .compile_time) {
return mod.fail(scope, inst.base.src, "TODO implement comptime function calls", .{});
}
- if (inst.kw_args.modifier != .auto) {
- return mod.fail(scope, inst.base.src, "TODO implement call with modifier {}", .{inst.kw_args.modifier});
+ if (inst.positionals.modifier != .auto) {
+ return mod.fail(scope, inst.base.src, "TODO implement call with modifier {}", .{inst.positionals.modifier});
}
// TODO handle function calls of generic functions
@@ -979,8 +981,8 @@ fn zirCall(mod: *Module, scope: *Scope, inst: *zir.Inst.Call) InnerError!*Inst {
const ret_type = func.ty.fnReturnType();
const b = try mod.requireFunctionBlock(scope, inst.base.src);
- const is_comptime_call = b.is_comptime or inst.kw_args.modifier == .compile_time;
- const is_inline_call = is_comptime_call or inst.kw_args.modifier == .always_inline or
+ const is_comptime_call = b.is_comptime or inst.positionals.modifier == .compile_time;
+ const is_inline_call = is_comptime_call or inst.positionals.modifier == .always_inline or
func.ty.fnCallingConvention() == .Inline;
if (is_inline_call) {
const func_val = try mod.resolveConstValue(scope, func);
@@ -1294,34 +1296,69 @@ fn zirEnsureErrPayloadVoid(mod: *Module, scope: *Scope, unwrap: *zir.Inst.UnOp)
fn zirFnType(mod: *Module, scope: *Scope, fntype: *zir.Inst.FnType) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
- const return_type = try resolveType(mod, scope, fntype.positionals.return_type);
+
+ return fnTypeCommon(
+ mod,
+ scope,
+ &fntype.base,
+ fntype.positionals.param_types,
+ fntype.positionals.return_type,
+ .Unspecified,
+ );
+}
+
+fn zirFnTypeCc(mod: *Module, scope: *Scope, fntype: *zir.Inst.FnTypeCc) InnerError!*Inst {
+ const tracy = trace(@src());
+ defer tracy.end();
+
const cc_tv = try resolveInstConst(mod, scope, fntype.positionals.cc);
+ // TODO once we're capable of importing and analyzing decls from
+ // std.builtin, this needs to change
const cc_str = cc_tv.val.castTag(.enum_literal).?.data;
const cc = std.meta.stringToEnum(std.builtin.CallingConvention, cc_str) orelse
return mod.fail(scope, fntype.positionals.cc.src, "Unknown calling convention {s}", .{cc_str});
+ return fnTypeCommon(
+ mod,
+ scope,
+ &fntype.base,
+ fntype.positionals.param_types,
+ fntype.positionals.return_type,
+ cc,
+ );
+}
+
+fn fnTypeCommon(
+ mod: *Module,
+ scope: *Scope,
+ zir_inst: *zir.Inst,
+ zir_param_types: []*zir.Inst,
+ zir_return_type: *zir.Inst,
+ cc: std.builtin.CallingConvention,
+) InnerError!*Inst {
+ const return_type = try resolveType(mod, scope, zir_return_type);
// Hot path for some common function types.
- if (fntype.positionals.param_types.len == 0) {
+ if (zir_param_types.len == 0) {
if (return_type.zigTypeTag() == .NoReturn and cc == .Unspecified) {
- return mod.constType(scope, fntype.base.src, Type.initTag(.fn_noreturn_no_args));
+ return mod.constType(scope, zir_inst.src, Type.initTag(.fn_noreturn_no_args));
}
if (return_type.zigTypeTag() == .Void and cc == .Unspecified) {
- return mod.constType(scope, fntype.base.src, Type.initTag(.fn_void_no_args));
+ return mod.constType(scope, zir_inst.src, Type.initTag(.fn_void_no_args));
}
if (return_type.zigTypeTag() == .NoReturn and cc == .Naked) {
- return mod.constType(scope, fntype.base.src, Type.initTag(.fn_naked_noreturn_no_args));
+ return mod.constType(scope, zir_inst.src, Type.initTag(.fn_naked_noreturn_no_args));
}
if (return_type.zigTypeTag() == .Void and cc == .C) {
- return mod.constType(scope, fntype.base.src, Type.initTag(.fn_ccc_void_no_args));
+ return mod.constType(scope, zir_inst.src, Type.initTag(.fn_ccc_void_no_args));
}
}
const arena = scope.arena();
- const param_types = try arena.alloc(Type, fntype.positionals.param_types.len);
- for (fntype.positionals.param_types) |param_type, i| {
+ const param_types = try arena.alloc(Type, zir_param_types.len);
+ for (zir_param_types) |param_type, i| {
const resolved = try resolveType(mod, scope, param_type);
// TODO skip for comptime params
if (!resolved.isValidVarType(false)) {
@@ -1335,7 +1372,7 @@ fn zirFnType(mod: *Module, scope: *Scope, fntype: *zir.Inst.FnType) InnerError!*
.return_type = return_type,
.cc = cc,
});
- return mod.constType(scope, fntype.base.src, fn_ty);
+ return mod.constType(scope, zir_inst.src, fn_ty);
}
fn zirPrimitive(mod: *Module, scope: *Scope, primitive: *zir.Inst.Primitive) InnerError!*Inst {
@@ -1554,10 +1591,15 @@ fn zirSwitchRange(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError
return mod.constVoid(scope, inst.base.src);
}
-fn zirSwitchBr(mod: *Module, scope: *Scope, inst: *zir.Inst.SwitchBr) InnerError!*Inst {
+fn zirSwitchBr(mod: *Module, scope: *Scope, inst: *zir.Inst.SwitchBr, ref: bool) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
- const target = try resolveInst(mod, scope, inst.positionals.target);
+
+ const target_ptr = try resolveInst(mod, scope, inst.positionals.target);
+ const target = if (ref)
+ try mod.analyzeDeref(scope, inst.base.src, target_ptr, inst.positionals.target.src)
+ else
+ target_ptr;
try validateSwitch(mod, scope, target, inst);
if (try mod.resolveDefinedValue(scope, target)) |target_val| {
@@ -1626,13 +1668,13 @@ fn zirSwitchBr(mod: *Module, scope: *Scope, inst: *zir.Inst.SwitchBr) InnerError
fn validateSwitch(mod: *Module, scope: *Scope, target: *Inst, inst: *zir.Inst.SwitchBr) InnerError!void {
// validate usage of '_' prongs
- if (inst.kw_args.special_prong == .underscore and target.ty.zigTypeTag() != .Enum) {
+ if (inst.positionals.special_prong == .underscore and target.ty.zigTypeTag() != .Enum) {
return mod.fail(scope, inst.base.src, "'_' prong only allowed when switching on non-exhaustive enums", .{});
// TODO notes "'_' prong here" inst.positionals.cases[last].src
}
// check that target type supports ranges
- if (inst.kw_args.range) |range_inst| {
+ if (inst.positionals.range) |range_inst| {
switch (target.ty.zigTypeTag()) {
.Int, .ComptimeInt => {},
else => {
@@ -1683,14 +1725,14 @@ fn validateSwitch(mod: *Module, scope: *Scope, target: *Inst, inst: *zir.Inst.Sw
const start = try target.ty.minInt(&arena, mod.getTarget());
const end = try target.ty.maxInt(&arena, mod.getTarget());
if (try range_set.spans(start, end)) {
- if (inst.kw_args.special_prong == .@"else") {
+ if (inst.positionals.special_prong == .@"else") {
return mod.fail(scope, inst.base.src, "unreachable else prong, all cases already handled", .{});
}
return;
}
}
- if (inst.kw_args.special_prong != .@"else") {
+ if (inst.positionals.special_prong != .@"else") {
return mod.fail(scope, inst.base.src, "switch must handle all possibilities", .{});
}
},
@@ -1710,15 +1752,15 @@ fn validateSwitch(mod: *Module, scope: *Scope, target: *Inst, inst: *zir.Inst.Sw
return mod.fail(scope, item.src, "duplicate switch value", .{});
}
}
- if ((true_count + false_count < 2) and inst.kw_args.special_prong != .@"else") {
+ if ((true_count + false_count < 2) and inst.positionals.special_prong != .@"else") {
return mod.fail(scope, inst.base.src, "switch must handle all possibilities", .{});
}
- if ((true_count + false_count == 2) and inst.kw_args.special_prong == .@"else") {
+ if ((true_count + false_count == 2) and inst.positionals.special_prong == .@"else") {
return mod.fail(scope, inst.base.src, "unreachable else prong, all cases already handled", .{});
}
},
.EnumLiteral, .Void, .Fn, .Pointer, .Type => {
- if (inst.kw_args.special_prong != .@"else") {
+ if (inst.positionals.special_prong != .@"else") {
return mod.fail(scope, inst.base.src, "else prong required when switching on type '{}'", .{target.ty});
}
@@ -1981,19 +2023,21 @@ fn zirDeref(mod: *Module, scope: *Scope, deref: *zir.Inst.UnOp) InnerError!*Inst
fn zirAsm(mod: *Module, scope: *Scope, assembly: *zir.Inst.Asm) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
+
const return_type = try resolveType(mod, scope, assembly.positionals.return_type);
const asm_source = try resolveConstString(mod, scope, assembly.positionals.asm_source);
const output = if (assembly.kw_args.output) |o| try resolveConstString(mod, scope, o) else null;
- const inputs = try scope.arena().alloc([]const u8, assembly.kw_args.inputs.len);
- const clobbers = try scope.arena().alloc([]const u8, assembly.kw_args.clobbers.len);
- const args = try scope.arena().alloc(*Inst, assembly.kw_args.args.len);
+ const arena = scope.arena();
+ const inputs = try arena.alloc([]const u8, assembly.kw_args.inputs.len);
+ const clobbers = try arena.alloc([]const u8, assembly.kw_args.clobbers.len);
+ const args = try arena.alloc(*Inst, assembly.kw_args.args.len);
for (inputs) |*elem, i| {
- elem.* = try resolveConstString(mod, scope, assembly.kw_args.inputs[i]);
+ elem.* = try arena.dupe(u8, assembly.kw_args.inputs[i]);
}
for (clobbers) |*elem, i| {
- elem.* = try resolveConstString(mod, scope, assembly.kw_args.clobbers[i]);
+ elem.* = try arena.dupe(u8, assembly.kw_args.clobbers[i]);
}
for (args) |*elem, i| {
const arg = try resolveInst(mod, scope, assembly.kw_args.args[i]);
diff --git a/test/run_translated_c.zig b/test/run_translated_c.zig
index b8af201e360e..85b0f19c88a2 100644
--- a/test/run_translated_c.zig
+++ b/test/run_translated_c.zig
@@ -3,6 +3,33 @@ const tests = @import("tests.zig");
const nl = std.cstr.line_sep;
pub fn addCases(cases: *tests.RunTranslatedCContext) void {
+ cases.add("use global scope for record/enum/typedef type translation if needed",
+ \\void bar(void);
+ \\void baz(void);
+ \\struct foo { int x; };
+ \\void bar() {
+ \\ struct foo tmp;
+ \\}
+ \\
+ \\void baz() {
+ \\ struct foo tmp;
+ \\}
+ \\
+ \\int main(void) {
+ \\ bar();
+ \\ baz();
+ \\ return 0;
+ \\}
+ , "");
+
+ cases.add("failed macros are only declared once",
+ \\#define FOO =
+ \\#define FOO =
+ \\#define PtrToPtr64(p) ((void *POINTER_64) p)
+ \\#define STRUC_ALIGNED_STACK_COPY(t,s) ((CONST t *)(s))
+ \\int main(void) {}
+ , "");
+
cases.add("parenthesized string literal",
\\void foo(const char *s) {}
\\int main(void) {
@@ -922,4 +949,13 @@ pub fn addCases(cases: *tests.RunTranslatedCContext) void {
\\ return 0;
\\}
, "");
+
+ cases.add("Use correct break label for statement expression in nested scope",
+ \\#include <stdlib.h>
+ \\int main(void) {
+ \\ int x = ({1, ({2; 3;});});
+ \\ if (x != 3) abort();
+ \\ return 0;
+ \\}
+ , "");
}
diff --git a/test/stage2/test.zig b/test/stage2/test.zig
index b5de03524fff..a7cbef82a9e5 100644
--- a/test/stage2/test.zig
+++ b/test/stage2/test.zig
@@ -1088,7 +1088,7 @@ pub fn addCases(ctx: *TestContext) !void {
\\ _ = foo;
\\}
\\extern var foo;
- , &[_][]const u8{":4:1: error: unable to infer variable type"});
+ , &[_][]const u8{":4:8: error: unable to infer variable type"});
}
{
@@ -1194,12 +1194,12 @@ pub fn addCases(ctx: *TestContext) !void {
\\comptime {
\\ foo: while (true) {}
\\}
- , &[_][]const u8{":2:5: error: unused while label"});
+ , &[_][]const u8{":2:5: error: unused while loop label"});
case.addError(
\\comptime {
\\ foo: for ("foo") |_| {}
\\}
- , &[_][]const u8{":2:5: error: unused for label"});
+ , &[_][]const u8{":2:5: error: unused for loop label"});
case.addError(
\\comptime {
\\ blk: {blk: {}}
@@ -1294,6 +1294,10 @@ pub fn addCases(ctx: *TestContext) !void {
,
"",
);
+ // TODO this should be :8:21 not :8:19. we need to improve source locations
+ // to be relative to the containing Decl so that they can survive when the byte
+ // offset of a previous Decl changes. Here the change from 7 to 999 introduces
+ // +2 to the byte offset and makes the error location wrong by 2 bytes.
case.addError(
\\export fn _start() noreturn {
\\ const y = fibonacci(999);
@@ -1314,7 +1318,7 @@ pub fn addCases(ctx: *TestContext) !void {
\\ );
\\ unreachable;
\\}
- , &[_][]const u8{":8:10: error: evaluation exceeded 1000 backwards branches"});
+ , &[_][]const u8{":8:19: error: evaluation exceeded 1000 backwards branches"});
}
{
var case = ctx.exe("orelse at comptime", linux_x64);
diff --git a/test/translate_c.zig b/test/translate_c.zig
index 95969a2f7254..2f95bb2d1e49 100644
--- a/test/translate_c.zig
+++ b/test/translate_c.zig
@@ -3,12 +3,208 @@ const std = @import("std");
const CrossTarget = std.zig.CrossTarget;
pub fn addCases(cases: *tests.TranslateCContext) void {
- cases.add("variadic function demoted to prototype",
+ cases.add("if as while stmt has semicolon",
+ \\void foo() {
+ \\ while (1) if (1) {
+ \\ int a = 1;
+ \\ } else {
+ \\ int b = 2;
+ \\ }
+ \\ if (1) if (1) {}
+ \\}
+ , &[_][]const u8{
+ \\pub export fn foo() void {
+ \\ while (true) if (true) {
+ \\ var a: c_int = 1;
+ \\ } else {
+ \\ var b: c_int = 2;
+ \\ };
+ \\ if (true) if (true) {};
+ \\}
+ });
+
+ cases.add("conditional operator cast to void",
+ \\int bar();
+ \\void foo() {
+ \\ int a;
+ \\ a ? a = 2 : bar();
+ \\}
+ , &[_][]const u8{
+ \\pub extern fn bar(...) c_int;
+ \\pub export fn foo() void {
+ \\ var a: c_int = undefined;
+ \\ if (a != 0) a = 2 else _ = bar();
+ \\}
+ });
+
+ cases.add("struct in struct init to zero",
+ \\struct Foo {
+ \\ int a;
+ \\ struct Bar {
+ \\ int a;
+ \\ } b;
+ \\} a = {};
+ \\#define PTR void *
+ , &[_][]const u8{
+ \\pub const struct_Bar = extern struct {
+ \\ a: c_int,
+ \\};
+ \\pub const struct_Foo = extern struct {
+ \\ a: c_int,
+ \\ b: struct_Bar,
+ \\};
+ \\pub export var a: struct_Foo = struct_Foo{
+ \\ .a = 0,
+ \\ .b = @import("std").mem.zeroes(struct_Bar),
+ \\};
+ ,
+ \\pub const PTR = ?*c_void;
+ });
+
+ cases.add("scoped enum",
+ \\void foo() {
+ \\ enum Foo {
+ \\ A,
+ \\ B,
+ \\ C,
+ \\ };
+ \\ enum Foo a = B;
+ \\ {
+ \\ enum Foo {
+ \\ A,
+ \\ B,
+ \\ C,
+ \\ };
+ \\ enum Foo a = B;
+ \\ }
+ \\}
+ , &[_][]const u8{
+ \\pub export fn foo() void {
+ \\ const enum_Foo = extern enum(c_int) {
+ \\ A,
+ \\ B,
+ \\ C,
+ \\ _,
+ \\ };
+ \\ const A = @enumToInt(enum_Foo.A);
+ \\ const B = @enumToInt(enum_Foo.B);
+ \\ const C = @enumToInt(enum_Foo.C);
+ \\ var a: enum_Foo = @intToEnum(enum_Foo, B);
+ \\ {
+ \\ const enum_Foo = extern enum(c_int) {
+ \\ A,
+ \\ B,
+ \\ C,
+ \\ _,
+ \\ };
+ \\ const A_2 = @enumToInt(enum_Foo.A);
+ \\ const B_3 = @enumToInt(enum_Foo.B);
+ \\ const C_4 = @enumToInt(enum_Foo.C);
+ \\ var a_5: enum_Foo = @intToEnum(enum_Foo, B_3);
+ \\ }
+ \\}
+ });
+
+ cases.add("scoped record",
+ \\void foo() {
+ \\ struct Foo {
+ \\ int A;
+ \\ int B;
+ \\ int C;
+ \\ };
+ \\ struct Foo a = {0};
+ \\ {
+ \\ struct Foo {
+ \\ int A;
+ \\ int B;
+ \\ int C;
+ \\ };
+ \\ struct Foo a = {0};
+ \\ }
+ \\}
+ , &[_][]const u8{
+ \\pub export fn foo() void {
+ \\ const struct_Foo = extern struct {
+ \\ A: c_int,
+ \\ B: c_int,
+ \\ C: c_int,
+ \\ };
+ \\ var a: struct_Foo = struct_Foo{
+ \\ .A = @as(c_int, 0),
+ \\ .B = 0,
+ \\ .C = 0,
+ \\ };
+ \\ {
+ \\ const struct_Foo_1 = extern struct {
+ \\ A: c_int,
+ \\ B: c_int,
+ \\ C: c_int,
+ \\ };
+ \\ var a_2: struct_Foo_1 = struct_Foo_1{
+ \\ .A = @as(c_int, 0),
+ \\ .B = 0,
+ \\ .C = 0,
+ \\ };
+ \\ }
+ \\}
+ });
+
+ cases.add("scoped typedef",
+ \\void foo() {
+ \\ typedef union {
+ \\ int A;
+ \\ int B;
+ \\ int C;
+ \\ } Foo;
+ \\ Foo a = {0};
+ \\ {
+ \\ typedef union {
+ \\ int A;
+ \\ int B;
+ \\ int C;
+ \\ } Foo;
+ \\ Foo a = {0};
+ \\ }
+ \\}
+ , &[_][]const u8{
+ \\pub export fn foo() void {
+ \\ const union_unnamed_1 = extern union {
+ \\ A: c_int,
+ \\ B: c_int,
+ \\ C: c_int,
+ \\ };
+ \\ const Foo = union_unnamed_1;
+ \\ var a: Foo = Foo{
+ \\ .A = @as(c_int, 0),
+ \\ };
+ \\ {
+ \\ const union_unnamed_2 = extern union {
+ \\ A: c_int,
+ \\ B: c_int,
+ \\ C: c_int,
+ \\ };
+ \\ const Foo_1 = union_unnamed_2;
+ \\ var a_2: Foo_1 = Foo_1{
+ \\ .A = @as(c_int, 0),
+ \\ };
+ \\ }
+ \\}
+ });
+
+ cases.add("use cast param as macro fn return type",
+ \\#define MEM_PHYSICAL_TO_K0(x) (void*)((u32)(x) + SYS_BASE_CACHED)
+ , &[_][]const u8{
+ \\pub fn MEM_PHYSICAL_TO_K0(x: anytype) callconv(.Inline) ?*c_void {
+ \\ return @import("std").meta.cast(?*c_void, @import("std").meta.cast(u32, x) + SYS_BASE_CACHED);
+ \\}
+ });
+
+ cases.add("variadic function demoted to extern",
\\int foo(int bar, ...) {
\\ return 1;
\\}
, &[_][]const u8{
- \\warning: TODO unable to translate variadic function, demoted to declaration
+ \\warning: TODO unable to translate variadic function, demoted to extern
\\pub extern fn foo(bar: c_int, ...) c_int;
});
@@ -21,11 +217,8 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\ Foo *bar;
\\} Bar;
, &[_][]const u8{
- \\const struct_unnamed_1 = //
- ,
- \\warning: unsupported type: 'Atomic'
- \\ opaque {}; //
- ,
+ \\source.h:1:9: warning: struct demoted to opaque type - unable to translate type of field foo
+ \\const struct_unnamed_1 = opaque {};
\\pub const Foo = struct_unnamed_1;
\\const struct_unnamed_2 = extern struct {
\\ bar: ?*Foo,
@@ -43,8 +236,8 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
,
\\pub const VALUE = ((((1 + (2 * 3)) + (4 * 5)) + 6) << 7) | @boolToInt(8 == 9);
,
- \\pub fn _AL_READ3BYTES(p: anytype) callconv(.Inline) @TypeOf(((@import("std").meta.cast([*c]u8, p)).* | (((@import("std").meta.cast([*c]u8, p)) + 1).* << 8)) | (((@import("std").meta.cast([*c]u8, p)) + 2).* << 16)) {
- \\ return ((@import("std").meta.cast([*c]u8, p)).* | (((@import("std").meta.cast([*c]u8, p)) + 1).* << 8)) | (((@import("std").meta.cast([*c]u8, p)) + 2).* << 16);
+ \\pub fn _AL_READ3BYTES(p: anytype) callconv(.Inline) @TypeOf((@import("std").meta.cast([*c]u8, p).* | ((@import("std").meta.cast([*c]u8, p) + 1).* << 8)) | ((@import("std").meta.cast([*c]u8, p) + 2).* << 16)) {
+ \\ return (@import("std").meta.cast([*c]u8, p).* | ((@import("std").meta.cast([*c]u8, p) + 1).* << 8)) | ((@import("std").meta.cast([*c]u8, p) + 2).* << 16);
\\}
});
@@ -57,7 +250,6 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\pub export var bar: f32 = @import("std").mem.zeroes(f32);
\\threadlocal var bar_1: c_int = 2;
\\pub export fn foo() c_int {
- \\ _ = bar_1;
\\ return 0;
\\}
});
@@ -107,7 +299,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\ int i1;
\\} boom_t;
\\#define FOO ((boom_t){1})
- , &[_][]const u8{ // TODO properly translate this
+ , &[_][]const u8{
\\pub const struct_Color = extern struct {
\\ r: u8,
\\ g: u8,
@@ -127,7 +319,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\};
\\pub const boom_t = struct_boom_t;
,
- \\pub const FOO = @import("std").mem.zeroInit(boom_t, .{ 1 });
+ \\pub const FOO = @import("std").mem.zeroInit(boom_t, .{1});
});
cases.add("complex switch",
@@ -142,14 +334,17 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\ }
\\}
, &[_][]const u8{ // TODO properly translate this
- \\pub const main = @compileError("unable to translate function");
+ \\source.h:5:13: warning: TODO complex switch
+ ,
+ \\source.h:1:5: warning: unable to translate function, demoted to extern
+ \\pub extern fn main() c_int;
});
cases.add("correct semicolon after infixop",
\\#define __ferror_unlocked_body(_fp) (((_fp)->_flags & _IO_ERR_SEEN) != 0)
, &[_][]const u8{
- \\pub fn __ferror_unlocked_body(_fp: anytype) callconv(.Inline) @TypeOf(((_fp.*._flags) & _IO_ERR_SEEN) != 0) {
- \\ return ((_fp.*._flags) & _IO_ERR_SEEN) != 0;
+ \\pub fn __ferror_unlocked_body(_fp: anytype) callconv(.Inline) @TypeOf((_fp.*._flags & _IO_ERR_SEEN) != 0) {
+ \\ return (_fp.*._flags & _IO_ERR_SEEN) != 0;
\\}
});
@@ -193,9 +388,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\pub export fn foo() void {
\\ while (false) while (false) {};
\\ while (true) while (false) {};
- \\ while (true) while (true) {
- \\ if (!false) break;
- \\ };
+ \\ while (true) {}
\\}
});
@@ -245,15 +438,10 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\ volatile _Atomic int abufused[12];
\\};
, &[_][]const u8{
- \\pub const struct_arcan_shmif_page = //
- ,
- \\warning: unsupported type: 'Atomic'
- \\ opaque {}; //
- ,
- \\ warning: struct demoted to opaque type - unable to translate type of field abufused
- , // TODO should be `addr: *struct_arcan_shmif_page`
+ \\source.h:4:8: warning: struct demoted to opaque type - unable to translate type of field abufused
+ \\pub const struct_arcan_shmif_page = opaque {};
\\pub const struct_arcan_shmif_cont = extern struct {
- \\ addr: [*c]struct_arcan_shmif_page,
+ \\ addr: ?*struct_arcan_shmif_page,
\\};
});
@@ -293,22 +481,22 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
, &[_][]const u8{
\\pub const uuid_t = [16]u8;
\\pub const UUID_NULL: uuid_t = [16]u8{
- \\ @bitCast(u8, @truncate(i8, @as(c_int, 0))),
- \\ @bitCast(u8, @truncate(i8, @as(c_int, 0))),
- \\ @bitCast(u8, @truncate(i8, @as(c_int, 0))),
- \\ @bitCast(u8, @truncate(i8, @as(c_int, 0))),
- \\ @bitCast(u8, @truncate(i8, @as(c_int, 0))),
- \\ @bitCast(u8, @truncate(i8, @as(c_int, 0))),
- \\ @bitCast(u8, @truncate(i8, @as(c_int, 0))),
- \\ @bitCast(u8, @truncate(i8, @as(c_int, 0))),
- \\ @bitCast(u8, @truncate(i8, @as(c_int, 0))),
- \\ @bitCast(u8, @truncate(i8, @as(c_int, 0))),
- \\ @bitCast(u8, @truncate(i8, @as(c_int, 0))),
- \\ @bitCast(u8, @truncate(i8, @as(c_int, 0))),
- \\ @bitCast(u8, @truncate(i8, @as(c_int, 0))),
- \\ @bitCast(u8, @truncate(i8, @as(c_int, 0))),
- \\ @bitCast(u8, @truncate(i8, @as(c_int, 0))),
- \\ @bitCast(u8, @truncate(i8, @as(c_int, 0))),
+ \\ 0,
+ \\ 0,
+ \\ 0,
+ \\ 0,
+ \\ 0,
+ \\ 0,
+ \\ 0,
+ \\ 0,
+ \\ 0,
+ \\ 0,
+ \\ 0,
+ \\ 0,
+ \\ 0,
+ \\ 0,
+ \\ 0,
+ \\ 0,
\\};
});
@@ -362,10 +550,10 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\};
\\pub export var ub: union_unnamed_1 = union_unnamed_1{
\\ .c = [4]u8{
- \\ @bitCast(u8, @truncate(i8, @as(c_int, 'a'))),
- \\ @bitCast(u8, @truncate(i8, @as(c_int, 'b'))),
- \\ @bitCast(u8, @truncate(i8, @as(c_int, 'b'))),
- \\ @bitCast(u8, @truncate(i8, @as(c_int, 'a'))),
+ \\ 'a',
+ \\ 'b',
+ \\ 'b',
+ \\ 'a',
\\ },
\\};
});
@@ -492,7 +680,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
, &[_][]const u8{
\\pub export fn foo() void {
\\ var a: c_int = undefined;
- \\ var b: u8 = @bitCast(u8, @truncate(i8, @as(c_int, 123)));
+ \\ var b: u8 = 123;
\\ const c: c_int = undefined;
\\ const d: c_uint = @bitCast(c_uint, @as(c_int, 440));
\\ var e: c_int = 10;
@@ -514,8 +702,8 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\ var a: c_int = undefined;
\\ _ = @as(c_int, 1);
\\ _ = "hey";
- \\ _ = (@as(c_int, 1) + @as(c_int, 1));
- \\ _ = (@as(c_int, 1) - @as(c_int, 1));
+ \\ _ = @as(c_int, 1) + @as(c_int, 1);
+ \\ _ = @as(c_int, 1) - @as(c_int, 1);
\\ a = 1;
\\}
});
@@ -559,9 +747,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\ '2',
\\ 0,
\\};
- \\pub export fn foo() void {
- \\ _ = v2;
- \\}
+ \\pub export fn foo() void {}
});
cases.add("simple function definition",
@@ -634,9 +820,9 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\ var a: c_int = undefined;
\\ var b: c_int = undefined;
\\ var c: c_int = undefined;
- \\ c = (a + b);
- \\ c = (a - b);
- \\ c = (a * b);
+ \\ c = a + b;
+ \\ c = a - b;
+ \\ c = a * b;
\\ c = @divTrunc(a, b);
\\ c = @rem(a, b);
\\ return 0;
@@ -645,11 +831,11 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\ var a: c_uint = undefined;
\\ var b: c_uint = undefined;
\\ var c: c_uint = undefined;
- \\ c = (a +% b);
- \\ c = (a -% b);
- \\ c = (a *% b);
- \\ c = (a / b);
- \\ c = (a % b);
+ \\ c = a +% b;
+ \\ c = a -% b;
+ \\ c = a *% b;
+ \\ c = a / b;
+ \\ c = a % b;
\\ return 0;
\\}
});
@@ -914,13 +1100,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\ ;;;;;
\\}
, &[_][]const u8{
- \\pub export fn foo() void {
- \\ {}
- \\ {}
- \\ {}
- \\ {}
- \\ {}
- \\}
+ \\pub export fn foo() void {}
});
if (std.Target.current.os.tag != .windows) {
@@ -1335,11 +1515,11 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\extern enum enum_ty my_enum;
\\enum enum_ty { FOO };
, &[_][]const u8{
- \\pub const FOO = @enumToInt(enum_enum_ty.FOO);
\\pub const enum_enum_ty = extern enum(c_int) {
\\ FOO,
\\ _,
\\};
+ \\pub const FOO = @enumToInt(enum_enum_ty.FOO);
\\pub extern var my_enum: enum_enum_ty;
});
@@ -1448,7 +1628,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
, &[_][]const u8{
\\pub fn foo() callconv(.C) void {
\\ var arr: [10]u8 = [1]u8{
- \\ @bitCast(u8, @truncate(i8, @as(c_int, 1))),
+ \\ 1,
\\ } ++ [1]u8{0} ** 9;
\\ var arr1: [10][*c]u8 = [1][*c]u8{
\\ null,
@@ -1481,48 +1661,48 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\ p,
\\};
, &[_][]const u8{
- \\pub const a = @enumToInt(enum_unnamed_1.a);
- \\pub const b = @enumToInt(enum_unnamed_1.b);
- \\pub const c = @enumToInt(enum_unnamed_1.c);
\\const enum_unnamed_1 = extern enum(c_int) {
\\ a,
\\ b,
\\ c,
\\ _,
\\};
+ \\pub const a = @enumToInt(enum_unnamed_1.a);
+ \\pub const b = @enumToInt(enum_unnamed_1.b);
+ \\pub const c = @enumToInt(enum_unnamed_1.c);
\\pub const d = enum_unnamed_1;
- \\pub const e = @enumToInt(enum_unnamed_2.e);
- \\pub const f = @enumToInt(enum_unnamed_2.f);
- \\pub const g = @enumToInt(enum_unnamed_2.g);
\\const enum_unnamed_2 = extern enum(c_int) {
\\ e = 0,
\\ f = 4,
\\ g = 5,
\\ _,
\\};
+ \\pub const e = @enumToInt(enum_unnamed_2.e);
+ \\pub const f = @enumToInt(enum_unnamed_2.f);
+ \\pub const g = @enumToInt(enum_unnamed_2.g);
\\pub export var h: enum_unnamed_2 = @intToEnum(enum_unnamed_2, e);
- \\pub const i = @enumToInt(enum_unnamed_3.i);
- \\pub const j = @enumToInt(enum_unnamed_3.j);
- \\pub const k = @enumToInt(enum_unnamed_3.k);
\\const enum_unnamed_3 = extern enum(c_int) {
\\ i,
\\ j,
\\ k,
\\ _,
\\};
+ \\pub const i = @enumToInt(enum_unnamed_3.i);
+ \\pub const j = @enumToInt(enum_unnamed_3.j);
+ \\pub const k = @enumToInt(enum_unnamed_3.k);
\\pub const struct_Baz = extern struct {
\\ l: enum_unnamed_3,
\\ m: d,
\\};
- \\pub const n = @enumToInt(enum_i.n);
- \\pub const o = @enumToInt(enum_i.o);
- \\pub const p = @enumToInt(enum_i.p);
\\pub const enum_i = extern enum(c_int) {
\\ n,
\\ o,
\\ p,
\\ _,
\\};
+ \\pub const n = @enumToInt(enum_i.n);
+ \\pub const o = @enumToInt(enum_i.o);
+ \\pub const p = @enumToInt(enum_i.p);
,
\\pub const Baz = struct_Baz;
});
@@ -1639,7 +1819,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
cases.add("macro pointer cast",
\\#define NRF_GPIO ((NRF_GPIO_Type *) NRF_GPIO_BASE)
, &[_][]const u8{
- \\pub const NRF_GPIO = (@import("std").meta.cast([*c]NRF_GPIO_Type, NRF_GPIO_BASE));
+ \\pub const NRF_GPIO = @import("std").meta.cast([*c]NRF_GPIO_Type, NRF_GPIO_BASE);
});
cases.add("basic macro function",
@@ -1701,13 +1881,13 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\ unsigned d = 440;
\\}
, &[_][]const u8{
- \\pub var a: c_long = @bitCast(c_long, @as(c_long, @as(c_int, 2)));
- \\pub var b: c_long = @bitCast(c_long, @as(c_long, @as(c_int, 2)));
+ \\pub var a: c_long = 2;
+ \\pub var b: c_long = 2;
\\pub var c: c_int = 4;
\\pub export fn foo(arg_c_1: u8) void {
\\ var c_1 = arg_c_1;
\\ var a_2: c_int = undefined;
- \\ var b_3: u8 = @bitCast(u8, @truncate(i8, @as(c_int, 123)));
+ \\ var b_3: u8 = 123;
\\ b_3 = @bitCast(u8, @truncate(i8, a_2));
\\ {
\\ var d: c_int = 5;
@@ -1723,17 +1903,17 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\}
, &[_][]const u8{
\\pub export fn foo() c_int {
- \\ _ = (blk: {
+ \\ _ = blk: {
\\ _ = @as(c_int, 2);
\\ break :blk @as(c_int, 4);
- \\ });
- \\ return (blk: {
- \\ _ = (blk_1: {
+ \\ };
+ \\ return blk: {
+ \\ _ = blk_1: {
\\ _ = @as(c_int, 2);
\\ break :blk_1 @as(c_int, 4);
- \\ });
+ \\ };
\\ break :blk @as(c_int, 6);
- \\ });
+ \\ };
\\}
});
@@ -1780,20 +1960,16 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\ while (true) {
\\ var a_1: c_int = 4;
\\ a_1 = 9;
- \\ return (blk: {
+ \\ return blk: {
\\ _ = @as(c_int, 6);
\\ break :blk a_1;
- \\ });
+ \\ };
\\ }
\\ while (true) {
\\ var a_1: c_int = 2;
\\ a_1 = 12;
- \\ if (!true) break;
- \\ }
- \\ while (true) {
- \\ a = 7;
- \\ if (!true) break;
\\ }
+ \\ while (true) a = 7;
\\ return 0;
\\}
});
@@ -1813,16 +1989,16 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\ var b: c_int = 4;
\\ while ((i + @as(c_int, 2)) != 0) : (i = 2) {
\\ var a: c_int = 2;
- \\ _ = (blk: {
- \\ _ = (blk_1: {
+ \\ _ = blk: {
+ \\ _ = blk_1: {
\\ a = 6;
\\ break :blk_1 @as(c_int, 5);
- \\ });
+ \\ };
\\ break :blk @as(c_int, 7);
- \\ });
+ \\ };
\\ }
\\ }
- \\ var i: u8 = @bitCast(u8, @truncate(i8, @as(c_int, 2)));
+ \\ var i: u8 = 2;
\\}
});
@@ -1830,7 +2006,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\unsigned anyerror = 2;
\\#define noreturn _Noreturn
, &[_][]const u8{
- \\pub export var anyerror_1: c_uint = @bitCast(c_uint, @as(c_int, 2));
+ \\pub export var anyerror_1: c_uint = 2;
,
\\pub const noreturn_2 = @compileError("unable to translate C expr: unexpected token .Keyword_noreturn");
});
@@ -1844,7 +2020,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\pub export var a: f32 = @floatCast(f32, 3.1415);
\\pub export var b: f64 = 3.1415;
\\pub export var c: c_int = @floatToInt(c_int, 3.1415);
- \\pub export var d: f64 = @intToFloat(f64, @as(c_int, 3));
+ \\pub export var d: f64 = 3;
});
cases.add("conditional operator",
@@ -1854,7 +2030,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\}
, &[_][]const u8{
\\pub export fn bar() c_int {
- \\ if ((if (true) @as(c_int, 5) else (if (true) @as(c_int, 4) else @as(c_int, 6))) != 0) _ = @as(c_int, 2);
+ \\ if ((if (true) @as(c_int, 5) else if (true) @as(c_int, 4) else @as(c_int, 6)) != 0) _ = @as(c_int, 2);
\\ return if (true) @as(c_int, 5) else if (true) @as(c_int, 4) else @as(c_int, 6);
\\}
});
@@ -1870,34 +2046,64 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\ default:
\\ res = 3 * i;
\\ break;
+ \\ break;
+ \\ case 7: {
+ \\ res = 7;
+ \\ break;
+ \\ }
\\ case 4:
+ \\ case 5:
+ \\ res = 69;
+ \\ {
\\ res = 5;
+ \\ return;
+ \\ }
+ \\ case 6:
+ \\ switch (res) {
+ \\ case 9: break;
+ \\ }
+ \\ res = 1;
+ \\ return;
\\ }
\\}
, &[_][]const u8{
\\pub export fn switch_fn(arg_i: c_int) void {
\\ var i = arg_i;
\\ var res: c_int = 0;
- \\ @"switch": {
- \\ case_2: {
- \\ default: {
- \\ case_1: {
- \\ case: {
- \\ switch (i) {
- \\ @as(c_int, 0) => break :case,
- \\ @as(c_int, 1)...@as(c_int, 3) => break :case_1,
- \\ else => break :default,
- \\ @as(c_int, 4) => break :case_2,
- \\ }
- \\ }
- \\ res = 1;
- \\ }
- \\ res = 2;
+ \\ switch (i) {
+ \\ @as(c_int, 0) => {
+ \\ res = 1;
+ \\ res = 2;
+ \\ res = @as(c_int, 3) * i;
+ \\ },
+ \\ @as(c_int, 1)...@as(c_int, 3) => {
+ \\ res = 2;
+ \\ res = @as(c_int, 3) * i;
+ \\ },
+ \\ else => {
+ \\ res = @as(c_int, 3) * i;
+ \\ },
+ \\ @as(c_int, 7) => {
+ \\ {
+ \\ res = 7;
+ \\ break;
\\ }
- \\ res = (@as(c_int, 3) * i);
- \\ break :@"switch";
- \\ }
- \\ res = 5;
+ \\ },
+ \\ @as(c_int, 4), @as(c_int, 5) => {
+ \\ res = 69;
+ \\ {
+ \\ res = 5;
+ \\ return;
+ \\ }
+ \\ },
+ \\ @as(c_int, 6) => {
+ \\ switch (res) {
+ \\ @as(c_int, 9) => {},
+ \\ else => {},
+ \\ }
+ \\ res = 1;
+ \\ return;
+ \\ },
\\ }
\\}
});
@@ -1973,13 +2179,13 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\ Two,
\\};
, &[_][]const u8{
- \\pub const One = @enumToInt(enum_unnamed_1.One);
- \\pub const Two = @enumToInt(enum_unnamed_1.Two);
\\const enum_unnamed_1 = extern enum(c_int) {
\\ One,
\\ Two,
\\ _,
\\};
+ \\pub const One = @enumToInt(enum_unnamed_1.One);
+ \\pub const Two = @enumToInt(enum_unnamed_1.Two);
});
cases.add("c style cast",
@@ -1993,7 +2199,6 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\}
});
- // TODO translate-c should in theory be able to figure out to drop all these casts
cases.add("escape sequences",
\\const char *escapes() {
\\char a = '\'',
@@ -2012,17 +2217,17 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\
, &[_][]const u8{
\\pub export fn escapes() [*c]const u8 {
- \\ var a: u8 = @bitCast(u8, @truncate(i8, @as(c_int, '\'')));
- \\ var b: u8 = @bitCast(u8, @truncate(i8, @as(c_int, '\\')));
- \\ var c: u8 = @bitCast(u8, @truncate(i8, @as(c_int, '\x07')));
- \\ var d: u8 = @bitCast(u8, @truncate(i8, @as(c_int, '\x08')));
- \\ var e: u8 = @bitCast(u8, @truncate(i8, @as(c_int, '\x0c')));
- \\ var f: u8 = @bitCast(u8, @truncate(i8, @as(c_int, '\n')));
- \\ var g: u8 = @bitCast(u8, @truncate(i8, @as(c_int, '\r')));
- \\ var h: u8 = @bitCast(u8, @truncate(i8, @as(c_int, '\t')));
- \\ var i: u8 = @bitCast(u8, @truncate(i8, @as(c_int, '\x0b')));
- \\ var j: u8 = @bitCast(u8, @truncate(i8, @as(c_int, '\x00')));
- \\ var k: u8 = @bitCast(u8, @truncate(i8, @as(c_int, '\"')));
+ \\ var a: u8 = '\'';
+ \\ var b: u8 = '\\';
+ \\ var c: u8 = '\x07';
+ \\ var d: u8 = '\x08';
+ \\ var e: u8 = '\x0c';
+ \\ var f: u8 = '\n';
+ \\ var g: u8 = '\r';
+ \\ var h: u8 = '\t';
+ \\ var i: u8 = '\x0b';
+ \\ var j: u8 = '\x00';
+ \\ var k: u8 = '\"';
\\ return "\'\\\x07\x08\x0c\n\r\t\x0b\x00\"";
\\}
});
@@ -2043,12 +2248,12 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\pub export fn foo() void {
\\ var a: c_int = 2;
\\ while (true) {
- \\ a = (a - @as(c_int, 1));
+ \\ a = a - @as(c_int, 1);
\\ if (!(a != 0)) break;
\\ }
\\ var b: c_int = 2;
\\ while (true) {
- \\ b = (b - @as(c_int, 1));
+ \\ b = b - @as(c_int, 1);
\\ if (!(b != 0)) break;
\\ }
\\}
@@ -2084,25 +2289,28 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\ C,
\\ _,
\\};
+ \\pub const FooA = @enumToInt(enum_Foo.A);
+ \\pub const FooB = @enumToInt(enum_Foo.B);
+ \\pub const FooC = @enumToInt(enum_Foo.C);
\\pub const SomeTypedef = c_int;
\\pub export fn and_or_non_bool(arg_a: c_int, arg_b: f32, arg_c: ?*c_void) c_int {
\\ var a = arg_a;
\\ var b = arg_b;
\\ var c = arg_c;
\\ var d: enum_Foo = @intToEnum(enum_Foo, FooA);
- \\ var e: c_int = @boolToInt(((a != 0) and (b != 0)));
- \\ var f: c_int = @boolToInt(((b != 0) and (c != null)));
- \\ var g: c_int = @boolToInt(((a != 0) and (c != null)));
- \\ var h: c_int = @boolToInt(((a != 0) or (b != 0)));
- \\ var i: c_int = @boolToInt(((b != 0) or (c != null)));
- \\ var j: c_int = @boolToInt(((a != 0) or (c != null)));
- \\ var k: c_int = @boolToInt(((a != 0) or (@bitCast(c_int, @enumToInt(d)) != 0)));
- \\ var l: c_int = @boolToInt(((@bitCast(c_int, @enumToInt(d)) != 0) and (b != 0)));
- \\ var m: c_int = @boolToInt(((c != null) or (@bitCast(c_uint, @enumToInt(d)) != 0)));
+ \\ var e: c_int = @boolToInt((a != 0) and (b != 0));
+ \\ var f: c_int = @boolToInt((b != 0) and (c != null));
+ \\ var g: c_int = @boolToInt((a != 0) and (c != null));
+ \\ var h: c_int = @boolToInt((a != 0) or (b != 0));
+ \\ var i: c_int = @boolToInt((b != 0) or (c != null));
+ \\ var j: c_int = @boolToInt((a != 0) or (c != null));
+ \\ var k: c_int = @boolToInt((a != 0) or (@bitCast(c_int, @enumToInt(d)) != 0));
+ \\ var l: c_int = @boolToInt((@bitCast(c_int, @enumToInt(d)) != 0) and (b != 0));
+ \\ var m: c_int = @boolToInt((c != null) or (@bitCast(c_uint, @enumToInt(d)) != 0));
\\ var td: SomeTypedef = 44;
- \\ var o: c_int = @boolToInt(((td != 0) or (b != 0)));
- \\ var p: c_int = @boolToInt(((c != null) and (td != 0)));
- \\ return ((((((((((e + f) + g) + h) + i) + j) + k) + l) + m) + o) + p);
+ \\ var o: c_int = @boolToInt((td != 0) or (b != 0));
+ \\ var p: c_int = @boolToInt((c != null) and (td != 0));
+ \\ return (((((((((e + f) + g) + h) + i) + j) + k) + l) + m) + o) + p;
\\}
,
\\pub const Foo = enum_Foo;
@@ -2129,6 +2337,8 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\ B,
\\ _,
\\};
+ \\pub const BarA = @enumToInt(enum_Bar.A);
+ \\pub const BarB = @enumToInt(enum_Bar.B);
\\pub extern fn func(a: [*c]struct_Foo, b: [*c][*c]enum_Bar) void;
,
\\pub const Foo = struct_Foo;
@@ -2143,7 +2353,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\pub export fn max(arg_a: c_int, arg_b: c_int) c_int {
\\ var a = arg_a;
\\ var b = arg_b;
- \\ return ((a & b) ^ (a | b));
+ \\ return (a & b) ^ (a | b);
\\}
});
@@ -2162,13 +2372,13 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\pub export fn test_comparisons(arg_a: c_int, arg_b: c_int) c_int {
\\ var a = arg_a;
\\ var b = arg_b;
- \\ var c: c_int = @boolToInt((a < b));
- \\ var d: c_int = @boolToInt((a > b));
- \\ var e: c_int = @boolToInt((a <= b));
- \\ var f: c_int = @boolToInt((a >= b));
- \\ var g: c_int = @boolToInt((c < d));
- \\ var h: c_int = @boolToInt((e < f));
- \\ var i: c_int = @boolToInt((g < h));
+ \\ var c: c_int = @boolToInt(a < b);
+ \\ var d: c_int = @boolToInt(a > b);
+ \\ var e: c_int = @boolToInt(a <= b);
+ \\ var f: c_int = @boolToInt(a >= b);
+ \\ var g: c_int = @boolToInt(c < d);
+ \\ var h: c_int = @boolToInt(e < f);
+ \\ var i: c_int = @boolToInt(g < h);
\\ return i;
\\}
});
@@ -2215,11 +2425,11 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\}
, &[_][]const u8{
\\pub export fn foo() c_int {
- \\ return (blk: {
+ \\ return blk: {
\\ var a: c_int = 1;
\\ _ = a;
\\ break :blk a;
- \\ });
+ \\ };
\\}
});
@@ -2289,8 +2499,8 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
, &[_][]const u8{
\\pub export fn foo() void {
\\ var a: [10]c_longlong = undefined;
- \\ var i: c_longlong = @bitCast(c_longlong, @as(c_longlong, @as(c_int, 0)));
- \\ a[@intCast(usize, i)] = @bitCast(c_longlong, @as(c_longlong, @as(c_int, 0)));
+ \\ var i: c_longlong = 0;
+ \\ a[@intCast(usize, i)] = 0;
\\}
});
@@ -2302,8 +2512,8 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
, &[_][]const u8{
\\pub export fn foo() void {
\\ var a: [10]c_uint = undefined;
- \\ var i: c_uint = @bitCast(c_uint, @as(c_int, 0));
- \\ a[i] = @bitCast(c_uint, @as(c_int, 0));
+ \\ var i: c_uint = 0;
+ \\ a[i] = 0;
\\}
});
@@ -2395,6 +2605,9 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\ C,
\\ _,
\\};
+ \\pub const A = @enumToInt(enum_SomeEnum.A);
+ \\pub const B = @enumToInt(enum_SomeEnum.B);
+ \\pub const C = @enumToInt(enum_SomeEnum.C);
\\pub export fn if_none_bool(arg_a: c_int, arg_b: f32, arg_c: ?*c_void, arg_d: enum_SomeEnum) c_int {
\\ var a = arg_a;
\\ var b = arg_b;
@@ -2484,10 +2697,10 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\ var f: ?fn () callconv(.C) void = foo;
\\ var b: ?fn () callconv(.C) c_int = baz;
\\ f.?();
- \\ (f).?();
+ \\ f.?();
\\ foo();
\\ _ = b.?();
- \\ _ = (b).?();
+ \\ _ = b.?();
\\ _ = baz();
\\}
});
@@ -2508,31 +2721,31 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
, &[_][]const u8{
\\pub export fn foo() void {
\\ var i: c_int = 0;
- \\ var u: c_uint = @bitCast(c_uint, @as(c_int, 0));
+ \\ var u: c_uint = 0;
\\ i += 1;
\\ i -= 1;
\\ u +%= 1;
\\ u -%= 1;
- \\ i = (blk: {
+ \\ i = blk: {
\\ const ref = &i;
\\ ref.* += 1;
\\ break :blk ref.*;
- \\ });
- \\ i = (blk: {
+ \\ };
+ \\ i = blk: {
\\ const ref = &i;
\\ ref.* -= 1;
\\ break :blk ref.*;
- \\ });
- \\ u = (blk: {
+ \\ };
+ \\ u = blk: {
\\ const ref = &u;
\\ ref.* +%= 1;
\\ break :blk ref.*;
- \\ });
- \\ u = (blk: {
+ \\ };
+ \\ u = blk: {
\\ const ref = &u;
\\ ref.* -%= 1;
\\ break :blk ref.*;
- \\ });
+ \\ };
\\}
});
@@ -2595,67 +2808,67 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
, &[_][]const u8{
\\pub export fn foo() void {
\\ var a: c_int = 0;
- \\ var b: c_uint = @bitCast(c_uint, @as(c_int, 0));
- \\ a += (blk: {
+ \\ var b: c_uint = 0;
+ \\ a += blk: {
\\ const ref = &a;
- \\ ref.* = ref.* + @as(c_int, 1);
+ \\ ref.* += @as(c_int, 1);
\\ break :blk ref.*;
- \\ });
- \\ a -= (blk: {
+ \\ };
+ \\ a -= blk: {
\\ const ref = &a;
- \\ ref.* = ref.* - @as(c_int, 1);
+ \\ ref.* -= @as(c_int, 1);
\\ break :blk ref.*;
- \\ });
- \\ a *= (blk: {
+ \\ };
+ \\ a *= blk: {
\\ const ref = &a;
- \\ ref.* = ref.* * @as(c_int, 1);
+ \\ ref.* *= @as(c_int, 1);
\\ break :blk ref.*;
- \\ });
- \\ a &= (blk: {
+ \\ };
+ \\ a &= blk: {
\\ const ref = &a;
- \\ ref.* = ref.* & @as(c_int, 1);
+ \\ ref.* &= @as(c_int, 1);
\\ break :blk ref.*;
- \\ });
- \\ a |= (blk: {
+ \\ };
+ \\ a |= blk: {
\\ const ref = &a;
- \\ ref.* = ref.* | @as(c_int, 1);
+ \\ ref.* |= @as(c_int, 1);
\\ break :blk ref.*;
- \\ });
- \\ a ^= (blk: {
+ \\ };
+ \\ a ^= blk: {
\\ const ref = &a;
- \\ ref.* = ref.* ^ @as(c_int, 1);
+ \\ ref.* ^= @as(c_int, 1);
\\ break :blk ref.*;
- \\ });
- \\ a >>= @intCast(@import("std").math.Log2Int(c_int), (blk: {
+ \\ };
+ \\ a >>= @intCast(@import("std").math.Log2Int(c_int), blk: {
\\ const ref = &a;
- \\ ref.* = ref.* >> @intCast(@import("std").math.Log2Int(c_int), @as(c_int, 1));
+ \\ ref.* >>= @intCast(@import("std").math.Log2Int(c_int), @as(c_int, 1));
\\ break :blk ref.*;
- \\ }));
- \\ a <<= @intCast(@import("std").math.Log2Int(c_int), (blk: {
+ \\ });
+ \\ a <<= @intCast(@import("std").math.Log2Int(c_int), blk: {
\\ const ref = &a;
- \\ ref.* = ref.* << @intCast(@import("std").math.Log2Int(c_int), @as(c_int, 1));
+ \\ ref.* <<= @intCast(@import("std").math.Log2Int(c_int), @as(c_int, 1));
\\ break :blk ref.*;
- \\ }));
- \\ a = @divTrunc(a, (blk: {
+ \\ });
+ \\ a = @divTrunc(a, blk: {
\\ const ref = &a;
\\ ref.* = @divTrunc(ref.*, @as(c_int, 1));
\\ break :blk ref.*;
- \\ }));
- \\ a = @rem(a, (blk: {
+ \\ });
+ \\ a = @rem(a, blk: {
\\ const ref = &a;
\\ ref.* = @rem(ref.*, @as(c_int, 1));
\\ break :blk ref.*;
- \\ }));
- \\ b /= (blk: {
+ \\ });
+ \\ b /= blk: {
\\ const ref = &b;
- \\ ref.* = ref.* / @bitCast(c_uint, @as(c_int, 1));
+ \\ ref.* /= @bitCast(c_uint, @as(c_int, 1));
\\ break :blk ref.*;
- \\ });
- \\ b %= (blk: {
+ \\ };
+ \\ b %= blk: {
\\ const ref = &b;
- \\ ref.* = ref.* % @bitCast(c_uint, @as(c_int, 1));
+ \\ ref.* %= @bitCast(c_uint, @as(c_int, 1));
\\ break :blk ref.*;
- \\ });
+ \\ };
\\}
});
@@ -2673,47 +2886,47 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\}
, &[_][]const u8{
\\pub export fn foo() void {
- \\ var a: c_uint = @bitCast(c_uint, @as(c_int, 0));
- \\ a +%= (blk: {
+ \\ var a: c_uint = 0;
+ \\ a +%= blk: {
\\ const ref = &a;
- \\ ref.* = ref.* +% @bitCast(c_uint, @as(c_int, 1));
+ \\ ref.* +%= @bitCast(c_uint, @as(c_int, 1));
\\ break :blk ref.*;
- \\ });
- \\ a -%= (blk: {
+ \\ };
+ \\ a -%= blk: {
\\ const ref = &a;
- \\ ref.* = ref.* -% @bitCast(c_uint, @as(c_int, 1));
+ \\ ref.* -%= @bitCast(c_uint, @as(c_int, 1));
\\ break :blk ref.*;
- \\ });
- \\ a *%= (blk: {
+ \\ };
+ \\ a *%= blk: {
\\ const ref = &a;
- \\ ref.* = ref.* *% @bitCast(c_uint, @as(c_int, 1));
+ \\ ref.* *%= @bitCast(c_uint, @as(c_int, 1));
\\ break :blk ref.*;
- \\ });
- \\ a &= (blk: {
+ \\ };
+ \\ a &= blk: {
\\ const ref = &a;
- \\ ref.* = ref.* & @bitCast(c_uint, @as(c_int, 1));
+ \\ ref.* &= @bitCast(c_uint, @as(c_int, 1));
\\ break :blk ref.*;
- \\ });
- \\ a |= (blk: {
+ \\ };
+ \\ a |= blk: {
\\ const ref = &a;
- \\ ref.* = ref.* | @bitCast(c_uint, @as(c_int, 1));
+ \\ ref.* |= @bitCast(c_uint, @as(c_int, 1));
\\ break :blk ref.*;
- \\ });
- \\ a ^= (blk: {
+ \\ };
+ \\ a ^= blk: {
\\ const ref = &a;
- \\ ref.* = ref.* ^ @bitCast(c_uint, @as(c_int, 1));
+ \\ ref.* ^= @bitCast(c_uint, @as(c_int, 1));
\\ break :blk ref.*;
- \\ });
- \\ a >>= @intCast(@import("std").math.Log2Int(c_uint), (blk: {
+ \\ };
+ \\ a >>= @intCast(@import("std").math.Log2Int(c_uint), blk: {
\\ const ref = &a;
- \\ ref.* = ref.* >> @intCast(@import("std").math.Log2Int(c_int), @as(c_int, 1));
+ \\ ref.* >>= @intCast(@import("std").math.Log2Int(c_int), @as(c_int, 1));
\\ break :blk ref.*;
- \\ }));
- \\ a <<= @intCast(@import("std").math.Log2Int(c_uint), (blk: {
+ \\ });
+ \\ a <<= @intCast(@import("std").math.Log2Int(c_uint), blk: {
\\ const ref = &a;
- \\ ref.* = ref.* << @intCast(@import("std").math.Log2Int(c_int), @as(c_int, 1));
+ \\ ref.* <<= @intCast(@import("std").math.Log2Int(c_int), @as(c_int, 1));
\\ break :blk ref.*;
- \\ }));
+ \\ });
\\}
});
@@ -2733,35 +2946,35 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
, &[_][]const u8{
\\pub export fn foo() void {
\\ var i: c_int = 0;
- \\ var u: c_uint = @bitCast(c_uint, @as(c_int, 0));
+ \\ var u: c_uint = 0;
\\ i += 1;
\\ i -= 1;
\\ u +%= 1;
\\ u -%= 1;
- \\ i = (blk: {
+ \\ i = blk: {
\\ const ref = &i;
\\ const tmp = ref.*;
\\ ref.* += 1;
\\ break :blk tmp;
- \\ });
- \\ i = (blk: {
+ \\ };
+ \\ i = blk: {
\\ const ref = &i;
\\ const tmp = ref.*;
\\ ref.* -= 1;
\\ break :blk tmp;
- \\ });
- \\ u = (blk: {
+ \\ };
+ \\ u = blk: {
\\ const ref = &u;
\\ const tmp = ref.*;
\\ ref.* +%= 1;
\\ break :blk tmp;
- \\ });
- \\ u = (blk: {
+ \\ };
+ \\ u = blk: {
\\ const ref = &u;
\\ const tmp = ref.*;
\\ ref.* -%= 1;
\\ break :blk tmp;
- \\ });
+ \\ };
\\}
});
@@ -2854,15 +3067,15 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\ Foo1,
\\};
, &[_][]const u8{
- \\pub const FooA = @enumToInt(enum_Foo.A);
- \\pub const FooB = @enumToInt(enum_Foo.B);
- \\pub const Foo1 = @enumToInt(enum_Foo.@"1");
\\pub const enum_Foo = extern enum(c_int) {
\\ A = 2,
\\ B = 5,
\\ @"1" = 6,
\\ _,
\\};
+ \\pub const FooA = @enumToInt(enum_Foo.A);
+ \\pub const FooB = @enumToInt(enum_Foo.B);
+ \\pub const Foo1 = @enumToInt(enum_Foo.@"1");
,
\\pub const Foo = enum_Foo;
});
@@ -2872,13 +3085,13 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\#define BAR (void*) a
\\#define BAZ (uint32_t)(2)
, &[_][]const u8{
- \\pub fn FOO(bar: anytype) callconv(.Inline) @TypeOf(baz((@import("std").meta.cast(?*c_void, baz)))) {
- \\ return baz((@import("std").meta.cast(?*c_void, baz)));
+ \\pub fn FOO(bar: anytype) callconv(.Inline) @TypeOf(baz(@import("std").meta.cast(?*c_void, baz))) {
+ \\ return baz(@import("std").meta.cast(?*c_void, baz));
\\}
,
- \\pub const BAR = (@import("std").meta.cast(?*c_void, a));
+ \\pub const BAR = @import("std").meta.cast(?*c_void, a);
,
- \\pub const BAZ = (@import("std").meta.cast(u32, 2));
+ \\pub const BAZ = @import("std").meta.cast(u32, 2);
});
cases.add("macro with cast to unsigned short, long, and long long",
@@ -2886,9 +3099,9 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\#define CURLAUTH_BASIC ((unsigned long) 1)
\\#define CURLAUTH_BASIC_BUT_ULONGLONG ((unsigned long long) 1)
, &[_][]const u8{
- \\pub const CURLAUTH_BASIC_BUT_USHORT = (@import("std").meta.cast(c_ushort, 1));
- \\pub const CURLAUTH_BASIC = (@import("std").meta.cast(c_ulong, 1));
- \\pub const CURLAUTH_BASIC_BUT_ULONGLONG = (@import("std").meta.cast(c_ulonglong, 1));
+ \\pub const CURLAUTH_BASIC_BUT_USHORT = @import("std").meta.cast(c_ushort, 1);
+ \\pub const CURLAUTH_BASIC = @import("std").meta.cast(c_ulong, 1);
+ \\pub const CURLAUTH_BASIC_BUT_ULONGLONG = @import("std").meta.cast(c_ulonglong, 1);
});
cases.add("macro conditional operator",
@@ -2904,9 +3117,7 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\}
, &[_][]const u8{
\\pub fn foo() callconv(.C) void {
- \\ if (true) while (true) {
- \\ if (!false) break;
- \\ };
+ \\ if (true) {}
\\}
});
@@ -2923,7 +3134,6 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\}
});
- // TODO: detect to use different block labels here
cases.add("nested assignment",
\\int foo(int *p, int x) {
\\ return *p++ = x;
@@ -3033,10 +3243,10 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
, &[_][]const u8{
\\pub export fn foo(arg_x: bool) bool {
\\ var x = arg_x;
- \\ var a: bool = (@as(c_int, @boolToInt(x)) != @as(c_int, 1));
- \\ var b: bool = (@as(c_int, @boolToInt(a)) != @as(c_int, 0));
+ \\ var a: bool = @as(c_int, @boolToInt(x)) != @as(c_int, 1);
+ \\ var b: bool = @as(c_int, @boolToInt(a)) != @as(c_int, 0);
\\ var c: bool = @ptrToInt(foo) != 0;
- \\ return foo((@as(c_int, @boolToInt(c)) != @as(c_int, @boolToInt(b))));
+ \\ return foo(@as(c_int, @boolToInt(c)) != @as(c_int, @boolToInt(b)));
\\}
});
@@ -3106,8 +3316,8 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\#define DefaultScreen(dpy) (((_XPrivDisplay)(dpy))->default_screen)
\\
, &[_][]const u8{
- \\pub fn DefaultScreen(dpy: anytype) callconv(.Inline) @TypeOf((@import("std").meta.cast(_XPrivDisplay, dpy)).*.default_screen) {
- \\ return (@import("std").meta.cast(_XPrivDisplay, dpy)).*.default_screen;
+ \\pub fn DefaultScreen(dpy: anytype) callconv(.Inline) @TypeOf(@import("std").meta.cast(_XPrivDisplay, dpy).*.default_screen) {
+ \\ return @import("std").meta.cast(_XPrivDisplay, dpy).*.default_screen;
\\}
});
@@ -3115,9 +3325,9 @@ pub fn addCases(cases: *tests.TranslateCContext) void {
\\#define NULL ((void*)0)
\\#define FOO ((int)0x8000)
, &[_][]const u8{
- \\pub const NULL = (@import("std").meta.cast(?*c_void, 0));
+ \\pub const NULL = @import("std").meta.cast(?*c_void, 0);
,
- \\pub const FOO = (@import("std").meta.cast(c_int, 0x8000));
+ \\pub const FOO = @import("std").meta.cast(c_int, 0x8000);
});
if (std.Target.current.abi == .msvc) {