If you are wondering where the data on this site comes from, please visit https://api.github.com/users/Vexu/events. GitMemory does not store any data; it only uses NGINX to cache data for a period of time. The idea behind GitMemory is simply to give users a better reading experience.
Veikka Tuominen (Vexu) · Finland · vexu.eu · Doing Zig stuff.

Vexu/arocc ★150: A C compiler written in Zig.

ziglang/vscode-zig ★135: Zig language support for VSCode.

Vexu/routez ★130: HTTP server for Zig.

Vexu/bog ★108: Small, strongly typed, embeddable language.

Vexu/zuri ★27: URI parser for Zig.

Vexu/Furigana-TextView ★16: TextView for Android that supports furigana and text formatting.

Vexu/SuperCube-API ★11: Android API for the Xiaomi GiiKER Super Cube i3.

Vexu/comptime_hash_map ★8: A statically initialized HashMap.

Pull request review comment on Vexu/arocc

Fix preprocessor expansion strategy

 fn skipToNl(tokenizer: *Tokenizer) void {
 }
 
 const ExpandBuf = std.ArrayList(Token);
+const MacroArguments = std.ArrayList([]const Token);
+
+fn expandObjMacro(pp: *Preprocessor, simple_macro: *const Macro.Simple) Error!ExpandBuf {
+    var buf = ExpandBuf.init(pp.comp.gpa);
+    try buf.ensureCapacity(simple_macro.tokens.len);
+
+    // Add all of the simple_macros tokens to the new buffer handling any concats.
+    var i: usize = 0;
+    while (i < simple_macro.tokens.len) : (i += 1) {
+        const raw = simple_macro.tokens[i];
+        if (raw.id == .hash_hash) {
+            _ = buf.pop();
+            const lhs = tokFromRaw(simple_macro.tokens[i - 1]);
+            const rhs = tokFromRaw(simple_macro.tokens[i + 1]);
+            i += 1;
+            buf.appendAssumeCapacity(try pp.pasteTokens(lhs, rhs));
+        } else {
+            buf.appendAssumeCapacity(tokFromRaw(raw));
+        }
+    }
+
+    return buf;
+}
+
+fn expandFuncMacro(pp: *Preprocessor, func_macro: *const Macro.Func, args: *const MacroArguments, expanded_args: *const MacroArguments) Error!ExpandBuf {
+    var buf = ExpandBuf.init(pp.comp.gpa);
+    try buf.ensureCapacity(func_macro.tokens.len);
+
+    var expanded_variable_arguments = ExpandBuf.init(pp.comp.gpa);
+    defer expanded_variable_arguments.deinit();
+    var variable_arguments = ExpandBuf.init(pp.comp.gpa);
+    defer variable_arguments.deinit();
+
+    if (func_macro.var_args) {
+        var i: usize = func_macro.params.len;
+        while (i < expanded_args.items.len) : (i += 1) {
+            try variable_arguments.appendSlice(args.items[i]);
+            try expanded_variable_arguments.appendSlice(expanded_args.items[i]);
+            if (i != expanded_args.items.len-1 ) {
+                // TODO: use generated buffer?

Should not be needed as .comma will always map to ",".

doppioandante

comment created 7 hours ago

Pull request review comment on Vexu/arocc

Fix preprocessor expansion strategy

 pub fn expandedSlice(pp: *Preprocessor, tok: Token) []const u8 {
     return tmp_tokenizer.buf[res.start..res.end];
 }
 
+fn pasteTokens2(pp: *Preprocessor, lhs: []const Token, rhs: []const Token) Error![]const Token {

This doesn't seem at all correct; as far as I understand, pasting is supposed to form a single token without doing any expansion.

doppioandante

comment created 7 hours ago

Pull request review comment on Vexu/arocc

Fix preprocessor expansion strategy

+    // token concatenation and expansion phase
+    var tok_i: usize = 0;
+    while (tok_i < func_macro.tokens.len) : (tok_i += 1) {
+        const raw = func_macro.tokens[tok_i];
+        switch (raw.id) {
+            .hash_hash_from_param => {
+                var new_tok = raw;
+                new_tok.id = .hash_hash;
+                try buf.append(tokFromRaw(new_tok));
+             },
+            .hash_hash => {
+                const raw_prev = func_macro.tokens[tok_i - 1];
+                // TODO: is tok_i+1 ever out of bound?

Should not be possible due to the check in defineFn.

doppioandante

comment created 7 hours ago

Pull request review comment on Vexu/arocc

Fix preprocessor expansion strategy

+fn shouldExpand(tok: Token, macro: *Macro) bool {
+    const macro_loc = switch (macro.*) {
+        .simple => |smacro| smacro.loc,
+        .func => |smacro| smacro.loc,
+        else => unreachable
+    };
+    var maybe_loc = tok.loc.next;
+    while (maybe_loc) |loc| {

A future improvement would be to convert the source location mechanism to use contiguous arrays instead of linked lists, now that the location chains are iterated over a lot more.
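
A rough sketch of what that could look like (hypothetical names, not arocc code): keep expansion locations in one flat array and chain them by index, so the check in shouldExpand becomes an index walk instead of pointer chasing:

const std = @import("std");

// Sketch only: a contiguous pool of expansion locations chained by index.
const LocIndex = u32;
const no_loc: LocIndex = std.math.maxInt(LocIndex);

const Loc = struct {
    id: u32, // source id, simplified for the sketch
    byte_offset: u32,
    next: LocIndex = no_loc, // index into the pool instead of a pointer
};

// True if `macro_loc` already appears in the expansion chain starting at
// `start`; mirrors the loop in shouldExpand above.
fn wasExpandedFrom(pool: []const Loc, start: LocIndex, macro_loc: Loc) bool {
    var i = start;
    while (i != no_loc) : (i = pool[i].next) {
        if (pool[i].id == macro_loc.id and pool[i].byte_offset == macro_loc.byte_offset)
            return true;
    }
    return false;
}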

doppioandante

comment created 7 hours ago

Pull request review comment on Vexu/arocc

Fix preprocessor expansion strategy

+                    .func => |func_macro| {
+                        var macro_scan_idx = idx;
+                        if (try pp.collectMacroFuncArguments(tokenizer, buf, &macro_scan_idx, &moving_end_idx, extend_buf)) |args| {
+                            // TODO: bug?
+                            //defer for (args.items) |item| {
+                            //    pp.comp.gpa.free(item);
+                            //};
+                            //defer args.deinit();
+                            var args_count = @intCast(u32, args.items.len);
+                            // if the macro has zero arguments g() args_count is still 1
+                            if (args_count == 1 and func_macro.params.len == 0) args_count = 0;
+
+                            // Validate argument count.
+                            const extra = Diagnostics.Message.Extra{ .arguments = .{ .expected = @intCast(u32, func_macro.params.len), .actual = args_count } };
+                            var validated = true;
+                            if (func_macro.var_args and args_count < func_macro.params.len) {
+                                try pp.comp.diag.add(.{ .tag = .expected_at_least_arguments, .loc = buf.items[idx].loc, .extra = extra });
+                                validated = false;
+                                idx += 1;
+                                continue;

Were there some issues here earlier?

doppioandante

comment created 7 hours ago

Pull request review comment on Vexu/arocc

Fix preprocessor expansion strategy

+fn collectMacroFuncArguments(pp: *Preprocessor, tokenizer: *Tokenizer, buf: *ExpandBuf, start_idx: *usize, end_idx: *usize, extend_buf: bool) Error!(?MacroArguments) {
+    const name_tok = buf.items[start_idx.*];
+    const l_paren_tok = try nextBufToken(tokenizer, buf, start_idx, end_idx, extend_buf);
+    if (l_paren_tok.id != .l_paren) {
+        // Not a macro function call, go over normal identifier.
+        return null;
+    }
+
+    // collect the arguments.
+    var parens: u32 = 0;
+    var args = MacroArguments.init(pp.comp.gpa);

Missing errdefer.
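
Presumably something along these lines right after args is created (a sketch, mirroring the cleanup discussed in a later comment):

    var args = MacroArguments.init(pp.comp.gpa);
    // Free any argument slices already collected if a later append fails.
    errdefer {
        for (args.items) |item| pp.comp.gpa.free(item);
        args.deinit();
    }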

doppioandante

comment created 7 hours ago

Pull request review comment on Vexu/arocc

Fix preprocessor expansion strategy

+                    .func => |func_macro| {
+                        var macro_scan_idx = idx;
+                        if (try pp.collectMacroFuncArguments(tokenizer, buf, &macro_scan_idx, &moving_end_idx, extend_buf)) |args| {
+                            // TODO: bug?
+                            //defer for (args.items) |item| {
+                            //    pp.comp.gpa.free(item);
+                            //};
+                            //defer args.deinit();

Putting the for loop inside a block in the defer seems to fix this for me:

defer {
    for (args.items) |item| {
        pp.comp.gpa.free(item);
    }
    args.deinit();
}

(A likely reason: defers run in reverse order, so the separate `defer args.deinit();` would run before the loop that frees each item, leaving that loop iterating over freed memory; a single defer block makes the cleanup order explicit.)
doppioandante

comment created 7 hours ago

Pull request review comment on Vexu/arocc

Fix preprocessor expansion strategy

+#define EXPECTED_TOKENS 2*f(9)

Both clang and gcc give `2*9*g` here, so if you don't end up fixing it in this PR, a note and a `#define TESTS_SKIPPED 1` would be useful.

doppioandante

comment created 8 hours ago

Pull request review comment on Vexu/arocc

Fix preprocessor expansion strategy

 const max_include_depth = 200;
 
 const Macro = union(enum) {
     /// #define FOO
     empty,

The `empty` special case could also be removed in favor of `simple` with 0 tokens. And while we're at it, `Macro` could be replaced entirely with `Func` by adding an `is_func` flag that would fit in the padding.
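
A sketch of the suggested layout (illustrative only; field names borrowed from the surrounding code, not the actual patch):

// Sketch: `empty` becomes a macro with zero tokens, and a single struct
// replaces the union, with `is_func` marking function-like macros.
const Macro = struct {
    params: []const []const u8 = &[_][]const u8{},
    tokens: []const RawToken = &[_]RawToken{},
    var_args: bool = false,
    is_func: bool = false, // fits in the existing padding
    loc: Source.Location,
};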

doppioandante

comment created 8 hours ago

push event ziglang/zig

Mr. Paul

commit sha 25266d08046df6032007b46346faf01a2f40ef31

Langref: fix HTML escaped symbol WASM JavaScript code example

docgen HTML escapes characters inside of `syntax_block`s. This commit replaces the escaped greater than with the `>` character. No other occurrences were found.

Fixes #9840

view details

pushed 8 hours ago

PR merged ziglang/zig

Langref: fix HTML escaped symbol WASM JavaScript code example

docgen HTML escapes characters inside of syntax_blocks. This commit replaces the escaped greater than with the > character. No other occurrences were found.

Fixes #9840

+2 -2

0 comment

1 changed file

paulespinosa

PR closed 8 hours ago

issue closed ziglang/zig

docgen.zig bug with double-escaped html codes

In the WASM section of the docs, the test.js example has some double-escaped HTML. The &gt; escape code is escaped to become &amp;gt;. This is the only place in the docs where I have observed this double-escaping. I had a look at docgen.zig but it was not clear what is different about this section vs. all the other places where HTML escaping is done.

Current output:

const fs = require('fs');
const source = fs.readFileSync("./math.wasm");
const typedArray = new Uint8Array(source);

WebAssembly.instantiate(typedArray, {
  env: {
    print: (result) =&gt; { console.log(`The result is ${result}`); }
  }}).then(result =&gt; {
  const add = result.instance.exports.add;
  add(1, 2);
});

Expected output:

const fs = require('fs');
const source = fs.readFileSync("./math.wasm");
const typedArray = new Uint8Array(source);

WebAssembly.instantiate(typedArray, {
  env: {
    print: (result) => { console.log(`The result is ${result}`); }
  }}).then(result => {
  const add = result.instance.exports.add;
  add(1, 2);
});
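
The failure mode is ordinary double escaping: running an HTML-escape pass over text that already contains entities. A standalone sketch (not docgen's actual code; recent Zig syntax):

const std = @import("std");

// Sketch only: a naive escape pass; applying it twice turns "=>" into
// "=&amp;gt;" because the '&' of "&gt;" gets escaped again.
fn escapeHtml(gpa: std.mem.Allocator, input: []const u8) ![]u8 {
    var out = std.ArrayList(u8).init(gpa);
    errdefer out.deinit();
    for (input) |c| switch (c) {
        '&' => try out.appendSlice("&amp;"),
        '<' => try out.appendSlice("&lt;"),
        '>' => try out.appendSlice("&gt;"),
        else => try out.append(c),
    };
    return out.toOwnedSlice();
}

test "escaping twice double-escapes" {
    const gpa = std.testing.allocator;
    const once = try escapeHtml(gpa, "=>");
    defer gpa.free(once);
    try std.testing.expectEqualStrings("=&gt;", once);

    const twice = try escapeHtml(gpa, once);
    defer gpa.free(twice);
    try std.testing.expectEqualStrings("=&amp;gt;", twice);
}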

closed 8 hours ago

guidorice

pull request comment on Vexu/arocc

Parser: improve typeof support

Thanks! That seems like something that could be improved but it's probably not worth investigating before attributed types are implemented.

ehaas

comment created 8 hours ago

push event Vexu/arocc

Evan Haas

commit sha e3dc054a5db2461c46cbd06e7b4e9939bc8707e0

Parser: improve typeof support

Add two new type specifiers: `typeof_type` and `typeof_expr`, which are the types returned by `typeof` (depending on whether it's called with a type or an expression). This allows us to track the underlying type or expression that was used.

view details

Evan Haas

commit sha 26f4cf346a5f1b6fc3aa36aa4afd9125eb4376cd

Type: Add `.is` method for checking specifier

Allows typeof() types to function correctly with the parser

view details

Evan Haas

commit sha 225c0b40a8b12c2ec31865ce134c27e958096b40

Rename Type.VLA to Type.Expr

view details

Evan Haas

commit sha dce4fc2450ae8e705107f4db14dd26081de6ccac

Parser: do not inherit register with typeof

view details

Evan Haas

commit sha d35de5a4c9e8724125fdf327ef2df034b6903a5a

Type: Add arrayLen method

view details

Evan Haas

commit sha d6ecc3096c0a06a9eed69ecf6a92ddd47c828ed6

add .get() method and deal with union/struct init

view details

Evan Haas

commit sha 9984a19e946ec7a0440ea56a9f75cf0bdbb496c5

Parser: handle incomplete array init with typeof

view details

Evan Haas

commit sha 4711b0f423d6a43dd76fd5b33c09d2f5e15e9500

Type: add anyQual method for checking qualifiers on meta types

view details

Evan Haas

commit sha 11fc9304ababa74e83aefc44c34a416928795574

Parser: use .get() method when necessary

view details

Evan Haas

commit sha 985545cdbb89d66d0483221b187f2b79f5211310

Update arrayLen to work with more types

view details

Evan Haas

commit sha 18bf7e034a146647aa058bcd51029a550bdbdbf7

Type: ensure qualifiers are correct for typeof-types

view details

Evan Haas

commit sha 8c746e56501f1ba147e46c9530bfb236eab19faf

Type: use .is() functions for eitherLongDouble etc

view details

Veikka Tuominen

commit sha 8c5f625a9eacd34b83c11814e098fddee3121cf5

Merge pull request #27 from ehaas/improve-typeof

Parser: improve typeof support

view details

pushed 8 hours ago

PR merged Vexu/arocc

Parser: improve typeof support

Add two new type specifiers: typeof_type and typeof_expr, which are the types returned by typeof (depending on whether it's called with a type or an expression)

This allows us to track the underlying type or expression that was used.

+509 -94

9 comments

5 changed files

ehaas

PR closed 8 hours ago

Pull request review comment on ziglang/zig

Spelling

 const char* ir_inst_gen_type_str(Stage1AirInstId id) {
         case Stage1AirInstIdWasmMemoryGrow:
             return "GenWasmMemoryGrow";
         case Stage1AirInstIdExtern:
-            return "GenExtrern";
+            return "GenExtern";

These are just debug output; fixing the spelling here is fine.

jsoref

comment created 4 days ago

Pull request review comment on ziglang/zig

Spelling

 pub const Decoration = enum(u32) {
     RegisterINTEL = 5825,
     MemoryINTEL = 5826,
     NumbanksINTEL = 5827,
-    BankwidthINTEL = 5828,

BankwidthINTEL is correct, see https://www.khronos.org/registry/SPIR-V/specs/unified1/SPIRV.html.

jsoref

comment created 4 days ago

Pull request review comment on ziglang/zig

Spelling

 enum ZigClangPreprocessedEntity_EntityKind {
 };
 
 enum ZigClangExpr_ConstantExprKind {
-    ZigClangExpr_ContantExprKind_Normal,
-    ZigClangExpr_ContantExprKind_NonClassTemplateArgument,
-    ZigClangExpr_ContantExprKind_ClassTemplateArgument,
-    ZigClangExpr_ContantExprKind_ImmediateInvocation,
+    ZigClangExpr_ConstantExprKind_Normal,
+    ZigClangExpr_ConstantExprKind_NonClassTemplateArgument,
+    ZigClangExpr_ConstantExprKind_ClassTemplateArgument,
+    ZigClangExpr_ConstantExprKind_ImmediateInvocation,

Nope, as long as the numbers stay the same it's fine.

jsoref

comment created 4 days ago

push event doppioandante/arocc

Veikka Tuominen

commit sha b2f6d8e06b631385d15d3897bc65601b698829be

Test runner: fix use of dangling pointers

There were accidental uses of dangling pointers which led to double frees, so now all the paths are also duped in the cases list to avoid mistakes like this.

view details

Enrico Lumetti

commit sha a0758ff04402cb7859778fae4dce2a0ecd359951

Preprocessor: fix recursive macro func substitution

The current approach of having a map of the previously expanded macros comes close, but isn't quite correct as it is too conservative. The `expansion_log` was changed to a simple `expansion_stack`, which is searched linearly to check for already expanded macros.
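
A minimal sketch of that idea (illustrative, not the actual patch): push a macro's name while its replacement is being rescanned, and search the stack linearly before expanding a name again:

const std = @import("std");

const ExpansionStack = std.ArrayList([]const u8);

// Sketch only: a name may not expand while it is already on the stack.
// The stack stays small, so a linear search is cheap.
fn isExpanding(stack: *const ExpansionStack, name: []const u8) bool {
    for (stack.items) |active| {
        if (std.mem.eql(u8, active, name)) return true;
    }
    return false;
}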

view details

pushed 4 days ago

push event ehaas/arocc

Veikka Tuominen

commit sha b2f6d8e06b631385d15d3897bc65601b698829be

Test runner: fix use of dangling pointers

There were accidental uses of dangling pointers which led to double frees, so now all the paths are also duped in the cases list to avoid mistakes like this.

view details

Evan Haas

commit sha 5bfc87acee170ccd9dc0e42c68fc3dfd76452e2c

Parser: improve typeof support

Add two new type specifiers: `typeof_type` and `typeof_expr`, which are the types returned by `typeof` (depending on whether it's called with a type or an expression). This allows us to track the underlying type or expression that was used.

view details

Evan Haas

commit sha f3eb10b0f12387d8eb36a40da4dbd13ce66c1b7f

Type: Add `.is` method for checking specifier

Allows typeof() types to function correctly with the parser

view details

Evan Haas

commit sha 7235e64edd8dec23e2c71ecc64170c677d526698

Type: rename VLA to Expr

view details

Evan Haas

commit sha 304e8f51157f97dcfd436eea3df67340755a3283

Parser: do not inherit register with typeof

view details

Evan Haas

commit sha af73ce389435228e2b1288d4b49f8aa227ce635c

Type: remove mergeAll from Qualifiers

No longer necessary since we track typeof types properly

view details

Evan Haas

commit sha 74cdafe6c0f6296cbd4a0ddaa16cef03fcda9b6f

Type: Add arrayLen method

view details

Evan Haas

commit sha b2a920593fee107364c9ceee6094b683225a45d2

Parser: add Type.get() method and use it to deal with union/struct init

view details

Evan Haas

commit sha 26ad1ef1989badc2ea48d58d7a09391b116b8d24

Parser: handle incomplete array init with typeof

view details

Evan Haas

commit sha d039452931be6d24a259daa662d71a134932b2e5

Type: add anyQual method for checking qualifiers on meta types

view details

Evan Haas

commit sha 4eace664876b6f201a33915eaf787f3fea987520

Parser: use Type.get() method when necessary

view details

Evan Haas

commit sha d2ad3971ca7a65fbfa4490a6fbe7dc3cb9ef0cdd

Type: Update arrayLen to work with more types

view details

pushed 4 days ago

push event Vexu/arocc

Veikka Tuominen

commit sha b2f6d8e06b631385d15d3897bc65601b698829be

Test runner: fix use of dangling pointers

There were accidental uses of dangling pointers which led to double frees, so now all the paths are also duped in the cases list to avoid mistakes like this.

view details

pushed 4 days ago

delete branch Vexu/arocc

deleted branch: test

deleted 4 days ago

push eventVexu/arocc

Veikka Tuominen

commit sha a85b5d5484ed8058e9c18af2f902365db82ac4a8

Test runner: fix use of dangling pointers

There were accidental uses of dangling pointers which led to double frees, so now all the paths are also duped in the cases list to avoid mistakes like this.

view details

pushed 4 days ago

push event Vexu/arocc

Veikka Tuominen

commit sha e8144896fe95bf4008601457def8a2c6939a7d33

fix my stupid mistake

view details

pushed 4 days ago

Pull request review comment on ziglang/zig

Stage1: Add Visibility field to ExportOptions.

 pub const GlobalLinkage = enum {
     LinkOnce,
 };
 
+/// This data structure is used by the Zig language code generation and
+/// therefore must be kept in sync with the compiler implementation.
+pub const GlobalVisibility = enum {
+    Default,

New enums should be snake_case.
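
That is, presumably something like this (the extra variants are an assumption based on the usual ELF visibilities, not part of the diff):

pub const GlobalVisibility = enum {
    default,
    hidden,
    protected,
};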

mathetake

comment created 4 days ago