Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 0 additions & 1 deletion src/DocumentScope.zig
Original file line number Diff line number Diff line change
Expand Up @@ -766,7 +766,6 @@ fn walkNode(
.builtin_call,
.builtin_call_comma,
.container_field,
.asm_legacy,
.asm_simple,
.@"asm",

Expand Down
1 change: 0 additions & 1 deletion src/analysis.zig
Original file line number Diff line number Diff line change
Expand Up @@ -2926,7 +2926,6 @@ fn resolveTypeOfNodeUncached(analyser: *Analyser, options: ResolveOptions) error
.@"resume",
=> {},

.asm_legacy,
.asm_simple,
.@"asm",
.asm_output,
Expand Down
81 changes: 0 additions & 81 deletions src/ast.zig
Original file line number Diff line number Diff line change
Expand Up @@ -116,69 +116,6 @@ pub fn ptrTypeBitRange(tree: *const Ast, node: Node.Index) full.PtrType {
});
}

/// Builds a `full.AsmLegacy` view over the parsed components of a legacy
/// (pre-0.15 syntax) inline-assembly node, recovering pieces the AST does not
/// store directly: the optional `volatile` token, the split of `items` into
/// outputs vs. inputs, and the token index of the first clobber string (if any).
fn legacyAsmComponents(tree: *const Ast, info: full.AsmLegacy.Components) full.AsmLegacy {
var result: full.AsmLegacy = .{
.ast = info,
.volatile_token = null,
.inputs = &.{},
.outputs = &.{},
.first_clobber = null,
};
// `volatile`, when present, is the token immediately after the `asm` keyword.
if (info.asm_token + 1 < tree.tokens.len and tree.tokenTag(info.asm_token + 1) == .keyword_volatile) {
result.volatile_token = info.asm_token + 1;
}
// Outputs always precede inputs in `items`; find the boundary index
// (first item that is not an `.asm_output` node).
const outputs_end: usize = for (info.items, 0..) |item, i| {
switch (tree.nodeTag(item)) {
.asm_output => continue,
else => break i,
}
} else info.items.len;

result.outputs = info.items[0..outputs_end];
result.inputs = info.items[outputs_end..];

// Locate the first clobber string by scanning tokens after the last
// syntactic element; the three cases differ in how many colons separate it.
if (info.items.len == 0) {
// asm ("foo" ::: "a", "b");
const template_token = lastToken(tree, info.template);
if (template_token + 4 < tree.tokens.len and
tree.tokenTag(template_token + 1) == .colon and
tree.tokenTag(template_token + 2) == .colon and
tree.tokenTag(template_token + 3) == .colon and
tree.tokenTag(template_token + 4) == .string_literal)
{
result.first_clobber = template_token + 4;
}
} else if (result.inputs.len != 0) {
// asm ("foo" :: [_] "" (y) : "a", "b");
const last_input = result.inputs[result.inputs.len - 1];
const rparen = lastToken(tree, last_input);
var i = rparen + 1;
// Allow a (useless) comma right after the closing parenthesis.
// NOTE(review): unlike the outputs-only branch below, these token lookups
// carry no explicit bounds checks — presumably safe because the token
// stream always continues past an input's rparen; confirm.
if (tree.tokenTag(i) == .comma) i += 1;
if (tree.tokenTag(i) == .colon and
tree.tokenTag(i + 1) == .string_literal)
{
result.first_clobber = i + 1;
}
} else {
// asm ("foo" : [_] "" (x) :: "a", "b");
const last_output = result.outputs[result.outputs.len - 1];
const rparen = lastToken(tree, last_output);
var i = rparen + 1;
// Allow a (useless) comma right after the closing parenthesis.
if (i + 1 < tree.tokens.len and tree.tokenTag(i) == .comma) i += 1;
if (i + 2 < tree.tokens.len and
tree.tokenTag(i) == .colon and
tree.tokenTag(i + 1) == .colon and
tree.tokenTag(i + 2) == .string_literal)
{
result.first_clobber = i + 2;
}
}

return result;
}

fn fullAsmComponents(tree: *const Ast, info: full.Asm.Components) full.Asm {
var result: full.Asm = .{
.ast = info,
Expand All @@ -202,18 +139,6 @@ fn fullAsmComponents(tree: *const Ast, info: full.Asm.Components) full.Asm {
return result;
}

/// Decodes a legacy (pre-0.15 syntax) inline-assembly node into a
/// `full.AsmLegacy` by unpacking its extra data and delegating the
/// token-level analysis to `legacyAsmComponents`.
pub fn asmLegacy(tree: *const Ast, node: Node.Index) full.AsmLegacy {
    const template_node, const extra_index = tree.nodeData(node).node_and_extra;
    const asm_extra = tree.extraData(extra_index, Node.AsmLegacy);
    return legacyAsmComponents(tree, .{
        .asm_token = tree.nodeMainToken(node),
        .template = template_node,
        .items = tree.extraDataSlice(.{ .start = asm_extra.items_start, .end = asm_extra.items_end }, Node.Index),
        .rparen = asm_extra.rparen,
    });
}

pub fn asmSimple(tree: *const Ast, node: Node.Index) full.Asm {
const template, const rparen = tree.nodeData(node).node_and_token;
return fullAsmComponents(tree, .{
Expand Down Expand Up @@ -691,11 +616,6 @@ pub fn lastToken(tree: *const Ast, node: Node.Index) Ast.TokenIndex {
const index = @intFromEnum(extra_index) + extra.inputs + @intFromBool(extra.has_else);
n = @enumFromInt(tree.extra_data[index]);
},
.asm_legacy => {
_, const extra_index = tree.nodeData(n).node_and_extra;
const extra = tree.extraData(extra_index, Node.AsmLegacy);
break extra.rparen;
},
.@"asm" => {
_, const extra_index = tree.nodeData(n).node_and_extra;
const extra = tree.extraData(extra_index, Node.Asm);
Expand Down Expand Up @@ -1518,7 +1438,6 @@ fn iterateChildrenTypeErased(
if (field.value_expr.unwrap()) |value_expr| try callback(context, tree, value_expr);
},

.asm_legacy,
.@"asm",
=> {
const asm_node = tree.asmFull(node);
Expand Down
45 changes: 0 additions & 45 deletions src/features/semantic_tokens.zig
Original file line number Diff line number Diff line change
Expand Up @@ -737,51 +737,6 @@ fn writeNodeTokens(builder: *Builder, node: Ast.Node.Index) error{OutOfMemory}!v
.unreachable_literal => {
try writeToken(builder, main_token, .keywordLiteral);
},
.asm_legacy => {
const asm_node: Ast.full.AsmLegacy = ast.asmLegacy(tree, node);

try writeToken(builder, main_token, .keyword);
try writeToken(builder, asm_node.volatile_token, .keyword);
try writeNodeTokens(builder, asm_node.ast.template);

for (asm_node.outputs) |output_node| {
try writeToken(builder, tree.nodeMainToken(output_node), .variable);
try writeToken(builder, tree.nodeMainToken(output_node) + 2, .string);
const has_arrow = tree.tokenTag(tree.nodeMainToken(output_node) + 4) == .arrow;
if (has_arrow) {
if (tree.nodeData(output_node).opt_node_and_token[0].unwrap()) |lhs| {
try writeNodeTokens(builder, lhs);
}
} else {
try writeToken(builder, tree.nodeMainToken(output_node) + 4, .variable);
}
}

for (asm_node.inputs) |input_node| {
try writeToken(builder, tree.nodeMainToken(input_node), .variable);
try writeToken(builder, tree.nodeMainToken(input_node) + 2, .string);
try writeNodeTokens(builder, tree.nodeData(input_node).node_and_token[0]);
}

if (asm_node.first_clobber) |first_clobber| clobbers: {
var tok_i = first_clobber;
while (true) : (tok_i += 1) {
try writeToken(builder, tok_i, .string);
tok_i += 1;
switch (tree.tokenTag(tok_i)) {
.r_paren => break :clobbers,
.comma => {
if (tree.tokenTag(tok_i + 1) == .r_paren) {
break :clobbers;
} else {
continue;
}
},
else => break :clobbers,
}
}
}
},
.asm_simple,
.@"asm",
=> {
Expand Down
9 changes: 0 additions & 9 deletions src/print_ast.zig
Original file line number Diff line number Diff line change
Expand Up @@ -489,14 +489,6 @@ const PrintAst = struct {
try p.renderItem(statement);
}
},
.asm_legacy => {
const asm_data = tree.asmLegacy(node);
try p.renderOptTokenField(asm_data.first_clobber, "first_clobber", .hide_if_none);
try p.renderOptTokenField(asm_data.volatile_token, "volatile_token", .hide_if_none);
try p.renderField(asm_data.ast.template, "template");
try p.renderNodeSliceField(asm_data.inputs, "inputs");
try p.renderNodeSliceField(asm_data.outputs, "outputs");
},
.asm_simple,
.@"asm",
=> {
Expand Down Expand Up @@ -758,7 +750,6 @@ fn nodeTagName(tag: Ast.Node.Tag) []const u8 {
.block,
.block_semicolon,
=> "Block",
.asm_legacy,
.asm_simple,
.@"asm",
=> "Asm",
Expand Down
26 changes: 0 additions & 26 deletions tests/lsp_features/semantic_tokens.zig
Original file line number Diff line number Diff line change
Expand Up @@ -1877,32 +1877,6 @@ test "test decl" {
});
}

// Verifies semantic token classification for a legacy (pre-0.15 syntax)
// volatile asm expression: the `asm`/`volatile` keywords, the template and
// output-constraint strings, the `_` output symbol, the `type` of the
// `-> type` return annotation, and the clobber string.
test "legacy asm" {
try testSemanticTokens(
\\fn foo() void {
\\    asm volatile (""
\\        : [_] "" (-> type),
\\        :
\\        : "clobber"
\\    );
\\}
, &.{
.{ "fn", .keyword, .{} },
.{ "foo", .function, .{ .declaration = true } },
.{ "void", .type, .{} },

.{ "asm", .keyword, .{} },
.{ "volatile", .keyword, .{} },
.{ "\"\"", .string, .{} },

.{ "_", .variable, .{} },
.{ "\"\"", .string, .{} },
.{ "type", .type, .{} },

.{ "\"clobber\"", .string, .{} },
});
}

test "asm" {
try testSemanticTokens(
\\fn syscall1(number: usize, arg1: usize) usize {
Expand Down