Fix last token not getting tokenized
parent 23269baa0b
commit 7889f4c27a
@@ -1218,6 +1218,15 @@ pub fn tokenize(fileContent: []u8, allocator: std.mem.Allocator) ![]Token
         }
         index += 1;
     }
+    if(longBracketLevel != 0)
+    {
+        return error.UnbalancedLongBracketLevel;
+    }
+    try tokenizeChar(&state, '\n', &lastIndex, &index, &tokenType, &tokenStr, &tokenNumeral, &tokens, &longBracketLevel, &region, allocator);
+    if(region.start != null and region.start.?.col == 0 and region.start.?.line == 0)
+    {
+        region.start = calculatePoint(fileContent, index);
+    }
     return tokens.toOwnedSlice();
 }