mirror of
https://github.com/krgamestudios/Toy.git
synced 2026-04-15 14:54:07 +10:00
I've expanded support to three major platforms: - linux - windows - macos The CI now runs the test suites for all of these, both under normal conditions and under GDB (except for macos, which lacks GDB support). TOY_BITNESS specifies the bit-width of the current platform, either 32 or 64. A value of -1 means the bit-width could not be determined. Some tests will be disabled if the appropriate bit-width can't be determined, and a warning is printed to stderr. TOY_API has been tweaked, and is now dependant on different preprocessor flags. It is defined as 'extern' on all supported platforms except windows, which instead specifies DLL support. It defaults to 'extern' if the platform can't be determined. commit d0350998ecc80b8925a1962ceb2ab400da50be9d Author: Kayne Ruse <kayneruse@gmail.com> Date: Sun Sep 22 09:55:42 2024 +1000 Expanded GDB tests using matrix strategy commit dc2addacc52830227ddcd0f35997c0e1668b579c Author: Kayne Ruse <kayneruse@gmail.com> Date: Sun Sep 22 09:05:42 2024 +1000 Reserved the yield keyword commit f485c380f74a49092e0c5a41e599fbb06dbce235 Author: Kayne Ruse <kayneruse@gmail.com> Date: Sat Sep 21 15:17:11 2024 +1000 Potential segfault fix commit d8b19d21c92133feb071e631009a3cf99df0f068 Author: Kayne Ruse <kayneruse@gmail.com> Date: Sat Sep 21 14:25:47 2024 +1000 Added testing on windows under GDB, read more I'm hunting a segfault that only appears on windows, but I lack a windows machine, so github's runners are all I have right now. 
commit 8606db541fb5cbe91b16a39e9815fe4a27ba0c8a Author: Kayne Ruse <kayneruse@gmail.com> Date: Sat Sep 21 13:12:02 2024 +1000 DLL import/export macros tweaked for windows TOY_EXPORT for making a DLL TOY_IMPORT for using a DLL Defaults to 'extern' if neither option is present commit a6929666401953a5b3a93dfe83c9398e012beefc Author: Kayne Ruse <kayneruse@gmail.com> Date: Sat Sep 21 12:52:06 2024 +1000 Investigating bitness issue on windows commit 8f615f735868a316e8d5a6a77ed899e72fd537f8 Author: Kayne Ruse <kayneruse@gmail.com> Date: Sat Sep 21 12:32:55 2024 +1000 Adjusting bitness tests in test_ast.c commit 61694f2183ac84ee7c53c855f2f6aa29f360f16c Author: Kayne Ruse <kayneruse@gmail.com> Date: Sat Sep 21 11:46:59 2024 +1000 Added experimental macOS CI job
583 lines
17 KiB
C
583 lines
17 KiB
C
#include "toy_parser.h"
|
|
#include "toy_console_colors.h"
|
|
|
|
#include <stdio.h>
|
|
|
|
//utilities
|
|
//report a parse error at the given token, unless the parser is already panicking
static void printError(Toy_Parser* parser, Toy_Token token, const char* errorMsg) {
	//suppress cascading reports while in panic mode
	if (parser->panic) {
		return;
	}

	fprintf(stderr, TOY_CC_ERROR "[Line %d] Error ", token.line);

	//describe where the error occurred
	if (token.type != TOY_TOKEN_EOF) {
		fprintf(stderr, "at '%.*s'", token.length, token.lexeme);
	}
	else {
		fprintf(stderr, "at end");
	}

	fprintf(stderr, ": %s\n" TOY_CC_RESET, errorMsg);

	//flag the parser state for the caller
	parser->panic = true;
	parser->error = true;
}
|
|
|
|
//pull the next token from the lexer, remembering the previous one
static void advance(Toy_Parser* parser) {
	parser->previous = parser->current;
	parser->current = Toy_private_scanLexer(parser->lexer);

	//the lexer signals malformed input with an error token
	if (parser->current.type != TOY_TOKEN_ERROR) {
		return;
	}

	printError(parser, parser->current, "Read error");
}
|
|
|
|
//consume the current token only when it has the given type; returns true on a match
static bool match(Toy_Parser* parser, Toy_TokenType tokenType) {
	if (parser->current.type != tokenType) {
		return false;
	}

	advance(parser);
	return true;
}
|
|
|
|
//require the current token to have the given type, reporting 'msg' otherwise
static void consume(Toy_Parser* parser, Toy_TokenType tokenType, const char* msg) {
	if (parser->current.type == tokenType) {
		advance(parser);
		return;
	}

	printError(parser, parser->current, msg);
}
|
|
|
|
static void synchronize(Toy_Parser* parser) {
|
|
while (parser->current.type != TOY_TOKEN_EOF) {
|
|
switch(parser->current.type) {
|
|
//these tokens can start a statement
|
|
case TOY_TOKEN_KEYWORD_ASSERT:
|
|
case TOY_TOKEN_KEYWORD_BREAK:
|
|
case TOY_TOKEN_KEYWORD_CLASS:
|
|
case TOY_TOKEN_KEYWORD_CONTINUE:
|
|
case TOY_TOKEN_KEYWORD_DO:
|
|
case TOY_TOKEN_KEYWORD_EXPORT:
|
|
case TOY_TOKEN_KEYWORD_FOR:
|
|
case TOY_TOKEN_KEYWORD_FOREACH:
|
|
case TOY_TOKEN_KEYWORD_FUNCTION:
|
|
case TOY_TOKEN_KEYWORD_IF:
|
|
case TOY_TOKEN_KEYWORD_IMPORT:
|
|
case TOY_TOKEN_KEYWORD_PRINT:
|
|
case TOY_TOKEN_KEYWORD_RETURN:
|
|
case TOY_TOKEN_KEYWORD_VAR:
|
|
case TOY_TOKEN_KEYWORD_WHILE:
|
|
parser->error = true;
|
|
parser->panic = false;
|
|
return;
|
|
|
|
default:
|
|
advance(parser);
|
|
}
|
|
}
|
|
}
|
|
|
|
//precedence declarations
|
|
//binding powers, ordered from weakest to strongest; parsePrecedence compares these
//numerically (precRule <= rule->precedence), so the ordering here is load-bearing
typedef enum ParsingPrecedence {
	PREC_NONE,       //token cannot start or continue an expression
	PREC_ASSIGNMENT, //=, +=, -=, *=, /=, %=
	PREC_GROUP,      //parenthesised sub-expressions (used by group())
	PREC_TERNARY,    //reserved - no rule in this file uses it yet
	PREC_OR,         //reserved - no rule in this file uses it yet
	PREC_AND,        //reserved - no rule in this file uses it yet
	PREC_COMPARISON, //==, !=, <, <=, >, >=
	PREC_TERM,       //+, -
	PREC_FACTOR,     //*, /, %
	PREC_UNARY,      //prefix - and ! (used by unary())
	PREC_CALL,       //reserved - no rule in this file uses it yet
	PREC_PRIMARY,    //literal values (see the atomic() entries in the table)
} ParsingPrecedence;
|
|
|
|
//a parsing rule consumes tokens and builds onto *root; the returned flag tells the
//caller (parsePrecedence) which binary node to emit, or TOY_AST_FLAG_NONE when the
//rule has already finished the subtree itself
typedef Toy_AstFlag (*ParsingRule)(Toy_Bucket** bucket, Toy_Parser* parser, Toy_Ast** root);

//one entry per token type: its binding power, plus the rules to run when the
//token appears in prefix or infix position (either may be NULL)
typedef struct ParsingTuple {
	ParsingPrecedence precedence;
	ParsingRule prefix;
	ParsingRule infix;
} ParsingTuple;

static void parsePrecedence(Toy_Bucket** bucket, Toy_Parser* parser, Toy_Ast** root, ParsingPrecedence precRule);

//rule implementations, forward-declared so the table below can reference them
static Toy_AstFlag atomic(Toy_Bucket** bucket, Toy_Parser* parser, Toy_Ast** root);
static Toy_AstFlag unary(Toy_Bucket** bucket, Toy_Parser* parser, Toy_Ast** root);
static Toy_AstFlag binary(Toy_Bucket** bucket, Toy_Parser* parser, Toy_Ast** root);
static Toy_AstFlag group(Toy_Bucket** bucket, Toy_Parser* parser, Toy_Ast** root);
|
|
|
|
//precedence definitions
//NOTE(review): this table is indexed directly by Toy_TokenType (see getParsingRule),
//so the entry order MUST mirror the token enum exactly - confirm against the lexer's
//token list whenever either changes. Also note makeExprStmt references
//TOY_TOKEN_OPERATOR_SEMICOLON, which has no entry here - verify it sits outside the
//range used to index this table.
static ParsingTuple parsingRulesetTable[] = {
	{PREC_PRIMARY,atomic,NULL},// TOY_TOKEN_NULL,

	//variable names
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_IDENTIFIER,

	//types
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_TYPE_TYPE,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_TYPE_BOOLEAN,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_TYPE_INTEGER,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_TYPE_FLOAT,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_TYPE_STRING,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_TYPE_ARRAY,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_TYPE_DICTIONARY,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_TYPE_FUNCTION,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_TYPE_OPAQUE,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_TYPE_ANY,

	//keywords and reserved words
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_KEYWORD_AS,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_KEYWORD_ASSERT,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_KEYWORD_BREAK,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_KEYWORD_CLASS,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_KEYWORD_CONST,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_KEYWORD_CONTINUE,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_KEYWORD_DO,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_KEYWORD_ELSE,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_KEYWORD_EXPORT,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_KEYWORD_FOR,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_KEYWORD_FOREACH,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_KEYWORD_FUNCTION,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_KEYWORD_IF,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_KEYWORD_IMPORT,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_KEYWORD_IN,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_KEYWORD_OF,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_KEYWORD_PRINT,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_KEYWORD_RETURN,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_KEYWORD_TYPEAS,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_KEYWORD_TYPEOF,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_KEYWORD_VAR,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_KEYWORD_WHILE,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_KEYWORD_YIELD,

	//literal values
	{PREC_PRIMARY,atomic,NULL},// TOY_TOKEN_LITERAL_TRUE,
	{PREC_PRIMARY,atomic,NULL},// TOY_TOKEN_LITERAL_FALSE,
	{PREC_PRIMARY,atomic,NULL},// TOY_TOKEN_LITERAL_INTEGER,
	{PREC_PRIMARY,atomic,NULL},// TOY_TOKEN_LITERAL_FLOAT,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_LITERAL_STRING,

	//math operators
	{PREC_TERM,NULL,binary},// TOY_TOKEN_OPERATOR_ADD,
	{PREC_TERM,unary,binary},// TOY_TOKEN_OPERATOR_SUBTRACT,
	{PREC_FACTOR,NULL,binary},// TOY_TOKEN_OPERATOR_MULTIPLY,
	{PREC_FACTOR,NULL,binary},// TOY_TOKEN_OPERATOR_DIVIDE,
	{PREC_FACTOR,NULL,binary},// TOY_TOKEN_OPERATOR_MODULO,
	{PREC_ASSIGNMENT,NULL,binary},// TOY_TOKEN_OPERATOR_ADD_ASSIGN,
	{PREC_ASSIGNMENT,NULL,binary},// TOY_TOKEN_OPERATOR_SUBTRACT_ASSIGN,
	{PREC_ASSIGNMENT,NULL,binary},// TOY_TOKEN_OPERATOR_MULTIPLY_ASSIGN,
	{PREC_ASSIGNMENT,NULL,binary},// TOY_TOKEN_OPERATOR_DIVIDE_ASSIGN,
	{PREC_ASSIGNMENT,NULL,binary},// TOY_TOKEN_OPERATOR_MODULO_ASSIGN,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_OPERATOR_INCREMENT,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_OPERATOR_DECREMENT,
	{PREC_ASSIGNMENT,NULL,binary},// TOY_TOKEN_OPERATOR_ASSIGN,

	//comparator operators
	{PREC_COMPARISON,NULL,binary},// TOY_TOKEN_OPERATOR_COMPARE_EQUAL,
	{PREC_COMPARISON,NULL,binary},// TOY_TOKEN_OPERATOR_COMPARE_NOT,
	{PREC_COMPARISON,NULL,binary},// TOY_TOKEN_OPERATOR_COMPARE_LESS,
	{PREC_COMPARISON,NULL,binary},// TOY_TOKEN_OPERATOR_COMPARE_LESS_EQUAL,
	{PREC_COMPARISON,NULL,binary},// TOY_TOKEN_OPERATOR_COMPARE_GREATER,
	{PREC_COMPARISON,NULL,binary},// TOY_TOKEN_OPERATOR_COMPARE_GREATER_EQUAL,

	//structural operators
	{PREC_NONE,group,NULL},// TOY_TOKEN_OPERATOR_PAREN_LEFT,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_OPERATOR_PAREN_RIGHT,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_OPERATOR_BRACKET_LEFT,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_OPERATOR_BRACKET_RIGHT,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_OPERATOR_BRACE_LEFT,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_OPERATOR_BRACE_RIGHT,

	//other operators
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_OPERATOR_AND,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_OPERATOR_OR,
	{PREC_NONE,unary,NULL},// TOY_TOKEN_OPERATOR_NEGATE,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_OPERATOR_QUESTION,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_OPERATOR_COLON,

	{PREC_NONE,NULL,NULL},// TOY_TOKEN_OPERATOR_CONCAT, // ..
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_OPERATOR_REST, // ...

	//unused operators
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_OPERATOR_AMPERSAND, // &
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_OPERATOR_PIPE, // |

	//meta tokens
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_PASS,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_ERROR,
	{PREC_NONE,NULL,NULL},// TOY_TOKEN_EOF,
};
|
|
|
|
//copies a lexeme into buffer while dropping '_' digit separators;
//buffer must have space for 'length' + 1 bytes (the terminator)
static void filterUnderscores(char* buffer, const char* lexeme, int length) {
	int i = 0, o = 0;
	do {
		buffer[i] = lexeme[o];
		if (buffer[i] != '_') i++;
	} while (lexeme[o++] && i < length);
	buffer[i] = '\0'; //BUGFIX
}

//prefix rule for literal values: null, true, false, integers and floats
static Toy_AstFlag atomic(Toy_Bucket** bucket, Toy_Parser* parser, Toy_Ast** root) {
	switch(parser->previous.type) {
		case TOY_TOKEN_NULL:
			Toy_private_emitAstValue(bucket, root, TOY_VALUE_TO_NULL());
			return TOY_AST_FLAG_NONE;

		case TOY_TOKEN_LITERAL_TRUE:
			Toy_private_emitAstValue(bucket, root, TOY_VALUE_TO_BOOLEAN(true));
			return TOY_AST_FLAG_NONE;

		case TOY_TOKEN_LITERAL_FALSE:
			Toy_private_emitAstValue(bucket, root, TOY_VALUE_TO_BOOLEAN(false));
			return TOY_AST_FLAG_NONE;

		case TOY_TOKEN_LITERAL_INTEGER: {
			//filter the '_' character
			//BUGFIX: when the lexeme contains no '_', the filter fills all 'length'
			//bytes and then writes the terminator at index 'length' - the previous
			//size of 'length' overflowed the VLA by one byte
			char buffer[parser->previous.length + 1];
			filterUnderscores(buffer, parser->previous.lexeme, parser->previous.length);

			int value = 0;
			sscanf(buffer, "%d", &value);
			Toy_private_emitAstValue(bucket, root, TOY_VALUE_TO_INTEGER(value));
			return TOY_AST_FLAG_NONE;
		}

		case TOY_TOKEN_LITERAL_FLOAT: {
			//filter the '_' character (same terminator fix as the integer case)
			char buffer[parser->previous.length + 1];
			filterUnderscores(buffer, parser->previous.lexeme, parser->previous.length);

			float value = 0;
			sscanf(buffer, "%f", &value);
			Toy_private_emitAstValue(bucket, root, TOY_VALUE_TO_FLOAT(value));
			return TOY_AST_FLAG_NONE;
		}

		default:
			printError(parser, parser->previous, "Unexpected token passed to atomic precedence rule");
			Toy_private_emitAstError(bucket, root);
			return TOY_AST_FLAG_NONE;
	}
}
|
|
|
|
//prefix rule for '-' (arithmetic negation) and '!' (logical negation)
static Toy_AstFlag unary(Toy_Bucket** bucket, Toy_Parser* parser, Toy_Ast** root) {
	//'subtract' can only be applied to numbers and groups, while 'negate' can only be applied to booleans and groups
	//this function takes the liberty of peeking into the uppermost node, to see if it can apply this to it

	if (parser->previous.type == TOY_TOKEN_OPERATOR_SUBTRACT) {

		//NOTE(review): lexeme[1] peeks at the source character directly after '-';
		//this assumes the lexeme points into a NUL-terminated source string - confirm
		bool connectedDigit = parser->previous.lexeme[1] >= '0' && parser->previous.lexeme[1] <= '9'; //BUGFIX: '- 1' should not be optimised into a negative
		parsePrecedence(bucket, parser, root, PREC_UNARY);

		//negative numbers: fold '-literal' into a single negative value node,
		//but only when the digit was attached to the minus sign
		if ((*root)->type == TOY_AST_VALUE && TOY_VALUE_IS_INTEGER((*root)->value.value) && connectedDigit) {
			(*root)->value.value = TOY_VALUE_TO_INTEGER( -TOY_VALUE_AS_INTEGER((*root)->value.value) );
		}
		else if ((*root)->type == TOY_AST_VALUE && TOY_VALUE_IS_FLOAT((*root)->value.value) && connectedDigit) {
			(*root)->value.value = TOY_VALUE_TO_FLOAT( -TOY_VALUE_AS_FLOAT((*root)->value.value) );
		}
		else {
			//actually emit the negation node
			Toy_private_emitAstUnary(bucket, root, TOY_AST_FLAG_NEGATE);
		}
	}

	else if (parser->previous.type == TOY_TOKEN_OPERATOR_NEGATE) {
		parsePrecedence(bucket, parser, root, PREC_UNARY);

		//inverted booleans: fold '!literal' directly into the value node
		if ((*root)->type == TOY_AST_VALUE && TOY_VALUE_IS_BOOLEAN((*root)->value.value)) {
			(*root)->value.value = TOY_VALUE_TO_BOOLEAN( !TOY_VALUE_AS_BOOLEAN((*root)->value.value) );
		}
		else {
			//actually emit the negation node
			//NOTE(review): both '-' and '!' emit TOY_AST_FLAG_NEGATE - confirm the
			//AST/interpreter can distinguish arithmetic from logical negation downstream
			Toy_private_emitAstUnary(bucket, root, TOY_AST_FLAG_NEGATE);
		}
	}

	else {
		printError(parser, parser->previous, "Unexpected token passed to unary precedence rule");
		Toy_private_emitAstError(bucket, root);
	}

	return TOY_AST_FLAG_NONE;
}
|
|
|
|
//infix rule for every binary operator: parses the right-hand operand and
//returns the flag describing which binary node the caller should emit
static Toy_AstFlag binary(Toy_Bucket** bucket, Toy_Parser* parser, Toy_Ast** root) {
	//infix must advance past the operator token
	advance(parser);

	Toy_AstFlag flag = TOY_AST_FLAG_NONE;
	ParsingPrecedence operatorPrec = PREC_NONE;

	//select the result flag and the operator's own precedence
	switch(parser->previous.type) {
		//arithmetic
		case TOY_TOKEN_OPERATOR_ADD:      flag = TOY_AST_FLAG_ADD;      operatorPrec = PREC_TERM;   break;
		case TOY_TOKEN_OPERATOR_SUBTRACT: flag = TOY_AST_FLAG_SUBTRACT; operatorPrec = PREC_TERM;   break;
		case TOY_TOKEN_OPERATOR_MULTIPLY: flag = TOY_AST_FLAG_MULTIPLY; operatorPrec = PREC_FACTOR; break;
		case TOY_TOKEN_OPERATOR_DIVIDE:   flag = TOY_AST_FLAG_DIVIDE;   operatorPrec = PREC_FACTOR; break;
		case TOY_TOKEN_OPERATOR_MODULO:   flag = TOY_AST_FLAG_MODULO;   operatorPrec = PREC_FACTOR; break;

		//assignment
		case TOY_TOKEN_OPERATOR_ASSIGN:          flag = TOY_AST_FLAG_ASSIGN;          operatorPrec = PREC_ASSIGNMENT; break;
		case TOY_TOKEN_OPERATOR_ADD_ASSIGN:      flag = TOY_AST_FLAG_ADD_ASSIGN;      operatorPrec = PREC_ASSIGNMENT; break;
		case TOY_TOKEN_OPERATOR_SUBTRACT_ASSIGN: flag = TOY_AST_FLAG_SUBTRACT_ASSIGN; operatorPrec = PREC_ASSIGNMENT; break;
		case TOY_TOKEN_OPERATOR_MULTIPLY_ASSIGN: flag = TOY_AST_FLAG_MULTIPLY_ASSIGN; operatorPrec = PREC_ASSIGNMENT; break;
		case TOY_TOKEN_OPERATOR_DIVIDE_ASSIGN:   flag = TOY_AST_FLAG_DIVIDE_ASSIGN;   operatorPrec = PREC_ASSIGNMENT; break;
		case TOY_TOKEN_OPERATOR_MODULO_ASSIGN:   flag = TOY_AST_FLAG_MODULO_ASSIGN;   operatorPrec = PREC_ASSIGNMENT; break;

		//comparison
		case TOY_TOKEN_OPERATOR_COMPARE_EQUAL:         flag = TOY_AST_FLAG_COMPARE_EQUAL;         operatorPrec = PREC_COMPARISON; break;
		case TOY_TOKEN_OPERATOR_COMPARE_NOT:           flag = TOY_AST_FLAG_COMPARE_NOT;           operatorPrec = PREC_COMPARISON; break;
		case TOY_TOKEN_OPERATOR_COMPARE_LESS:          flag = TOY_AST_FLAG_COMPARE_LESS;          operatorPrec = PREC_COMPARISON; break;
		case TOY_TOKEN_OPERATOR_COMPARE_LESS_EQUAL:    flag = TOY_AST_FLAG_COMPARE_LESS_EQUAL;    operatorPrec = PREC_COMPARISON; break;
		case TOY_TOKEN_OPERATOR_COMPARE_GREATER:       flag = TOY_AST_FLAG_COMPARE_GREATER;       operatorPrec = PREC_COMPARISON; break;
		case TOY_TOKEN_OPERATOR_COMPARE_GREATER_EQUAL: flag = TOY_AST_FLAG_COMPARE_GREATER_EQUAL; operatorPrec = PREC_COMPARISON; break;

		default:
			printError(parser, parser->previous, "Unexpected token passed to binary precedence rule");
			Toy_private_emitAstError(bucket, root);
			return TOY_AST_FLAG_NONE;
	}

	//parse the right-hand operand one level above the operator's own
	//precedence, making all of these operators left-associative
	parsePrecedence(bucket, parser, root, operatorPrec + 1);
	return flag;
}
|
|
|
|
//prefix rule for parenthesised sub-expressions: ( expr )
static Toy_AstFlag group(Toy_Bucket** bucket, Toy_Parser* parser, Toy_Ast** root) {
	if (parser->previous.type != TOY_TOKEN_OPERATOR_PAREN_LEFT) {
		printError(parser, parser->previous, "Unexpected token passed to grouping precedence rule");
		Toy_private_emitAstError(bucket, root);
		return TOY_AST_FLAG_NONE;
	}

	parsePrecedence(bucket, parser, root, PREC_GROUP);
	consume(parser, TOY_TOKEN_OPERATOR_PAREN_RIGHT, "Expected ')' at end of group");

	//Toy_AstGroup is omitted from generation, as an optimisation
	// Toy_private_emitAstGroup(bucket, root);

	return TOY_AST_FLAG_NONE;
}
|
|
|
|
//look up the parsing tuple for a token type (the table is indexed by token type)
static ParsingTuple* getParsingRule(Toy_TokenType type) {
	return parsingRulesetTable + type;
}
|
|
|
|
//grammar rules
|
|
//the core Pratt-style loop: parse one prefix expression, then keep folding in
//infix operations while their binding power is at least precRule
static void parsePrecedence(Toy_Bucket** bucket, Toy_Parser* parser, Toy_Ast** root, ParsingPrecedence precRule) {
	//'step over' the token to parse
	advance(parser);

	//every valid expression has a prefix rule
	ParsingRule prefix = getParsingRule(parser->previous.type)->prefix;

	if (prefix == NULL) {
		printError(parser, parser->previous, "Expected expression");
		Toy_private_emitAstError(bucket, root);
		return;
	}

	prefix(bucket, parser, root);

	//infix rules are left-recursive
	while (precRule <= getParsingRule(parser->current.type)->precedence) {
		ParsingRule infix = getParsingRule(parser->current.type)->infix;

		if (infix == NULL) {
			printError(parser, parser->previous, "Expected operator");
			Toy_private_emitAstError(bucket, root);
			return;
		}

		//the infix rule parses the right-hand side into a fresh subtree
		Toy_Ast* ptr = NULL;
		Toy_AstFlag flag = infix(bucket, parser, &ptr);

		//finished - a NONE flag means the subtree replaces the current root outright
		//(the binary() error path also lands here)
		if (flag == TOY_AST_FLAG_NONE) {
			(*root) = ptr;
			return;
		}

		//otherwise join root and the new subtree under a binary node tagged with flag
		Toy_private_emitAstBinary(bucket, root, flag, ptr);
	}

	//can't assign below a certain precedence - a leftover '=' here means the
	//left-hand side was not a valid assignment target
	if (precRule <= PREC_ASSIGNMENT && match(parser, TOY_TOKEN_OPERATOR_ASSIGN)) {
		printError(parser, parser->current, "Invalid assignment target");
	}
}
|
|
|
|
//parse one full expression into root
static void makeExpr(Toy_Bucket** bucket, Toy_Parser* parser, Toy_Ast** root) {
	//expressions start at assignment, the lowest expression-level precedence
	parsePrecedence(bucket, parser, root, PREC_ASSIGNMENT);
}
|
|
|
|
//parse an expression statement: either a lone ';' or an expression ending in ';'
static void makeExprStmt(Toy_Bucket** bucket, Toy_Parser* parser, Toy_Ast** root) {
	//a bare ';' is an empty statement - emit a pass node
	if (match(parser, TOY_TOKEN_OPERATOR_SEMICOLON)) {
		Toy_private_emitAstPass(bucket, root);
		return;
	}

	//otherwise, an expression followed by its terminator
	makeExpr(bucket, parser, root);
	consume(parser, TOY_TOKEN_OPERATOR_SEMICOLON, "Expected ';' at the end of expression statement");
}
|
|
|
|
//parse a single statement
//TODO: block, print, assert, if-then-else, while-then,
//for-pre-clause-post-then, break, continue, return, import
static void makeStmt(Toy_Bucket** bucket, Toy_Parser* parser, Toy_Ast** root) {
	//default: everything is currently an expression statement
	makeExprStmt(bucket, parser, root);
}
|
|
|
|
//parse a declaration-level statement
static void makeDeclarationStmt(Toy_Bucket** bucket, Toy_Parser* parser, Toy_Ast** root) {
	//TODO: variable and function declarations
	// if (match(parser, TOY_TOKEN_KEYWORD_VAR)) {
	// 	makeVariableDeclarationStmt(bucket, parser, root);
	// }
	// else if (match(parser, TOY_TOKEN_KEYWORD_FUNCTION)) {
	// 	makeFunctionDeclarationStmt(bucket, parser, root);
	// }
	// else {

	//everything else is an ordinary statement
	makeStmt(bucket, parser, root);

	// }
}
|
|
|
|
//parse a sequence of declarations into a single block node, until EOF
static void makeBlockStmt(Toy_Bucket** bucket, Toy_Parser* parser, Toy_Ast** root) {
	//open a fresh block node
	Toy_private_initAstBlock(bucket, root);

	while (!match(parser, TOY_TOKEN_EOF)) {
		//process the grammar rules for one declaration
		Toy_Ast* stmt = NULL;
		makeDeclarationStmt(bucket, parser, &stmt);

		//on a parse error: resynchronise, record an error node, and keep going
		if (parser->panic) {
			synchronize(parser);

			Toy_Ast* err = NULL;
			Toy_private_emitAstError(bucket, &err);
			Toy_private_appendAstBlock(bucket, root, err);

			continue;
		}

		Toy_private_appendAstBlock(bucket, root, stmt);
	}
}
|
|
|
|
//exposed functions
|
|
//attach a lexer to the parser: wipe old state, bind, and prime the first token
void Toy_bindParser(Toy_Parser* parser, Toy_Lexer* lexer) {
	Toy_resetParser(parser);

	parser->lexer = lexer;
	advance(parser);
}
|
|
|
|
//produce one AST from the bound token stream; an exhausted stream yields an end node
Toy_Ast* Toy_scanParser(Toy_Bucket** bucket, Toy_Parser* parser) {
	Toy_Ast* root = NULL;

	if (match(parser, TOY_TOKEN_EOF)) {
		//nothing left to parse
		Toy_private_emitAstEnd(bucket, &root);
	}
	else {
		makeBlockStmt(bucket, parser, &root);
	}

	return root;
}
|
|
|
|
//restore the parser to a pristine, unbound state
void Toy_resetParser(Toy_Parser* parser) {
	parser->lexer = NULL;

	parser->previous = TOY_BLANK_TOKEN();
	parser->current = TOY_BLANK_TOKEN();

	parser->panic = false;
	parser->error = false;
}
|