mirror of
https://github.com/krgamestudios/Toy.git
synced 2026-04-15 14:54:07 +10:00
Renamed all variables to fit into a namespace
Basically, all Toy variables, functions, etc. are prepended with "Toy_", and macros are prepended with "TOY_". This is to reduce namespace pollution, which was an issue pointed out to me - blame @GyroVorbis. I've also bumped the minor version number - theoretically I should bump the major number, but I'm not quite ready for 1.0 yet.
This commit is contained in:
@@ -1,33 +1,33 @@
|
||||
#include "lexer.h"
|
||||
#include "console_colors.h"
|
||||
#include "keyword_types.h"
|
||||
#include "toy_lexer.h"
|
||||
#include "toy_console_colors.h"
|
||||
#include "toy_keyword_types.h"
|
||||
|
||||
#include <stdio.h>
|
||||
#include <string.h>
|
||||
#include <ctype.h>
|
||||
|
||||
//static generic utility functions
|
||||
static void cleanLexer(Lexer* lexer) {
|
||||
static void cleanLexer(Toy_Lexer* lexer) {
|
||||
lexer->source = NULL;
|
||||
lexer->start = 0;
|
||||
lexer->current = 0;
|
||||
lexer->line = 1;
|
||||
}
|
||||
|
||||
static bool isAtEnd(Lexer* lexer) {
|
||||
static bool isAtEnd(Toy_Lexer* lexer) {
|
||||
return lexer->source[lexer->current] == '\0';
|
||||
}
|
||||
|
||||
static char peek(Lexer* lexer) {
|
||||
static char peek(Toy_Lexer* lexer) {
|
||||
return lexer->source[lexer->current];
|
||||
}
|
||||
|
||||
static char peekNext(Lexer* lexer) {
|
||||
static char peekNext(Toy_Lexer* lexer) {
|
||||
if (isAtEnd(lexer)) return '\0';
|
||||
return lexer->source[lexer->current + 1];
|
||||
}
|
||||
|
||||
static char advance(Lexer* lexer) {
|
||||
static char advance(Toy_Lexer* lexer) {
|
||||
if (isAtEnd(lexer)) {
|
||||
return '\0';
|
||||
}
|
||||
@@ -41,7 +41,7 @@ static char advance(Lexer* lexer) {
|
||||
return lexer->source[lexer->current - 1];
|
||||
}
|
||||
|
||||
static void eatWhitespace(Lexer* lexer) {
|
||||
static void eatWhitespace(Toy_Lexer* lexer) {
|
||||
const char c = peek(lexer);
|
||||
|
||||
switch(c) {
|
||||
@@ -79,11 +79,11 @@ static void eatWhitespace(Lexer* lexer) {
|
||||
eatWhitespace(lexer);
|
||||
}
|
||||
|
||||
static bool isDigit(Lexer* lexer) {
|
||||
static bool isDigit(Toy_Lexer* lexer) {
|
||||
return peek(lexer) >= '0' && peek(lexer) <= '9';
|
||||
}
|
||||
|
||||
static bool isAlpha(Lexer* lexer) {
|
||||
static bool isAlpha(Toy_Lexer* lexer) {
|
||||
return
|
||||
(peek(lexer) >= 'A' && peek(lexer) <= 'Z') ||
|
||||
(peek(lexer) >= 'a' && peek(lexer) <= 'z') ||
|
||||
@@ -91,7 +91,7 @@ static bool isAlpha(Lexer* lexer) {
|
||||
;
|
||||
}
|
||||
|
||||
static bool match(Lexer* lexer, char c) {
|
||||
static bool match(Toy_Lexer* lexer, char c) {
|
||||
if (peek(lexer) == c) {
|
||||
advance(lexer);
|
||||
return true;
|
||||
@@ -101,10 +101,10 @@ static bool match(Lexer* lexer, char c) {
|
||||
}
|
||||
|
||||
//token generators
|
||||
static Token makeErrorToken(Lexer* lexer, char* msg) {
|
||||
Token token;
|
||||
static Toy_Token makeErrorToken(Toy_Lexer* lexer, char* msg) {
|
||||
Toy_Token token;
|
||||
|
||||
token.type = TOKEN_ERROR;
|
||||
token.type = TOY_TOKEN_ERROR;
|
||||
token.lexeme = msg;
|
||||
token.length = strlen(msg);
|
||||
token.line = lexer->line;
|
||||
@@ -112,15 +112,15 @@ static Token makeErrorToken(Lexer* lexer, char* msg) {
|
||||
#ifndef TOY_EXPORT
|
||||
if (command.verbose) {
|
||||
printf("err:");
|
||||
printToken(&token);
|
||||
Toy_printToken(&token);
|
||||
}
|
||||
#endif
|
||||
|
||||
return token;
|
||||
}
|
||||
|
||||
static Token makeToken(Lexer* lexer, TokenType type) {
|
||||
Token token;
|
||||
static Toy_Token makeToken(Toy_Lexer* lexer, Toy_TokenType type) {
|
||||
Toy_Token token;
|
||||
|
||||
token.type = type;
|
||||
token.length = lexer->current - lexer->start;
|
||||
@@ -131,25 +131,25 @@ static Token makeToken(Lexer* lexer, TokenType type) {
|
||||
//BUG #10: this shows TOKEN_EOF twice due to the overarching structure of the program - can't be fixed
|
||||
if (command.verbose) {
|
||||
printf("tok:");
|
||||
printToken(&token);
|
||||
Toy_printToken(&token);
|
||||
}
|
||||
#endif
|
||||
|
||||
return token;
|
||||
}
|
||||
|
||||
static Token makeIntegerOrFloat(Lexer* lexer) {
|
||||
TokenType type = TOKEN_LITERAL_INTEGER; //what am I making?
|
||||
static Toy_Token makeIntegerOrFloat(Toy_Lexer* lexer) {
|
||||
Toy_TokenType type = TOY_TOKEN_LITERAL_INTEGER; //what am I making?
|
||||
|
||||
while(isDigit(lexer)) advance(lexer);
|
||||
|
||||
if (peek(lexer) == '.' && (peekNext(lexer) >= '0' && peekNext(lexer) <= '9')) { //BUGFIX: peekNext == digit
|
||||
type = TOKEN_LITERAL_FLOAT;
|
||||
type = TOY_TOKEN_LITERAL_FLOAT;
|
||||
advance(lexer);
|
||||
while(isDigit(lexer)) advance(lexer);
|
||||
}
|
||||
|
||||
Token token;
|
||||
Toy_Token token;
|
||||
|
||||
token.type = type;
|
||||
token.lexeme = &lexer->source[lexer->start];
|
||||
@@ -158,19 +158,19 @@ static Token makeIntegerOrFloat(Lexer* lexer) {
|
||||
|
||||
#ifndef TOY_EXPORT
|
||||
if (command.verbose) {
|
||||
if (type == TOKEN_LITERAL_INTEGER) {
|
||||
if (type == TOY_TOKEN_LITERAL_INTEGER) {
|
||||
printf("int:");
|
||||
} else {
|
||||
printf("flt:");
|
||||
}
|
||||
printToken(&token);
|
||||
Toy_printToken(&token);
|
||||
}
|
||||
#endif
|
||||
|
||||
return token;
|
||||
}
|
||||
|
||||
static Token makeString(Lexer* lexer, char terminator) {
|
||||
static Toy_Token makeString(Toy_Lexer* lexer, char terminator) {
|
||||
while (!isAtEnd(lexer) && peek(lexer) != terminator) {
|
||||
advance(lexer);
|
||||
}
|
||||
@@ -181,9 +181,9 @@ static Token makeString(Lexer* lexer, char terminator) {
|
||||
return makeErrorToken(lexer, "Unterminated string");
|
||||
}
|
||||
|
||||
Token token;
|
||||
Toy_Token token;
|
||||
|
||||
token.type = TOKEN_LITERAL_STRING;
|
||||
token.type = TOY_TOKEN_LITERAL_STRING;
|
||||
token.lexeme = &lexer->source[lexer->start + 1];
|
||||
token.length = lexer->current - lexer->start - 2;
|
||||
token.line = lexer->line;
|
||||
@@ -191,14 +191,14 @@ static Token makeString(Lexer* lexer, char terminator) {
|
||||
#ifndef TOY_EXPORT
|
||||
if (command.verbose) {
|
||||
printf("str:");
|
||||
printToken(&token);
|
||||
Toy_printToken(&token);
|
||||
}
|
||||
#endif
|
||||
|
||||
return token;
|
||||
}
|
||||
|
||||
static Token makeKeywordOrIdentifier(Lexer* lexer) {
|
||||
static Toy_Token makeKeywordOrIdentifier(Toy_Lexer* lexer) {
|
||||
advance(lexer); //first letter can only be alpha
|
||||
|
||||
while(isDigit(lexer) || isAlpha(lexer)) {
|
||||
@@ -206,11 +206,11 @@ static Token makeKeywordOrIdentifier(Lexer* lexer) {
|
||||
}
|
||||
|
||||
//scan for a keyword
|
||||
for (int i = 0; keywordTypes[i].keyword; i++) {
|
||||
if (strlen(keywordTypes[i].keyword) == (long unsigned int)(lexer->current - lexer->start) && !strncmp(keywordTypes[i].keyword, &lexer->source[lexer->start], lexer->current - lexer->start)) {
|
||||
Token token;
|
||||
for (int i = 0; Toy_keywordTypes[i].keyword; i++) {
|
||||
if (strlen(Toy_keywordTypes[i].keyword) == (long unsigned int)(lexer->current - lexer->start) && !strncmp(Toy_keywordTypes[i].keyword, &lexer->source[lexer->start], lexer->current - lexer->start)) {
|
||||
Toy_Token token;
|
||||
|
||||
token.type = keywordTypes[i].type;
|
||||
token.type = Toy_keywordTypes[i].type;
|
||||
token.lexeme = &lexer->source[lexer->start];
|
||||
token.length = lexer->current - lexer->start;
|
||||
token.line = lexer->line;
|
||||
@@ -218,7 +218,7 @@ static Token makeKeywordOrIdentifier(Lexer* lexer) {
|
||||
#ifndef TOY_EXPORT
|
||||
if (command.verbose) {
|
||||
printf("kwd:");
|
||||
printToken(&token);
|
||||
Toy_printToken(&token);
|
||||
}
|
||||
#endif
|
||||
|
||||
@@ -227,9 +227,9 @@ static Token makeKeywordOrIdentifier(Lexer* lexer) {
|
||||
}
|
||||
|
||||
//return an identifier
|
||||
Token token;
|
||||
Toy_Token token;
|
||||
|
||||
token.type = TOKEN_IDENTIFIER;
|
||||
token.type = TOY_TOKEN_IDENTIFIER;
|
||||
token.lexeme = &lexer->source[lexer->start];
|
||||
token.length = lexer->current - lexer->start;
|
||||
token.line = lexer->line;
|
||||
@@ -237,7 +237,7 @@ static Token makeKeywordOrIdentifier(Lexer* lexer) {
|
||||
#ifndef TOY_EXPORT
|
||||
if (command.verbose) {
|
||||
printf("idf:");
|
||||
printToken(&token);
|
||||
Toy_printToken(&token);
|
||||
}
|
||||
#endif
|
||||
|
||||
@@ -245,18 +245,18 @@ static Token makeKeywordOrIdentifier(Lexer* lexer) {
|
||||
}
|
||||
|
||||
//exposed functions
|
||||
void initLexer(Lexer* lexer, char* source) {
|
||||
void Toy_initLexer(Toy_Lexer* lexer, char* source) {
|
||||
cleanLexer(lexer);
|
||||
|
||||
lexer->source = source;
|
||||
}
|
||||
|
||||
Token scanLexer(Lexer* lexer) {
|
||||
Toy_Token Toy_scanLexer(Toy_Lexer* lexer) {
|
||||
eatWhitespace(lexer);
|
||||
|
||||
lexer->start = lexer->current;
|
||||
|
||||
if (isAtEnd(lexer)) return makeToken(lexer, TOKEN_EOF);
|
||||
if (isAtEnd(lexer)) return makeToken(lexer, TOY_TOKEN_EOF);
|
||||
|
||||
if (isDigit(lexer)) return makeIntegerOrFloat(lexer);
|
||||
if (isAlpha(lexer)) return makeKeywordOrIdentifier(lexer);
|
||||
@@ -264,45 +264,45 @@ Token scanLexer(Lexer* lexer) {
|
||||
char c = advance(lexer);
|
||||
|
||||
switch(c) {
|
||||
case '(': return makeToken(lexer, TOKEN_PAREN_LEFT);
|
||||
case ')': return makeToken(lexer, TOKEN_PAREN_RIGHT);
|
||||
case '{': return makeToken(lexer, TOKEN_BRACE_LEFT);
|
||||
case '}': return makeToken(lexer, TOKEN_BRACE_RIGHT);
|
||||
case '[': return makeToken(lexer, TOKEN_BRACKET_LEFT);
|
||||
case ']': return makeToken(lexer, TOKEN_BRACKET_RIGHT);
|
||||
case '(': return makeToken(lexer, TOY_TOKEN_PAREN_LEFT);
|
||||
case ')': return makeToken(lexer, TOY_TOKEN_PAREN_RIGHT);
|
||||
case '{': return makeToken(lexer, TOY_TOKEN_BRACE_LEFT);
|
||||
case '}': return makeToken(lexer, TOY_TOKEN_BRACE_RIGHT);
|
||||
case '[': return makeToken(lexer, TOY_TOKEN_BRACKET_LEFT);
|
||||
case ']': return makeToken(lexer, TOY_TOKEN_BRACKET_RIGHT);
|
||||
|
||||
case '+': return makeToken(lexer, match(lexer, '=') ? TOKEN_PLUS_ASSIGN : match(lexer, '+') ? TOKEN_PLUS_PLUS: TOKEN_PLUS);
|
||||
case '-': return makeToken(lexer, match(lexer, '=') ? TOKEN_MINUS_ASSIGN : match(lexer, '-') ? TOKEN_MINUS_MINUS: TOKEN_MINUS);
|
||||
case '*': return makeToken(lexer, match(lexer, '=') ? TOKEN_MULTIPLY_ASSIGN : TOKEN_MULTIPLY);
|
||||
case '/': return makeToken(lexer, match(lexer, '=') ? TOKEN_DIVIDE_ASSIGN : TOKEN_DIVIDE);
|
||||
case '%': return makeToken(lexer, match(lexer, '=') ? TOKEN_MODULO_ASSIGN : TOKEN_MODULO);
|
||||
case '+': return makeToken(lexer, match(lexer, '=') ? TOY_TOKEN_PLUS_ASSIGN : match(lexer, '+') ? TOY_TOKEN_PLUS_PLUS: TOY_TOKEN_PLUS);
|
||||
case '-': return makeToken(lexer, match(lexer, '=') ? TOY_TOKEN_MINUS_ASSIGN : match(lexer, '-') ? TOY_TOKEN_MINUS_MINUS: TOY_TOKEN_MINUS);
|
||||
case '*': return makeToken(lexer, match(lexer, '=') ? TOY_TOKEN_MULTIPLY_ASSIGN : TOY_TOKEN_MULTIPLY);
|
||||
case '/': return makeToken(lexer, match(lexer, '=') ? TOY_TOKEN_DIVIDE_ASSIGN : TOY_TOKEN_DIVIDE);
|
||||
case '%': return makeToken(lexer, match(lexer, '=') ? TOY_TOKEN_MODULO_ASSIGN : TOY_TOKEN_MODULO);
|
||||
|
||||
case '!': return makeToken(lexer, match(lexer, '=') ? TOKEN_NOT_EQUAL : TOKEN_NOT);
|
||||
case '=': return makeToken(lexer, match(lexer, '=') ? TOKEN_EQUAL : TOKEN_ASSIGN);
|
||||
case '!': return makeToken(lexer, match(lexer, '=') ? TOY_TOKEN_NOT_EQUAL : TOY_TOKEN_NOT);
|
||||
case '=': return makeToken(lexer, match(lexer, '=') ? TOY_TOKEN_EQUAL : TOY_TOKEN_ASSIGN);
|
||||
|
||||
case '<': return makeToken(lexer, match(lexer, '=') ? TOKEN_LESS_EQUAL : TOKEN_LESS);
|
||||
case '>': return makeToken(lexer, match(lexer, '=') ? TOKEN_GREATER_EQUAL : TOKEN_GREATER);
|
||||
case '<': return makeToken(lexer, match(lexer, '=') ? TOY_TOKEN_LESS_EQUAL : TOY_TOKEN_LESS);
|
||||
case '>': return makeToken(lexer, match(lexer, '=') ? TOY_TOKEN_GREATER_EQUAL : TOY_TOKEN_GREATER);
|
||||
|
||||
case '&': //TOKEN_AND not used
|
||||
if (advance(lexer) != '&') {
|
||||
return makeErrorToken(lexer, "Unexpected '&'");
|
||||
} else {
|
||||
return makeToken(lexer, TOKEN_AND);
|
||||
return makeToken(lexer, TOY_TOKEN_AND);
|
||||
}
|
||||
|
||||
case '|': return makeToken(lexer, match(lexer, '|') ? TOKEN_OR : TOKEN_PIPE);
|
||||
case '|': return makeToken(lexer, match(lexer, '|') ? TOY_TOKEN_OR : TOY_TOKEN_PIPE);
|
||||
|
||||
case '?': return makeToken(lexer, TOKEN_QUESTION);
|
||||
case ':': return makeToken(lexer, TOKEN_COLON);
|
||||
case ';': return makeToken(lexer, TOKEN_SEMICOLON);
|
||||
case ',': return makeToken(lexer, TOKEN_COMMA);
|
||||
case '?': return makeToken(lexer, TOY_TOKEN_QUESTION);
|
||||
case ':': return makeToken(lexer, TOY_TOKEN_COLON);
|
||||
case ';': return makeToken(lexer, TOY_TOKEN_SEMICOLON);
|
||||
case ',': return makeToken(lexer, TOY_TOKEN_COMMA);
|
||||
case '.':
|
||||
if (peek(lexer) == '.' && peekNext(lexer) == '.') {
|
||||
advance(lexer);
|
||||
advance(lexer);
|
||||
return makeToken(lexer, TOKEN_REST);
|
||||
return makeToken(lexer, TOY_TOKEN_REST);
|
||||
}
|
||||
return makeToken(lexer, TOKEN_DOT);
|
||||
return makeToken(lexer, TOY_TOKEN_DOT);
|
||||
|
||||
case '"':
|
||||
return makeString(lexer, c);
|
||||
@@ -322,18 +322,18 @@ static void trim(char** s, int* l) { //all this to remove a newline?
|
||||
}
|
||||
|
||||
//for debugging
|
||||
void printToken(Token* token) {
|
||||
if (token->type == TOKEN_ERROR) {
|
||||
printf(ERROR "Error\t%d\t%.*s\n" RESET, token->line, token->length, token->lexeme);
|
||||
void Toy_printToken(Toy_Token* token) {
|
||||
if (token->type == TOY_TOKEN_ERROR) {
|
||||
printf(TOY_CC_ERROR "Error\t%d\t%.*s\n" TOY_CC_RESET, token->line, token->length, token->lexeme);
|
||||
return;
|
||||
}
|
||||
|
||||
printf("\t%d\t%d\t", token->type, token->line);
|
||||
|
||||
if (token->type == TOKEN_IDENTIFIER || token->type == TOKEN_LITERAL_INTEGER || token->type == TOKEN_LITERAL_FLOAT || token->type == TOKEN_LITERAL_STRING) {
|
||||
if (token->type == TOY_TOKEN_IDENTIFIER || token->type == TOY_TOKEN_LITERAL_INTEGER || token->type == TOY_TOKEN_LITERAL_FLOAT || token->type == TOY_TOKEN_LITERAL_STRING) {
|
||||
printf("%.*s\t", token->length, token->lexeme);
|
||||
} else {
|
||||
char* keyword = findKeywordByType(token->type);
|
||||
char* keyword = Toy_findKeywordByType(token->type);
|
||||
|
||||
if (keyword != NULL) {
|
||||
printf("%s", keyword);
|
||||
|
||||
Reference in New Issue
Block a user