// chibicc/tokenize.c

#include "chibicc.h"

// Input string
static char *current_input;

// Reports an error and exits.
void error(char *fmt, ...) {
  va_list ap;
  va_start(ap, fmt);
  vfprintf(stderr, fmt, ap);
  fprintf(stderr, "\n");
  exit(1);
}

// Reports an error location and exits.
static void verror_at(char *loc, char *fmt, va_list ap) {
  int pos = loc - current_input;
  fprintf(stderr, "%s\n", current_input);
  fprintf(stderr, "%*s", pos, ""); // print pos spaces.
  fprintf(stderr, "^ ");
  vfprintf(stderr, fmt, ap);
  fprintf(stderr, "\n");
  exit(1);
}
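
// Reports an error at a given source location and exits.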
void error_at(char *loc, char *fmt, ...) {
  va_list ap;
  va_start(ap, fmt);
  verror_at(loc, fmt, ap);
}
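
// Reports an error at the location of a token and exits.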
void error_tok(Token *tok, char *fmt, ...) {
  va_list ap;
  va_start(ap, fmt);
  verror_at(tok->loc, fmt, ap);
}

// Returns true if the current token matches `op`.
bool equal(Token *tok, char *op) {
  return memcmp(tok->loc, op, tok->len) == 0 && op[tok->len] == '\0';
}

// Ensure that the current token is `op`.
Token *skip(Token *tok, char *op) {
  if (!equal(tok, op))
    error_tok(tok, "expected '%s'", op);
  return tok->next;
}
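
// If the current token matches `str`, advances `*rest` past it and
// returns true; otherwise leaves `*rest` at `tok` and returns false.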
bool consume(Token **rest, Token *tok, char *str) {
  if (equal(tok, str)) {
    *rest = tok->next;
    return true;
  }
  *rest = tok;
  return false;
}

// Create a new token.
static Token *new_token(TokenKind kind, char *start, char *end) {
  Token *tok = calloc(1, sizeof(Token));
  tok->kind = kind;
  tok->loc = start;
  tok->len = end - start;
  return tok;
}
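
// Returns true if `p` starts with the string `q`.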
static bool startswith(char *p, char *q) {
  return strncmp(p, q, strlen(q)) == 0;
}

// Returns true if c is valid as the first character of an identifier.
static bool is_ident1(char c) {
  return ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || c == '_';
}

// Returns true if c is valid as a non-first character of an identifier.
static bool is_ident2(char c) {
  return is_ident1(c) || ('0' <= c && c <= '9');
}

// Reads a punctuator token from p and returns its length.
static int read_punct(char *p) {
  if (startswith(p, "==") || startswith(p, "!=") ||
      startswith(p, "<=") || startswith(p, ">="))
    return 2;

  return ispunct(*p) ? 1 : 0;
}
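
// Returns true if the token is one of the reserved keywords.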
static bool is_keyword(Token *tok) {
  static char *kw[] = {
    "return", "if", "else", "for", "while", "int", "sizeof", "char",
  };

  for (int i = 0; i < sizeof(kw) / sizeof(*kw); i++)
    if (equal(tok, kw[i]))
      return true;
  return false;
}
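
// Retags identifier tokens that are keywords as TK_KEYWORD.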
static void convert_keywords(Token *tok) {
  for (Token *t = tok; t->kind != TK_EOF; t = t->next)
    if (is_keyword(t))
      t->kind = TK_KEYWORD;
}

// Tokenizes a given string and returns new tokens.
Token *tokenize(char *p) {
  current_input = p;
  Token head = {};
  Token *cur = &head;

  while (*p) {
    // Skip whitespace characters.
    if (isspace(*p)) {
      p++;
      continue;
    }

    // Numeric literal
    if (isdigit(*p)) {
      cur = cur->next = new_token(TK_NUM, p, p);
      char *q = p;
      cur->val = strtoul(p, &p, 10);
      cur->len = p - q;
      continue;
    }

    // Identifier or keyword
    if (is_ident1(*p)) {
      char *start = p;
      do {
        p++;
      } while (is_ident2(*p));
      cur = cur->next = new_token(TK_IDENT, start, p);
      continue;
    }

    // Punctuators
    int punct_len = read_punct(p);
    if (punct_len) {
      cur = cur->next = new_token(TK_PUNCT, p, p + punct_len);
      p += cur->len;
      continue;
    }

    error_at(p, "invalid token");
  }

  cur = cur->next = new_token(TK_EOF, p, p);
  convert_keywords(head.next);
  return head.next;
}