removed TOKEN_LEN (useless)

This commit is contained in:
Roberto Ierusalimschy 2011-05-03 12:51:16 -03:00
parent c9ce754e38
commit bc1c718cc0
2 changed files with 2 additions and 5 deletions

3
llex.c
View File

@@ -1,5 +1,5 @@
/*
** $Id: llex.c,v 2.45 2011/02/02 14:55:17 roberto Exp roberto $
** $Id: llex.c,v 2.46 2011/02/23 13:13:10 roberto Exp roberto $
** Lexical Analyzer
** See Copyright Notice in lua.h
*/
@@ -67,7 +67,6 @@ void luaX_init (lua_State *L) {
for (i=0; i<NUM_RESERVED; i++) {
TString *ts = luaS_new(L, luaX_tokens[i]);
luaS_fix(ts); /* reserved words are never collected */
lua_assert(strlen(luaX_tokens[i])+1 <= TOKEN_LEN);
ts->tsv.reserved = cast_byte(i+1); /* reserved word */
}
}

4
llex.h
View File

@@ -1,5 +1,5 @@
/*
** $Id: llex.h,v 1.68 2011/02/07 17:14:50 roberto Exp roberto $
** $Id: llex.h,v 1.69 2011/02/23 13:13:10 roberto Exp roberto $
** Lexical Analyzer
** See Copyright Notice in lua.h
*/
@@ -13,8 +13,6 @@
#define FIRST_RESERVED 257
/* maximum length of a reserved word */
#define TOKEN_LEN (sizeof("function")/sizeof(char))
/*