#ifndef LANGSCAN_JAVASCRIPT_H
#define LANGSCAN_JAVASCRIPT_H

#include <stddef.h> /* size_t */

/* Token kinds produced by the JavaScript scanner. */
#define LANGSCAN_JAVASCRIPT_TOKEN_LIST \
  LANGSCAN_JAVASCRIPT_TOKEN(preproc_beg) \
  LANGSCAN_JAVASCRIPT_TOKEN(preproc_end) \
  LANGSCAN_JAVASCRIPT_TOKEN(character) \
  LANGSCAN_JAVASCRIPT_TOKEN(integer) \
  LANGSCAN_JAVASCRIPT_TOKEN(floating) \
  LANGSCAN_JAVASCRIPT_TOKEN(string) \
  LANGSCAN_JAVASCRIPT_TOKEN(ident) \
  LANGSCAN_JAVASCRIPT_TOKEN(punct) \
  LANGSCAN_JAVASCRIPT_TOKEN(comment) \
  LANGSCAN_JAVASCRIPT_TOKEN(space)

typedef enum {
  langscan_javascript_eof = 0,
#define LANGSCAN_JAVASCRIPT_TOKEN(name) langscan_javascript_##name,
  LANGSCAN_JAVASCRIPT_TOKEN_LIST
#undef LANGSCAN_JAVASCRIPT_TOKEN
} langscan_javascript_token_t;

/* Per-tokenizer lexer state: position of the current token, its text,
 * and the callback used to pull input. */
typedef struct {
  int beg_lineno;
  int beg_columnno;
  int beg_byteno;
  int end_lineno;
  int end_columnno;
  int end_byteno;
  int eof;
  char *text;
  int leng;
  size_t (*user_read)(void **user_data_p, char *buf, size_t maxlen);
  void *user_data;
} langscan_javascript_lex_extra_t;

typedef struct langscan_javascript_tokenizer_tag {
  langscan_javascript_lex_extra_t *extra;
  void *scanner;
} langscan_javascript_tokenizer_t;

typedef size_t (*user_read_t)(void **user_data_p, char *buf, size_t maxlen);

langscan_javascript_tokenizer_t *langscan_javascript_make_tokenizer(user_read_t user_read, void *user_data);
langscan_javascript_token_t langscan_javascript_get_token(langscan_javascript_tokenizer_t *tokenizer);
void langscan_javascript_free_tokenizer(langscan_javascript_tokenizer_t *tokenizer);
user_read_t langscan_javascript_tokenizer_get_user_read(langscan_javascript_tokenizer_t *tokenizer);
void *langscan_javascript_tokenizer_get_user_data(langscan_javascript_tokenizer_t *tokenizer);
const char *langscan_javascript_token_name(langscan_javascript_token_t token);

/* Accessors for the token most recently returned by
 * langscan_javascript_get_token(). */
#define langscan_javascript_curtoken_beg_lineno(tokenizer)   ((tokenizer)->extra->beg_lineno)
#define langscan_javascript_curtoken_beg_columnno(tokenizer) ((tokenizer)->extra->beg_columnno)
#define langscan_javascript_curtoken_beg_byteno(tokenizer)   ((tokenizer)->extra->beg_byteno)
#define langscan_javascript_curtoken_end_lineno(tokenizer)   ((tokenizer)->extra->end_lineno)
#define langscan_javascript_curtoken_end_columnno(tokenizer) ((tokenizer)->extra->end_columnno)
#define langscan_javascript_curtoken_end_byteno(tokenizer)   ((tokenizer)->extra->end_byteno)
#define langscan_javascript_curtoken_text(tokenizer)         ((tokenizer)->extra->text)
#define langscan_javascript_curtoken_leng(tokenizer)         ((tokenizer)->extra->leng)

void langscan_javascript_extract_functions(langscan_javascript_tokenizer_t *);

#endif /* LANGSCAN_JAVASCRIPT_H */
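/*
 * Usage sketch (illustrative only, not part of this header): the tokenizer
 * pulls its input through the user_read callback, so a caller typically
 * supplies a small reader and then loops over langscan_javascript_get_token()
 * until it returns langscan_javascript_eof.  The read_file helper and the
 * scan_stream function below are hypothetical names for illustration; a real
 * caller would also need <stdio.h>.
 *
 *   static size_t read_file(void **user_data_p, char *buf, size_t maxlen)
 *   {
 *     FILE *fp = (FILE *)*user_data_p;           // user_data holds a FILE* (assumption)
 *     return fread(buf, 1, maxlen, fp);          // 0 signals end of input
 *   }
 *
 *   static void scan_stream(FILE *fp)
 *   {
 *     langscan_javascript_tokenizer_t *t;
 *     langscan_javascript_token_t tok;
 *
 *     t = langscan_javascript_make_tokenizer(read_file, fp);
 *     while ((tok = langscan_javascript_get_token(t)) != langscan_javascript_eof) {
 *       printf("%s: %.*s\n",
 *              langscan_javascript_token_name(tok),
 *              langscan_javascript_curtoken_leng(t),
 *              langscan_javascript_curtoken_text(t));
 *     }
 *     langscan_javascript_free_tokenizer(t);
 *   }
 */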