start tokenization

pommicket committed 2022-01-11 00:09:11 -05:00
parent 34dfc3238d
commit 97dca844c8
6 changed files with 813 additions and 66 deletions


@@ -15,11 +15,24 @@ global function_macros_size
global object_macros
global function_macros
; accepts EITHER file index OR pointer to filename
function fprint_filename
argument fd
argument file
if file ] 65535 goto print_filename_string
file = file_get(file)
fputs(2, file)
; (fallthrough)
:print_filename_string
fputs(2, file)
return
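
The check against 65535 is what lets fprint_filename accept either kind of argument: values that small are taken to be file indices and resolved to a name through file_get, while anything larger is assumed to already point at a filename string. A rough C analogue of that convention follows; the file table and its contents are invented here purely for illustration.

    #include <stdio.h>
    #include <stdint.h>

    /* hypothetical file table standing in for the compiler's file list */
    static const char *file_table[] = { "<stdin>", "test.c" };
    static const char *file_get(uintptr_t id) { return file_table[id]; }

    /* print either a file index or a pointer to a filename string */
    static void fprint_filename(FILE *out, uintptr_t file) {
        const char *name = (file > 65535)
            ? (const char *)file   /* large value: already a string pointer */
            : file_get(file);      /* small value: look up the file index */
        fputs(name, out);
    }

    int main(void) {
        fprint_filename(stderr, 1);                    /* by index */
        fputc('\n', stderr);
        fprint_filename(stderr, (uintptr_t)"other.c"); /* by pointer */
        fputc('\n', stderr);
        return 0;
    }
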
; accepts EITHER file index OR pointer to filename
function compile_error
argument file
argument line
argument message
fputs(2, file)
fprint_filename(2, file)
fputc(2, ':)
fputn(2, line)
fputs(2, .str_error_prefix)
@@ -27,11 +40,12 @@ function compile_error
fputc(2, 10)
exit(1)
; accepts EITHER file index OR pointer to filename
function compile_warning
argument file
argument line
argument message
fputs(2, file)
fprint_filename(2, file)
fputc(2, ':)
fputn(2, line)
fputs(2, .str_warning_prefix)
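
Both diagnostics now funnel through the same sequence of calls: print the filename via fprint_filename, then ':', the line number, a prefix string, the message, and a newline, with compile_error additionally exiting with status 1. That suggests output of the familiar file:line: error: message shape. A small C sketch of the same idea; the exact text behind .str_error_prefix and .str_warning_prefix is a guess.

    #include <stdio.h>
    #include <stdlib.h>

    /* The ':', line number, and exit(1) come straight from the calls above;
     * the "error:"/"warning:" prefix text is only a guess at the .str_ constants. */
    static void compile_error(const char *filename, int line, const char *message) {
        fprintf(stderr, "%s:%d: error: %s\n", filename, line, message);
        exit(1);
    }

    static void compile_warning(const char *filename, int line, const char *message) {
        fprintf(stderr, "%s:%d: warning: %s\n", filename, line, message);
    }

    int main(void) {
        compile_warning("test.c", 10, "unused variable 'x'");
        compile_error("test.c", 12, "expected ';'");
    }
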
@@ -52,6 +66,7 @@ function compile_warning
#include util.b
#include constants.b
#include preprocess.b
#include tokenize.b
function main
argument argv2
@@ -62,10 +77,13 @@ function main
local output_filename
local pptokens
local processed_pptokens
local tokens
dat_banned_objmacros = 255
dat_banned_fmacros = 255
file_list = malloc(40000)
*1file_list = 255
object_macros = malloc(4000000)
function_macros = malloc(4000000)
@@ -77,15 +95,19 @@ function main
output_filename = argv2
:have_filenames
pptokens = split_into_preprocessing_tokens(input_filename)
print_pptokens(pptokens)
print_separator()
;print_pptokens(pptokens)
;print_separator()
processed_pptokens = malloc(16000000)
translation_phase_4(input_filename, pptokens, processed_pptokens)
free(pptokens)
pptokens = processed_pptokens
print_pptokens(pptokens)
print_object_macros()
print_function_macros()
print_separator()
;print_object_macros()
;print_function_macros()
tokens = malloc(16000000)
tokenize(pptokens, tokens)
print_tokens(tokens)
exit(0)
:usage_error
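
With tokenize.b included, main now runs three passes over fixed 16 MB scratch buffers: split the input file into preprocessing tokens, run translation phase 4 (macro expansion and the other preprocessing directives) into a fresh buffer, then convert the surviving preprocessing tokens into proper tokens and print them. A schematic, runnable C mirror of that driver flow, with every pass replaced by a trivial stand-in; the real signatures and data formats in preprocess.b and tokenize.b will differ.

    #include <stdio.h>
    #include <stdlib.h>

    #define BUF_SIZE 16000000   /* mirrors the fixed 16 MB buffers in main */

    /* Trivial stand-ins for the real passes, for illustration only. */
    static char *split_into_pptokens(const char *filename) {
        char *buf = malloc(BUF_SIZE);
        snprintf(buf, BUF_SIZE, "pptokens(%s)", filename);
        return buf;
    }
    static void translation_phase_4(const char *filename, const char *in, char *out) {
        (void)filename;                                     /* needed for #include, diagnostics */
        snprintf(out, BUF_SIZE, "expanded(%s)", in);        /* macro expansion, directives */
    }
    static void tokenize(const char *pptokens, char *tokens) {
        snprintf(tokens, BUF_SIZE, "tokens(%s)", pptokens); /* pp-tokens -> tokens */
    }

    int main(void) {
        char *pptokens = split_into_pptokens("input.c");
        char *processed = malloc(BUF_SIZE);
        translation_phase_4("input.c", pptokens, processed);
        free(pptokens);
        pptokens = processed;            /* reuse the variable, as main does */
        char *tokens = malloc(BUF_SIZE);
        tokenize(pptokens, tokens);
        puts(tokens);                    /* stands in for print_tokens */
        return 0;
    }
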