@@ -286,21 +286,23 @@ let ParseInput (lexer, errorLogger: ErrorLogger, lexbuf: UnicodeLexing.Lexbuf, d
286286 let filteringErrorLogger = GetErrorLoggerFilteringByScopedPragmas( false , scopedPragmas, errorLogger)
287287 delayLogger.CommitDelayedDiagnostics filteringErrorLogger
288288
/// A source of tokens: each call yields the next token from the underlying
/// lexer (with or without the LexFilter indentation-aware transformation
/// applied, depending on how the tokenizer was constructed).
type Tokenizer = unit -> Parser.token

/// Show all tokens in the stream, for testing purposes (the '--tokenize'
/// test hook). Pulls tokens one at a time from [tokenizer], printing each
/// token name and its source range, and terminates the process with exit
/// code 0 when EOF is reached.
/// NOTE(review): [outputRange] is a '%a' printer defined elsewhere in this
/// file — presumed to render a source range; confirm against its definition.
let ShowAllTokensAndExit (shortFilename, tokenizer: Tokenizer, lexbuf: LexBuffer<char>) =
    // Loop is only broken by the 'exit 0' on EOF below.
    while true do
        printf "tokenize - getting one token from %s\n" shortFilename
        let t = tokenizer ()
        printf "tokenize - got %s @ %a\n" (Parser.token_to_string t) outputRange lexbuf.LexemeRange
        match t with
        | Parser.EOF _ -> exit 0
        | _ -> ()
        // Diagnostic marker: the lexbuf ran past end-of-stream without an EOF token.
        if lexbuf.IsPastEndOfStream then printf "!!! at end of stream\n"

299301
/// Test one of the parser entry points, just for testing purposes (the
/// '--testInteractionParser' hook). Repeatedly parses interactions from the
/// token stream, reporting what was parsed, then exits the process.
/// NOTE(review): 'exit 0' sits after a 'while true' loop and so appears
/// unreachable; kept as-is since it mirrors the original control flow.
let TestInteractionParserAndExit (tokenizer: Tokenizer, lexbuf: LexBuffer<char>) =
    while true do
        // The ignored argument of the lexer callback is the lexbuf the parser
        // threads through; this tokenizer closes over its own state instead.
        match (Parser.interaction (fun _ -> tokenizer ()) lexbuf) with
        | IDefns(l, m) -> printfn "Parsed OK, got %d defs @ %a" l.Length outputRange m
        | IHash (_, m) -> printfn "Parsed OK, got hash @ %a" outputRange m
    exit 0
@@ -341,18 +343,22 @@ let ParseOneInputLexbuf (tcConfig: TcConfig, lexResourceManager, conditionalComp
341343 Lexhelp.usingLexbufForParsing ( lexbuf, filename) ( fun lexbuf ->
342344
343345 // Set up the LexFilter over the token stream
344- let tokenizer = LexFilter.LexFilter( lightStatus, tcConfig.compilingFslib, Lexer.token lexargs skipWhitespaceTokens, lexbuf)
346+ let tokenizer , tokenizeOnly =
347+ match tcConfig.tokenize with
348+ | Unfiltered -> ( fun () -> Lexer.token lexargs skipWhitespaceTokens lexbuf), true
349+ | Only -> LexFilter.LexFilter( lightStatus, tcConfig.compilingFslib, Lexer.token lexargs skipWhitespaceTokens, lexbuf) .GetToken, true
350+ | _ -> LexFilter.LexFilter( lightStatus, tcConfig.compilingFslib, Lexer.token lexargs skipWhitespaceTokens, lexbuf) .GetToken, false
345351
346352 // If '--tokenize' then show the tokens now and exit
347- if tcConfig. tokenizeOnly then
353+ if tokenizeOnly then
348354 ShowAllTokensAndExit( shortFilename, tokenizer, lexbuf)
349355
350356 // Test hook for one of the parser entry points
351357 if tcConfig.testInteractionParser then
352358 TestInteractionParserAndExit ( tokenizer, lexbuf)
353359
354360 // Parse the input
355- let res = ParseInput(( fun _ -> tokenizer.GetToken ()), errorLogger, lexbuf, None, filename, isLastCompiland)
361+ let res = ParseInput(( fun _ -> tokenizer ()), errorLogger, lexbuf, None, filename, isLastCompiland)
356362
357363 // Report the statistics for testing purposes
358364 if tcConfig.reportNumDecls then
0 commit comments