2 % (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
4 \section[Lexical analysis]{Lexical analysis}
6 --------------------------------------------------------
8 There's a known bug in here:
10 If an interface file ends prematurely, Lex tries to
11 do headFS of an empty FastString.
13 An example that provokes the error is
15 f _:_ _forall_ [a] <<<END OF FILE>>>
16 --------------------------------------------------------
22 ifaceParseErr, srcParseErr,
25 Token(..), lexer, ParseResult(..), PState(..),
29 P, thenP, thenP_, returnP, mapP, failP, failMsgP,
30 getSrcLocP, setSrcLocP, getSrcFile,
31 layoutOn, layoutOff, pushContext, popContext
34 #include "HsVersions.h"
36 import Char ( isSpace, toUpper )
37 import List ( isSuffixOf )
39 import IdInfo ( InlinePragInfo(..) )
40 import PrelNames ( mkTupNameStr )
41 import CmdLineOpts ( opt_HiVersion, opt_NoHiCheck )
42 import ForeignCall ( Safety(..) )
43 import NewDemand ( StrictSig(..), Demand(..), Keepity(..),
44 DmdResult(..), mkTopDmdType )
45 import UniqFM ( listToUFM, lookupUFM )
46 import BasicTypes ( Boxity(..) )
47 import SrcLoc ( SrcLoc, incSrcLine, srcLocFile, srcLocLine,
48 replaceSrcLine, mkSrcLoc )
50 import ErrUtils ( Message )
57 import Char ( chr, ord )
58 import PrelRead ( readRational__ ) -- Glasgow non-std
61 %************************************************************************
63 \subsection{Data types}
65 %************************************************************************
67 The token data type, fairly un-interesting except from one
68 constructor, @ITidinfo@, which is used to lazily lex id info (arity,
69 strictness, unfolding etc).
71 The Idea/Observation here is that the renamer needs to scan through
72 all of an interface file before it can continue. But only a fraction
73 of the information contained in the file turns out to be useful, so
74 delaying as much as possible of the scanning and parsing of an
75 interface file Makes Sense (heap profiles of the compiler
76 show a reduction in heap usage by at least a factor of two).
79 Hence, the interface file lexer spots when value declarations are
80 being scanned and return the @ITidinfo@ and @ITtype@ constructors
81 for the type and any other id info for that binding (unfolding, strictness
82 etc). These constructors are applied to the result of lexing these sub-chunks.
84 The lexing of the type and id info is all done lazily, of course, so
85 the scanning (and subsequent parsing) will be done *only* on the ids the
86 renamer finds out that it is interested in. The rest will just be junked.
87 Laziness, you know it makes sense :-)
-- Constructors of the Token data type, one per lexeme class.
-- (Fragment: the 'data Token' header line is above this view.)
-- Groups: keywords, GHC/interface extension keywords, reserved symbols,
-- identifiers (qualified and unqualified), literals, and housekeeping.
91 = ITas -- Haskell keywords
115 | ITscc -- ToDo: remove (we use {-# SCC "..." #-} now)
117 | ITforall -- GHC extension keywords
128 | ITinterface -- interface keywords
136 | ITccall (Bool,Bool,Safety) -- (is_dyn, is_casm, may_gc)
155 | ITunfold InlinePragInfo
163 | ITspecialise_prag -- Pragmas
173 | ITdotdot -- reserved symbols
187 | ITbiglam -- GHC-extension symbols
189 | ITocurly -- special symbols
191 | ITocurlybar -- {|, for type applications
192 | ITccurlybar -- |}, for type applications
205 | ITvarid FAST_STRING -- identifiers
206 | ITconid FAST_STRING
207 | ITvarsym FAST_STRING
208 | ITconsym FAST_STRING
-- Qualified names carry (module, name) pairs.
209 | ITqvarid (FAST_STRING,FAST_STRING)
210 | ITqconid (FAST_STRING,FAST_STRING)
211 | ITqvarsym (FAST_STRING,FAST_STRING)
212 | ITqconsym (FAST_STRING,FAST_STRING)
214 | ITipvarid FAST_STRING -- GHC extension: implicit param: ?x
216 | ITpragma StringBuffer
219 | ITstring FAST_STRING
221 | ITrational Rational
224 | ITprimstring FAST_STRING
226 | ITprimfloat Rational
227 | ITprimdouble Rational
228 | ITlitlit FAST_STRING
230 | ITunknown String -- Used when the lexer can't make sense of it
231 | ITeof -- end of file token
232 deriving Show -- debugging
235 -----------------------------------------------------------------------------
-- Map from pragma keyword (upper-cased by the caller) to its token.
-- Note the deliberate tolerant spellings (SPECIALISE/SPECIALIZE,
-- NOINLINE/NOTINLINE, RULES/RULEZ).
239 pragmaKeywordsFM = listToUFM $
240 map (\ (x,y) -> (_PK_ x,y))
241 [( "SPECIALISE", ITspecialise_prag ),
242 ( "SPECIALIZE", ITspecialise_prag ),
243 ( "SOURCE", ITsource_prag ),
244 ( "INLINE", ITinline_prag ),
245 ( "NOINLINE", ITnoinline_prag ),
246 ( "NOTINLINE", ITnoinline_prag ),
247 ( "LINE", ITline_prag ),
248 ( "RULES", ITrules_prag ),
249 ( "RULEZ", ITrules_prag ), -- american spelling :-)
250 ( "SCC", ITscc_prag ),
251 ( "DEPRECATED", ITdeprecated_prag )
-- Map from Haskell keyword to its token, consulted by lex_id.
-- (Fragment: several keyword entries are not shown in this view.)
254 haskellKeywordsFM = listToUFM $
255 map (\ (x,y) -> (_PK_ x,y))
256 [( "_", ITunderscore ),
259 ( "class", ITclass ),
261 ( "default", ITdefault ),
262 ( "deriving", ITderiving ),
265 ( "hiding", IThiding ),
267 ( "import", ITimport ),
269 ( "infix", ITinfix ),
270 ( "infixl", ITinfixl ),
271 ( "infixr", ITinfixr ),
272 ( "instance", ITinstance ),
274 ( "module", ITmodule ),
275 ( "newtype", ITnewtype ),
277 ( "qualified", ITqualified ),
280 ( "where", ITwhere ),
281 ( "_scc_", ITscc ) -- ToDo: remove
284 isSpecial :: Token -> Bool
285 -- If we see M.x, where x is a keyword, but
286 -- is special, we treat it as just plain M.x,
-- i.e. these "special ids" are only keywords in particular contexts,
-- so a qualified occurrence should lex as an ordinary qualified name.
288 isSpecial ITas = True
289 isSpecial IThiding = True
290 isSpecial ITqualified = True
291 isSpecial ITforall = True
292 isSpecial ITexport = True
293 isSpecial ITlabel = True
294 isSpecial ITdynamic = True
295 isSpecial ITunsafe = True
296 isSpecial ITwith = True
297 isSpecial ITccallconv = True
298 isSpecial ITstdcallconv = True
-- NOTE(review): the catch-all clause (isSpecial _ = False) is expected
-- immediately below this view — confirm it is present.
301 -- IMPORTANT: Keep this in synch with ParseIface.y's var_fs production! (SUP)
-- Map from GHC-extension keyword (source-level and interface-file "__"
-- forms) to its token.  Only consulted when glasgow_exts is on.
-- FIX(review): the list previously contained ("__depends", ITdepends)
-- twice; the redundant second entry has been removed (same key, same
-- token, so behaviour is unchanged).
302 ghcExtensionKeywordsFM = listToUFM $
303 map (\ (x,y) -> (_PK_ x,y))
304 [ ( "forall", ITforall ),
305 ( "foreign", ITforeign ),
306 ( "export", ITexport ),
307 ( "label", ITlabel ),
308 ( "dynamic", ITdynamic ),
309 ( "unsafe", ITunsafe ),
311 ( "stdcall", ITstdcallconv),
312 ( "ccall", ITccallconv),
313 ( "dotnet", ITdotnet),
314 ("_ccall_", ITccall (False, False, PlayRisky)),
315 ("_ccall_GC_", ITccall (False, False, PlaySafe)),
316 ("_casm_", ITccall (False, True, PlayRisky)),
317 ("_casm_GC_", ITccall (False, True, PlaySafe)),
319 -- interface keywords
320 ("__interface", ITinterface),
321 ("__export", IT__export),
322 ("__depends", ITdepends),
323 ("__forall", IT__forall),
324 ("__letrec", ITletrec),
325 ("__coerce", ITcoerce),
326 ("__inline_me", ITinlineMe),
327 ("__inline_call", ITinlineCall),
329 ("__DEFAULT", ITdefaultbranch),
331 ("__integer", ITinteger_lit),
332 ("__float", ITfloat_lit),
333 ("__int64", ITint64_lit),
334 ("__word", ITword_lit),
335 ("__word64", ITword64_lit),
336 ("__rational", ITrational_lit),
337 ("__addr", ITaddr_lit),
338 ("__label", ITlabel_lit),
339 ("__litlit", ITlit_lit),
340 ("__string", ITstring_lit),
343 ("__fuall", ITfuall),
345 ("__P", ITspecialise),
348 ("__D", ITdeprecated),
349 ("__U", ITunfold NoInlinePragInfo),
-- ccall flavours: (is_dyn, is_casm, may_gc) as in the ITccall comment.
351 ("__ccall", ITccall (False, False, PlayRisky)),
352 ("__ccall_GC", ITccall (False, False, PlaySafe)),
353 ("__dyn_ccall", ITccall (True, False, PlayRisky)),
354 ("__dyn_ccall_GC", ITccall (True, False, PlaySafe)),
355 ("__casm", ITccall (False, True, PlayRisky)),
356 ("__dyn_casm", ITccall (True, True, PlayRisky)),
357 ("__casm_GC", ITccall (False, True, PlaySafe)),
358 ("__dyn_casm_GC", ITccall (True, True, PlaySafe)),
-- Map from reserved symbol to its token, consulted by lex_sym.
-- (Fragment: most entries are not shown in this view.)
364 haskellKeySymsFM = listToUFM $
365 map (\ (x,y) -> (_PK_ x,y))
378 ,(".", ITdot) -- sadly, for 'forall a . t'
382 -----------------------------------------------------------------------------
387 - (glaexts) lexing an interface file or -fglasgow-exts
388 - (bol) pointer to beginning of line (for column calculations)
389 - (buf) pointer to beginning of token
390 - (buf) pointer to current char
391 - (atbol) flag indicating whether we're at the beginning of a line
-- Main entry point: skip whitespace and comments, handle LINE pragmas
-- and layout bookkeeping, then hand the next lexeme to lexBOL/lexToken.
-- 'tab' walks character by character tracking line (y), beginning-of-line
-- index (bol) and the at-beginning-of-line flag (atbol).
394 lexer :: (Token -> P a) -> P a
395 lexer cont buf s@(PState{
397 glasgow_exts = glaexts,
403 -- first, start a new lexeme and lose all the whitespace
405 tab line bol atbol (stepOverLexeme buf)
407 line = srcLocLine loc
409 tab y bol atbol buf = -- trace ("tab: " ++ show (I# y) ++ " : " ++ show (currentChar buf)) $
410 case currentChar# buf of
-- '\NUL' is the buffer sentinel: only a real EOF if the buffer is spent.
413 if bufferExhausted (stepOn buf)
414 then cont ITeof buf s'
415 else trace "lexer: misplaced NUL?" $
416 tab y bol atbol (stepOn buf)
418 '\n'# -> let buf' = stepOn buf
419 in tab (y +# 1#) (currentIndex# buf') 1# buf'
421 -- find comments. This got harder in Haskell 98.
-- "--xyz" with a symbol after the dashes is an operator, not a comment.
422 '-'# -> let trundle n =
423 let next = lookAhead# buf n in
424 if next `eqChar#` '-'# then trundle (n +# 1#)
425 else if is_symbol next || n <# 2#
428 (stepOnUntilChar# (stepOnBy# buf n) '\n'#)
431 -- comments and pragmas. We deal with LINE pragmas here,
432 -- and throw out any unrecognised pragmas as comments. Any
433 -- pragmas we know about are dealt with later (after any layout
434 -- processing if necessary).
435 '{'# | lookAhead# buf 1# `eqChar#` '-'# ->
436 if lookAhead# buf 2# `eqChar#` '#'# then
437 if lookAhead# buf 3# `eqChar#` '#'# then is_a_token else
438 case expandWhile# is_space (setCurrentPos# buf 3#) of { buf1->
439 case expandWhile# is_ident (stepOverLexeme buf1) of { buf2->
440 let lexeme = mkFastString -- ToDo: too slow
441 (map toUpper (lexemeToString buf2)) in
442 case lookupUFM pragmaKeywordsFM lexeme of
444 line_prag skip_to_end buf2 s'
445 Just other -> is_a_token
446 Nothing -> skip_to_end (stepOnBy# buf 2#) s'
449 else skip_to_end (stepOnBy# buf 2#) s'
451 skip_to_end = nested_comment (lexer cont)
453 -- special GHC extension: we grok cpp-style #line pragmas
454 '#'# | lexemeIndex buf ==# bol -> -- the '#' must be in column 0
455 case expandWhile# is_space (stepOn buf) of { buf1 ->
456 if is_digit (currentChar# buf1)
457 then line_prag next_line buf1 s'
461 next_line buf = lexer cont (stepOnUntilChar# buf '\n'#)
463 -- tabs have been expanded beforehand
464 c | is_space c -> tab y bol atbol (stepOn buf)
465 | otherwise -> is_a_token
467 where s' = s{loc = replaceSrcLine loc y,
-- Dispatch: at the start of a line, layout processing first (lexBOL);
-- otherwise lex a token directly.
471 is_a_token | atbol /=# 0# = lexBOL cont buf s'
472 | otherwise = lexToken cont glaexts buf s'
474 -- {-# LINE .. #-} pragmas. yeuch.
-- Parse the line number (and optional file name) of a LINE pragma and
-- update the parse-state SrcLoc accordingly before continuing.
475 line_prag cont buf s@PState{loc=loc} =
476 case expandWhile# is_space buf of { buf1 ->
477 case scanNumLit 0 (stepOverLexeme buf1) of { (line,buf2) ->
478 -- subtract one: the line number refers to the *following* line.
479 let real_line = line - 1 in
480 case fromInteger real_line of { i@(I# l) ->
481 -- ToDo, if no filename then we skip the newline.... d'oh
482 case expandWhile# is_space buf2 of { buf3 ->
483 case currentChar# buf3 of
-- A quoted file name follows the line number.
485 case untilEndOfString# (stepOn (stepOverLexeme buf3)) of { buf4 ->
487 file = lexemeToFastString buf4
488 new_buf = stepOn (stepOverLexeme buf4)
490 if nullFastString file
491 then cont new_buf s{loc = replaceSrcLine loc l}
492 else cont new_buf s{loc = mkSrcLoc file i}
494 _other -> cont (stepOverLexeme buf3) s{loc = replaceSrcLine loc l}
-- Skip a (possibly nested) {- ... -} comment, keeping the source line
-- count up to date, then run the continuation.
497 nested_comment :: P a -> P a
498 nested_comment cont buf = loop buf
501 case currentChar# buf of
502 '\NUL'# | bufferExhausted (stepOn buf) ->
503 lexError "unterminated `{-'" buf -- -}
504 '-'# | lookAhead# buf 1# `eqChar#` '}'# ->
505 cont (stepOnBy# buf 2#)
-- A nested open brace: recurse, so close braces pair up correctly.
507 '{'# | lookAhead# buf 1# `eqChar#` '-'# ->
508 nested_comment (nested_comment cont) (stepOnBy# buf 2#)
510 '\n'# -> \ s@PState{loc=loc} ->
511 let buf' = stepOn buf in
512 nested_comment cont buf'
513 s{loc = incSrcLine loc, bol = currentIndex# buf',
516 _ -> nested_comment cont (stepOn buf)
518 -- When we are lexing the first token of a line, check whether we need to
519 -- insert virtual semicolons or close braces due to layout.
-- Layout processing at the beginning of a line: emit a virtual '}' or
-- ';' as dictated by comparing the current column with the enclosing
-- layout context, otherwise lex the token normally.
521 lexBOL :: (Token -> P a) -> P a
522 lexBOL cont buf s@(PState{
524 glasgow_exts = glaexts,
529 if need_close_curly then
530 --trace ("col = " ++ show (I# col) ++ ", layout: inserting '}'") $
-- atbol stays set: there may be more closes/semis to insert next call.
531 cont ITvccurly buf s{atbol = 1#, context = tail ctx}
532 else if need_semi_colon then
533 --trace ("col = " ++ show (I# col) ++ ", layout: inserting ';'") $
534 cont ITsemi buf s{atbol = 0#}
536 lexToken cont glaexts buf s{atbol = 0#}
538 col = currentIndex# buf -# bol
551 Layout n -> col ==# n
-- The main token dispatcher: look at the current character and hand off
-- to the appropriate sub-lexer.  glaexts (Int# used as a Bool, see the
-- 'flag' note below) gates all GHC-extension syntax.
554 lexToken :: (Token -> P a) -> Int# -> P a
555 lexToken cont glaexts buf =
556 -- trace "lexToken" $
557 case currentChar# buf of
559 -- special symbols ----------------------------------------------------
560 '('# | flag glaexts && lookAhead# buf 1# `eqChar#` '#'#
561 -> cont IToubxparen (setCurrentPos# buf 2#)
563 -> cont IToparen (incLexeme buf)
565 ')'# -> cont ITcparen (incLexeme buf)
566 '['# -> cont ITobrack (incLexeme buf)
567 ']'# -> cont ITcbrack (incLexeme buf)
568 ','# -> cont ITcomma (incLexeme buf)
569 ';'# -> cont ITsemi (incLexeme buf)
-- An explicit '}' pops one layout context; failing that it's an error.
570 '}'# -> \ s@PState{context = ctx} ->
572 (_:ctx') -> cont ITccurly (incLexeme buf) s{context=ctx'}
573 _ -> lexError "too many '}'s" buf s
574 '|'# -> case lookAhead# buf 1# of
575 '}'# | flag glaexts -> cont ITccurlybar
576 (setCurrentPos# buf 2#)
577 _ -> lex_sym cont (incLexeme buf)
580 '#'# -> case lookAhead# buf 1# of
581 ')'# | flag glaexts -> cont ITcubxparen (setCurrentPos# buf 2#)
582 '-'# -> case lookAhead# buf 2# of
583 '}'# -> cont ITclose_prag (setCurrentPos# buf 3#)
584 _ -> lex_sym cont (incLexeme buf)
585 _ -> lex_sym cont (incLexeme buf)
-- `` starts a C "literal literal" under glasgow-exts.
587 '`'# | flag glaexts && lookAhead# buf 1# `eqChar#` '`'#
588 -> lex_cstring cont (setCurrentPos# buf 2#)
590 -> cont ITbackquote (incLexeme buf)
592 '{'# -> -- look for "{-##" special iface pragma
593 case lookAhead# buf 1# of
595 -> cont ITocurlybar (setCurrentPos# buf 2#)
596 '-'# -> case lookAhead# buf 2# of
597 '#'# -> case lookAhead# buf 3# of
-- "{-##": slurp the pragma body lazily via doDiscard (see ITpragma).
600 = doDiscard 0# (stepOnBy# (stepOverLexeme buf) 4#) in
601 cont (ITpragma lexeme) buf'
602 _ -> lex_prag cont (setCurrentPos# buf 3#)
603 _ -> cont ITocurly (incLexeme buf)
-- A real open brace switches layout off until the matching '}'.
604 _ -> (layoutOff `thenP_` cont ITocurly) (incLexeme buf)
606 -- strings/characters -------------------------------------------------
607 '\"'#{-"-} -> lex_string cont glaexts [] (incLexeme buf)
608 '\''# -> lex_char (char_end cont) glaexts (incLexeme buf)
610 -- strictness and cpr pragmas and __scc treated specially.
611 '_'# | flag glaexts ->
612 case lookAhead# buf 1# of
613 '_'# -> case lookAhead# buf 2# of
615 lex_demand cont (stepOnUntil (not . isSpace)
616 (stepOnBy# buf 3#)) -- past __S
618 cont ITcprinfo (stepOnBy# buf 3#) -- past __M
621 case prefixMatch (stepOnBy# buf 3#) "cc" of
622 Just buf' -> lex_scc cont (stepOverLexeme buf')
623 Nothing -> lex_id cont glaexts buf
624 _ -> lex_id cont glaexts buf
625 _ -> lex_id cont glaexts buf
627 -- Hexadecimal and octal constants
628 '0'# | (ch `eqChar#` 'x'# || ch `eqChar#` 'X'#) && is_hexdigit ch2
629 -> readNum (after_lexnum cont glaexts) buf' is_hexdigit 16 hex
630 | (ch `eqChar#` 'o'# || ch `eqChar#` 'O'#) && is_octdigit ch2
631 -> readNum (after_lexnum cont glaexts) buf' is_octdigit 8 oct_or_dec
632 where ch = lookAhead# buf 1#
633 ch2 = lookAhead# buf 2#
634 buf' = setCurrentPos# buf 2#
637 if bufferExhausted (stepOn buf) then
640 trace "lexIface: misplaced NUL?" $
641 cont (ITunknown "\NUL") (stepOn buf)
643 '?'# | flag glaexts && is_lower (lookAhead# buf 1#) ->
644 lex_ip cont (incLexeme buf)
-- General fall-through classification by character class.
645 c | is_digit c -> lex_num cont glaexts 0 buf
646 | is_symbol c -> lex_sym cont buf
647 | is_upper c -> lex_con cont glaexts buf
648 | is_ident c -> lex_id cont glaexts buf
649 | otherwise -> lexError "illegal character" buf
651 -- Int# is unlifted, and therefore faster than Bool for flags.
657 -------------------------------------------------------------------------------
-- Recognise a known pragma keyword after "{-#"; anything unrecognised
-- is a panic because lexToken only sends known pragmas here.
-- (Fragment: the 'lex_prag cont buf' header line is above this view.)
661 = case expandWhile# is_space buf of { buf1 ->
662 case expandWhile# is_ident (stepOverLexeme buf1) of { buf2 ->
663 let lexeme = mkFastString (map toUpper (lexemeToString buf2)) in
664 case lookupUFM pragmaKeywordsFM lexeme of
665 Just kw -> cont kw (mergeLexemes buf buf2)
666 Nothing -> panic "lex_prag"
669 -------------------------------------------------------------------------------
-- Lex the body of a string literal.  's' accumulates the character
-- codes in reverse; the closing quote reverses and packs them.
672 lex_string cont glaexts s buf
673 = case currentChar# buf of
675 let buf' = incLexeme buf
676 s' = mkFastStringNarrow (map chr (reverse s))
677 in case currentChar# buf' of
-- A trailing '#' makes it a primitive string (glasgow-exts only),
-- which must fit in 8-bit characters.
678 '#'# | flag glaexts -> if all (<= 0xFF) s
679 then cont (ITprimstring s') (incLexeme buf')
680 else lexError "primitive string literal must contain only characters <= \'\\xFF\'" buf'
681 _ -> cont (ITstring s') buf'
683 -- ignore \& in a string, deal with string gaps
684 '\\'# | next_ch `eqChar#` '&'#
685 -> lex_string cont glaexts s buf'
687 -> lex_stringgap cont glaexts s (incLexeme buf)
689 where next_ch = lookAhead# buf 1#
690 buf' = setCurrentPos# buf 2#
692 _ -> lex_char (lex_next_string cont s) glaexts buf
-- Skip a string gap (backslash, whitespace, backslash), counting
-- newlines; any non-space character inside the gap is an error.
694 lex_stringgap cont glaexts s buf
695 = let buf' = incLexeme buf in
696 case currentChar# buf of
697 '\n'# -> \st@PState{loc = loc} -> lex_stringgap cont glaexts s buf'
698 st{loc = incSrcLine loc}
699 '\\'# -> lex_string cont glaexts s buf'
700 c | is_space c -> lex_stringgap cont glaexts s buf'
701 other -> charError buf'
703 lex_next_string cont s glaexts c buf = lex_string cont glaexts (c:s) buf
-- Lex a single character (or escape sequence), passing its code point
-- to the continuation as an Int.
705 lex_char :: (Int# -> Int -> P a) -> Int# -> P a
706 lex_char cont glaexts buf
707 = case currentChar# buf of
708 '\\'# -> lex_escape (cont glaexts) (incLexeme buf)
709 c | is_any c -> cont glaexts (I# (ord# c)) (incLexeme buf)
710 other -> charError buf
-- After a character literal's body: expect the closing quote, then
-- check for a trailing '#' marking a primitive char literal.
712 char_end cont glaexts c buf
713 = case currentChar# buf of
714 '\''# -> let buf' = incLexeme buf in
715 case currentChar# buf' of
717 -> cont (ITprimchar c) (incLexeme buf')
718 _ -> cont (ITchar c) buf'
-- Decode a character escape after the backslash: single-letter escapes,
-- control (^X), numeric (decimal/hex/octal), and the named ASCII escapes.
-- (Fragment: the 'lex_escape cont buf' header line is above this view.)
722 = let buf' = incLexeme buf in
723 case currentChar# buf of
724 'a'# -> cont (ord '\a') buf'
725 'b'# -> cont (ord '\b') buf'
726 'f'# -> cont (ord '\f') buf'
727 'n'# -> cont (ord '\n') buf'
728 'r'# -> cont (ord '\r') buf'
729 't'# -> cont (ord '\t') buf'
730 'v'# -> cont (ord '\v') buf'
731 '\\'# -> cont (ord '\\') buf'
732 '"'# -> cont (ord '\"') buf'
733 '\''# -> cont (ord '\'') buf'
-- Control escape: \^X for X in '@'..'_' maps to code (X - '@').
734 '^'# -> let c = currentChar# buf' in
735 if c `geChar#` '@'# && c `leChar#` '_'#
736 then cont (I# (ord# c -# ord# '@'#)) (incLexeme buf')
739 'x'# -> readNum (after_charnum cont) buf' is_hexdigit 16 hex
740 'o'# -> readNum (after_charnum cont) buf' is_octdigit 8 oct_or_dec
742 -> readNum (after_charnum cont) buf is_digit 10 oct_or_dec
-- Named ASCII escapes (\NUL, \SOH, ...) via longest prefix match.
744 _ -> case [ (c,buf2) | (p,c) <- silly_escape_chars,
745 Just buf2 <- [prefixMatch buf p] ] of
746 (c,buf2):_ -> cont (ord c) buf2
-- Validate a numeric character escape against the Unicode range.
749 after_charnum cont i buf
750 = if i >= 0 && i <= 0x10FFFF
751 then cont (fromInteger i) buf
-- Generic positional-number reader: 'is_digit' selects the digit set,
-- 'base' the radix, 'conv' maps a digit character to its value.
754 readNum cont buf is_digit base conv = read buf 0
756 = case currentChar# buf of { c ->
758 then read (incLexeme buf) (i*base + (toInteger (I# (conv c))))
-- Digit classification / conversion helpers for the readers above.
764 || (c `geChar#` 'a'# && c `leChar#` 'f'#)
765 || (c `geChar#` 'A'# && c `leChar#` 'F'#)
767 hex c | is_digit c = ord# c -# ord# '0'#
768 | otherwise = ord# (to_lower c) -# ord# 'a'# +# 10#
769 oct_or_dec c = ord# c -# ord# '0'#
771 is_octdigit c = c `geChar#` '0'# && c `leChar#` '7'#
774 | c `geChar#` 'A'# && c `leChar#` 'Z'#
775 = chr# (ord# c -# (ord# 'A'# -# ord# 'a'#))
778 charError buf = lexError "error in character literal" buf
-- Table of named ASCII escapes, e.g. ("NUL", '\NUL').
780 silly_escape_chars = [
817 -------------------------------------------------------------------------------
-- Lex an interface-file strictness signature (__S...) into a StrictSig.
-- A trailing 'b'/'m' selects the bottom/CPR result, otherwise top.
819 lex_demand cont buf =
820 case read_em [] buf of { (ls,buf') ->
821 case currentChar# buf' of
822 'b'# -> cont (ITstrict (StrictSig (mkTopDmdType ls BotRes))) (incLexeme buf')
823 'm'# -> cont (ITstrict (StrictSig (mkTopDmdType ls RetCPR))) (incLexeme buf')
824 _ -> cont (ITstrict (StrictSig (mkTopDmdType ls TopRes))) buf'
-- read_em: parse a list of single-letter demands into 'acc' (reversed
-- on return); 'C'/'D'/'U'/'S' introduce compound demands.
828 case currentChar# buf of
829 'L'# -> read_em (Lazy : acc) (stepOn buf)
830 'A'# -> read_em (Abs : acc) (stepOn buf)
831 'V'# -> read_em (Eval : acc) (stepOn buf)
832 'X'# -> read_em (Err : acc) (stepOn buf)
833 'B'# -> read_em (Bot : acc) (stepOn buf)
834 ')'# -> (reverse acc, stepOn buf)
835 'C'# -> do_call acc (stepOnBy# buf 2#)
836 'D'# -> do_unpack1 Defer acc (stepOnBy# buf 1#)
837 'U'# -> do_unpack1 Drop acc (stepOnBy# buf 1#)
838 'S'# -> do_unpack1 Keep acc (stepOnBy# buf 1#)
839 _ -> (reverse acc, buf)
-- Seq demand: with components in parens, or bare (empty component list).
841 do_unpack1 keepity acc buf
842 = case currentChar# buf of
843 '('# -> do_unpack2 keepity acc (stepOnBy# buf 1#)
844 _ -> read_em (Seq keepity [] : acc) buf
846 do_unpack2 keepity acc buf
847 = case read_em [] buf of
848 (stuff, rest) -> read_em (Seq keepity stuff : acc) rest
-- Call demand: exactly one component demand expected.
851 = case read_em [] buf of
852 ([dmd], rest) -> read_em (Call dmd : acc) rest
-- __scc variants: a following 'C' means "all CAFs".
856 case currentChar# buf of
857 'C'# -> cont ITsccAllCafs (incLexeme buf)
858 other -> cont ITscc buf
860 -----------------------------------------------------------------------------
-- Lex a decimal numeric literal, including the floating-point forms
-- (fraction and optional exponent) and glasgow-exts primitive variants.
863 lex_num :: (Token -> P a) -> Int# -> Integer -> P a
864 lex_num cont glaexts acc buf =
865 case scanNumLit acc buf of
867 case currentChar# buf' of
868 '.'# | is_digit (lookAhead# buf' 1#) ->
869 -- this case is not optimised at all, as the
870 -- presence of floating point numbers in interface
871 -- files is not that common. (ToDo)
872 case expandWhile# is_digit (incLexeme buf') of
873 buf2 -> -- points to first non digit char
875 let l = case currentChar# buf2 of
-- Optional exponent: e/E followed by an optional sign and digits.
881 = let buf3 = incLexeme buf2 in
882 case currentChar# buf3 of
883 '-'# -> expandWhile# is_digit (incLexeme buf3)
884 '+'# -> expandWhile# is_digit (incLexeme buf3)
885 x | is_digit x -> expandWhile# is_digit buf3
888 v = readRational__ (lexemeToString l)
-- '#'/'##' suffixes (glasgow-exts) select float#/double# literals.
890 in case currentChar# l of -- glasgow exts only
891 '#'# | flag glaexts -> let l' = incLexeme l in
892 case currentChar# l' of
893 '#'# -> cont (ITprimdouble v) (incLexeme l')
894 _ -> cont (ITprimfloat v) l'
895 _ -> cont (ITrational v) l
897 _ -> after_lexnum cont glaexts acc' buf'
-- Integer literal epilogue: '#' suffix makes it a primitive int.
899 after_lexnum cont glaexts i buf
900 = case currentChar# buf of
901 '#'# | flag glaexts -> cont (ITprimint i) (incLexeme buf)
902 _ -> cont (ITinteger i) buf
904 -----------------------------------------------------------------------------
905 -- C "literal literal"s (i.e. things like ``NULL'', ``stdout'' etc.)
907 -- we lexemeToFastString on the bit between the ``''s, but include the
908 -- quotes in the full lexeme.
-- Lex a C "literal literal" (``NULL''): the ITlitlit payload is the text
-- between the backquotes (the -2 position trims the closing quotes),
-- while the full lexeme keeps the quotes.
910 lex_cstring cont buf =
911 case expandUntilMatch (stepOverLexeme buf) "\'\'" of
912 Just buf' -> cont (ITlitlit (lexemeToFastString
913 (setCurrentPos# buf' (negateInt# 2#))))
914 (mergeLexemes buf buf')
915 Nothing -> lexError "unterminated ``" buf
917 -----------------------------------------------------------------------------
918 -- identifiers, symbols etc.
-- Implicit-parameter name (?x): slurp the identifier after the '?'.
-- (Fragment: the 'lex_ip cont buf =' header line is above this view.)
921 case expandWhile# is_ident buf of
922 buf' -> cont (ITipvarid lexeme) buf'
923 where lexeme = lexemeToFastString buf'
-- Lex an identifier: slurp ident chars (plus trailing '#'s under
-- glasgow-exts), then try the keyword tables before defaulting to ITvarid.
925 lex_id cont glaexts buf =
926 let buf1 = expandWhile# is_ident buf in
929 case (if flag glaexts
930 then expandWhile# (eqChar# '#'#) buf1 -- slurp trailing hashes
931 else buf1) of { buf' ->
933 let lexeme = lexemeToFastString buf' in
935 case _scc_ "Lex.haskellKeyword" lookupUFM haskellKeywordsFM lexeme of {
936 Just kwd_token -> --trace ("hkeywd: "++_UNPK_(lexeme)) $
940 let var_token = cont (ITvarid lexeme) buf' in
942 if not (flag glaexts)
946 case lookupUFM ghcExtensionKeywordsFM lexeme of {
947 Just kwd_token -> cont kwd_token buf';
-- Lex an operator symbol: slurp symbol chars, then check the reserved
-- symbol table before classifying via mk_var_token.
-- (Fragment: the 'lex_sym cont buf =' header line is above this view.)
954 case expandWhile# is_symbol buf of
955 buf' -> case lookupUFM haskellKeySymsFM lexeme of {
956 Just kwd_token -> --trace ("keysym: "++unpackFS lexeme) $
957 cont kwd_token buf' ;
958 Nothing -> --trace ("sym: "++unpackFS lexeme) $
959 cont (mk_var_token lexeme) buf'
961 where lexeme = lexemeToFastString buf'
964 -- lex_con recursively collects components of a qualified identifer.
965 -- The argument buf is the StringBuffer representing the lexeme
966 -- identified so far, where the next character is upper-case.
968 lex_con cont glaexts buf =
969 -- trace ("con: "{-++unpackFS lexeme-}) $
970 let empty_buf = stepOverLexeme buf in
971 case expandWhile# is_ident empty_buf of { buf1 ->
972 case slurp_trailing_hashes buf1 glaexts of { con_buf ->
-- Three lexemes are carved out: the just-read constructor part, the
-- module prefix so far (minus the trailing '.'), and the whole thing.
974 let all_buf = mergeLexemes buf con_buf
976 con_lexeme = lexemeToFastString con_buf
977 mod_lexeme = lexemeToFastString (decLexeme buf)
978 all_lexeme = lexemeToFastString all_buf
-- No prefix accumulated yet => a plain ITconid; otherwise qualified.
981 | emptyLexeme buf = cont (ITconid con_lexeme) all_buf
982 | otherwise = cont (ITqconid (mod_lexeme,con_lexeme)) all_buf
-- A following '.' may extend the qualification (M.N.x).
985 case currentChar# all_buf of
986 '.'# -> maybe_qualified cont glaexts all_lexeme
987 (incLexeme all_buf) just_a_conid
-- After "M.", decide what follows the dot: the special names [] (,,) ()
-- (->), a further constructor, or an ordinary qualified id/symbol.
-- 'just_a_conid' is the fallback continuation when the dot turns out
-- not to introduce a qualified name.
992 maybe_qualified cont glaexts mod buf just_a_conid =
993 -- trace ("qid: "{-++unpackFS lexeme-}) $
994 case currentChar# buf of
995 '['# -> -- Special case for []
996 case lookAhead# buf 1# of
997 ']'# -> cont (ITqconid (mod,SLIT("[]"))) (setCurrentPos# buf 2#)
1000 '('# -> -- Special case for (,,,)
1001 -- This *is* necessary to deal with e.g. "instance C PrelBase.(,,)"
1002 case lookAhead# buf 1# of
1003 '#'# | flag glaexts -> case lookAhead# buf 2# of
1004 ','# -> lex_ubx_tuple cont mod (setCurrentPos# buf 3#)
1007 ')'# -> cont (ITqconid (mod,SLIT("()"))) (setCurrentPos# buf 2#)
1008 ','# -> lex_tuple cont mod (setCurrentPos# buf 2#) just_a_conid
1011 '-'# -> case lookAhead# buf 1# of
1012 '>'# -> cont (ITqconid (mod,SLIT("(->)"))) (setCurrentPos# buf 2#)
1013 _ -> lex_id3 cont glaexts mod buf just_a_conid
1015 _ -> lex_id3 cont glaexts mod buf just_a_conid
-- Lex the part after "M.": an upper-case start recurses into lex_con
-- (deeper qualification), a symbol start yields a qualified symbol,
-- otherwise an ordinary qualified varid — unless it is a non-special
-- keyword, in which case we back off to just the conid.
1018 lex_id3 cont glaexts mod buf just_a_conid
1019 | is_upper (currentChar# buf) =
1020 lex_con cont glaexts buf
1022 | is_symbol (currentChar# buf) =
1024 start_new_lexeme = stepOverLexeme buf
1026 -- trace ("lex_id31 "{-++unpackFS lexeme-}) $
1027 case expandWhile# is_symbol start_new_lexeme of { buf' ->
1029 lexeme = lexemeToFastString buf'
1030 -- real lexeme is M.<sym>
1031 new_buf = mergeLexemes buf buf'
1033 cont (mk_qvar_token mod lexeme) new_buf
1034 -- wrong, but arguably morally right: M... is now a qvarsym
1039 start_new_lexeme = stepOverLexeme buf
1041 -- trace ("lex_id32 "{-++unpackFS lexeme-}) $
1042 case expandWhile# is_ident start_new_lexeme of { buf1 ->
1047 case slurp_trailing_hashes buf1 glaexts of { buf' ->
1050 lexeme = lexemeToFastString buf'
1051 new_buf = mergeLexemes buf buf'
1052 is_a_qvarid = cont (mk_qvar_token mod lexeme) new_buf
1054 case _scc_ "Lex.haskellKeyword" lookupUFM haskellKeywordsFM lexeme of {
1055 Nothing -> is_a_qvarid ;
1057 Just kwd_token | isSpecial kwd_token -- special ids (as, qualified, hiding) shouldn't be
1058 -> is_a_qvarid -- recognised as keywords here.
1060 -> just_a_conid -- avoid M.where etc.
-- Under glasgow-exts, extend the lexeme over any trailing '#'s
-- (MagicHash-style names); otherwise leave the buffer alone.
1063 slurp_trailing_hashes buf glaexts
1064 | flag glaexts = expandWhile# (`eqChar#` '#'#) buf
-- mk_var_token: classify an unqualified name by its first character:
-- upper => conid, ident => varid, ':' => consym, otherwise varsym.
-- (Fragment: the 'mk_var_token pk_str' header line is above this view.)
1069 | is_upper f = ITconid pk_str
1070 | is_ident f = ITvarid pk_str
1071 | f `eqChar#` ':'# = ITconsym pk_str
1072 | otherwise = ITvarsym pk_str
1074 (C# f) = _HEAD_ pk_str
1075 -- tl = _TAIL_ pk_str
-- | Qualify an identifier token with the module name @m@: classify the
-- name with mk_var_token, then wrap it in the corresponding qualified
-- constructor.  Any unexpected classification becomes ITunknown.
mk_qvar_token m token = qualify (mk_var_token token)
  where
    qualify (ITconid  n) = ITqconid  (m,n)
    qualify (ITvarid  n) = ITqvarid  (m,n)
    qualify (ITconsym n) = ITqconsym (m,n)
    qualify (ITvarsym n) = ITqvarsym (m,n)
    qualify _            = ITunknown (show token)
1087 ----------------------------------------------------------------------------
1088 Horrible stuff for dealing with M.(,,,)
-- Count the commas of M.(,,,) / M.(#,,#) and produce the corresponding
-- tuple constructor name; 'back_off' is the fallback continuation.
1091 lex_tuple cont mod buf back_off =
1095 case currentChar# buf of
1096 ','# -> go (n+1) (stepOn buf)
1097 ')'# -> cont (ITqconid (mod, snd (mkTupNameStr Boxed n))) (stepOn buf)
-- Unboxed variant: expects the closing "#)".
1100 lex_ubx_tuple cont mod buf back_off =
1104 case currentChar# buf of
1105 ','# -> go (n+1) (stepOn buf)
1106 '#'# -> case lookAhead# buf 1# of
1107 ')'# -> cont (ITqconid (mod, snd (mkTupNameStr Unboxed n)))
1113 -----------------------------------------------------------------------------
1114 doDiscard rips along really fast, looking for a '##-}',
1115 indicating the end of the pragma we're skipping
-- Skim forward to the closing "##-}" of an iface pragma, returning the
-- skipped lexeme and the buffer past the terminator.  'inStr' (Int# as
-- Bool) tracks whether we are inside a string literal, since a "##-}"
-- inside a string must not terminate the pragma.
1118 doDiscard inStr buf =
1119 case currentChar# buf of
1120 '#'# | inStr ==# 0# ->
1121 case lookAhead# buf 1# of { '#'# ->
1122 case lookAhead# buf 2# of { '-'# ->
1123 case lookAhead# buf 3# of { '}'# ->
1124 (lexemeToBuffer buf, stepOverLexeme (setCurrentPos# buf 4#));
1125 _ -> doDiscard inStr (incLexeme buf) };
1126 _ -> doDiscard inStr (incLexeme buf) };
1127 _ -> doDiscard inStr (incLexeme buf) }
-- Count backslashes leftwards to decide whether this quote is escaped.
1131 odd_slashes buf flg i# =
1132 case lookAhead# buf i# of
1133 '\\'# -> odd_slashes buf (not flg) (i# -# 1#)
1136 not_inStr = if inStr ==# 0# then 1# else 0#
1138 case lookAhead# buf (negateInt# 1#) of --backwards, actually
1139 '\\'# -> -- escaping something..
1140 if odd_slashes buf True (negateInt# 2#)
1141 then -- odd number of slashes, " is escaped.
1142 doDiscard inStr (incLexeme buf)
1143 else -- even number of slashes, \ is escaped.
1144 doDiscard not_inStr (incLexeme buf)
1145 _ -> doDiscard not_inStr (incLexeme buf)
-- '"' inside a char literal ('"') must not toggle string mode.
1147 '\''# | inStr ==# 0# ->
1148 case lookAhead# buf 1# of { '"'# ->
1149 case lookAhead# buf 2# of { '\''# ->
1150 doDiscard inStr (setCurrentPos# buf 3#);
1151 _ -> doDiscard inStr (incLexeme buf) };
1152 _ -> doDiscard inStr (incLexeme buf) }
1154 _ -> doDiscard inStr (incLexeme buf)
1158 -----------------------------------------------------------------------------
-- Parser/lexer state threaded through the P monad.
-- (Fragment: several fields — e.g. loc, bol, atbol — are not shown here.)
1169 data PState = PState {
1171 glasgow_exts :: Int#, -- Int# used as a Bool: nonzero => extensions on
1174 context :: [LayoutContext] -- stack of layout contexts
-- The parser monad: a function of the input buffer and state, producing
-- a ParseResult.  (Fragment: the remaining arrows of the type are not
-- shown in this view.)
1177 type P a = StringBuffer -- Input string
1182 returnP a buf s = POk s a
-- Kleisli composition: run m, feed its result and state to k;
-- failures short-circuit.
1184 thenP :: P a -> (a -> P b) -> P b
1185 m `thenP` k = \ buf s ->
1187 POk s1 a -> k a buf s1
1188 PFailed err -> PFailed err
1190 thenP_ :: P a -> P b -> P b
1191 m `thenP_` k = m `thenP` \_ -> k
-- Monadic map over a list, collecting the results.
1193 mapP :: (a -> P b) -> [a] -> P [b]
1194 mapP f [] = returnP []
1197 mapP f as `thenP` \bs ->
failP :: String -> P a
-- ^ Abort parsing with a plain-string diagnostic (wrapped via 'text').
failP msg = \_buf _s -> PFailed (text msg)

failMsgP :: Message -> P a
-- ^ Abort parsing with an already-rendered error Message.
failMsgP msg = \_buf _s -> PFailed msg
-- Fail with the current source location prepended to the message.
1206 lexError :: String -> P a
1207 lexError str buf s@PState{ loc = loc }
1208 = failMsgP (hcat [ppr loc, text ": ", text str]) buf s
-- Read the current source location out of the parse state.
1210 getSrcLocP :: P SrcLoc
1211 getSrcLocP buf s@(PState{ loc = loc }) = POk s loc
1213 -- use a temporary SrcLoc for the duration of the argument
1213 -- use a temporary SrcLoc for the duration of the argument
-- (Fragment: the POk branch restoring the outer state is not shown.)
1214 setSrcLocP :: SrcLoc -> P a -> P a
1215 setSrcLocP new_loc p buf s =
1216 case p buf s{ loc=new_loc } of
1218 PFailed e -> PFailed e
-- Read the current file name from the parse-state SrcLoc.
1220 getSrcFile :: P FAST_STRING
1221 getSrcFile buf s@(PState{ loc = loc }) = POk s (srcLocFile loc)
-- Push a new layout context onto the context stack.
1223 pushContext :: LayoutContext -> P ()
1224 pushContext ctxt buf s@(PState{ context = ctx }) = POk s{context = ctxt:ctx} ()
1228 This special case in layoutOn is to handle layout contexts which are
1229 indented the same or less than the current context. This is illegal
1230 according to the Haskell spec, so we have to arrange to close the
1231 current context. eg.
1236 after the first 'where', the sequence of events is:
1238 - layout system inserts a ';' (column 0)
1239 - parser begins a new context at column 0
1240 - parser shifts ';' (legal empty declaration)
1241 - parser sees 'class': parse error (we're still in the inner context)
1243 trouble is, by the time we know we need a new context, the lexer has
1244 already generated the ';'. Hacky solution is as follows: since we
1245 know the column of the next token (it's the column number of the new
1246 context), we set the ACTUAL column number of the new context to this
1247 number plus one. Hence the next time the lexer is called, a '}' will
1248 be generated to close the new context straight away. Furthermore, we
1249 have to set the atbol flag so that the ';' that the parser shifted as
1250 part of the new context is re-generated.
1252 when the new context is *less* indented than the current one:
1254 f = f where g = g where
1257 - current context: column 12.
1258 - on seeing 'h' (column 0), the layout system inserts '}'
1259 - parser starts a new context, column 0
1260 - parser sees '}', uses it to close new context
1261 - we still need to insert another '}' followed by a ';',
1262 hence the atbol trick.
1264 There's also a special hack in here to deal with
1271 i.e. the inner context is at the same indentation level as the outer
1272 context. This is strictly illegal according to Haskell 98, but
1273 there's a lot of existing code using this style and it doesn't make
1274 any sense to disallow it, since empty 'do' lists don't make sense.
-- Open a new layout context at the current column.  If the new context
-- would be no more indented than the enclosing one (>= when 'strict',
-- > otherwise), record column+1 and set atbol so a virtual '}' and ';'
-- are generated next time round — see the long comment above.
1277 layoutOn :: Bool -> P ()
1278 layoutOn strict buf s@(PState{ bol = bol, context = ctx }) =
1279 let offset = lexemeIndex buf -# bol in
1282 | if strict then prev_off >=# offset else prev_off ># offset ->
1283 --trace ("layout on, column: " ++ show (I# offset)) $
1284 POk s{ context = Layout (offset +# 1#) : ctx, atbol = 1# } ()
1286 --trace ("layout on, column: " ++ show (I# offset)) $
1287 POk s{ context = Layout offset : ctx } ()
-- | Disable layout processing: push a NoLayout context, so explicit
-- braces govern scoping until the matching popContext.
layoutOff _buf state@(PState{ context = ctxt }) =
  let ctxt' = NoLayout : ctxt
  in POk state{ context = ctxt' } ()
-- Pop the innermost layout context; popping an empty stack is a parse
-- error at the current location.
1294 popContext = \ buf s@(PState{ context = ctx, loc = loc }) ->
1296 (_:tl) -> POk s{ context = tl } ()
1297 [] -> PFailed (srcParseErr buf loc)
1300 Note that if the name of the file we're processing ends
1301 with `hi-boot', we accept it on faith as having the right
1302 version. This is done so that .hi-boot files that come
1303 with hsc don't have to be updated before every release,
1304 *and* it allows us to share .hi-boot files with versions
1305 of hsc that don't have .hi version checking (e.g., ghc-2.10's)
1307 If the version number is 0, the checking is also turned off.
1308 (needed to deal with GHC.hi only!)
1310 Once we can assume we're compiling with a version of ghc that
1311 supports interface file checking, we can drop the special
-- Check an interface file's version stamp.  Version 0 and the
-- opt_NoHiCheck flag disable checking; files named *hi-boot are
-- accepted on faith (see the comment above).
1314 checkVersion :: Maybe Integer -> P ()
1315 checkVersion mb@(Just v) buf s@(PState{loc = loc})
1316 | (v==0) || (v == fromInt opt_HiVersion) || opt_NoHiCheck = POk s ()
1317 | otherwise = PFailed (ifaceVersionErr mb loc ([]::[Token]){-Todo-})
1318 checkVersion mb@Nothing buf s@(PState{loc = loc})
1319 | "hi-boot" `isSuffixOf` (_UNPK_ (srcLocFile loc)) = POk s ()
1320 | otherwise = PFailed (ifaceVersionErr mb loc ([]::[Token]){-Todo-})
1322 -----------------------------------------------------------------
-- Error message for a parse failure in an interface file, quoting the
-- offending lexeme.
1324 ifaceParseErr :: StringBuffer -> SrcLoc -> Message
1326 = hsep [ppr l, ptext SLIT("Interface file parse error; on input `"),
1327 text (lexemeToString s), char '\'']
-- Error message for an interface-file version mismatch, showing the
-- expected compiler version and the one found (if any).
1329 ifaceVersionErr hi_vers l toks
1330 = hsep [ppr l, ptext SLIT("Interface file version error;"),
1331 ptext SLIT("Expected"), int opt_HiVersion,
1332 ptext SLIT("found "), pp_version]
1336 Nothing -> ptext SLIT("pre ghc-3.02 version")
1337 Just v -> ptext SLIT("version") <+> integer v
1339 -----------------------------------------------------------------------------
-- Error message for a source-file parse failure; an empty lexeme
-- suggests a layout (indentation) problem rather than a bad token.
1341 srcParseErr :: StringBuffer -> SrcLoc -> Message
1345 then ptext SLIT(": parse error (possibly incorrect indentation)")
1346 else hcat [ptext SLIT(": parse error on input "),
1347 char '`', text token, char '\'']
1350 token = lexemeToString s