2 % (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
4 \section[Lexical analysis]{Lexical analysis}
6 --------------------------------------------------------
8 There's a known bug in here:
10 If an interface file ends prematurely, Lex tries to
11 do headFS of an empty FastString.
13 An example that provokes the error is
15 f _:_ _forall_ [a] <<<END OF FILE>>>
16 --------------------------------------------------------
22 ifaceParseErr, srcParseErr,
25 Token(..), lexer, ParseResult(..), PState(..),
29 P, thenP, thenP_, returnP, mapP, failP, failMsgP,
30 getSrcLocP, setSrcLocP, getSrcFile,
31 layoutOn, layoutOff, pushContext, popContext
34 #include "HsVersions.h"
36 import Char ( isSpace, toUpper )
37 import List ( isSuffixOf )
39 import PrelNames ( mkTupNameStr )
40 import CmdLineOpts ( opt_HiVersion, opt_NoHiCheck )
41 import ForeignCall ( Safety(..) )
42 import NewDemand ( StrictSig(..), Demand(..), Demands(..),
43 DmdResult(..), mkTopDmdType, evalDmd, lazyDmd )
44 import UniqFM ( listToUFM, lookupUFM )
45 import BasicTypes ( Boxity(..) )
46 import SrcLoc ( SrcLoc, incSrcLine, srcLocFile, srcLocLine,
47 replaceSrcLine, mkSrcLoc )
49 import ErrUtils ( Message )
56 import Char ( chr, ord )
57 import PrelRead ( readRational__ ) -- Glasgow non-std
60 %************************************************************************
62 \subsection{Data types}
64 %************************************************************************
66 The token data type, fairly un-interesting except from one
67 constructor, @ITidinfo@, which is used to lazily lex id info (arity,
68 strictness, unfolding etc).
70 The Idea/Observation here is that the renamer needs to scan through
71 all of an interface file before it can continue. But only a fraction
72 of the information contained in the file turns out to be useful, so
73 delaying as much as possible of the scanning and parsing of an
74 interface file Makes Sense. (Heap profiles of the compiler
75 show a reduction in heap usage by at least a factor of two.)
78 Hence, the interface file lexer spots when value declarations are
80 being scanned and returns the @ITidinfo@ and @ITtype@ constructors
80 for the type and any other id info for that binding (unfolding, strictness
81 etc). These constructors are applied to the result of lexing these sub-chunks.
83 The lexing of the type and id info is all done lazily, of course, so
84 the scanning (and subsequent parsing) will be done *only* on the ids the
85 renamer finds it is interested in. The rest will just be junked.
86 Laziness, you know it makes sense :-)
90 = ITas -- Haskell keywords
114 | ITscc -- ToDo: remove (we use {-# SCC "..." #-} now)
116 | ITforall -- GHC extension keywords
127 | ITinterface -- interface keywords
135 | ITccall (Bool,Bool,Safety) -- (is_dyn, is_casm, may_gc)
162 | ITspecialise_prag -- Pragmas
172 | ITdotdot -- reserved symbols
186 | ITbiglam -- GHC-extension symbols
188 | ITocurly -- special symbols
190 | ITocurlybar -- {|, for type applications
191 | ITccurlybar -- |}, for type applications
204 | ITvarid FAST_STRING -- identifiers
205 | ITconid FAST_STRING
206 | ITvarsym FAST_STRING
207 | ITconsym FAST_STRING
208 | ITqvarid (FAST_STRING,FAST_STRING)
209 | ITqconid (FAST_STRING,FAST_STRING)
210 | ITqvarsym (FAST_STRING,FAST_STRING)
211 | ITqconsym (FAST_STRING,FAST_STRING)
213 | ITdupipvarid FAST_STRING -- GHC extension: implicit param: ?x
214 | ITsplitipvarid FAST_STRING -- GHC extension: implicit param: %x
216 | ITpragma StringBuffer
219 | ITstring FAST_STRING
221 | ITrational Rational
224 | ITprimstring FAST_STRING
226 | ITprimfloat Rational
227 | ITprimdouble Rational
228 | ITlitlit FAST_STRING
230 | ITunknown String -- Used when the lexer can't make sense of it
231 | ITeof -- end of file token
232 deriving Show -- debugging
235 -----------------------------------------------------------------------------
239 pragmaKeywordsFM = listToUFM $
240 map (\ (x,y) -> (_PK_ x,y))
241 [( "SPECIALISE", ITspecialise_prag ),
242 ( "SPECIALIZE", ITspecialise_prag ),
243 ( "SOURCE", ITsource_prag ),
244 ( "INLINE", ITinline_prag ),
245 ( "NOINLINE", ITnoinline_prag ),
246 ( "NOTINLINE", ITnoinline_prag ),
247 ( "LINE", ITline_prag ),
248 ( "RULES", ITrules_prag ),
249 ( "RULEZ", ITrules_prag ), -- american spelling :-)
250 ( "SCC", ITscc_prag ),
251 ( "DEPRECATED", ITdeprecated_prag )
254 haskellKeywordsFM = listToUFM $
255 map (\ (x,y) -> (_PK_ x,y))
256 [( "_", ITunderscore ),
259 ( "class", ITclass ),
261 ( "default", ITdefault ),
262 ( "deriving", ITderiving ),
265 ( "hiding", IThiding ),
267 ( "import", ITimport ),
269 ( "infix", ITinfix ),
270 ( "infixl", ITinfixl ),
271 ( "infixr", ITinfixr ),
272 ( "instance", ITinstance ),
274 ( "module", ITmodule ),
275 ( "newtype", ITnewtype ),
277 ( "qualified", ITqualified ),
280 ( "where", ITwhere ),
281 ( "_scc_", ITscc ) -- ToDo: remove
284 isSpecial :: Token -> Bool
285 -- If we see M.x, where x is a keyword, but
286 -- is special, we treat is as just plain M.x,
288 isSpecial ITas = True
289 isSpecial IThiding = True
290 isSpecial ITqualified = True
291 isSpecial ITforall = True
292 isSpecial ITexport = True
293 isSpecial ITlabel = True
294 isSpecial ITdynamic = True
295 isSpecial ITunsafe = True
296 isSpecial ITwith = True
297 isSpecial ITccallconv = True
298 isSpecial ITstdcallconv = True
301 -- IMPORTANT: Keep this in synch with ParseIface.y's var_fs production! (SUP)
302 ghcExtensionKeywordsFM = listToUFM $
303 map (\ (x,y) -> (_PK_ x,y))
304 [ ( "forall", ITforall ),
305 ( "foreign", ITforeign ),
306 ( "export", ITexport ),
307 ( "label", ITlabel ),
308 ( "dynamic", ITdynamic ),
309 ( "unsafe", ITunsafe ),
311 ( "stdcall", ITstdcallconv),
312 ( "ccall", ITccallconv),
313 ( "dotnet", ITdotnet),
314 ("_ccall_", ITccall (False, False, PlayRisky)),
315 ("_ccall_GC_", ITccall (False, False, PlaySafe)),
316 ("_casm_", ITccall (False, True, PlayRisky)),
317 ("_casm_GC_", ITccall (False, True, PlaySafe)),
319 -- interface keywords
320 ("__interface", ITinterface),
321 ("__export", IT__export),
322 ("__depends", ITdepends),
323 ("__forall", IT__forall),
324 ("__letrec", ITletrec),
325 ("__coerce", ITcoerce),
326 ("__inline_me", ITinlineMe),
327 ("__inline_call", ITinlineCall),
328 ("__depends", ITdepends),
329 ("__DEFAULT", ITdefaultbranch),
331 ("__integer", ITinteger_lit),
332 ("__float", ITfloat_lit),
333 ("__int64", ITint64_lit),
334 ("__word", ITword_lit),
335 ("__word64", ITword64_lit),
336 ("__rational", ITrational_lit),
337 ("__addr", ITaddr_lit),
338 ("__label", ITlabel_lit),
339 ("__litlit", ITlit_lit),
340 ("__string", ITstring_lit),
343 ("__fuall", ITfuall),
345 ("__P", ITspecialise),
348 ("__D", ITdeprecated),
351 ("__ccall", ITccall (False, False, PlayRisky)),
352 ("__ccall_GC", ITccall (False, False, PlaySafe)),
353 ("__dyn_ccall", ITccall (True, False, PlayRisky)),
354 ("__dyn_ccall_GC", ITccall (True, False, PlaySafe)),
355 ("__casm", ITccall (False, True, PlayRisky)),
356 ("__dyn_casm", ITccall (True, True, PlayRisky)),
357 ("__casm_GC", ITccall (False, True, PlaySafe)),
358 ("__dyn_casm_GC", ITccall (True, True, PlaySafe)),
364 haskellKeySymsFM = listToUFM $
365 map (\ (x,y) -> (_PK_ x,y))
378 ,(".", ITdot) -- sadly, for 'forall a . t'
382 -----------------------------------------------------------------------------
387 - (glaexts) lexing an interface file or -fglasgow-exts
388 - (bol) pointer to beginning of line (for column calculations)
389 - (buf) pointer to beginning of token
390 - (buf) pointer to the current character (the buffer's current position)
391 - (atbol) flag indicating whether we're at the beginning of a line
394 lexer :: (Token -> P a) -> P a
395 lexer cont buf s@(PState{
397 glasgow_exts = glaexts,
403 -- first, start a new lexeme and lose all the whitespace
405 tab line bol atbol (stepOverLexeme buf)
407 line = srcLocLine loc
409 tab y bol atbol buf = --trace ("tab: " ++ show (I# y) ++ " : " ++ show (currentChar buf)) $
410 case currentChar# buf of
413 if bufferExhausted (stepOn buf)
414 then cont ITeof buf s'
415 else trace "lexer: misplaced NUL?" $
416 tab y bol atbol (stepOn buf)
418 '\n'# -> let buf' = stepOn buf
419 in tab (y +# 1#) (currentIndex# buf') 1# buf'
421 -- find comments. This got harder in Haskell 98.
422 '-'# -> let trundle n =
423 let next = lookAhead# buf n in
424 if next `eqChar#` '-'# then trundle (n +# 1#)
425 else if is_symbol next || n <# 2#
428 (stepOnUntilChar# (stepOnBy# buf n) '\n'#)
431 -- comments and pragmas. We deal with LINE pragmas here,
432 -- and throw out any unrecognised pragmas as comments. Any
433 -- pragmas we know about are dealt with later (after any layout
434 -- processing if necessary).
435 '{'# | lookAhead# buf 1# `eqChar#` '-'# ->
436 if lookAhead# buf 2# `eqChar#` '#'# then
437 if lookAhead# buf 3# `eqChar#` '#'# then is_a_token else
438 case expandWhile# is_space (setCurrentPos# buf 3#) of { buf1->
439 case expandWhile# is_ident (stepOverLexeme buf1) of { buf2->
440 let lexeme = mkFastString -- ToDo: too slow
441 (map toUpper (lexemeToString buf2)) in
442 case lookupUFM pragmaKeywordsFM lexeme of
443 -- ignore RULES pragmas when -fglasgow-exts is off
444 Just ITrules_prag | not (flag glaexts) ->
445 skip_to_end (stepOnBy# buf 2#) s'
447 line_prag skip_to_end buf2 s'
448 Just other -> is_a_token
449 Nothing -> skip_to_end (stepOnBy# buf 2#) s'
452 else skip_to_end (stepOnBy# buf 2#) s'
454 skip_to_end = skipNestedComment (lexer cont)
456 -- special GHC extension: we grok cpp-style #line pragmas
457 '#'# | lexemeIndex buf ==# bol -> -- the '#' must be in column 0
458 case expandWhile# is_space (stepOn buf) of { buf1 ->
459 if is_digit (currentChar# buf1)
460 then line_prag next_line buf1 s'
464 next_line buf = lexer cont (stepOnUntilChar# buf '\n'#)
466 -- tabs have been expanded beforehand
467 c | is_space c -> tab y bol atbol (stepOn buf)
468 | otherwise -> is_a_token
470 where s' = s{loc = replaceSrcLine loc y,
474 is_a_token | atbol /=# 0# = lexBOL cont buf s'
475 | otherwise = lexToken cont glaexts buf s'
477 -- {-# LINE .. #-} pragmas. yeuch.
478 line_prag cont buf s@PState{loc=loc} =
479 case expandWhile# is_space buf of { buf1 ->
480 case scanNumLit 0 (stepOverLexeme buf1) of { (line,buf2) ->
481 -- subtract one: the line number refers to the *following* line.
482 let real_line = line - 1 in
483 case fromInteger real_line of { i@(I# l) ->
484 -- ToDo, if no filename then we skip the newline.... d'oh
485 case expandWhile# is_space buf2 of { buf3 ->
486 case currentChar# buf3 of
488 case untilEndOfString# (stepOn (stepOverLexeme buf3)) of { buf4 ->
490 file = lexemeToFastString buf4
491 new_buf = stepOn (stepOverLexeme buf4)
493 if nullFastString file
494 then cont new_buf s{loc = replaceSrcLine loc l}
495 else cont new_buf s{loc = mkSrcLoc file i}
497 _other -> cont (stepOverLexeme buf3) s{loc = replaceSrcLine loc l}
500 skipNestedComment :: P a -> P a
501 skipNestedComment cont buf state = skipNestedComment' (loc state) cont buf state
503 skipNestedComment' :: SrcLoc -> P a -> P a
504 skipNestedComment' orig_loc cont buf = loop buf
507 case currentChar# buf of
508 '-'# | lookAhead# buf 1# `eqChar#` '}'# -> cont (stepOnBy# buf 2#)
510 '{'# | lookAhead# buf 1# `eqChar#` '-'# ->
512 (skipNestedComment' orig_loc cont)
515 '\n'# -> \ s@PState{loc=loc} ->
516 let buf' = stepOn buf in
517 loop buf' s{loc = incSrcLine loc,
518 bol = currentIndex# buf',
521 -- pass the original SrcLoc to lexError so that the error is
522 -- reported at the line it was originally on, not the line at
523 -- the end of the file.
524 '\NUL'# | bufferExhausted (stepOn buf) ->
525 \s -> lexError "unterminated `{-'" buf s{loc=orig_loc} -- -}
527 _ -> loop (stepOn buf)
529 -- When we are lexing the first token of a line, check whether we need to
530 -- insert virtual semicolons or close braces due to layout.
532 lexBOL :: (Token -> P a) -> P a
533 lexBOL cont buf s@(PState{
535 glasgow_exts = glaexts,
540 if need_close_curly then
541 --trace ("col = " ++ show (I# col) ++ ", layout: inserting '}'") $
542 cont ITvccurly buf s{atbol = 1#, context = tail ctx}
543 else if need_semi_colon then
544 --trace ("col = " ++ show (I# col) ++ ", layout: inserting ';'") $
545 cont ITsemi buf s{atbol = 0#}
547 lexToken cont glaexts buf s{atbol = 0#}
549 col = currentIndex# buf -# bol
562 Layout n -> col ==# n
565 lexToken :: (Token -> P a) -> Int# -> P a
566 lexToken cont glaexts buf =
567 -- trace "lexToken" $
568 case currentChar# buf of
570 -- special symbols ----------------------------------------------------
571 '('# | flag glaexts && lookAhead# buf 1# `eqChar#` '#'#
572 -> cont IToubxparen (setCurrentPos# buf 2#)
574 -> cont IToparen (incLexeme buf)
576 ')'# -> cont ITcparen (incLexeme buf)
577 '['# -> cont ITobrack (incLexeme buf)
578 ']'# -> cont ITcbrack (incLexeme buf)
579 ','# -> cont ITcomma (incLexeme buf)
580 ';'# -> cont ITsemi (incLexeme buf)
581 '}'# -> \ s@PState{context = ctx} ->
583 (_:ctx') -> cont ITccurly (incLexeme buf) s{context=ctx'}
584 _ -> lexError "too many '}'s" buf s
585 '|'# -> case lookAhead# buf 1# of
586 '}'# | flag glaexts -> cont ITccurlybar
587 (setCurrentPos# buf 2#)
588 _ -> lex_sym cont (incLexeme buf)
591 '#'# -> case lookAhead# buf 1# of
592 ')'# | flag glaexts -> cont ITcubxparen (setCurrentPos# buf 2#)
593 '-'# -> case lookAhead# buf 2# of
594 '}'# -> cont ITclose_prag (setCurrentPos# buf 3#)
595 _ -> lex_sym cont (incLexeme buf)
596 _ -> lex_sym cont (incLexeme buf)
598 '`'# | flag glaexts && lookAhead# buf 1# `eqChar#` '`'#
599 -> lex_cstring cont (setCurrentPos# buf 2#)
601 -> cont ITbackquote (incLexeme buf)
603 '{'# -> -- look for "{-##" special iface pragma
604 case lookAhead# buf 1# of
606 -> cont ITocurlybar (setCurrentPos# buf 2#)
607 '-'# -> case lookAhead# buf 2# of
608 '#'# -> case lookAhead# buf 3# of
612 (\ cont lexeme buf' -> cont (ITpragma lexeme) buf')
614 (stepOnBy# (stepOverLexeme buf) 4#)
615 _ -> lex_prag cont (setCurrentPos# buf 3#)
616 _ -> cont ITocurly (incLexeme buf)
617 _ -> (layoutOff `thenP_` cont ITocurly) (incLexeme buf)
619 -- strings/characters -------------------------------------------------
620 '\"'#{-"-} -> lex_string cont glaexts [] (incLexeme buf)
621 '\''# -> lex_char (char_end cont) glaexts (incLexeme buf)
623 -- strictness and cpr pragmas and __scc treated specially.
624 '_'# | flag glaexts ->
625 case lookAhead# buf 1# of
626 '_'# -> case lookAhead# buf 2# of
628 lex_demand cont (stepOnUntil (not . isSpace)
629 (stepOnBy# buf 3#)) -- past __S
631 cont ITcprinfo (stepOnBy# buf 3#) -- past __M
634 case prefixMatch (stepOnBy# buf 3#) "cc" of
635 Just buf' -> lex_scc cont (stepOverLexeme buf')
636 Nothing -> lex_id cont glaexts buf
637 _ -> lex_id cont glaexts buf
638 _ -> lex_id cont glaexts buf
640 -- Hexadecimal and octal constants
641 '0'# | (ch `eqChar#` 'x'# || ch `eqChar#` 'X'#) && is_hexdigit ch2
642 -> readNum (after_lexnum cont glaexts) buf' is_hexdigit 16 hex
643 | (ch `eqChar#` 'o'# || ch `eqChar#` 'O'#) && is_octdigit ch2
644 -> readNum (after_lexnum cont glaexts) buf' is_octdigit 8 oct_or_dec
645 where ch = lookAhead# buf 1#
646 ch2 = lookAhead# buf 2#
647 buf' = setCurrentPos# buf 2#
650 if bufferExhausted (stepOn buf) then
653 trace "lexIface: misplaced NUL?" $
654 cont (ITunknown "\NUL") (stepOn buf)
656 '?'# | flag glaexts && is_lower (lookAhead# buf 1#) ->
657 lex_ip ITdupipvarid cont (incLexeme buf)
658 '%'# | flag glaexts && is_lower (lookAhead# buf 1#) ->
659 lex_ip ITsplitipvarid cont (incLexeme buf)
660 c | is_digit c -> lex_num cont glaexts 0 buf
661 | is_symbol c -> lex_sym cont buf
662 | is_upper c -> lex_con cont glaexts buf
663 | is_ident c -> lex_id cont glaexts buf
664 | otherwise -> lexError "illegal character" buf
666 -- Int# is unlifted, and therefore faster than Bool for flags.
672 -------------------------------------------------------------------------------
676 = case expandWhile# is_space buf of { buf1 ->
677 case expandWhile# is_ident (stepOverLexeme buf1) of { buf2 ->
678 let lexeme = mkFastString (map toUpper (lexemeToString buf2)) in
679 case lookupUFM pragmaKeywordsFM lexeme of
680 Just kw -> cont kw (mergeLexemes buf buf2)
681 Nothing -> panic "lex_prag"
684 -------------------------------------------------------------------------------
687 lex_string cont glaexts s buf
688 = case currentChar# buf of
690 let buf' = incLexeme buf
691 s' = mkFastStringNarrow (map chr (reverse s))
692 in case currentChar# buf' of
693 '#'# | flag glaexts -> if all (<= 0xFF) s
694 then cont (ITprimstring s') (incLexeme buf')
695 else lexError "primitive string literal must contain only characters <= \'\\xFF\'" buf'
696 _ -> cont (ITstring s') buf'
698 -- ignore \& in a string, deal with string gaps
699 '\\'# | next_ch `eqChar#` '&'#
700 -> lex_string cont glaexts s buf'
702 -> lex_stringgap cont glaexts s (incLexeme buf)
704 where next_ch = lookAhead# buf 1#
705 buf' = setCurrentPos# buf 2#
707 _ -> lex_char (lex_next_string cont s) glaexts buf
709 lex_stringgap cont glaexts s buf
710 = let buf' = incLexeme buf in
711 case currentChar# buf of
712 '\n'# -> \st@PState{loc = loc} -> lex_stringgap cont glaexts s buf'
713 st{loc = incSrcLine loc}
714 '\\'# -> lex_string cont glaexts s buf'
715 c | is_space c -> lex_stringgap cont glaexts s buf'
716 other -> charError buf'
718 lex_next_string cont s glaexts c buf = lex_string cont glaexts (c:s) buf
720 lex_char :: (Int# -> Int -> P a) -> Int# -> P a
721 lex_char cont glaexts buf
722 = case currentChar# buf of
723 '\\'# -> lex_escape (cont glaexts) (incLexeme buf)
724 c | is_any c -> cont glaexts (I# (ord# c)) (incLexeme buf)
725 other -> charError buf
727 char_end cont glaexts c buf
728 = case currentChar# buf of
729 '\''# -> let buf' = incLexeme buf in
730 case currentChar# buf' of
732 -> cont (ITprimchar c) (incLexeme buf')
733 _ -> cont (ITchar c) buf'
737 = let buf' = incLexeme buf in
738 case currentChar# buf of
739 'a'# -> cont (ord '\a') buf'
740 'b'# -> cont (ord '\b') buf'
741 'f'# -> cont (ord '\f') buf'
742 'n'# -> cont (ord '\n') buf'
743 'r'# -> cont (ord '\r') buf'
744 't'# -> cont (ord '\t') buf'
745 'v'# -> cont (ord '\v') buf'
746 '\\'# -> cont (ord '\\') buf'
747 '"'# -> cont (ord '\"') buf'
748 '\''# -> cont (ord '\'') buf'
749 '^'# -> let c = currentChar# buf' in
750 if c `geChar#` '@'# && c `leChar#` '_'#
751 then cont (I# (ord# c -# ord# '@'#)) (incLexeme buf')
754 'x'# -> readNum (after_charnum cont) buf' is_hexdigit 16 hex
755 'o'# -> readNum (after_charnum cont) buf' is_octdigit 8 oct_or_dec
757 -> readNum (after_charnum cont) buf is_digit 10 oct_or_dec
759 _ -> case [ (c,buf2) | (p,c) <- silly_escape_chars,
760 Just buf2 <- [prefixMatch buf p] ] of
761 (c,buf2):_ -> cont (ord c) buf2
764 after_charnum cont i buf
765 = if i >= 0 && i <= 0x10FFFF
766 then cont (fromInteger i) buf
769 readNum cont buf is_digit base conv = read buf 0
771 = case currentChar# buf of { c ->
773 then read (incLexeme buf) (i*base + (toInteger (I# (conv c))))
779 || (c `geChar#` 'a'# && c `leChar#` 'f'#)
780 || (c `geChar#` 'A'# && c `leChar#` 'F'#)
782 hex c | is_digit c = ord# c -# ord# '0'#
783 | otherwise = ord# (to_lower c) -# ord# 'a'# +# 10#
784 oct_or_dec c = ord# c -# ord# '0'#
786 is_octdigit c = c `geChar#` '0'# && c `leChar#` '7'#
789 | c `geChar#` 'A'# && c `leChar#` 'Z'#
790 = chr# (ord# c -# (ord# 'A'# -# ord# 'a'#))
793 charError buf = lexError "error in character literal" buf
795 silly_escape_chars = [
832 -------------------------------------------------------------------------------
834 lex_demand cont buf =
835 case read_em [] buf of { (ls,buf') ->
836 case currentChar# buf' of
837 'b'# -> cont (ITstrict (StrictSig (mkTopDmdType ls BotRes))) (incLexeme buf')
838 'm'# -> cont (ITstrict (StrictSig (mkTopDmdType ls RetCPR))) (incLexeme buf')
839 _ -> cont (ITstrict (StrictSig (mkTopDmdType ls TopRes))) buf'
843 case currentChar# buf of
844 'T'# -> read_em (Top : acc) (stepOn buf)
845 'L'# -> read_em (lazyDmd : acc) (stepOn buf)
846 'A'# -> read_em (Abs : acc) (stepOn buf)
847 'V'# -> read_em (evalDmd : acc) (stepOn buf) -- Temporary, until
848 -- we've recompiled prelude etc
849 'C'# -> do_unary Call acc (stepOnBy# buf 2#) -- Skip 'C('
851 'U'# -> do_seq1 Eval acc (stepOnBy# buf 1#)
852 'D'# -> do_seq1 Defer acc (stepOnBy# buf 1#)
853 'S'# -> do_seq1 (Box . Eval) acc (stepOnBy# buf 1#)
855 _ -> (reverse acc, buf)
858 = case currentChar# buf of
859 '('# -> do_seq2 fn acc (stepOnBy# buf 1#)
860 _ -> read_em (fn (Poly Abs) : acc) buf
863 = case read_em [] buf of { (dmds, buf) ->
864 case currentChar# buf of
865 ')'# -> read_em (fn (Prod dmds) : acc)
867 '*'# -> ASSERT( length dmds == 1 )
868 read_em (fn (Poly (head dmds)) : acc)
869 (stepOnBy# buf 2#) -- Skip '*)'
873 = case read_em [] buf of
874 ([dmd], rest) -> read_em (fn dmd : acc) (stepOn rest) -- Skip ')'
878 case currentChar# buf of
879 'C'# -> cont ITsccAllCafs (incLexeme buf)
880 other -> cont ITscc buf
882 -----------------------------------------------------------------------------
885 lex_num :: (Token -> P a) -> Int# -> Integer -> P a
886 lex_num cont glaexts acc buf =
887 case scanNumLit acc buf of
889 case currentChar# buf' of
890 '.'# | is_digit (lookAhead# buf' 1#) ->
891 -- this case is not optimised at all, as the
892 -- presence of floating point numbers in interface
893 -- files is not that common. (ToDo)
894 case expandWhile# is_digit (incLexeme buf') of
895 buf2 -> -- points to first non digit char
897 let l = case currentChar# buf2 of
903 = let buf3 = incLexeme buf2 in
904 case currentChar# buf3 of
905 '-'# -> expandWhile# is_digit (incLexeme buf3)
906 '+'# -> expandWhile# is_digit (incLexeme buf3)
907 x | is_digit x -> expandWhile# is_digit buf3
910 v = readRational__ (lexemeToString l)
912 in case currentChar# l of -- glasgow exts only
913 '#'# | flag glaexts -> let l' = incLexeme l in
914 case currentChar# l' of
915 '#'# -> cont (ITprimdouble v) (incLexeme l')
916 _ -> cont (ITprimfloat v) l'
917 _ -> cont (ITrational v) l
919 _ -> after_lexnum cont glaexts acc' buf'
921 after_lexnum cont glaexts i buf
922 = case currentChar# buf of
923 '#'# | flag glaexts -> cont (ITprimint i) (incLexeme buf)
924 _ -> cont (ITinteger i) buf
926 -----------------------------------------------------------------------------
927 -- C "literal literal"s (i.e. things like ``NULL'', ``stdout'' etc.)
929 -- we lexemeToFastString on the bit between the ``''s, but include the
930 -- quotes in the full lexeme.
932 lex_cstring cont buf =
933 case expandUntilMatch (stepOverLexeme buf) "\'\'" of
934 Just buf' -> cont (ITlitlit (lexemeToFastString
935 (setCurrentPos# buf' (negateInt# 2#))))
936 (mergeLexemes buf buf')
937 Nothing -> lexError "unterminated ``" buf
939 -----------------------------------------------------------------------------
940 -- identifiers, symbols etc.
942 lex_ip ip_constr cont buf =
943 case expandWhile# is_ident buf of
944 buf' -> cont (ip_constr (tailFS lexeme)) buf'
945 where lexeme = lexemeToFastString buf'
947 lex_id cont glaexts buf =
948 let buf1 = expandWhile# is_ident buf in
951 case (if flag glaexts
952 then expandWhile# (eqChar# '#'#) buf1 -- slurp trailing hashes
953 else buf1) of { buf' ->
955 let lexeme = lexemeToFastString buf' in
957 case _scc_ "Lex.haskellKeyword" lookupUFM haskellKeywordsFM lexeme of {
958 Just kwd_token -> --trace ("hkeywd: "++_UNPK_(lexeme)) $
962 let var_token = cont (ITvarid lexeme) buf' in
964 if not (flag glaexts)
968 case lookupUFM ghcExtensionKeywordsFM lexeme of {
969 Just kwd_token -> cont kwd_token buf';
976 case expandWhile# is_symbol buf of
977 buf' -> case lookupUFM haskellKeySymsFM lexeme of {
978 Just kwd_token -> --trace ("keysym: "++unpackFS lexeme) $
979 cont kwd_token buf' ;
980 Nothing -> --trace ("sym: "++unpackFS lexeme) $
981 cont (mk_var_token lexeme) buf'
983 where lexeme = lexemeToFastString buf'
986 -- lex_con recursively collects components of a qualified identifer.
987 -- The argument buf is the StringBuffer representing the lexeme
988 -- identified so far, where the next character is upper-case.
990 lex_con cont glaexts buf =
991 -- trace ("con: "{-++unpackFS lexeme-}) $
992 let empty_buf = stepOverLexeme buf in
993 case expandWhile# is_ident empty_buf of { buf1 ->
994 case slurp_trailing_hashes buf1 glaexts of { con_buf ->
996 let all_buf = mergeLexemes buf con_buf
998 con_lexeme = lexemeToFastString con_buf
999 mod_lexeme = lexemeToFastString (decLexeme buf)
1000 all_lexeme = lexemeToFastString all_buf
1003 | emptyLexeme buf = cont (ITconid con_lexeme) all_buf
1004 | otherwise = cont (ITqconid (mod_lexeme,con_lexeme)) all_buf
1007 case currentChar# all_buf of
1008 '.'# -> maybe_qualified cont glaexts all_lexeme
1009 (incLexeme all_buf) just_a_conid
1014 maybe_qualified cont glaexts mod buf just_a_conid =
1015 -- trace ("qid: "{-++unpackFS lexeme-}) $
1016 case currentChar# buf of
1017 '['# -> -- Special case for []
1018 case lookAhead# buf 1# of
1019 ']'# -> cont (ITqconid (mod,SLIT("[]"))) (setCurrentPos# buf 2#)
1022 '('# -> -- Special case for (,,,)
1023 -- This *is* necessary to deal with e.g. "instance C PrelBase.(,,)"
1024 case lookAhead# buf 1# of
1025 '#'# | flag glaexts -> case lookAhead# buf 2# of
1026 ','# -> lex_ubx_tuple cont mod (setCurrentPos# buf 3#)
1029 ')'# -> cont (ITqconid (mod,SLIT("()"))) (setCurrentPos# buf 2#)
1030 ','# -> lex_tuple cont mod (setCurrentPos# buf 2#) just_a_conid
1033 '-'# -> case lookAhead# buf 1# of
1034 '>'# -> cont (ITqconid (mod,SLIT("(->)"))) (setCurrentPos# buf 2#)
1035 _ -> lex_id3 cont glaexts mod buf just_a_conid
1037 _ -> lex_id3 cont glaexts mod buf just_a_conid
1040 lex_id3 cont glaexts mod buf just_a_conid
1041 | is_upper (currentChar# buf) =
1042 lex_con cont glaexts buf
1044 | is_symbol (currentChar# buf) =
1046 start_new_lexeme = stepOverLexeme buf
1048 -- trace ("lex_id31 "{-++unpackFS lexeme-}) $
1049 case expandWhile# is_symbol start_new_lexeme of { buf' ->
1051 lexeme = lexemeToFastString buf'
1052 -- real lexeme is M.<sym>
1053 new_buf = mergeLexemes buf buf'
1055 cont (mk_qvar_token mod lexeme) new_buf
1056 -- wrong, but arguably morally right: M... is now a qvarsym
1061 start_new_lexeme = stepOverLexeme buf
1063 -- trace ("lex_id32 "{-++unpackFS lexeme-}) $
1064 case expandWhile# is_ident start_new_lexeme of { buf1 ->
1069 case slurp_trailing_hashes buf1 glaexts of { buf' ->
1072 lexeme = lexemeToFastString buf'
1073 new_buf = mergeLexemes buf buf'
1074 is_a_qvarid = cont (mk_qvar_token mod lexeme) new_buf
1076 case _scc_ "Lex.haskellKeyword" lookupUFM haskellKeywordsFM lexeme of {
1077 Nothing -> is_a_qvarid ;
1079 Just kwd_token | isSpecial kwd_token -- special ids (as, qualified, hiding) shouldn't be
1080 -> is_a_qvarid -- recognised as keywords here.
1082 -> just_a_conid -- avoid M.where etc.
1085 slurp_trailing_hashes buf glaexts
1086 | flag glaexts = expandWhile# (`eqChar#` '#'#) buf
1091 | is_upper f = ITconid pk_str
1092 | is_ident f = ITvarid pk_str
1093 | f `eqChar#` ':'# = ITconsym pk_str
1094 | otherwise = ITvarsym pk_str
1096 (C# f) = _HEAD_ pk_str
1097 -- tl = _TAIL_ pk_str
1099 mk_qvar_token m token =
1100 -- trace ("mk_qvar ") $
1101 case mk_var_token token of
1102 ITconid n -> ITqconid (m,n)
1103 ITvarid n -> ITqvarid (m,n)
1104 ITconsym n -> ITqconsym (m,n)
1105 ITvarsym n -> ITqvarsym (m,n)
1106 _ -> ITunknown (show token)
1109 ----------------------------------------------------------------------------
1110 Horrible stuff for dealing with M.(,,,)
1113 lex_tuple cont mod buf back_off =
1117 case currentChar# buf of
1118 ','# -> go (n+1) (stepOn buf)
1119 ')'# -> cont (ITqconid (mod, snd (mkTupNameStr Boxed n))) (stepOn buf)
1122 lex_ubx_tuple cont mod buf back_off =
1126 case currentChar# buf of
1127 ','# -> go (n+1) (stepOn buf)
1128 '#'# -> case lookAhead# buf 1# of
1129 ')'# -> cont (ITqconid (mod, snd (mkTupNameStr Unboxed n)))
1135 -----------------------------------------------------------------------------
1136 'lexPragma' rips along really fast, looking for a '##-}',
1137 indicating the end of the pragma we're skipping
1140 lexPragma cont contf inStr buf =
1141 case currentChar# buf of
1142 '#'# | inStr ==# 0# ->
1143 case lookAhead# buf 1# of { '#'# ->
1144 case lookAhead# buf 2# of { '-'# ->
1145 case lookAhead# buf 3# of { '}'# ->
1146 contf cont (lexemeToBuffer buf)
1147 (stepOverLexeme (setCurrentPos# buf 4#));
1148 _ -> lexPragma cont contf inStr (incLexeme buf) };
1149 _ -> lexPragma cont contf inStr (incLexeme buf) };
1150 _ -> lexPragma cont contf inStr (incLexeme buf) }
1154 odd_slashes buf flg i# =
1155 case lookAhead# buf i# of
1156 '\\'# -> odd_slashes buf (not flg) (i# -# 1#)
1159 not_inStr = if inStr ==# 0# then 1# else 0#
1161 case lookAhead# buf (negateInt# 1#) of --backwards, actually
1162 '\\'# -> -- escaping something..
1163 if odd_slashes buf True (negateInt# 2#)
1164 then -- odd number of slashes, " is escaped.
1165 lexPragma cont contf inStr (incLexeme buf)
1166 else -- even number of slashes, \ is escaped.
1167 lexPragma cont contf not_inStr (incLexeme buf)
1168 _ -> lexPragma cont contf not_inStr (incLexeme buf)
1170 '\''# | inStr ==# 0# ->
1171 case lookAhead# buf 1# of { '"'# ->
1172 case lookAhead# buf 2# of { '\''# ->
1173 lexPragma cont contf inStr (setCurrentPos# buf 3#);
1174 _ -> lexPragma cont contf inStr (incLexeme buf) };
1175 _ -> lexPragma cont contf inStr (incLexeme buf) }
1177 -- a sign that the input is ill-formed, since pragmas are
1178 -- assumed to always be properly closed (in .hi files).
1179 '\NUL'# -> trace "lexPragma: unexpected end-of-file" $
1180 cont (ITunknown "\NUL") buf
1182 _ -> lexPragma cont contf inStr (incLexeme buf)
1186 -----------------------------------------------------------------------------
1197 data PState = PState {
1199 glasgow_exts :: Int#,
1202 context :: [LayoutContext]
1205 type P a = StringBuffer -- Input string
1210 returnP a buf s = POk s a
1212 thenP :: P a -> (a -> P b) -> P b
1213 m `thenP` k = \ buf s ->
1215 POk s1 a -> k a buf s1
1216 PFailed err -> PFailed err
1218 thenP_ :: P a -> P b -> P b
1219 m `thenP_` k = m `thenP` \_ -> k
1221 mapP :: (a -> P b) -> [a] -> P [b]
1222 mapP f [] = returnP []
1225 mapP f as `thenP` \bs ->
1228 failP :: String -> P a
1229 failP msg buf s = PFailed (text msg)
1231 failMsgP :: Message -> P a
1232 failMsgP msg buf s = PFailed msg
1234 lexError :: String -> P a
1235 lexError str buf s@PState{ loc = loc }
1236 = failMsgP (hcat [ppr loc, text ": ", text str]) buf s
1238 getSrcLocP :: P SrcLoc
1239 getSrcLocP buf s@(PState{ loc = loc }) = POk s loc
1241 -- use a temporary SrcLoc for the duration of the argument
1242 setSrcLocP :: SrcLoc -> P a -> P a
1243 setSrcLocP new_loc p buf s =
1244 case p buf s{ loc=new_loc } of
1246 PFailed e -> PFailed e
1248 getSrcFile :: P FAST_STRING
1249 getSrcFile buf s@(PState{ loc = loc }) = POk s (srcLocFile loc)
1251 pushContext :: LayoutContext -> P ()
1252 pushContext ctxt buf s@(PState{ context = ctx }) = POk s{context = ctxt:ctx} ()
1256 This special case in layoutOn is to handle layout contexts which are
1257 indented the same or less than the current context. This is illegal
1258 according to the Haskell spec, so we have to arrange to close the
1259 current context. eg.
1264 after the first 'where', the sequence of events is:
1266 - layout system inserts a ';' (column 0)
1267 - parser begins a new context at column 0
1268 - parser shifts ';' (legal empty declaration)
1269 - parser sees 'class': parse error (we're still in the inner context)
1271 trouble is, by the time we know we need a new context, the lexer has
1272 already generated the ';'. Hacky solution is as follows: since we
1273 know the column of the next token (it's the column number of the new
1274 context), we set the ACTUAL column number of the new context to this
1275 number plus one. Hence the next time the lexer is called, a '}' will
1276 be generated to close the new context straight away. Furthermore, we
1277 have to set the atbol flag so that the ';' that the parser shifted as
1278 part of the new context is re-generated.
1280 when the new context is *less* indented than the current one:
1282 f = f where g = g where
1285 - current context: column 12.
1286 - on seeing 'h' (column 0), the layout system inserts '}'
1287 - parser starts a new context, column 0
1288 - parser sees '}', uses it to close new context
1289 - we still need to insert another '}' followed by a ';',
1290 hence the atbol trick.
1292 There's also a special hack in here to deal with
1299 i.e. the inner context is at the same indentation level as the outer
1300 context. This is strictly illegal according to Haskell 98, but
1301 there's a lot of existing code using this style and it doesn't make
1302 any sense to disallow it, since empty 'do' lists don't make sense.
1305 layoutOn :: Bool -> P ()
1306 layoutOn strict buf s@(PState{ bol = bol, context = ctx }) =
1307 let offset = lexemeIndex buf -# bol in
1310 | if strict then prev_off >=# offset else prev_off ># offset ->
1311 --trace ("layout on, column: " ++ show (I# offset)) $
1312 POk s{ context = Layout (offset +# 1#) : ctx, atbol = 1# } ()
1314 --trace ("layout on, column: " ++ show (I# offset)) $
1315 POk s{ context = Layout offset : ctx } ()
1318 layoutOff buf s@(PState{ context = ctx }) =
1319 POk s{ context = NoLayout:ctx } ()
1322 popContext = \ buf s@(PState{ context = ctx, loc = loc }) ->
1324 (_:tl) -> POk s{ context = tl } ()
1325 [] -> PFailed (srcParseErr buf loc)
1328 Note that if the name of the file we're processing ends
1329 with `hi-boot', we accept it on faith as having the right
1330 version. This is done so that .hi-boot files that come
1331 with hsc don't have to be updated before every release,
1332 *and* it allows us to share .hi-boot files with versions
1333 of hsc that don't have .hi version checking (e.g., ghc-2.10's)
1335 If the version number is 0, the checking is also turned off.
1336 (needed to deal with GHC.hi only!)
1338 Once we can assume we're compiling with a version of ghc that
1339 supports interface file checking, we can drop the special
1342 checkVersion :: Maybe Integer -> P ()
1343 checkVersion mb@(Just v) buf s@(PState{loc = loc})
1344 | (v==0) || (v == fromInt opt_HiVersion) || opt_NoHiCheck = POk s ()
1345 | otherwise = PFailed (ifaceVersionErr mb loc ([]::[Token]){-Todo-})
1346 checkVersion mb@Nothing buf s@(PState{loc = loc})
1347 | "hi-boot" `isSuffixOf` (_UNPK_ (srcLocFile loc)) = POk s ()
1348 | otherwise = PFailed (ifaceVersionErr mb loc ([]::[Token]){-Todo-})
1350 -----------------------------------------------------------------
1352 ifaceParseErr :: StringBuffer -> SrcLoc -> Message
1354 = hsep [ppr l, ptext SLIT("Interface file parse error; on input `"),
1355 text (lexemeToString s), char '\'']
1357 ifaceVersionErr hi_vers l toks
1358 = hsep [ppr l, ptext SLIT("Interface file version error;"),
1359 ptext SLIT("Expected"), int opt_HiVersion,
1360 ptext SLIT("found "), pp_version]
1364 Nothing -> ptext SLIT("pre ghc-3.02 version")
1365 Just v -> ptext SLIT("version") <+> integer v
1367 -----------------------------------------------------------------------------
1369 srcParseErr :: StringBuffer -> SrcLoc -> Message
1373 then ptext SLIT(": parse error (possibly incorrect indentation)")
1374 else hcat [ptext SLIT(": parse error on input "),
1375 char '`', text token, char '\'']
1378 token = lexemeToString s