2 % (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
4 \section[Lexical analysis]{Lexical analysis}
6 --------------------------------------------------------
8 There's a known bug in here:
10 If an interface file ends prematurely, Lex tries to
11 do headFS of an empty FastString.
13 An example that provokes the error is
15 f _:_ _forall_ [a] <<<END OF FILE>>>
16 --------------------------------------------------------
22 ifaceParseErr, srcParseErr,
25 Token(..), lexer, ParseResult(..), PState(..),
29 P, thenP, thenP_, returnP, mapP, failP, failMsgP,
30 getSrcLocP, setSrcLocP, getSrcFile,
31 layoutOn, layoutOff, pushContext, popContext
34 #include "HsVersions.h"
36 import Char ( isSpace, toUpper )
37 import List ( isSuffixOf )
39 import IdInfo ( InlinePragInfo(..) )
40 import PrelNames ( mkTupNameStr )
41 import CmdLineOpts ( opt_HiVersion, opt_NoHiCheck )
42 import ForeignCall ( Safety(..) )
43 import NewDemand ( StrictSig(..), Demand(..), Keepity(..),
44 DmdResult(..), Deferredness(..), mkTopDmdType )
45 import UniqFM ( listToUFM, lookupUFM )
46 import BasicTypes ( Boxity(..) )
47 import SrcLoc ( SrcLoc, incSrcLine, srcLocFile, srcLocLine,
48 replaceSrcLine, mkSrcLoc )
50 import ErrUtils ( Message )
57 import Char ( chr, ord )
58 import PrelRead ( readRational__ ) -- Glasgow non-std
61 %************************************************************************
63 \subsection{Data types}
65 %************************************************************************
67 The token data type, fairly un-interesting except from one
68 constructor, @ITidinfo@, which is used to lazily lex id info (arity,
69 strictness, unfolding etc).
71 The Idea/Observation here is that the renamer needs to scan through
72 all of an interface file before it can continue. But only a fraction
73 of the information contained in the file turns out to be useful, so
74 delaying as much as possible of the scanning and parsing of an
75 interface file Makes Sense (Heap profiles of the compiler
76 show a reduction in heap usage by at least a factor of two,
79 Hence, the interface file lexer spots when value declarations are
80 being scanned and returns the @ITidinfo@ and @ITtype@ constructors
81 for the type and any other id info for that binding (unfolding, strictness
82 etc). These constructors are applied to the result of lexing these sub-chunks.
84 The lexing of the type and id info is all done lazily, of course, so
85 the scanning (and subsequent parsing) will be done *only* on the ids the
86 renamer finds out that it is interested in. The rest will just be junked.
87 Laziness, you know it makes sense :-)
91 = ITas -- Haskell keywords
115 | ITscc -- ToDo: remove (we use {-# SCC "..." #-} now)
117 | ITforall -- GHC extension keywords
128 | ITinterface -- interface keywords
136 | ITccall (Bool,Bool,Safety) -- (is_dyn, is_casm, may_gc)
155 | ITunfold InlinePragInfo
163 | ITspecialise_prag -- Pragmas
173 | ITdotdot -- reserved symbols
187 | ITbiglam -- GHC-extension symbols
189 | ITocurly -- special symbols
191 | ITocurlybar -- {|, for type applications
192 | ITccurlybar -- |}, for type applications
205 | ITvarid FAST_STRING -- identifiers
206 | ITconid FAST_STRING
207 | ITvarsym FAST_STRING
208 | ITconsym FAST_STRING
209 | ITqvarid (FAST_STRING,FAST_STRING)
210 | ITqconid (FAST_STRING,FAST_STRING)
211 | ITqvarsym (FAST_STRING,FAST_STRING)
212 | ITqconsym (FAST_STRING,FAST_STRING)
214 | ITipvarid FAST_STRING -- GHC extension: implicit param: ?x
216 | ITpragma StringBuffer
219 | ITstring FAST_STRING
221 | ITrational Rational
224 | ITprimstring FAST_STRING
226 | ITprimfloat Rational
227 | ITprimdouble Rational
228 | ITlitlit FAST_STRING
230 | ITunknown String -- Used when the lexer can't make sense of it
231 | ITeof -- end of file token
232 deriving Show -- debugging
235 -----------------------------------------------------------------------------
239 pragmaKeywordsFM = listToUFM $
240 map (\ (x,y) -> (_PK_ x,y))
241 [( "SPECIALISE", ITspecialise_prag ),
242 ( "SPECIALIZE", ITspecialise_prag ),
243 ( "SOURCE", ITsource_prag ),
244 ( "INLINE", ITinline_prag ),
245 ( "NOINLINE", ITnoinline_prag ),
246 ( "NOTINLINE", ITnoinline_prag ),
247 ( "LINE", ITline_prag ),
248 ( "RULES", ITrules_prag ),
249 ( "RULEZ", ITrules_prag ), -- american spelling :-)
250 ( "SCC", ITscc_prag ),
251 ( "DEPRECATED", ITdeprecated_prag )
254 haskellKeywordsFM = listToUFM $
255 map (\ (x,y) -> (_PK_ x,y))
256 [( "_", ITunderscore ),
259 ( "class", ITclass ),
261 ( "default", ITdefault ),
262 ( "deriving", ITderiving ),
265 ( "hiding", IThiding ),
267 ( "import", ITimport ),
269 ( "infix", ITinfix ),
270 ( "infixl", ITinfixl ),
271 ( "infixr", ITinfixr ),
272 ( "instance", ITinstance ),
274 ( "module", ITmodule ),
275 ( "newtype", ITnewtype ),
277 ( "qualified", ITqualified ),
280 ( "where", ITwhere ),
281 ( "_scc_", ITscc ) -- ToDo: remove
284 isSpecial :: Token -> Bool
285 -- If we see M.x, where x is a keyword, but
286 -- is special, we treat it as just plain M.x,
288 isSpecial ITas = True
289 isSpecial IThiding = True
290 isSpecial ITqualified = True
291 isSpecial ITforall = True
292 isSpecial ITexport = True
293 isSpecial ITlabel = True
294 isSpecial ITdynamic = True
295 isSpecial ITunsafe = True
296 isSpecial ITwith = True
297 isSpecial ITccallconv = True
298 isSpecial ITstdcallconv = True
301 -- IMPORTANT: Keep this in synch with ParseIface.y's var_fs production! (SUP)
302 ghcExtensionKeywordsFM = listToUFM $
303 map (\ (x,y) -> (_PK_ x,y))
304 [ ( "forall", ITforall ),
305 ( "foreign", ITforeign ),
306 ( "export", ITexport ),
307 ( "label", ITlabel ),
308 ( "dynamic", ITdynamic ),
309 ( "unsafe", ITunsafe ),
311 ( "stdcall", ITstdcallconv),
312 ( "ccall", ITccallconv),
313 ( "dotnet", ITdotnet),
314 ("_ccall_", ITccall (False, False, PlayRisky)),
315 ("_ccall_GC_", ITccall (False, False, PlaySafe)),
316 ("_casm_", ITccall (False, True, PlayRisky)),
317 ("_casm_GC_", ITccall (False, True, PlaySafe)),
	-- interface keywords
	("__interface",		ITinterface),
	("__export",		IT__export),
	("__depends",		ITdepends),
	("__forall",		IT__forall),
	("__letrec",		ITletrec),
	("__coerce",		ITcoerce),
	("__inline_me",		ITinlineMe),
	("__inline_call",	ITinlineCall),
	-- NOTE(review): "__depends" is listed twice in this association list
	-- (see a few entries above).  Both map to ITdepends, so the duplicate
	-- is harmless to lookupUFM, but one of the two can be removed.
	("__depends",		ITdepends),
	("__DEFAULT",		ITdefaultbranch),
331 ("__integer", ITinteger_lit),
332 ("__float", ITfloat_lit),
333 ("__int64", ITint64_lit),
334 ("__word", ITword_lit),
335 ("__word64", ITword64_lit),
336 ("__rational", ITrational_lit),
337 ("__addr", ITaddr_lit),
338 ("__label", ITlabel_lit),
339 ("__litlit", ITlit_lit),
340 ("__string", ITstring_lit),
343 ("__fuall", ITfuall),
345 ("__P", ITspecialise),
348 ("__D", ITdeprecated),
349 ("__U", ITunfold NoInlinePragInfo),
351 ("__ccall", ITccall (False, False, PlayRisky)),
352 ("__ccall_GC", ITccall (False, False, PlaySafe)),
353 ("__dyn_ccall", ITccall (True, False, PlayRisky)),
354 ("__dyn_ccall_GC", ITccall (True, False, PlaySafe)),
355 ("__casm", ITccall (False, True, PlayRisky)),
356 ("__dyn_casm", ITccall (True, True, PlayRisky)),
357 ("__casm_GC", ITccall (False, True, PlaySafe)),
358 ("__dyn_casm_GC", ITccall (True, True, PlaySafe)),
364 haskellKeySymsFM = listToUFM $
365 map (\ (x,y) -> (_PK_ x,y))
378 ,(".", ITdot) -- sadly, for 'forall a . t'
382 -----------------------------------------------------------------------------
387 - (glaexts) lexing an interface file or -fglasgow-exts
388 - (bol) pointer to beginning of line (for column calculations)
389 - (buf) pointer to beginning of token
390 - (buf) pointer to current char
391 - (atbol) flag indicating whether we're at the beginning of a line
394 lexer :: (Token -> P a) -> P a
395 lexer cont buf s@(PState{
397 glasgow_exts = glaexts,
403 -- first, start a new lexeme and lose all the whitespace
405 tab line bol atbol (stepOverLexeme buf)
407 line = srcLocLine loc
409 tab y bol atbol buf = -- trace ("tab: " ++ show (I# y) ++ " : " ++ show (currentChar buf)) $
410 case currentChar# buf of
413 if bufferExhausted (stepOn buf)
414 then cont ITeof buf s'
415 else trace "lexer: misplaced NUL?" $
416 tab y bol atbol (stepOn buf)
418 '\n'# -> let buf' = stepOn buf
419 in tab (y +# 1#) (currentIndex# buf') 1# buf'
421 -- find comments. This got harder in Haskell 98.
422 '-'# -> let trundle n =
423 let next = lookAhead# buf n in
424 if next `eqChar#` '-'# then trundle (n +# 1#)
425 else if is_symbol next || n <# 2#
428 (stepOnUntilChar# (stepOnBy# buf n) '\n'#)
431 -- comments and pragmas. We deal with LINE pragmas here,
432 -- and throw out any unrecognised pragmas as comments. Any
433 -- pragmas we know about are dealt with later (after any layout
434 -- processing if necessary).
435 '{'# | lookAhead# buf 1# `eqChar#` '-'# ->
436 if lookAhead# buf 2# `eqChar#` '#'# then
437 if lookAhead# buf 3# `eqChar#` '#'# then is_a_token else
438 case expandWhile# is_space (setCurrentPos# buf 3#) of { buf1->
439 case expandWhile# is_ident (stepOverLexeme buf1) of { buf2->
440 let lexeme = mkFastString -- ToDo: too slow
441 (map toUpper (lexemeToString buf2)) in
442 case lookupUFM pragmaKeywordsFM lexeme of
444 line_prag skip_to_end buf2 s'
445 Just other -> is_a_token
446 Nothing -> skip_to_end (stepOnBy# buf 2#) s'
449 else skip_to_end (stepOnBy# buf 2#) s'
451 skip_to_end = nested_comment (lexer cont)
453 -- special GHC extension: we grok cpp-style #line pragmas
454 '#'# | lexemeIndex buf ==# bol -> -- the '#' must be in column 0
455 case expandWhile# is_space (stepOn buf) of { buf1 ->
456 if is_digit (currentChar# buf1)
457 then line_prag next_line buf1 s'
461 next_line buf = lexer cont (stepOnUntilChar# buf '\n'#)
463 -- tabs have been expanded beforehand
464 c | is_space c -> tab y bol atbol (stepOn buf)
465 | otherwise -> is_a_token
467 where s' = s{loc = replaceSrcLine loc y,
471 is_a_token | atbol /=# 0# = lexBOL cont buf s'
472 | otherwise = lexToken cont glaexts buf s'
474 -- {-# LINE .. #-} pragmas. yeuch.
475 line_prag cont buf s@PState{loc=loc} =
476 case expandWhile# is_space buf of { buf1 ->
477 case scanNumLit 0 (stepOverLexeme buf1) of { (line,buf2) ->
478 -- subtract one: the line number refers to the *following* line.
479 let real_line = line - 1 in
480 case fromInteger real_line of { i@(I# l) ->
481 -- ToDo, if no filename then we skip the newline.... d'oh
482 case expandWhile# is_space buf2 of { buf3 ->
483 case currentChar# buf3 of
485 case untilEndOfString# (stepOn (stepOverLexeme buf3)) of { buf4 ->
487 file = lexemeToFastString buf4
488 new_buf = stepOn (stepOverLexeme buf4)
490 if nullFastString file
491 then cont new_buf s{loc = replaceSrcLine loc l}
492 else cont new_buf s{loc = mkSrcLoc file i}
494 _other -> cont (stepOverLexeme buf3) s{loc = replaceSrcLine loc l}
497 nested_comment :: P a -> P a
498 nested_comment cont buf = loop buf
501 case currentChar# buf of
502 '\NUL'# | bufferExhausted (stepOn buf) ->
503 lexError "unterminated `{-'" buf -- -}
504 '-'# | lookAhead# buf 1# `eqChar#` '}'# ->
505 cont (stepOnBy# buf 2#)
507 '{'# | lookAhead# buf 1# `eqChar#` '-'# ->
508 nested_comment (nested_comment cont) (stepOnBy# buf 2#)
510 '\n'# -> \ s@PState{loc=loc} ->
511 let buf' = stepOn buf in
512 nested_comment cont buf'
513 s{loc = incSrcLine loc, bol = currentIndex# buf',
516 _ -> nested_comment cont (stepOn buf)
518 -- When we are lexing the first token of a line, check whether we need to
519 -- insert virtual semicolons or close braces due to layout.
521 lexBOL :: (Token -> P a) -> P a
522 lexBOL cont buf s@(PState{
524 glasgow_exts = glaexts,
529 if need_close_curly then
530 --trace ("col = " ++ show (I# col) ++ ", layout: inserting '}'") $
531 cont ITvccurly buf s{atbol = 1#, context = tail ctx}
532 else if need_semi_colon then
533 --trace ("col = " ++ show (I# col) ++ ", layout: inserting ';'") $
534 cont ITsemi buf s{atbol = 0#}
536 lexToken cont glaexts buf s{atbol = 0#}
538 col = currentIndex# buf -# bol
551 Layout n -> col ==# n
554 lexToken :: (Token -> P a) -> Int# -> P a
555 lexToken cont glaexts buf =
556 -- trace "lexToken" $
557 case currentChar# buf of
559 -- special symbols ----------------------------------------------------
560 '('# | flag glaexts && lookAhead# buf 1# `eqChar#` '#'#
561 -> cont IToubxparen (setCurrentPos# buf 2#)
563 -> cont IToparen (incLexeme buf)
565 ')'# -> cont ITcparen (incLexeme buf)
566 '['# -> cont ITobrack (incLexeme buf)
567 ']'# -> cont ITcbrack (incLexeme buf)
568 ','# -> cont ITcomma (incLexeme buf)
569 ';'# -> cont ITsemi (incLexeme buf)
570 '}'# -> \ s@PState{context = ctx} ->
572 (_:ctx') -> cont ITccurly (incLexeme buf) s{context=ctx'}
573 _ -> lexError "too many '}'s" buf s
574 '|'# -> case lookAhead# buf 1# of
575 '}'# | flag glaexts -> cont ITccurlybar
576 (setCurrentPos# buf 2#)
577 _ -> lex_sym cont (incLexeme buf)
580 '#'# -> case lookAhead# buf 1# of
581 ')'# | flag glaexts -> cont ITcubxparen (setCurrentPos# buf 2#)
582 '-'# -> case lookAhead# buf 2# of
583 '}'# -> cont ITclose_prag (setCurrentPos# buf 3#)
584 _ -> lex_sym cont (incLexeme buf)
585 _ -> lex_sym cont (incLexeme buf)
587 '`'# | flag glaexts && lookAhead# buf 1# `eqChar#` '`'#
588 -> lex_cstring cont (setCurrentPos# buf 2#)
590 -> cont ITbackquote (incLexeme buf)
592 '{'# -> -- look for "{-##" special iface pragma
593 case lookAhead# buf 1# of
595 -> cont ITocurlybar (setCurrentPos# buf 2#)
596 '-'# -> case lookAhead# buf 2# of
597 '#'# -> case lookAhead# buf 3# of
600 = doDiscard 0# (stepOnBy# (stepOverLexeme buf) 4#) in
601 cont (ITpragma lexeme) buf'
602 _ -> lex_prag cont (setCurrentPos# buf 3#)
603 _ -> cont ITocurly (incLexeme buf)
604 _ -> (layoutOff `thenP_` cont ITocurly) (incLexeme buf)
606 -- strings/characters -------------------------------------------------
607 '\"'#{-"-} -> lex_string cont glaexts [] (incLexeme buf)
608 '\''# -> lex_char (char_end cont) glaexts (incLexeme buf)
610 -- strictness and cpr pragmas and __scc treated specially.
611 '_'# | flag glaexts ->
612 case lookAhead# buf 1# of
613 '_'# -> case lookAhead# buf 2# of
615 lex_demand cont (stepOnUntil (not . isSpace)
616 (stepOnBy# buf 3#)) -- past __S
618 cont ITcprinfo (stepOnBy# buf 3#) -- past __M
621 case prefixMatch (stepOnBy# buf 3#) "cc" of
622 Just buf' -> lex_scc cont (stepOverLexeme buf')
623 Nothing -> lex_id cont glaexts buf
624 _ -> lex_id cont glaexts buf
625 _ -> lex_id cont glaexts buf
627 -- Hexadecimal and octal constants
628 '0'# | (ch `eqChar#` 'x'# || ch `eqChar#` 'X'#) && is_hexdigit ch2
629 -> readNum (after_lexnum cont glaexts) buf' is_hexdigit 16 hex
630 | (ch `eqChar#` 'o'# || ch `eqChar#` 'O'#) && is_octdigit ch2
631 -> readNum (after_lexnum cont glaexts) buf' is_octdigit 8 oct_or_dec
632 where ch = lookAhead# buf 1#
633 ch2 = lookAhead# buf 2#
634 buf' = setCurrentPos# buf 2#
637 if bufferExhausted (stepOn buf) then
640 trace "lexIface: misplaced NUL?" $
641 cont (ITunknown "\NUL") (stepOn buf)
643 '?'# | flag glaexts && is_lower (lookAhead# buf 1#) ->
644 lex_ip cont (incLexeme buf)
645 c | is_digit c -> lex_num cont glaexts 0 buf
646 | is_symbol c -> lex_sym cont buf
647 | is_upper c -> lex_con cont glaexts buf
648 | is_ident c -> lex_id cont glaexts buf
649 | otherwise -> lexError "illegal character" buf
651 -- Int# is unlifted, and therefore faster than Bool for flags.
657 -------------------------------------------------------------------------------
661 = case expandWhile# is_space buf of { buf1 ->
662 case expandWhile# is_ident (stepOverLexeme buf1) of { buf2 ->
663 let lexeme = mkFastString (map toUpper (lexemeToString buf2)) in
664 case lookupUFM pragmaKeywordsFM lexeme of
665 Just kw -> cont kw (mergeLexemes buf buf2)
666 Nothing -> panic "lex_prag"
669 -------------------------------------------------------------------------------
672 lex_string cont glaexts s buf
673 = case currentChar# buf of
675 let buf' = incLexeme buf
676 s' = mkFastStringNarrow (map chr (reverse s))
677 in case currentChar# buf' of
678 '#'# | flag glaexts -> if all (<= 0xFF) s
679 then cont (ITprimstring s') (incLexeme buf')
680 else lexError "primitive string literal must contain only characters <= \'\\xFF\'" buf'
681 _ -> cont (ITstring s') buf'
683 -- ignore \& in a string, deal with string gaps
684 '\\'# | next_ch `eqChar#` '&'#
685 -> lex_string cont glaexts s buf'
687 -> lex_stringgap cont glaexts s (incLexeme buf)
689 where next_ch = lookAhead# buf 1#
690 buf' = setCurrentPos# buf 2#
692 _ -> lex_char (lex_next_string cont s) glaexts buf
-- Skip over a string gap (backslash, whitespace, backslash inside a
-- string literal).  Newlines seen inside the gap bump the line number
-- recorded in the parser state; a closing backslash resumes lex_string;
-- anything that is not whitespace is an error.
lex_stringgap cont glaexts acc buf =
  case currentChar# buf of
    '\n'# -> \ st@PState{loc = l} ->
	       lex_stringgap cont glaexts acc buf' st{loc = incSrcLine l}
    '\\'# -> lex_string cont glaexts acc buf'
    ch | is_space ch -> lex_stringgap cont glaexts acc buf'
       | otherwise   -> charError buf'
  where buf' = incLexeme buf
703 lex_next_string cont s glaexts c buf = lex_string cont glaexts (c:s) buf
-- Lex one character (the body of a character literal, or a single
-- element of a string literal).  The continuation receives the
-- character's code point as an Int; escapes are handed to lex_escape.
lex_char :: (Int# -> Int -> P a) -> Int# -> P a
lex_char cont glaexts buf =
  case currentChar# buf of
    '\\'#          -> lex_escape (cont glaexts) (incLexeme buf)
    ch | is_any ch -> cont glaexts (I# (ord# ch)) (incLexeme buf)
       | otherwise -> charError buf
712 char_end cont glaexts c buf
713 = case currentChar# buf of
714 '\''# -> let buf' = incLexeme buf in
715 case currentChar# buf' of
717 -> cont (ITprimchar c) (incLexeme buf')
718 _ -> cont (ITchar c) buf'
722 = let buf' = incLexeme buf in
723 case currentChar# buf of
724 'a'# -> cont (ord '\a') buf'
725 'b'# -> cont (ord '\b') buf'
726 'f'# -> cont (ord '\f') buf'
727 'n'# -> cont (ord '\n') buf'
728 'r'# -> cont (ord '\r') buf'
729 't'# -> cont (ord '\t') buf'
730 'v'# -> cont (ord '\v') buf'
731 '\\'# -> cont (ord '\\') buf'
732 '"'# -> cont (ord '\"') buf'
733 '\''# -> cont (ord '\'') buf'
734 '^'# -> let c = currentChar# buf' in
735 if c `geChar#` '@'# && c `leChar#` '_'#
736 then cont (I# (ord# c -# ord# '@'#)) (incLexeme buf')
739 'x'# -> readNum (after_charnum cont) buf' is_hexdigit 16 hex
740 'o'# -> readNum (after_charnum cont) buf' is_octdigit 8 oct_or_dec
742 -> readNum (after_charnum cont) buf is_digit 10 oct_or_dec
744 _ -> case [ (c,buf2) | (p,c) <- silly_escape_chars,
745 Just buf2 <- [prefixMatch buf p] ] of
746 (c,buf2):_ -> cont (ord c) buf2
749 after_charnum cont i buf
750 = if i >= 0 && i <= 0x10FFFF
751 then cont (fromInteger i) buf
754 readNum cont buf is_digit base conv = read buf 0
756 = case currentChar# buf of { c ->
758 then read (incLexeme buf) (i*base + (toInteger (I# (conv c))))
764 || (c `geChar#` 'a'# && c `leChar#` 'f'#)
765 || (c `geChar#` 'A'# && c `leChar#` 'F'#)
767 hex c | is_digit c = ord# c -# ord# '0'#
768 | otherwise = ord# (to_lower c) -# ord# 'a'# +# 10#
769 oct_or_dec c = ord# c -# ord# '0'#
771 is_octdigit c = c `geChar#` '0'# && c `leChar#` '7'#
774 | c `geChar#` 'A'# && c `leChar#` 'Z'#
775 = chr# (ord# c -# (ord# 'A'# -# ord# 'a'#))
778 charError buf = lexError "error in character literal" buf
780 silly_escape_chars = [
817 -------------------------------------------------------------------------------
819 lex_demand cont buf =
820 case read_em [] buf of { (ls,buf') ->
821 case currentChar# buf' of
822 'X'# -> cont (ITstrict (StrictSig (mkTopDmdType ls BotRes))) (incLexeme buf')
823 'M'# -> cont (ITstrict (StrictSig (mkTopDmdType ls RetCPR))) (incLexeme buf')
824 _ -> cont (ITstrict (StrictSig (mkTopDmdType ls TopRes))) buf'
827 -- code snatched from Demand.lhs
829 case currentChar# buf of
830 'L'# -> read_em (Lazy : acc) (stepOn buf)
831 'A'# -> read_em (Abs : acc) (stepOn buf)
832 'V'# -> read_em (Eval : acc) (stepOn buf)
833 ')'# -> (reverse acc, stepOn buf)
834 'C'# -> do_call acc (stepOnBy# buf 2#)
835 'U'# -> do_unpack1 Drop Now acc (stepOnBy# buf 1#)
836 'S'# -> do_unpack1 Keep Now acc (stepOnBy# buf 1#)
837 _ -> (reverse acc, buf)
839 do_unpack1 keepity defer acc buf
840 = case currentChar# buf of
841 '*'# -> do_unpack1 keepity Defer acc (stepOnBy# buf 1#)
842 '('# -> do_unpack2 keepity defer acc (stepOnBy# buf 1#)
843 _ -> read_em (Seq keepity defer [] : acc) buf
845 do_unpack2 keepity defer acc buf
846 = case read_em [] buf of
847 (stuff, rest) -> read_em (Seq keepity defer stuff : acc) rest
850 = case read_em [] buf of
851 ([dmd], rest) -> read_em (Call dmd : acc) rest
855 case currentChar# buf of
856 'C'# -> cont ITsccAllCafs (incLexeme buf)
857 other -> cont ITscc buf
859 -----------------------------------------------------------------------------
862 lex_num :: (Token -> P a) -> Int# -> Integer -> P a
863 lex_num cont glaexts acc buf =
864 case scanNumLit acc buf of
866 case currentChar# buf' of
867 '.'# | is_digit (lookAhead# buf' 1#) ->
868 -- this case is not optimised at all, as the
869 -- presence of floating point numbers in interface
870 -- files is not that common. (ToDo)
871 case expandWhile# is_digit (incLexeme buf') of
872 buf2 -> -- points to first non digit char
874 let l = case currentChar# buf2 of
880 = let buf3 = incLexeme buf2 in
881 case currentChar# buf3 of
882 '-'# -> expandWhile# is_digit (incLexeme buf3)
883 '+'# -> expandWhile# is_digit (incLexeme buf3)
884 x | is_digit x -> expandWhile# is_digit buf3
887 v = readRational__ (lexemeToString l)
889 in case currentChar# l of -- glasgow exts only
890 '#'# | flag glaexts -> let l' = incLexeme l in
891 case currentChar# l' of
892 '#'# -> cont (ITprimdouble v) (incLexeme l')
893 _ -> cont (ITprimfloat v) l'
894 _ -> cont (ITrational v) l
896 _ -> after_lexnum cont glaexts acc' buf'
-- Finish off an integer literal: with -fglasgow-exts a trailing '#'
-- turns it into an unboxed Int# literal (ITprimint), otherwise we emit
-- an ordinary ITinteger and leave the buffer untouched.
after_lexnum cont glaexts n buf =
  case currentChar# buf of
    '#'# | flag glaexts -> cont (ITprimint n) (incLexeme buf)
    _			-> cont (ITinteger n) buf
903 -----------------------------------------------------------------------------
904 -- C "literal literal"s (i.e. things like ``NULL'', ``stdout'' etc.)
906 -- we lexemeToFastString on the bit between the ``''s, but include the
907 -- quotes in the full lexeme.
-- Lex a C "literal literal" (e.g. ``NULL''): scan forward for the
-- closing '' pair.  The FastString handed to ITlitlit excludes the
-- closing quotes (setCurrentPos# backs the end pointer up 2 chars),
-- while the lexeme merged back into the buffer includes them.
lex_cstring cont buf =
  case expandUntilMatch (stepOverLexeme buf) "\'\'" of
    Just buf' -> cont (ITlitlit (lexemeToFastString
			  (setCurrentPos# buf' (negateInt# 2#))))
		      (mergeLexemes buf buf')
    Nothing   -> lexError "unterminated ``" buf
916 -----------------------------------------------------------------------------
917 -- identifiers, symbols etc.
920 case expandWhile# is_ident buf of
921 buf' -> cont (ITipvarid lexeme) buf'
922 where lexeme = lexemeToFastString buf'
924 lex_id cont glaexts buf =
925 let buf1 = expandWhile# is_ident buf in
928 case (if flag glaexts
929 then expandWhile# (eqChar# '#'#) buf1 -- slurp trailing hashes
930 else buf1) of { buf' ->
932 let lexeme = lexemeToFastString buf' in
934 case _scc_ "Lex.haskellKeyword" lookupUFM haskellKeywordsFM lexeme of {
935 Just kwd_token -> --trace ("hkeywd: "++_UNPK_(lexeme)) $
939 let var_token = cont (ITvarid lexeme) buf' in
941 if not (flag glaexts)
945 case lookupUFM ghcExtensionKeywordsFM lexeme of {
946 Just kwd_token -> cont kwd_token buf';
953 case expandWhile# is_symbol buf of
954 buf' -> case lookupUFM haskellKeySymsFM lexeme of {
955 Just kwd_token -> --trace ("keysym: "++unpackFS lexeme) $
956 cont kwd_token buf' ;
957 Nothing -> --trace ("sym: "++unpackFS lexeme) $
958 cont (mk_var_token lexeme) buf'
960 where lexeme = lexemeToFastString buf'
963 -- lex_con recursively collects components of a qualified identifer.
964 -- The argument buf is the StringBuffer representing the lexeme
965 -- identified so far, where the next character is upper-case.
967 lex_con cont glaexts buf =
968 -- trace ("con: "{-++unpackFS lexeme-}) $
969 let empty_buf = stepOverLexeme buf in
970 case expandWhile# is_ident empty_buf of { buf1 ->
971 case slurp_trailing_hashes buf1 glaexts of { con_buf ->
973 let all_buf = mergeLexemes buf con_buf
975 con_lexeme = lexemeToFastString con_buf
976 mod_lexeme = lexemeToFastString (decLexeme buf)
977 all_lexeme = lexemeToFastString all_buf
980 | emptyLexeme buf = cont (ITconid con_lexeme) all_buf
981 | otherwise = cont (ITqconid (mod_lexeme,con_lexeme)) all_buf
984 case currentChar# all_buf of
985 '.'# -> maybe_qualified cont glaexts all_lexeme
986 (incLexeme all_buf) just_a_conid
991 maybe_qualified cont glaexts mod buf just_a_conid =
992 -- trace ("qid: "{-++unpackFS lexeme-}) $
993 case currentChar# buf of
994 '['# -> -- Special case for []
995 case lookAhead# buf 1# of
996 ']'# -> cont (ITqconid (mod,SLIT("[]"))) (setCurrentPos# buf 2#)
999 '('# -> -- Special case for (,,,)
1000 -- This *is* necessary to deal with e.g. "instance C PrelBase.(,,)"
1001 case lookAhead# buf 1# of
1002 '#'# | flag glaexts -> case lookAhead# buf 2# of
1003 ','# -> lex_ubx_tuple cont mod (setCurrentPos# buf 3#)
1006 ')'# -> cont (ITqconid (mod,SLIT("()"))) (setCurrentPos# buf 2#)
1007 ','# -> lex_tuple cont mod (setCurrentPos# buf 2#) just_a_conid
1010 '-'# -> case lookAhead# buf 1# of
1011 '>'# -> cont (ITqconid (mod,SLIT("(->)"))) (setCurrentPos# buf 2#)
1012 _ -> lex_id3 cont glaexts mod buf just_a_conid
1014 _ -> lex_id3 cont glaexts mod buf just_a_conid
1017 lex_id3 cont glaexts mod buf just_a_conid
1018 | is_upper (currentChar# buf) =
1019 lex_con cont glaexts buf
1021 | is_symbol (currentChar# buf) =
1023 start_new_lexeme = stepOverLexeme buf
1025 -- trace ("lex_id31 "{-++unpackFS lexeme-}) $
1026 case expandWhile# is_symbol start_new_lexeme of { buf' ->
1028 lexeme = lexemeToFastString buf'
1029 -- real lexeme is M.<sym>
1030 new_buf = mergeLexemes buf buf'
1032 cont (mk_qvar_token mod lexeme) new_buf
1033 -- wrong, but arguably morally right: M... is now a qvarsym
1038 start_new_lexeme = stepOverLexeme buf
1040 -- trace ("lex_id32 "{-++unpackFS lexeme-}) $
1041 case expandWhile# is_ident start_new_lexeme of { buf1 ->
1046 case slurp_trailing_hashes buf1 glaexts of { buf' ->
1049 lexeme = lexemeToFastString buf'
1050 new_buf = mergeLexemes buf buf'
1051 is_a_qvarid = cont (mk_qvar_token mod lexeme) new_buf
1053 case _scc_ "Lex.haskellKeyword" lookupUFM haskellKeywordsFM lexeme of {
1054 Nothing -> is_a_qvarid ;
1056 Just kwd_token | isSpecial kwd_token -- special ids (as, qualified, hiding) shouldn't be
1057 -> is_a_qvarid -- recognised as keywords here.
1059 -> just_a_conid -- avoid M.where etc.
1062 slurp_trailing_hashes buf glaexts
1063 | flag glaexts = expandWhile# (`eqChar#` '#'#) buf
1068 | is_upper f = ITconid pk_str
1069 | is_ident f = ITvarid pk_str
1070 | f `eqChar#` ':'# = ITconsym pk_str
1071 | otherwise = ITvarsym pk_str
1073 (C# f) = _HEAD_ pk_str
1074 -- tl = _TAIL_ pk_str
-- Build a qualified-identifier token: classify the name string with
-- mk_var_token, then pair the resulting token's payload with the
-- module name m.  (Alternatives are disjoint constructors, so their
-- order is immaterial.)
mk_qvar_token m tok =
  case mk_var_token tok of
    ITconid  n -> ITqconid  (m, n)
    ITvarid  n -> ITqvarid  (m, n)
    ITconsym n -> ITqconsym (m, n)
    ITvarsym n -> ITqvarsym (m, n)
    _	       -> ITunknown (show tok)
1086 ----------------------------------------------------------------------------
1087 Horrible stuff for dealing with M.(,,,)
1090 lex_tuple cont mod buf back_off =
1094 case currentChar# buf of
1095 ','# -> go (n+1) (stepOn buf)
1096 ')'# -> cont (ITqconid (mod, snd (mkTupNameStr Boxed n))) (stepOn buf)
1099 lex_ubx_tuple cont mod buf back_off =
1103 case currentChar# buf of
1104 ','# -> go (n+1) (stepOn buf)
1105 '#'# -> case lookAhead# buf 1# of
1106 ')'# -> cont (ITqconid (mod, snd (mkTupNameStr Unboxed n)))
1112 -----------------------------------------------------------------------------
1113 doDiscard rips along really fast, looking for a '##-}',
1114 indicating the end of the pragma we're skipping
1117 doDiscard inStr buf =
1118 case currentChar# buf of
1119 '#'# | inStr ==# 0# ->
1120 case lookAhead# buf 1# of { '#'# ->
1121 case lookAhead# buf 2# of { '-'# ->
1122 case lookAhead# buf 3# of { '}'# ->
1123 (lexemeToBuffer buf, stepOverLexeme (setCurrentPos# buf 4#));
1124 _ -> doDiscard inStr (incLexeme buf) };
1125 _ -> doDiscard inStr (incLexeme buf) };
1126 _ -> doDiscard inStr (incLexeme buf) }
1130 odd_slashes buf flg i# =
1131 case lookAhead# buf i# of
1132 '\\'# -> odd_slashes buf (not flg) (i# -# 1#)
1135 not_inStr = if inStr ==# 0# then 1# else 0#
1137 case lookAhead# buf (negateInt# 1#) of --backwards, actually
1138 '\\'# -> -- escaping something..
1139 if odd_slashes buf True (negateInt# 2#)
1140 then -- odd number of slashes, " is escaped.
1141 doDiscard inStr (incLexeme buf)
1142 else -- even number of slashes, \ is escaped.
1143 doDiscard not_inStr (incLexeme buf)
1144 _ -> doDiscard not_inStr (incLexeme buf)
1146 '\''# | inStr ==# 0# ->
1147 case lookAhead# buf 1# of { '"'# ->
1148 case lookAhead# buf 2# of { '\''# ->
1149 doDiscard inStr (setCurrentPos# buf 3#);
1150 _ -> doDiscard inStr (incLexeme buf) };
1151 _ -> doDiscard inStr (incLexeme buf) }
1153 _ -> doDiscard inStr (incLexeme buf)
1157 -----------------------------------------------------------------------------
1168 data PState = PState {
1170 glasgow_exts :: Int#,
1173 context :: [LayoutContext]
1176 type P a = StringBuffer -- Input string
1181 returnP a buf s = POk s a
1183 thenP :: P a -> (a -> P b) -> P b
1184 m `thenP` k = \ buf s ->
1186 POk s1 a -> k a buf s1
1187 PFailed err -> PFailed err
-- Sequence two parser actions, discarding the result of the first.
thenP_ :: P a -> P b -> P b
thenP_ m n = m `thenP` \ _ -> n
1192 mapP :: (a -> P b) -> [a] -> P [b]
1193 mapP f [] = returnP []
1196 mapP f as `thenP` \bs ->
-- Fail with a plain String message, wrapped into a Message via 'text'.
failP :: String -> P a
failP msg _buf _st = PFailed (text msg)
-- Fail with a ready-made Message.
failMsgP :: Message -> P a
failMsgP msg _buf _st = PFailed msg
-- Fail, prefixing the message with the current source location.
lexError :: String -> P a
lexError msg buf st@PState{ loc = l }
  = failMsgP (hcat [ppr l, text ": ", text msg]) buf st
-- Fetch the current source location from the parser state.
getSrcLocP :: P SrcLoc
getSrcLocP _buf st@(PState{ loc = l }) = POk st l
1212 -- use a temporary SrcLoc for the duration of the argument
1213 setSrcLocP :: SrcLoc -> P a -> P a
1214 setSrcLocP new_loc p buf s =
1215 case p buf s{ loc=new_loc } of
1217 PFailed e -> PFailed e
-- Fetch the name of the file being lexed, taken from the current SrcLoc.
getSrcFile :: P FAST_STRING
getSrcFile _buf st@(PState{ loc = l }) = POk st (srcLocFile l)
-- Push a new layout context onto the context stack in the parser state.
pushContext :: LayoutContext -> P ()
pushContext ctxt _buf st@(PState{ context = stack })
  = POk st{context = ctxt : stack} ()
1227 This special case in layoutOn is to handle layout contexts which are
1228 indented the same or less than the current context. This is illegal
1229 according to the Haskell spec, so we have to arrange to close the
1230 current context. eg.
1235 after the first 'where', the sequence of events is:
1237 - layout system inserts a ';' (column 0)
1238 - parser begins a new context at column 0
1239 - parser shifts ';' (legal empty declaration)
1240 - parser sees 'class': parse error (we're still in the inner context)
1242 trouble is, by the time we know we need a new context, the lexer has
1243 already generated the ';'. Hacky solution is as follows: since we
1244 know the column of the next token (it's the column number of the new
1245 context), we set the ACTUAL column number of the new context to this
1246 number plus one. Hence the next time the lexer is called, a '}' will
1247 be generated to close the new context straight away. Furthermore, we
1248 have to set the atbol flag so that the ';' that the parser shifted as
1249 part of the new context is re-generated.
1251 when the new context is *less* indented than the current one:
1253 f = f where g = g where
1256 - current context: column 12.
1257 - on seeing 'h' (column 0), the layout system inserts '}'
1258 - parser starts a new context, column 0
1259 - parser sees '}', uses it to close new context
1260 - we still need to insert another '}' followed by a ';',
1261 hence the atbol trick.
1263 There's also a special hack in here to deal with
1270 i.e. the inner context is at the same indentation level as the outer
1271 context. This is strictly illegal according to Haskell 98, but
1272 there's a lot of existing code using this style and it doesn't make
1273 any sense to disallow it, since empty 'do' lists don't make sense.
1276 layoutOn :: Bool -> P ()
1277 layoutOn strict buf s@(PState{ bol = bol, context = ctx }) =
1278 let offset = lexemeIndex buf -# bol in
1281 | if strict then prev_off >=# offset else prev_off ># offset ->
1282 --trace ("layout on, column: " ++ show (I# offset)) $
1283 POk s{ context = Layout (offset +# 1#) : ctx, atbol = 1# } ()
1285 --trace ("layout on, column: " ++ show (I# offset)) $
1286 POk s{ context = Layout offset : ctx } ()
-- Disable layout processing by pushing an explicit NoLayout context.
layoutOff buf st@(PState{ context = stack })
  = POk st{ context = NoLayout : stack } ()
1293 popContext = \ buf s@(PState{ context = ctx, loc = loc }) ->
1295 (_:tl) -> POk s{ context = tl } ()
1296 [] -> PFailed (srcParseErr buf loc)
1299 Note that if the name of the file we're processing ends
1300 with `hi-boot', we accept it on faith as having the right
1301 version. This is done so that .hi-boot files that come
1302 with hsc don't have to be updated before every release,
1303 *and* it allows us to share .hi-boot files with versions
1304 of hsc that don't have .hi version checking (e.g., ghc-2.10's)
1306 If the version number is 0, the checking is also turned off.
1307 (needed to deal with GHC.hi only!)
1309 Once we can assume we're compiling with a version of ghc that
1310 supports interface file checking, we can drop the special
-- Check an interface file's version stamp against this compiler's
-- opt_HiVersion.  A stamp of 0, or the no-hi-check flag, disables the
-- check entirely.  A missing stamp is accepted only for files whose
-- name ends in "hi-boot" (see the note above); otherwise it is an error.
checkVersion :: Maybe Integer -> P ()
checkVersion mb@(Just v) _buf st@(PState{loc = l})
  | v == 0 || v == fromInt opt_HiVersion || opt_NoHiCheck = POk st ()
  | otherwise = PFailed (ifaceVersionErr mb l ([]::[Token]){-Todo-})
checkVersion mb@Nothing _buf st@(PState{loc = l})
  | "hi-boot" `isSuffixOf` (_UNPK_ (srcLocFile l)) = POk st ()
  | otherwise = PFailed (ifaceVersionErr mb l ([]::[Token]){-Todo-})
1321 -----------------------------------------------------------------
1323 ifaceParseErr :: StringBuffer -> SrcLoc -> Message
1325 = hsep [ppr l, ptext SLIT("Interface file parse error; on input `"),
1326 text (lexemeToString s), char '\'']
1328 ifaceVersionErr hi_vers l toks
1329 = hsep [ppr l, ptext SLIT("Interface file version error;"),
1330 ptext SLIT("Expected"), int opt_HiVersion,
1331 ptext SLIT("found "), pp_version]
1335 Nothing -> ptext SLIT("pre ghc-3.02 version")
1336 Just v -> ptext SLIT("version") <+> integer v
1338 -----------------------------------------------------------------------------
1340 srcParseErr :: StringBuffer -> SrcLoc -> Message
1344 then ptext SLIT(": parse error (possibly incorrect indentation)")
1345 else hcat [ptext SLIT(": parse error on input "),
1346 char '`', text token, char '\'']
1349 token = lexemeToString s