2 % (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
4 \section[Lexical analysis]{Lexical analysis}
6 --------------------------------------------------------
8 There's a known bug in here:
10 If an interface file ends prematurely, Lex tries to
11 do headFS of an empty FastString.
13 An example that provokes the error is
15 f _:_ _forall_ [a] <<<END OF FILE>>>
16 --------------------------------------------------------
22 ifaceParseErr, srcParseErr,
25 Token(..), lexer, ParseResult(..), PState(..),
29 P, thenP, thenP_, returnP, mapP, failP, failMsgP,
30 getSrcLocP, setSrcLocP, getSrcFile,
31 layoutOn, layoutOff, pushContext, popContext
34 #include "HsVersions.h"
36 import Char ( isSpace, toUpper )
37 import List ( isSuffixOf )
39 import IdInfo ( InlinePragInfo(..) )
40 import PrelNames ( mkTupNameStr )
41 import CmdLineOpts ( opt_HiVersion, opt_NoHiCheck )
42 import ForeignCall ( Safety(..) )
43 import NewDemand ( StrictSig(..), Demand(..), Keepity(..),
44 DmdResult(..), Deferredness(..), mkTopDmdType )
45 import UniqFM ( listToUFM, lookupUFM )
46 import BasicTypes ( Boxity(..) )
47 import SrcLoc ( SrcLoc, incSrcLine, srcLocFile, srcLocLine,
48 replaceSrcLine, mkSrcLoc )
50 import ErrUtils ( Message )
57 import Char ( chr, ord )
58 import PrelRead ( readRational__ ) -- Glasgow non-std
61 %************************************************************************
63 \subsection{Data types}
65 %************************************************************************
The token data type, fairly un-interesting except for one
68 constructor, @ITidinfo@, which is used to lazily lex id info (arity,
69 strictness, unfolding etc).
71 The Idea/Observation here is that the renamer needs to scan through
72 all of an interface file before it can continue. But only a fraction
73 of the information contained in the file turns out to be useful, so
74 delaying as much as possible of the scanning and parsing of an
75 interface file Makes Sense (Heap profiles of the compiler
76 show a reduction in heap usage by at least a factor of two,
79 Hence, the interface file lexer spots when value declarations are
being scanned and returns the @ITidinfo@ and @ITtype@ constructors
81 for the type and any other id info for that binding (unfolding, strictness
82 etc). These constructors are applied to the result of lexing these sub-chunks.
84 The lexing of the type and id info is all done lazily, of course, so
85 the scanning (and subsequent parsing) will be done *only* on the ids the
86 renamer finds out that it is interested in. The rest will just be junked.
87 Laziness, you know it makes sense :-)
91 = ITas -- Haskell keywords
115 | ITscc -- ToDo: remove (we use {-# SCC "..." #-} now)
117 | ITforall -- GHC extension keywords
128 | ITinterface -- interface keywords
136 | ITccall (Bool,Bool,Safety) -- (is_dyn, is_casm, may_gc)
155 | ITunfold InlinePragInfo
163 | ITspecialise_prag -- Pragmas
173 | ITdotdot -- reserved symbols
187 | ITbiglam -- GHC-extension symbols
189 | ITocurly -- special symbols
191 | ITocurlybar -- {|, for type applications
192 | ITccurlybar -- |}, for type applications
205 | ITvarid FAST_STRING -- identifiers
206 | ITconid FAST_STRING
207 | ITvarsym FAST_STRING
208 | ITconsym FAST_STRING
209 | ITqvarid (FAST_STRING,FAST_STRING)
210 | ITqconid (FAST_STRING,FAST_STRING)
211 | ITqvarsym (FAST_STRING,FAST_STRING)
212 | ITqconsym (FAST_STRING,FAST_STRING)
214 | ITipvarid FAST_STRING -- GHC extension: implicit param: ?x
216 | ITpragma StringBuffer
219 | ITstring FAST_STRING
221 | ITrational Rational
224 | ITprimstring FAST_STRING
226 | ITprimfloat Rational
227 | ITprimdouble Rational
228 | ITlitlit FAST_STRING
230 | ITunknown String -- Used when the lexer can't make sense of it
231 | ITeof -- end of file token
232 deriving Show -- debugging
235 -----------------------------------------------------------------------------
239 pragmaKeywordsFM = listToUFM $
240 map (\ (x,y) -> (_PK_ x,y))
241 [( "SPECIALISE", ITspecialise_prag ),
242 ( "SPECIALIZE", ITspecialise_prag ),
243 ( "SOURCE", ITsource_prag ),
244 ( "INLINE", ITinline_prag ),
245 ( "NOINLINE", ITnoinline_prag ),
246 ( "NOTINLINE", ITnoinline_prag ),
247 ( "LINE", ITline_prag ),
248 ( "RULES", ITrules_prag ),
249 ( "RULEZ", ITrules_prag ), -- american spelling :-)
250 ( "SCC", ITscc_prag ),
251 ( "DEPRECATED", ITdeprecated_prag )
254 haskellKeywordsFM = listToUFM $
255 map (\ (x,y) -> (_PK_ x,y))
256 [( "_", ITunderscore ),
259 ( "class", ITclass ),
261 ( "default", ITdefault ),
262 ( "deriving", ITderiving ),
265 ( "hiding", IThiding ),
267 ( "import", ITimport ),
269 ( "infix", ITinfix ),
270 ( "infixl", ITinfixl ),
271 ( "infixr", ITinfixr ),
272 ( "instance", ITinstance ),
274 ( "module", ITmodule ),
275 ( "newtype", ITnewtype ),
277 ( "qualified", ITqualified ),
280 ( "where", ITwhere ),
281 ( "_scc_", ITscc ) -- ToDo: remove
284 isSpecial :: Token -> Bool
285 -- If we see M.x, where x is a keyword, but
-- is special, we treat it as just plain M.x,
288 isSpecial ITas = True
289 isSpecial IThiding = True
290 isSpecial ITqualified = True
291 isSpecial ITforall = True
292 isSpecial ITexport = True
293 isSpecial ITlabel = True
294 isSpecial ITdynamic = True
295 isSpecial ITunsafe = True
296 isSpecial ITwith = True
297 isSpecial ITccallconv = True
298 isSpecial ITstdcallconv = True
301 -- IMPORTANT: Keep this in synch with ParseIface.y's var_fs production! (SUP)
302 ghcExtensionKeywordsFM = listToUFM $
303 map (\ (x,y) -> (_PK_ x,y))
304 [ ( "forall", ITforall ),
305 ( "foreign", ITforeign ),
306 ( "export", ITexport ),
307 ( "label", ITlabel ),
308 ( "dynamic", ITdynamic ),
309 ( "unsafe", ITunsafe ),
311 ( "stdcall", ITstdcallconv),
312 ( "ccall", ITccallconv),
313 ( "dotnet", ITdotnet),
314 ("_ccall_", ITccall (False, False, PlayRisky)),
315 ("_ccall_GC_", ITccall (False, False, PlaySafe)),
316 ("_casm_", ITccall (False, True, PlayRisky)),
317 ("_casm_GC_", ITccall (False, True, PlaySafe)),
319 -- interface keywords
320 ("__interface", ITinterface),
321 ("__export", IT__export),
322 ("__depends", ITdepends),
323 ("__forall", IT__forall),
324 ("__letrec", ITletrec),
325 ("__coerce", ITcoerce),
326 ("__inline_me", ITinlineMe),
327 ("__inline_call", ITinlineCall),
328 ("__depends", ITdepends),
329 ("__DEFAULT", ITdefaultbranch),
331 ("__integer", ITinteger_lit),
332 ("__float", ITfloat_lit),
333 ("__int64", ITint64_lit),
334 ("__word", ITword_lit),
335 ("__word64", ITword64_lit),
336 ("__rational", ITrational_lit),
337 ("__addr", ITaddr_lit),
338 ("__label", ITlabel_lit),
339 ("__litlit", ITlit_lit),
340 ("__string", ITstring_lit),
343 ("__fuall", ITfuall),
345 ("__P", ITspecialise),
348 ("__D", ITdeprecated),
349 ("__U", ITunfold NoInlinePragInfo),
351 ("__ccall", ITccall (False, False, PlayRisky)),
352 ("__ccall_GC", ITccall (False, False, PlaySafe)),
353 ("__dyn_ccall", ITccall (True, False, PlayRisky)),
354 ("__dyn_ccall_GC", ITccall (True, False, PlaySafe)),
355 ("__casm", ITccall (False, True, PlayRisky)),
356 ("__dyn_casm", ITccall (True, True, PlayRisky)),
357 ("__casm_GC", ITccall (False, True, PlaySafe)),
358 ("__dyn_casm_GC", ITccall (True, True, PlaySafe)),
364 haskellKeySymsFM = listToUFM $
365 map (\ (x,y) -> (_PK_ x,y))
378 ,(".", ITdot) -- sadly, for 'forall a . t'
382 -----------------------------------------------------------------------------
387 - (glaexts) lexing an interface file or -fglasgow-exts
388 - (bol) pointer to beginning of line (for column calculations)
389 - (buf) pointer to beginning of token
390 - (buf) pointer to current char
391 - (atbol) flag indicating whether we're at the beginning of a line
394 lexer :: (Token -> P a) -> P a
395 lexer cont buf s@(PState{
397 glasgow_exts = glaexts,
403 -- first, start a new lexeme and lose all the whitespace
405 tab line bol atbol (stepOverLexeme buf)
407 line = srcLocLine loc
409 tab y bol atbol buf = -- trace ("tab: " ++ show (I# y) ++ " : " ++ show (currentChar buf)) $
410 case currentChar# buf of
413 if bufferExhausted (stepOn buf)
414 then cont ITeof buf s'
415 else trace "lexer: misplaced NUL?" $
416 tab y bol atbol (stepOn buf)
418 '\n'# -> let buf' = stepOn buf
419 in tab (y +# 1#) (currentIndex# buf') 1# buf'
421 -- find comments. This got harder in Haskell 98.
422 '-'# -> let trundle n =
423 let next = lookAhead# buf n in
424 if next `eqChar#` '-'# then trundle (n +# 1#)
425 else if is_symbol next || n <# 2#
428 (stepOnUntilChar# (stepOnBy# buf n) '\n'#)
431 -- comments and pragmas. We deal with LINE pragmas here,
432 -- and throw out any unrecognised pragmas as comments. Any
433 -- pragmas we know about are dealt with later (after any layout
434 -- processing if necessary).
435 '{'# | lookAhead# buf 1# `eqChar#` '-'# ->
436 if lookAhead# buf 2# `eqChar#` '#'# then
437 if lookAhead# buf 3# `eqChar#` '#'# then is_a_token else
438 case expandWhile# is_space (setCurrentPos# buf 3#) of { buf1->
439 case expandWhile# is_ident (stepOverLexeme buf1) of { buf2->
440 let lexeme = mkFastString -- ToDo: too slow
441 (map toUpper (lexemeToString buf2)) in
442 case lookupUFM pragmaKeywordsFM lexeme of
444 line_prag skip_to_end buf2 s'
445 Just other -> is_a_token
446 Nothing -> skip_to_end (stepOnBy# buf 2#) s'
449 else skip_to_end (stepOnBy# buf 2#) s'
451 skip_to_end = nested_comment (lexer cont)
453 -- special GHC extension: we grok cpp-style #line pragmas
454 '#'# | lexemeIndex buf ==# bol -> -- the '#' must be in column 0
455 case expandWhile# is_space (stepOn buf) of { buf1 ->
456 if is_digit (currentChar# buf1)
457 then line_prag next_line buf1 s'
461 next_line buf = lexer cont (stepOnUntilChar# buf '\n'#)
463 -- tabs have been expanded beforehand
464 c | is_space c -> tab y bol atbol (stepOn buf)
465 | otherwise -> is_a_token
467 where s' = s{loc = replaceSrcLine loc y,
471 is_a_token | atbol /=# 0# = lexBOL cont buf s'
472 | otherwise = lexToken cont glaexts buf s'
474 -- {-# LINE .. #-} pragmas. yeuch.
475 line_prag cont buf s@PState{loc=loc} =
476 case expandWhile# is_space buf of { buf1 ->
477 case scanNumLit 0 (stepOverLexeme buf1) of { (line,buf2) ->
478 -- subtract one: the line number refers to the *following* line.
479 let real_line = line - 1 in
480 case fromInteger real_line of { i@(I# l) ->
481 -- ToDo, if no filename then we skip the newline.... d'oh
482 case expandWhile# is_space buf2 of { buf3 ->
483 case currentChar# buf3 of
485 case untilEndOfString# (stepOn (stepOverLexeme buf3)) of { buf4 ->
487 file = lexemeToFastString buf4
488 new_buf = stepOn (stepOverLexeme buf4)
490 if nullFastString file
491 then cont new_buf s{loc = replaceSrcLine loc l}
492 else cont new_buf s{loc = mkSrcLoc file i}
494 _other -> cont (stepOverLexeme buf3) s{loc = replaceSrcLine loc l}
497 nested_comment :: P a -> P a
498 nested_comment cont buf = loop buf
501 case currentChar# buf of
502 '\NUL'# | bufferExhausted (stepOn buf) ->
503 lexError "unterminated `{-'" buf -- -}
504 '-'# | lookAhead# buf 1# `eqChar#` '}'# ->
505 cont (stepOnBy# buf 2#)
507 '{'# | lookAhead# buf 1# `eqChar#` '-'# ->
508 nested_comment (nested_comment cont) (stepOnBy# buf 2#)
510 '\n'# -> \ s@PState{loc=loc} ->
511 let buf' = stepOn buf in
512 nested_comment cont buf'
513 s{loc = incSrcLine loc, bol = currentIndex# buf',
516 _ -> nested_comment cont (stepOn buf)
518 -- When we are lexing the first token of a line, check whether we need to
519 -- insert virtual semicolons or close braces due to layout.
521 lexBOL :: (Token -> P a) -> P a
522 lexBOL cont buf s@(PState{
524 glasgow_exts = glaexts,
529 if need_close_curly then
530 --trace ("col = " ++ show (I# col) ++ ", layout: inserting '}'") $
531 cont ITvccurly buf s{atbol = 1#, context = tail ctx}
532 else if need_semi_colon then
533 --trace ("col = " ++ show (I# col) ++ ", layout: inserting ';'") $
534 cont ITsemi buf s{atbol = 0#}
536 lexToken cont glaexts buf s{atbol = 0#}
538 col = currentIndex# buf -# bol
551 Layout n -> col ==# n
554 lexToken :: (Token -> P a) -> Int# -> P a
555 lexToken cont glaexts buf =
556 -- trace "lexToken" $
557 case currentChar# buf of
559 -- special symbols ----------------------------------------------------
560 '('# | flag glaexts && lookAhead# buf 1# `eqChar#` '#'#
561 -> cont IToubxparen (setCurrentPos# buf 2#)
563 -> cont IToparen (incLexeme buf)
565 ')'# -> cont ITcparen (incLexeme buf)
566 '['# -> cont ITobrack (incLexeme buf)
567 ']'# -> cont ITcbrack (incLexeme buf)
568 ','# -> cont ITcomma (incLexeme buf)
569 ';'# -> cont ITsemi (incLexeme buf)
570 '}'# -> \ s@PState{context = ctx} ->
572 (_:ctx') -> cont ITccurly (incLexeme buf) s{context=ctx'}
573 _ -> lexError "too many '}'s" buf s
574 '|'# -> case lookAhead# buf 1# of
575 '}'# | flag glaexts -> cont ITccurlybar
576 (setCurrentPos# buf 2#)
577 _ -> lex_sym cont (incLexeme buf)
580 '#'# -> case lookAhead# buf 1# of
581 ')'# | flag glaexts -> cont ITcubxparen (setCurrentPos# buf 2#)
582 '-'# -> case lookAhead# buf 2# of
583 '}'# -> cont ITclose_prag (setCurrentPos# buf 3#)
584 _ -> lex_sym cont (incLexeme buf)
585 _ -> lex_sym cont (incLexeme buf)
587 '`'# | flag glaexts && lookAhead# buf 1# `eqChar#` '`'#
588 -> lex_cstring cont (setCurrentPos# buf 2#)
590 -> cont ITbackquote (incLexeme buf)
592 '{'# -> -- look for "{-##" special iface pragma
593 case lookAhead# buf 1# of
595 -> cont ITocurlybar (setCurrentPos# buf 2#)
596 '-'# -> case lookAhead# buf 2# of
597 '#'# -> case lookAhead# buf 3# of
600 = doDiscard 0# (stepOnBy# (stepOverLexeme buf) 4#) in
601 cont (ITpragma lexeme) buf'
602 _ -> lex_prag cont (setCurrentPos# buf 3#)
603 _ -> cont ITocurly (incLexeme buf)
604 _ -> (layoutOff `thenP_` cont ITocurly) (incLexeme buf)
606 -- strings/characters -------------------------------------------------
607 '\"'#{-"-} -> lex_string cont glaexts [] (incLexeme buf)
608 '\''# -> lex_char (char_end cont) glaexts (incLexeme buf)
610 -- strictness and cpr pragmas and __scc treated specially.
611 '_'# | flag glaexts ->
612 case lookAhead# buf 1# of
613 '_'# -> case lookAhead# buf 2# of
615 lex_demand cont (stepOnUntil (not . isSpace)
616 (stepOnBy# buf 3#)) -- past __S
618 cont ITcprinfo (stepOnBy# buf 3#) -- past __M
621 case prefixMatch (stepOnBy# buf 3#) "cc" of
622 Just buf' -> lex_scc cont (stepOverLexeme buf')
623 Nothing -> lex_id cont glaexts buf
624 _ -> lex_id cont glaexts buf
625 _ -> lex_id cont glaexts buf
627 -- Hexadecimal and octal constants
628 '0'# | (ch `eqChar#` 'x'# || ch `eqChar#` 'X'#) && is_hexdigit ch2
629 -> readNum (after_lexnum cont glaexts) buf' is_hexdigit 16 hex
630 | (ch `eqChar#` 'o'# || ch `eqChar#` 'O'#) && is_octdigit ch2
631 -> readNum (after_lexnum cont glaexts) buf' is_octdigit 8 oct_or_dec
632 where ch = lookAhead# buf 1#
633 ch2 = lookAhead# buf 2#
634 buf' = setCurrentPos# buf 2#
637 if bufferExhausted (stepOn buf) then
640 trace "lexIface: misplaced NUL?" $
641 cont (ITunknown "\NUL") (stepOn buf)
643 '?'# | flag glaexts && is_lower (lookAhead# buf 1#) ->
644 lex_ip cont (incLexeme buf)
645 c | is_digit c -> lex_num cont glaexts 0 buf
646 | is_symbol c -> lex_sym cont buf
647 | is_upper c -> lex_con cont glaexts buf
648 | is_ident c -> lex_id cont glaexts buf
649 | otherwise -> lexError "illegal character" buf
651 -- Int# is unlifted, and therefore faster than Bool for flags.
657 -------------------------------------------------------------------------------
661 = case expandWhile# is_space buf of { buf1 ->
662 case expandWhile# is_ident (stepOverLexeme buf1) of { buf2 ->
663 let lexeme = mkFastString (map toUpper (lexemeToString buf2)) in
664 case lookupUFM pragmaKeywordsFM lexeme of
665 Just kw -> cont kw (mergeLexemes buf buf2)
666 Nothing -> panic "lex_prag"
669 -------------------------------------------------------------------------------
672 lex_string cont glaexts s buf
673 = case currentChar# buf of
675 let buf' = incLexeme buf
676 s' = mkFastStringNarrow (map chr (reverse s))
677 in case currentChar# buf' of
678 '#'# | flag glaexts -> if all (<= 0xFF) s
679 then cont (ITprimstring s') (incLexeme buf')
680 else lexError "primitive string literal must contain only characters <= \'\\xFF\'" buf'
681 _ -> cont (ITstring s') buf'
683 -- ignore \& in a string, deal with string gaps
684 '\\'# | next_ch `eqChar#` '&'#
685 -> lex_string cont glaexts s buf'
687 -> lex_stringgap cont glaexts s (incLexeme buf)
689 where next_ch = lookAhead# buf 1#
690 buf' = setCurrentPos# buf 2#
692 _ -> lex_char (lex_next_string cont s) glaexts buf
-- Lex the interior of a string gap (backslash, whitespace, backslash).
-- A newline inside the gap bumps the line number in the parser state;
-- the closing '\\' resumes ordinary string lexing; any non-space
-- character inside the gap is a lexical error.
lex_stringgap cont glaexts s buf
  = let buf' = incLexeme buf in
    case currentChar# buf of
        '\n'# -> \st@PState{loc = loc} -> lex_stringgap cont glaexts s buf'
                        st{loc = incSrcLine loc}
        '\\'# -> lex_string cont glaexts s buf'
        c | is_space c -> lex_stringgap cont glaexts s buf'
        other -> charError buf'
-- Continuation used while lexing a string body: cons the character just
-- lexed onto the (reversed) accumulator and carry on with the string.
lex_next_string cont acc glaexts ch buf = lex_string cont glaexts (ch : acc) buf
lex_char :: (Int# -> Int -> P a) -> Int# -> P a
-- Lex a single character (of a character literal or a string body).
-- The continuation receives the glasgow-exts flag and the character's
-- code point as an Int.
lex_char cont glaexts buf
  = case currentChar# buf of
      '\\'# -> lex_escape (cont glaexts) (incLexeme buf)  -- escape sequence
      c | is_any c -> cont glaexts (I# (ord# c)) (incLexeme buf)
      other -> charError buf
712 char_end cont glaexts c buf
713 = case currentChar# buf of
714 '\''# -> let buf' = incLexeme buf in
715 case currentChar# buf' of
717 -> cont (ITprimchar c) (incLexeme buf')
718 _ -> cont (ITchar c) buf'
722 = let buf' = incLexeme buf in
723 case currentChar# buf of
724 'a'# -> cont (ord '\a') buf'
725 'b'# -> cont (ord '\b') buf'
726 'f'# -> cont (ord '\f') buf'
727 'n'# -> cont (ord '\n') buf'
728 'r'# -> cont (ord '\r') buf'
729 't'# -> cont (ord '\t') buf'
730 'v'# -> cont (ord '\v') buf'
731 '\\'# -> cont (ord '\\') buf'
732 '"'# -> cont (ord '\"') buf'
733 '\''# -> cont (ord '\'') buf'
734 '^'# -> let c = currentChar# buf' in
735 if c `geChar#` '@'# && c `leChar#` '_'#
736 then cont (I# (ord# c -# ord# '@'#)) (incLexeme buf')
739 'x'# -> readNum (after_charnum cont) buf' is_hexdigit 16 hex
740 'o'# -> readNum (after_charnum cont) buf' is_octdigit 8 oct_or_dec
742 -> readNum (after_charnum cont) buf is_digit 10 oct_or_dec
744 _ -> case [ (c,buf2) | (p,c) <- silly_escape_chars,
745 Just buf2 <- [prefixMatch buf p] ] of
746 (c,buf2):_ -> cont (ord c) buf2
749 after_charnum cont i buf
750 = if i >= 0 && i <= 0x10FFFF
751 then cont (fromInteger i) buf
754 readNum cont buf is_digit base conv = read buf 0
756 = case currentChar# buf of { c ->
758 then read (incLexeme buf) (i*base + (toInteger (I# (conv c))))
764 || (c `geChar#` 'a'# && c `leChar#` 'f'#)
765 || (c `geChar#` 'A'# && c `leChar#` 'F'#)
767 hex c | is_digit c = ord# c -# ord# '0'#
768 | otherwise = ord# (to_lower c) -# ord# 'a'# +# 10#
769 oct_or_dec c = ord# c -# ord# '0'#
771 is_octdigit c = c `geChar#` '0'# && c `leChar#` '7'#
774 | c `geChar#` 'A'# && c `leChar#` 'Z'#
775 = chr# (ord# c -# (ord# 'A'# -# ord# 'a'#))
-- Report a malformed character literal at the current buffer position.
charError = lexError "error in character literal"
780 silly_escape_chars = [
817 -------------------------------------------------------------------------------
819 lex_demand cont buf =
820 case read_em [] buf of { (ls,buf') ->
821 case currentChar# buf' of
822 'X'# -> cont (ITstrict (StrictSig (mkTopDmdType ls BotRes))) (incLexeme buf')
823 'M'# -> cont (ITstrict (StrictSig (mkTopDmdType ls RetCPR))) (incLexeme buf')
824 _ -> cont (ITstrict (StrictSig (mkTopDmdType ls TopRes))) buf'
827 -- code snatched from Demand.lhs
829 case currentChar# buf of
830 'L'# -> read_em (Lazy : acc) (stepOn buf)
831 'A'# -> read_em (Abs : acc) (stepOn buf)
832 'V'# -> read_em (Eval : acc) (stepOn buf)
833 'X'# -> read_em (Err : acc) (stepOn buf)
834 'B'# -> read_em (Bot : acc) (stepOn buf)
835 ')'# -> (reverse acc, stepOn buf)
836 'C'# -> do_call acc (stepOnBy# buf 2#)
837 'U'# -> do_unpack1 Drop Now acc (stepOnBy# buf 1#)
838 'S'# -> do_unpack1 Keep Now acc (stepOnBy# buf 1#)
839 _ -> (reverse acc, buf)
841 do_unpack1 keepity defer acc buf
842 = case currentChar# buf of
843 '*'# -> do_unpack1 keepity Defer acc (stepOnBy# buf 1#)
844 '('# -> do_unpack2 keepity defer acc (stepOnBy# buf 1#)
845 _ -> read_em (Seq keepity defer [] : acc) buf
847 do_unpack2 keepity defer acc buf
848 = case read_em [] buf of
849 (stuff, rest) -> read_em (Seq keepity defer stuff : acc) rest
852 = case read_em [] buf of
853 ([dmd], rest) -> read_em (Call dmd : acc) rest
857 case currentChar# buf of
858 'C'# -> cont ITsccAllCafs (incLexeme buf)
859 other -> cont ITscc buf
861 -----------------------------------------------------------------------------
864 lex_num :: (Token -> P a) -> Int# -> Integer -> P a
865 lex_num cont glaexts acc buf =
866 case scanNumLit acc buf of
868 case currentChar# buf' of
869 '.'# | is_digit (lookAhead# buf' 1#) ->
870 -- this case is not optimised at all, as the
871 -- presence of floating point numbers in interface
872 -- files is not that common. (ToDo)
873 case expandWhile# is_digit (incLexeme buf') of
874 buf2 -> -- points to first non digit char
876 let l = case currentChar# buf2 of
882 = let buf3 = incLexeme buf2 in
883 case currentChar# buf3 of
884 '-'# -> expandWhile# is_digit (incLexeme buf3)
885 '+'# -> expandWhile# is_digit (incLexeme buf3)
886 x | is_digit x -> expandWhile# is_digit buf3
889 v = readRational__ (lexemeToString l)
891 in case currentChar# l of -- glasgow exts only
892 '#'# | flag glaexts -> let l' = incLexeme l in
893 case currentChar# l' of
894 '#'# -> cont (ITprimdouble v) (incLexeme l')
895 _ -> cont (ITprimfloat v) l'
896 _ -> cont (ITrational v) l
898 _ -> after_lexnum cont glaexts acc' buf'
-- An integer literal has just been scanned.  With glasgow exts enabled,
-- a trailing '#' marks an unboxed literal (ITprimint); otherwise it is
-- an ordinary ITinteger.
after_lexnum cont glaexts i buf
  = case currentChar# buf of
      '#'# | flag glaexts -> cont (ITprimint i) (incLexeme buf)
      _                   -> cont (ITinteger i) buf
905 -----------------------------------------------------------------------------
906 -- C "literal literal"s (i.e. things like ``NULL'', ``stdout'' etc.)
908 -- we lexemeToFastString on the bit between the ``''s, but include the
909 -- quotes in the full lexeme.
-- Lex a C "literal literal" such as ``NULL''.  Scan forward to the
-- closing '': the token payload is the text between the quotes
-- (setCurrentPos# backs up 2 characters to drop the closing quotes),
-- while mergeLexemes keeps the quotes in the full lexeme, as noted above.
lex_cstring cont buf =
 case expandUntilMatch (stepOverLexeme buf) "\'\'" of
   Just buf' -> cont (ITlitlit (lexemeToFastString
                                   (setCurrentPos# buf' (negateInt# 2#))))
                  (mergeLexemes buf buf')
   Nothing   -> lexError "unterminated ``" buf
918 -----------------------------------------------------------------------------
919 -- identifiers, symbols etc.
922 case expandWhile# is_ident buf of
923 buf' -> cont (ITipvarid lexeme) buf'
924 where lexeme = lexemeToFastString buf'
926 lex_id cont glaexts buf =
927 let buf1 = expandWhile# is_ident buf in
930 case (if flag glaexts
931 then expandWhile# (eqChar# '#'#) buf1 -- slurp trailing hashes
932 else buf1) of { buf' ->
934 let lexeme = lexemeToFastString buf' in
936 case _scc_ "Lex.haskellKeyword" lookupUFM haskellKeywordsFM lexeme of {
937 Just kwd_token -> --trace ("hkeywd: "++_UNPK_(lexeme)) $
941 let var_token = cont (ITvarid lexeme) buf' in
943 if not (flag glaexts)
947 case lookupUFM ghcExtensionKeywordsFM lexeme of {
948 Just kwd_token -> cont kwd_token buf';
955 case expandWhile# is_symbol buf of
956 buf' -> case lookupUFM haskellKeySymsFM lexeme of {
957 Just kwd_token -> --trace ("keysym: "++unpackFS lexeme) $
958 cont kwd_token buf' ;
959 Nothing -> --trace ("sym: "++unpackFS lexeme) $
960 cont (mk_var_token lexeme) buf'
962 where lexeme = lexemeToFastString buf'
-- lex_con recursively collects components of a qualified identifier.
966 -- The argument buf is the StringBuffer representing the lexeme
967 -- identified so far, where the next character is upper-case.
969 lex_con cont glaexts buf =
970 -- trace ("con: "{-++unpackFS lexeme-}) $
971 let empty_buf = stepOverLexeme buf in
972 case expandWhile# is_ident empty_buf of { buf1 ->
973 case slurp_trailing_hashes buf1 glaexts of { con_buf ->
975 let all_buf = mergeLexemes buf con_buf
977 con_lexeme = lexemeToFastString con_buf
978 mod_lexeme = lexemeToFastString (decLexeme buf)
979 all_lexeme = lexemeToFastString all_buf
982 | emptyLexeme buf = cont (ITconid con_lexeme) all_buf
983 | otherwise = cont (ITqconid (mod_lexeme,con_lexeme)) all_buf
986 case currentChar# all_buf of
987 '.'# -> maybe_qualified cont glaexts all_lexeme
988 (incLexeme all_buf) just_a_conid
993 maybe_qualified cont glaexts mod buf just_a_conid =
994 -- trace ("qid: "{-++unpackFS lexeme-}) $
995 case currentChar# buf of
996 '['# -> -- Special case for []
997 case lookAhead# buf 1# of
998 ']'# -> cont (ITqconid (mod,SLIT("[]"))) (setCurrentPos# buf 2#)
1001 '('# -> -- Special case for (,,,)
1002 -- This *is* necessary to deal with e.g. "instance C PrelBase.(,,)"
1003 case lookAhead# buf 1# of
1004 '#'# | flag glaexts -> case lookAhead# buf 2# of
1005 ','# -> lex_ubx_tuple cont mod (setCurrentPos# buf 3#)
1008 ')'# -> cont (ITqconid (mod,SLIT("()"))) (setCurrentPos# buf 2#)
1009 ','# -> lex_tuple cont mod (setCurrentPos# buf 2#) just_a_conid
1012 '-'# -> case lookAhead# buf 1# of
1013 '>'# -> cont (ITqconid (mod,SLIT("(->)"))) (setCurrentPos# buf 2#)
1014 _ -> lex_id3 cont glaexts mod buf just_a_conid
1016 _ -> lex_id3 cont glaexts mod buf just_a_conid
1019 lex_id3 cont glaexts mod buf just_a_conid
1020 | is_upper (currentChar# buf) =
1021 lex_con cont glaexts buf
1023 | is_symbol (currentChar# buf) =
1025 start_new_lexeme = stepOverLexeme buf
1027 -- trace ("lex_id31 "{-++unpackFS lexeme-}) $
1028 case expandWhile# is_symbol start_new_lexeme of { buf' ->
1030 lexeme = lexemeToFastString buf'
1031 -- real lexeme is M.<sym>
1032 new_buf = mergeLexemes buf buf'
1034 cont (mk_qvar_token mod lexeme) new_buf
1035 -- wrong, but arguably morally right: M... is now a qvarsym
1040 start_new_lexeme = stepOverLexeme buf
1042 -- trace ("lex_id32 "{-++unpackFS lexeme-}) $
1043 case expandWhile# is_ident start_new_lexeme of { buf1 ->
1048 case slurp_trailing_hashes buf1 glaexts of { buf' ->
1051 lexeme = lexemeToFastString buf'
1052 new_buf = mergeLexemes buf buf'
1053 is_a_qvarid = cont (mk_qvar_token mod lexeme) new_buf
1055 case _scc_ "Lex.haskellKeyword" lookupUFM haskellKeywordsFM lexeme of {
1056 Nothing -> is_a_qvarid ;
1058 Just kwd_token | isSpecial kwd_token -- special ids (as, qualified, hiding) shouldn't be
1059 -> is_a_qvarid -- recognised as keywords here.
1061 -> just_a_conid -- avoid M.where etc.
1064 slurp_trailing_hashes buf glaexts
1065 | flag glaexts = expandWhile# (`eqChar#` '#'#) buf
1070 | is_upper f = ITconid pk_str
1071 | is_ident f = ITvarid pk_str
1072 | f `eqChar#` ':'# = ITconsym pk_str
1073 | otherwise = ITvarsym pk_str
1075 (C# f) = _HEAD_ pk_str
1076 -- tl = _TAIL_ pk_str
-- Qualify an identifier/symbol token with module name m: each of the
-- four unqualified token forms is mapped to its qualified counterpart.
mk_qvar_token m token =
  -- trace ("mk_qvar ") $
  case mk_var_token token of
    ITconid n  -> ITqconid  (m,n)
    ITvarid n  -> ITqvarid  (m,n)
    ITconsym n -> ITqconsym (m,n)
    ITvarsym n -> ITqvarsym (m,n)
    _          -> ITunknown (show token)  -- fallback; mk_var_token appears
                                          -- to yield only the forms above
1088 ----------------------------------------------------------------------------
1089 Horrible stuff for dealing with M.(,,,)
1092 lex_tuple cont mod buf back_off =
1096 case currentChar# buf of
1097 ','# -> go (n+1) (stepOn buf)
1098 ')'# -> cont (ITqconid (mod, snd (mkTupNameStr Boxed n))) (stepOn buf)
1101 lex_ubx_tuple cont mod buf back_off =
1105 case currentChar# buf of
1106 ','# -> go (n+1) (stepOn buf)
1107 '#'# -> case lookAhead# buf 1# of
1108 ')'# -> cont (ITqconid (mod, snd (mkTupNameStr Unboxed n)))
1114 -----------------------------------------------------------------------------
1115 doDiscard rips along really fast, looking for a '##-}',
1116 indicating the end of the pragma we're skipping
1119 doDiscard inStr buf =
1120 case currentChar# buf of
1121 '#'# | inStr ==# 0# ->
1122 case lookAhead# buf 1# of { '#'# ->
1123 case lookAhead# buf 2# of { '-'# ->
1124 case lookAhead# buf 3# of { '}'# ->
1125 (lexemeToBuffer buf, stepOverLexeme (setCurrentPos# buf 4#));
1126 _ -> doDiscard inStr (incLexeme buf) };
1127 _ -> doDiscard inStr (incLexeme buf) };
1128 _ -> doDiscard inStr (incLexeme buf) }
1132 odd_slashes buf flg i# =
1133 case lookAhead# buf i# of
1134 '\\'# -> odd_slashes buf (not flg) (i# -# 1#)
1137 not_inStr = if inStr ==# 0# then 1# else 0#
1139 case lookAhead# buf (negateInt# 1#) of --backwards, actually
1140 '\\'# -> -- escaping something..
1141 if odd_slashes buf True (negateInt# 2#)
1142 then -- odd number of slashes, " is escaped.
1143 doDiscard inStr (incLexeme buf)
1144 else -- even number of slashes, \ is escaped.
1145 doDiscard not_inStr (incLexeme buf)
1146 _ -> doDiscard not_inStr (incLexeme buf)
1148 '\''# | inStr ==# 0# ->
1149 case lookAhead# buf 1# of { '"'# ->
1150 case lookAhead# buf 2# of { '\''# ->
1151 doDiscard inStr (setCurrentPos# buf 3#);
1152 _ -> doDiscard inStr (incLexeme buf) };
1153 _ -> doDiscard inStr (incLexeme buf) }
1155 _ -> doDiscard inStr (incLexeme buf)
1159 -----------------------------------------------------------------------------
1170 data PState = PState {
1172 glasgow_exts :: Int#,
1175 context :: [LayoutContext]
1178 type P a = StringBuffer -- Input string
-- Monadic return for the parser monad: succeed with the given value,
-- leaving the parser state unchanged.
returnP x _ st = POk st x
1185 thenP :: P a -> (a -> P b) -> P b
1186 m `thenP` k = \ buf s ->
1188 POk s1 a -> k a buf s1
1189 PFailed err -> PFailed err
thenP_ :: P a -> P b -> P b
-- Sequence two parser actions, discarding the first one's result.
thenP_ m n = m `thenP` \_ -> n
1194 mapP :: (a -> P b) -> [a] -> P [b]
1195 mapP f [] = returnP []
1198 mapP f as `thenP` \bs ->
failP :: String -> P a
-- Fail with a plain string message (wrapped into a doc via 'text').
failP msg _ _ = PFailed (text msg)
failMsgP :: Message -> P a
-- Fail with a pre-rendered error message.
failMsgP msg _ _ = PFailed msg
lexError :: String -> P a
-- Fail, prefixing the message with the current source location.
lexError msg buf st@PState{ loc = l }
  = failMsgP (hcat [ppr l, text ": ", text msg]) buf st
getSrcLocP :: P SrcLoc
-- Yield the current source location from the parser state.
getSrcLocP _ st@(PState{ loc = l }) = POk st l
1214 -- use a temporary SrcLoc for the duration of the argument
1215 setSrcLocP :: SrcLoc -> P a -> P a
1216 setSrcLocP new_loc p buf s =
1217 case p buf s{ loc=new_loc } of
1219 PFailed e -> PFailed e
getSrcFile :: P FAST_STRING
-- Yield the file name recorded in the current source location.
getSrcFile _ st@(PState{ loc = l }) = POk st (srcLocFile l)
pushContext :: LayoutContext -> P ()
-- Push a new layout context onto the context stack.
pushContext ctxt _ st@(PState{ context = stk }) = POk st{context = ctxt:stk} ()
This special case in layoutOn is to handle layout contexts which are
1230 indented the same or less than the current context. This is illegal
1231 according to the Haskell spec, so we have to arrange to close the
1232 current context. eg.
1237 after the first 'where', the sequence of events is:
1239 - layout system inserts a ';' (column 0)
1240 - parser begins a new context at column 0
1241 - parser shifts ';' (legal empty declaration)
1242 - parser sees 'class': parse error (we're still in the inner context)
1244 trouble is, by the time we know we need a new context, the lexer has
1245 already generated the ';'. Hacky solution is as follows: since we
1246 know the column of the next token (it's the column number of the new
1247 context), we set the ACTUAL column number of the new context to this
number plus one. Hence the next time the lexer is called, a '}' will
1249 be generated to close the new context straight away. Furthermore, we
1250 have to set the atbol flag so that the ';' that the parser shifted as
1251 part of the new context is re-generated.
1253 when the new context is *less* indented than the current one:
1255 f = f where g = g where
1258 - current context: column 12.
1259 - on seeing 'h' (column 0), the layout system inserts '}'
1260 - parser starts a new context, column 0
1261 - parser sees '}', uses it to close new context
1262 - we still need to insert another '}' followed by a ';',
1263 hence the atbol trick.
1265 There's also a special hack in here to deal with
1272 i.e. the inner context is at the same indentation level as the outer
1273 context. This is strictly illegal according to Haskell 98, but
1274 there's a lot of existing code using this style and it doesn't make
1275 any sense to disallow it, since empty 'do' lists don't make sense.
1278 layoutOn :: Bool -> P ()
1279 layoutOn strict buf s@(PState{ bol = bol, context = ctx }) =
1280 let offset = lexemeIndex buf -# bol in
1283 | if strict then prev_off >=# offset else prev_off ># offset ->
1284 --trace ("layout on, column: " ++ show (I# offset)) $
1285 POk s{ context = Layout (offset +# 1#) : ctx, atbol = 1# } ()
1287 --trace ("layout on, column: " ++ show (I# offset)) $
1288 POk s{ context = Layout offset : ctx } ()
-- Push an explicit NoLayout context, switching the layout algorithm
-- off for the current scope.
layoutOff _ st@(PState{ context = stk }) =
  POk st{ context = NoLayout : stk } ()
1295 popContext = \ buf s@(PState{ context = ctx, loc = loc }) ->
1297 (_:tl) -> POk s{ context = tl } ()
1298 [] -> PFailed (srcParseErr buf loc)
1301 Note that if the name of the file we're processing ends
1302 with `hi-boot', we accept it on faith as having the right
version. This is done so that .hi-boot files that come
1304 with hsc don't have to be updated before every release,
1305 *and* it allows us to share .hi-boot files with versions
1306 of hsc that don't have .hi version checking (e.g., ghc-2.10's)
1308 If the version number is 0, the checking is also turned off.
1309 (needed to deal with GHC.hi only!)
1311 Once we can assume we're compiling with a version of ghc that
1312 supports interface file checking, we can drop the special
checkVersion :: Maybe Integer -> P ()
-- Check an interface file's version number against the compiler's.
-- Version 0 disables the check (needed for GHC.hi only), as does the
-- opt_NoHiCheck flag.  When no version is present at all, files whose
-- name ends in "hi-boot" are accepted on faith (see the note above);
-- everything else is a version error.
checkVersion mb@(Just v) buf s@(PState{loc = loc})
 | (v==0) || (v == fromInt opt_HiVersion) || opt_NoHiCheck = POk s ()
 | otherwise = PFailed (ifaceVersionErr mb loc ([]::[Token]){-Todo-})
checkVersion mb@Nothing buf s@(PState{loc = loc})
 | "hi-boot" `isSuffixOf` (_UNPK_ (srcLocFile loc)) = POk s ()
 | otherwise = PFailed (ifaceVersionErr mb loc ([]::[Token]){-Todo-})
1323 -----------------------------------------------------------------
1325 ifaceParseErr :: StringBuffer -> SrcLoc -> Message
1327 = hsep [ppr l, ptext SLIT("Interface file parse error; on input `"),
1328 text (lexemeToString s), char '\'']
1330 ifaceVersionErr hi_vers l toks
1331 = hsep [ppr l, ptext SLIT("Interface file version error;"),
1332 ptext SLIT("Expected"), int opt_HiVersion,
1333 ptext SLIT("found "), pp_version]
1337 Nothing -> ptext SLIT("pre ghc-3.02 version")
1338 Just v -> ptext SLIT("version") <+> integer v
1340 -----------------------------------------------------------------------------
1342 srcParseErr :: StringBuffer -> SrcLoc -> Message
1346 then ptext SLIT(": parse error (possibly incorrect indentation)")
1347 else hcat [ptext SLIT(": parse error on input "),
1348 char '`', text token, char '\'']
1351 token = lexemeToString s