2 % (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
4 \section[Lexical analysis]{Lexical analysis}
6 --------------------------------------------------------
8 There's a known bug in here:
10 If an interface file ends prematurely, Lex tries to
11 do headFS of an empty FastString.
13 An example that provokes the error is
15 f _:_ _forall_ [a] <<<END OF FILE>>>
16 --------------------------------------------------------
19 {-# OPTIONS -#include "ctypes.h" #-}
26 Token(..), lexer, ParseResult(..), PState(..),
30 P, thenP, thenP_, returnP, mapP, failP, failMsgP,
31 getSrcLocP, getSrcFile,
32 layoutOn, layoutOff, pushContext, popContext
35 #include "HsVersions.h"
37 import Char ( ord, isSpace, toUpper )
38 import List ( isSuffixOf )
40 import IdInfo ( InlinePragInfo(..), CprInfo(..) )
41 import Name ( isLowerISO, isUpperISO )
42 import PrelMods ( mkTupNameStr, mkUbxTupNameStr )
43 import CmdLineOpts ( opt_IgnoreIfacePragmas, opt_HiVersion, opt_NoHiCheck )
44 import Demand ( Demand(..) {- instance Read -} )
45 import UniqFM ( UniqFM, listToUFM, lookupUFM)
46 import BasicTypes ( NewOrData(..) )
47 import SrcLoc ( SrcLoc, incSrcLine, srcLocFile, srcLocLine,
48 replaceSrcLine, mkSrcLoc )
50 import Maybes ( MaybeErr(..) )
51 import ErrUtils ( Message )
59 #if __GLASGOW_HASKELL__ >= 303
66 import PrelRead ( readRational__ ) -- Glasgow non-std
69 %************************************************************************
71 \subsection{Data types}
73 %************************************************************************
75 The token data type, fairly un-interesting except from one
76 constructor, @ITidinfo@, which is used to lazily lex id info (arity,
77 strictness, unfolding etc).
79 The Idea/Observation here is that the renamer needs to scan through
80 all of an interface file before it can continue. But only a fraction
81 of the information contained in the file turns out to be useful, so
82 delaying as much as possible of the scanning and parsing of an
83 interface file Makes Sense (Heap profiles of the compiler
84 show a reduction in heap usage by at least a factor of two,
87 Hence, the interface file lexer spots when value declarations are
88 being scanned and returns the @ITidinfo@ and @ITtype@ constructors
89 for the type and any other id info for that binding (unfolding, strictness
90 etc). These constructors are applied to the result of lexing these sub-chunks.
92 The lexing of the type and id info is all done lazily, of course, so
93 the scanning (and subsequent parsing) will be done *only* on the ids the
94 renamer finds out that it is interested in. The rest will just be junked.
95 Laziness, you know it makes sense :-)
-- | Token type produced by the lexer.
-- NOTE(review): this excerpt is a sampled dump — the 'data Token' header
-- line and many constructors are missing (embedded source line numbers
-- jump); body left byte-identical.  Visible groups: Haskell keywords,
-- GHC-extension keywords, interface-file keywords, id-info tokens
-- (unfolding/strictness/CPR), symbols, identifiers (qualified and not),
-- and literals.
99 = ITas -- Haskell keywords
125 | ITforall -- GHC extension keywords
132 | ITinterface -- interface keywords
140 | ITccall (Bool,Bool,Bool) -- (is_dyn, is_casm, may_gc)
155 | ITunfold InlinePragInfo
156 | ITstrict ([Demand], Bool)
158 | ITcprinfo (CprInfo)
162 | ITspecialise_prag -- Pragmas
170 | ITdotdot -- reserved symbols
184 | ITbiglam -- GHC-extension symbols
187 | ITocurly -- special symbols
201 | ITvarid FAST_STRING -- identifiers
202 | ITconid FAST_STRING
203 | ITvarsym FAST_STRING
204 | ITconsym FAST_STRING
205 | ITqvarid (FAST_STRING,FAST_STRING)
206 | ITqconid (FAST_STRING,FAST_STRING)
207 | ITqvarsym (FAST_STRING,FAST_STRING)
-- Qualified names carry a (module, name) pair of FAST_STRINGs.
208 | ITqconsym (FAST_STRING,FAST_STRING)
210 | ITpragma StringBuffer
213 | ITstring FAST_STRING
215 | ITrational Rational
218 | ITprimstring FAST_STRING
220 | ITprimfloat Rational
221 | ITprimdouble Rational
222 | ITlitlit FAST_STRING
224 | ITunknown String -- Used when the lexer can't make sense of it
225 | ITeof -- end of file token
226 deriving Text -- debugging
229 -----------------------------------------------------------------------------
-- | Map from pragma names to tokens.  Callers upper-case the lexeme
-- before looking it up here (see the '{-#' handling in the lexer and
-- lex_prag), so pragma recognition is case-insensitive.
233 pragmaKeywordsFM = listToUFM $
234 map (\ (x,y) -> (_PK_ x,y))
235 [( "SPECIALISE", ITspecialise_prag ),
236 ( "SPECIALIZE", ITspecialise_prag ),
237 ( "SOURCE", ITsource_prag ),
238 ( "INLINE", ITinline_prag ),
239 ( "NOINLINE", ITnoinline_prag ),
240 ( "NOTINLINE", ITnoinline_prag ),
241 ( "LINE", ITline_prag ),
242 ( "RULES", ITrules_prag ),
243 ( "RULEZ", ITrules_prag ) -- american spelling :-)
-- | Map from Haskell reserved words to tokens.
-- NOTE(review): several entries are missing from this excerpt (source
-- line numbers jump, e.g. "case"/"data"/"do" are absent here).
246 haskellKeywordsFM = listToUFM $
247 map (\ (x,y) -> (_PK_ x,y))
248 [( "_", ITunderscore ),
251 ( "class", ITclass ),
253 ( "default", ITdefault ),
254 ( "deriving", ITderiving ),
257 ( "hiding", IThiding ),
259 ( "import", ITimport ),
261 ( "infix", ITinfix ),
262 ( "infixl", ITinfixl ),
263 ( "infixr", ITinfixr ),
264 ( "instance", ITinstance ),
266 ( "module", ITmodule ),
267 ( "newtype", ITnewtype ),
269 ( "qualified", ITqualified ),
272 ( "where", ITwhere ),
-- | Map from GHC-extension keywords (only consulted when glasgow_exts
-- is on — see lex_id) to tokens.  Covers forall/foreign/ccall forms and
-- the "__"-prefixed interface-file keywords.
-- NOTE(review): "__depends" is listed twice below (source lines 292 and
-- 298); the second entry is redundant and should be removed once the
-- full file is in view.
276 ghcExtensionKeywordsFM = listToUFM $
277 map (\ (x,y) -> (_PK_ x,y))
278 [ ( "forall", ITforall ),
279 ( "foreign", ITforeign ),
280 ( "export", ITexport ),
281 ( "label", ITlabel ),
282 ( "dynamic", ITdynamic ),
283 ( "unsafe", ITunsafe ),
284 ("_ccall_", ITccall (False, False, False)),
285 ("_ccall_GC_", ITccall (False, False, True)),
286 ("_casm_", ITccall (False, True, False)),
287 ("_casm_GC_", ITccall (False, True, True)),
289 -- interface keywords
290 ("__interface", ITinterface),
291 ("__export", IT__export),
292 ("__depends", ITdepends),
293 ("__forall", IT__forall),
294 ("__letrec", ITletrec),
295 ("__coerce", ITcoerce),
296 ("__inline_me", ITinlineMe),
297 ("__inline_call", ITinlineCall),
298 ("__depends", ITdepends),
299 ("__DEFAULT", ITdefaultbranch),
301 ("__integer", ITinteger_lit),
302 ("__float", ITfloat_lit),
303 ("__rational", ITrational_lit),
304 ("__addr", ITaddr_lit),
305 ("__litlit", ITlit_lit),
306 ("__string", ITstring_lit),
309 ("__fuall", ITfuall),
311 ("__P", ITspecialise),
314 ("__U", ITunfold NoInlinePragInfo),
316 ("__ccall", ITccall (False, False, False)),
317 ("__ccall_GC", ITccall (False, False, True)),
318 ("__dyn_ccall", ITccall (True, False, False)),
319 ("__dyn_ccall_GC", ITccall (True, False, True)),
320 ("__casm", ITccall (False, True, False)),
321 ("__dyn_casm", ITccall (True, True, False)),
322 ("__casm_GC", ITccall (False, True, True)),
323 ("__dyn_casm_GC", ITccall (True, True, True)),
-- | Map from reserved operator symbols to tokens (consulted by lex_sym).
-- NOTE(review): most entries are missing from this excerpt.
329 haskellKeySymsFM = listToUFM $
330 map (\ (x,y) -> (_PK_ x,y))
343 ,(".", ITdot) -- sadly, for 'forall a . t'
347 -----------------------------------------------------------------------------
352 - (glaexts) lexing an interface file or -fglasgow-exts
353 - (bol) pointer to beginning of line (for column calculations)
354 - (buf) pointer to beginning of token
355 - (buf) pointer to current char
356 - (atbol) flag indicating whether we're at the beginning of a line
-- | Main entry: continuation-passing lexer.  Skips whitespace, line
-- comments and comment/pragma openers, then dispatches to lexBOL (when
-- at the beginning of a line, for layout) or lexToken via is_a_token.
-- 'tab' tracks the current line (y), beginning-of-line index (bol) and
-- at-beginning-of-line flag (atbol) as unboxed Int#s.
359 lexer :: (Token -> P a) -> P a
360 lexer cont buf s@(PState{
362 glasgow_exts = glaexts,
368 -- first, start a new lexeme and lose all the whitespace
369 = tab line bol atbol (stepOverLexeme buf)
371 line = srcLocLine loc
373 tab y bol atbol buf = --trace ("tab: " ++ show (I# y) ++ " : " ++ show (currentChar buf)) $
374 case currentChar# buf of
-- '\NUL' is the end-of-buffer sentinel; a NUL before the real end of
-- the buffer is reported and skipped.
377 if bufferExhausted (stepOn buf)
378 then cont ITeof buf s'
379 else trace "lexer: misplaced NUL?" $
380 tab y bol atbol (stepOn buf)
382 '\n'# -> let buf' = stepOn buf
383 in tab (y +# 1#) (currentIndex# buf') 1# buf'
385 -- find comments. This got harder in Haskell 98.
-- 'trundle' counts the run of '-'s: two or more dashes NOT followed by
-- a symbol char is a line comment (Haskell 98 rule), otherwise it is an
-- operator and we lex it as a token.
386 '-'# -> let trundle n =
387 let next = lookAhead# buf n in
388 if next `eqChar#` '-'# then trundle (n +# 1#)
389 else if is_symbol next || n <# 2#
391 else case untilChar# (stepOnBy# buf n) '\n'# of
392 { buf' -> tab y bol atbol (stepOverLexeme buf')
396 -- comments and pragmas. We deal with LINE pragmas here,
397 -- and throw out any unrecognised pragmas as comments. Any
398 -- pragmas we know about are dealt with later (after any layout
399 -- processing if necessary).
401 '{'# | lookAhead# buf 1# `eqChar#` '-'# ->
402 if lookAhead# buf 2# `eqChar#` '#'# then
403 if lookAhead# buf 3# `eqChar#` '#'# then is_a_token else
404 case expandWhile# is_space (setCurrentPos# buf 3#) of { buf1->
405 case expandWhile# is_ident (stepOverLexeme buf1) of { buf2->
406 let lexeme = mkFastString -- ToDo: too slow
407 (map toUpper (lexemeToString buf2)) in
408 case lookupUFM pragmaKeywordsFM lexeme of
409 Just ITline_prag -> line_prag (lexer cont) buf2 s'
410 Just other -> is_a_token
411 Nothing -> skip_to_end (stepOnBy# buf 2#)
414 else skip_to_end (stepOnBy# buf 2#)
416 skip_to_end buf = nested_comment (lexer cont) buf s'
418 -- tabs have been expanded beforehand
419 c | is_space c -> tab y bol atbol (stepOn buf)
420 | otherwise -> is_a_token
422 where s' = s{loc = replaceSrcLine loc y,
426 is_a_token | atbol /=# 0# = lexBOL cont buf s'
427 | otherwise = lexToken cont glaexts buf s'
429 -- {-# LINE .. #-}  pragmas.  yeuch.
-- Parses the line number (and optional quoted file name) of a LINE
-- pragma and patches the parser state's SrcLoc accordingly.  The line
-- number refers to the *following* line, hence the subtract-one.
-- NOTE(review): the 'line_prag' header line is missing from this
-- excerpt; body left byte-identical.
431 case expandWhile# is_space buf of { buf1 ->
432 case scanNumLit 0 (stepOverLexeme buf1) of { (line,buf2) ->
433 -- subtract one: the line number refers to the *following* line.
434 let real_line = line - 1 in
435 case fromInteger real_line of { i@(I# l) ->
436 case expandWhile# is_space buf2 of { buf3 ->
437 case currentChar# buf3 of
439 case untilEndOfString# (stepOn (stepOverLexeme buf3)) of { buf4 ->
440 let file = lexemeToFastString buf4 in
441 \s@PState{loc=loc} -> skipToEnd buf4 s{loc = mkSrcLoc file i}
443 other -> \s@PState{loc=loc} -> skipToEnd buf3 s{loc = replaceSrcLine loc l}
446 skipToEnd buf = nested_comment cont buf
-- | Skip a (possibly nested) '{- ... -}' comment, keeping the line
-- count in the parser state up to date, then resume with 'cont'.
-- Reports an unterminated comment if the buffer runs out first.
448 nested_comment :: P a -> P a
449 nested_comment cont buf = loop buf
452 case currentChar# buf of
453 '\NUL'# | bufferExhausted (stepOn buf) ->
454 lexError "unterminated `{-'" buf
456 '-'# | lookAhead# buf 1# `eqChar#` '}'# ->
457 cont (stepOnBy# buf 2#)
-- A nested opener recurses with the current continuation wrapped, so
-- matching closers unwind one level at a time.
459 '{'# | lookAhead# buf 1# `eqChar#` '-'# ->
460 nested_comment (nested_comment cont) (stepOnBy# buf 2#)
462 '\n'# -> \ s@PState{loc=loc} ->
463 let buf' = stepOn buf in
464 nested_comment cont buf'
465 s{loc = incSrcLine loc, bol = currentIndex# buf',
468 _ -> nested_comment cont (stepOn buf)
470 -- When we are lexing the first token of a line, check whether we need to
471 -- insert virtual semicolons or close braces due to layout.
-- | Lex the first token of a line: consult the layout context stack and
-- emit a virtual '}' (ITvccurly) or ';' (ITsemi) as required by the
-- offside rule before handing over to lexToken.
-- NOTE(review): the need_close_curly/need_semi_colon definitions are
-- partially missing from this excerpt.
473 lexBOL :: (Token -> P a) -> P a
474 lexBOL cont buf s@(PState{
476 glasgow_exts = glaexts,
481 if need_close_curly then
482 --trace ("col = " ++ show (I# col) ++ ", layout: inserting '}'") $
483 cont ITvccurly buf s{atbol = 1#, context = tail ctx}
484 else if need_semi_colon then
485 --trace ("col = " ++ show (I# col) ++ ", layout: inserting ';'") $
486 cont ITsemi buf s{atbol = 0#}
488 lexToken cont glaexts buf s{atbol = 0#}
490 col = currentIndex# buf -# bol
503 Layout n -> col ==# n
-- | The single-token lexer proper.  Dispatches on the current character
-- to the specialised sub-lexers (strings, chars, numbers, identifiers,
-- symbols, pragmas).  'glaexts' is an Int# flag (see 'flag') enabling
-- Glasgow extensions such as unboxed tuples '(#', literal-literals
-- '``', primitive literals with trailing '#', and '__'-keywords.
506 lexToken :: (Token -> P a) -> Int# -> P a
507 lexToken cont glaexts buf =
510 case currentChar# buf of
512 -- special symbols ----------------------------------------------------
513 '('# | flag glaexts && lookAhead# buf 1# `eqChar#` '#'#
514 -> cont IToubxparen (setCurrentPos# buf 2#)
516 -> cont IToparen (incLexeme buf)
518 ')'# -> cont ITcparen (incLexeme buf)
519 '['# -> cont ITobrack (incLexeme buf)
520 ']'# -> cont ITcbrack (incLexeme buf)
521 ','# -> cont ITcomma (incLexeme buf)
522 ';'# -> cont ITsemi (incLexeme buf)
-- Closing brace pops a layout context; more '}'s than contexts is an
-- error.
524 '}'# -> \ s@PState{context = ctx} ->
526 (_:ctx') -> cont ITccurly (incLexeme buf) s{context=ctx'}
527 _ -> lexError "too many '}'s" buf s
529 '#'# -> case lookAhead# buf 1# of
530 ')'# | flag glaexts -> cont ITcubxparen (setCurrentPos# buf 2#)
531 '-'# -> case lookAhead# buf 2# of
532 '}'# -> cont ITclose_prag (setCurrentPos# buf 3#)
533 _ -> lex_sym cont (incLexeme buf)
534 _ -> lex_sym cont (incLexeme buf)
536 '`'# | flag glaexts && lookAhead# buf 1# `eqChar#` '`'#
537 -> lex_cstring cont (setCurrentPos# buf 2#)
539 -> cont ITbackquote (incLexeme buf)
541 '{'# -> -- look for "{-##" special iface pragma
542 case lookAhead# buf 1# of
543 '-'# -> case lookAhead# buf 2# of
544 '#'# -> case lookAhead# buf 3# of
-- "{-##" pragmas are captured lazily as a raw ITpragma StringBuffer;
-- doDiscard finds the closing "#-}" without lexing the contents.
547 = doDiscard False (stepOnBy# (stepOverLexeme buf) 4#) in
548 cont (ITpragma lexeme) buf'
549 _ -> lex_prag cont (setCurrentPos# buf 3#)
550 _ -> cont ITocurly (incLexeme buf)
-- Explicit open brace turns layout off for the new context.
551 _ -> (layoutOff `thenP_` cont ITocurly) (incLexeme buf)
553 -- strings/characters -------------------------------------------------
554 '\"'#{-"-} -> lex_string cont glaexts "" (incLexeme buf)
555 '\''# -> lex_char (char_end cont) glaexts (incLexeme buf)
557 -- strictness and cpr pragmas and __scc treated specially.
558 '_'# | flag glaexts ->
559 case lookAhead# buf 1# of
560 '_'# -> case lookAhead# buf 2# of
562 lex_demand cont (stepOnUntil (not . isSpace)
563 (stepOnBy# buf 3#)) -- past __S
565 lex_cpr cont (stepOnUntil (not . isSpace)
566 (stepOnBy# buf 3#)) -- past __M
568 case prefixMatch (stepOnBy# buf 3#) "cc" of
569 Just buf' -> lex_scc cont (stepOverLexeme buf')
570 Nothing -> lex_id cont glaexts buf
571 _ -> lex_id cont glaexts buf
572 _ -> lex_id cont glaexts buf
574 -- Hexadecimal and octal constants
575 '0'# | (ch `eqChar#` 'x'# || ch `eqChar#` 'X'#) && is_hexdigit ch2
576 -> readNum (after_lexnum cont glaexts) buf' is_hexdigit 16 hex
577 | (ch `eqChar#` 'o'# || ch `eqChar#` 'O'#) && is_octdigit ch2
578 -> readNum (after_lexnum cont glaexts) buf' is_octdigit 8 oct_or_dec
579 where ch = lookAhead# buf 1#
580 ch2 = lookAhead# buf 2#
581 buf' = setCurrentPos# buf 2#
584 if bufferExhausted (stepOn buf) then
587 trace "lexIface: misplaced NUL?" $
588 cont (ITunknown "\NUL") (stepOn buf)
590 c | is_digit c -> lex_num cont glaexts 0 buf
591 | is_symbol c -> lex_sym cont buf
592 | is_upper c -> lex_con cont glaexts buf
593 | is_ident c -> lex_id cont glaexts buf
594 | otherwise -> lexError "illegal character" buf
596 -- Int# is unlifted, and therefore faster than Bool for flags.
596 -- Int# is unlifted, and therefore faster than Bool for flags.
602 -------------------------------------------------------------------------------
-- Lex a recognised "{-#" pragma keyword: skip space, grab the
-- identifier, upper-case it and look it up in pragmaKeywordsFM.  The
-- Nothing case is a panic because the caller (lexToken/lexer) only gets
-- here for pragmas already found in the map.
-- NOTE(review): the 'lex_prag' header line is missing from this excerpt.
606 = case expandWhile# is_space buf of { buf1 ->
607 case expandWhile# is_ident (stepOverLexeme buf1) of { buf2 ->
608 let lexeme = mkFastString (map toUpper (lexemeToString buf2)) in
609 case lookupUFM pragmaKeywordsFM lexeme of
610 Just kw -> cont kw (mergeLexemes buf buf2)
611 Nothing -> panic "lex_prag"
614 -------------------------------------------------------------------------------
617 lex_string cont glaexts s buf
618 = case currentChar# buf of
620 let buf' = incLexeme buf; s' = mkFastString (reverse s) in
621 case currentChar# buf' of
622 '#'# | flag glaexts -> cont (ITprimstring s') (incLexeme buf')
623 _ -> cont (ITstring s') buf'
625 -- ignore \& in a string, deal with string gaps
626 '\\'# | next_ch `eqChar#` '&'#
627 -> lex_string cont glaexts s (setCurrentPos# buf 2#)
629 -> lex_stringgap cont glaexts s buf'
631 where next_ch = lookAhead# buf 1#
632 buf' = setCurrentPos# buf 2#
634 _ -> lex_char (lex_next_string cont s) glaexts buf
636 lex_stringgap cont glaexts s buf
637 = let buf' = incLexeme buf in
638 case currentChar# buf of
639 '\n'# -> \st@PState{loc = loc} -> lex_stringgap cont glaexts s buf'
640 st{loc = incSrcLine loc}
641 '\\'# -> lex_string cont glaexts s buf'
642 c | is_space c -> lex_stringgap cont glaexts s buf'
643 other -> charError buf'
645 lex_next_string cont s glaexts c buf = lex_string cont glaexts (c:s) buf
-- | Lex one character (escape sequence or plain graphic char), then
-- pass it to the continuation.  Shared by character and string literal
-- lexing.
647 lex_char :: (Int# -> Char -> P a) -> Int# -> P a
648 lex_char cont glaexts buf
649 = case currentChar# buf of
650 '\\'# -> lex_escape (cont glaexts) (incLexeme buf)
651 c | is_any c -> cont glaexts (C# c) (incLexeme buf)
652 other -> charError buf
-- Finish a character literal: expect the closing quote; a further '#'
-- (glasgow exts implied by the primchar token) gives ITprimchar.
654 char_end cont glaexts c buf
655 = case currentChar# buf of
656 '\''# -> let buf' = incLexeme buf in
657 case currentChar# buf'of
659 -> cont (ITprimchar c) (incLexeme buf')
660 _ -> cont (ITchar c) buf'
-- Escape sequences: single-letter escapes, control escapes ('\^@'..'\^_'),
-- numeric escapes in hex/octal/decimal, and the named ASCII escapes from
-- silly_escape_chars.  NOTE(review): the 'lex_escape' header line and a
-- few branch lines are missing from this excerpt.
664 = let buf' = incLexeme buf in
665 case currentChar# buf of
666 'a'# -> cont '\a' buf'
667 'b'# -> cont '\b' buf'
668 'f'# -> cont '\f' buf'
669 'n'# -> cont '\n' buf'
670 'r'# -> cont '\r' buf'
671 't'# -> cont '\t' buf'
672 'v'# -> cont '\v' buf'
673 '\\'# -> cont '\\' buf'
674 '"'# -> cont '\"' buf'
675 '\''# -> cont '\'' buf'
676 '^'# -> let c = currentChar# buf' in
677 if c `geChar#` '@'# && c `leChar#` '_'#
678 then cont (C# (chr# (ord# c -# ord# '@'#))) (incLexeme buf')
681 'x'# -> readNum (after_charnum cont) buf' is_hexdigit 16 hex
682 'o'# -> readNum (after_charnum cont) buf' is_octdigit 8 oct_or_dec
684 -> readNum (after_charnum cont) buf is_digit 10 oct_or_dec
686 _ -> case [ (c,buf2) | (p,c) <- silly_escape_chars,
687 Just buf2 <- [prefixMatch buf p] ] of
688 (c,buf2):_ -> cont c buf2
-- Numeric character escapes are range-checked to 0..255 here.
691 after_charnum cont i buf
692 = let int = fromInteger i in
693 if i >= 0 && i <= 255
694 then cont (chr int) buf
-- Generic positional-number reader: 'is_digit' selects valid digits,
-- 'base' scales the accumulator, 'conv' maps a digit char to its value.
697 readNum cont buf is_digit base conv = read buf 0
699 = case currentChar# buf of { c ->
701 then read (incLexeme buf) (i*base + (toInteger (I# (conv c))))
707 || (c `geChar#` 'a'# && c `leChar#` 'h'#)
708 || (c `geChar#` 'A'# && c `leChar#` 'H'#)
710 hex c | is_digit c = ord# c -# ord# '0'#
711 | otherwise = ord# (to_lower c) -# ord# 'a'# +# 10#
712 oct_or_dec c = ord# c -# ord# '0'#
714 is_octdigit c = c `geChar#` '0'# && c `leChar#` '7'#
717 | c `geChar#` 'A'# && c `leChar#` 'Z'#
718 = chr# (ord# c -# (ord# 'A'# -# ord# 'a'#))
721 charError buf = lexError "error in character literal" buf
723 silly_escape_chars = [
723 silly_escape_chars = [
760 -------------------------------------------------------------------------------
-- | Lex a "__S" strictness signature from an interface file into a
-- [Demand] (see Demand.lhs); a trailing 'B' marks a "bottoming" result.
-- 'read_em' decodes one letter per demand; U/u/N/n open a nested
-- WwUnpack group terminated by ')'.
762 lex_demand cont buf =
763 case read_em [] buf of { (ls,buf') ->
764 case currentChar# buf' of
765 'B'# -> cont (ITstrict (ls, True )) (incLexeme buf')
766 _ -> cont (ITstrict (ls, False)) buf'
769 -- code snatched from Demand.lhs
771 case currentChar# buf of
772 'L'# -> read_em (WwLazy False : acc) (stepOn buf)
773 'A'# -> read_em (WwLazy True : acc) (stepOn buf)
774 'S'# -> read_em (WwStrict : acc) (stepOn buf)
775 'P'# -> read_em (WwPrim : acc) (stepOn buf)
776 'E'# -> read_em (WwEnum : acc) (stepOn buf)
777 ')'# -> (reverse acc, stepOn buf)
778 'U'# -> do_unpack DataType True acc (stepOnBy# buf 2#)
779 'u'# -> do_unpack DataType False acc (stepOnBy# buf 2#)
780 'N'# -> do_unpack NewType True acc (stepOnBy# buf 2#)
781 'n'# -> do_unpack NewType False acc (stepOnBy# buf 2#)
782 _ -> (reverse acc, buf)
784 do_unpack new_or_data wrapper_unpacks acc buf
785 = case read_em [] buf of
786 (stuff, rest) -> read_em (WwUnpack new_or_data wrapper_unpacks stuff : acc) rest
-- Lex a "__M" CPR-info signature; the assertion insists exactly one
-- top-level CprInfo was read.  NOTE(review): the lex_cpr header line is
-- missing from this excerpt.
789 case read_em [] buf of { (cpr_inf,buf') ->
790 ASSERT ( null (tail cpr_inf) )
791 cont (ITcprinfo $ head cpr_inf) buf'
794 -- code snatched from lex_demand above
796 case currentChar# buf of
797 '-'# -> read_em (NoCPRInfo : acc) (stepOn buf)
798 '('# -> do_unpack acc (stepOn buf)
799 ')'# -> (reverse acc, stepOn buf)
800 _ -> (reverse acc, buf)
803 = case read_em [] buf of
804 (stuff, rest) -> read_em ((CPRInfo stuff) : acc) rest
-- "__scc" vs "__sccC" (all-CAFs cost centre).
808 case currentChar# buf of
809 'C'# -> cont ITsccAllCafs (incLexeme buf)
810 other -> cont ITscc buf
812 -----------------------------------------------------------------------------
-- | Lex a decimal numeric literal starting from accumulator 'acc'.
-- A '.' followed by a digit switches to the (unoptimised) floating
-- point path, including an optional exponent; the trailing '#'/'##'
-- forms give primitive float/double/int tokens under glasgow exts.
815 lex_num :: (Token -> P a) -> Int# -> Integer -> P a
816 lex_num cont glaexts acc buf =
817 case scanNumLit acc buf of
819 case currentChar# buf' of
820 '.'# | is_digit (lookAhead# buf' 1#) ->
821 -- this case is not optimised at all, as the
822 -- presence of floating point numbers in interface
823 -- files is not that common. (ToDo)
824 case expandWhile# is_digit (incLexeme buf') of
825 buf2 -> -- points to first non digit char
827 let l = case currentChar# buf2 of
833 = let buf3 = incLexeme buf2 in
834 case currentChar# buf3 of
835 '-'# -> expandWhile# is_digit (incLexeme buf3)
836 '+'# -> expandWhile# is_digit (incLexeme buf3)
837 x | is_digit x -> expandWhile# is_digit buf3
840 v = readRational__ (lexemeToString l)
842 in case currentChar# l of -- glasgow exts only
843 '#'# | flag glaexts -> let l' = incLexeme l in
844 case currentChar# l' of
845 '#'# -> cont (ITprimdouble v) (incLexeme l')
846 _ -> cont (ITprimfloat v) l'
847 _ -> cont (ITrational v) l
849 _ -> after_lexnum cont glaexts acc' buf'
-- Integer epilogue: trailing '#' gives ITprimint under glasgow exts.
851 after_lexnum cont glaexts i buf
852 = case currentChar# buf of
853 '#'# | flag glaexts -> cont (ITprimint i) (incLexeme buf)
854 _ -> cont (ITinteger i) buf
856 -----------------------------------------------------------------------------
857 -- C "literal literal"s (i.e. things like ``NULL'', ``stdout'' etc.)
859 -- we lexemeToFastString on the bit between the ``''s, but include the
860 -- quotes in the full lexeme.
-- | Lex a C "literal literal" (``NULL`` etc.): scan to the closing '',
-- take the text between the quotes as the FAST_STRING but keep the
-- quotes in the full lexeme.
862 lex_cstring cont buf =
863 case expandUntilMatch (stepOverLexeme buf) "\'\'" of
864 buf' -> cont (ITlitlit (lexemeToFastString
865 (setCurrentPos# buf' (negateInt# 2#))))
866 (mergeLexemes buf buf')
868 ------------------------------------------------------------------------------
-- Character classification via a C-side lookup table (``char_types'',
-- from ctypes.h): each char's flag byte is tested against a bit mask.
-- Two implementations, chosen by GHC version (Word8 ops vs raw Addr#
-- indexing).
871 is_ident, is_symbol, is_any, is_upper, is_digit :: Char# -> Bool
873 {-# INLINE is_ctype #-}
874 #if __GLASGOW_HASKELL__ >= 303
875 is_ctype :: Word8 -> Char# -> Bool
876 is_ctype mask = \c ->
877 (indexWord8OffAddr (``char_types'' :: Addr) (ord (C# c)) .&. mask) /= 0
879 is_ctype :: Int -> Char# -> Bool
880 is_ctype (I# mask) = \c ->
881 let (A# ctype) = ``char_types'' :: Addr
882 flag_word = int2Word# (ord# (indexCharOffAddr# ctype (ord# c)))
884 (flag_word `and#` (int2Word# mask)) `neWord#` (int2Word# 0#)
-- Bit assignments (must agree with ctypes.h): 1=ident, 2=symbol,
-- 8=space, 16=upper, 32=digit.
887 is_ident = is_ctype 1
888 is_symbol = is_ctype 2
890 is_space = is_ctype 8
891 is_upper = is_ctype 16
892 is_digit = is_ctype 32
894 -----------------------------------------------------------------------------
895 -- identifiers, symbols etc.
-- | Lex an identifier: slurp ident chars (plus trailing '#'s under
-- glasgow exts), then try Haskell keywords, then (glasgow exts only)
-- GHC-extension keywords, else produce a variable token.
897 lex_id cont glaexts buf =
898 case expandWhile# is_ident buf of { buf1 ->
900 case (if flag glaexts
901 then expandWhile# (eqChar# '#'#) buf1 -- slurp trailing hashes
902 else buf1) of { buf' ->
904 let lexeme = lexemeToFastString buf' in
906 case _scc_ "Lex.haskellKeyword" lookupUFM haskellKeywordsFM lexeme of {
907 Just kwd_token -> --trace ("hkeywd: "++_UNPK_(lexeme)) $
911 let var_token = cont (mk_var_token lexeme) buf' in
913 if not (flag glaexts)
917 case lookupUFM ghcExtensionKeywordsFM lexeme of {
918 Just kwd_token -> cont kwd_token buf';
-- Lex an operator symbol: reserved symbols via haskellKeySymsFM,
-- anything else becomes a varsym/consym via mk_var_token.
-- NOTE(review): the lex_sym header line is missing from this excerpt.
924 case expandWhile# is_symbol buf of
925 buf' -> case lookupUFM haskellKeySymsFM lexeme of {
926 Just kwd_token -> --trace ("keysym: "++unpackFS lexeme) $
927 cont kwd_token buf' ;
928 Nothing -> --trace ("sym: "++unpackFS lexeme) $
929 cont (mk_var_token lexeme) buf'
931 where lexeme = lexemeToFastString buf'
-- | Lex something starting with an upper-case letter: either a plain
-- constructor (just_a_conid) or, if a '.' follows, a qualified name
-- handled by lex_qid.  'just_a_conid' is the fallback continuation used
-- when the qualified parse does not pan out.
934 lex_con cont glaexts buf =
935 case expandWhile# is_ident buf of { buf1 ->
936 case slurp_trailing_hashes buf1 glaexts of { buf' ->
938 case currentChar# buf' of
943 just_a_conid = --trace ("con: "++unpackFS lexeme) $
944 cont (ITconid lexeme) buf'
945 lexeme = lexemeToFastString buf'
946 munch = lex_qid cont glaexts lexeme (incLexeme buf') just_a_conid
-- Lex the part after "M." — special cases for M.[], M.(,,), M.(#,,#)
-- and M.->, otherwise defer to lex_id3.
949 lex_qid cont glaexts mod buf just_a_conid =
950 case currentChar# buf of
951 '['# -> -- Special case for []
952 case lookAhead# buf 1# of
953 ']'# -> cont (ITqconid (mod,SLIT("[]"))) (setCurrentPos# buf 2#)
956 '('# -> -- Special case for (,,,)
957 -- This *is* necessary to deal with e.g. "instance C PrelBase.(,,)"
958 case lookAhead# buf 1# of
959 '#'# | flag glaexts -> case lookAhead# buf 2# of
960 ','# -> lex_ubx_tuple cont mod (setCurrentPos# buf 3#)
963 ')'# -> cont (ITqconid (mod,SLIT("()"))) (setCurrentPos# buf 2#)
964 ','# -> lex_tuple cont mod (setCurrentPos# buf 2#) just_a_conid
967 '-'# -> case lookAhead# buf 1# of
968 '>'# -> cont (ITqconid (mod,SLIT("->"))) (setCurrentPos# buf 2#)
969 _ -> lex_id3 cont glaexts mod buf just_a_conid
970 _ -> lex_id3 cont glaexts mod buf just_a_conid
-- Lex a qualified symbol or varid after "M.".  Keywords after the dot
-- fall back to just_a_conid (so "M.where" is not a qualified varid).
972 lex_id3 cont glaexts mod buf just_a_conid
973 | is_symbol (currentChar# buf) =
975 start_new_lexeme = stepOverLexeme buf
977 case expandWhile# is_symbol start_new_lexeme of { buf' ->
979 lexeme = lexemeToFastString buf'
980 -- real lexeme is M.<sym>
981 new_buf = mergeLexemes buf buf'
983 cont (mk_qvar_token mod lexeme) new_buf
984 -- wrong, but arguably morally right: M... is now a qvarsym
989 start_new_lexeme = stepOverLexeme buf
991 case expandWhile# is_ident start_new_lexeme of { buf1 ->
996 case slurp_trailing_hashes buf1 glaexts of { buf' ->
999 lexeme = lexemeToFastString buf'
1000 new_buf = mergeLexemes buf buf'
1001 is_a_qvarid = cont (mk_qvar_token mod lexeme) new_buf
1003 case _scc_ "Lex.haskellKeyword" lookupUFM haskellKeywordsFM lexeme of {
1004 Just kwd_token -> just_a_conid; -- avoid M.where etc.
1005 Nothing -> is_a_qvarid
1006 -- TODO: special ids (as, qualified, hiding) shouldn't be
1007 -- recognised as keywords here. ie. M.as is a qualified varid.
1011 slurp_trailing_hashes buf glaexts
1012 | flag glaexts = expandWhile# (`eqChar#` '#'#) buf
-- Classify an unqualified lexeme by its first character: upper = conid,
-- "_X" (underscore then upper) = conid in interface files, ident =
-- varid, leading ':' = consym, otherwise varsym.
1017 | is_upper f = ITconid pk_str
1018 -- _[A-Z] is treated as a constructor in interface files.
1019 | f `eqChar#` '_'# && not (_NULL_ tl)
1020 && (case _HEAD_ tl of { C# g -> is_upper g }) = ITconid pk_str
1021 | is_ident f = ITvarid pk_str
1022 | f `eqChar#` ':'# = ITconsym pk_str
1023 | otherwise = ITvarsym pk_str
1025 (C# f) = _HEAD_ pk_str
-- Qualify the result of mk_var_token with module m.
1028 mk_qvar_token m token =
1029 case mk_var_token token of
1030 ITconid n -> ITqconid (m,n)
1031 ITvarid n -> ITqvarid (m,n)
1032 ITconsym n -> ITqconsym (m,n)
1033 ITvarsym n -> ITqvarsym (m,n)
1034 _ -> ITunknown (show token)
1037 ----------------------------------------------------------------------------
1038 Horrible stuff for dealing with M.(,,,)
-- Count the commas of M.(,,,) / M.(#,,#) and emit the corresponding
-- qualified tuple constructor; 'back_off' is the fallback continuation
-- if the tuple parse fails.  NOTE(review): interior lines (the 'go'
-- setup and failure branches) are missing from this excerpt.
1041 lex_tuple cont mod buf back_off =
1045 case currentChar# buf of
1046 ','# -> go (n+1) (stepOn buf)
1047 ')'# -> cont (ITqconid (mod, snd (mkTupNameStr n))) (stepOn buf)
1050 lex_ubx_tuple cont mod buf back_off =
1054 case currentChar# buf of
1055 ','# -> go (n+1) (stepOn buf)
1056 '#'# -> case lookAhead# buf 1# of
1057 ')'# -> cont (ITqconid (mod, snd (mkUbxTupNameStr n)))
1063 -----------------------------------------------------------------------------
1064 doDiscard rips along really fast, looking for a '#-}',
1065 indicating the end of the pragma we're skipping
-- | Skim forward to the '#-}' closing a pragma, returning the skipped
-- text and the buffer after the closer.  'inStr' tracks whether we are
-- inside a string literal so a '#-}' inside a string is not taken as
-- the terminator; odd_slashes decides whether a '"' is escaped.
1068 doDiscard inStr buf =
1069 case currentChar# buf of
1071 case lookAhead# buf 1# of { '#'# ->
1072 case lookAhead# buf 2# of { '-'# ->
1073 case lookAhead# buf 3# of { '}'# ->
1074 (lexemeToBuffer buf, stepOverLexeme (setCurrentPos# buf 4#));
1075 _ -> doDiscard inStr (incLexeme buf) };
1076 _ -> doDiscard inStr (incLexeme buf) };
1077 _ -> doDiscard inStr (incLexeme buf) }
-- True iff the run of backslashes ending at offset i# has odd length.
1080 odd_slashes buf flg i# =
1081 case lookAhead# buf i# of
1082 '\\'# -> odd_slashes buf (not flg) (i# -# 1#)
1085 case lookAhead# buf (negateInt# 1#) of --backwards, actually
1086 '\\'# -> -- escaping something..
1087 if odd_slashes buf True (negateInt# 2#) then
1088 -- odd number of slashes, " is escaped.
1089 doDiscard inStr (incLexeme buf)
1091 -- even number of slashes, \ is escaped.
1092 doDiscard (not inStr) (incLexeme buf)
1093 _ -> case inStr of -- forced to avoid build-up
1094 True -> doDiscard False (incLexeme buf)
1095 False -> doDiscard True (incLexeme buf)
1096 _ -> doDiscard inStr (incLexeme buf)
1100 -----------------------------------------------------------------------------
-- | Parser state threaded through the lexer: source location, glasgow
-- extensions flag (Int#, see 'flag'), beginning-of-line index, atbol
-- flag, and the layout context stack.  NOTE(review): some fields are
-- missing from this excerpt.
1111 data PState = PState {
1113 glasgow_exts :: Int#,
1116 context :: [LayoutContext]
-- 'P' is a function from input buffer and state to a ParseResult —
-- a hand-rolled state/error monad.
1119 type P a = StringBuffer -- Input string
1124 returnP a buf s = POk s a
1126 thenP :: P a -> (a -> P b) -> P b
1127 m `thenP` k = \ buf s ->
1129 POk s1 a -> k a buf s1
1130 PFailed err -> PFailed err
1132 thenP_ :: P a -> P b -> P b
1133 m `thenP_` k = m `thenP` \_ -> k
1135 mapP :: (a -> P b) -> [a] -> P [b]
1136 mapP f [] = returnP []
1139 mapP f as `thenP` \bs ->
1142 failP :: String -> P a
1143 failP msg buf s = PFailed (text msg)
1145 failMsgP :: Message -> P a
1146 failMsgP msg buf s = PFailed msg
-- Lex errors are reported with the current SrcLoc prepended.
1148 lexError :: String -> P a
1149 lexError str buf s@PState{ loc = loc }
1150 = failMsgP (hcat [ppr loc, text ": ", text str]) buf s
1152 getSrcLocP :: P SrcLoc
1153 getSrcLocP buf s@(PState{ loc = loc }) = POk s loc
1155 getSrcFile :: P FAST_STRING
1156 getSrcFile buf s@(PState{ loc = loc }) = POk s (srcLocFile loc)
1158 getContext :: P [LayoutContext]
1159 getContext buf s@(PState{ context = ctx }) = POk s ctx
1161 pushContext :: LayoutContext -> P ()
1162 pushContext ctxt buf s@(PState{ context = ctx }) = POk s{context = ctxt:ctx} ()
1166 This special case in layoutOn is to handle layout contexts with are
1167 indented the same or less than the current context. This is illegal
1168 according to the Haskell spec, so we have to arrange to close the
1169 current context. eg.
1174 after the first 'where', the sequence of events is:
1176 - layout system inserts a ';' (column 0)
1177 - parser begins a new context at column 0
1178 - parser shifts ';' (legal empty declaration)
1179 - parser sees 'class': parse error (we're still in the inner context)
1181 trouble is, by the time we know we need a new context, the lexer has
1182 already generated the ';'. Hacky solution is as follows: since we
1183 know the column of the next token (it's the column number of the new
1184 context), we set the ACTUAL column number of the new context to this
1185 number plus one. Hence the next time the lexer is called, a '}' will
1186 be generated to close the new context straight away. Furthermore, we
1187 have to set the atbol flag so that the ';' that the parser shifted as
1188 part of the new context is re-generated.
1190 when the new context is *less* indented than the current one:
1192 f = f where g = g where
1195 - current context: column 12.
1196 - on seeing 'h' (column 0), the layout system inserts '}'
1197 - parser starts a new context, column 0
1198 - parser sees '}', uses it to close new context
1199 - we still need to insert another '}' followed by a ';',
1200 hence the atbol trick.
1202 There's also a special hack in here to deal with
1209 i.e. the inner context is at the same indentation level as the outer
1210 context. This is strictly illegal according to Haskell 98, but
1211 there's a lot of existing code using this style and it doesn't make
1212 any sense to disallow it, since empty 'do' lists don't make sense.
-- | Push a new layout context at the current column.  If the new
-- context would be indented the same as (strict) or less than the
-- enclosing one — illegal per the report — push it at offset+1 and set
-- atbol so a closing '}' and ';' are regenerated (see the long note
-- above).
1215 layoutOn :: Bool -> P ()
1216 layoutOn strict buf s@(PState{ bol = bol, context = ctx }) =
1217 let offset = lexemeIndex buf -# bol in
1220 | if strict then prev_off >=# offset else prev_off ># offset ->
1221 --trace ("layout on, column: " ++ show (I# offset)) $
1222 POk s{ context = Layout (offset +# 1#) : ctx, atbol = 1# } ()
1224 --trace ("layout on, column: " ++ show (I# offset)) $
1225 POk s{ context = Layout offset : ctx } ()
-- Push an explicit-brace (NoLayout) context.
1228 layoutOff buf s@(PState{ context = ctx }) =
1229 POk s{ context = NoLayout:ctx } ()
-- Pop the innermost context; popping an empty stack is a compiler bug.
1232 popContext = \ buf s@(PState{ context = ctx }) ->
1234 (_:tl) -> POk s{ context = tl } ()
1235 [] -> panic "Lex.popContext: empty context"
1238 Note that if the name of the file we're processing ends
1239 with `hi-boot', we accept it on faith as having the right
1240 version. This is done so that .hi-boot files that come
1241 with hsc don't have to be updated before every release,
1242 *and* it allows us to share .hi-boot files with versions
1243 of hsc that don't have .hi version checking (e.g., ghc-2.10's)
1245 If the version number is 0, the checking is also turned off.
1246 (needed to deal with GHC.hi only!)
1248 Once we can assume we're compiling with a version of ghc that
1249 supports interface file checking, we can drop the special
-- | Check an interface-file version number against opt_HiVersion.
-- Version 0 or opt_NoHiCheck disables the check; a missing version is
-- accepted on faith for *.hi-boot files (see the note above).
1252 checkVersion :: Maybe Integer -> P ()
1253 checkVersion mb@(Just v) buf s@(PState{loc = loc})
1254 | (v==0) || (v == fromInt opt_HiVersion) || opt_NoHiCheck = POk s ()
1255 | otherwise = PFailed (ifaceVersionErr mb loc ([]::[Token]){-Todo-})
1256 checkVersion mb@Nothing buf s@(PState{loc = loc})
1257 | "hi-boot" `isSuffixOf` (_UNPK_ (srcLocFile loc)) = POk s ()
1258 | otherwise = PFailed (ifaceVersionErr mb loc ([]::[Token]){-Todo-})
1260 -----------------------------------------------------------------
-- Error-message constructors for interface-file parse and version
-- failures.  NOTE(review): the ifaceParseErr equation's left-hand side
-- and the pp_version binding line are missing from this excerpt.
1262 ifaceParseErr :: StringBuffer -> SrcLoc -> Message
1264 = hsep [ppr l, ptext SLIT("Interface file parse error; on input `"),
1265 text (lexemeToString s), char '\'']
1267 ifaceVersionErr hi_vers l toks
1268 = hsep [ppr l, ptext SLIT("Interface file version error;"),
1269 ptext SLIT("Expected"), int opt_HiVersion,
1270 ptext SLIT("found "), pp_version]
1274 Nothing -> ptext SLIT("pre ghc-3.02 version")
1275 Just v -> ptext SLIT("version") <+> integer v