StringBuffer,
P, thenP, thenP_, returnP, mapP, failP, failMsgP,
- getSrcLocP, getSrcFile,
+ getSrcLocP, setSrcLocP, getSrcFile,
layoutOn, layoutOff, pushContext, popContext
) where
import Char ( isSpace, toUpper )
import List ( isSuffixOf )
-import IdInfo ( InlinePragInfo(..) )
import PrelNames ( mkTupNameStr )
import CmdLineOpts ( opt_HiVersion, opt_NoHiCheck )
-import Demand ( Demand(..) {- instance Read -} )
+import ForeignCall ( Safety(..) )
+import NewDemand ( StrictSig(..), Demand(..), Demands(..),
+ DmdResult(..), mkTopDmdType, evalDmd, lazyDmd )
import UniqFM ( listToUFM, lookupUFM )
-import BasicTypes ( NewOrData(..), Boxity(..) )
+import BasicTypes ( Boxity(..) )
import SrcLoc ( SrcLoc, incSrcLine, srcLocFile, srcLocLine,
replaceSrcLine, mkSrcLoc )
import StringBuffer
import GlaExts
import Ctype
-import Char ( chr )
+import Char ( chr, ord )
import PrelRead ( readRational__ ) -- Glasgow non-std
\end{code}
| ITthen
| ITtype
| ITwhere
- | ITscc
+ | ITscc -- ToDo: remove (we use {-# SCC "..." #-} now)
| ITforall -- GHC extension keywords
| ITforeign
| ITexport
| ITlabel
| ITdynamic
+ | ITsafe
| ITunsafe
| ITwith
| ITstdcallconv
| ITccallconv
+ | ITdotnet
| ITinterface -- interface keywords
| IT__export
| ITcoerce
| ITinlineMe
| ITinlineCall
- | ITccall (Bool,Bool,Bool) -- (is_dyn, is_casm, may_gc)
+ | ITccall (Bool,Bool,Safety) -- (is_dyn, is_casm, may_gc)
| ITdefaultbranch
| ITbottom
| ITinteger_lit
| ITint64_lit
| ITrational_lit
| ITaddr_lit
+ | ITlabel_lit
| ITlit_lit
| ITstring_lit
| ITtypeapp
| ITarity
| ITspecialise
| ITnocaf
- | ITunfold InlinePragInfo
- | ITstrict ([Demand], Bool)
+ | ITunfold
+ | ITstrict StrictSig
| ITrules
| ITcprinfo
| ITdeprecated
| ITrules_prag
| ITdeprecated_prag
| ITline_prag
+ | ITscc_prag
| ITclose_prag
| ITdotdot -- reserved symbols
| ITocurly -- special symbols
| ITccurly
+ | ITocurlybar -- {|, for type applications
+ | ITccurlybar -- |}, for type applications
| ITvccurly
| ITobrack
| ITcbrack
| ITqvarsym (FAST_STRING,FAST_STRING)
| ITqconsym (FAST_STRING,FAST_STRING)
- | ITipvarid FAST_STRING -- GHC extension: implicit param: ?x
+ | ITdupipvarid FAST_STRING -- GHC extension: implicit param: ?x
+ | ITsplitipvarid FAST_STRING -- GHC extension: implicit param: %x
| ITpragma StringBuffer
- | ITchar Char
+ | ITchar Int
| ITstring FAST_STRING
- | ITinteger Integer
+ | ITinteger Integer
| ITrational Rational
- | ITprimchar Char
+ | ITprimchar Int
| ITprimstring FAST_STRING
| ITprimint Integer
| ITprimfloat Rational
( "LINE", ITline_prag ),
( "RULES", ITrules_prag ),
( "RULEZ", ITrules_prag ), -- american spelling :-)
+ ( "SCC", ITscc_prag ),
( "DEPRECATED", ITdeprecated_prag )
]
( "then", ITthen ),
( "type", ITtype ),
( "where", ITwhere ),
- ( "_scc_", ITscc )
+ ( "_scc_", ITscc ) -- ToDo: remove
]
+isSpecial :: Token -> Bool
+-- If we see M.x, where x is a keyword, but
+-- is special, we treat it as just plain M.x,
+-- not as a keyword.
+isSpecial ITas = True
+isSpecial IThiding = True
+isSpecial ITqualified = True
+isSpecial ITforall = True
+isSpecial ITexport = True
+isSpecial ITlabel = True
+isSpecial ITdynamic = True
+isSpecial ITsafe = True
+isSpecial ITunsafe = True
+isSpecial ITwith = True
+isSpecial ITccallconv = True
+isSpecial ITstdcallconv = True
+isSpecial _ = False
+
-- IMPORTANT: Keep this in synch with ParseIface.y's var_fs production! (SUP)
ghcExtensionKeywordsFM = listToUFM $
map (\ (x,y) -> (_PK_ x,y))
( "export", ITexport ),
( "label", ITlabel ),
( "dynamic", ITdynamic ),
+ ( "safe", ITsafe ), -- NB: must be ITsafe, not ITunsafe
( "unsafe", ITunsafe ),
( "with", ITwith ),
( "stdcall", ITstdcallconv),
( "ccall", ITccallconv),
- ("_ccall_", ITccall (False, False, False)),
- ("_ccall_GC_", ITccall (False, False, True)),
- ("_casm_", ITccall (False, True, False)),
- ("_casm_GC_", ITccall (False, True, True)),
+ ( "dotnet", ITdotnet),
+ ("_ccall_", ITccall (False, False, PlayRisky)),
+ ("_ccall_GC_", ITccall (False, False, PlaySafe)),
+ ("_casm_", ITccall (False, True, PlayRisky)),
+ ("_casm_GC_", ITccall (False, True, PlaySafe)),
-- interface keywords
("__interface", ITinterface),
("__word64", ITword64_lit),
("__rational", ITrational_lit),
("__addr", ITaddr_lit),
+ ("__label", ITlabel_lit),
("__litlit", ITlit_lit),
("__string", ITstring_lit),
("__a", ITtypeapp),
("__C", ITnocaf),
("__R", ITrules),
("__D", ITdeprecated),
- ("__U", ITunfold NoInlinePragInfo),
+ ("__U", ITunfold),
- ("__ccall", ITccall (False, False, False)),
- ("__ccall_GC", ITccall (False, False, True)),
- ("__dyn_ccall", ITccall (True, False, False)),
- ("__dyn_ccall_GC", ITccall (True, False, True)),
- ("__casm", ITccall (False, True, False)),
- ("__dyn_casm", ITccall (True, True, False)),
- ("__casm_GC", ITccall (False, True, True)),
- ("__dyn_casm_GC", ITccall (True, True, True)),
+ ("__ccall", ITccall (False, False, PlayRisky)),
+ ("__ccall_GC", ITccall (False, False, PlaySafe)),
+ ("__dyn_ccall", ITccall (True, False, PlayRisky)),
+ ("__dyn_ccall_GC", ITccall (True, False, PlaySafe)),
+ ("__casm", ITccall (False, True, PlayRisky)),
+ ("__dyn_casm", ITccall (True, True, PlayRisky)),
+ ("__casm_GC", ITccall (False, True, PlaySafe)),
+ ("__dyn_casm_GC", ITccall (True, True, PlaySafe)),
("/\\", ITbiglam)
]
-- and throw out any unrecognised pragmas as comments. Any
-- pragmas we know about are dealt with later (after any layout
-- processing if necessary).
-
- '{'# | lookAhead# buf 1# `eqChar#` '-'# ->
+ '{'# | lookAhead# buf 1# `eqChar#` '-'# ->
if lookAhead# buf 2# `eqChar#` '#'# then
if lookAhead# buf 3# `eqChar#` '#'# then is_a_token else
case expandWhile# is_space (setCurrentPos# buf 3#) of { buf1->
let lexeme = mkFastString -- ToDo: too slow
(map toUpper (lexemeToString buf2)) in
case lookupUFM pragmaKeywordsFM lexeme of
+ -- ignore RULES pragmas when -fglasgow-exts is off
+ Just ITrules_prag | not (flag glaexts) ->
+ skip_to_end (stepOnBy# buf 2#) s'
Just ITline_prag ->
line_prag skip_to_end buf2 s'
Just other -> is_a_token
else skip_to_end (stepOnBy# buf 2#) s'
where
- skip_to_end = nested_comment (lexer cont)
+ skip_to_end = skipNestedComment (lexer cont)
-- special GHC extension: we grok cpp-style #line pragmas
'#'# | lexemeIndex buf ==# bol -> -- the '#' must be in column 0
- line_prag next_line (stepOn buf) s'
+ let buf1 | lookAhead# buf 1# `eqChar#` 'l'# &&
+ lookAhead# buf 2# `eqChar#` 'i'# &&
+ lookAhead# buf 3# `eqChar#` 'n'# &&
+ lookAhead# buf 4# `eqChar#` 'e'# = stepOnBy# buf 5#
+ | otherwise = stepOn buf
+ in
+ case expandWhile# is_space buf1 of { buf2 ->
+ if is_digit (currentChar# buf2)
+ then line_prag next_line buf2 s'
+ else is_a_token
+ }
where
next_line buf = lexer cont (stepOnUntilChar# buf '\n'#)
_other -> cont (stepOverLexeme buf3) s{loc = replaceSrcLine loc l}
}}}}
-nested_comment :: P a -> P a
-nested_comment cont buf = loop buf
+skipNestedComment :: P a -> P a
+skipNestedComment cont buf state = skipNestedComment' (loc state) cont buf state
+
+skipNestedComment' :: SrcLoc -> P a -> P a
+skipNestedComment' orig_loc cont buf = loop buf
where
loop buf =
case currentChar# buf of
- '\NUL'# | bufferExhausted (stepOn buf) ->
- lexError "unterminated `{-'" buf
-
- '-'# | lookAhead# buf 1# `eqChar#` '}'# ->
- cont (stepOnBy# buf 2#)
+ '-'# | lookAhead# buf 1# `eqChar#` '}'# -> cont (stepOnBy# buf 2#)
'{'# | lookAhead# buf 1# `eqChar#` '-'# ->
- nested_comment (nested_comment cont) (stepOnBy# buf 2#)
+ skipNestedComment
+ (skipNestedComment' orig_loc cont)
+ (stepOnBy# buf 2#)
'\n'# -> \ s@PState{loc=loc} ->
let buf' = stepOn buf in
- nested_comment cont buf'
- s{loc = incSrcLine loc, bol = currentIndex# buf',
- atbol = 1#}
+ loop buf' s{loc = incSrcLine loc,
+ bol = currentIndex# buf',
+ atbol = 1#}
+
+ -- pass the original SrcLoc to lexError so that the error is
+ -- reported at the line it was originally on, not the line at
+ -- the end of the file.
+ '\NUL'# | bufferExhausted (stepOn buf) ->
+ \s -> lexError "unterminated `{-'" buf s{loc=orig_loc} -- -}
- _ -> nested_comment cont (stepOn buf)
+ _ -> loop (stepOn buf)
-- When we are lexing the first token of a line, check whether we need to
-- insert virtual semicolons or close braces due to layout.
lexToken :: (Token -> P a) -> Int# -> P a
lexToken cont glaexts buf =
- --trace "lexToken" $
+-- trace "lexToken" $
case currentChar# buf of
-- special symbols ----------------------------------------------------
']'# -> cont ITcbrack (incLexeme buf)
','# -> cont ITcomma (incLexeme buf)
';'# -> cont ITsemi (incLexeme buf)
-
'}'# -> \ s@PState{context = ctx} ->
case ctx of
(_:ctx') -> cont ITccurly (incLexeme buf) s{context=ctx'}
_ -> lexError "too many '}'s" buf s
+ '|'# -> case lookAhead# buf 1# of
+ '}'# | flag glaexts -> cont ITccurlybar
+ (setCurrentPos# buf 2#)
+ _ -> lex_sym cont (incLexeme buf)
+
'#'# -> case lookAhead# buf 1# of
')'# | flag glaexts -> cont ITcubxparen (setCurrentPos# buf 2#)
'-'# -> case lookAhead# buf 2# of
-> cont ITbackquote (incLexeme buf)
'{'# -> -- look for "{-##" special iface pragma
- case lookAhead# buf 1# of
+ case lookAhead# buf 1# of
+ '|'# | flag glaexts
+ -> cont ITocurlybar (setCurrentPos# buf 2#)
'-'# -> case lookAhead# buf 2# of
'#'# -> case lookAhead# buf 3# of
- '#'# ->
- let (lexeme, buf')
- = doDiscard False (stepOnBy# (stepOverLexeme buf) 4#) in
- cont (ITpragma lexeme) buf'
+ '#'# ->
+ lexPragma
+ cont
+ (\ cont lexeme buf' -> cont (ITpragma lexeme) buf')
+ 0#
+ (stepOnBy# (stepOverLexeme buf) 4#)
_ -> lex_prag cont (setCurrentPos# buf 3#)
- _ -> cont ITocurly (incLexeme buf)
- _ -> (layoutOff `thenP_` cont ITocurly) (incLexeme buf)
+ _ -> cont ITocurly (incLexeme buf)
+ _ -> (layoutOff `thenP_` cont ITocurly) (incLexeme buf)
-- strings/characters -------------------------------------------------
- '\"'#{-"-} -> lex_string cont glaexts "" (incLexeme buf)
+ '\"'#{-"-} -> lex_string cont glaexts [] (incLexeme buf)
'\''# -> lex_char (char_end cont) glaexts (incLexeme buf)
-- strictness and cpr pragmas and __scc treated specially.
cont (ITunknown "\NUL") (stepOn buf)
'?'# | flag glaexts && is_lower (lookAhead# buf 1#) ->
- lex_ip cont (incLexeme buf)
+ lex_ip ITdupipvarid cont (incLexeme buf)
+ '%'# | flag glaexts && is_lower (lookAhead# buf 1#) ->
+ lex_ip ITsplitipvarid cont (incLexeme buf)
c | is_digit c -> lex_num cont glaexts 0 buf
| is_symbol c -> lex_sym cont buf
| is_upper c -> lex_con cont glaexts buf
lex_string cont glaexts s buf
= case currentChar# buf of
'"'#{-"-} ->
- let buf' = incLexeme buf; s' = mkFastString (reverse s) in
- case currentChar# buf' of
- '#'# | flag glaexts -> cont (ITprimstring s') (incLexeme buf')
+ let buf' = incLexeme buf
+ s' = mkFastStringNarrow (map chr (reverse s))
+ in case currentChar# buf' of
+ '#'# | flag glaexts -> if all (<= 0xFF) s
+ then cont (ITprimstring s') (incLexeme buf')
+ else lexError "primitive string literal must contain only characters <= \'\\xFF\'" buf'
_ -> cont (ITstring s') buf'
-- ignore \& in a string, deal with string gaps
lex_next_string cont s glaexts c buf = lex_string cont glaexts (c:s) buf
-lex_char :: (Int# -> Char -> P a) -> Int# -> P a
+lex_char :: (Int# -> Int -> P a) -> Int# -> P a
lex_char cont glaexts buf
= case currentChar# buf of
'\\'# -> lex_escape (cont glaexts) (incLexeme buf)
- c | is_any c -> cont glaexts (C# c) (incLexeme buf)
+ c | is_any c -> cont glaexts (I# (ord# c)) (incLexeme buf)
other -> charError buf
char_end cont glaexts c buf
lex_escape cont buf
= let buf' = incLexeme buf in
case currentChar# buf of
- 'a'# -> cont '\a' buf'
- 'b'# -> cont '\b' buf'
- 'f'# -> cont '\f' buf'
- 'n'# -> cont '\n' buf'
- 'r'# -> cont '\r' buf'
- 't'# -> cont '\t' buf'
- 'v'# -> cont '\v' buf'
- '\\'# -> cont '\\' buf'
- '"'# -> cont '\"' buf'
- '\''# -> cont '\'' buf'
+ 'a'# -> cont (ord '\a') buf'
+ 'b'# -> cont (ord '\b') buf'
+ 'f'# -> cont (ord '\f') buf'
+ 'n'# -> cont (ord '\n') buf'
+ 'r'# -> cont (ord '\r') buf'
+ 't'# -> cont (ord '\t') buf'
+ 'v'# -> cont (ord '\v') buf'
+ '\\'# -> cont (ord '\\') buf'
+ '"'# -> cont (ord '\"') buf'
+ '\''# -> cont (ord '\'') buf'
'^'# -> let c = currentChar# buf' in
if c `geChar#` '@'# && c `leChar#` '_'#
- then cont (C# (chr# (ord# c -# ord# '@'#))) (incLexeme buf')
+ then cont (I# (ord# c -# ord# '@'#)) (incLexeme buf')
else charError buf'
'x'# -> readNum (after_charnum cont) buf' is_hexdigit 16 hex
_ -> case [ (c,buf2) | (p,c) <- silly_escape_chars,
Just buf2 <- [prefixMatch buf p] ] of
- (c,buf2):_ -> cont c buf2
+ (c,buf2):_ -> cont (ord c) buf2
[] -> charError buf'
-after_charnum cont i buf
- = let int = fromInteger i in
- if i >= 0 && i <= 255
- then cont (chr int) buf
+after_charnum cont i buf
+ = if i >= 0 && i <= 0x10FFFF
+ then cont (fromInteger i) buf
else charError buf
readNum cont buf is_digit base conv = read buf 0
lex_demand cont buf =
case read_em [] buf of { (ls,buf') ->
case currentChar# buf' of
- 'B'# -> cont (ITstrict (ls, True )) (incLexeme buf')
- _ -> cont (ITstrict (ls, False)) buf'
+ 'b'# -> cont (ITstrict (StrictSig (mkTopDmdType ls BotRes))) (incLexeme buf')
+ 'm'# -> cont (ITstrict (StrictSig (mkTopDmdType ls RetCPR))) (incLexeme buf')
+ _ -> cont (ITstrict (StrictSig (mkTopDmdType ls TopRes))) buf'
}
where
- -- code snatched from Demand.lhs
read_em acc buf =
case currentChar# buf of
- 'L'# -> read_em (WwLazy False : acc) (stepOn buf)
- 'A'# -> read_em (WwLazy True : acc) (stepOn buf)
- 'S'# -> read_em (WwStrict : acc) (stepOn buf)
- 'P'# -> read_em (WwPrim : acc) (stepOn buf)
- 'E'# -> read_em (WwEnum : acc) (stepOn buf)
- ')'# -> (reverse acc, stepOn buf)
- 'U'# -> do_unpack DataType True acc (stepOnBy# buf 2#)
- 'u'# -> do_unpack DataType False acc (stepOnBy# buf 2#)
- 'N'# -> do_unpack NewType True acc (stepOnBy# buf 2#)
- 'n'# -> do_unpack NewType False acc (stepOnBy# buf 2#)
- _ -> (reverse acc, buf)
+ 'T'# -> read_em (Top : acc) (stepOn buf)
+ 'L'# -> read_em (lazyDmd : acc) (stepOn buf)
+ 'A'# -> read_em (Abs : acc) (stepOn buf)
+ 'V'# -> read_em (evalDmd : acc) (stepOn buf) -- Temporary, until
+ -- we've recompiled prelude etc
+ 'C'# -> do_unary Call acc (stepOnBy# buf 2#) -- Skip 'C('
- do_unpack new_or_data wrapper_unpacks acc buf
- = case read_em [] buf of
- (stuff, rest) -> read_em (WwUnpack new_or_data wrapper_unpacks stuff : acc) rest
+ 'U'# -> do_seq1 Eval acc (stepOnBy# buf 1#)
+ 'D'# -> do_seq1 Defer acc (stepOnBy# buf 1#)
+ 'S'# -> do_seq1 (Box . Eval) acc (stepOnBy# buf 1#)
+ _ -> (reverse acc, buf)
+
+ do_seq1 fn acc buf
+ = case currentChar# buf of
+ '('# -> do_seq2 fn acc (stepOnBy# buf 1#)
+ _ -> read_em (fn (Poly Abs) : acc) buf
+
+ do_seq2 fn acc buf
+ = case read_em [] buf of { (dmds, buf) ->
+ case currentChar# buf of
+ ')'# -> read_em (fn (Prod dmds) : acc)
+ (stepOn buf)
+ '*'# -> ASSERT( length dmds == 1 )
+ read_em (fn (Poly (head dmds)) : acc)
+ (stepOnBy# buf 2#) -- Skip '*)'
+ }
+
+ do_unary fn acc buf
+ = case read_em [] buf of
+ ([dmd], rest) -> read_em (fn dmd : acc) (stepOn rest) -- Skip ')'
------------------
lex_scc cont buf =
-----------------------------------------------------------------------------
-- identifiers, symbols etc.
-lex_ip cont buf =
+lex_ip ip_constr cont buf =
case expandWhile# is_ident buf of
- buf' -> cont (ITipvarid lexeme) buf'
- where lexeme = lexemeToFastString buf'
+ buf' -> cont (ip_constr (tailFS lexeme)) buf'
+ where lexeme = lexemeToFastString buf'
lex_id cont glaexts buf =
let buf1 = expandWhile# is_ident buf in
}}}
lex_sym cont buf =
+ -- trace "lex_sym" $
case expandWhile# is_symbol buf of
buf' -> case lookupUFM haskellKeySymsFM lexeme of {
Just kwd_token -> --trace ("keysym: "++unpackFS lexeme) $
where lexeme = lexemeToFastString buf'
-lex_con cont glaexts buf =
- case expandWhile# is_ident buf of { buf1 ->
- case slurp_trailing_hashes buf1 glaexts of { buf' ->
+-- lex_con recursively collects components of a qualified identifer.
+-- The argument buf is the StringBuffer representing the lexeme
+-- identified so far, where the next character is upper-case.
- case currentChar# buf' of
- '.'# -> munch
+lex_con cont glaexts buf =
+ -- trace ("con: "{-++unpackFS lexeme-}) $
+ let empty_buf = stepOverLexeme buf in
+ case expandWhile# is_ident empty_buf of { buf1 ->
+ case slurp_trailing_hashes buf1 glaexts of { con_buf ->
+
+ let all_buf = mergeLexemes buf con_buf
+
+ con_lexeme = lexemeToFastString con_buf
+ mod_lexeme = lexemeToFastString (decLexeme buf)
+ all_lexeme = lexemeToFastString all_buf
+
+ just_a_conid
+ | emptyLexeme buf = cont (ITconid con_lexeme) all_buf
+ | otherwise = cont (ITqconid (mod_lexeme,con_lexeme)) all_buf
+ in
+
+ case currentChar# all_buf of
+ '.'# -> maybe_qualified cont glaexts all_lexeme
+ (incLexeme all_buf) just_a_conid
_ -> just_a_conid
-
- where
- just_a_conid = --trace ("con: "++unpackFS lexeme) $
- cont (ITconid lexeme) buf'
- lexeme = lexemeToFastString buf'
- munch = lex_qid cont glaexts lexeme (incLexeme buf') just_a_conid
- }}
-
-lex_qid cont glaexts mod buf just_a_conid =
+ }}
+
+
+maybe_qualified cont glaexts mod buf just_a_conid =
+ -- trace ("qid: "{-++unpackFS lexeme-}) $
case currentChar# buf of
'['# -> -- Special case for []
case lookAhead# buf 1# of
_ -> just_a_conid
'-'# -> case lookAhead# buf 1# of
- '>'# -> cont (ITqconid (mod,SLIT("->"))) (setCurrentPos# buf 2#)
+ '>'# -> cont (ITqconid (mod,SLIT("(->)"))) (setCurrentPos# buf 2#)
_ -> lex_id3 cont glaexts mod buf just_a_conid
+
_ -> lex_id3 cont glaexts mod buf just_a_conid
+
lex_id3 cont glaexts mod buf just_a_conid
+ | is_upper (currentChar# buf) =
+ lex_con cont glaexts buf
+
| is_symbol (currentChar# buf) =
let
start_new_lexeme = stepOverLexeme buf
in
+ -- trace ("lex_id31 "{-++unpackFS lexeme-}) $
case expandWhile# is_symbol start_new_lexeme of { buf' ->
let
lexeme = lexemeToFastString buf'
let
start_new_lexeme = stepOverLexeme buf
in
+ -- trace ("lex_id32 "{-++unpackFS lexeme-}) $
case expandWhile# is_ident start_new_lexeme of { buf1 ->
if emptyLexeme buf1
then just_a_conid
case slurp_trailing_hashes buf1 glaexts of { buf' ->
let
- lexeme = lexemeToFastString buf'
- new_buf = mergeLexemes buf buf'
+ lexeme = lexemeToFastString buf'
+ new_buf = mergeLexemes buf buf'
is_a_qvarid = cont (mk_qvar_token mod lexeme) new_buf
in
case _scc_ "Lex.haskellKeyword" lookupUFM haskellKeywordsFM lexeme of {
- Just kwd_token -> just_a_conid; -- avoid M.where etc.
- Nothing -> is_a_qvarid
- -- TODO: special ids (as, qualified, hiding) shouldn't be
- -- recognised as keywords here. ie. M.as is a qualified varid.
- }}}
+ Nothing -> is_a_qvarid ;
+ Just kwd_token | isSpecial kwd_token -- special ids (as, qualified, hiding) shouldn't be
+ -> is_a_qvarid -- recognised as keywords here.
+ | otherwise
+ -> just_a_conid -- avoid M.where etc.
+ }}}
slurp_trailing_hashes buf glaexts
| flag glaexts = expandWhile# (`eqChar#` '#'#) buf
| otherwise = ITvarsym pk_str
where
(C# f) = _HEAD_ pk_str
- tl = _TAIL_ pk_str
+ -- tl = _TAIL_ pk_str
mk_qvar_token m token =
+-- trace ("mk_qvar ") $
case mk_var_token token of
ITconid n -> ITqconid (m,n)
ITvarid n -> ITqvarid (m,n)
\end{code}
-----------------------------------------------------------------------------
-doDiscard rips along really fast, looking for a '#-}',
+'lexPragma' rips along really fast, looking for a '##-}',
indicating the end of the pragma we're skipping
\begin{code}
-doDiscard inStr buf =
+lexPragma cont contf inStr buf =
case currentChar# buf of
- '#'# | not inStr ->
+ '#'# | inStr ==# 0# ->
case lookAhead# buf 1# of { '#'# ->
case lookAhead# buf 2# of { '-'# ->
case lookAhead# buf 3# of { '}'# ->
- (lexemeToBuffer buf, stepOverLexeme (setCurrentPos# buf 4#));
- _ -> doDiscard inStr (incLexeme buf) };
- _ -> doDiscard inStr (incLexeme buf) };
- _ -> doDiscard inStr (incLexeme buf) }
+ contf cont (lexemeToBuffer buf)
+ (stepOverLexeme (setCurrentPos# buf 4#));
+ _ -> lexPragma cont contf inStr (incLexeme buf) };
+ _ -> lexPragma cont contf inStr (incLexeme buf) };
+ _ -> lexPragma cont contf inStr (incLexeme buf) }
+
'"'# ->
let
odd_slashes buf flg i# =
case lookAhead# buf i# of
'\\'# -> odd_slashes buf (not flg) (i# -# 1#)
_ -> flg
+
+ not_inStr = if inStr ==# 0# then 1# else 0#
in
case lookAhead# buf (negateInt# 1#) of --backwards, actually
'\\'# -> -- escaping something..
- if odd_slashes buf True (negateInt# 2#) then
- -- odd number of slashes, " is escaped.
- doDiscard inStr (incLexeme buf)
- else
- -- even number of slashes, \ is escaped.
- doDiscard (not inStr) (incLexeme buf)
- _ -> case inStr of -- forced to avoid build-up
- True -> doDiscard False (incLexeme buf)
- False -> doDiscard True (incLexeme buf)
- _ -> doDiscard inStr (incLexeme buf)
+ if odd_slashes buf True (negateInt# 2#)
+ then -- odd number of slashes, " is escaped.
+ lexPragma cont contf inStr (incLexeme buf)
+ else -- even number of slashes, \ is escaped.
+ lexPragma cont contf not_inStr (incLexeme buf)
+ _ -> lexPragma cont contf not_inStr (incLexeme buf)
+
+ '\''# | inStr ==# 0# ->
+ case lookAhead# buf 1# of { '"'# ->
+ case lookAhead# buf 2# of { '\''# ->
+ lexPragma cont contf inStr (setCurrentPos# buf 3#);
+ _ -> lexPragma cont contf inStr (incLexeme buf) };
+ _ -> lexPragma cont contf inStr (incLexeme buf) }
+
+ -- a sign that the input is ill-formed, since pragmas are
+ -- assumed to always be properly closed (in .hi files).
+ '\NUL'# -> trace "lexPragma: unexpected end-of-file" $
+ cont (ITunknown "\NUL") buf
+
+ _ -> lexPragma cont contf inStr (incLexeme buf)
\end{code}
getSrcLocP :: P SrcLoc
getSrcLocP buf s@(PState{ loc = loc }) = POk s loc
+-- use a temporary SrcLoc for the duration of the argument
+setSrcLocP :: SrcLoc -> P a -> P a
+setSrcLocP new_loc p buf s =
+ case p buf s{ loc=new_loc } of
+ POk _ a -> POk s a
+ PFailed e -> PFailed e
+
getSrcFile :: P FAST_STRING
getSrcFile buf s@(PState{ loc = loc }) = POk s (srcLocFile loc)
-getContext :: P [LayoutContext]
-getContext buf s@(PState{ context = ctx }) = POk s ctx
-
pushContext :: LayoutContext -> P ()
pushContext ctxt buf s@(PState{ context = ctx }) = POk s{context = ctxt:ctx} ()