+ go "" (1::Int) input
+ where
+ go commentAcc 0 input = do setInput input
+ b <- extension rawTokenStreamEnabled
+ if b
+ then docCommentEnd input commentAcc ITblockComment _str span
+ else cont
+ go commentAcc n input = case alexGetChar input of
+ Nothing -> errBrace input span
+ Just ('-',input) -> case alexGetChar input of
+ Nothing -> errBrace input span
+ Just ('\125',input) -> go commentAcc (n-1) input
+ Just (_,_) -> go ('-':commentAcc) n input
+ Just ('\123',input) -> case alexGetChar input of
+ Nothing -> errBrace input span
+ Just ('-',input) -> go ('-':'\123':commentAcc) (n+1) input
+ Just (_,_) -> go ('\123':commentAcc) n input
+ Just (c,input) -> go (c:commentAcc) n input
+
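+-- Lex a nested doc comment such as {-| ... -}. The comment text is
+-- accumulated in reverse and handed to docCommentEnd at the closing '-}';
+-- plain {- ... -} blocks nested inside it are consumed via nested_comment.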
+nested_doc_comment :: Action
+nested_doc_comment span buf _len = withLexedDocType (go "")
+  where
+    go commentAcc input docType _ = case alexGetChar input of
+      Nothing -> errBrace input span
+      Just ('-',input) -> case alexGetChar input of
+        Nothing -> errBrace input span
+        Just ('\125',input) ->
+          docCommentEnd input commentAcc docType buf span
+        Just (_,_) -> go ('-':commentAcc) input docType False
+      Just ('\123', input) -> case alexGetChar input of
+        Nothing -> errBrace input span
+        Just ('-',input) -> do
+          setInput input
+          let cont = do input <- getInput; go commentAcc input docType False
+          nested_comment cont span buf _len
+        Just (_,_) -> go ('\123':commentAcc) input docType False
+      Just (c,input) -> go (c:commentAcc) input docType False
+
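+-- Look at the character that introduced the doc comment ('|', '^', '$',
+-- '*' or '#') and pass the matching token constructor on to the lexing
+-- continuation.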
+withLexedDocType :: (AlexInput -> (String -> Token) -> Bool -> P (Located Token))
+                 -> P (Located Token)
+withLexedDocType lexDocComment = do
+  input@(AI _ _ buf) <- getInput
+  -- The lexer rules only invoke this with one of these characters just
+  -- before the current position, so the case is exhaustive in practice.
+  case prevChar buf ' ' of
+    '|' -> lexDocComment input ITdocCommentNext False
+    '^' -> lexDocComment input ITdocCommentPrev False
+    '$' -> lexDocComment input ITdocCommentNamed False
+    '*' -> lexDocSection 1 input
+    '#' -> lexDocComment input ITdocOptionsOld False
+  where
+    lexDocSection n input = case alexGetChar input of
+      Just ('*', input) -> lexDocSection (n+1) input -- count '*'s: section depth
+      Just (_,   _)     -> lexDocComment input (ITdocSection n) True
+      Nothing -> do setInput input; lexToken -- eof reached, lex it normally
+
+-- RULES pragmas turn on the forall and '.' keywords, and we turn them
+-- off again at the end of the pragma.
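+-- For example, while the pragma below is open, 'forall' and '.' must lex
+-- as keywords for its body to parse at all (an illustrative rule, not one
+-- used by this module):
+--
+--   {-# RULES "map/map" forall f g xs. map f (map g xs) = map (f . g) xs #-}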
+rulePrag :: Action
+rulePrag span _buf _len = do
+ setExts (.|. bit inRulePragBit)
+ return (L span ITrules_prag)
+
+endPrag :: Action
+endPrag span _buf _len = do
+ setExts (.&. complement (bit inRulePragBit))
+ return (L span ITclose_prag)
+
+-- docCommentEnd
+-------------------------------------------------------------------------------
+-- This function is quite tricky. We can't just return a new token; we also
+-- need to update the state of the parser. Why? Because the token is longer
+-- than what was lexed by Alex, and the lexToken function doesn't know this, so
+-- it writes the wrong token length to the parser state. This function is
+-- called afterwards, so it can just update the state.
+
+-- This is complicated by the fact that Haddock tokens can span multiple lines,
+-- which is something that the original lexer didn't account for.
+-- I have added last_line_len to the parser state, which represents the length
+-- of the part of the token that is on the last line. It is now used for layout
+-- calculation in pushCurrentContext instead of last_len. last_len is, like it
+-- was before, the full length of the token, and it is now only used for error
+-- messages. /Waern
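+--
+-- A worked example with illustrative numbers: a doc comment ending 5
+-- characters into its final line with a total length of 20 gives
+-- last_offs = 5 and last_len = 20, so last_offs - last_len < 0 and
+-- last_line_len = last_offs = 5.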
+
+docCommentEnd :: AlexInput -> String -> (String -> Token) -> StringBuffer ->
+                 SrcSpan -> P (Located Token)
+docCommentEnd input commentAcc docType buf span = do
+  setInput input
+  let (AI loc last_offs nextBuf) = input
+      comment = reverse commentAcc
+      span' = mkSrcSpan (srcSpanStart span) loc
+      last_len = byteDiff buf nextBuf
+
+      -- If the token spans several lines, the offset into the last line
+      -- (last_offs) is smaller than the full token length, and it is
+      -- exactly the part of the token on the last line.
+      last_line_len = if (last_offs - last_len < 0)
+                        then last_offs
+                        else last_len
+
+  span `seq` setLastToken span' last_len last_line_len
+  return (L span' (docType comment))
+
+errBrace :: AlexInput -> SrcSpan -> P a
+errBrace (AI end _ _) span = failLocMsgP (srcSpanStart span) end "unterminated `{-'"
+