From 448ab6144535f901601aee7f4b97d952d98c16e8 Mon Sep 17 00:00:00 2001 From: Eugene Auduchinok Date: Tue, 23 May 2023 11:48:44 +0200 Subject: [PATCH] LexFilter: cleanup whitespaces (#15250) --- src/Compiler/SyntaxTree/LexFilter.fs | 1628 +++++++++++++------------- 1 file changed, 814 insertions(+), 814 deletions(-) diff --git a/src/Compiler/SyntaxTree/LexFilter.fs b/src/Compiler/SyntaxTree/LexFilter.fs index f0fc084b7..e163646f6 100644 --- a/src/Compiler/SyntaxTree/LexFilter.fs +++ b/src/Compiler/SyntaxTree/LexFilter.fs @@ -6,7 +6,7 @@ module internal FSharp.Compiler.LexFilter open System.Collections.Generic open Internal.Utilities.Text.Lexing -open FSharp.Compiler +open FSharp.Compiler open Internal.Utilities.Library open FSharp.Compiler.AbstractIL.Diagnostics open FSharp.Compiler.DiagnosticsLogger @@ -22,52 +22,52 @@ let stringOfPos (pos: Position) = sprintf "(%d:%d)" pos.OriginalLine pos.Column let outputPos os (pos: Position) = Printf.fprintf os "(%d:%d)" pos.OriginalLine pos.Column -/// Used for warning strings, which should display columns as 1-based and display +/// Used for warning strings, which should display columns as 1-based and display /// the lines after taking '# line' directives into account (i.e. do not use /// p.OriginalLine) let warningStringOfPosition (pos: Position) = warningStringOfCoords pos.Line pos.Column -type Context = +type Context = // Position is position of keyword. - // bool indicates 'LET' is an offside let that's part of a CtxtSeqBlock where the 'in' is optional - | CtxtLetDecl of bool * Position - | CtxtIf of Position - | CtxtTry of Position - | CtxtFun of Position - | CtxtFunction of Position - | CtxtWithAsLet of Position // 'with' when used in an object expression - | CtxtWithAsAugment of Position // 'with' as used in a type augmentation - | CtxtMatch of Position - | CtxtFor of Position - | CtxtWhile of Position - | CtxtWhen of Position + // bool indicates 'LET' is an offside let that's part of a CtxtSeqBlock where the 'in' is optional + | CtxtLetDecl of bool * Position + | CtxtIf of Position + | CtxtTry of Position + | CtxtFun of Position + | CtxtFunction of Position + | CtxtWithAsLet of Position // 'with' when used in an object expression + | CtxtWithAsAugment of Position // 'with' as used in a type augmentation + | CtxtMatch of Position + | CtxtFor of Position + | CtxtWhile of Position + | CtxtWhen of Position | CtxtVanilla of Position * bool // boolean indicates if vanilla started with 'x = ...' or 'x.y = ...' - | CtxtThen of Position - | CtxtElse of Position - | CtxtDo of Position - | CtxtInterfaceHead of Position + | CtxtThen of Position + | CtxtElse of Position + | CtxtDo of Position + | CtxtInterfaceHead of Position | CtxtTypeDefns of Position // 'type =', not removed when we find the "=" - + | CtxtNamespaceHead of Position * token | CtxtModuleHead of Position * token * LexingModuleAttributes - | CtxtMemberHead of Position - | CtxtMemberBody of Position - // If bool is true then this is "whole file" + | CtxtMemberHead of Position + | CtxtMemberBody of Position + // If bool is true then this is "whole file" // module A.B - // If bool is false, this is a "module declaration" + // If bool is false, this is a "module declaration" // module A = ... 
- | CtxtModuleBody of Position * bool - | CtxtNamespaceBody of Position - | CtxtException of Position - | CtxtParen of token * Position - // Position is position of following token - | CtxtSeqBlock of FirstInSequence * Position * AddBlockEnd + | CtxtModuleBody of Position * bool + | CtxtNamespaceBody of Position + | CtxtException of Position + | CtxtParen of token * Position + // Position is position of following token + | CtxtSeqBlock of FirstInSequence * Position * AddBlockEnd // Indicates we're processing the second part of a match, after the 'with' // First bool indicates "was this 'with' followed immediately by a '|'"? | CtxtMatchClauses of bool * Position - member c.StartPos = - match c with + member c.StartPos = + match c with | CtxtNamespaceHead (p, _) | CtxtModuleHead (p, _, _) | CtxtException p | CtxtModuleBody (p, _) | CtxtNamespaceBody p | CtxtLetDecl (_, p) | CtxtDo p | CtxtInterfaceHead p | CtxtTypeDefns p | CtxtParen (_, p) | CtxtMemberHead p | CtxtMemberBody p | CtxtWithAsLet p @@ -77,8 +77,8 @@ type Context = member c.StartCol = c.StartPos.Column - override c.ToString() = - match c with + override c.ToString() = + match c with | CtxtNamespaceHead _ -> "nshead" | CtxtModuleHead _ -> "modhead" | CtxtException _ -> "exception" @@ -100,7 +100,7 @@ type Context = | CtxtMatch _ -> "match" | CtxtFor _ -> "for" | CtxtWhile p -> sprintf "while(%s)" (stringOfPos p) - | CtxtWhen _ -> "when" + | CtxtWhen _ -> "when" | CtxtTry _ -> "try" | CtxtFun _ -> "fun" | CtxtFunction _ -> "function" @@ -108,54 +108,54 @@ type Context = | CtxtThen _ -> "then" | CtxtElse p -> sprintf "else(%s)" (stringOfPos p) | CtxtVanilla (p, _) -> sprintf "vanilla(%s)" (stringOfPos p) - + and AddBlockEnd = AddBlockEnd | NoAddBlockEnd | AddOneSidedBlockEnd and FirstInSequence = FirstInSeqBlock | NotFirstInSeqBlock and LexingModuleAttributes = LexingModuleAttributes | NotLexingModuleAttributes -let isInfix token = - match token with - | COMMA - | BAR_BAR - | AMP_AMP - | AMP +let isInfix token = + match token with + | COMMA + | BAR_BAR + | AMP_AMP + | AMP | OR - | INFIX_BAR_OP _ - | INFIX_AMP_OP _ - | INFIX_COMPARE_OP _ - | DOLLAR + | INFIX_BAR_OP _ + | INFIX_AMP_OP _ + | INFIX_COMPARE_OP _ + | DOLLAR // For the purposes of #light processing, <, > and = are not considered to be infix operators. // This is because treating them as infix conflicts with their role in other parts of the grammar, - // e.g. to delimit "f", or for "let f x = ...." + // e.g. to delimit "f", or for "let f x = ...." // // This has the impact that a SeqBlock does not automatically start on the right of a "<", ">" or "=", // e.g. 
- // let f x = (x = + // let f x = (x = // let a = 1 // no #light block started here, parentheses or 'in' needed // a + x) // LESS | GREATER | EQUALS - + | INFIX_AT_HAT_OP _ - | PLUS_MINUS_OP _ + | PLUS_MINUS_OP _ | COLON_COLON | COLON_GREATER | COLON_QMARK_GREATER - | COLON_EQUALS - | MINUS - | STAR + | COLON_EQUALS + | MINUS + | STAR | INFIX_STAR_DIV_MOD_OP _ - | INFIX_STAR_STAR_OP _ + | INFIX_STAR_STAR_OP _ | QMARK_QMARK -> true | _ -> false -let infixTokenLength token = - match token with +let infixTokenLength token = + match token with | COMMA -> 1 | AMP -> 1 | OR -> 1 | DOLLAR -> 1 - | MINUS -> 1 + | MINUS -> 1 | STAR -> 1 | BAR -> 1 | LESS false -> 1 @@ -167,16 +167,16 @@ let infixTokenLength token = | COLON_EQUALS -> 2 | BAR_BAR -> 2 | AMP_AMP -> 2 - | INFIX_BAR_OP d - | INFIX_AMP_OP d - | INFIX_COMPARE_OP d + | INFIX_BAR_OP d + | INFIX_AMP_OP d + | INFIX_COMPARE_OP d | INFIX_AT_HAT_OP d - | PLUS_MINUS_OP d + | PLUS_MINUS_OP d | INFIX_STAR_DIV_MOD_OP d | INFIX_STAR_STAR_OP d -> d.Length | COLON_QMARK_GREATER -> 3 | _ -> assert false; 1 - + /// Matches against a left-parenthesis-like token that is valid in expressions. // // LBRACK_LESS and GREATER_RBRACK are not here because adding them in these active patterns @@ -197,20 +197,20 @@ let (|TokenRExprParen|_|) token = /// Determine the tokens that may align with the 'if' of an 'if/then/elif/else' without closing /// the construct let rec isIfBlockContinuator token = - match token with + match token with // The following tokens may align with the "if" without closing the "if", e.g. // if ... // then ... // elif ... - // else ... - | THEN | ELSE | ELIF -> true - // Likewise + // else ... + | THEN | ELSE | ELIF -> true + // Likewise // if ... then ( - // ) elif begin - // end else ... - | END | RPAREN -> true - // The following arise during reprocessing of the inserted tokens, e.g. when we hit a DONE - | ORIGHT_BLOCK_END | OBLOCKEND | ODECLEND -> true + // ) elif begin + // end else ... + | END | RPAREN -> true + // The following arise during reprocessing of the inserted tokens, e.g. when we hit a DONE + | ORIGHT_BLOCK_END | OBLOCKEND | ODECLEND -> true | ODUMMY token -> isIfBlockContinuator token | _ -> false @@ -218,215 +218,215 @@ let rec isIfBlockContinuator token = /// Determine the token that may align with the 'match' of a 'match/with' without closing /// the construct let rec isMatchBlockContinuator token = - match token with + match token with // These tokens may align with the "match" without closing the construct, e.g. // match ... - // with ... + // with ... | WITH -> true // The following arise during reprocessing of the inserted tokens when we hit a DONE - | ORIGHT_BLOCK_END | OBLOCKEND | ODECLEND -> true + | ORIGHT_BLOCK_END | OBLOCKEND | ODECLEND -> true | ODUMMY token -> isMatchBlockContinuator token | _ -> false /// Determine the token that may align with the 'try' of a 'try/with' or 'try/finally' without closing /// the construct let rec isTryBlockContinuator token = - match token with + match token with // These tokens may align with the "try" without closing the construct, e.g. // try ... - // with ... - | FINALLY | WITH -> true + // with ... 
+ | FINALLY | WITH -> true // The following arise during reprocessing of the inserted tokens when we hit a DONE - | ORIGHT_BLOCK_END | OBLOCKEND | ODECLEND -> true + | ORIGHT_BLOCK_END | OBLOCKEND | ODECLEND -> true | ODUMMY token -> isTryBlockContinuator token | _ -> false let rec isThenBlockContinuator token = - match token with + match token with // The following arise during reprocessing of the inserted tokens when we hit a DONE - | ORIGHT_BLOCK_END | OBLOCKEND | ODECLEND -> true + | ORIGHT_BLOCK_END | OBLOCKEND | ODECLEND -> true | ODUMMY token -> isThenBlockContinuator token | _ -> false let rec isDoContinuator token = - match token with + match token with // These tokens may align with the "for" without closing the construct, e.g. - // for ... - // do - // ... - // done *) - | DONE -> true + // for ... + // do + // ... + // done *) + | DONE -> true | ORIGHT_BLOCK_END | OBLOCKEND | ODECLEND -> true // The following arise during reprocessing of the inserted tokens when we hit a DONE | ODUMMY token -> isDoContinuator token | _ -> false let rec isInterfaceContinuator token = - match token with + match token with // These tokens may align with the token "interface" without closing the construct, e.g. - // interface ... with + // interface ... with // ... - // end - | END -> true + // end + | END -> true | ORIGHT_BLOCK_END | OBLOCKEND | ODECLEND -> true // The following arise during reprocessing of the inserted tokens when we hit a DONE | ODUMMY token -> isInterfaceContinuator token | _ -> false let rec isNamespaceContinuator token = - match token with - // These tokens end the construct, e.g. - // namespace A.B.C + match token with + // These tokens end the construct, e.g. + // namespace A.B.C // ... // namespace <-- here - // .... - | EOF _ | NAMESPACE -> false + // .... + | EOF _ | NAMESPACE -> false | ODUMMY token -> isNamespaceContinuator token - | _ -> true // anything else is a namespace continuator + | _ -> true // anything else is a namespace continuator let rec isTypeContinuator token = - match token with + match token with // The following tokens may align with the token "type" without closing the construct, e.g. - // type X = + // type X = // | A // | B // and Y = c <--- 'and' HERE - // + // // type X = { // x: int // y: int // } <--- '}' HERE - // and Y = c + // and Y = c // // type Complex = struct // val im : float // end with <--- 'end' HERE // static member M() = 1 - // end - | RBRACE _ | WITH | BAR | AND | END -> true - - // The following arise during reprocessing of the inserted tokens when we hit a DONE - | ORIGHT_BLOCK_END | OBLOCKEND | ODECLEND -> true + // end + | RBRACE _ | WITH | BAR | AND | END -> true + + // The following arise during reprocessing of the inserted tokens when we hit a DONE + | ORIGHT_BLOCK_END | OBLOCKEND | ODECLEND -> true | ODUMMY token -> isTypeContinuator token | _ -> false let rec isForLoopContinuator token = - match token with + match token with // This token may align with the "for" without closing the construct, e.g. - // for ... do - // ... - // done - | DONE -> true + // for ... do + // ... + // done + | DONE -> true | ORIGHT_BLOCK_END | OBLOCKEND | ODECLEND -> true// The following arise during reprocessing of the inserted tokens when we hit a DONE | ODUMMY token -> isForLoopContinuator token | _ -> false let rec isWhileBlockContinuator token = - match token with + match token with // This token may align with the "while" without closing the construct, e.g. - // while ... do - // ... - // done - | DONE -> true + // while ... do + // ... 
+ // done + | DONE -> true | ORIGHT_BLOCK_END | OBLOCKEND | ODECLEND -> true // The following arise during reprocessing of the inserted tokens when we hit a DONE | ODUMMY token -> isWhileBlockContinuator token | _ -> false let rec isLetContinuator token = - match token with + match token with // This token may align with the "let" without closing the construct, e.g. // let ... - // and ... - | AND -> true + // and ... + | AND -> true | ORIGHT_BLOCK_END | OBLOCKEND | ODECLEND -> true // The following arise during reprocessing of the inserted tokens when we hit a DONE | ODUMMY token -> isLetContinuator token | _ -> false -let rec isTypeSeqBlockElementContinuator token = - match token with +let rec isTypeSeqBlockElementContinuator token = + match token with // A sequence of items separated by '|' counts as one sequence block element, e.g. - // type x = + // type x = // | A <-- These together count as one element // | B <-- These together count as one element // member x.M1 - // member x.M2 + // member x.M2 | BAR -> true | OBLOCKBEGIN | ORIGHT_BLOCK_END | OBLOCKEND | ODECLEND -> true // The following arise during reprocessing of the inserted tokens when we hit a DONE - | ODUMMY token -> isTypeSeqBlockElementContinuator token + | ODUMMY token -> isTypeSeqBlockElementContinuator token | _ -> false -// Work out when a token doesn't terminate a single item in a sequence definition +// Work out when a token doesn't terminate a single item in a sequence definition let rec isSeqBlockElementContinuator token = - isInfix token || - // Infix tokens may align with the first column of a sequence block without closing a sequence element and starting a new one - // e.g. + isInfix token || + // Infix tokens may align with the first column of a sequence block without closing a sequence element and starting a new one + // e.g. // let f x - // h x + // h x // |> y <------- NOTE: Not a new element in the sequence - match token with + match token with // The following tokens may align with the first column of a sequence block without closing a sequence element and starting a new one *) - // e.g. + // e.g. // new MenuItem("&Open...", - // new EventHandler(fun _ _ -> + // new EventHandler(fun _ _ -> // ... // ), <------- NOTE RPAREN HERE // Shortcut.CtrlO) - | END | AND | WITH | THEN | RPAREN | RBRACE _ | BAR_RBRACE | RBRACK | BAR_RBRACK | RQUOTE _ -> true + | END | AND | WITH | THEN | RPAREN | RBRACE _ | BAR_RBRACE | RBRACK | BAR_RBRACK | RQUOTE _ -> true // The following arise during reprocessing of the inserted tokens when we hit a DONE - | ORIGHT_BLOCK_END | OBLOCKEND | ODECLEND -> true - | ODUMMY token -> isSeqBlockElementContinuator token + | ORIGHT_BLOCK_END | OBLOCKEND | ODECLEND -> true + | ODUMMY token -> isSeqBlockElementContinuator token | _ -> false -let rec isWithAugmentBlockContinuator token = - match token with +let rec isWithAugmentBlockContinuator token = + match token with // This token may align with "with" of an augmentation block without closing the construct, e.g. // interface Foo - // with + // with // member ... 
- // end - | END -> true + // end + | END -> true | ODUMMY token -> isWithAugmentBlockContinuator token | _ -> false -let isAtomicExprEndToken token = +let isAtomicExprEndToken token = match token with - | IDENT _ - | INT8 _ | INT16 _ | INT32 _ | INT64 _ | NATIVEINT _ + | IDENT _ + | INT8 _ | INT16 _ | INT32 _ | INT64 _ | NATIVEINT _ | UINT8 _ | UINT16 _ | UINT32 _ | UINT64 _ | UNATIVEINT _ - | DECIMAL _ | BIGNUM _ | STRING _ | BYTEARRAY _ | CHAR _ - | IEEE32 _ | IEEE64 _ - | RPAREN | RBRACK | RBRACE _ | BAR_RBRACE | BAR_RBRACK | END + | DECIMAL _ | BIGNUM _ | STRING _ | BYTEARRAY _ | CHAR _ + | IEEE32 _ | IEEE64 _ + | RPAREN | RBRACK | RBRACE _ | BAR_RBRACE | BAR_RBRACK | END | NULL | FALSE | TRUE | UNDERSCORE -> true | _ -> false - + //---------------------------------------------------------------------------- // give a 'begin' token, does an 'end' token match? //-------------------------------------------------------------------------- -let parenTokensBalance token1 token2 = - match token1, token2 with - | LPAREN, RPAREN - | LBRACE _, RBRACE _ - | LBRACE_BAR, BAR_RBRACE - | LBRACK, RBRACK - | INTERFACE, END - | CLASS, END - | SIG, END - | STRUCT, END +let parenTokensBalance token1 token2 = + match token1, token2 with + | LPAREN, RPAREN + | LBRACE _, RBRACE _ + | LBRACE_BAR, BAR_RBRACE + | LBRACK, RBRACK + | INTERFACE, END + | CLASS, END + | SIG, END + | STRUCT, END | INTERP_STRING_BEGIN_PART _, INTERP_STRING_END _ | INTERP_STRING_BEGIN_PART _, INTERP_STRING_PART _ | INTERP_STRING_PART _, INTERP_STRING_PART _ | INTERP_STRING_PART _, INTERP_STRING_END _ | LBRACK_BAR, BAR_RBRACK - | LESS true, GREATER true - | BEGIN, END -> true - | LQUOTE q1, RQUOTE q2 when q1 = q2 -> true + | LESS true, GREATER true + | BEGIN, END -> true + | LQUOTE q1, RQUOTE q2 when q1 = q2 -> true | _ -> false - + /// Used to save some aspects of the lexbuffer state [] type LexbufState(startPos: Position, endPos : Position, - pastEOF : bool) = + pastEOF : bool) = member _.StartPos = startPos member _.EndPos = endPos member _.PastEOF = pastEOF @@ -434,13 +434,13 @@ type LexbufState(startPos: Position, /// Used to save the state related to a token /// Treat as though this is read-only. [] -type TokenTup = +type TokenTup = // This is mutable for performance reasons. 
val mutable Token : token val mutable LexbufState : LexbufState val mutable LastTokenPos: Position new (token, state, lastTokenPos) = { Token=token; LexbufState=state;LastTokenPos=lastTokenPos } - + /// Returns starting position of the token member x.StartPos = x.LexbufState.StartPos /// Returns end position of the token @@ -457,7 +457,7 @@ type TokenTupPool() = let mutable currentPoolSize = 0 let stack = Stack(10) - member pool.Rent() = + member pool.Rent() = if stack.Count = 0 then if currentPoolSize < maxSize then stack.Push(TokenTup(Unchecked.defaultof<_>, Unchecked.defaultof<_>, Unchecked.defaultof<_>)) @@ -479,24 +479,24 @@ type TokenTupPool() = stack.Push x /// Returns a token 'tok' with the same position as this token - member pool.UseLocation(x: TokenTup, tok) = - let tokState = x.LexbufState + member pool.UseLocation(x: TokenTup, tok) = + let tokState = x.LexbufState let tokTup = pool.Rent() tokTup.Token <- tok tokTup.LexbufState <- LexbufState(tokState.StartPos, tokState.EndPos, false) tokTup.LastTokenPos <- x.LastTokenPos tokTup - - /// Returns a token 'tok' with the same position as this token, except that + + /// Returns a token 'tok' with the same position as this token, except that /// it is shifted by specified number of characters from the left and from the right /// Note: positive value means shift to the right in both cases - member pool.UseShiftedLocation(x: TokenTup, tok, shiftLeft, shiftRight) = + member pool.UseShiftedLocation(x: TokenTup, tok, shiftLeft, shiftRight) = let tokState = x.LexbufState let tokTup = pool.Rent() tokTup.Token <- tok tokTup.LexbufState <- LexbufState(tokState.StartPos.ShiftColumnBy shiftLeft, tokState.EndPos.ShiftColumnBy shiftRight, false) tokTup.LastTokenPos <- x.LastTokenPos - tokTup + tokTup //---------------------------------------------------------------------------- // Utilities for the tokenizer that are needed in other places @@ -504,33 +504,33 @@ type TokenTupPool() = // Strip a bunch of leading '>' of a token, at the end of a typar application // Note: this is used in the 'service.fs' to do limited postprocessing -let (|TyparsCloseOp|_|) (txt: string) = +let (|TyparsCloseOp|_|) (txt: string) = let angles = txt |> Seq.takeWhile (fun c -> c = '>') |> Seq.toList let afterAngles = txt |> Seq.skipWhile (fun c -> c = '>') |> Seq.toList if List.isEmpty angles then None else - let afterOp = - match (System.String(Array.ofSeq afterAngles)) with + let afterOp = + match (System.String(Array.ofSeq afterAngles)) with | "." -> Some DOT | "]" -> Some RBRACK | "-" -> Some MINUS - | ".." -> Some DOT_DOT - | "?" -> Some QMARK - | "??" -> Some QMARK_QMARK - | ":=" -> Some COLON_EQUALS + | ".." -> Some DOT_DOT + | "?" -> Some QMARK + | "??" -> Some QMARK_QMARK + | ":=" -> Some COLON_EQUALS | "::" -> Some COLON_COLON - | "*" -> Some STAR + | "*" -> Some STAR | "&" -> Some AMP - | "->" -> Some RARROW - | "<-" -> Some LARROW - | "=" -> Some EQUALS + | "->" -> Some RARROW + | "<-" -> Some LARROW + | "=" -> Some EQUALS | "<" -> Some (LESS false) | "$" -> Some DOLLAR | "%" -> Some (PERCENT_OP("%") ) | "%%" -> Some (PERCENT_OP("%%")) | "" -> None - | s -> - match List.ofSeq afterAngles with + | s -> + match List.ofSeq afterAngles with | '=' :: _ | '!' :: '=' :: _ | '<' :: _ @@ -566,7 +566,7 @@ type LexFilterImpl ( compilingFSharpCore, lexer: (Lexbuf -> token), lexbuf: Lexbuf -) = +) = //---------------------------------------------------------------------------- // Part I. 
Building a new lex stream from an old @@ -576,47 +576,47 @@ type LexFilterImpl ( // coming out of an existing lexbuf. Ideally lexbufs would be abstract interfaces // and we could just build a new abstract interface that wraps an existing one. // However that is not how F# lexbufs currently work. - // - // Part of the fakery we perform involves buffering a lookahead token which + // + // Part of the fakery we perform involves buffering a lookahead token which // we eventually pass on to the client. However, this client also looks at // other aspects of the 'state' of lexbuf directly, e.g. F# lexbufs have a triple // (start-pos, end-pos, eof-reached) // // You may ask why the F# parser reads this lexbuf state directly. Well, the - // pars.fsy code itself it doesn't, but the parser engines (prim-parsing.fs) - // certainly do for F#. e.g. when these parsers read a token + // pars.fsy code itself it doesn't, but the parser engines (prim-parsing.fs) + // certainly do for F#. e.g. when these parsers read a token // from the lexstream they also read the position information and keep this - // a related stack. + // a related stack. // // Anyway, this explains the functions getLexbufState(), setLexbufState() etc. //-------------------------------------------------------------------------- - // Make sure we don't report 'eof' when inserting a token, and set the positions to the - // last reported token position + // Make sure we don't report 'eof' when inserting a token, and set the positions to the + // last reported token position let lexbufStateForInsertedDummyTokens (lastTokenStartPos, lastTokenEndPos) = - LexbufState(lastTokenStartPos, lastTokenEndPos, false) + LexbufState(lastTokenStartPos, lastTokenEndPos, false) - let getLexbufState() = - LexbufState(lexbuf.StartPos, lexbuf.EndPos, lexbuf.IsPastEndOfStream) + let getLexbufState() = + LexbufState(lexbuf.StartPos, lexbuf.EndPos, lexbuf.IsPastEndOfStream) let setLexbufState (p: LexbufState) = - lexbuf.StartPos <- p.StartPos + lexbuf.StartPos <- p.StartPos lexbuf.EndPos <- p.EndPos lexbuf.IsPastEndOfStream <- p.PastEOF - let posOfTokenTup (tokenTup: TokenTup) = + let posOfTokenTup (tokenTup: TokenTup) = match tokenTup.Token with - // EOF token is processed as if on column -1 - // This forces the closure of all contexts. - | EOF _ -> tokenTup.LexbufState.StartPos.ColumnMinusOne, tokenTup.LexbufState.EndPos.ColumnMinusOne + // EOF token is processed as if on column -1 + // This forces the closure of all contexts. + | EOF _ -> tokenTup.LexbufState.StartPos.ColumnMinusOne, tokenTup.LexbufState.EndPos.ColumnMinusOne | _ -> tokenTup.LexbufState.StartPos, tokenTup.LexbufState.EndPos - let startPosOfTokenTup (tokenTup: TokenTup) = + let startPosOfTokenTup (tokenTup: TokenTup) = match tokenTup.Token with - // EOF token is processed as if on column -1 - // This forces the closure of all contexts. + // EOF token is processed as if on column -1 + // This forces the closure of all contexts. | EOF _ -> tokenTup.LexbufState.StartPos.ColumnMinusOne - | _ -> tokenTup.LexbufState.StartPos + | _ -> tokenTup.LexbufState.StartPos //---------------------------------------------------------------------------- // TokenTup pool @@ -628,8 +628,8 @@ type LexFilterImpl ( // Part II. The state of the new lex stream object. //-------------------------------------------------------------------------- - // Ok, we're going to the wrapped lexbuf. Set the lexstate back so that the lexbuf - // appears consistent and correct for the wrapped lexer function. 
+ // Ok, we're going to the wrapped lexbuf. Set the lexstate back so that the lexbuf + // appears consistent and correct for the wrapped lexer function. let mutable savedLexbufState = Unchecked.defaultof let mutable haveLexbufState = false let runWrappedLexerInConsistentLexbufState() = @@ -659,18 +659,18 @@ type LexFilterImpl ( let delayedStack = Stack() let mutable tokensThatNeedNoProcessingCount = 0 - let delayToken tokenTup = delayedStack.Push tokenTup + let delayToken tokenTup = delayedStack.Push tokenTup let delayTokenNoProcessing tokenTup = delayToken tokenTup; tokensThatNeedNoProcessingCount <- tokensThatNeedNoProcessingCount + 1 - let popNextTokenTup() = - if delayedStack.Count > 0 then + let popNextTokenTup() = + if delayedStack.Count > 0 then let tokenTup = delayedStack.Pop() if debug then dprintf "popNextTokenTup: delayed token, tokenStartPos = %a\n" outputPos (startPosOfTokenTup tokenTup) tokenTup else if debug then dprintf "popNextTokenTup: no delayed tokens, running lexer...\n" - runWrappedLexerInConsistentLexbufState() - + runWrappedLexerInConsistentLexbufState() + //---------------------------------------------------------------------------- // Part III. Initial configuration of state. @@ -682,64 +682,64 @@ type LexFilterImpl ( let mutable initialized = false let mutable offsideStack = [] let mutable prevWasAtomicEnd = false - + let peekInitial() = // Forget the lexbuf state we might have saved from previous input haveLexbufState <- false let initialLookaheadTokenTup = popNextTokenTup() if debug then dprintf "first token: initialLookaheadTokenLexbufState = %a\n" outputPos (startPosOfTokenTup initialLookaheadTokenTup) - + delayToken initialLookaheadTokenTup initialized <- true offsideStack <- (CtxtSeqBlock(FirstInSeqBlock, startPosOfTokenTup initialLookaheadTokenTup, NoAddBlockEnd)) :: offsideStack - initialLookaheadTokenTup + initialLookaheadTokenTup - let warn (s: TokenTup) msg = + let warn (s: TokenTup) msg = warning(IndentationProblem(msg, mkSynRange (startPosOfTokenTup s) s.LexbufState.EndPos)) // 'query { join x in ys ... }' - // 'query { ... + // 'query { ... // join x in ys ... }' // 'query { for ... do // join x in ys ... }' let detectJoinInCtxt stack = - let rec check s = - match s with + let rec check s = + match s with | CtxtParen(LBRACE _, _) :: _ -> true | (CtxtSeqBlock _ | CtxtDo _ | CtxtFor _) :: rest -> check rest | _ -> false - match stack with + match stack with | CtxtVanilla _ :: rest -> check rest | _ -> false //---------------------------------------------------------------------------- // Part IV. Helper functions for pushing contexts and giving good warnings - // if a context is undented. + // if a context is undented. 
// // Undentation rules //-------------------------------------------------------------------------- - + let relaxWhitespace2 = lexbuf.SupportsFeature LanguageFeature.RelaxWhitespace2 //let indexerNotationWithoutDot = lexbuf.SupportsFeature LanguageFeature.IndexerNotationWithoutDot let pushCtxt tokenTup (newCtxt: Context) = - let rec undentationLimit strict stack = - match newCtxt, stack with - | _, [] -> PositionWithColumn(newCtxt.StartPos, -1) + let rec undentationLimit strict stack = + match newCtxt, stack with + | _, [] -> PositionWithColumn(newCtxt.StartPos, -1) - // ignore Vanilla because a SeqBlock is always coming + // ignore Vanilla because a SeqBlock is always coming | _, CtxtVanilla _ :: rest -> undentationLimit strict rest | _, CtxtSeqBlock _ :: rest when not strict -> undentationLimit strict rest | _, CtxtParen _ :: rest when not strict -> undentationLimit strict rest - // 'begin match' limited by minimum of two - // '(match' limited by minimum of two + // 'begin match' limited by minimum of two + // '(match' limited by minimum of two | _, (CtxtMatch _ as ctxt1) :: CtxtSeqBlock _ :: (CtxtParen ((BEGIN | LPAREN), _) as ctxt2) :: _ - -> if ctxt1.StartCol <= ctxt2.StartCol - then PositionWithColumn(ctxt1.StartPos, ctxt1.StartCol) - else PositionWithColumn(ctxt2.StartPos, ctxt2.StartCol) + -> if ctxt1.StartCol <= ctxt2.StartCol + then PositionWithColumn(ctxt1.StartPos, ctxt1.StartCol) + else PositionWithColumn(ctxt2.StartPos, ctxt2.StartCol) // Insert this rule to allow // begin match 1 with // | 1 -> () @@ -751,24 +751,24 @@ type LexFilterImpl ( // will consider the CtxtMatch as the limiting context instead of allowing undentation until the parenthesis // Test here: Tests/FSharp.Compiler.ComponentTests/Conformance/LexicalFiltering/Basic/OffsideExceptions.fs, RelaxWhitespace2_AllowedBefore11 | _, (CtxtMatchClauses _ as ctxt1) :: (CtxtMatch _) :: CtxtSeqBlock _ :: (CtxtParen ((BEGIN | LPAREN), _) as ctxt2) :: _ when relaxWhitespace2 - -> if ctxt1.StartCol <= ctxt2.StartCol - then PositionWithColumn(ctxt1.StartPos, ctxt1.StartCol) - else PositionWithColumn(ctxt2.StartPos, ctxt2.StartCol) - - // 'let ... = function' limited by 'let', precisely - // This covers the common form - // - // let f x = function - // | Case1 -> ... - // | Case2 -> ... + -> if ctxt1.StartCol <= ctxt2.StartCol + then PositionWithColumn(ctxt1.StartPos, ctxt1.StartCol) + else PositionWithColumn(ctxt2.StartPos, ctxt2.StartCol) + + // 'let ... = function' limited by 'let', precisely + // This covers the common form + // + // let f x = function + // | Case1 -> ... + // | Case2 -> ... | CtxtMatchClauses _, CtxtFunction _ :: CtxtSeqBlock _ :: (CtxtLetDecl _ as limitCtxt) :: _rest -> PositionWithColumn(limitCtxt.StartPos, limitCtxt.StartCol) - // Otherwise 'function ...' places no limit until we hit a CtxtLetDecl etc... (Recursive) + // Otherwise 'function ...' places no limit until we hit a CtxtLetDecl etc... (Recursive) | CtxtMatchClauses _, CtxtFunction _ :: rest -> undentationLimit false rest - // 'try ... with' limited by 'try' + // 'try ... with' limited by 'try' | _, (CtxtMatchClauses _ :: (CtxtTry _ as limitCtxt) :: _rest) -> PositionWithColumn(limitCtxt.StartPos, limitCtxt.StartCol) @@ -776,10 +776,10 @@ type LexFilterImpl ( | _, (CtxtMatchClauses _ :: (CtxtMatch _ as limitCtxt) :: _rest) when relaxWhitespace2 -> PositionWithColumn(limitCtxt.StartPos, limitCtxt.StartCol) - // 'fun ->' places no limit until we hit a CtxtLetDecl etc... (Recursive) + // 'fun ->' places no limit until we hit a CtxtLetDecl etc... 
(Recursive) | _, CtxtFun _ :: rest -> undentationLimit false rest - + // 'let ... = f ... begin' limited by 'let' (given RelaxWhitespace2) // 'let (' (pattern match) limited by 'let' (given RelaxWhitespace2) // 'let [' (pattern match) limited by 'let' (given RelaxWhitespace2) @@ -798,9 +798,9 @@ type LexFilterImpl ( | _, CtxtSeqBlock _ :: CtxtParen (TokenLExprParen, _) :: rest when relaxWhitespace2 -> undentationLimit false rest - // 'f ...{' places no limit until we hit a CtxtLetDecl etc... - // 'f ...[' places no limit until we hit a CtxtLetDecl etc... - // 'f ...[|' places no limit until we hit a CtxtLetDecl etc... + // 'f ...{' places no limit until we hit a CtxtLetDecl etc... + // 'f ...[' places no limit until we hit a CtxtLetDecl etc... + // 'f ...[|' places no limit until we hit a CtxtLetDecl etc... | _, CtxtParen ((LBRACE _ | LBRACK | LBRACK_BAR), _) :: CtxtSeqBlock _ :: rest | _, CtxtParen ((LBRACE _ | LBRACK | LBRACK_BAR), _) :: CtxtVanilla _ :: CtxtSeqBlock _ :: rest | _, CtxtSeqBlock _ :: CtxtParen((LBRACE _ | LBRACK | LBRACK_BAR), _) :: CtxtVanilla _ :: CtxtSeqBlock _ :: rest @@ -809,25 +809,25 @@ type LexFilterImpl ( // MAJOR PERMITTED UNDENTATION This is allowing: // if x then y else // let x = 3 + 4 - // x + x + // x + x // This is a serious thing to allow, but is required since there is no "return" in this language. // Without it there is no way of escaping special cases in large bits of code without indenting the main case. - | CtxtSeqBlock _, CtxtElse _ :: (CtxtIf _ as limitCtxt) :: _rest + | CtxtSeqBlock _, CtxtElse _ :: (CtxtIf _ as limitCtxt) :: _rest -> PositionWithColumn(limitCtxt.StartPos, limitCtxt.StartCol) - // Permitted inner-construct precise block alignment: + // Permitted inner-construct precise block alignment: // interface ... - // with ... - // end - // + // with ... + // end + // // type ... - // with ... - // end + // with ... + // end | CtxtWithAsAugment _, (CtxtInterfaceHead _ | CtxtMemberHead _ | CtxtException _ | CtxtTypeDefns _ as limitCtxt :: _rest) - -> PositionWithColumn(limitCtxt.StartPos, limitCtxt.StartCol) + -> PositionWithColumn(limitCtxt.StartPos, limitCtxt.StartCol) // Permit undentation via parentheses (or begin/end) following a 'then', 'else' or 'do': - // if nr > 0 then ( + // if nr > 0 then ( // nr <- nr - 1 // acc <- d // i <- i - 1 @@ -840,8 +840,8 @@ type LexFilterImpl ( // expr // else // expr - // rather than forcing - // if ... + // rather than forcing + // if ... // then expr // else expr // Also ...... with @@ -857,7 +857,7 @@ type LexFilterImpl ( // e.g. // let fffffff() = function // | [] -> 0 - // | _ -> 1 + // | _ -> 1 // // Note this does not allow // let fffffff() = function _ -> @@ -869,33 +869,33 @@ type LexFilterImpl ( | _, CtxtFunction _ :: rest -> undentationLimit false rest - // 'module ... : sig' limited by 'module' - // 'module ... : struct' limited by 'module' - // 'module ... : begin' limited by 'module' - // 'if ... then (' limited by 'if' - // 'if ... then {' limited by 'if' - // 'if ... then [' limited by 'if' - // 'if ... then [|' limited by 'if' - // 'if ... else (' limited by 'if' - // 'if ... else {' limited by 'if' - // 'if ... else [' limited by 'if' - // 'if ... else [|' limited by 'if' + // 'module ... : sig' limited by 'module' + // 'module ... : struct' limited by 'module' + // 'module ... : begin' limited by 'module' + // 'if ... then (' limited by 'if' + // 'if ... then {' limited by 'if' + // 'if ... then [' limited by 'if' + // 'if ... then [|' limited by 'if' + // 'if ... 
else (' limited by 'if' + // 'if ... else {' limited by 'if' + // 'if ... else [' limited by 'if' + // 'if ... else [|' limited by 'if' | _, CtxtParen ((SIG | STRUCT | BEGIN), _) :: CtxtSeqBlock _ :: (CtxtModuleBody (_, false) as limitCtxt) :: _ | _, CtxtParen ((BEGIN | LPAREN | LBRACK | LBRACE _ | LBRACE_BAR | LBRACK_BAR), _) :: CtxtSeqBlock _ :: CtxtThen _ :: (CtxtIf _ as limitCtxt) :: _ | _, CtxtParen ((BEGIN | LPAREN | LBRACK | LBRACE _ | LBRACE_BAR | LBRACK_BAR | LBRACK_LESS), _) :: CtxtSeqBlock _ :: CtxtElse _ :: (CtxtIf _ as limitCtxt) :: _ - // 'f ... (' in seqblock limited by 'f' - // 'f ... {' in seqblock limited by 'f' NOTE: this is covered by the more generous case above - // 'f ... [' in seqblock limited by 'f' - // 'f ... [|' in seqblock limited by 'f' - // 'f ... Foo<' in seqblock limited by 'f' + // 'f ... (' in seqblock limited by 'f' + // 'f ... {' in seqblock limited by 'f' NOTE: this is covered by the more generous case above + // 'f ... [' in seqblock limited by 'f' + // 'f ... [|' in seqblock limited by 'f' + // 'f ... Foo<' in seqblock limited by 'f' | _, CtxtParen ((BEGIN | LPAREN | LESS true | LBRACK | LBRACK_BAR), _) :: CtxtVanilla _ :: (CtxtSeqBlock _ as limitCtxt) :: _ - // 'type C = class ... ' limited by 'type' - // 'type C = interface ... ' limited by 'type' - // 'type C = struct ... ' limited by 'type' + // 'type C = class ... ' limited by 'type' + // 'type C = interface ... ' limited by 'type' + // 'type C = struct ... ' limited by 'type' | _, CtxtParen ((CLASS | STRUCT | INTERFACE), _) :: CtxtSeqBlock _ :: (CtxtTypeDefns _ as limitCtxt) :: _ - -> PositionWithColumn(limitCtxt.StartPos, limitCtxt.StartCol + 1) + -> PositionWithColumn(limitCtxt.StartPos, limitCtxt.StartCol + 1) // 'type C(' limited by 'type' | _, CtxtSeqBlock _ :: CtxtParen(LPAREN, _) :: (CtxtTypeDefns _ as limitCtxt) :: _ @@ -904,52 +904,52 @@ type LexFilterImpl ( // 'static member P with get() = ' limited by 'static', likewise others | _, CtxtWithAsLet _ :: (CtxtMemberHead _ as limitCtxt) :: _ when lexbuf.SupportsFeature LanguageFeature.RelaxWhitespace - -> PositionWithColumn(limitCtxt.StartPos, limitCtxt.StartCol + 1) + -> PositionWithColumn(limitCtxt.StartPos, limitCtxt.StartCol + 1) - // REVIEW: document these + // REVIEW: document these | _, CtxtSeqBlock _ :: CtxtParen((BEGIN | LPAREN | LBRACK | LBRACK_BAR), _) :: CtxtVanilla _ :: (CtxtSeqBlock _ as limitCtxt) :: _ | CtxtSeqBlock _, CtxtParen ((BEGIN | LPAREN | LBRACE _ | LBRACE_BAR | LBRACK | LBRACK_BAR), _) :: CtxtSeqBlock _ :: (CtxtTypeDefns _ | CtxtLetDecl _ | CtxtMemberBody _ | CtxtWithAsLet _ as limitCtxt) :: _ - -> PositionWithColumn(limitCtxt.StartPos, limitCtxt.StartCol + 1) + -> PositionWithColumn(limitCtxt.StartPos, limitCtxt.StartCol + 1) - // Permitted inner-construct (e.g. "then" block and "else" block in overall - // "if-then-else" block ) block alignment: - // if ... + // Permitted inner-construct (e.g. "then" block and "else" block in overall + // "if-then-else" block ) block alignment: + // if ... // then expr - // elif expr - // else expr - | (CtxtIf _ | CtxtElse _ | CtxtThen _), (CtxtIf _ as limitCtxt) :: _rest + // elif expr + // else expr + | (CtxtIf _ | CtxtElse _ | CtxtThen _), (CtxtIf _ as limitCtxt) :: _rest -> PositionWithColumn(limitCtxt.StartPos, limitCtxt.StartCol) - // Permitted inner-construct precise block alignment: - // while ... + // Permitted inner-construct precise block alignment: + // while ... 
// do expr - // done - | CtxtDo _, (CtxtFor _ | CtxtWhile _ as limitCtxt) :: _rest + // done + | CtxtDo _, (CtxtFor _ | CtxtWhile _ as limitCtxt) :: _rest -> PositionWithColumn(limitCtxt.StartPos, limitCtxt.StartCol) - // These contexts all require indentation by at least one space - | _, (CtxtInterfaceHead _ | CtxtNamespaceHead _ | CtxtModuleHead _ | CtxtException _ | CtxtModuleBody (_, false) | CtxtIf _ | CtxtWithAsLet _ | CtxtLetDecl _ | CtxtMemberHead _ | CtxtMemberBody _ as limitCtxt :: _) - -> PositionWithColumn(limitCtxt.StartPos, limitCtxt.StartCol + 1) + // These contexts all require indentation by at least one space + | _, (CtxtInterfaceHead _ | CtxtNamespaceHead _ | CtxtModuleHead _ | CtxtException _ | CtxtModuleBody (_, false) | CtxtIf _ | CtxtWithAsLet _ | CtxtLetDecl _ | CtxtMemberHead _ | CtxtMemberBody _ as limitCtxt :: _) + -> PositionWithColumn(limitCtxt.StartPos, limitCtxt.StartCol + 1) - // These contexts can have their contents exactly aligning + // These contexts can have their contents exactly aligning | _, (CtxtParen _ | CtxtFor _ | CtxtWhen _ | CtxtWhile _ | CtxtTypeDefns _ | CtxtMatch _ | CtxtModuleBody (_, true) | CtxtNamespaceBody _ | CtxtTry _ | CtxtMatchClauses _ | CtxtSeqBlock _ as limitCtxt :: _) - -> PositionWithColumn(limitCtxt.StartPos, limitCtxt.StartCol) - - match newCtxt with - // Don't bother to check pushes of Vanilla blocks since we've + -> PositionWithColumn(limitCtxt.StartPos, limitCtxt.StartCol) + + match newCtxt with + // Don't bother to check pushes of Vanilla blocks since we've // always already pushed a SeqBlock at this position. - | CtxtVanilla _ + | CtxtVanilla _ // String interpolation inner expressions are not limited (e.g. multiline strings) | CtxtParen((INTERP_STRING_BEGIN_PART _ | INTERP_STRING_PART _),_) -> () - | _ -> + | _ -> let p1 = undentationLimit true offsideStack let c2 = newCtxt.StartCol - if c2 < p1.Column then - warn tokenTup - (if debug then - sprintf "possible incorrect indentation: this token is offside of context at position %s, newCtxt = %A, stack = %A, newCtxtPos = %s, c1 = %d, c2 = %d" - (warningStringOfPosition p1.Position) newCtxt offsideStack (stringOfPos newCtxt.StartPos) p1.Column c2 + if c2 < p1.Column then + warn tokenTup + (if debug then + sprintf "possible incorrect indentation: this token is offside of context at position %s, newCtxt = %A, stack = %A, newCtxtPos = %s, c1 = %d, c2 = %d" + (warningStringOfPosition p1.Position) newCtxt offsideStack (stringOfPos newCtxt.StartPos) p1.Column c2 else FSComp.SR.lexfltTokenIsOffsideOfContextStartedEarlier(warningStringOfPosition p1.Position)) let newOffsideStack = newCtxt :: offsideStack @@ -974,14 +974,14 @@ type LexFilterImpl ( // Peek ahead at a token, either from the old lexer or from our delayedStack //-------------------------------------------------------------------------- - let peekNextTokenTup() = + let peekNextTokenTup() = let tokenTup = popNextTokenTup() delayToken tokenTup tokenTup - - let peekNextToken() = + + let peekNextToken() = peekNextTokenTup().Token - + //---------------------------------------------------------------------------- // Adjacency precedence rule //-------------------------------------------------------------------------- @@ -990,16 +990,16 @@ type LexFilterImpl ( let lparenStartPos = startPosOfTokenTup rightTokenTup let tokenEndPos = leftTokenTup.LexbufState.EndPos (tokenEndPos = lparenStartPos) - + let nextTokenIsAdjacentLBrack (tokenTup: TokenTup) = let lookaheadTokenTup = peekNextTokenTup() - match lookaheadTokenTup.Token with + 
match lookaheadTokenTup.Token with | LBRACK -> isAdjacent tokenTup lookaheadTokenTup | _ -> false let nextTokenIsAdjacentLParen (tokenTup: TokenTup) = let lookaheadTokenTup = peekNextTokenTup() - match lookaheadTokenTup.Token with + match lookaheadTokenTup.Token with | LPAREN -> isAdjacent tokenTup lookaheadTokenTup | _ -> false @@ -1009,52 +1009,52 @@ type LexFilterImpl ( let peekAdjacentTypars indentation (tokenTup: TokenTup) = let lookaheadTokenTup = peekNextTokenTup() - match lookaheadTokenTup.Token with + match lookaheadTokenTup.Token with | INFIX_COMPARE_OP "", false) - | LESS _ -> - let tokenEndPos = tokenTup.LexbufState.EndPos - if isAdjacent tokenTup lookaheadTokenTup then + | LESS _ -> + let tokenEndPos = tokenTup.LexbufState.EndPos + if isAdjacent tokenTup lookaheadTokenTup then let mutable stack = [] - let rec scanAhead nParen = + let rec scanAhead nParen = let lookaheadTokenTup = popNextTokenTup() let lookaheadToken = lookaheadTokenTup.Token stack <- (lookaheadTokenTup, true) :: stack let lookaheadTokenStartPos = startPosOfTokenTup lookaheadTokenTup - match lookaheadToken with - | EOF _ | SEMICOLON_SEMICOLON -> false + match lookaheadToken with + | EOF _ | SEMICOLON_SEMICOLON -> false | _ when indentation && lookaheadTokenStartPos < tokenEndPos -> false | RPAREN | RBRACK -> let nParen = nParen - 1 - if nParen > 0 then - scanAhead nParen - else + if nParen > 0 then + scanAhead nParen + else false - | GREATER _ | GREATER_RBRACK | GREATER_BAR_RBRACK -> + | GREATER _ | GREATER_RBRACK | GREATER_BAR_RBRACK -> let nParen = nParen - 1 let hasAfterOp = (match lookaheadToken with GREATER _ -> false | _ -> true) - if nParen > 0 then + if nParen > 0 then // Don't smash the token if there is an after op and we're in a nested paren stack <- (lookaheadTokenTup, not hasAfterOp) :: stack.Tail - scanAhead nParen - else - // On successful parse of a set of type parameters, look for an adjacent (, e.g. + scanAhead nParen + else + // On successful parse of a set of type parameters, look for an adjacent (, e.g. // M(args) // and insert a HIGH_PRECEDENCE_PAREN_APP if not hasAfterOp && nextTokenIsAdjacentLParen lookaheadTokenTup then let dotTokenTup = peekNextTokenTup() stack <- (pool.UseLocation(dotTokenTup, HIGH_PRECEDENCE_PAREN_APP), false) :: stack true - | INFIX_COMPARE_OP (TyparsCloseOp(greaters, afterOp)) -> + | INFIX_COMPARE_OP (TyparsCloseOp(greaters, afterOp)) -> let nParen = nParen - greaters.Length - if nParen > 0 then + if nParen > 0 then // Don't smash the token if there is an after op and we're in a nested paren stack <- (lookaheadTokenTup, not afterOp.IsSome) :: stack.Tail - scanAhead nParen - else - // On successful parse of a set of type parameters, look for an adjacent (, e.g. + scanAhead nParen + else + // On successful parse of a set of type parameters, look for an adjacent (, e.g. 
// M>(args) // and insert a HIGH_PRECEDENCE_PAREN_APP if afterOp.IsNone && nextTokenIsAdjacentLParen lookaheadTokenTup then @@ -1066,12 +1066,12 @@ type LexFilterImpl ( | LBRACK | LBRACK_LESS | INFIX_COMPARE_OP "", false) -> + | LQUOTE ("<@ @>", false) -> scanAhead (nParen+1) - - // These tokens CAN occur in non-parenthesized positions in the grammar of types or type parameter definitions + + // These tokens CAN occur in non-parenthesized positions in the grammar of types or type parameter definitions // Thus we explicitly DO consider these to be type applications: // fx // fx @@ -1084,42 +1084,42 @@ type LexFilterImpl ( // f<{| C : int |}>x // fx // fx - | DEFAULT | COLON | COLON_GREATER | STRUCT | NULL | DELEGATE | AND | WHEN + | DEFAULT | COLON | COLON_GREATER | STRUCT | NULL | DELEGATE | AND | WHEN | DOT_DOT | NEW | LBRACE_BAR | SEMICOLON | BAR_RBRACE - | INFIX_AT_HAT_OP "^" - | INFIX_AT_HAT_OP "^-" - | INFIX_STAR_DIV_MOD_OP "/" - | MINUS - | GLOBAL + | INFIX_AT_HAT_OP "^" + | INFIX_AT_HAT_OP "^-" + | INFIX_STAR_DIV_MOD_OP "/" + | MINUS + | GLOBAL | CONST | KEYWORD_STRING _ | NULL - | INT8 _ | INT16 _ | INT32 _ | INT64 _ | NATIVEINT _ + | INT8 _ | INT16 _ | INT32 _ | INT64 _ | NATIVEINT _ | UINT8 _ | UINT16 _ | UINT32 _ | UINT64 _ | UNATIVEINT _ - | DECIMAL _ | BIGNUM _ | STRING _ | BYTEARRAY _ | CHAR _ | TRUE | FALSE - | IEEE32 _ | IEEE64 _ + | DECIMAL _ | BIGNUM _ | STRING _ | BYTEARRAY _ | CHAR _ | TRUE | FALSE + | IEEE32 _ | IEEE64 _ | DOT | UNDERSCORE | EQUALS - | IDENT _ | COMMA | RARROW | HASH - | STAR | QUOTE -> + | IDENT _ | COMMA | RARROW | HASH + | STAR | QUOTE -> scanAhead nParen - - - // All other tokens ARE assumed to be part of the grammar of types or type parameter definitions - | _ -> - if nParen > 1 then - scanAhead nParen - else + + + // All other tokens ARE assumed to be part of the grammar of types or type parameter definitions + | _ -> + if nParen > 1 then + scanAhead nParen + else false - + let res = scanAhead 0 // Put the tokens back on and smash them up if needed for (tokenTup, smash) in stack do - if smash then - match tokenTup.Token with + if smash then + match tokenTup.Token with | INFIX_COMPARE_OP " delayToken (pool.UseShiftedLocation(tokenTup, INFIX_STAR_DIV_MOD_OP "/", 1, 0)) delayToken (pool.UseShiftedLocation(tokenTup, LESS res, 0, -1)) @@ -1133,7 +1133,7 @@ type LexFilterImpl ( delayToken (pool.UseShiftedLocation(tokenTup, INFIX_AT_HAT_OP "@", 1, 0)) delayToken (pool.UseShiftedLocation(tokenTup, LESS res, 0, -1)) pool.Return tokenTup - | GREATER_BAR_RBRACK -> + | GREATER_BAR_RBRACK -> delayToken (pool.UseShiftedLocation(tokenTup, BAR_RBRACK, 1, 0)) delayToken (pool.UseShiftedLocation(tokenTup, GREATER res, 0, -2)) pool.Return tokenTup @@ -1144,7 +1144,7 @@ type LexFilterImpl ( | GREATER _ -> delayToken (pool.UseLocation(tokenTup, GREATER res)) pool.Return tokenTup - | INFIX_COMPARE_OP (TyparsCloseOp(greaters, afterOp) as opstr) -> + | INFIX_COMPARE_OP (TyparsCloseOp(greaters, afterOp) as opstr) -> match afterOp with | None -> () | Some tok -> delayToken (pool.UseShiftedLocation(tokenTup, tok, greaters.Length, 0)) @@ -1155,7 +1155,7 @@ type LexFilterImpl ( else delayToken tokenTup res - else + else false | _ -> false @@ -1163,125 +1163,125 @@ type LexFilterImpl ( // End actions //-------------------------------------------------------------------------- - let returnToken (tokenLexbufState: LexbufState) tok = + let returnToken (tokenLexbufState: LexbufState) tok = setLexbufState tokenLexbufState prevWasAtomicEnd <- isAtomicExprEndToken tok tok - + let 
rec suffixExists p l = match l with [] -> false | _ :: t -> p t || suffixExists p t - let tokenBalancesHeadContext token stack = - match token, stack with + let tokenBalancesHeadContext token stack = + match token, stack with | END, CtxtWithAsAugment _ :: _ | (ELSE | ELIF), CtxtIf _ :: _ | DONE, CtxtDo _ :: _ - // WITH balances except in the following contexts.... Phew - an overused keyword! + // WITH balances except in the following contexts.... Phew - an overused keyword! | WITH, ( (CtxtMatch _ | CtxtException _ | CtxtMemberHead _ | CtxtInterfaceHead _ | CtxtTry _ | CtxtTypeDefns _ | CtxtMemberBody _) :: _ - // This is the nasty record/object-expression case + // This is the nasty record/object-expression case | CtxtSeqBlock _ :: CtxtParen((LBRACE _ | LBRACE_BAR), _) :: _ ) - | FINALLY, CtxtTry _ :: _ -> + | FINALLY, CtxtTry _ :: _ -> true - // for x in ienum ... + // for x in ienum ... // let x = ... in | IN, (CtxtFor _ | CtxtLetDecl _) :: _ -> true // 'query { join x in ys ... }' - // 'query { ... + // 'query { ... // join x in ys ... }' // 'query { for ... do // join x in ys ... }' | IN, stack when detectJoinInCtxt stack -> true - // NOTE: ;; does not terminate a 'namespace' body. - | SEMICOLON_SEMICOLON, CtxtSeqBlock _ :: CtxtNamespaceBody _ :: _ -> + // NOTE: ;; does not terminate a 'namespace' body. + | SEMICOLON_SEMICOLON, CtxtSeqBlock _ :: CtxtNamespaceBody _ :: _ -> true - | SEMICOLON_SEMICOLON, CtxtSeqBlock _ :: CtxtModuleBody (_, true) :: _ -> + | SEMICOLON_SEMICOLON, CtxtSeqBlock _ :: CtxtModuleBody (_, true) :: _ -> true - | t2, CtxtParen(t1, _) :: _ -> + | t2, CtxtParen(t1, _) :: _ -> parenTokensBalance t1 t2 - | _ -> + | _ -> false - + //---------------------------------------------------------------------------- // Parse and transform the stream of tokens coming from popNextTokenTup, pushing // contexts where needed, popping them where things are offside, balancing // parentheses and other constructs. 
//-------------------------------------------------------------------------- - + let rec hwTokenFetch useBlockRule = let tokenTup = popNextTokenTup() let tokenReplaced = rulesForBothSoftWhiteAndHardWhite tokenTup - if tokenReplaced then hwTokenFetch useBlockRule else + if tokenReplaced then hwTokenFetch useBlockRule else let tokenStartPos = (startPosOfTokenTup tokenTup) let token = tokenTup.Token let tokenLexbufState = tokenTup.LexbufState let tokenStartCol = tokenStartPos.Column - let isSameLine() = - match token with + let isSameLine() = + match token with | EOF _ -> false | _ -> (startPosOfTokenTup (peekNextTokenTup())).OriginalLine = tokenStartPos.OriginalLine - let isControlFlowOrNotSameLine() = - match token with + let isControlFlowOrNotSameLine() = + match token with | EOF _ -> false - | _ -> - not (isSameLine()) || - (match peekNextToken() with TRY | MATCH | MATCH_BANG | IF | LET _ | FOR | WHILE -> true | _ -> false) + | _ -> + not (isSameLine()) || + (match peekNextToken() with TRY | MATCH | MATCH_BANG | IF | LET _ | FOR | WHILE -> true | _ -> false) // Look for '=' or '.Id.id.id = ' after an identifier - let rec isLongIdentEquals token = - match token with + let rec isLongIdentEquals token = + match token with | GLOBAL - | IDENT _ -> - let rec loop() = + | IDENT _ -> + let rec loop() = let tokenTup = popNextTokenTup() - let res = - match tokenTup.Token with + let res = + match tokenTup.Token with | EOF _ -> false - | DOT -> + | DOT -> let tokenTup = popNextTokenTup() - let res = - match tokenTup.Token with + let res = + match tokenTup.Token with | EOF _ -> false | IDENT _ -> loop() | _ -> false delayToken tokenTup res - | EQUALS -> - true + | EQUALS -> + true | _ -> false delayToken tokenTup res loop() | _ -> false - let reprocess() = + let reprocess() = delayToken tokenTup hwTokenFetch useBlockRule - let reprocessWithoutBlockRule() = + let reprocessWithoutBlockRule() = delayToken tokenTup hwTokenFetch false - - let insertTokenFromPrevPosToCurrentPos tok = + + let insertTokenFromPrevPosToCurrentPos tok = delayToken tokenTup if debug then dprintf "inserting %+A\n" tok - // span of inserted token lasts from the col + 1 of the prev token + // span of inserted token lasts from the col + 1 of the prev token // to the beginning of current token - let lastTokenPos = + let lastTokenPos = let pos = tokenTup.LastTokenPos pos.ShiftColumnBy 1 returnToken (lexbufStateForInsertedDummyTokens (lastTokenPos, tokenTup.LexbufState.StartPos)) tok - let insertToken tok = + let insertToken tok = delayToken tokenTup if debug then dprintf "inserting %+A\n" tok returnToken (lexbufStateForInsertedDummyTokens (startPosOfTokenTup tokenTup, tokenTup.LexbufState.EndPos)) tok @@ -1317,14 +1317,14 @@ type LexFilterImpl ( | _ -> false // If you see a 'member' keyword while you are inside the body of another member, then it usually means there is a syntax error inside this method - // and the upcoming 'member' is the start of the next member in the class. For better parser recovery and diagnostics, it is best to pop out of the + // and the upcoming 'member' is the start of the next member in the class. For better parser recovery and diagnostics, it is best to pop out of the // existing member context so the parser can recover. // // However there are two places where 'member' keywords can appear inside expressions inside the body of a member. The first is object expressions, and // the second is static inline constraints. We must not pop the context stack in those cases, or else legal code will not parse. 
// // It is impossible to decide for sure if we're in one of those two cases, so we must err conservatively if we might be. - let thereIsACtxtMemberBodyOnTheStackAndWeShouldPopStackForUpcomingMember ctxtStack = + let thereIsACtxtMemberBodyOnTheStackAndWeShouldPopStackForUpcomingMember ctxtStack = // a 'member' starter keyword is coming; should we pop? if not(List.exists (function CtxtMemberBody _ -> true | _ -> false) ctxtStack) then false // no member currently on the stack, nothing to pop @@ -1338,47 +1338,47 @@ type LexFilterImpl ( true let endTokenForACtxt ctxt = - match ctxt with + match ctxt with | CtxtFun _ - | CtxtMatchClauses _ - | CtxtWithAsLet _ -> + | CtxtMatchClauses _ + | CtxtWithAsLet _ -> Some OEND - | CtxtWithAsAugment _ -> + | CtxtWithAsAugment _ -> Some ODECLEND - - | CtxtDo _ - | CtxtLetDecl (true, _) -> + + | CtxtDo _ + | CtxtLetDecl (true, _) -> Some ODECLEND - - | CtxtSeqBlock(_, _, AddBlockEnd) -> - Some OBLOCKEND - | CtxtSeqBlock(_, _, AddOneSidedBlockEnd) -> - Some ORIGHT_BLOCK_END + | CtxtSeqBlock(_, _, AddBlockEnd) -> + Some OBLOCKEND + + | CtxtSeqBlock(_, _, AddOneSidedBlockEnd) -> + Some ORIGHT_BLOCK_END - | _ -> + | _ -> None - // Balancing rule. Every 'in' terminates all surrounding blocks up to a CtxtLetDecl, and will be swallowed by - // terminating the corresponding CtxtLetDecl in the rule below. - // Balancing rule. Every 'done' terminates all surrounding blocks up to a CtxtDo, and will be swallowed by - // terminating the corresponding CtxtDo in the rule below. - let tokenForcesHeadContextClosure token stack = + // Balancing rule. Every 'in' terminates all surrounding blocks up to a CtxtLetDecl, and will be swallowed by + // terminating the corresponding CtxtLetDecl in the rule below. + // Balancing rule. Every 'done' terminates all surrounding blocks up to a CtxtDo, and will be swallowed by + // terminating the corresponding CtxtDo in the rule below. + let tokenForcesHeadContextClosure token stack = not (isNil stack) && - match token with + match token with | EOF _ -> true - | SEMICOLON_SEMICOLON -> not (tokenBalancesHeadContext token stack) + | SEMICOLON_SEMICOLON -> not (tokenBalancesHeadContext token stack) | TokenRExprParen - | ELSE - | ELIF - | DONE - | IN - | WITH - | FINALLY + | ELSE + | ELIF + | DONE + | IN + | WITH + | FINALLY | INTERP_STRING_PART _ | INTERP_STRING_END _ -> - not (tokenBalancesHeadContext token stack) && + not (tokenBalancesHeadContext token stack) && // Only close the context if some context is going to match at some point in the stack. // If none match, the token will go through, and error recovery will kick in in the parser and report the extra token, // and then parsing will continue. Closing all the contexts will not achieve much except aid in a catastrophic failure. 
@@ -1403,39 +1403,39 @@ type LexFilterImpl ( | _ :: (CtxtNamespaceBody _ | CtxtModuleBody _) :: _ -> true // The context pair below is created a namespace/module scope when user explicitly uses 'begin'...'end', // and these can legally contain type definitions, so ignore this combo as uninteresting and recurse deeper - | _ :: CtxtParen((BEGIN|STRUCT), _) :: CtxtSeqBlock _ :: _ -> nextOuterMostInterestingContextIsNamespaceOrModule(offsideStack.Tail.Tail) + | _ :: CtxtParen((BEGIN|STRUCT), _) :: CtxtSeqBlock _ :: _ -> nextOuterMostInterestingContextIsNamespaceOrModule(offsideStack.Tail.Tail) // at the top of the stack there is an implicit module - | _ :: [] -> true + | _ :: [] -> true // anything else is a non-namespace/module | _ -> false while not offsideStack.IsEmpty && (not(nextOuterMostInterestingContextIsNamespaceOrModule offsideStack)) && - (match offsideStack.Head with + (match offsideStack.Head with // open-parens of sorts | CtxtParen(TokenLExprParen, _) -> true // seq blocks - | CtxtSeqBlock _ -> true + | CtxtSeqBlock _ -> true // vanillas - | CtxtVanilla _ -> true + | CtxtVanilla _ -> true // preserve all other contexts | _ -> false) do match offsideStack.Head with | CtxtParen _ -> if debug then dprintf "%s at %a terminates CtxtParen()\n" keywordName outputPos tokenStartPos popCtxt() - | CtxtSeqBlock(_, _, AddBlockEnd) -> + | CtxtSeqBlock(_, _, AddBlockEnd) -> popCtxt() - effectsToDo <- (fun() -> + effectsToDo <- (fun() -> if debug then dprintf "--> because %s is coming, inserting OBLOCKEND\n" keywordName delayTokenNoProcessing (pool.UseLocation(tokenTup, OBLOCKEND))) :: effectsToDo - | CtxtSeqBlock(_, _, NoAddBlockEnd) -> + | CtxtSeqBlock(_, _, NoAddBlockEnd) -> if debug then dprintf "--> because %s is coming, popping CtxtSeqBlock\n" keywordName popCtxt() - | CtxtSeqBlock(_, _, AddOneSidedBlockEnd) -> + | CtxtSeqBlock(_, _, AddOneSidedBlockEnd) -> popCtxt() - effectsToDo <- (fun() -> + effectsToDo <- (fun() -> if debug then dprintf "--> because %s is coming, inserting ORIGHT_BLOCK_END\n" keywordName delayTokenNoProcessing (pool.UseLocation(tokenTup, ORIGHT_BLOCK_END))) :: effectsToDo - | CtxtVanilla _ -> + | CtxtVanilla _ -> if debug then dprintf "--> because %s is coming, popping CtxtVanilla\n" keywordName popCtxt() | _ -> failwith "impossible, the while loop guard just above prevents this" @@ -1454,44 +1454,44 @@ type LexFilterImpl ( pool.Return tokenTup returnToken tokenLexbufState token - match token, offsideStack with + match token, offsideStack with // inserted faux tokens need no other processing - | _ when tokensThatNeedNoProcessingCount > 0 -> + | _ when tokensThatNeedNoProcessingCount > 0 -> tokensThatNeedNoProcessingCount <- tokensThatNeedNoProcessingCount - 1 returnToken tokenLexbufState token - | _ when tokenForcesHeadContextClosure token offsideStack -> + | _ when tokenForcesHeadContextClosure token offsideStack -> let ctxt = offsideStack.Head if debug then dprintf "IN/ELSE/ELIF/DONE/RPAREN/RBRACE/END/INTERP at %a terminates context at position %a\n" outputPos tokenStartPos outputPos ctxt.StartPos popCtxt() - match endTokenForACtxt ctxt with + match endTokenForACtxt ctxt with | Some tok -> if debug then dprintf "--> inserting %+A\n" tok insertToken tok - | _ -> + | _ -> reprocess() - // reset on ';;' rule. A ';;' terminates ALL entries - | SEMICOLON_SEMICOLON, [] -> + // reset on ';;' rule. 
A ';;' terminates ALL entries + | SEMICOLON_SEMICOLON, [] -> if debug then dprintf ";; scheduling a reset\n" delayToken(pool.UseLocation(tokenTup, ORESET)) returnToken tokenLexbufState SEMICOLON_SEMICOLON - | ORESET, [] -> + | ORESET, [] -> if debug then dprintf "performing a reset after a ;; has been swallowed\n" - // NOTE: The parser thread of F# Interactive will often be blocked on this call, e.g. after an entry has been - // processed and we're waiting for the first token of the next entry. + // NOTE: The parser thread of F# Interactive will often be blocked on this call, e.g. after an entry has been + // processed and we're waiting for the first token of the next entry. peekInitial() |> ignore pool.Return tokenTup - hwTokenFetch true + hwTokenFetch true | IN, stack when detectJoinInCtxt stack -> returnToken tokenLexbufState JOIN_IN - // Balancing rule. Encountering an 'in' balances with a 'let'. i.e. even a non-offside 'in' closes a 'let' - // The 'IN' token is thrown away and becomes an ODECLEND - | IN, CtxtLetDecl (blockLet, offsidePos) :: _ -> + // Balancing rule. Encountering an 'in' balances with a 'let'. i.e. even a non-offside 'in' closes a 'let' + // The 'IN' token is thrown away and becomes an ODECLEND + | IN, CtxtLetDecl (blockLet, offsidePos) :: _ -> if debug then dprintf "IN at %a (becomes %s)\n" outputPos tokenStartPos (if blockLet then "ODECLEND" else "IN") if tokenStartCol < offsidePos.Column then warn tokenTup (FSComp.SR.lexfltIncorrentIndentationOfIn()) popCtxt() @@ -1499,66 +1499,66 @@ type LexFilterImpl ( delayToken(pool.UseLocation(tokenTup, ODUMMY token)) returnToken tokenLexbufState (if blockLet then ODECLEND else token) - // Balancing rule. Encountering a 'done' balances with a 'do'. i.e. even a non-offside 'done' closes a 'do' - // The 'DONE' token is thrown away and becomes an ODECLEND - | DONE, CtxtDo offsidePos :: _ -> + // Balancing rule. Encountering a 'done' balances with a 'do'. i.e. even a non-offside 'done' closes a 'do' + // The 'DONE' token is thrown away and becomes an ODECLEND + | DONE, CtxtDo offsidePos :: _ -> if debug then dprintf "DONE at %a terminates CtxtDo(offsidePos=%a)\n" outputPos tokenStartPos outputPos offsidePos popCtxt() - // reprocess as the DONE may close a DO context + // reprocess as the DONE may close a DO context delayToken(pool.UseLocation(tokenTup, ODECLEND)) pool.Return tokenTup hwTokenFetch useBlockRule - // Balancing rule. Encountering a ')' or '}' balances with a '(' or '{', even if not offside - | ((TokenRExprParen | INTERP_STRING_END _ | INTERP_STRING_PART _) as t2), (CtxtParen (t1, _) :: _) + // Balancing rule. Encountering a ')' or '}' balances with a '(' or '{', even if not offside + | ((TokenRExprParen | INTERP_STRING_END _ | INTERP_STRING_PART _) as t2), (CtxtParen (t1, _) :: _) when parenTokensBalance t1 t2 -> if debug then dprintf "RPAREN/RBRACE/BAR_RBRACE/RBRACK/BAR_RBRACK/RQUOTE/END at %a terminates CtxtParen()\n" outputPos tokenStartPos popCtxt() - match t2 with + match t2 with // $".... { ... } ... { ....} " pushes a block context at second { // ~~~~~~~~ // ^---------INTERP_STRING_PART - | INTERP_STRING_PART _ -> + | INTERP_STRING_PART _ -> pushCtxt tokenTup (CtxtParen (token, tokenTup.LexbufState.EndPos)) pushCtxtSeqBlock(false, NoAddBlockEnd) - | _ -> + | _ -> // Queue a dummy token at this position to check if any closing rules apply delayToken(pool.UseLocation(tokenTup, ODUMMY token)) returnToken tokenLexbufState token - // Balancing rule. 
Encountering a 'end' can balance with a 'with' but only when not offside - | END, CtxtWithAsAugment offsidePos :: _ - when not (tokenStartCol + 1 <= offsidePos.Column) -> + // Balancing rule. Encountering a 'end' can balance with a 'with' but only when not offside + | END, CtxtWithAsAugment offsidePos :: _ + when not (tokenStartCol + 1 <= offsidePos.Column) -> if debug then dprintf "END at %a terminates CtxtWithAsAugment()\n" outputPos tokenStartPos popCtxt() delayToken(pool.UseLocation(tokenTup, ODUMMY token)) // make sure we queue a dummy token at this position to check if any closing rules apply returnToken tokenLexbufState OEND - // Transition rule. CtxtNamespaceHead ~~~> CtxtSeqBlock - // Applied when a token other then a long identifier is seen - | _, CtxtNamespaceHead (namespaceTokenPos, prevToken) :: _ -> - match prevToken, token with - | (NAMESPACE | DOT | REC | GLOBAL), (REC | IDENT _ | GLOBAL) when namespaceTokenPos.Column < tokenStartPos.Column -> + // Transition rule. CtxtNamespaceHead ~~~> CtxtSeqBlock + // Applied when a token other then a long identifier is seen + | _, CtxtNamespaceHead (namespaceTokenPos, prevToken) :: _ -> + match prevToken, token with + | (NAMESPACE | DOT | REC | GLOBAL), (REC | IDENT _ | GLOBAL) when namespaceTokenPos.Column < tokenStartPos.Column -> replaceCtxt tokenTup (CtxtNamespaceHead (namespaceTokenPos, token)) returnToken tokenLexbufState token - | IDENT _, DOT when namespaceTokenPos.Column < tokenStartPos.Column -> + | IDENT _, DOT when namespaceTokenPos.Column < tokenStartPos.Column -> replaceCtxt tokenTup (CtxtNamespaceHead (namespaceTokenPos, token)) returnToken tokenLexbufState token - | _ -> + | _ -> if debug then dprintf "CtxtNamespaceHead: pushing CtxtSeqBlock\n" popCtxt() // Don't push a new context if next token is EOF, since that raises an offside warning - match tokenTup.Token with - | EOF _ -> + match tokenTup.Token with + | EOF _ -> returnToken tokenLexbufState token - | _ -> + | _ -> delayToken tokenTup pushCtxt tokenTup (CtxtNamespaceBody namespaceTokenPos) - pushCtxtSeqBlockAt (tokenTup, true, AddBlockEnd) + pushCtxtSeqBlockAt (tokenTup, true, AddBlockEnd) hwTokenFetch false - - // Transition rule. CtxtModuleHead ~~~> push CtxtModuleBody; push CtxtSeqBlock - // Applied when a ':' or '=' token is seen + + // Transition rule. 
CtxtModuleHead ~~~> push CtxtModuleBody; push CtxtSeqBlock + // Applied when a ':' or '=' token is seen // Otherwise it's a 'head' module declaration, so ignore it // Here prevToken is either 'module', 'rec', 'global' (invalid), '.', or ident, because we skip attribute tokens and access modifier tokens @@ -1571,17 +1571,17 @@ type LexFilterImpl ( | _ when lexingModuleAttributes = LexingModuleAttributes && moduleTokenPos.Column < tokenStartPos.Column -> returnToken tokenLexbufState token - | MODULE, (PUBLIC | PRIVATE | INTERNAL) when moduleTokenPos.Column < tokenStartPos.Column -> + | MODULE, (PUBLIC | PRIVATE | INTERNAL) when moduleTokenPos.Column < tokenStartPos.Column -> returnToken tokenLexbufState token | MODULE, GLOBAL | (MODULE | REC | DOT), (REC | IDENT _) - | IDENT _, DOT when moduleTokenPos.Column < tokenStartPos.Column -> + | IDENT _, DOT when moduleTokenPos.Column < tokenStartPos.Column -> replaceCtxt tokenTup (CtxtModuleHead (moduleTokenPos, token, NotLexingModuleAttributes)) returnToken tokenLexbufState token | MODULE, LBRACK_LESS when moduleTokenPos.Column < tokenStartPos.Column -> replaceCtxt tokenTup (CtxtModuleHead (moduleTokenPos, prevToken, LexingModuleAttributes)) returnToken tokenLexbufState token - | _, (EQUALS | COLON) -> + | _, (EQUALS | COLON) -> if debug then dprintf "CtxtModuleHead: COLON/EQUALS, pushing CtxtModuleBody and CtxtSeqBlock\n" popCtxt() pushCtxt tokenTup (CtxtModuleBody (moduleTokenPos, false)) @@ -1593,14 +1593,14 @@ type LexFilterImpl ( if debug then dprintf "CtxtModuleHead: start of file, CtxtSeqBlock\n" popCtxt() // Don't push a new context if next token is EOF, since that raises an offside warning - match tokenTup.Token with - | EOF _ -> + match tokenTup.Token with + | EOF _ -> returnToken tokenLexbufState token | _ -> // We have reached other tokens without encountering '=' or ':', so this is a module declaration spanning the whole file - delayToken tokenTup + delayToken tokenTup pushCtxt tokenTup (CtxtModuleBody (moduleTokenPos, true)) - pushCtxtSeqBlockAt (tokenTup, true, AddBlockEnd) + pushCtxtSeqBlockAt (tokenTup, true, AddBlockEnd) hwTokenFetch false | _ -> // Adding a new nested module, EQUALS hasn't been typed yet @@ -1609,67 +1609,67 @@ type LexFilterImpl ( popCtxt() reprocessWithoutBlockRule() - // Offside rule for SeqBlock. + // Offside rule for SeqBlock. // f x // g x // ... - | _, CtxtSeqBlock(_, offsidePos, addBlockEnd) :: rest when - - // NOTE: ;; does not terminate a 'namespace' body. - ((isSemiSemi && not (match rest with (CtxtNamespaceBody _ | CtxtModuleBody (_, true)) :: _ -> true | _ -> false)) || - let grace = - match token, rest with - // When in a type context allow a grace of 2 column positions for '|' tokens, permits - // type x = + | _, CtxtSeqBlock(_, offsidePos, addBlockEnd) :: rest when + + // NOTE: ;; does not terminate a 'namespace' body. + ((isSemiSemi && not (match rest with (CtxtNamespaceBody _ | CtxtModuleBody (_, true)) :: _ -> true | _ -> false)) || + let grace = + match token, rest with + // When in a type context allow a grace of 2 column positions for '|' tokens, permits + // type x = // A of string <-- note missing '|' here - bad style, and perhaps should be disallowed - // | B of int + // | B of int | BAR, CtxtTypeDefns _ :: _ -> 2 // This ensures we close a type context seq block when the '|' marks - // of a type definition are aligned with the 'type' token. - // - // type x = - // | A - // | B - // + // of a type definition are aligned with the 'type' token. 
+ // + // type x = + // | A + // | B + // // <-- close the type context sequence block here *) | _, CtxtTypeDefns posType :: _ when offsidePos.Column = posType.Column && not (isTypeSeqBlockElementContinuator token) -> -1 - // This ensures we close a namespace body when we see the next namespace definition - // - // namespace A.B.C + // This ensures we close a namespace body when we see the next namespace definition + // + // namespace A.B.C // ... // // namespace <-- close the namespace body context here | _, CtxtNamespaceBody posNamespace :: _ when offsidePos.Column = posNamespace.Column && (match token with NAMESPACE -> true | _ -> false) -> -1 - | _ -> - // Allow a grace of >2 column positions for infix tokens, permits - // let x = - // expr + expr - // + expr + expr - // And - // let x = - // expr - // |> f expr - // |> f expr + | _ -> + // Allow a grace of >2 column positions for infix tokens, permits + // let x = + // expr + expr + // + expr + expr + // And + // let x = + // expr + // |> f expr + // |> f expr // Note you need a semicolon in the following situation: // - // let x = + // let x = // stmt // -expr <-- not allowed, as prefix token is here considered infix // // i.e. // - // let x = + // let x = // stmt - // -expr + // -expr (if isInfix token then infixTokenLength token + 1 else 0) - (tokenStartCol + grace < offsidePos.Column)) -> + (tokenStartCol + grace < offsidePos.Column)) -> if debug then dprintf "offside token at column %d indicates end of CtxtSeqBlock started at %a!\n" tokenStartCol outputPos offsidePos popCtxt() if debug then (match addBlockEnd with AddBlockEnd -> dprintf "end of CtxtSeqBlock, insert OBLOCKEND \n" | _ -> ()) - match addBlockEnd with + match addBlockEnd with | AddBlockEnd -> insertToken OBLOCKEND | AddOneSidedBlockEnd -> insertToken ORIGHT_BLOCK_END | NoAddBlockEnd -> reprocess() @@ -1678,7 +1678,7 @@ type LexFilterImpl ( // fff // eeeee // - | _, CtxtVanilla(offsidePos, _) :: _ when isSemiSemi || tokenStartCol <= offsidePos.Column -> + | _, CtxtVanilla(offsidePos, _) :: _ when isSemiSemi || tokenStartCol <= offsidePos.Column -> if debug then dprintf "offside token at column %d indicates end of CtxtVanilla started at %a!\n" tokenStartCol outputPos offsidePos popCtxt() reprocess() @@ -1694,73 +1694,73 @@ type LexFilterImpl ( reprocessWithoutBlockRule() // Offside rule for SeqBlock - avoiding inserting OBLOCKSEP on first item in block - | _, CtxtSeqBlock (FirstInSeqBlock, offsidePos, addBlockEnd) :: _ when useBlockRule -> - // This is the first token in a block, or a token immediately - // following an infix operator (see above). - // Return the token, but only after processing any additional rules - // applicable for this token. Don't apply the CtxtSeqBlock rule for - // this token, but do apply it on subsequent tokens. + | _, CtxtSeqBlock (FirstInSeqBlock, offsidePos, addBlockEnd) :: _ when useBlockRule -> + // This is the first token in a block, or a token immediately + // following an infix operator (see above). + // Return the token, but only after processing any additional rules + // applicable for this token. Don't apply the CtxtSeqBlock rule for + // this token, but do apply it on subsequent tokens. if debug then dprintf "repull for CtxtSeqBlockStart\n" replaceCtxt tokenTup (CtxtSeqBlock (NotFirstInSeqBlock, offsidePos, addBlockEnd)) reprocessWithoutBlockRule() // Offside rule for SeqBlock - inserting OBLOCKSEP on subsequent items in a block when they are precisely aligned // - // let f1 () = + // let f1 () = // expr // ... 
// ~~> insert OBLOCKSEP // - // let f1 () = + // let f1 () = // let x = expr // ... // ~~> insert OBLOCKSEP // - // let f1 () = + // let f1 () = // let x1 = expr // let x2 = expr // let x3 = expr // ... // ~~> insert OBLOCKSEP - | _, CtxtSeqBlock (NotFirstInSeqBlock, offsidePos, addBlockEnd) :: rest - when useBlockRule + | _, CtxtSeqBlock (NotFirstInSeqBlock, offsidePos, addBlockEnd) :: rest + when useBlockRule && not (let isTypeCtxt = (match rest with | CtxtTypeDefns _ :: _ -> true | _ -> false) // Don't insert 'OBLOCKSEP' between namespace declarations let isNamespaceCtxt = (match rest with | CtxtNamespaceBody _ :: _ -> true | _ -> false) if isNamespaceCtxt then (match token with NAMESPACE -> true | _ -> false) elif isTypeCtxt then isTypeSeqBlockElementContinuator token else isSeqBlockElementContinuator token) - && (tokenStartCol = offsidePos.Column) - && (tokenStartPos.OriginalLine <> offsidePos.OriginalLine) -> + && (tokenStartCol = offsidePos.Column) + && (tokenStartPos.OriginalLine <> offsidePos.OriginalLine) -> if debug then dprintf "offside at column %d matches start of block(%a)! delaying token, returning OBLOCKSEP\n" tokenStartCol outputPos offsidePos replaceCtxt tokenTup (CtxtSeqBlock (FirstInSeqBlock, offsidePos, addBlockEnd)) // No change to offside stack: another statement block starts... insertTokenFromPrevPosToCurrentPos OBLOCKSEP - // Offside rule for CtxtLetDecl - // let .... = + // Offside rule for CtxtLetDecl + // let .... = // ... // // - // let .... = + // let .... = // ... // // // let .... = // ... // <*> - | _, CtxtLetDecl (true, offsidePos) :: _ when - isSemiSemi || (if relaxWhitespace2OffsideRule || isLetContinuator token then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> + | _, CtxtLetDecl (true, offsidePos) :: _ when + isSemiSemi || (if relaxWhitespace2OffsideRule || isLetContinuator token then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> if debug then dprintf "token at column %d is offside from LET(offsidePos=%a)! delaying token, returning ODECLEND\n" tokenStartCol outputPos offsidePos popCtxt() insertToken ODECLEND - + // do ignore ( // 1 // ), 2 // This is a 'unit * int', so for backwards compatibility, do not treat ')' as a continuator, don't apply relaxWhitespace2OffsideRule // Test here: Tests/FSharp.Compiler.ComponentTests/Conformance/LexicalFiltering/Basic/OffsideExceptions.fs, RelaxWhitespace2_AllowedBefore9 | _, CtxtDo offsidePos :: _ - when isSemiSemi || (if isDoContinuator token then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> + when isSemiSemi || (if isDoContinuator token then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> if debug then dprintf "token at column %d is offside from DO(offsidePos=%a)! delaying token, returning ODECLEND\n" tokenStartCol outputPos offsidePos popCtxt() insertToken ODECLEND @@ -1771,36 +1771,36 @@ type LexFilterImpl ( // ... | _, CtxtInterfaceHead offsidePos :: _ - when isSemiSemi || (if relaxWhitespace2OffsideRule || isInterfaceContinuator token then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> + when isSemiSemi || (if relaxWhitespace2OffsideRule || isInterfaceContinuator token then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> if debug then dprintf "token at column %d is offside from INTERFACE(offsidePos=%a)! 
pop and reprocess\n" tokenStartCol outputPos offsidePos popCtxt() reprocess() | _, CtxtTypeDefns offsidePos :: _ - when isSemiSemi || (if relaxWhitespace2OffsideRule || isTypeContinuator token then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> + when isSemiSemi || (if relaxWhitespace2OffsideRule || isTypeContinuator token then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> if debug then dprintf "token at column %d is offside from TYPE(offsidePos=%a)! pop and reprocess\n" tokenStartCol outputPos offsidePos popCtxt() reprocess() - // module A.B.M + // module A.B.M // ... // module M = ... // end // module M = ... // ... - // NOTE: ;; does not terminate a whole file module body. - | _, CtxtModuleBody (offsidePos, wholeFile) :: _ when (isSemiSemi && not wholeFile) || (if relaxWhitespace2OffsideRule then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> + // NOTE: ;; does not terminate a whole file module body. + | _, CtxtModuleBody (offsidePos, wholeFile) :: _ when (isSemiSemi && not wholeFile) || (if relaxWhitespace2OffsideRule then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> if debug then dprintf "token at column %d is offside from MODULE with offsidePos %a! delaying token\n" tokenStartCol outputPos offsidePos popCtxt() reprocess() - // NOTE: ;; does not terminate a 'namespace' body. - | _, CtxtNamespaceBody offsidePos :: _ when (* isSemiSemi || *) (if relaxWhitespace2OffsideRule || isNamespaceContinuator token then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> + // NOTE: ;; does not terminate a 'namespace' body. + | _, CtxtNamespaceBody offsidePos :: _ when (* isSemiSemi || *) (if relaxWhitespace2OffsideRule || isNamespaceContinuator token then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> if debug then dprintf "token at column %d is offside from NAMESPACE with offsidePos %a! delaying token\n" tokenStartCol outputPos offsidePos popCtxt() reprocess() - | _, CtxtException offsidePos :: _ when isSemiSemi || (if relaxWhitespace2OffsideRule then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> + | _, CtxtException offsidePos :: _ when isSemiSemi || (if relaxWhitespace2OffsideRule then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> if debug then dprintf "token at column %d is offside from EXCEPTION with offsidePos %a! 
delaying token\n" tokenStartCol outputPos offsidePos popCtxt() reprocess() @@ -1812,65 +1812,65 @@ type LexFilterImpl ( // 1 // This is not offside for backcompat, don't apply relaxWhitespace2OffsideRule // ] // Test here: Tests/FSharp.Compiler.ComponentTests/Conformance/LexicalFiltering/Basic/OffsideExceptions.fs, RelaxWhitespace2_AllowedBefore9 - | _, CtxtMemberBody offsidePos :: _ when isSemiSemi || (if false then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> + | _, CtxtMemberBody offsidePos :: _ when isSemiSemi || (if false then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> if debug then dprintf "token at column %d is offside from MEMBER/OVERRIDE head with offsidePos %a!\n" tokenStartCol outputPos offsidePos popCtxt() insertToken ODECLEND - // Pop CtxtMemberHead when offside - | _, CtxtMemberHead offsidePos :: _ when isSemiSemi || (if relaxWhitespace2OffsideRule then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> + // Pop CtxtMemberHead when offside + | _, CtxtMemberHead offsidePos :: _ when isSemiSemi || (if relaxWhitespace2OffsideRule then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> if debug then dprintf "token at column %d is offside from MEMBER/OVERRIDE head with offsidePos %a!\n" tokenStartCol outputPos offsidePos popCtxt() reprocess() | _, CtxtIf offsidePos :: _ - when isSemiSemi || (if relaxWhitespace2OffsideRule || isIfBlockContinuator token then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> + when isSemiSemi || (if relaxWhitespace2OffsideRule || isIfBlockContinuator token then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> if debug then dprintf "offside from CtxtIf\n" popCtxt() reprocess() - + | _, CtxtWithAsLet offsidePos :: _ - when isSemiSemi || (if relaxWhitespace2OffsideRule || isLetContinuator token then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> + when isSemiSemi || (if relaxWhitespace2OffsideRule || isLetContinuator token then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> if debug then dprintf "offside from CtxtWithAsLet\n" popCtxt() insertToken OEND - + | _, CtxtWithAsAugment offsidePos :: _ - when isSemiSemi || (if relaxWhitespace2OffsideRule || isWithAugmentBlockContinuator token then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> + when isSemiSemi || (if relaxWhitespace2OffsideRule || isWithAugmentBlockContinuator token then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> if debug then dprintf "offside from CtxtWithAsAugment, isWithAugmentBlockContinuator = %b\n" (isWithAugmentBlockContinuator token) popCtxt() - insertToken ODECLEND - + insertToken ODECLEND + | _, CtxtMatch offsidePos :: _ - when isSemiSemi || (if relaxWhitespace2OffsideRule || relaxWhitespace2 && isMatchBlockContinuator token then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> + when isSemiSemi || (if relaxWhitespace2OffsideRule || relaxWhitespace2 && isMatchBlockContinuator token then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> if debug then dprintf "offside from CtxtMatch\n" popCtxt() reprocess() - - | _, CtxtFor offsidePos :: _ - when isSemiSemi || (if relaxWhitespace2OffsideRule || isForLoopContinuator token then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> + + | _, CtxtFor offsidePos :: _ + when isSemiSemi || (if relaxWhitespace2OffsideRule || isForLoopContinuator token then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> if debug then dprintf "offside from CtxtFor\n" 
popCtxt() reprocess() - - | _, CtxtWhile offsidePos :: _ - when isSemiSemi || (if relaxWhitespace2OffsideRule || isWhileBlockContinuator token then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> + + | _, CtxtWhile offsidePos :: _ + when isSemiSemi || (if relaxWhitespace2OffsideRule || isWhileBlockContinuator token then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> if debug then dprintf "offside from CtxtWhile\n" popCtxt() reprocess() - - | _, CtxtWhen offsidePos :: _ - when isSemiSemi || (if relaxWhitespace2OffsideRule then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> + + | _, CtxtWhen offsidePos :: _ + when isSemiSemi || (if relaxWhitespace2OffsideRule then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> if debug then dprintf "offside from CtxtWhen\n" popCtxt() reprocess() - + | _, CtxtFun offsidePos :: _ // fun () -> async { // 1 // }, 2 // This is a '(unit -> seq) * int', so for backwards compatibility, do not treat '}' as a continuator, don't apply relaxWhitespace2OffsideRule // Test here: Tests/FSharp.Compiler.ComponentTests/Conformance/LexicalFiltering/Basic/OffsideExceptions.fs, RelaxWhitespace2_AllowedBefore9 - when isSemiSemi || (if (*relaxWhitespace2OffsideRule*)false then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> + when isSemiSemi || (if (*relaxWhitespace2OffsideRule*)false then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> if debug then dprintf "offside from CtxtFun\n" popCtxt() insertToken OEND @@ -1879,110 +1879,110 @@ type LexFilterImpl ( // }, 2 // This is a '(unit -> seq) * int', so for backwards compatibility, do not treat '}' as a continuator, don't apply relaxWhitespace2OffsideRule // Test here: Tests/FSharp.Compiler.ComponentTests/Conformance/LexicalFiltering/Basic/OffsideExceptions.fs, RelaxWhitespace2_AllowedBefore9 | _, CtxtFunction offsidePos :: _ - when isSemiSemi || (if (*relaxWhitespace2OffsideRule*)false then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> + when isSemiSemi || (if (*relaxWhitespace2OffsideRule*)false then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> popCtxt() reprocess() - + | _, CtxtTry offsidePos :: _ - when isSemiSemi || (if relaxWhitespace2OffsideRule || isTryBlockContinuator token then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> + when isSemiSemi || (if relaxWhitespace2OffsideRule || isTryBlockContinuator token then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> if debug then dprintf "offside from CtxtTry\n" popCtxt() reprocess() - - // then + + // then // ... - // else + // else // - // then + // then // ... - | _, CtxtThen offsidePos :: _ when isSemiSemi || (if relaxWhitespace2OffsideRule || isThenBlockContinuator token then tokenStartCol + 1 else tokenStartCol)<= offsidePos.Column -> + | _, CtxtThen offsidePos :: _ when isSemiSemi || (if relaxWhitespace2OffsideRule || isThenBlockContinuator token then tokenStartCol + 1 else tokenStartCol)<= offsidePos.Column -> if debug then dprintf "offside from CtxtThen, popping\n" popCtxt() reprocess() - + // else ... // .... 
// - | _, CtxtElse (offsidePos) :: _ when isSemiSemi || (if relaxWhitespace2OffsideRule then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> + | _, CtxtElse (offsidePos) :: _ when isSemiSemi || (if relaxWhitespace2OffsideRule then tokenStartCol + 1 else tokenStartCol) <= offsidePos.Column -> if debug then dprintf "offside from CtxtElse, popping\n" popCtxt() reprocess() - // leadingBar=false permits match patterns without an initial '|' - | _, CtxtMatchClauses (leadingBar, offsidePos) :: _ - when (isSemiSemi || - (match token with - // BAR occurs in pattern matching 'with' blocks - | BAR -> + // leadingBar=false permits match patterns without an initial '|' + | _, CtxtMatchClauses (leadingBar, offsidePos) :: _ + when (isSemiSemi || + (match token with + // BAR occurs in pattern matching 'with' blocks + | BAR -> let cond1 = tokenStartCol + (if leadingBar then 0 else 2) < offsidePos.Column let cond2 = tokenStartCol + (if leadingBar then 1 else 2) < offsidePos.Column - if (cond1 <> cond2) then + if (cond1 <> cond2) then errorR(IndentationProblem(FSComp.SR.lexfltSeparatorTokensOfPatternMatchMisaligned(), mkSynRange (startPosOfTokenTup tokenTup) tokenTup.LexbufState.EndPos)) cond1 | END -> tokenStartCol + (if leadingBar then -1 else 1) < offsidePos.Column - | _ -> tokenStartCol + (if leadingBar then -1 else 1) < offsidePos.Column)) -> + | _ -> tokenStartCol + (if leadingBar then -1 else 1) < offsidePos.Column)) -> if debug then dprintf "offside from WITH, tokenStartCol = %d, offsidePos = %a, delaying token, returning OEND\n" tokenStartCol outputPos offsidePos popCtxt() insertToken OEND - - // namespace ... ~~~> CtxtNamespaceHead - | NAMESPACE, _ :: _ -> - if debug then dprintf "NAMESPACE: entering CtxtNamespaceHead, awaiting end of long identifier to push CtxtSeqBlock\n" + + // namespace ... ~~~> CtxtNamespaceHead + | NAMESPACE, _ :: _ -> + if debug then dprintf "NAMESPACE: entering CtxtNamespaceHead, awaiting end of long identifier to push CtxtSeqBlock\n" pushCtxt tokenTup (CtxtNamespaceHead (tokenStartPos, token)) returnToken tokenLexbufState token - - // module ... ~~~> CtxtModuleHead - | MODULE, _ :: _ -> + + // module ... ~~~> CtxtModuleHead + | MODULE, _ :: _ -> insertComingSoonTokens("MODULE", MODULE_COMING_SOON, MODULE_IS_HERE) if debug then dprintf "MODULE: entering CtxtModuleHead, awaiting EQUALS to go to CtxtSeqBlock (%a)\n" outputPos tokenStartPos pushCtxt tokenTup (CtxtModuleHead (tokenStartPos, token, NotLexingModuleAttributes)) pool.Return tokenTup hwTokenFetch useBlockRule - - // exception ... ~~~> CtxtException - | EXCEPTION, _ :: _ -> + + // exception ... ~~~> CtxtException + | EXCEPTION, _ :: _ -> if debug then dprintf "EXCEPTION: entering CtxtException(%a)\n" outputPos tokenStartPos pushCtxt tokenTup (CtxtException tokenStartPos) returnToken tokenLexbufState token - - // let ... ~~~> CtxtLetDecl - // -- this rule only applies to - // - 'static let' - | LET isUse, ctxt :: _ when (match ctxt with CtxtMemberHead _ -> true | _ -> false) -> + + // let ... ~~~> CtxtLetDecl + // -- this rule only applies to + // - 'static let' + | LET isUse, ctxt :: _ when (match ctxt with CtxtMemberHead _ -> true | _ -> false) -> if debug then dprintf "LET: entering CtxtLetDecl(), awaiting EQUALS to go to CtxtSeqBlock (%a)\n" outputPos tokenStartPos let startPos = match ctxt with CtxtMemberHead startPos -> startPos | _ -> tokenStartPos popCtxt() // get rid of the CtxtMemberHead pushCtxt tokenTup (CtxtLetDecl(true, startPos)) returnToken tokenLexbufState (OLET isUse) - // let ... 
~~~> CtxtLetDecl - // -- this rule only applies to - // - 'let' 'right-on' a SeqBlock line + // let ... ~~~> CtxtLetDecl + // -- this rule only applies to + // - 'let' 'right-on' a SeqBlock line // - 'let' in a CtxtMatchClauses, which is a parse error, but we need to treat as OLET to get various O...END tokens to enable parser to recover - | LET isUse, ctxt :: _ -> - let blockLet = match ctxt with | CtxtSeqBlock _ -> true - | CtxtMatchClauses _ -> true + | LET isUse, ctxt :: _ -> + let blockLet = match ctxt with | CtxtSeqBlock _ -> true + | CtxtMatchClauses _ -> true | _ -> false if debug then dprintf "LET: entering CtxtLetDecl(blockLet=%b), awaiting EQUALS to go to CtxtSeqBlock (%a)\n" blockLet outputPos tokenStartPos pushCtxt tokenTup (CtxtLetDecl(blockLet, tokenStartPos)) returnToken tokenLexbufState (if blockLet then OLET isUse else token) - - // let! ... ~~~> CtxtLetDecl - | BINDER b, ctxt :: _ -> + + // let! ... ~~~> CtxtLetDecl + | BINDER b, ctxt :: _ -> let blockLet = match ctxt with CtxtSeqBlock _ -> true | _ -> false if debug then dprintf "LET: entering CtxtLetDecl(blockLet=%b), awaiting EQUALS to go to CtxtSeqBlock (%a)\n" blockLet outputPos tokenStartPos pushCtxt tokenTup (CtxtLetDecl(blockLet, tokenStartPos)) returnToken tokenLexbufState (if blockLet then OBINDER b else token) - // and! ... ~~~> CtxtLetDecl - | AND_BANG isUse, ctxt :: _ -> + // and! ... ~~~> CtxtLetDecl + | AND_BANG isUse, ctxt :: _ -> let blockLet = match ctxt with CtxtSeqBlock _ -> true | _ -> false if debug then dprintf "AND!: entering CtxtLetDecl(blockLet=%b), awaiting EQUALS to go to CtxtSeqBlock (%a)\n" blockLet outputPos tokenStartPos pushCtxt tokenTup (CtxtLetDecl(blockLet,tokenStartPos)) returnToken tokenLexbufState (if blockLet then OAND_BANG isUse else token) - | (VAL | STATIC | ABSTRACT | MEMBER | OVERRIDE | DEFAULT), ctxtStack when thereIsACtxtMemberBodyOnTheStackAndWeShouldPopStackForUpcomingMember ctxtStack -> + | (VAL | STATIC | ABSTRACT | MEMBER | OVERRIDE | DEFAULT), ctxtStack when thereIsACtxtMemberBodyOnTheStackAndWeShouldPopStackForUpcomingMember ctxtStack -> if debug then dprintf "STATIC/MEMBER/OVERRIDE/DEFAULT: already inside CtxtMemberBody, popping all that context before starting next member...\n" // save this token, we'll consume it again later... delayTokenNoProcessing tokenTup @@ -1990,7 +1990,7 @@ type LexFilterImpl ( while (match offsideStack.Head with CtxtMemberBody _ -> false | _ -> true) do match endTokenForACtxt offsideStack.Head with // some contexts require us to insert various END tokens - | Some tok -> + | Some tok -> popCtxt() if debug then dprintf "--> inserting %+A\n" tok delayTokenNoProcessing (pool.UseLocation(tokenTup, tok)) @@ -1999,43 +1999,43 @@ type LexFilterImpl ( popCtxt() // pop CtxtMemberBody if debug then dprintf "...STATIC/MEMBER/OVERRIDE/DEFAULT: finished popping all that context\n" hwTokenFetch useBlockRule - - // static member ... ~~~> CtxtMemberHead - // static ... ~~~> CtxtMemberHead - // member ... ~~~> CtxtMemberHead - // override ... ~~~> CtxtMemberHead - // default ... ~~~> CtxtMemberHead - // val ... ~~~> CtxtMemberHead - | (VAL | STATIC | ABSTRACT | MEMBER | OVERRIDE | DEFAULT), ctxt :: _ when (match ctxt with CtxtMemberHead _ -> false | _ -> true) -> + + // static member ... ~~~> CtxtMemberHead + // static ... ~~~> CtxtMemberHead + // member ... ~~~> CtxtMemberHead + // override ... ~~~> CtxtMemberHead + // default ... ~~~> CtxtMemberHead + // val ... 
~~~> CtxtMemberHead + | (VAL | STATIC | ABSTRACT | MEMBER | OVERRIDE | DEFAULT), ctxt :: _ when (match ctxt with CtxtMemberHead _ -> false | _ -> true) -> if debug then dprintf "STATIC/MEMBER/OVERRIDE/DEFAULT: entering CtxtMemberHead, awaiting EQUALS to go to CtxtSeqBlock (%a)\n" outputPos tokenStartPos pushCtxt tokenTup (CtxtMemberHead tokenStartPos) returnToken tokenLexbufState token - // public new... ~~~> CtxtMemberHead - | (PUBLIC | PRIVATE | INTERNAL), _ctxt :: _ when (match peekNextToken() with NEW -> true | _ -> false) -> + // public new... ~~~> CtxtMemberHead + | (PUBLIC | PRIVATE | INTERNAL), _ctxt :: _ when (match peekNextToken() with NEW -> true | _ -> false) -> if debug then dprintf "PUBLIC/PRIVATE/INTERNAL NEW: entering CtxtMemberHead, awaiting EQUALS to go to CtxtSeqBlock (%a)\n" outputPos tokenStartPos pushCtxt tokenTup (CtxtMemberHead tokenStartPos) returnToken tokenLexbufState token - // new( ~~~> CtxtMemberHead, if not already there because of 'public' - | NEW, ctxt :: _ when (match peekNextToken() with LPAREN -> true | _ -> false) && (match ctxt with CtxtMemberHead _ -> false | _ -> true) -> + // new( ~~~> CtxtMemberHead, if not already there because of 'public' + | NEW, ctxt :: _ when (match peekNextToken() with LPAREN -> true | _ -> false) && (match ctxt with CtxtMemberHead _ -> false | _ -> true) -> if debug then dprintf "NEW: entering CtxtMemberHead, awaiting EQUALS to go to CtxtSeqBlock (%a)\n" outputPos tokenStartPos pushCtxt tokenTup (CtxtMemberHead tokenStartPos) returnToken tokenLexbufState token - - // 'let ... = ' ~~~> CtxtSeqBlock - | EQUALS, CtxtLetDecl _ :: _ -> + + // 'let ... = ' ~~~> CtxtSeqBlock + | EQUALS, CtxtLetDecl _ :: _ -> if debug then dprintf "CtxtLetDecl: EQUALS, pushing CtxtSeqBlock\n" pushCtxtSeqBlock(true, AddBlockEnd) returnToken tokenLexbufState token - | EQUALS, CtxtTypeDefns _ :: _ -> + | EQUALS, CtxtTypeDefns _ :: _ -> if debug then dprintf "CtxType: EQUALS, pushing CtxtSeqBlock\n" pushCtxtSeqBlock(true, AddBlockEnd) returnToken tokenLexbufState token - | (LAZY | ASSERT), _ -> - if isControlFlowOrNotSameLine() then + | (LAZY | ASSERT), _ -> + if isControlFlowOrNotSameLine() then if debug then dprintf "LAZY/ASSERT, pushing CtxtSeqBlock\n" pushCtxtSeqBlock(true, AddBlockEnd) returnToken tokenLexbufState (match token with LAZY -> OLAZY | _ -> OASSERT) @@ -2043,43 +2043,43 @@ type LexFilterImpl ( returnToken tokenLexbufState token - // 'with id = ' ~~~> CtxtSeqBlock - // 'with M.id = ' ~~~> CtxtSeqBlock + // 'with id = ' ~~~> CtxtSeqBlock + // 'with M.id = ' ~~~> CtxtSeqBlock // 'with id1 = 1 - // id2 = ... ~~~> CtxtSeqBlock + // id2 = ... ~~~> CtxtSeqBlock // 'with id1 = 1 - // M.id2 = ... ~~~> CtxtSeqBlock - // '{ id = ... ' ~~~> CtxtSeqBlock - // '{ M.id = ... ' ~~~> CtxtSeqBlock + // M.id2 = ... ~~~> CtxtSeqBlock + // '{ id = ... ' ~~~> CtxtSeqBlock + // '{ M.id = ... ' ~~~> CtxtSeqBlock // '{ id1 = 1 - // id2 = ... ' ~~~> CtxtSeqBlock + // id2 = ... ' ~~~> CtxtSeqBlock // '{ id1 = 1 - // M.id2 = ... ' ~~~> CtxtSeqBlock - | EQUALS, CtxtWithAsLet _ :: _ // This detects 'with = '. - | EQUALS, CtxtVanilla (_, true) :: CtxtSeqBlock _ :: (CtxtWithAsLet _ | CtxtParen((LBRACE _ | LBRACE_BAR), _)) :: _ -> + // M.id2 = ... ' ~~~> CtxtSeqBlock + | EQUALS, CtxtWithAsLet _ :: _ // This detects 'with = '. 
+ | EQUALS, CtxtVanilla (_, true) :: CtxtSeqBlock _ :: (CtxtWithAsLet _ | CtxtParen((LBRACE _ | LBRACE_BAR), _)) :: _ -> if debug then dprintf "CtxtLetDecl/CtxtWithAsLet: EQUALS, pushing CtxtSeqBlock\n" // We don't insert begin/end block tokens for single-line bindings since we can't properly distinguish single-line *) // record update expressions such as "{ t with gbuckets=Array.copy t.gbuckets; gcount=t.gcount }" *) // These have a syntactically odd status because of the use of ";" to terminate expressions, so each *) // "=" binding is not properly balanced by "in" or "and" tokens in the single line syntax (unlike other bindings) *) - if isControlFlowOrNotSameLine() then + if isControlFlowOrNotSameLine() then pushCtxtSeqBlock(true, AddBlockEnd) else pushCtxtSeqBlock(false, NoAddBlockEnd) returnToken tokenLexbufState token - // 'new(... =' ~~~> CtxtMemberBody, CtxtSeqBlock - // 'member ... =' ~~~> CtxtMemberBody, CtxtSeqBlock - // 'static member ... =' ~~~> CtxtMemberBody, CtxtSeqBlock - // 'default ... =' ~~~> CtxtMemberBody, CtxtSeqBlock - // 'override ... =' ~~~> CtxtMemberBody, CtxtSeqBlock - | EQUALS, CtxtMemberHead offsidePos :: _ -> + // 'new(... =' ~~~> CtxtMemberBody, CtxtSeqBlock + // 'member ... =' ~~~> CtxtMemberBody, CtxtSeqBlock + // 'static member ... =' ~~~> CtxtMemberBody, CtxtSeqBlock + // 'default ... =' ~~~> CtxtMemberBody, CtxtSeqBlock + // 'override ... =' ~~~> CtxtMemberBody, CtxtSeqBlock + | EQUALS, CtxtMemberHead offsidePos :: _ -> if debug then dprintf "CtxtMemberHead: EQUALS, pushing CtxtSeqBlock\n" replaceCtxt tokenTup (CtxtMemberBody offsidePos) pushCtxtSeqBlock(true, AddBlockEnd) returnToken tokenLexbufState token - // '(' tokens are balanced with ')' tokens and also introduce a CtxtSeqBlock + // '(' tokens are balanced with ')' tokens and also introduce a CtxtSeqBlock // $".... { ... } ... { ....} " pushes a block context at first { // ~~~~~~~~ // ^---------INTERP_STRING_BEGIN_PART @@ -2092,29 +2092,29 @@ type LexFilterImpl ( pushCtxtSeqBlock(false, NoAddBlockEnd) returnToken tokenLexbufState token - // '(' tokens are balanced with ')' tokens and also introduce a CtxtSeqBlock - | STRUCT, ctxts - when (match ctxts with - | CtxtSeqBlock _ :: (CtxtModuleBody _ | CtxtTypeDefns _) :: _ -> - // type ... = struct ... end - // module ... = struct ... end - true - + // '(' tokens are balanced with ')' tokens and also introduce a CtxtSeqBlock + | STRUCT, ctxts + when (match ctxts with + | CtxtSeqBlock _ :: (CtxtModuleBody _ | CtxtTypeDefns _) :: _ -> + // type ... = struct ... end + // module ... = struct ... 
end + true + | _ -> false) (* type X<'a when 'a : struct> *) -> if debug then dprintf "LPAREN etc., pushes CtxtParen, pushing CtxtSeqBlock, tokenStartPos = %a\n" outputPos tokenStartPos pushCtxt tokenTup (CtxtParen (token, tokenStartPos)) pushCtxtSeqBlock(false, NoAddBlockEnd) returnToken tokenLexbufState token - | RARROW, ctxts - // Only treat '->' as a starting a sequence block in certain circumstances - when (match ctxts with - // comprehension/match - | (CtxtWhile _ | CtxtFor _ | CtxtWhen _ | CtxtMatchClauses _ | CtxtFun _) :: _ -> true - // comprehension - | CtxtSeqBlock _ :: CtxtParen ((LBRACK | LBRACE _ | LBRACE_BAR | LBRACK_BAR), _) :: _ -> true - // comprehension - | CtxtSeqBlock _ :: (CtxtDo _ | CtxtWhile _ | CtxtFor _ | CtxtWhen _ | CtxtMatchClauses _ | CtxtTry _ | CtxtThen _ | CtxtElse _) :: _ -> true + | RARROW, ctxts + // Only treat '->' as a starting a sequence block in certain circumstances + when (match ctxts with + // comprehension/match + | (CtxtWhile _ | CtxtFor _ | CtxtWhen _ | CtxtMatchClauses _ | CtxtFun _) :: _ -> true + // comprehension + | CtxtSeqBlock _ :: CtxtParen ((LBRACK | LBRACE _ | LBRACE_BAR | LBRACK_BAR), _) :: _ -> true + // comprehension + | CtxtSeqBlock _ :: (CtxtDo _ | CtxtWhile _ | CtxtFor _ | CtxtWhen _ | CtxtMatchClauses _ | CtxtTry _ | CtxtThen _ | CtxtElse _) :: _ -> true | _ -> false) -> if debug then dprintf "RARROW, pushing CtxtSeqBlock, tokenStartPos = %a\n" outputPos tokenStartPos pushCtxtSeqBlock(false, AddOneSidedBlockEnd) @@ -2125,71 +2125,71 @@ type LexFilterImpl ( pushCtxtSeqBlock(true, AddBlockEnd) returnToken tokenLexbufState token - // do ~~> CtxtDo;CtxtSeqBlock (unconditionally) - | (DO | DO_BANG), _ -> + // do ~~> CtxtDo;CtxtSeqBlock (unconditionally) + | (DO | DO_BANG), _ -> if debug then dprintf "DO: pushing CtxtSeqBlock, tokenStartPos = %a\n" outputPos tokenStartPos pushCtxt tokenTup (CtxtDo tokenStartPos) pushCtxtSeqBlock(true, AddBlockEnd) returnToken tokenLexbufState (match token with DO -> ODO | DO_BANG -> ODO_BANG | _ -> failwith "unreachable") - // The r.h.s. of an infix token begins a new block. - | _, ctxts when (isInfix token && - not (isSameLine()) && + // The r.h.s. of an infix token begins a new block. 
+ | _, ctxts when (isInfix token && + not (isSameLine()) && // This doesn't apply to the use of any infix tokens in a pattern match or 'when' block // For example // // match x with // | _ when true && // false -> // the 'false' token shouldn't start a new block - // let x = 1 + // let x = 1 // x - (match ctxts with CtxtMatchClauses _ :: _ -> false | _ -> true)) -> + (match ctxts with CtxtMatchClauses _ :: _ -> false | _ -> true)) -> if debug then dprintf "(Infix etc.), pushing CtxtSeqBlock, tokenStartPos = %a\n" outputPos tokenStartPos pushCtxtSeqBlock(false, NoAddBlockEnd) returnToken tokenLexbufState token - | WITH, (CtxtTry _ | CtxtMatch _) :: _ -> + | WITH, (CtxtTry _ | CtxtMatch _) :: _ -> let lookaheadTokenTup = peekNextTokenTup() let lookaheadTokenStartPos = startPosOfTokenTup lookaheadTokenTup let leadingBar = match (peekNextToken()) with BAR -> true | _ -> false if debug then dprintf "WITH, pushing CtxtMatchClauses, lookaheadTokenStartPos = %a, tokenStartPos = %a\n" outputPos lookaheadTokenStartPos outputPos tokenStartPos pushCtxt lookaheadTokenTup (CtxtMatchClauses(leadingBar, lookaheadTokenStartPos)) - returnToken tokenLexbufState OWITH + returnToken tokenLexbufState OWITH - | FINALLY, CtxtTry _ :: _ -> + | FINALLY, CtxtTry _ :: _ -> if debug then dprintf "FINALLY, pushing pushCtxtSeqBlock, tokenStartPos = %a\n" outputPos tokenStartPos pushCtxtSeqBlock(true, AddBlockEnd) returnToken tokenLexbufState token - | WITH, (CtxtException _ | CtxtTypeDefns _ | CtxtMemberHead _ | CtxtInterfaceHead _ | CtxtMemberBody _ as limCtxt) :: _ - | WITH, (CtxtSeqBlock _ as limCtxt :: CtxtParen((LBRACE _ | LBRACE_BAR), _) :: _) -> + | WITH, (CtxtException _ | CtxtTypeDefns _ | CtxtMemberHead _ | CtxtInterfaceHead _ | CtxtMemberBody _ as limCtxt) :: _ + | WITH, (CtxtSeqBlock _ as limCtxt :: CtxtParen((LBRACE _ | LBRACE_BAR), _) :: _) -> let lookaheadTokenTup = peekNextTokenTup() let lookaheadTokenStartPos = startPosOfTokenTup lookaheadTokenTup - match lookaheadTokenTup.Token with + match lookaheadTokenTup.Token with | RBRACE _ - | IDENT _ + | IDENT _ // The next clause detects the access annotations after the 'with' in: - // member x.PublicGetSetProperty + // member x.PublicGetSetProperty // with public get i = "Ralf" - // and private set i v = () - // - // as well as: - // member x.PublicGetSetProperty + // and private set i v = () + // + // as well as: + // member x.PublicGetSetProperty // with inline get() = "Ralf" - // and [] set v = () - // - | PUBLIC | PRIVATE | INTERNAL | INLINE -> + // and [] set v = () + // + | PUBLIC | PRIVATE | INTERNAL | INLINE -> - let offsidePos = + let offsidePos = if lookaheadTokenStartPos.Column > tokenTup.LexbufState.EndPos.Column then // This detects: - // { new Foo + // { new Foo // with M() = 1 - // and N() = 2 } - // and treats the inner bindings as if they were member bindings. + // and N() = 2 } + // and treats the inner bindings as if they were member bindings. 
// (HOWEVER: note the above language construct is now deprecated/removed) - // + // // It also happens to detect // { foo with m = 1 // n = 2 } @@ -2197,24 +2197,24 @@ type LexFilterImpl ( tokenStartPos else // This detects: - // { foo with + // { foo with // m = 1 // n = 2 } // So we're careful to set the offside column to be the minimum required *) limCtxt.StartPos if debug then dprintf "WITH, pushing CtxtWithAsLet, tokenStartPos = %a, lookaheadTokenStartPos = %a\n" outputPos tokenStartPos outputPos lookaheadTokenStartPos pushCtxt tokenTup (CtxtWithAsLet offsidePos) - - // Detect 'with' bindings of the form + + // Detect 'with' bindings of the form // // with x = ... // - // Which can only be part of + // Which can only be part of // // { r with x = ... } // // and in this case push a CtxtSeqBlock to cover the sequence - let isFollowedByLongIdentEquals = + let isFollowedByLongIdentEquals = let tokenTup = popNextTokenTup() let res = isLongIdentEquals tokenTup.Token delayToken tokenTup @@ -2222,18 +2222,18 @@ type LexFilterImpl ( if isFollowedByLongIdentEquals then pushCtxtSeqBlock(false, NoAddBlockEnd) - - returnToken tokenLexbufState OWITH - | _ -> + + returnToken tokenLexbufState OWITH + | _ -> if debug then dprintf "WITH, pushing CtxtWithAsAugment and CtxtSeqBlock, tokenStartPos = %a, limCtxt = %A\n" outputPos tokenStartPos limCtxt // // For attributes on properties: - // member x.PublicGetSetProperty + // member x.PublicGetSetProperty // with [] get() = "Ralf" if (match lookaheadTokenTup.Token with LBRACK_LESS -> true | _ -> false) && (lookaheadTokenStartPos.OriginalLine = tokenTup.StartPos.OriginalLine) then let offsidePos = tokenStartPos pushCtxt tokenTup (CtxtWithAsLet offsidePos) - returnToken tokenLexbufState OWITH + returnToken tokenLexbufState OWITH // Recovery for `interface ... with` member without further indented member implementations elif lookaheadTokenStartPos.Column <= limCtxt.StartCol && (match limCtxt with CtxtInterfaceHead _ -> true | _ -> false) then @@ -2241,35 +2241,35 @@ type LexFilterImpl ( else // In these situations - // interface I with + // interface I with // ... // end - // exception ... with + // exception ... with // ... // end - // type ... with + // type ... with // ... // end - // member x.P + // member x.P // with get() = ... // and set() = ... - // member x.P with + // member x.P with // get() = ... 
- // The limit is "interface"/"exception"/"type" + // The limit is "interface"/"exception"/"type" let offsidePos = limCtxt.StartPos - + pushCtxt tokenTup (CtxtWithAsAugment offsidePos) pushCtxtSeqBlock(true, AddBlockEnd) - returnToken tokenLexbufState token + returnToken tokenLexbufState token - | WITH, stack -> + | WITH, stack -> if debug then dprintf "WITH\n" if debug then dprintf "WITH --> NO MATCH, pushing CtxtWithAsAugment (type augmentation), stack = %A" stack pushCtxt tokenTup (CtxtWithAsAugment tokenStartPos) pushCtxtSeqBlock(true, AddBlockEnd) - returnToken tokenLexbufState token + returnToken tokenLexbufState token - | FUNCTION, _ -> + | FUNCTION, _ -> let lookaheadTokenTup = peekNextTokenTup() let lookaheadTokenStartPos = startPosOfTokenTup lookaheadTokenTup let leadingBar = match (peekNextToken()) with BAR -> true | _ -> false @@ -2277,22 +2277,22 @@ type LexFilterImpl ( pushCtxt lookaheadTokenTup (CtxtMatchClauses(leadingBar, lookaheadTokenStartPos)) returnToken tokenLexbufState OFUNCTION - | THEN, _ -> + | THEN, _ -> if debug then dprintf "THEN, replacing THEN with OTHEN, pushing CtxtSeqBlock;CtxtThen(%a)\n" outputPos tokenStartPos pushCtxt tokenTup (CtxtThen tokenStartPos) pushCtxtSeqBlock(true, AddBlockEnd) - returnToken tokenLexbufState OTHEN + returnToken tokenLexbufState OTHEN - | ELSE, _ -> + | ELSE, _ -> let lookaheadTokenTup = peekNextTokenTup() let lookaheadTokenStartPos, lookaheadTokenEndPos = posOfTokenTup lookaheadTokenTup - match peekNextToken() with + match peekNextToken() with | IF when isSameLine() -> // We convert ELSE IF to ELIF since it then opens the block at the right point, // In particular the case // if e1 then e2 // else if e3 then e4 - // else if e5 then e6 + // else if e5 then e6 popNextTokenTup() |> pool.Return if debug then dprintf "ELSE IF: replacing ELSE IF with ELIF, pushing CtxtIf, CtxtVanilla(%a)\n" outputPos tokenStartPos pushCtxt tokenTup (CtxtIf tokenStartPos) @@ -2300,104 +2300,104 @@ type LexFilterImpl ( let correctedTokenLexbufState = LexbufState(tokenStartPos, lookaheadTokenEndPos, false) returnToken correctedTokenLexbufState ELIF - | _ -> + | _ -> if debug then dprintf "ELSE: replacing ELSE with OELSE, pushing CtxtSeqBlock, CtxtElse(%a)\n" outputPos lookaheadTokenStartPos pushCtxt tokenTup (CtxtElse tokenStartPos) pushCtxtSeqBlock(true, AddBlockEnd) returnToken tokenLexbufState OELSE - | (ELIF | IF), _ -> + | (ELIF | IF), _ -> if debug then dprintf "IF, pushing CtxtIf(%a)\n" outputPos tokenStartPos pushCtxt tokenTup (CtxtIf tokenStartPos) returnToken tokenLexbufState token - | (MATCH | MATCH_BANG), _ -> + | (MATCH | MATCH_BANG), _ -> if debug then dprintf "MATCH, pushing CtxtMatch(%a)\n" outputPos tokenStartPos pushCtxt tokenTup (CtxtMatch tokenStartPos) returnToken tokenLexbufState token - | FOR, _ -> + | FOR, _ -> if debug then dprintf "FOR, pushing CtxtFor(%a)\n" outputPos tokenStartPos pushCtxt tokenTup (CtxtFor tokenStartPos) returnToken tokenLexbufState token - | WHILE, _ -> + | WHILE, _ -> if debug then dprintf "WHILE, pushing CtxtWhile(%a)\n" outputPos tokenStartPos pushCtxt tokenTup (CtxtWhile tokenStartPos) returnToken tokenLexbufState token - | WHEN, CtxtSeqBlock _ :: _ -> + | WHEN, CtxtSeqBlock _ :: _ -> if debug then dprintf "WHEN, pushing CtxtWhen(%a)\n" outputPos tokenStartPos pushCtxt tokenTup (CtxtWhen tokenStartPos) returnToken tokenLexbufState token - | FUN, _ -> + | FUN, _ -> if debug then dprintf "FUN, pushing CtxtFun(%a)\n" outputPos tokenStartPos pushCtxt tokenTup (CtxtFun tokenStartPos) returnToken 
tokenLexbufState OFUN - | INTERFACE, _ -> + | INTERFACE, _ -> let lookaheadTokenTup = peekNextTokenTup() let lookaheadTokenStartPos = startPosOfTokenTup lookaheadTokenTup - match lookaheadTokenTup.Token with - // type I = interface .... end - | DEFAULT | OVERRIDE | INTERFACE | NEW | TYPE | STATIC | END | MEMBER | ABSTRACT | INHERIT | LBRACK_LESS -> + match lookaheadTokenTup.Token with + // type I = interface .... end + | DEFAULT | OVERRIDE | INTERFACE | NEW | TYPE | STATIC | END | MEMBER | ABSTRACT | INHERIT | LBRACK_LESS -> if debug then dprintf "INTERFACE, pushing CtxtParen, tokenStartPos = %a, lookaheadTokenStartPos = %a\n" outputPos tokenStartPos outputPos lookaheadTokenStartPos pushCtxt tokenTup (CtxtParen (token, tokenStartPos)) pushCtxtSeqBlock(true, AddBlockEnd) returnToken tokenLexbufState token - // type C with interface .... with - // type C = interface .... with - | _ -> + // type C with interface .... with + // type C = interface .... with + | _ -> if debug then dprintf "INTERFACE, pushing CtxtInterfaceHead, tokenStartPos = %a, lookaheadTokenStartPos = %a\n" outputPos tokenStartPos outputPos lookaheadTokenStartPos pushCtxt tokenTup (CtxtInterfaceHead tokenStartPos) returnToken tokenLexbufState OINTERFACE_MEMBER - | CLASS, _ -> + | CLASS, _ -> if debug then dprintf "CLASS, pushing CtxtParen(%a)\n" outputPos tokenStartPos pushCtxt tokenTup (CtxtParen (token, tokenStartPos)) pushCtxtSeqBlock(true, AddBlockEnd) returnToken tokenLexbufState token - | TYPE, _ -> + | TYPE, _ -> insertComingSoonTokens("TYPE", TYPE_COMING_SOON, TYPE_IS_HERE) if debug then dprintf "TYPE, pushing CtxtTypeDefns(%a)\n" outputPos tokenStartPos pushCtxt tokenTup (CtxtTypeDefns tokenStartPos) pool.Return tokenTup hwTokenFetch useBlockRule - | TRY, _ -> + | TRY, _ -> if debug then dprintf "Try, pushing CtxtTry(%a)\n" outputPos tokenStartPos pushCtxt tokenTup (CtxtTry tokenStartPos) - // The ideal spec would be to push a begin/end block pair here, but we can only do that - // if we are able to balance the WITH with the TRY. We can't do that because of the numerous ways - // WITH is used in the grammar (see what happens when we hit a WITH below. - // This hits in the single line case: "try make ef1 t with _ -> make ef2 t". - + // The ideal spec would be to push a begin/end block pair here, but we can only do that + // if we are able to balance the WITH with the TRY. We can't do that because of the numerous ways + // WITH is used in the grammar (see what happens when we hit a WITH below. + // This hits in the single line case: "try make ef1 t with _ -> make ef2 t". 
+ pushCtxtSeqBlock(false, AddOneSidedBlockEnd) returnToken tokenLexbufState token - | OBLOCKBEGIN, _ -> - returnToken tokenLexbufState token - - | ODUMMY _, _ -> + | OBLOCKBEGIN, _ -> + returnToken tokenLexbufState token + + | ODUMMY _, _ -> if debug then dprintf "skipping dummy token as no offside rules apply\n" pool.Return tokenTup - hwTokenFetch useBlockRule - - // Ordinary tokens start a vanilla block - | _, CtxtSeqBlock _ :: _ -> + hwTokenFetch useBlockRule + + // Ordinary tokens start a vanilla block + | _, CtxtSeqBlock _ :: _ -> pushCtxt tokenTup (CtxtVanilla(tokenStartPos, isLongIdentEquals token)) if debug then dprintf "pushing CtxtVanilla at tokenStartPos = %a\n" outputPos tokenStartPos - returnToken tokenLexbufState token - - | _ -> - returnToken tokenLexbufState token + returnToken tokenLexbufState token - and insertHighPrecedenceApp (tokenTup: TokenTup) = + | _ -> + returnToken tokenLexbufState token + + and insertHighPrecedenceApp (tokenTup: TokenTup) = let dotTokenTup = peekNextTokenTup() if debug then dprintf "inserting HIGH_PRECEDENCE_PAREN_APP at dotTokenPos = %a\n" outputPos (startPosOfTokenTup dotTokenTup) - let hpa = + let hpa = if nextTokenIsAdjacentLParen tokenTup then HIGH_PRECEDENCE_PAREN_APP elif nextTokenIsAdjacentLBrack tokenTup then @@ -2408,8 +2408,8 @@ type LexFilterImpl ( delayToken tokenTup true - and rulesForBothSoftWhiteAndHardWhite(tokenTup: TokenTup) = - match tokenTup.Token with + and rulesForBothSoftWhiteAndHardWhite(tokenTup: TokenTup) = + match tokenTup.Token with | HASH_IDENT ident -> let hashPos = LexbufState(tokenTup.StartPos, tokenTup.StartPos.ShiftColumnBy(1), false) let identPos = LexbufState(tokenTup.StartPos.ShiftColumnBy(1), tokenTup.EndPos, false) @@ -2417,20 +2417,20 @@ type LexFilterImpl ( delayToken(TokenTup(HASH, hashPos, tokenTup.LastTokenPos)) true - // Insert HIGH_PRECEDENCE_BRACK_APP if needed + // Insert HIGH_PRECEDENCE_BRACK_APP if needed // ident[3] | IDENT _ when nextTokenIsAdjacentLBrack tokenTup -> insertHighPrecedenceApp tokenTup - // Insert HIGH_PRECEDENCE_PAREN_APP if needed + // Insert HIGH_PRECEDENCE_PAREN_APP if needed // ident(3) | IDENT _ when nextTokenIsAdjacentLParen tokenTup -> insertHighPrecedenceApp tokenTup - // Insert HIGH_PRECEDENCE_TYAPP if needed + // Insert HIGH_PRECEDENCE_TYAPP if needed | DELEGATE | IDENT _ | IEEE64 _ | IEEE32 _ | DECIMAL _ | INT8 _ | INT16 _ | INT32 _ | INT64 _ | NATIVEINT _ | UINT8 _ | UINT16 _ | UINT32 _ | UINT64 _ | UNATIVEINT _ | BIGNUM _ when peekAdjacentTypars false tokenTup -> let lessTokenTup = popNextTokenTup() - delayToken (pool.UseLocation(lessTokenTup, match lessTokenTup.Token with LESS _ -> LESS true | _ -> failwith "unreachable")) + delayToken (pool.UseLocation(lessTokenTup, match lessTokenTup.Token with LESS _ -> LESS true | _ -> failwith "unreachable")) if debug then dprintf "softwhite inserting HIGH_PRECEDENCE_TYAPP at dotTokenPos = %a\n" outputPos (startPosOfTokenTup lessTokenTup) @@ -2441,21 +2441,21 @@ type LexFilterImpl ( // ..^1 will get parsed as DOT_DOT_HAT 1 while 1..^2 will get parsed as 1 DOT_DOT HAT 2 // because of processing rule underneath this. 
- | DOT_DOT_HAT -> + | DOT_DOT_HAT -> let hatPos = LexbufState(tokenTup.EndPos.ShiftColumnBy(-1), tokenTup.EndPos, false) delayToken(let rented = pool.Rent() in rented.Token <- INFIX_AT_HAT_OP("^"); rented.LexbufState <- hatPos; rented.LastTokenPos <- tokenTup.LastTokenPos; rented) delayToken(pool.UseShiftedLocation(tokenTup, DOT_DOT, 0, -1)) pool.Return tokenTup true - // Split this token to allow "1..2" for range specification + // Split this token to allow "1..2" for range specification | INT32_DOT_DOT (i, v) -> let dotDotPos = LexbufState(tokenTup.EndPos.ShiftColumnBy(-2), tokenTup.EndPos, false) delayToken(let rented = pool.Rent() in rented.Token <- DOT_DOT; rented.LexbufState <- dotDotPos; rented.LastTokenPos <- tokenTup.LastTokenPos; rented) delayToken(pool.UseShiftedLocation(tokenTup, INT32(i, v), 0, -2)) pool.Return tokenTup true - // Split @>. and @@>. into two + // Split @>. and @@>. into two | RQUOTE_DOT (s, raw) -> let dotPos = LexbufState(tokenTup.EndPos.ShiftColumnBy(-1), tokenTup.EndPos, false) delayToken(let rented = pool.Rent() in rented.Token <- DOT; rented.LexbufState <- dotPos; rented.LastTokenPos <- tokenTup.LastTokenPos; rented) @@ -2464,26 +2464,26 @@ type LexFilterImpl ( true | MINUS | PLUS_MINUS_OP _ | PERCENT_OP _ | AMP | AMP_AMP - when ((match tokenTup.Token with - | PLUS_MINUS_OP s -> (s = "+") || (s = "+.") || (s = "-.") - | PERCENT_OP s -> (s = "%") || (s = "%%") + when ((match tokenTup.Token with + | PLUS_MINUS_OP s -> (s = "+") || (s = "+.") || (s = "-.") + | PERCENT_OP s -> (s = "%") || (s = "%%") | _ -> true) && - nextTokenIsAdjacent tokenTup && + nextTokenIsAdjacent tokenTup && not (prevWasAtomicEnd && (tokenTup.LastTokenPos = startPosOfTokenTup tokenTup))) -> - let plus = - match tokenTup.Token with - | PLUS_MINUS_OP s -> (s = "+") + let plus = + match tokenTup.Token with + | PLUS_MINUS_OP s -> (s = "+") | _ -> false - let plusOrMinus = - match tokenTup.Token with - | PLUS_MINUS_OP s -> (s = "+") - | MINUS -> true + let plusOrMinus = + match tokenTup.Token with + | PLUS_MINUS_OP s -> (s = "+") + | MINUS -> true | _ -> false let nextTokenTup = popNextTokenTup() /// Merge the location of the prefix token and the literal - let delayMergedToken tok = + let delayMergedToken tok = let rented = pool.Rent() rented.Token <- tok rented.LexbufState <- LexbufState(tokenTup.LexbufState.StartPos, nextTokenTup.LexbufState.EndPos, nextTokenTup.LexbufState.PastEOF) @@ -2492,22 +2492,22 @@ type LexFilterImpl ( pool.Return nextTokenTup pool.Return tokenTup - let noMerge() = - let tokenName = - match tokenTup.Token with + let noMerge() = + let tokenName = + match tokenTup.Token with | PLUS_MINUS_OP s | PERCENT_OP s -> s | AMP -> "&" | AMP_AMP -> "&&" - | MINUS -> "-" - | _ -> failwith "unreachable" + | MINUS -> "-" + | _ -> failwith "unreachable" let token = ADJACENT_PREFIX_OP tokenName - delayToken nextTokenTup + delayToken nextTokenTup delayToken (pool.UseLocation(tokenTup, token)) pool.Return tokenTup - if plusOrMinus then - match nextTokenTup.Token with + if plusOrMinus then + match nextTokenTup.Token with | INT8(v, bad) -> delayMergedToken(INT8((if plus then v else -v), (plus && bad))) // note: '-' makes a 'bad' max int 'good'. '+' does not | INT16(v, bad) -> delayMergedToken(INT16((if plus then v else -v), (plus && bad))) // note: '-' makes a 'bad' max int 'good'. '+' does not | INT32(v, bad) -> delayMergedToken(INT32((if plus then v else -v), (plus && bad))) // note: '-' makes a 'bad' max int 'good'. 
'+' does not @@ -2523,20 +2523,20 @@ type LexFilterImpl ( noMerge() true - | _ -> + | _ -> false - - and pushCtxtSeqBlock(addBlockBegin, addBlockEnd) = pushCtxtSeqBlockAt (peekNextTokenTup(), addBlockBegin, addBlockEnd) - and pushCtxtSeqBlockAt(p: TokenTup, addBlockBegin, addBlockEnd) = + + and pushCtxtSeqBlock(addBlockBegin, addBlockEnd) = pushCtxtSeqBlockAt (peekNextTokenTup(), addBlockBegin, addBlockEnd) + and pushCtxtSeqBlockAt(p: TokenTup, addBlockBegin, addBlockEnd) = if addBlockBegin then if debug then dprintf "--> insert OBLOCKBEGIN \n" delayToken(pool.UseLocation(p, OBLOCKBEGIN)) - pushCtxt p (CtxtSeqBlock(FirstInSeqBlock, startPosOfTokenTup p, addBlockEnd)) + pushCtxt p (CtxtSeqBlock(FirstInSeqBlock, startPosOfTokenTup p, addBlockEnd)) - let rec swTokenFetch() = + let rec swTokenFetch() = let tokenTup = popNextTokenTup() let tokenReplaced = rulesForBothSoftWhiteAndHardWhite tokenTup - if tokenReplaced then swTokenFetch() + if tokenReplaced then swTokenFetch() else let lexbufState = tokenTup.LexbufState let tok = tokenTup.Token @@ -2544,33 +2544,33 @@ type LexFilterImpl ( returnToken lexbufState tok //---------------------------------------------------------------------------- - // Part VI. Publish the new lexer function. + // Part VI. Publish the new lexer function. //-------------------------------------------------------------------------- member _.LexBuffer = lexbuf - member _.GetToken() = - if not initialized then + member _.GetToken() = + if not initialized then let _firstTokenTup = peekInitial() () if indentationSyntaxStatus.Status - then hwTokenFetch true + then hwTokenFetch true else swTokenFetch() - + // LexFilterImpl does the majority of the work for offsides rules and other magic. // LexFilter just wraps it with light post-processing that introduces a few more 'coming soon' symbols, to // make it easier for the parser to 'look ahead' and safely shift tokens in a number of recovery scenarios. -type LexFilter (indentationSyntaxStatus: IndentationAwareSyntaxStatus, compilingFSharpCore, lexer, lexbuf: UnicodeLexing.Lexbuf) = +type LexFilter (indentationSyntaxStatus: IndentationAwareSyntaxStatus, compilingFSharpCore, lexer, lexbuf: UnicodeLexing.Lexbuf) = let inner = LexFilterImpl(indentationSyntaxStatus, compilingFSharpCore, lexer, lexbuf) // We don't interact with lexbuf state at all, any inserted tokens have same state/location as the real one read, so // we don't have to do any of the wrapped lexbuf magic that you see in LexFilterImpl. let delayedStack = Stack() - let delayToken tok = delayedStack.Push tok + let delayToken tok = delayedStack.Push tok - let popNextToken() = - if delayedStack.Count > 0 then + let popNextToken() = + if delayedStack.Count > 0 then let tokenTup = delayedStack.Pop() tokenTup else @@ -2582,18 +2582,18 @@ type LexFilter (indentationSyntaxStatus: IndentationAwareSyntaxStatus, compiling for i in 1..6 do delayToken comingSoon - member _.LexBuffer = inner.LexBuffer + member _.LexBuffer = inner.LexBuffer - member lexer.GetToken () = + member lexer.GetToken () = let token = popNextToken() match token with - | RBRACE _ -> + | RBRACE _ -> insertComingSoonTokens RBRACE_COMING_SOON RBRACE_IS_HERE lexer.GetToken() - | RPAREN -> + | RPAREN -> insertComingSoonTokens RPAREN_COMING_SOON RPAREN_IS_HERE lexer.GetToken() - | OBLOCKEND -> + | OBLOCKEND -> insertComingSoonTokens OBLOCKEND_COMING_SOON OBLOCKEND_IS_HERE lexer.GetToken() | _ -> token -- GitLab
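// Illustrative sketch, not part of LexFilter.fs (entered in F# Interactive;
// names are invented for the example): the ';;' reset rule above terminates
// every open context, so each entry below starts from a fresh offside stack.
let first = 1;;
let second =
    first + 1;;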
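// Illustrative sketch, not part of LexFilter.fs (names are invented for the
// example): the CtxtSeqBlock rules above separate aligned items with OBLOCKSEP
// and grant leading infix tokens a small grace margin, so a continuation line
// may sit slightly left of the block without closing it.
let offsideSketch =
    let a = 1                       // aligned items: OBLOCKSEP inserted between them
    let b = 2
    let viaPipe =
        [ a; b ]
        |> List.map ((+) 1)         // '|>' continues the expression on the next line
        |> List.sum
    let viaGrace =
        a + b
      + a                           // leading '+' within the grace margin still continues the expression
    viaPipe + viaGrace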
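// Illustrative sketch, not part of LexFilter.fs (names are invented for the
// example): the rulesForBothSoftWhiteAndHardWhite hunks above re-split and
// merge adjacent tokens so the snippets below tokenize as intended.
let rangeSplit    = [ 1..10 ]       // INT32_DOT_DOT "1.." is split back into "1" and ".."
let parenAdjacent = id(3)           // ident immediately followed by '(' : HIGH_PRECEDENCE_PAREN_APP
let typarAdjacent = typeof<int>     // ident immediately followed by '<...>' : HIGH_PRECEDENCE_TYAPP
let mergedMinus   = -2147483648     // adjacent '-' is merged into the literal ("'-' makes a 'bad' max int 'good'")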