From 9b7edb1699a31c837aac90357092718c8771e666 Mon Sep 17 00:00:00 2001 From: Vasily Kirichenko Date: Fri, 23 Dec 2016 13:59:39 +0300 Subject: [PATCH 01/16] use Roslyn line numbers only in lexer cache --- .../src/FSharp.Editor/Common/CommonHelpers.fs | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/vsintegration/src/FSharp.Editor/Common/CommonHelpers.fs b/vsintegration/src/FSharp.Editor/Common/CommonHelpers.fs index 4ee9660ffe0..587a8de039e 100644 --- a/vsintegration/src/FSharp.Editor/Common/CommonHelpers.fs +++ b/vsintegration/src/FSharp.Editor/Common/CommonHelpers.fs @@ -140,13 +140,13 @@ module internal CommonHelpers = // Go backwards to find the last cached scanned line that is valid let scanStartLine = let mutable i = startLine - while i > 0 && (match sourceTextData.[i-1] with Some data -> not (data.IsValid(lines.[i])) | None -> true) do + while i > 0 && (match sourceTextData.[i] with Some data -> not (data.IsValid(lines.[i])) | None -> true) do i <- i - 1 i // Rescan the lines if necessary and report the information let result = new List() - let mutable lexState = if scanStartLine = 0 then 0L else sourceTextData.[scanStartLine - 1].Value.LexStateAtEndOfLine + let mutable lexState = if scanStartLine = 0 then 0L else sourceTextData.[scanStartLine].Value.LexStateAtEndOfLine for i = scanStartLine to endLine do cancellationToken.ThrowIfCancellationRequested() @@ -319,7 +319,6 @@ module internal CommonHelpers = try let textLine = sourceText.Lines.GetLineFromPosition(position) let textLinePos = sourceText.Lines.GetLinePosition(position) - let lineNumber = textLinePos.Line + 1 // FCS line number let sourceTokenizer = FSharpSourceTokenizer(defines, Some fileName) let lines = sourceText.Lines // We keep incremental data per-document. When text changes we correlate text line-by-line (by hash codes of lines) @@ -327,12 +326,12 @@ module internal CommonHelpers = // Go backwards to find the last cached scanned line that is valid let scanStartLine = - let mutable i = lineNumber - while i > 0 && (match sourceTextData.[i-1] with Some data -> not (data.IsValid(lines.[i])) | None -> true) do + let mutable i = textLinePos.Line + while i >= 0 && (match sourceTextData.[i] with Some data -> not (data.IsValid(lines.[i])) | None -> true) do i <- i - 1 i - let lexState = if scanStartLine = 0 then 0L else sourceTextData.[scanStartLine - 1].Value.LexStateAtEndOfLine + let lexState = if scanStartLine = 0 then 0L else sourceTextData.[scanStartLine].Value.LexStateAtEndOfLine let lineContents = textLine.Text.ToString(textLine.Span) let lineData = @@ -340,13 +339,13 @@ module internal CommonHelpers = // 1. the line starts at the same overall position // 2. the hash codes match // 3. 
the start-of-line lex states are the same - match sourceTextData.[lineNumber] with + match sourceTextData.[textLinePos.Line] with | Some data when data.IsValid(textLine) && data.LexStateAtStartOfLine = lexState -> data | _ -> // Otherwise, we recompute let newData = scanSourceLine(sourceTokenizer, textLine, lineContents, lexState) - sourceTextData.[lineNumber] <- Some newData + sourceTextData.[textLinePos.Line] <- Some newData newData getSymbolFromTokens(fileName, lineData.Tokens, textLinePos, lineContents, lookupKind) From 60910aba9f0ca1f2e2607ce15672c68d33c56be3 Mon Sep 17 00:00:00 2001 From: Vasily Kirichenko Date: Sat, 24 Dec 2016 17:46:49 +0300 Subject: [PATCH 02/16] fix and refactor getSymbolAtPosition --- .../src/FSharp.Editor/Common/CommonHelpers.fs | 134 ++++++++---------- 1 file changed, 59 insertions(+), 75 deletions(-) diff --git a/vsintegration/src/FSharp.Editor/Common/CommonHelpers.fs b/vsintegration/src/FSharp.Editor/Common/CommonHelpers.fs index 587a8de039e..dc32f9da609 100644 --- a/vsintegration/src/FSharp.Editor/Common/CommonHelpers.fs +++ b/vsintegration/src/FSharp.Editor/Common/CommonHelpers.fs @@ -73,6 +73,13 @@ module internal CommonHelpers = data.[i] <- None i <- i + 1 + /// Go backwards to find the last cached scanned line that is valid. + member x.GetLastValidCachedLine (startLine: int, sourceLines: TextLineCollection) : int = + let mutable i = startLine + while i > 0 && (match x.[i] with Some data -> not (data.IsValid(sourceLines.[i])) | None -> true) do + i <- i - 1 + i + let private dataCache = ConditionalWeakTable() let internal compilerTokenToRoslynToken(colorKind: FSharpTokenColorKind) : string = @@ -126,54 +133,41 @@ module internal CommonHelpers = SourceLineData(textLine.Start, lexState, previousLexState.Value, lineContents.GetHashCode(), classifiedSpans, List.ofSeq tokens) - let getColorizationData(documentKey: DocumentId, sourceText: SourceText, textSpan: TextSpan, fileName: string option, defines: string list, - cancellationToken: CancellationToken) : List = - try - let sourceTokenizer = FSharpSourceTokenizer(defines, fileName) - let lines = sourceText.Lines - // We keep incremental data per-document. When text changes we correlate text line-by-line (by hash codes of lines) - let sourceTextData = dataCache.GetValue(documentKey, fun key -> SourceTextData(lines.Count)) - - let startLine = lines.GetLineFromPosition(textSpan.Start).LineNumber - let endLine = lines.GetLineFromPosition(textSpan.End).LineNumber + let private getSourceLineDatas(documentKey: DocumentId, sourceText: SourceText, startLine: int, endLine: int, fileName: string option, defines: string list, + cancellationToken: CancellationToken) : ResizeArray = + let sourceTokenizer = FSharpSourceTokenizer(defines, fileName) + let lines = sourceText.Lines + // We keep incremental data per-document. 
When text changes we correlate text line-by-line (by hash codes of lines) + let sourceTextData = dataCache.GetValue(documentKey, fun key -> SourceTextData(lines.Count)) + let scanStartLine = sourceTextData.GetLastValidCachedLine(startLine, lines) - // Go backwards to find the last cached scanned line that is valid - let scanStartLine = - let mutable i = startLine - while i > 0 && (match sourceTextData.[i] with Some data -> not (data.IsValid(lines.[i])) | None -> true) do - i <- i - 1 - i + // Rescan the lines if necessary and report the information + let result = ResizeArray() + let mutable lexState = if scanStartLine = 0 then 0L else sourceTextData.[scanStartLine].Value.LexStateAtEndOfLine + + for i = scanStartLine to endLine do + cancellationToken.ThrowIfCancellationRequested() + let textLine = lines.[i] + let lineContents = textLine.Text.ToString(textLine.Span) + + let lineData = + // We can reuse the old data when + // 1. the line starts at the same overall position + // 2. the hash codes match + // 3. the start-of-line lex states are the same + match sourceTextData.[i] with + | Some data when data.IsValid(textLine) && data.LexStateAtStartOfLine = lexState -> + data + | _ -> + // Otherwise, we recompute + let newData = scanSourceLine(sourceTokenizer, textLine, lineContents, lexState) + sourceTextData.[i] <- Some newData + newData - // Rescan the lines if necessary and report the information - let result = new List() - let mutable lexState = if scanStartLine = 0 then 0L else sourceTextData.[scanStartLine].Value.LexStateAtEndOfLine - - for i = scanStartLine to endLine do - cancellationToken.ThrowIfCancellationRequested() - let textLine = lines.[i] - let lineContents = textLine.Text.ToString(textLine.Span) - - let lineData = - // We can reuse the old data when - // 1. the line starts at the same overall position - // 2. the hash codes match - // 3. 
the start-of-line lex states are the same - match sourceTextData.[i] with - | Some data when data.IsValid(textLine) && data.LexStateAtStartOfLine = lexState -> - data - | _ -> - // Otherwise, we recompute - let newData = scanSourceLine(sourceTokenizer, textLine, lineContents, lexState) - sourceTextData.[i] <- Some newData - newData - - lexState <- lineData.LexStateAtEndOfLine - - if startLine <= i then - result.AddRange(lineData.ClassifiedSpans |> Seq.filter(fun token -> - textSpan.Contains(token.TextSpan.Start) || - textSpan.Contains(token.TextSpan.End - 1) || - (token.TextSpan.Start <= textSpan.Start && textSpan.End <= token.TextSpan.End))) + lexState <- lineData.LexStateAtEndOfLine + + if startLine <= i then + result.Add(lineData) // If necessary, invalidate all subsequent lines after endLine if endLine < lines.Count - 1 then @@ -183,6 +177,22 @@ module internal CommonHelpers = sourceTextData.ClearFrom (endLine+1) | None -> () + result + + let getColorizationData(documentKey: DocumentId, sourceText: SourceText, textSpan: TextSpan, fileName: string option, defines: string list, + cancellationToken: CancellationToken) : List = + try + let lines = sourceText.Lines + let startLine = lines.GetLineFromPosition(textSpan.Start).LineNumber + let endLine = lines.GetLineFromPosition(textSpan.End).LineNumber + + // Rescan the lines if necessary and report the information + let result = new List() + for lineData in getSourceLineDatas(documentKey, sourceText, startLine, endLine, fileName, defines, cancellationToken) do + result.AddRange(lineData.ClassifiedSpans |> Seq.filter(fun token -> + textSpan.Contains(token.TextSpan.Start) || + textSpan.Contains(token.TextSpan.End - 1) || + (token.TextSpan.Start <= textSpan.Start && textSpan.End <= token.TextSpan.End))) result with | :? System.OperationCanceledException -> reraise() @@ -319,36 +329,10 @@ module internal CommonHelpers = try let textLine = sourceText.Lines.GetLineFromPosition(position) let textLinePos = sourceText.Lines.GetLinePosition(position) - let sourceTokenizer = FSharpSourceTokenizer(defines, Some fileName) - let lines = sourceText.Lines - // We keep incremental data per-document. When text changes we correlate text line-by-line (by hash codes of lines) - let sourceTextData = dataCache.GetValue(documentKey, fun key -> SourceTextData(lines.Count)) - - // Go backwards to find the last cached scanned line that is valid - let scanStartLine = - let mutable i = textLinePos.Line - while i >= 0 && (match sourceTextData.[i] with Some data -> not (data.IsValid(lines.[i])) | None -> true) do - i <- i - 1 - i - - let lexState = if scanStartLine = 0 then 0L else sourceTextData.[scanStartLine].Value.LexStateAtEndOfLine + let sourceLineDatas = getSourceLineDatas(documentKey, sourceText, textLinePos.Line, textLinePos.Line, Some fileName, defines, CancellationToken.None) + assert(sourceLineDatas.Count = 1) let lineContents = textLine.Text.ToString(textLine.Span) - - let lineData = - // We can reuse the old data when - // 1. the line starts at the same overall position - // 2. the hash codes match - // 3. 
the start-of-line lex states are the same - match sourceTextData.[textLinePos.Line] with - | Some data when data.IsValid(textLine) && data.LexStateAtStartOfLine = lexState -> - data - | _ -> - // Otherwise, we recompute - let newData = scanSourceLine(sourceTokenizer, textLine, lineContents, lexState) - sourceTextData.[textLinePos.Line] <- Some newData - newData - - getSymbolFromTokens(fileName, lineData.Tokens, textLinePos, lineContents, lookupKind) + getSymbolFromTokens(fileName, sourceLineDatas.[0].Tokens, textLinePos, lineContents, lookupKind) with | :? System.OperationCanceledException -> reraise() | ex -> From 0306e154c5821e611927f7ec1b807b822e0fc485 Mon Sep 17 00:00:00 2001 From: Vasily Kirichenko Date: Sun, 25 Dec 2016 19:11:33 +0300 Subject: [PATCH 03/16] add TrailingSemicolon analyzer and code fixer --- ...eywordToDisposableConstructorInvocation.fs | 0 .../AddOpen.fs} | 0 .../PrefixUnusedValueWithUnderscore.fs | 0 .../CodeFixes/RemoveTrailingSemicolon.fs | 38 +++++++++++++ .../Diagnostics/TrailingSemicolonAnalyzer.fs | 56 +++++++++++++++++++ .../src/FSharp.Editor/FSharp.Editor.fsproj | 10 ++-- 6 files changed, 100 insertions(+), 4 deletions(-) rename vsintegration/src/FSharp.Editor/{CodeFix => CodeFixes}/AddNewKeywordToDisposableConstructorInvocation.fs (100%) rename vsintegration/src/FSharp.Editor/{CodeFix/AddOpenCodeFixProvider.fs => CodeFixes/AddOpen.fs} (100%) rename vsintegration/src/FSharp.Editor/{CodeFix => CodeFixes}/PrefixUnusedValueWithUnderscore.fs (100%) create mode 100644 vsintegration/src/FSharp.Editor/CodeFixes/RemoveTrailingSemicolon.fs create mode 100644 vsintegration/src/FSharp.Editor/Diagnostics/TrailingSemicolonAnalyzer.fs diff --git a/vsintegration/src/FSharp.Editor/CodeFix/AddNewKeywordToDisposableConstructorInvocation.fs b/vsintegration/src/FSharp.Editor/CodeFixes/AddNewKeywordToDisposableConstructorInvocation.fs similarity index 100% rename from vsintegration/src/FSharp.Editor/CodeFix/AddNewKeywordToDisposableConstructorInvocation.fs rename to vsintegration/src/FSharp.Editor/CodeFixes/AddNewKeywordToDisposableConstructorInvocation.fs diff --git a/vsintegration/src/FSharp.Editor/CodeFix/AddOpenCodeFixProvider.fs b/vsintegration/src/FSharp.Editor/CodeFixes/AddOpen.fs similarity index 100% rename from vsintegration/src/FSharp.Editor/CodeFix/AddOpenCodeFixProvider.fs rename to vsintegration/src/FSharp.Editor/CodeFixes/AddOpen.fs diff --git a/vsintegration/src/FSharp.Editor/CodeFix/PrefixUnusedValueWithUnderscore.fs b/vsintegration/src/FSharp.Editor/CodeFixes/PrefixUnusedValueWithUnderscore.fs similarity index 100% rename from vsintegration/src/FSharp.Editor/CodeFix/PrefixUnusedValueWithUnderscore.fs rename to vsintegration/src/FSharp.Editor/CodeFixes/PrefixUnusedValueWithUnderscore.fs diff --git a/vsintegration/src/FSharp.Editor/CodeFixes/RemoveTrailingSemicolon.fs b/vsintegration/src/FSharp.Editor/CodeFixes/RemoveTrailingSemicolon.fs new file mode 100644 index 00000000000..8b9a1503eec --- /dev/null +++ b/vsintegration/src/FSharp.Editor/CodeFixes/RemoveTrailingSemicolon.fs @@ -0,0 +1,38 @@ +// Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. 
+ +namespace rec Microsoft.VisualStudio.FSharp.Editor + +open System.Composition +open System.Collections.Immutable +open System.Threading +open System.Threading.Tasks + +open Microsoft.CodeAnalysis +open Microsoft.CodeAnalysis.Text +open Microsoft.CodeAnalysis.CodeFixes +open Microsoft.CodeAnalysis.CodeActions +open Microsoft.CodeAnalysis.Editor +open Microsoft.CodeAnalysis.Host.Mef + +[] +type internal RemoveTrailingSemicolonCodeFixProvider() = + inherit CodeFixProvider() + let fixableDiagnosticIds = [TrailingSemicolonDiagnosticAnalyzer.DiagnosticId] + + let createCodeFix (title: string, context: CodeFixContext, textChange: TextChange) = + CodeAction.Create( + title, + (fun (cancellationToken: CancellationToken) -> + async { + let! sourceText = context.Document.GetTextAsync() |> Async.AwaitTask + return context.Document.WithText(sourceText.WithChanges(textChange)) + } |> CommonRoslynHelpers.StartAsyncAsTask(cancellationToken)), + title) + + override __.FixableDiagnosticIds = fixableDiagnosticIds.ToImmutableArray() + + override __.RegisterCodeFixesAsync context : Task = + async { + let diagnostics = (context.Diagnostics |> Seq.filter (fun x -> fixableDiagnosticIds |> List.contains x.Id)).ToImmutableArray() + context.RegisterCodeFix(createCodeFix("Remove trailing semicolon", context, TextChange(context.Span, "")), diagnostics) + } |> CommonRoslynHelpers.StartAsyncUnitAsTask(context.CancellationToken) \ No newline at end of file diff --git a/vsintegration/src/FSharp.Editor/Diagnostics/TrailingSemicolonAnalyzer.fs b/vsintegration/src/FSharp.Editor/Diagnostics/TrailingSemicolonAnalyzer.fs new file mode 100644 index 00000000000..d218ccad033 --- /dev/null +++ b/vsintegration/src/FSharp.Editor/Diagnostics/TrailingSemicolonAnalyzer.fs @@ -0,0 +1,56 @@ +// Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. + +namespace rec Microsoft.VisualStudio.FSharp.Editor + +open System.Composition +open System.Collections.Immutable +open System.Threading +open System.Threading.Tasks + +open Microsoft.CodeAnalysis +open Microsoft.CodeAnalysis.Text +open Microsoft.CodeAnalysis.Diagnostics + +[] +type internal TrailingSemicolonDiagnosticAnalyzer() = + inherit DocumentDiagnosticAnalyzer() + + static member DiagnosticId = "TrailingSemicolon" + + override __.SupportedDiagnostics = + [DiagnosticDescriptor(TrailingSemicolonDiagnosticAnalyzer.DiagnosticId, "Remove trailing semicolon", "", "", DiagnosticSeverity.Info, true, "", null)].ToImmutableArray() + + override this.AnalyzeSyntaxAsync(document: Document, cancellationToken: CancellationToken) = + async { + let! sourceText = document.GetTextAsync() |> Async.AwaitTask + return + (sourceText.Lines + |> Seq.choose (fun line -> + let lineStr = line.ToString() + let trimmedLineStr = lineStr.TrimEnd() + match trimmedLineStr.LastIndexOf ';' with + | -1 -> None + | semicolonIndex when semicolonIndex = trimmedLineStr.Length - 1 -> + let id = "TrailingSemicolon" + let emptyString = LocalizableString.op_Implicit "" + let description = LocalizableString.op_Implicit "Trailing semicolon." 
+ let severity = DiagnosticSeverity.Info + let descriptor = DiagnosticDescriptor(id, emptyString, description, "", severity, true, emptyString, "", null) + + let linePositionSpan = + LinePositionSpan( + LinePosition(line.LineNumber, semicolonIndex), + LinePosition(line.LineNumber, semicolonIndex + 1)) + + let textSpan = sourceText.Lines.GetTextSpan(linePositionSpan) + let location = Location.Create(document.FilePath, textSpan, linePositionSpan) + Some(Diagnostic.Create(descriptor, location)) + | _ -> None) + ).ToImmutableArray() + } |> CommonRoslynHelpers.StartAsyncAsTask cancellationToken + + override this.AnalyzeSemanticsAsync(_, _) = Task.FromResult(ImmutableArray.Empty) + + interface IBuiltInAnalyzer with + member __.OpenFileOnly _ = true + member __.GetAnalyzerCategory() = DiagnosticAnalyzerCategory.SemanticDocumentAnalysis \ No newline at end of file diff --git a/vsintegration/src/FSharp.Editor/FSharp.Editor.fsproj b/vsintegration/src/FSharp.Editor/FSharp.Editor.fsproj index e993bd37e75..c3ae631a285 100644 --- a/vsintegration/src/FSharp.Editor/FSharp.Editor.fsproj +++ b/vsintegration/src/FSharp.Editor/FSharp.Editor.fsproj @@ -46,7 +46,8 @@ - + + @@ -60,9 +61,10 @@ - - - + + + + From af9392a703927d475c2d26f413f1b5ffcafd50ce Mon Sep 17 00:00:00 2001 From: Vasily Kirichenko Date: Sun, 25 Dec 2016 19:14:43 +0300 Subject: [PATCH 04/16] move code fix action to resources --- .../src/FSharp.Editor/CodeFixes/RemoveTrailingSemicolon.fs | 2 +- vsintegration/src/FSharp.Editor/FSharp.Editor.resx | 3 +++ vsintegration/src/FSharp.Editor/srFSharp.Editor.fs | 1 + 3 files changed, 5 insertions(+), 1 deletion(-) diff --git a/vsintegration/src/FSharp.Editor/CodeFixes/RemoveTrailingSemicolon.fs b/vsintegration/src/FSharp.Editor/CodeFixes/RemoveTrailingSemicolon.fs index 8b9a1503eec..8d0d3085b8c 100644 --- a/vsintegration/src/FSharp.Editor/CodeFixes/RemoveTrailingSemicolon.fs +++ b/vsintegration/src/FSharp.Editor/CodeFixes/RemoveTrailingSemicolon.fs @@ -34,5 +34,5 @@ type internal RemoveTrailingSemicolonCodeFixProvider() = override __.RegisterCodeFixesAsync context : Task = async { let diagnostics = (context.Diagnostics |> Seq.filter (fun x -> fixableDiagnosticIds |> List.contains x.Id)).ToImmutableArray() - context.RegisterCodeFix(createCodeFix("Remove trailing semicolon", context, TextChange(context.Span, "")), diagnostics) + context.RegisterCodeFix(createCodeFix(SR.RemoveTrailingSemicolon.Value, context, TextChange(context.Span, "")), diagnostics) } |> CommonRoslynHelpers.StartAsyncUnitAsTask(context.CancellationToken) \ No newline at end of file diff --git a/vsintegration/src/FSharp.Editor/FSharp.Editor.resx b/vsintegration/src/FSharp.Editor/FSharp.Editor.resx index 20aff780277..2c78a368fa3 100644 --- a/vsintegration/src/FSharp.Editor/FSharp.Editor.resx +++ b/vsintegration/src/FSharp.Editor/FSharp.Editor.resx @@ -123,6 +123,9 @@ Prefix value name with underscore + + Remove trailing semicolon + Rename value to '_' diff --git a/vsintegration/src/FSharp.Editor/srFSharp.Editor.fs b/vsintegration/src/FSharp.Editor/srFSharp.Editor.fs index adb0de5696e..817fd055502 100644 --- a/vsintegration/src/FSharp.Editor/srFSharp.Editor.fs +++ b/vsintegration/src/FSharp.Editor/srFSharp.Editor.fs @@ -16,3 +16,4 @@ module SR = let AddNewKeyword = lazy ( GetString "AddNewKeyword" ) // "Add 'new' keyword" let PrefixValueNameWithUnderscore = lazy ( GetString "PrefixValueNameWithUnderscore" ) // "Prefix value name with underscore" let RenameValueToUnderscore = lazy ( GetString "RenameValueToUnderscore" ) // "Rename value to 
'_'" + let RemoveTrailingSemicolon = lazy ( GetString "RemoveTrailingSemicolon" ) // "Remove trailing semicolon" From 248d4dff453349173ef9c8ed487e6641a9818158 Mon Sep 17 00:00:00 2001 From: Vasily Kirichenko Date: Sun, 25 Dec 2016 20:58:12 +0300 Subject: [PATCH 05/16] extract lexer related logic to a MEF class Lexer --- .../Classification/ColorizationService.fs | 5 +- .../src/FSharp.Editor/CodeFixes/AddOpen.fs | 5 +- .../Commands/HelpContextService.fs | 5 +- .../src/FSharp.Editor/Common/CommonHelpers.fs | 363 +----------------- .../src/FSharp.Editor/Common/Lexer.fs | 326 ++++++++++++++++ .../Completion/CompletionProvider.fs | 19 +- .../Debugging/LanguageDebugInfoService.fs | 5 +- .../DocumentHighlightsService.fs | 14 +- .../src/FSharp.Editor/FSharp.Editor.fsproj | 3 +- .../InlineRename/InlineRenameService.fs | 9 +- .../Navigation/FindReferencesService.fs | 5 +- .../Navigation/GoToDefinitionService.fs | 10 +- .../QuickInfo/QuickInfoProvider.fs | 11 +- 13 files changed, 398 insertions(+), 382 deletions(-) create mode 100644 vsintegration/src/FSharp.Editor/Common/Lexer.fs diff --git a/vsintegration/src/FSharp.Editor/Classification/ColorizationService.fs b/vsintegration/src/FSharp.Editor/Classification/ColorizationService.fs index 279271fb2e4..b945ebc415c 100644 --- a/vsintegration/src/FSharp.Editor/Classification/ColorizationService.fs +++ b/vsintegration/src/FSharp.Editor/Classification/ColorizationService.fs @@ -33,7 +33,8 @@ type internal FSharpColorizationService [] ( checkerProvider: FSharpCheckerProvider, - projectInfoManager: ProjectInfoManager + projectInfoManager: ProjectInfoManager, + lexer: Lexer ) = interface IEditorClassificationService with // Do not perform classification if we don't have project options (#defines matter) @@ -43,7 +44,7 @@ type internal FSharpColorizationService async { let defines = projectInfoManager.GetCompilationDefinesForEditingDocument(document) let! 
sourceText = document.GetTextAsync(cancellationToken) |> Async.AwaitTask - result.AddRange(CommonHelpers.getColorizationData(document.Id, sourceText, textSpan, Some(document.FilePath), defines, cancellationToken)) + result.AddRange(lexer.GetColorizationData(document.Id, sourceText, textSpan, Some(document.FilePath), defines, cancellationToken)) } |> CommonRoslynHelpers.StartAsyncUnitAsTask cancellationToken member this.AddSemanticClassificationsAsync(document: Document, textSpan: TextSpan, result: List, cancellationToken: CancellationToken) = diff --git a/vsintegration/src/FSharp.Editor/CodeFixes/AddOpen.fs b/vsintegration/src/FSharp.Editor/CodeFixes/AddOpen.fs index ffa5c791926..2ca54f536dc 100644 --- a/vsintegration/src/FSharp.Editor/CodeFixes/AddOpen.fs +++ b/vsintegration/src/FSharp.Editor/CodeFixes/AddOpen.fs @@ -105,7 +105,8 @@ type internal FSharpAddOpenCodeFixProvider ( checkerProvider: FSharpCheckerProvider, projectInfoManager: ProjectInfoManager, - assemblyContentProvider: AssemblyContentProvider + assemblyContentProvider: AssemblyContentProvider, + lexer: Lexer ) = inherit CodeFixProvider() let fixableDiagnosticIds = ["FS0039"] @@ -180,7 +181,7 @@ type internal FSharpAddOpenCodeFixProvider | Some parsedInput, FSharpCheckFileAnswer.Succeeded checkFileResults -> let textLinePos = sourceText.Lines.GetLinePosition context.Span.Start let defines = CompilerEnvironment.GetCompilationDefinesForEditing(context.Document.FilePath, options.OtherOptions |> Seq.toList) - let symbol = CommonHelpers.getSymbolAtPosition(context.Document.Id, sourceText, context.Span.Start, context.Document.FilePath, defines, SymbolLookupKind.Fuzzy) + let symbol = lexer.GetSymbolAtPosition(context.Document.Id, sourceText, context.Span.Start, context.Document.FilePath, defines, SymbolLookupKind.Fuzzy) match symbol with | Some symbol -> let pos = Pos.fromZ textLinePos.Line textLinePos.Character diff --git a/vsintegration/src/FSharp.Editor/Commands/HelpContextService.fs b/vsintegration/src/FSharp.Editor/Commands/HelpContextService.fs index 499171432db..6d77452351c 100644 --- a/vsintegration/src/FSharp.Editor/Commands/HelpContextService.fs +++ b/vsintegration/src/FSharp.Editor/Commands/HelpContextService.fs @@ -21,7 +21,8 @@ type internal FSharpHelpContextService [] ( checkerProvider: FSharpCheckerProvider, - projectInfoManager: ProjectInfoManager + projectInfoManager: ProjectInfoManager, + lexer: Lexer ) = static member GetHelpTerm(checker: FSharpChecker, sourceText : SourceText, fileName, options, span: TextSpan, tokens: List, textVersion) = async { @@ -112,7 +113,7 @@ type internal FSharpHelpContextService let! textVersion = document.GetTextVersionAsync(cancellationToken) |> Async.AwaitTask let defines = projectInfoManager.GetCompilationDefinesForEditingDocument(document) let textLine = sourceText.Lines.GetLineFromPosition(textSpan.Start) - let tokens = CommonHelpers.getColorizationData(document.Id, sourceText, textLine.Span, Some document.Name, defines, cancellationToken) + let tokens = lexer.GetColorizationData(document.Id, sourceText, textLine.Span, Some document.Name, defines, cancellationToken) let! 
keyword = FSharpHelpContextService.GetHelpTerm(checkerProvider.Checker, sourceText, document.FilePath, options, textSpan, tokens, textVersion.GetHashCode()) return defaultArg keyword String.Empty | None -> return String.Empty diff --git a/vsintegration/src/FSharp.Editor/Common/CommonHelpers.fs b/vsintegration/src/FSharp.Editor/Common/CommonHelpers.fs index dc32f9da609..cb6f47347fd 100644 --- a/vsintegration/src/FSharp.Editor/Common/CommonHelpers.fs +++ b/vsintegration/src/FSharp.Editor/Common/CommonHelpers.fs @@ -17,328 +17,7 @@ open Microsoft.FSharp.Compiler open Microsoft.FSharp.Compiler.SourceCodeServices open Microsoft.FSharp.Compiler.SourceCodeServices.ItemDescriptionIcons -[] -type internal LexerSymbolKind = - | Ident - | Operator - | GenericTypeParameter - | StaticallyResolvedTypeParameter - | Other - -type internal LexerSymbol = - { Kind: LexerSymbolKind - Line: int - LeftColumn: int - RightColumn: int - Text: string - FileName: string } - member x.Range: Range.range = - Range.mkRange x.FileName (Range.mkPos (x.Line + 1) x.LeftColumn) (Range.mkPos (x.Line + 1) x.RightColumn) - -[] -type internal SymbolLookupKind = - | Fuzzy - | ByRightColumn - | ByLongIdent - module internal CommonHelpers = - type private SourceLineData(lineStart: int, lexStateAtStartOfLine: FSharpTokenizerLexState, lexStateAtEndOfLine: FSharpTokenizerLexState, - hashCode: int, classifiedSpans: IReadOnlyList, tokens: FSharpTokenInfo list) = - member val LineStart = lineStart - member val LexStateAtStartOfLine = lexStateAtStartOfLine - member val LexStateAtEndOfLine = lexStateAtEndOfLine - member val HashCode = hashCode - member val ClassifiedSpans = classifiedSpans - member val Tokens = tokens - - member data.IsValid(textLine: TextLine) = - data.LineStart = textLine.Start && - let lineContents = textLine.Text.ToString(textLine.Span) - data.HashCode = lineContents.GetHashCode() - - type private SourceTextData(approxLines: int) = - let data = ResizeArray(approxLines) - let extendTo i = - if i >= data.Count then - data.Capacity <- i + 1 - for j in data.Count .. i do - data.Add(None) - member x.Item - with get (i:int) = extendTo i; data.[i] - and set (i:int) v = extendTo i; data.[i] <- v - - member x.ClearFrom(n) = - let mutable i = n - while i < data.Count && data.[i].IsSome do - data.[i] <- None - i <- i + 1 - - /// Go backwards to find the last cached scanned line that is valid. 
- member x.GetLastValidCachedLine (startLine: int, sourceLines: TextLineCollection) : int = - let mutable i = startLine - while i > 0 && (match x.[i] with Some data -> not (data.IsValid(sourceLines.[i])) | None -> true) do - i <- i - 1 - i - - let private dataCache = ConditionalWeakTable() - - let internal compilerTokenToRoslynToken(colorKind: FSharpTokenColorKind) : string = - match colorKind with - | FSharpTokenColorKind.Comment -> ClassificationTypeNames.Comment - | FSharpTokenColorKind.Identifier -> ClassificationTypeNames.Identifier - | FSharpTokenColorKind.Keyword -> ClassificationTypeNames.Keyword - | FSharpTokenColorKind.String -> ClassificationTypeNames.StringLiteral - | FSharpTokenColorKind.Text -> ClassificationTypeNames.Text - | FSharpTokenColorKind.UpperIdentifier -> ClassificationTypeNames.Identifier - | FSharpTokenColorKind.Number -> ClassificationTypeNames.NumericLiteral - | FSharpTokenColorKind.InactiveCode -> ClassificationTypeNames.ExcludedCode - | FSharpTokenColorKind.PreprocessorKeyword -> ClassificationTypeNames.PreprocessorKeyword - | FSharpTokenColorKind.Operator -> ClassificationTypeNames.Operator - | FSharpTokenColorKind.TypeName -> ClassificationTypeNames.ClassName - | FSharpTokenColorKind.Default - | _ -> ClassificationTypeNames.Text - - let private scanSourceLine(sourceTokenizer: FSharpSourceTokenizer, textLine: TextLine, lineContents: string, lexState: FSharpTokenizerLexState) : SourceLineData = - let colorMap = Array.create textLine.Span.Length ClassificationTypeNames.Text - let lineTokenizer = sourceTokenizer.CreateLineTokenizer(lineContents) - let tokens = ResizeArray() - - let scanAndColorNextToken(lineTokenizer: FSharpLineTokenizer, lexState: Ref) : Option = - let tokenInfoOption, nextLexState = lineTokenizer.ScanToken(lexState.Value) - lexState.Value <- nextLexState - if tokenInfoOption.IsSome then - let classificationType = compilerTokenToRoslynToken(tokenInfoOption.Value.ColorClass) - for i = tokenInfoOption.Value.LeftColumn to tokenInfoOption.Value.RightColumn do - Array.set colorMap i classificationType - tokens.Add tokenInfoOption.Value - tokenInfoOption - - let previousLexState = ref lexState - let mutable tokenInfoOption = scanAndColorNextToken(lineTokenizer, previousLexState) - while tokenInfoOption.IsSome do - tokenInfoOption <- scanAndColorNextToken(lineTokenizer, previousLexState) - - let mutable startPosition = 0 - let mutable endPosition = startPosition - let classifiedSpans = new List() - - while startPosition < colorMap.Length do - let classificationType = colorMap.[startPosition] - endPosition <- startPosition - while endPosition < colorMap.Length && classificationType = colorMap.[endPosition] do - endPosition <- endPosition + 1 - let textSpan = new TextSpan(textLine.Start + startPosition, endPosition - startPosition) - classifiedSpans.Add(new ClassifiedSpan(classificationType, textSpan)) - startPosition <- endPosition - - SourceLineData(textLine.Start, lexState, previousLexState.Value, lineContents.GetHashCode(), classifiedSpans, List.ofSeq tokens) - - let private getSourceLineDatas(documentKey: DocumentId, sourceText: SourceText, startLine: int, endLine: int, fileName: string option, defines: string list, - cancellationToken: CancellationToken) : ResizeArray = - let sourceTokenizer = FSharpSourceTokenizer(defines, fileName) - let lines = sourceText.Lines - // We keep incremental data per-document. 
When text changes we correlate text line-by-line (by hash codes of lines) - let sourceTextData = dataCache.GetValue(documentKey, fun key -> SourceTextData(lines.Count)) - let scanStartLine = sourceTextData.GetLastValidCachedLine(startLine, lines) - - // Rescan the lines if necessary and report the information - let result = ResizeArray() - let mutable lexState = if scanStartLine = 0 then 0L else sourceTextData.[scanStartLine].Value.LexStateAtEndOfLine - - for i = scanStartLine to endLine do - cancellationToken.ThrowIfCancellationRequested() - let textLine = lines.[i] - let lineContents = textLine.Text.ToString(textLine.Span) - - let lineData = - // We can reuse the old data when - // 1. the line starts at the same overall position - // 2. the hash codes match - // 3. the start-of-line lex states are the same - match sourceTextData.[i] with - | Some data when data.IsValid(textLine) && data.LexStateAtStartOfLine = lexState -> - data - | _ -> - // Otherwise, we recompute - let newData = scanSourceLine(sourceTokenizer, textLine, lineContents, lexState) - sourceTextData.[i] <- Some newData - newData - - lexState <- lineData.LexStateAtEndOfLine - - if startLine <= i then - result.Add(lineData) - - // If necessary, invalidate all subsequent lines after endLine - if endLine < lines.Count - 1 then - match sourceTextData.[endLine+1] with - | Some data -> - if data.LexStateAtStartOfLine <> lexState then - sourceTextData.ClearFrom (endLine+1) - | None -> () - - result - - let getColorizationData(documentKey: DocumentId, sourceText: SourceText, textSpan: TextSpan, fileName: string option, defines: string list, - cancellationToken: CancellationToken) : List = - try - let lines = sourceText.Lines - let startLine = lines.GetLineFromPosition(textSpan.Start).LineNumber - let endLine = lines.GetLineFromPosition(textSpan.End).LineNumber - - // Rescan the lines if necessary and report the information - let result = new List() - for lineData in getSourceLineDatas(documentKey, sourceText, startLine, endLine, fileName, defines, cancellationToken) do - result.AddRange(lineData.ClassifiedSpans |> Seq.filter(fun token -> - textSpan.Contains(token.TextSpan.Start) || - textSpan.Contains(token.TextSpan.End - 1) || - (token.TextSpan.Start <= textSpan.Start && textSpan.End <= token.TextSpan.End))) - result - with - | :? System.OperationCanceledException -> reraise() - | ex -> - Assert.Exception(ex) - List() - - type private DraftToken = - { Kind: LexerSymbolKind - Token: FSharpTokenInfo - RightColumn: int } - static member inline Create kind token = - { Kind = kind; Token = token; RightColumn = token.LeftColumn + token.FullMatchedLength - 1 } - - /// Returns symbol at a given position. - let private getSymbolFromTokens (fileName: string, tokens: FSharpTokenInfo list, linePos: LinePosition, lineStr: string, lookupKind: SymbolLookupKind) : LexerSymbol option = - let isIdentifier t = t.CharClass = FSharpTokenCharKind.Identifier - let isOperator t = t.ColorClass = FSharpTokenColorKind.Operator - - let inline (|GenericTypeParameterPrefix|StaticallyResolvedTypeParameterPrefix|Other|) (token: FSharpTokenInfo) = - if token.Tag = FSharpTokenTag.QUOTE then GenericTypeParameterPrefix - elif token.Tag = FSharpTokenTag.INFIX_AT_HAT_OP then - // The lexer return INFIX_AT_HAT_OP token for both "^" and "@" symbols. - // We have to check the char itself to distinguish one from another. 
- if token.FullMatchedLength = 1 && lineStr.[token.LeftColumn] = '^' then - StaticallyResolvedTypeParameterPrefix - else Other - else Other - - // Operators: Filter out overlapped operators (>>= operator is tokenized as three distinct tokens: GREATER, GREATER, EQUALS. - // Each of them has FullMatchedLength = 3. So, we take the first GREATER and skip the other two). - // - // Generic type parameters: we convert QUOTE + IDENT tokens into single IDENT token, altering its LeftColumn - // and FullMathedLength (for "'type" which is tokenized as (QUOTE, left=2) + (IDENT, left=3, length=4) - // we'll get (IDENT, left=2, length=5). - // - // Statically resolved type parameters: we convert INFIX_AT_HAT_OP + IDENT tokens into single IDENT token, altering its LeftColumn - // and FullMathedLength (for "^type" which is tokenized as (INFIX_AT_HAT_OP, left=2) + (IDENT, left=3, length=4) - // we'll get (IDENT, left=2, length=5). - let tokens = - tokens - |> List.fold (fun (acc, lastToken) (token: FSharpTokenInfo) -> - match lastToken with - | Some t when token.LeftColumn <= t.RightColumn -> acc, lastToken - | _ -> - match token with - | GenericTypeParameterPrefix -> acc, Some (DraftToken.Create LexerSymbolKind.GenericTypeParameter token) - | StaticallyResolvedTypeParameterPrefix -> acc, Some (DraftToken.Create LexerSymbolKind.StaticallyResolvedTypeParameter token) - | Other -> - let draftToken = - match lastToken with - | Some { Kind = LexerSymbolKind.GenericTypeParameter | LexerSymbolKind.StaticallyResolvedTypeParameter as kind } when isIdentifier token -> - DraftToken.Create kind { token with LeftColumn = token.LeftColumn - 1 - FullMatchedLength = token.FullMatchedLength + 1 } - // ^ operator - | Some { Kind = LexerSymbolKind.StaticallyResolvedTypeParameter } -> - DraftToken.Create LexerSymbolKind.Operator { token with LeftColumn = token.LeftColumn - 1 - FullMatchedLength = 1 } - | _ -> - let kind = - if isOperator token then LexerSymbolKind.Operator - elif isIdentifier token then LexerSymbolKind.Ident - else LexerSymbolKind.Other - - DraftToken.Create kind token - draftToken :: acc, Some draftToken - ) ([], None) - |> fst - - // One or two tokens that in touch with the cursor (for "let x|(g) = ()" the tokens will be "x" and "(") - let tokensUnderCursor = - match lookupKind with - | SymbolLookupKind.Fuzzy -> - tokens |> List.filter (fun x -> x.Token.LeftColumn <= linePos.Character && x.RightColumn + 1 >= linePos.Character) - | SymbolLookupKind.ByRightColumn -> - tokens |> List.filter (fun x -> x.RightColumn = linePos.Character) - | SymbolLookupKind.ByLongIdent -> - tokens |> List.filter (fun x -> x.Token.LeftColumn <= linePos.Character) - - //printfn "Filtered tokens: %+A" tokensUnderCursor - match lookupKind with - | SymbolLookupKind.ByLongIdent -> - // Try to find start column of the long identifiers - // Assume that tokens are ordered in an decreasing order of start columns - let rec tryFindStartColumn tokens = - match tokens with - | { DraftToken.Kind = LexerSymbolKind.Ident; Token = t1 } :: {Kind = LexerSymbolKind.Operator; Token = t2 } :: remainingTokens -> - if t2.Tag = FSharpTokenTag.DOT then - tryFindStartColumn remainingTokens - else - Some t1.LeftColumn - | { Kind = LexerSymbolKind.Ident; Token = t } :: _ -> - Some t.LeftColumn - | _ :: _ | [] -> - None - let decreasingTokens = - match tokensUnderCursor |> List.sortBy (fun token -> - token.Token.LeftColumn) with - // Skip the first dot if it is the start of the identifier - | {Kind = LexerSymbolKind.Operator; Token = t} :: remainingTokens 
when t.Tag = FSharpTokenTag.DOT -> - remainingTokens - | newTokens -> newTokens - - match decreasingTokens with - | [] -> None - | first :: _ -> - tryFindStartColumn decreasingTokens - |> Option.map (fun leftCol -> - { Kind = LexerSymbolKind.Ident - Line = linePos.Line - LeftColumn = leftCol - RightColumn = first.RightColumn + 1 - Text = lineStr.[leftCol..first.RightColumn] - FileName = fileName }) - | SymbolLookupKind.Fuzzy - | SymbolLookupKind.ByRightColumn -> - // Select IDENT token. If failed, select OPERATOR token. - tokensUnderCursor - |> List.tryFind (fun { DraftToken.Kind = k } -> - match k with - | LexerSymbolKind.Ident - | LexerSymbolKind.GenericTypeParameter - | LexerSymbolKind.StaticallyResolvedTypeParameter -> true - | _ -> false) - |> Option.orElseWith (fun _ -> tokensUnderCursor |> List.tryFind (fun { DraftToken.Kind = k } -> k = LexerSymbolKind.Operator)) - |> Option.map (fun token -> - { Kind = token.Kind - Line = linePos.Line - LeftColumn = token.Token.LeftColumn - RightColumn = token.RightColumn + 1 - Text = lineStr.Substring(token.Token.LeftColumn, token.Token.FullMatchedLength) - FileName = fileName }) - - let getSymbolAtPosition(documentKey: DocumentId, sourceText: SourceText, position: int, fileName: string, defines: string list, lookupKind: SymbolLookupKind) : LexerSymbol option = - try - let textLine = sourceText.Lines.GetLineFromPosition(position) - let textLinePos = sourceText.Lines.GetLinePosition(position) - let sourceLineDatas = getSourceLineDatas(documentKey, sourceText, textLinePos.Line, textLinePos.Line, Some fileName, defines, CancellationToken.None) - assert(sourceLineDatas.Count = 1) - let lineContents = textLine.Text.ToString(textLine.Span) - getSymbolFromTokens(fileName, sourceLineDatas.[0].Tokens, textLinePos, lineContents, lookupKind) - with - | :? System.OperationCanceledException -> reraise() - | ex -> - Assert.Exception(ex) - None - /// Fix invalid span if it appears to have redundant suffix and prefix. 
let fixupSpan (sourceText: SourceText, span: TextSpan) : TextSpan = let text = sourceText.GetSubText(span).ToString() @@ -370,6 +49,22 @@ module internal CommonHelpers = | GlyphMajor.Error -> Glyph.Error | _ -> Glyph.None + let compilerTokenToRoslynToken(colorKind: FSharpTokenColorKind) : string = + match colorKind with + | FSharpTokenColorKind.Comment -> ClassificationTypeNames.Comment + | FSharpTokenColorKind.Identifier -> ClassificationTypeNames.Identifier + | FSharpTokenColorKind.Keyword -> ClassificationTypeNames.Keyword + | FSharpTokenColorKind.String -> ClassificationTypeNames.StringLiteral + | FSharpTokenColorKind.Text -> ClassificationTypeNames.Text + | FSharpTokenColorKind.UpperIdentifier -> ClassificationTypeNames.Identifier + | FSharpTokenColorKind.Number -> ClassificationTypeNames.NumericLiteral + | FSharpTokenColorKind.InactiveCode -> ClassificationTypeNames.ExcludedCode + | FSharpTokenColorKind.PreprocessorKeyword -> ClassificationTypeNames.PreprocessorKeyword + | FSharpTokenColorKind.Operator -> ClassificationTypeNames.Operator + | FSharpTokenColorKind.TypeName -> ClassificationTypeNames.ClassName + | FSharpTokenColorKind.Default + | _ -> ClassificationTypeNames.Text + [] type internal SymbolDeclarationLocation = | CurrentDocument @@ -459,31 +154,7 @@ module internal Extensions = | _ -> false isPrivate && declaredInTheFile - - let glyphMajorToRoslynGlyph = function - | GlyphMajor.Class -> Glyph.ClassPublic - | GlyphMajor.Constant -> Glyph.ConstantPublic - | GlyphMajor.Delegate -> Glyph.DelegatePublic - | GlyphMajor.Enum -> Glyph.EnumPublic - | GlyphMajor.EnumMember -> Glyph.FieldPublic - | GlyphMajor.Event -> Glyph.EventPublic - | GlyphMajor.Exception -> Glyph.ClassPublic - | GlyphMajor.FieldBlue -> Glyph.FieldPublic - | GlyphMajor.Interface -> Glyph.InterfacePublic - | GlyphMajor.Method -> Glyph.MethodPublic - | GlyphMajor.Method2 -> Glyph.MethodPublic - | GlyphMajor.Module -> Glyph.ModulePublic - | GlyphMajor.NameSpace -> Glyph.Namespace - | GlyphMajor.Property -> Glyph.PropertyPublic - | GlyphMajor.Struct -> Glyph.StructurePublic - | GlyphMajor.Typedef -> Glyph.ClassPublic - | GlyphMajor.Type -> Glyph.ClassPublic - | GlyphMajor.Union -> Glyph.EnumPublic - | GlyphMajor.Variable -> Glyph.FieldPublic - | GlyphMajor.ValueType -> Glyph.StructurePublic - | GlyphMajor.Error -> Glyph.Error - | _ -> Glyph.None - + type Async<'a> with /// Creates an asynchronous workflow that runs the asynchronous workflow given as an argument at most once. /// When the returned workflow is started for the second time, it reuses the result of the previous execution. diff --git a/vsintegration/src/FSharp.Editor/Common/Lexer.fs b/vsintegration/src/FSharp.Editor/Common/Lexer.fs new file mode 100644 index 00000000000..1e70ef0d56b --- /dev/null +++ b/vsintegration/src/FSharp.Editor/Common/Lexer.fs @@ -0,0 +1,326 @@ +// Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. 
+ +namespace Microsoft.VisualStudio.FSharp.Editor + +open System +open System.ComponentModel.Composition +open System.Collections.Generic +open System.Threading +open System.Threading.Tasks +open System.Runtime.CompilerServices + +open Microsoft.CodeAnalysis +open Microsoft.CodeAnalysis.Classification +open Microsoft.CodeAnalysis.Text + +open Microsoft.VisualStudio.FSharp.LanguageService +open Microsoft.FSharp.Compiler +open Microsoft.FSharp.Compiler.SourceCodeServices +open Microsoft.FSharp.Compiler.SourceCodeServices.ItemDescriptionIcons + +[] +type internal LexerSymbolKind = + | Ident + | Operator + | GenericTypeParameter + | StaticallyResolvedTypeParameter + | Other + +type internal LexerSymbol = + { Kind: LexerSymbolKind + Line: int + LeftColumn: int + RightColumn: int + Text: string + FileName: string } + member x.Range: Range.range = + Range.mkRange x.FileName (Range.mkPos (x.Line + 1) x.LeftColumn) (Range.mkPos (x.Line + 1) x.RightColumn) + +[] +type internal SymbolLookupKind = + | Fuzzy + | ByRightColumn + | ByLongIdent + +type private DraftToken = + { Kind: LexerSymbolKind + Token: FSharpTokenInfo + RightColumn: int } + static member inline Create kind token = + { Kind = kind; Token = token; RightColumn = token.LeftColumn + token.FullMatchedLength - 1 } + +type internal SourceLineData(lineStart: int, lexStateAtStartOfLine: FSharpTokenizerLexState, lexStateAtEndOfLine: FSharpTokenizerLexState, + hashCode: int, classifiedSpans: IReadOnlyList, tokens: FSharpTokenInfo list) = + member val LineStart = lineStart + member val LexStateAtStartOfLine = lexStateAtStartOfLine + member val LexStateAtEndOfLine = lexStateAtEndOfLine + member val HashCode = hashCode + member val ClassifiedSpans = classifiedSpans + member val Tokens = tokens + + member data.IsValid(textLine: TextLine) = + data.LineStart = textLine.Start && + let lineContents = textLine.Text.ToString(textLine.Span) + data.HashCode = lineContents.GetHashCode() + +type internal SourceTextData(approxLines: int) = + let data = ResizeArray(approxLines) + let extendTo i = + if i >= data.Count then + data.Capacity <- i + 1 + for j in data.Count .. i do + data.Add(None) + member x.Item + with get (i:int) = extendTo i; data.[i] + and set (i:int) v = extendTo i; data.[i] <- v + + member x.ClearFrom(n) = + let mutable i = n + while i < data.Count && data.[i].IsSome do + data.[i] <- None + i <- i + 1 + + /// Go backwards to find the last cached scanned line that is valid. 
+ member x.GetLastValidCachedLine (startLine: int, sourceLines: TextLineCollection) : int = + let mutable i = startLine + while i > 0 && (match x.[i] with Some data -> not (data.IsValid(sourceLines.[i])) | None -> true) do + i <- i - 1 + i + +[); System.Composition.Shared>] +type internal Lexer() = + let dataCache = ConditionalWeakTable() + + let scanSourceLine(sourceTokenizer: FSharpSourceTokenizer, textLine: TextLine, lineContents: string, lexState: FSharpTokenizerLexState) : SourceLineData = + let colorMap = Array.create textLine.Span.Length ClassificationTypeNames.Text + let lineTokenizer = sourceTokenizer.CreateLineTokenizer(lineContents) + let tokens = ResizeArray() + + let scanAndColorNextToken(lineTokenizer: FSharpLineTokenizer, lexState: Ref) : Option = + let tokenInfoOption, nextLexState = lineTokenizer.ScanToken(lexState.Value) + lexState.Value <- nextLexState + if tokenInfoOption.IsSome then + let classificationType = CommonHelpers.compilerTokenToRoslynToken(tokenInfoOption.Value.ColorClass) + for i = tokenInfoOption.Value.LeftColumn to tokenInfoOption.Value.RightColumn do + Array.set colorMap i classificationType + tokens.Add tokenInfoOption.Value + tokenInfoOption + + let previousLexState = ref lexState + let mutable tokenInfoOption = scanAndColorNextToken(lineTokenizer, previousLexState) + while tokenInfoOption.IsSome do + tokenInfoOption <- scanAndColorNextToken(lineTokenizer, previousLexState) + + let mutable startPosition = 0 + let mutable endPosition = startPosition + let classifiedSpans = new List() + + while startPosition < colorMap.Length do + let classificationType = colorMap.[startPosition] + endPosition <- startPosition + while endPosition < colorMap.Length && classificationType = colorMap.[endPosition] do + endPosition <- endPosition + 1 + let textSpan = new TextSpan(textLine.Start + startPosition, endPosition - startPosition) + classifiedSpans.Add(new ClassifiedSpan(classificationType, textSpan)) + startPosition <- endPosition + + SourceLineData(textLine.Start, lexState, previousLexState.Value, lineContents.GetHashCode(), classifiedSpans, List.ofSeq tokens) + + /// Returns symbol at a given position. + let getSymbolFromTokens (fileName: string, tokens: FSharpTokenInfo list, linePos: LinePosition, lineStr: string, lookupKind: SymbolLookupKind) : LexerSymbol option = + let isIdentifier t = t.CharClass = FSharpTokenCharKind.Identifier + let isOperator t = t.ColorClass = FSharpTokenColorKind.Operator + + let inline (|GenericTypeParameterPrefix|StaticallyResolvedTypeParameterPrefix|Other|) (token: FSharpTokenInfo) = + if token.Tag = FSharpTokenTag.QUOTE then GenericTypeParameterPrefix + elif token.Tag = FSharpTokenTag.INFIX_AT_HAT_OP then + // The lexer return INFIX_AT_HAT_OP token for both "^" and "@" symbols. + // We have to check the char itself to distinguish one from another. + if token.FullMatchedLength = 1 && lineStr.[token.LeftColumn] = '^' then + StaticallyResolvedTypeParameterPrefix + else Other + else Other + + // Operators: Filter out overlapped operators (>>= operator is tokenized as three distinct tokens: GREATER, GREATER, EQUALS. + // Each of them has FullMatchedLength = 3. So, we take the first GREATER and skip the other two). + // + // Generic type parameters: we convert QUOTE + IDENT tokens into single IDENT token, altering its LeftColumn + // and FullMathedLength (for "'type" which is tokenized as (QUOTE, left=2) + (IDENT, left=3, length=4) + // we'll get (IDENT, left=2, length=5). 
+ // + // Statically resolved type parameters: we convert INFIX_AT_HAT_OP + IDENT tokens into single IDENT token, altering its LeftColumn + // and FullMathedLength (for "^type" which is tokenized as (INFIX_AT_HAT_OP, left=2) + (IDENT, left=3, length=4) + // we'll get (IDENT, left=2, length=5). + let tokens = + tokens + |> List.fold (fun (acc, lastToken) (token: FSharpTokenInfo) -> + match lastToken with + | Some t when token.LeftColumn <= t.RightColumn -> acc, lastToken + | _ -> + match token with + | GenericTypeParameterPrefix -> acc, Some (DraftToken.Create LexerSymbolKind.GenericTypeParameter token) + | StaticallyResolvedTypeParameterPrefix -> acc, Some (DraftToken.Create LexerSymbolKind.StaticallyResolvedTypeParameter token) + | Other -> + let draftToken = + match lastToken with + | Some { Kind = LexerSymbolKind.GenericTypeParameter | LexerSymbolKind.StaticallyResolvedTypeParameter as kind } when isIdentifier token -> + DraftToken.Create kind { token with LeftColumn = token.LeftColumn - 1 + FullMatchedLength = token.FullMatchedLength + 1 } + // ^ operator + | Some { Kind = LexerSymbolKind.StaticallyResolvedTypeParameter } -> + DraftToken.Create LexerSymbolKind.Operator { token with LeftColumn = token.LeftColumn - 1 + FullMatchedLength = 1 } + | _ -> + let kind = + if isOperator token then LexerSymbolKind.Operator + elif isIdentifier token then LexerSymbolKind.Ident + else LexerSymbolKind.Other + + DraftToken.Create kind token + draftToken :: acc, Some draftToken + ) ([], None) + |> fst + + // One or two tokens that in touch with the cursor (for "let x|(g) = ()" the tokens will be "x" and "(") + let tokensUnderCursor = + match lookupKind with + | SymbolLookupKind.Fuzzy -> + tokens |> List.filter (fun x -> x.Token.LeftColumn <= linePos.Character && x.RightColumn + 1 >= linePos.Character) + | SymbolLookupKind.ByRightColumn -> + tokens |> List.filter (fun x -> x.RightColumn = linePos.Character) + | SymbolLookupKind.ByLongIdent -> + tokens |> List.filter (fun x -> x.Token.LeftColumn <= linePos.Character) + + //printfn "Filtered tokens: %+A" tokensUnderCursor + match lookupKind with + | SymbolLookupKind.ByLongIdent -> + // Try to find start column of the long identifiers + // Assume that tokens are ordered in an decreasing order of start columns + let rec tryFindStartColumn tokens = + match tokens with + | { DraftToken.Kind = LexerSymbolKind.Ident; Token = t1 } :: {Kind = LexerSymbolKind.Operator; Token = t2 } :: remainingTokens -> + if t2.Tag = FSharpTokenTag.DOT then + tryFindStartColumn remainingTokens + else + Some t1.LeftColumn + | { Kind = LexerSymbolKind.Ident; Token = t } :: _ -> + Some t.LeftColumn + | _ :: _ | [] -> + None + let decreasingTokens = + match tokensUnderCursor |> List.sortBy (fun token -> - token.Token.LeftColumn) with + // Skip the first dot if it is the start of the identifier + | {Kind = LexerSymbolKind.Operator; Token = t} :: remainingTokens when t.Tag = FSharpTokenTag.DOT -> + remainingTokens + | newTokens -> newTokens + + match decreasingTokens with + | [] -> None + | first :: _ -> + tryFindStartColumn decreasingTokens + |> Option.map (fun leftCol -> + { Kind = LexerSymbolKind.Ident + Line = linePos.Line + LeftColumn = leftCol + RightColumn = first.RightColumn + 1 + Text = lineStr.[leftCol..first.RightColumn] + FileName = fileName }) + | SymbolLookupKind.Fuzzy + | SymbolLookupKind.ByRightColumn -> + // Select IDENT token. If failed, select OPERATOR token. 
+ tokensUnderCursor + |> List.tryFind (fun { DraftToken.Kind = k } -> + match k with + | LexerSymbolKind.Ident + | LexerSymbolKind.GenericTypeParameter + | LexerSymbolKind.StaticallyResolvedTypeParameter -> true + | _ -> false) + |> Option.orElseWith (fun _ -> tokensUnderCursor |> List.tryFind (fun { DraftToken.Kind = k } -> k = LexerSymbolKind.Operator)) + |> Option.map (fun token -> + { Kind = token.Kind + Line = linePos.Line + LeftColumn = token.Token.LeftColumn + RightColumn = token.RightColumn + 1 + Text = lineStr.Substring(token.Token.LeftColumn, token.Token.FullMatchedLength) + FileName = fileName }) + + member __.GetSourceLineDatas(documentKey: DocumentId, sourceText: SourceText, startLine: int, endLine: int, fileName: string option, defines: string list, + cancellationToken: CancellationToken) : ResizeArray = + let sourceTokenizer = FSharpSourceTokenizer(defines, fileName) + let lines = sourceText.Lines + // We keep incremental data per-document. When text changes we correlate text line-by-line (by hash codes of lines) + let sourceTextData = dataCache.GetValue(documentKey, fun key -> SourceTextData(lines.Count)) + let scanStartLine = sourceTextData.GetLastValidCachedLine(startLine, lines) + + // Rescan the lines if necessary and report the information + let result = ResizeArray() + let mutable lexState = if scanStartLine = 0 then 0L else sourceTextData.[scanStartLine].Value.LexStateAtEndOfLine + + for i = scanStartLine to endLine do + cancellationToken.ThrowIfCancellationRequested() + let textLine = lines.[i] + let lineContents = textLine.Text.ToString(textLine.Span) + + let lineData = + // We can reuse the old data when + // 1. the line starts at the same overall position + // 2. the hash codes match + // 3. the start-of-line lex states are the same + match sourceTextData.[i] with + | Some data when data.IsValid(textLine) && data.LexStateAtStartOfLine = lexState -> + data + | _ -> + // Otherwise, we recompute + let newData = scanSourceLine(sourceTokenizer, textLine, lineContents, lexState) + sourceTextData.[i] <- Some newData + newData + + lexState <- lineData.LexStateAtEndOfLine + + if startLine <= i then + result.Add(lineData) + + // If necessary, invalidate all subsequent lines after endLine + if endLine < lines.Count - 1 then + match sourceTextData.[endLine+1] with + | Some data -> + if data.LexStateAtStartOfLine <> lexState then + sourceTextData.ClearFrom (endLine+1) + | None -> () + + result + + member this.GetColorizationData(documentKey: DocumentId, sourceText: SourceText, textSpan: TextSpan, fileName: string option, defines: string list, + cancellationToken: CancellationToken) : List = + try + let lines = sourceText.Lines + let startLine = lines.GetLineFromPosition(textSpan.Start).LineNumber + let endLine = lines.GetLineFromPosition(textSpan.End).LineNumber + + // Rescan the lines if necessary and report the information + let result = new List() + for lineData in this.GetSourceLineDatas(documentKey, sourceText, startLine, endLine, fileName, defines, cancellationToken) do + result.AddRange(lineData.ClassifiedSpans |> Seq.filter(fun token -> + textSpan.Contains(token.TextSpan.Start) || + textSpan.Contains(token.TextSpan.End - 1) || + (token.TextSpan.Start <= textSpan.Start && textSpan.End <= token.TextSpan.End))) + result + with + | :? 
System.OperationCanceledException -> reraise() + | ex -> + Assert.Exception(ex) + List() + + member this.GetSymbolAtPosition(documentKey: DocumentId, sourceText: SourceText, position: int, fileName: string, defines: string list, lookupKind: SymbolLookupKind) : LexerSymbol option = + try + let textLine = sourceText.Lines.GetLineFromPosition(position) + let textLinePos = sourceText.Lines.GetLinePosition(position) + let sourceLineDatas = this.GetSourceLineDatas(documentKey, sourceText, textLinePos.Line, textLinePos.Line, Some fileName, defines, CancellationToken.None) + assert(sourceLineDatas.Count = 1) + let lineContents = textLine.Text.ToString(textLine.Span) + getSymbolFromTokens(fileName, sourceLineDatas.[0].Tokens, textLinePos, lineContents, lookupKind) + with + | :? System.OperationCanceledException -> reraise() + | ex -> + Assert.Exception(ex) + None \ No newline at end of file diff --git a/vsintegration/src/FSharp.Editor/Completion/CompletionProvider.fs b/vsintegration/src/FSharp.Editor/Completion/CompletionProvider.fs index 052321b4ae6..9cbb6b2c264 100644 --- a/vsintegration/src/FSharp.Editor/Completion/CompletionProvider.fs +++ b/vsintegration/src/FSharp.Editor/Completion/CompletionProvider.fs @@ -40,7 +40,8 @@ type internal FSharpCompletionProvider workspace: Workspace, serviceProvider: SVsServiceProvider, checkerProvider: FSharpCheckerProvider, - projectInfoManager: ProjectInfoManager + projectInfoManager: ProjectInfoManager, + lexer: Lexer ) = inherit CompletionProvider() @@ -50,7 +51,7 @@ type internal FSharpCompletionProvider let xmlMemberIndexService = serviceProvider.GetService(typeof) :?> IVsXMLMemberIndexService let documentationBuilder = XmlDocumentation.CreateDocumentationBuilder(xmlMemberIndexService, serviceProvider.DTE) - static member ShouldTriggerCompletionAux(sourceText: SourceText, caretPosition: int, trigger: CompletionTriggerKind, getInfo: (unit -> DocumentId * string * string list)) = + static member ShouldTriggerCompletionAux(lexer: Lexer, sourceText: SourceText, caretPosition: int, trigger: CompletionTriggerKind, getInfo: (unit -> DocumentId * string * string list)) = // Skip if we are at the start of a document if caretPosition = 0 then false @@ -77,7 +78,7 @@ type internal FSharpCompletionProvider let textLines = sourceText.Lines let triggerLine = textLines.GetLineFromPosition(triggerPosition) let classifiedSpanOption = - CommonHelpers.getColorizationData(documentId, sourceText, triggerLine.Span, Some(filePath), defines, CancellationToken.None) + lexer.GetColorizationData(documentId, sourceText, triggerLine.Span, Some(filePath), defines, CancellationToken.None) |> Seq.tryFind(fun classifiedSpan -> classifiedSpan.TextSpan.Contains(triggerPosition)) match classifiedSpanOption with @@ -124,7 +125,7 @@ type internal FSharpCompletionProvider let defines = projectInfoManager.GetCompilationDefinesForEditingDocument(document) (documentId, document.FilePath, defines) - FSharpCompletionProvider.ShouldTriggerCompletionAux(sourceText, caretPosition, trigger.Kind, getInfo) + FSharpCompletionProvider.ShouldTriggerCompletionAux(lexer, sourceText, caretPosition, trigger.Kind, getInfo) override this.ProvideCompletionsAsync(context: Microsoft.CodeAnalysis.Completion.CompletionContext) = async { @@ -154,11 +155,12 @@ type internal FSharpCompletionService workspace: Workspace, serviceProvider: SVsServiceProvider, checkerProvider: FSharpCheckerProvider, - projectInfoManager: ProjectInfoManager + projectInfoManager: ProjectInfoManager, + lexer: Lexer ) = inherit 
CompletionServiceWithProviders(workspace) - let builtInProviders = ImmutableArray.Create(FSharpCompletionProvider(workspace, serviceProvider, checkerProvider, projectInfoManager)) + let builtInProviders = ImmutableArray.Create(FSharpCompletionProvider(workspace, serviceProvider, checkerProvider, projectInfoManager, lexer)) let completionRules = CompletionRules.Default.WithDismissIfEmpty(true).WithDismissIfLastCharacterDeleted(true).WithDefaultEnterKeyRule(EnterKeyRule.Never) override this.Language = FSharpCommonConstants.FSharpLanguageName @@ -174,10 +176,11 @@ type internal FSharpCompletionServiceFactory ( serviceProvider: SVsServiceProvider, checkerProvider: FSharpCheckerProvider, - projectInfoManager: ProjectInfoManager + projectInfoManager: ProjectInfoManager, + lexer: Lexer ) = interface ILanguageServiceFactory with member this.CreateLanguageService(hostLanguageServices: HostLanguageServices) : ILanguageService = - upcast new FSharpCompletionService(hostLanguageServices.WorkspaceServices.Workspace, serviceProvider, checkerProvider, projectInfoManager) + upcast new FSharpCompletionService(hostLanguageServices.WorkspaceServices.Workspace, serviceProvider, checkerProvider, projectInfoManager, lexer) diff --git a/vsintegration/src/FSharp.Editor/Debugging/LanguageDebugInfoService.fs b/vsintegration/src/FSharp.Editor/Debugging/LanguageDebugInfoService.fs index 6da1a0092d2..f8b25fa02bb 100644 --- a/vsintegration/src/FSharp.Editor/Debugging/LanguageDebugInfoService.fs +++ b/vsintegration/src/FSharp.Editor/Debugging/LanguageDebugInfoService.fs @@ -21,7 +21,8 @@ open Microsoft.VisualStudio.FSharp.LanguageService type internal FSharpLanguageDebugInfoService [] ( - projectInfoManager: ProjectInfoManager + projectInfoManager: ProjectInfoManager, + lexer: Lexer ) = static member GetDataTipInformation(sourceText: SourceText, position: int, tokens: List): TextSpan option = @@ -60,7 +61,7 @@ type internal FSharpLanguageDebugInfoService let defines = projectInfoManager.GetCompilationDefinesForEditingDocument(document) let! sourceText = document.GetTextAsync(cancellationToken) |> Async.AwaitTask let textSpan = TextSpan.FromBounds(0, sourceText.Length) - let tokens = CommonHelpers.getColorizationData(document.Id, sourceText, textSpan, Some(document.Name), defines, cancellationToken) + let tokens = lexer.GetColorizationData(document.Id, sourceText, textSpan, Some(document.Name), defines, cancellationToken) let result = match FSharpLanguageDebugInfoService.GetDataTipInformation(sourceText, position, tokens) with | None -> diff --git a/vsintegration/src/FSharp.Editor/DocumentHighlights/DocumentHighlightsService.fs b/vsintegration/src/FSharp.Editor/DocumentHighlights/DocumentHighlightsService.fs index ab4ea5cb7b6..5cb1681ce75 100644 --- a/vsintegration/src/FSharp.Editor/DocumentHighlights/DocumentHighlightsService.fs +++ b/vsintegration/src/FSharp.Editor/DocumentHighlights/DocumentHighlightsService.fs @@ -39,7 +39,13 @@ type internal FSharpHighlightSpan = [] [, FSharpCommonConstants.FSharpLanguageName)>] -type internal FSharpDocumentHighlightsService [] (checkerProvider: FSharpCheckerProvider, projectInfoManager: ProjectInfoManager) = +type internal FSharpDocumentHighlightsService + [] + ( + checkerProvider: FSharpCheckerProvider, + projectInfoManager: ProjectInfoManager, + lexer: Lexer + ) = /// Fix invalid spans if they appear to have redundant suffix and prefix. 
static let fixInvalidSymbolSpans (sourceText: SourceText) (lastIdent: string) (spans: FSharpHighlightSpan []) = @@ -67,14 +73,14 @@ type internal FSharpDocumentHighlightsService [] (checkerP |> Seq.distinctBy (fun span -> span.TextSpan.Start) |> Seq.toArray - static member GetDocumentHighlights(checker: FSharpChecker, documentKey: DocumentId, sourceText: SourceText, filePath: string, position: int, + static member GetDocumentHighlights(lexer: Lexer, checker: FSharpChecker, documentKey: DocumentId, sourceText: SourceText, filePath: string, position: int, defines: string list, options: FSharpProjectOptions, textVersionHash: int) : Async = async { let textLine = sourceText.Lines.GetLineFromPosition(position) let textLinePos = sourceText.Lines.GetLinePosition(position) let fcsTextLineNumber = textLinePos.Line + 1 - match CommonHelpers.getSymbolAtPosition(documentKey, sourceText, position, filePath, defines, SymbolLookupKind.Fuzzy) with + match lexer.GetSymbolAtPosition(documentKey, sourceText, position, filePath, defines, SymbolLookupKind.Fuzzy) with | Some symbol -> let! _parseResults, checkFileAnswer = checker.ParseAndCheckFileInProject(filePath, textVersionHash, sourceText.ToString(), options) match checkFileAnswer with @@ -101,7 +107,7 @@ type internal FSharpDocumentHighlightsService [] (checkerP let! sourceText = document.GetTextAsync(cancellationToken) |> Async.AwaitTask let! textVersion = document.GetTextVersionAsync(cancellationToken) |> Async.AwaitTask let defines = CompilerEnvironment.GetCompilationDefinesForEditing(document.Name, options.OtherOptions |> Seq.toList) - let! spans = FSharpDocumentHighlightsService.GetDocumentHighlights(checkerProvider.Checker, document.Id, sourceText, document.FilePath, + let! spans = FSharpDocumentHighlightsService.GetDocumentHighlights(lexer, checkerProvider.Checker, document.Id, sourceText, document.FilePath, position, defines, options, textVersion.GetHashCode()) let highlightSpans = spans |> Array.map (fun span -> let kind = if span.IsDefinition then HighlightSpanKind.Definition else HighlightSpanKind.Reference diff --git a/vsintegration/src/FSharp.Editor/FSharp.Editor.fsproj b/vsintegration/src/FSharp.Editor/FSharp.Editor.fsproj index c3ae631a285..ab034e75144 100644 --- a/vsintegration/src/FSharp.Editor/FSharp.Editor.fsproj +++ b/vsintegration/src/FSharp.Editor/FSharp.Editor.fsproj @@ -35,6 +35,7 @@ + @@ -47,7 +48,7 @@ - + diff --git a/vsintegration/src/FSharp.Editor/InlineRename/InlineRenameService.fs b/vsintegration/src/FSharp.Editor/InlineRename/InlineRenameService.fs index d8c789cca63..04f3a15e202 100644 --- a/vsintegration/src/FSharp.Editor/InlineRename/InlineRenameService.fs +++ b/vsintegration/src/FSharp.Editor/InlineRename/InlineRenameService.fs @@ -174,17 +174,18 @@ type internal InlineRenameService ( projectInfoManager: ProjectInfoManager, checkerProvider: FSharpCheckerProvider, - [] _refactorNotifyServices: seq + [] _refactorNotifyServices: seq, + lexer: Lexer ) = - static member GetInlineRenameInfo(checker: FSharpChecker, projectInfoManager: ProjectInfoManager, document: Document, sourceText: SourceText, position: int, + static member GetInlineRenameInfo(lexer: Lexer, checker: FSharpChecker, projectInfoManager: ProjectInfoManager, document: Document, sourceText: SourceText, position: int, defines: string list, options: FSharpProjectOptions, textVersionHash: int) : Async = async { let textLine = sourceText.Lines.GetLineFromPosition(position) let textLinePos = sourceText.Lines.GetLinePosition(position) let fcsTextLineNumber = 
textLinePos.Line + 1 // Roslyn line numbers are zero-based, FSharp.Compiler.Service line numbers are 1-based - match CommonHelpers.getSymbolAtPosition(document.Id, sourceText, position, document.FilePath, defines, SymbolLookupKind.Fuzzy) with + match lexer.GetSymbolAtPosition(document.Id, sourceText, position, document.FilePath, defines, SymbolLookupKind.Fuzzy) with | Some symbol -> let! _parseResults, checkFileAnswer = checker.ParseAndCheckFileInProject(document.FilePath, textVersionHash, sourceText.ToString(), options) @@ -211,7 +212,7 @@ type internal InlineRenameService let! sourceText = document.GetTextAsync(cancellationToken) |> Async.AwaitTask let! textVersion = document.GetTextVersionAsync(cancellationToken) |> Async.AwaitTask let defines = CompilerEnvironment.GetCompilationDefinesForEditing(document.Name, options.OtherOptions |> Seq.toList) - return! InlineRenameService.GetInlineRenameInfo(checkerProvider.Checker, projectInfoManager, document, sourceText, position, defines, options, hash textVersion) + return! InlineRenameService.GetInlineRenameInfo(lexer, checkerProvider.Checker, projectInfoManager, document, sourceText, position, defines, options, hash textVersion) | None -> return FailureInlineRenameInfo.Instance :> _ } |> CommonRoslynHelpers.StartAsyncAsTask(cancellationToken) \ No newline at end of file diff --git a/vsintegration/src/FSharp.Editor/Navigation/FindReferencesService.fs b/vsintegration/src/FSharp.Editor/Navigation/FindReferencesService.fs index 49b6015e5a8..6e9e4a5023f 100644 --- a/vsintegration/src/FSharp.Editor/Navigation/FindReferencesService.fs +++ b/vsintegration/src/FSharp.Editor/Navigation/FindReferencesService.fs @@ -25,7 +25,8 @@ type internal FSharpFindReferencesService [] ( checkerProvider: FSharpCheckerProvider, - projectInfoManager: ProjectInfoManager + projectInfoManager: ProjectInfoManager, + lexer: Lexer ) = // File can be included in more than one project, hence single `range` may results with multiple `Document`s. @@ -67,7 +68,7 @@ type internal FSharpFindReferencesService let lineNumber = sourceText.Lines.GetLinePosition(position).Line + 1 let defines = CompilerEnvironment.GetCompilationDefinesForEditing(document.FilePath, options.OtherOptions |> Seq.toList) - match CommonHelpers.getSymbolAtPosition(document.Id, sourceText, position, document.FilePath, defines, SymbolLookupKind.Fuzzy) with + match lexer.GetSymbolAtPosition(document.Id, sourceText, position, document.FilePath, defines, SymbolLookupKind.Fuzzy) with | Some symbol -> let! 
symbolUse = checkFileResults.GetSymbolUseAtLocation(lineNumber, symbol.RightColumn, textLine, [symbol.Text]) match symbolUse with diff --git a/vsintegration/src/FSharp.Editor/Navigation/GoToDefinitionService.fs b/vsintegration/src/FSharp.Editor/Navigation/GoToDefinitionService.fs index a0b0f60330f..7752898de50 100644 --- a/vsintegration/src/FSharp.Editor/Navigation/GoToDefinitionService.fs +++ b/vsintegration/src/FSharp.Editor/Navigation/GoToDefinitionService.fs @@ -45,15 +45,16 @@ type internal FSharpGoToDefinitionService ( checkerProvider: FSharpCheckerProvider, projectInfoManager: ProjectInfoManager, - []presenters: IEnumerable + []presenters: IEnumerable, + lexer: Lexer ) = - static member FindDefinition(checker: FSharpChecker, documentKey: DocumentId, sourceText: SourceText, filePath: string, position: int, defines: string list, options: FSharpProjectOptions, textVersionHash: int) : Async> = + static member FindDefinition(lexer: Lexer, checker: FSharpChecker, documentKey: DocumentId, sourceText: SourceText, filePath: string, position: int, defines: string list, options: FSharpProjectOptions, textVersionHash: int) : Async> = async { let textLine = sourceText.Lines.GetLineFromPosition(position) let textLinePos = sourceText.Lines.GetLinePosition(position) let fcsTextLineNumber = textLinePos.Line + 1 // Roslyn line numbers are zero-based, FSharp.Compiler.Service line numbers are 1-based - match CommonHelpers.getSymbolAtPosition(documentKey, sourceText, position, filePath, defines, SymbolLookupKind.Fuzzy) with + match lexer.GetSymbolAtPosition(documentKey, sourceText, position, filePath, defines, SymbolLookupKind.Fuzzy) with | Some symbol -> let! _parseResults, checkFileAnswer = checker.ParseAndCheckFileInProject(filePath, textVersionHash, sourceText.ToString(), options) match checkFileAnswer with @@ -80,7 +81,8 @@ type internal FSharpGoToDefinitionService let! sourceText = document.GetTextAsync(cancellationToken) |> Async.AwaitTask let! textVersion = document.GetTextVersionAsync(cancellationToken) |> Async.AwaitTask let defines = CompilerEnvironment.GetCompilationDefinesForEditing(document.Name, options.OtherOptions |> Seq.toList) - let! definition = FSharpGoToDefinitionService.FindDefinition(checkerProvider.Checker, document.Id, sourceText, document.FilePath, position, defines, options, textVersion.GetHashCode()) + let! 
definition = + FSharpGoToDefinitionService.FindDefinition(lexer, checkerProvider.Checker, document.Id, sourceText, document.FilePath, position, defines, options, textVersion.GetHashCode()) match definition with | Some(range) -> diff --git a/vsintegration/src/FSharp.Editor/QuickInfo/QuickInfoProvider.fs b/vsintegration/src/FSharp.Editor/QuickInfo/QuickInfoProvider.fs index 7034b36da7b..9777fd86186 100644 --- a/vsintegration/src/FSharp.Editor/QuickInfo/QuickInfoProvider.fs +++ b/vsintegration/src/FSharp.Editor/QuickInfo/QuickInfoProvider.fs @@ -64,13 +64,14 @@ type internal FSharpQuickInfoProvider [)>] serviceProvider: IServiceProvider, classificationFormatMapService: IClassificationFormatMapService, checkerProvider: FSharpCheckerProvider, - projectInfoManager: ProjectInfoManager + projectInfoManager: ProjectInfoManager, + lexer: Lexer ) = let xmlMemberIndexService = serviceProvider.GetService(typeof) :?> IVsXMLMemberIndexService let documentationBuilder = XmlDocumentation.CreateDocumentationBuilder(xmlMemberIndexService, serviceProvider.DTE) - static member ProvideQuickInfo(checker: FSharpChecker, documentId: DocumentId, sourceText: SourceText, filePath: string, position: int, options: FSharpProjectOptions, textVersionHash: int) = + static member ProvideQuickInfo(lexer: Lexer, checker: FSharpChecker, documentId: DocumentId, sourceText: SourceText, filePath: string, position: int, options: FSharpProjectOptions, textVersionHash: int) = async { let! _parseResults, checkResultsAnswer = checker.ParseAndCheckFileInProject(filePath, textVersionHash, sourceText.ToString(), options) let checkFileResults = @@ -83,7 +84,7 @@ type internal FSharpQuickInfoProvider //let qualifyingNames, partialName = QuickParse.GetPartialLongNameEx(textLine.ToString(), textLineColumn - 1) let defines = CompilerEnvironment.GetCompilationDefinesForEditing(filePath, options.OtherOptions |> Seq.toList) - match CommonHelpers.getSymbolAtPosition(documentId, sourceText, position, filePath, defines, SymbolLookupKind.Fuzzy) with + match lexer.GetSymbolAtPosition(documentId, sourceText, position, filePath, defines, SymbolLookupKind.Fuzzy) with | Some symbol -> let! res = checkFileResults.GetToolTipTextAlternate(textLineNumber, symbol.RightColumn, textLine.ToString(), [symbol.Text], FSharpTokenTag.IDENT) return @@ -100,12 +101,12 @@ type internal FSharpQuickInfoProvider let! sourceText = document.GetTextAsync(cancellationToken) |> Async.AwaitTask let defines = projectInfoManager.GetCompilationDefinesForEditingDocument(document) - match CommonHelpers.getSymbolAtPosition(document.Id, sourceText, position, document.FilePath, defines, SymbolLookupKind.Fuzzy) with + match lexer.GetSymbolAtPosition(document.Id, sourceText, position, document.FilePath, defines, SymbolLookupKind.Fuzzy) with | Some _ -> match projectInfoManager.TryGetOptionsForEditingDocumentOrProject(document) with | Some options -> let! textVersion = document.GetTextVersionAsync(cancellationToken) |> Async.AwaitTask - let! quickInfoResult = FSharpQuickInfoProvider.ProvideQuickInfo(checkerProvider.Checker, document.Id, sourceText, document.FilePath, position, options, textVersion.GetHashCode()) + let! 
quickInfoResult = FSharpQuickInfoProvider.ProvideQuickInfo(lexer, checkerProvider.Checker, document.Id, sourceText, document.FilePath, position, options, textVersion.GetHashCode()) match quickInfoResult with | Some(toolTipElement, textSpan) -> let dataTipText = XmlDocumentation.BuildDataTipText(documentationBuilder, toolTipElement) From 18d627970329112778e67e94203862ea94d5b602 Mon Sep 17 00:00:00 2001 From: Vasily Kirichenko Date: Sun, 25 Dec 2016 22:14:06 +0300 Subject: [PATCH 06/16] use lexer cache to reduce recalculations in TrailingSemicolonAnalyzer --- .../FSharp.Editor/Common/LanguageService.fs | 7 +- .../Diagnostics/TrailingSemicolonAnalyzer.fs | 93 +++++++++++++++---- 2 files changed, 78 insertions(+), 22 deletions(-) diff --git a/vsintegration/src/FSharp.Editor/Common/LanguageService.fs b/vsintegration/src/FSharp.Editor/Common/LanguageService.fs index 2c88f8929f1..4e9c7ba96d1 100644 --- a/vsintegration/src/FSharp.Editor/Common/LanguageService.fs +++ b/vsintegration/src/FSharp.Editor/Common/LanguageService.fs @@ -173,6 +173,7 @@ type internal FSharpCheckerWorkspaceService = inherit Microsoft.CodeAnalysis.Host.IWorkspaceService abstract Checker: FSharpChecker abstract ProjectInfoManager: ProjectInfoManager + abstract Lexer: Lexer [] [, Microsoft.CodeAnalysis.Host.Mef.ServiceLayer.Default)>] @@ -180,13 +181,15 @@ type internal FSharpCheckerWorkspaceServiceFactory [] ( checkerProvider: FSharpCheckerProvider, - projectInfoManager: ProjectInfoManager + projectInfoManager: ProjectInfoManager, + lexer: Lexer ) = interface Microsoft.CodeAnalysis.Host.Mef.IWorkspaceServiceFactory with member this.CreateService(_workspaceServices) = upcast { new FSharpCheckerWorkspaceService with member this.Checker = checkerProvider.Checker - member this.ProjectInfoManager = projectInfoManager } + member this.ProjectInfoManager = projectInfoManager + member this.Lexer = lexer } [] [, ".fs")>] diff --git a/vsintegration/src/FSharp.Editor/Diagnostics/TrailingSemicolonAnalyzer.fs b/vsintegration/src/FSharp.Editor/Diagnostics/TrailingSemicolonAnalyzer.fs index d218ccad033..4710daed4be 100644 --- a/vsintegration/src/FSharp.Editor/Diagnostics/TrailingSemicolonAnalyzer.fs +++ b/vsintegration/src/FSharp.Editor/Diagnostics/TrailingSemicolonAnalyzer.fs @@ -6,15 +6,25 @@ open System.Composition open System.Collections.Immutable open System.Threading open System.Threading.Tasks +open System.Runtime.CompilerServices open Microsoft.CodeAnalysis open Microsoft.CodeAnalysis.Text open Microsoft.CodeAnalysis.Diagnostics +open Microsoft.FSharp.Compiler.SourceCodeServices [] type internal TrailingSemicolonDiagnosticAnalyzer() = inherit DocumentDiagnosticAnalyzer() + let cacheByDocumentId = ConditionalWeakTable>() + + let getLexer(document: Document) = + document.Project.Solution.Workspace.Services.GetService().Lexer + + let getProjectInfoManager(document: Document) = + document.Project.Solution.Workspace.Services.GetService().ProjectInfoManager + static member DiagnosticId = "TrailingSemicolon" override __.SupportedDiagnostics = @@ -22,31 +32,74 @@ type internal TrailingSemicolonDiagnosticAnalyzer() = override this.AnalyzeSyntaxAsync(document: Document, cancellationToken: CancellationToken) = async { - let! 
sourceText = document.GetTextAsync() |> Async.AwaitTask - return - (sourceText.Lines - |> Seq.choose (fun line -> - let lineStr = line.ToString() - let trimmedLineStr = lineStr.TrimEnd() - match trimmedLineStr.LastIndexOf ';' with - | -1 -> None - | semicolonIndex when semicolonIndex = trimmedLineStr.Length - 1 -> + match getProjectInfoManager(document).TryGetOptionsForEditingDocumentOrProject(document) with + | Some options -> + let! sourceText = document.GetTextAsync() |> Async.AwaitTask + let lines = sourceText.Lines + let defines = CompilerEnvironment.GetCompilationDefinesForEditing(document.FilePath, Seq.toList options.OtherOptions) + let lineDatas = getLexer(document).GetSourceLineDatas(document.Id, sourceText, 0, sourceText.Lines.Count - 1, Some document.FilePath, defines, cancellationToken) + let cache = + match cacheByDocumentId.TryGetValue document.Id with + | true, x -> + x.Capacity <- lineDatas.Capacity + if x.Count < lineDatas.Count then + for __ in 1..lineDatas.Count - x.Count do + x.Add None + elif x.Count > lineDatas.Count then + for i in 1..x.Count - lineDatas.Count do + x.RemoveAt(x.Count - i) + else () + x + | _ -> + let cache = ResizeArray(lineDatas.Count) + for __ in 1..lineDatas.Count do cache.Add None + cache + + let mutable calculatedLines = 0 + + let locations = + lineDatas + |> Seq.mapi (fun lineNumber lineData -> + match cache.[lineNumber] with + | Some (oldLexState, oldLocation) when oldLexState = lineData.LexStateAtEndOfLine -> + oldLocation + | _ -> + calculatedLines <- calculatedLines + 1 + let location = + let line = lines.[lineNumber] + let lineStr = line.ToString() + let trimmedLineStr = lineStr.TrimEnd() + match trimmedLineStr.LastIndexOf ';' with + | -1 -> None + | semicolonIndex when semicolonIndex = trimmedLineStr.Length - 1 -> + let linePositionSpan = LinePositionSpan(LinePosition(line.LineNumber, semicolonIndex), LinePosition(line.LineNumber, semicolonIndex + 1)) + let textSpan = sourceText.Lines.GetTextSpan(linePositionSpan) + let location = Location.Create(document.FilePath, textSpan, linePositionSpan) + Some location + | _ -> None + cache.[lineNumber] <- Some (lineData.LexStateAtEndOfLine, location) + location) + |> Seq.choose id + + cacheByDocumentId.Remove(document.Id) |> ignore + cacheByDocumentId.Add(document.Id, cache) + + + let result = + (locations + |> Seq.map (fun location -> let id = "TrailingSemicolon" let emptyString = LocalizableString.op_Implicit "" let description = LocalizableString.op_Implicit "Trailing semicolon." 
let severity = DiagnosticSeverity.Info let descriptor = DiagnosticDescriptor(id, emptyString, description, "", severity, true, emptyString, "", null) - - let linePositionSpan = - LinePositionSpan( - LinePosition(line.LineNumber, semicolonIndex), - LinePosition(line.LineNumber, semicolonIndex + 1)) - - let textSpan = sourceText.Lines.GetTextSpan(linePositionSpan) - let location = Location.Create(document.FilePath, textSpan, linePositionSpan) - Some(Diagnostic.Create(descriptor, location)) - | _ -> None) - ).ToImmutableArray() + Diagnostic.Create(descriptor, location)) + ).ToImmutableArray() + + Logging.Logging.logInfof "TrailingSemicolonAnalyzer: %d/%d lines calculated" calculatedLines lineDatas.Count + + return result + | None -> return ImmutableArray<_>.Empty } |> CommonRoslynHelpers.StartAsyncAsTask cancellationToken override this.AnalyzeSemanticsAsync(_, _) = Task.FromResult(ImmutableArray.Empty) From a06f6bbb0e47422c3907bfb569de6a71b0cbbe81 Mon Sep 17 00:00:00 2001 From: Vasily Kirichenko Date: Sun, 25 Dec 2016 23:04:58 +0300 Subject: [PATCH 07/16] use line hashes in TrailingSemicolonAnalyzer --- .../src/FSharp.Editor/Common/Lexer.fs | 23 +++++++++++-------- .../Diagnostics/TrailingSemicolonAnalyzer.fs | 15 +++++++----- 2 files changed, 23 insertions(+), 15 deletions(-) diff --git a/vsintegration/src/FSharp.Editor/Common/Lexer.fs b/vsintegration/src/FSharp.Editor/Common/Lexer.fs index 1e70ef0d56b..84fea48418a 100644 --- a/vsintegration/src/FSharp.Editor/Common/Lexer.fs +++ b/vsintegration/src/FSharp.Editor/Common/Lexer.fs @@ -61,7 +61,10 @@ type internal SourceLineData(lineStart: int, lexStateAtStartOfLine: FSharpTokeni member data.IsValid(textLine: TextLine) = data.LineStart = textLine.Start && let lineContents = textLine.Text.ToString(textLine.Span) - data.HashCode = lineContents.GetHashCode() + data.HashCode = lineContents.GetHashCode() + override __.ToString() = + sprintf "SourceLineData(line: %d, startLexState: %d, endLexState: %d, hash: %d, token count: %d)" + lineStart lexStateAtStartOfLine lexStateAtEndOfLine hashCode tokens.Length type internal SourceTextData(approxLines: int) = let data = ResizeArray(approxLines) @@ -83,7 +86,7 @@ type internal SourceTextData(approxLines: int) = /// Go backwards to find the last cached scanned line that is valid. 
member x.GetLastValidCachedLine (startLine: int, sourceLines: TextLineCollection) : int = let mutable i = startLine - while i > 0 && (match x.[i] with Some data -> not (data.IsValid(sourceLines.[i])) | None -> true) do + while i >= 0 && (match x.[i] with Some data -> not (data.IsValid(sourceLines.[i])) | None -> true) do i <- i - 1 i @@ -95,10 +98,11 @@ type internal Lexer() = let colorMap = Array.create textLine.Span.Length ClassificationTypeNames.Text let lineTokenizer = sourceTokenizer.CreateLineTokenizer(lineContents) let tokens = ResizeArray() + let previousLexState = ref lexState - let scanAndColorNextToken(lineTokenizer: FSharpLineTokenizer, lexState: Ref) : Option = - let tokenInfoOption, nextLexState = lineTokenizer.ScanToken(lexState.Value) - lexState.Value <- nextLexState + let scanAndColorNextToken(lineTokenizer: FSharpLineTokenizer) : Option = + let tokenInfoOption, nextLexState = lineTokenizer.ScanToken(previousLexState.Value) + previousLexState := nextLexState if tokenInfoOption.IsSome then let classificationType = CommonHelpers.compilerTokenToRoslynToken(tokenInfoOption.Value.ColorClass) for i = tokenInfoOption.Value.LeftColumn to tokenInfoOption.Value.RightColumn do @@ -106,10 +110,10 @@ type internal Lexer() = tokens.Add tokenInfoOption.Value tokenInfoOption - let previousLexState = ref lexState - let mutable tokenInfoOption = scanAndColorNextToken(lineTokenizer, previousLexState) + let mutable tokenInfoOption = scanAndColorNextToken(lineTokenizer) + while tokenInfoOption.IsSome do - tokenInfoOption <- scanAndColorNextToken(lineTokenizer, previousLexState) + tokenInfoOption <- scanAndColorNextToken(lineTokenizer) let mutable startPosition = 0 let mutable endPosition = startPosition @@ -254,7 +258,8 @@ type internal Lexer() = // Rescan the lines if necessary and report the information let result = ResizeArray() - let mutable lexState = if scanStartLine = 0 then 0L else sourceTextData.[scanStartLine].Value.LexStateAtEndOfLine + let mutable lexState = if scanStartLine = -1 then 0L else sourceTextData.[scanStartLine].Value.LexStateAtEndOfLine + let scanStartLine = max scanStartLine 0 for i = scanStartLine to endLine do cancellationToken.ThrowIfCancellationRequested() diff --git a/vsintegration/src/FSharp.Editor/Diagnostics/TrailingSemicolonAnalyzer.fs b/vsintegration/src/FSharp.Editor/Diagnostics/TrailingSemicolonAnalyzer.fs index 4710daed4be..b48c75babcf 100644 --- a/vsintegration/src/FSharp.Editor/Diagnostics/TrailingSemicolonAnalyzer.fs +++ b/vsintegration/src/FSharp.Editor/Diagnostics/TrailingSemicolonAnalyzer.fs @@ -13,11 +13,13 @@ open Microsoft.CodeAnalysis.Text open Microsoft.CodeAnalysis.Diagnostics open Microsoft.FSharp.Compiler.SourceCodeServices +type private LineHash = int + [] type internal TrailingSemicolonDiagnosticAnalyzer() = inherit DocumentDiagnosticAnalyzer() - let cacheByDocumentId = ConditionalWeakTable>() + let cacheByDocumentId = ConditionalWeakTable>() let getLexer(document: Document) = document.Project.Solution.Workspace.Services.GetService().Lexer @@ -41,8 +43,8 @@ type internal TrailingSemicolonDiagnosticAnalyzer() = let cache = match cacheByDocumentId.TryGetValue document.Id with | true, x -> - x.Capacity <- lineDatas.Capacity if x.Count < lineDatas.Count then + x.Capacity <- lineDatas.Capacity for __ in 1..lineDatas.Count - x.Count do x.Add None elif x.Count > lineDatas.Count then @@ -55,13 +57,15 @@ type internal TrailingSemicolonDiagnosticAnalyzer() = for __ in 1..lineDatas.Count do cache.Add None cache + //Logging.Logging.logInfof "LineDatas: 
%+A" (Seq.toList lineDatas) + let mutable calculatedLines = 0 let locations = lineDatas |> Seq.mapi (fun lineNumber lineData -> match cache.[lineNumber] with - | Some (oldLexState, oldLocation) when oldLexState = lineData.LexStateAtEndOfLine -> + | Some (oldHashCode, oldLocation) when oldHashCode = lineData.HashCode -> oldLocation | _ -> calculatedLines <- calculatedLines + 1 @@ -77,14 +81,13 @@ type internal TrailingSemicolonDiagnosticAnalyzer() = let location = Location.Create(document.FilePath, textSpan, linePositionSpan) Some location | _ -> None - cache.[lineNumber] <- Some (lineData.LexStateAtEndOfLine, location) + cache.[lineNumber] <- Some (lineData.HashCode, location) location) |> Seq.choose id cacheByDocumentId.Remove(document.Id) |> ignore cacheByDocumentId.Add(document.Id, cache) - let result = (locations |> Seq.map (fun location -> @@ -96,7 +99,7 @@ type internal TrailingSemicolonDiagnosticAnalyzer() = Diagnostic.Create(descriptor, location)) ).ToImmutableArray() - Logging.Logging.logInfof "TrailingSemicolonAnalyzer: %d/%d lines calculated" calculatedLines lineDatas.Count + //Logging.Logging.logInfof "TrailingSemicolonAnalyzer: %d/%d lines calculated" calculatedLines lineDatas.Count return result | None -> return ImmutableArray<_>.Empty From b6ba57a361f73d54839d184453d8ea51837f0def Mon Sep 17 00:00:00 2001 From: Vasily Kirichenko Date: Mon, 26 Dec 2016 23:44:09 +0300 Subject: [PATCH 08/16] optimization: do not scan whole line to check if the last non space character is the semicolon --- .../Diagnostics/TrailingSemicolonAnalyzer.fs | 36 +++++++++---------- 1 file changed, 17 insertions(+), 19 deletions(-) diff --git a/vsintegration/src/FSharp.Editor/Diagnostics/TrailingSemicolonAnalyzer.fs b/vsintegration/src/FSharp.Editor/Diagnostics/TrailingSemicolonAnalyzer.fs index b48c75babcf..604395e9351 100644 --- a/vsintegration/src/FSharp.Editor/Diagnostics/TrailingSemicolonAnalyzer.fs +++ b/vsintegration/src/FSharp.Editor/Diagnostics/TrailingSemicolonAnalyzer.fs @@ -2,6 +2,7 @@ namespace rec Microsoft.VisualStudio.FSharp.Editor +open System open System.Composition open System.Collections.Immutable open System.Threading @@ -57,30 +58,31 @@ type internal TrailingSemicolonDiagnosticAnalyzer() = for __ in 1..lineDatas.Count do cache.Add None cache - //Logging.Logging.logInfof "LineDatas: %+A" (Seq.toList lineDatas) - - let mutable calculatedLines = 0 + let getTrailingSemicolonIndex (line: string) : int = + let rec loop (index: int) = + if index < 0 then -1 + elif line.[index] = ';' then index + elif Char.IsWhiteSpace(line.[index]) then loop (index - 1) + else -1 + loop (line.Length - 1) let locations = lineDatas |> Seq.mapi (fun lineNumber lineData -> match cache.[lineNumber] with - | Some (oldHashCode, oldLocation) when oldHashCode = lineData.HashCode -> - oldLocation + | Some (oldHashCode, oldLocation) when oldHashCode = lineData.HashCode -> oldLocation | _ -> - calculatedLines <- calculatedLines + 1 let location = - let line = lines.[lineNumber] - let lineStr = line.ToString() - let trimmedLineStr = lineStr.TrimEnd() - match trimmedLineStr.LastIndexOf ';' with + match getTrailingSemicolonIndex (lines.[lineNumber].ToString()) with | -1 -> None - | semicolonIndex when semicolonIndex = trimmedLineStr.Length - 1 -> - let linePositionSpan = LinePositionSpan(LinePosition(line.LineNumber, semicolonIndex), LinePosition(line.LineNumber, semicolonIndex + 1)) + | lastSemicolonIndex -> + let linePositionSpan = + LinePositionSpan( + LinePosition(lineNumber, lastSemicolonIndex), + 
LinePosition(lineNumber, lastSemicolonIndex + 1)) let textSpan = sourceText.Lines.GetTextSpan(linePositionSpan) let location = Location.Create(document.FilePath, textSpan, linePositionSpan) Some location - | _ -> None cache.[lineNumber] <- Some (lineData.HashCode, location) location) |> Seq.choose id @@ -88,7 +90,7 @@ type internal TrailingSemicolonDiagnosticAnalyzer() = cacheByDocumentId.Remove(document.Id) |> ignore cacheByDocumentId.Add(document.Id, cache) - let result = + return (locations |> Seq.map (fun location -> let id = "TrailingSemicolon" @@ -97,11 +99,7 @@ type internal TrailingSemicolonDiagnosticAnalyzer() = let severity = DiagnosticSeverity.Info let descriptor = DiagnosticDescriptor(id, emptyString, description, "", severity, true, emptyString, "", null) Diagnostic.Create(descriptor, location)) - ).ToImmutableArray() - - //Logging.Logging.logInfof "TrailingSemicolonAnalyzer: %d/%d lines calculated" calculatedLines lineDatas.Count - - return result + ).ToImmutableArray() | None -> return ImmutableArray<_>.Empty } |> CommonRoslynHelpers.StartAsyncAsTask cancellationToken From 6793db9a7b442acf9d94085848b7888ca9c42fa3 Mon Sep 17 00:00:00 2001 From: Vasily Kirichenko Date: Mon, 26 Dec 2016 23:49:48 +0300 Subject: [PATCH 09/16] reduce allocations --- vsintegration/src/FSharp.Editor/Common/Lexer.fs | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/vsintegration/src/FSharp.Editor/Common/Lexer.fs b/vsintegration/src/FSharp.Editor/Common/Lexer.fs index 84fea48418a..2621617be0d 100644 --- a/vsintegration/src/FSharp.Editor/Common/Lexer.fs +++ b/vsintegration/src/FSharp.Editor/Common/Lexer.fs @@ -5,6 +5,7 @@ namespace Microsoft.VisualStudio.FSharp.Editor open System open System.ComponentModel.Composition open System.Collections.Generic +open System.Collections.Immutable open System.Threading open System.Threading.Tasks open System.Runtime.CompilerServices @@ -50,7 +51,7 @@ type private DraftToken = { Kind = kind; Token = token; RightColumn = token.LeftColumn + token.FullMatchedLength - 1 } type internal SourceLineData(lineStart: int, lexStateAtStartOfLine: FSharpTokenizerLexState, lexStateAtEndOfLine: FSharpTokenizerLexState, - hashCode: int, classifiedSpans: IReadOnlyList, tokens: FSharpTokenInfo list) = + hashCode: int, classifiedSpans: IReadOnlyList, tokens: ImmutableArray) = member val LineStart = lineStart member val LexStateAtStartOfLine = lexStateAtStartOfLine member val LexStateAtEndOfLine = lexStateAtEndOfLine @@ -97,7 +98,7 @@ type internal Lexer() = let scanSourceLine(sourceTokenizer: FSharpSourceTokenizer, textLine: TextLine, lineContents: string, lexState: FSharpTokenizerLexState) : SourceLineData = let colorMap = Array.create textLine.Span.Length ClassificationTypeNames.Text let lineTokenizer = sourceTokenizer.CreateLineTokenizer(lineContents) - let tokens = ResizeArray() + let tokens = ImmutableArray.CreateBuilder() let previousLexState = ref lexState let scanAndColorNextToken(lineTokenizer: FSharpLineTokenizer) : Option = @@ -128,10 +129,10 @@ type internal Lexer() = classifiedSpans.Add(new ClassifiedSpan(classificationType, textSpan)) startPosition <- endPosition - SourceLineData(textLine.Start, lexState, previousLexState.Value, lineContents.GetHashCode(), classifiedSpans, List.ofSeq tokens) + SourceLineData(textLine.Start, lexState, previousLexState.Value, lineContents.GetHashCode(), classifiedSpans, tokens.ToImmutable()) /// Returns symbol at a given position. 
- let getSymbolFromTokens (fileName: string, tokens: FSharpTokenInfo list, linePos: LinePosition, lineStr: string, lookupKind: SymbolLookupKind) : LexerSymbol option = + let getSymbolFromTokens (fileName: string, tokens: ImmutableArray, linePos: LinePosition, lineStr: string, lookupKind: SymbolLookupKind) : LexerSymbol option = let isIdentifier t = t.CharClass = FSharpTokenCharKind.Identifier let isOperator t = t.ColorClass = FSharpTokenColorKind.Operator @@ -157,7 +158,7 @@ type internal Lexer() = // we'll get (IDENT, left=2, length=5). let tokens = tokens - |> List.fold (fun (acc, lastToken) (token: FSharpTokenInfo) -> + |> Seq.fold (fun (acc, lastToken) (token: FSharpTokenInfo) -> match lastToken with | Some t when token.LeftColumn <= t.RightColumn -> acc, lastToken | _ -> From 416f1d723bb7e5b91b2e282315687a17b2f35c04 Mon Sep 17 00:00:00 2001 From: Vasily Kirichenko Date: Tue, 27 Dec 2016 11:48:22 +0300 Subject: [PATCH 10/16] optimization: reduce SourceLine.ToString() in Lexer, use CheckSum instead of ToString + GetHashCode for text line unique key eliminate a seq and an array allocation in ColorizationService --- .../Classification/ColorizationService.fs | 17 ++++++-------- .../src/FSharp.Editor/Common/Lexer.fs | 22 +++++++++---------- .../Diagnostics/TrailingSemicolonAnalyzer.fs | 8 +++---- 3 files changed, 21 insertions(+), 26 deletions(-) diff --git a/vsintegration/src/FSharp.Editor/Classification/ColorizationService.fs b/vsintegration/src/FSharp.Editor/Classification/ColorizationService.fs index b945ebc415c..82bdc348e53 100644 --- a/vsintegration/src/FSharp.Editor/Classification/ColorizationService.fs +++ b/vsintegration/src/FSharp.Editor/Classification/ColorizationService.fs @@ -55,16 +55,13 @@ type internal FSharpColorizationService let! textVersion = document.GetTextVersionAsync(cancellationToken) |> Async.AwaitTask let! 
_parseResults, checkResultsAnswer = checkerProvider.Checker.ParseAndCheckFileInProject(document.FilePath, textVersion.GetHashCode(), sourceText.ToString(), options) - let extraColorizationData = - match checkResultsAnswer with - | FSharpCheckFileAnswer.Aborted -> [| |] - | FSharpCheckFileAnswer.Succeeded(results) -> - [| for (range, tokenColorKind) in results.GetExtraColorizationsAlternate() do - let span = CommonHelpers.fixupSpan(sourceText, CommonRoslynHelpers.FSharpRangeToTextSpan(sourceText, range)) - if textSpan.Contains(span.Start) || textSpan.Contains(span.End - 1) || span.Contains(textSpan) then - yield ClassifiedSpan(span, CommonHelpers.compilerTokenToRoslynToken(tokenColorKind)) |] - - result.AddRange(extraColorizationData) + match checkResultsAnswer with + | FSharpCheckFileAnswer.Aborted -> () + | FSharpCheckFileAnswer.Succeeded(results) -> + for (range, tokenColorKind) in results.GetExtraColorizationsAlternate() do + let span = CommonHelpers.fixupSpan(sourceText, CommonRoslynHelpers.FSharpRangeToTextSpan(sourceText, range)) + if textSpan.Contains(span.Start) || textSpan.Contains(span.End - 1) || span.Contains(textSpan) then + result.Add(ClassifiedSpan(span, CommonHelpers.compilerTokenToRoslynToken(tokenColorKind))) | None -> () } |> CommonRoslynHelpers.StartAsyncUnitAsTask cancellationToken diff --git a/vsintegration/src/FSharp.Editor/Common/Lexer.fs b/vsintegration/src/FSharp.Editor/Common/Lexer.fs index 2621617be0d..031cb037fb5 100644 --- a/vsintegration/src/FSharp.Editor/Common/Lexer.fs +++ b/vsintegration/src/FSharp.Editor/Common/Lexer.fs @@ -51,21 +51,20 @@ type private DraftToken = { Kind = kind; Token = token; RightColumn = token.LeftColumn + token.FullMatchedLength - 1 } type internal SourceLineData(lineStart: int, lexStateAtStartOfLine: FSharpTokenizerLexState, lexStateAtEndOfLine: FSharpTokenizerLexState, - hashCode: int, classifiedSpans: IReadOnlyList, tokens: ImmutableArray) = + checkSum: ImmutableArray, classifiedSpans: IReadOnlyList, tokens: ImmutableArray) = member val LineStart = lineStart member val LexStateAtStartOfLine = lexStateAtStartOfLine member val LexStateAtEndOfLine = lexStateAtEndOfLine - member val HashCode = hashCode + member val CheckSum = checkSum member val ClassifiedSpans = classifiedSpans member val Tokens = tokens member data.IsValid(textLine: TextLine) = data.LineStart = textLine.Start && - let lineContents = textLine.Text.ToString(textLine.Span) - data.HashCode = lineContents.GetHashCode() + data.CheckSum = textLine.Text.GetChecksum() override __.ToString() = - sprintf "SourceLineData(line: %d, startLexState: %d, endLexState: %d, hash: %d, token count: %d)" - lineStart lexStateAtStartOfLine lexStateAtEndOfLine hashCode tokens.Length + sprintf "SourceLineData(line: %d, startLexState: %d, endLexState: %d, checkSum: %A, token count: %d)" + lineStart lexStateAtStartOfLine lexStateAtEndOfLine (Seq.toArray checkSum) tokens.Length type internal SourceTextData(approxLines: int) = let data = ResizeArray(approxLines) @@ -95,8 +94,9 @@ type internal SourceTextData(approxLines: int) = type internal Lexer() = let dataCache = ConditionalWeakTable() - let scanSourceLine(sourceTokenizer: FSharpSourceTokenizer, textLine: TextLine, lineContents: string, lexState: FSharpTokenizerLexState) : SourceLineData = + let scanSourceLine(sourceTokenizer: FSharpSourceTokenizer, textLine: TextLine, lexState: FSharpTokenizerLexState) : SourceLineData = let colorMap = Array.create textLine.Span.Length ClassificationTypeNames.Text + let lineContents = textLine.ToString() let 
lineTokenizer = sourceTokenizer.CreateLineTokenizer(lineContents) let tokens = ImmutableArray.CreateBuilder() let previousLexState = ref lexState @@ -129,7 +129,7 @@ type internal Lexer() = classifiedSpans.Add(new ClassifiedSpan(classificationType, textSpan)) startPosition <- endPosition - SourceLineData(textLine.Start, lexState, previousLexState.Value, lineContents.GetHashCode(), classifiedSpans, tokens.ToImmutable()) + SourceLineData(textLine.Start, lexState, previousLexState.Value, textLine.Text.GetChecksum(), classifiedSpans, tokens.ToImmutable()) /// Returns symbol at a given position. let getSymbolFromTokens (fileName: string, tokens: ImmutableArray, linePos: LinePosition, lineStr: string, lookupKind: SymbolLookupKind) : LexerSymbol option = @@ -261,11 +261,9 @@ type internal Lexer() = let result = ResizeArray() let mutable lexState = if scanStartLine = -1 then 0L else sourceTextData.[scanStartLine].Value.LexStateAtEndOfLine let scanStartLine = max scanStartLine 0 - for i = scanStartLine to endLine do cancellationToken.ThrowIfCancellationRequested() let textLine = lines.[i] - let lineContents = textLine.Text.ToString(textLine.Span) let lineData = // We can reuse the old data when @@ -277,7 +275,7 @@ type internal Lexer() = data | _ -> // Otherwise, we recompute - let newData = scanSourceLine(sourceTokenizer, textLine, lineContents, lexState) + let newData = scanSourceLine(sourceTokenizer, textLine, lexState) sourceTextData.[i] <- Some newData newData @@ -323,7 +321,7 @@ type internal Lexer() = let textLinePos = sourceText.Lines.GetLinePosition(position) let sourceLineDatas = this.GetSourceLineDatas(documentKey, sourceText, textLinePos.Line, textLinePos.Line, Some fileName, defines, CancellationToken.None) assert(sourceLineDatas.Count = 1) - let lineContents = textLine.Text.ToString(textLine.Span) + let lineContents = textLine.ToString() getSymbolFromTokens(fileName, sourceLineDatas.[0].Tokens, textLinePos, lineContents, lookupKind) with | :? 
System.OperationCanceledException -> reraise()
diff --git a/vsintegration/src/FSharp.Editor/Diagnostics/TrailingSemicolonAnalyzer.fs b/vsintegration/src/FSharp.Editor/Diagnostics/TrailingSemicolonAnalyzer.fs
index 604395e9351..8b01ca463b0 100644
--- a/vsintegration/src/FSharp.Editor/Diagnostics/TrailingSemicolonAnalyzer.fs
+++ b/vsintegration/src/FSharp.Editor/Diagnostics/TrailingSemicolonAnalyzer.fs
@@ -14,13 +14,13 @@ open Microsoft.CodeAnalysis.Text
 open Microsoft.CodeAnalysis.Diagnostics
 open Microsoft.FSharp.Compiler.SourceCodeServices
 
-type private LineHash = int
+type private LineCheckSum = ImmutableArray<byte>
 
 []
 type internal TrailingSemicolonDiagnosticAnalyzer() =
     inherit DocumentDiagnosticAnalyzer()
 
-    let cacheByDocumentId = ConditionalWeakTable<DocumentId, ResizeArray<(LineHash * Location option) option>>()
+    let cacheByDocumentId = ConditionalWeakTable<DocumentId, ResizeArray<(LineCheckSum * Location option) option>>()
 
     let getLexer(document: Document) =
         document.Project.Solution.Workspace.Services.GetService<FSharpCheckerWorkspaceService>().Lexer
@@ -70,7 +70,7 @@ type internal TrailingSemicolonDiagnosticAnalyzer() =
             lineDatas
             |> Seq.mapi (fun lineNumber lineData ->
                 match cache.[lineNumber] with
-                | Some (oldHashCode, oldLocation) when oldHashCode = lineData.HashCode -> oldLocation
+                | Some (oldCheckSum, oldLocation) when oldCheckSum = lineData.CheckSum -> oldLocation
                 | _ ->
                     let location =
                         match getTrailingSemicolonIndex (lines.[lineNumber].ToString()) with
@@ -83,7 +83,7 @@ type internal TrailingSemicolonDiagnosticAnalyzer() =
                         let textSpan = sourceText.Lines.GetTextSpan(linePositionSpan)
                         let location = Location.Create(document.FilePath, textSpan, linePositionSpan)
                         Some location
-                    cache.[lineNumber] <- Some (lineData.HashCode, location)
+                    cache.[lineNumber] <- Some (lineData.CheckSum, location)
                     location)
             |> Seq.choose id
 

From 25ddfb21a2ccbd8cde9d9d101463bd88e25abf16 Mon Sep 17 00:00:00 2001
From: Vasily Kirichenko
Date: Tue, 27 Dec 2016 13:25:17 +0300
Subject: [PATCH 11/16] back to line content hash

---
 .../src/FSharp.Editor/Common/Lexer.fs         | 114 ++++++++++--------
 .../Diagnostics/TrailingSemicolonAnalyzer.fs  |   8 +-
 2 files changed, 67 insertions(+), 55 deletions(-)

diff --git a/vsintegration/src/FSharp.Editor/Common/Lexer.fs b/vsintegration/src/FSharp.Editor/Common/Lexer.fs
index 031cb037fb5..046b5e64b2a 100644
--- a/vsintegration/src/FSharp.Editor/Common/Lexer.fs
+++ b/vsintegration/src/FSharp.Editor/Common/Lexer.fs
@@ -51,20 +51,22 @@ type private DraftToken =
         { Kind = kind; Token = token; RightColumn = token.LeftColumn + token.FullMatchedLength - 1 }
 
 type internal SourceLineData(lineStart: int, lexStateAtStartOfLine: FSharpTokenizerLexState, lexStateAtEndOfLine: FSharpTokenizerLexState,
-                             checkSum: ImmutableArray<byte>, classifiedSpans: IReadOnlyList<ClassifiedSpan>, tokens: ImmutableArray<FSharpTokenInfo>) =
+                             hashCode: int, classifiedSpans: IReadOnlyList<ClassifiedSpan>, tokens: ImmutableArray<FSharpTokenInfo>) =
     member val LineStart = lineStart
     member val LexStateAtStartOfLine = lexStateAtStartOfLine
     member val LexStateAtEndOfLine = lexStateAtEndOfLine
-    member val CheckSum = checkSum
+    member val HashCode = hashCode
     member val ClassifiedSpans = classifiedSpans
     member val Tokens = tokens
 
     member data.IsValid(textLine: TextLine) =
-        data.LineStart = textLine.Start &&
-        data.CheckSum = textLine.Text.GetChecksum()
+        let lineStartsMatch = data.LineStart = textLine.Start
+        let newHashCode = textLine.ToString().GetHashCode()
+        let hashCodeMatch = data.HashCode = newHashCode
+        lineStartsMatch && hashCodeMatch
     override __.ToString() =
-        sprintf "SourceLineData(line: %d, startLexState: %d, endLexState: %d, checkSum: %A, token count: %d)"
-            lineStart lexStateAtStartOfLine lexStateAtEndOfLine (Seq.toArray
checkSum) tokens.Length + sprintf "SourceLineData(line: %d, startLexState: %d, endLexState: %d, hashCode: %d, token count: %d)" + lineStart lexStateAtStartOfLine lexStateAtEndOfLine hashCode tokens.Length type internal SourceTextData(approxLines: int) = let data = ResizeArray(approxLines) @@ -74,8 +76,8 @@ type internal SourceTextData(approxLines: int) = for j in data.Count .. i do data.Add(None) member x.Item - with get (i:int) = extendTo i; data.[i] - and set (i:int) v = extendTo i; data.[i] <- v + with get (i:int) = extendTo i; data.[i] + and set (i:int) v = extendTo i; data.[i] <- v member x.ClearFrom(n) = let mutable i = n @@ -85,10 +87,19 @@ type internal SourceTextData(approxLines: int) = /// Go backwards to find the last cached scanned line that is valid. member x.GetLastValidCachedLine (startLine: int, sourceLines: TextLineCollection) : int = - let mutable i = startLine - while i >= 0 && (match x.[i] with Some data -> not (data.IsValid(sourceLines.[i])) | None -> true) do - i <- i - 1 - i + let rec loop (i: int) = + if i < 0 then i + else + let data = x.[i] + let found = + match data with + | Some data -> + let sourceLine = sourceLines.[i] + let isValid = data.IsValid(sourceLine) + isValid + | None -> false + if found then i else loop (i - 1) + loop startLine [); System.Composition.Shared>] type internal Lexer() = @@ -129,7 +140,7 @@ type internal Lexer() = classifiedSpans.Add(new ClassifiedSpan(classificationType, textSpan)) startPosition <- endPosition - SourceLineData(textLine.Start, lexState, previousLexState.Value, textLine.Text.GetChecksum(), classifiedSpans, tokens.ToImmutable()) + SourceLineData(textLine.Start, lexState, previousLexState.Value, lineContents.GetHashCode(), classifiedSpans, tokens.ToImmutable()) /// Returns symbol at a given position. let getSymbolFromTokens (fileName: string, tokens: ImmutableArray, linePos: LinePosition, lineStr: string, lookupKind: SymbolLookupKind) : LexerSymbol option = @@ -254,45 +265,46 @@ type internal Lexer() = let sourceTokenizer = FSharpSourceTokenizer(defines, fileName) let lines = sourceText.Lines // We keep incremental data per-document. When text changes we correlate text line-by-line (by hash codes of lines) - let sourceTextData = dataCache.GetValue(documentKey, fun key -> SourceTextData(lines.Count)) - let scanStartLine = sourceTextData.GetLastValidCachedLine(startLine, lines) + let sourceTextData = dataCache.GetValue(documentKey, fun key -> SourceTextData(lines.Count)) + lock sourceTextData <| fun () -> + let scanStartLine = sourceTextData.GetLastValidCachedLine(startLine, lines) + // Rescan the lines if necessary and report the information + let result = ResizeArray() + let mutable lexState = if scanStartLine = -1 then 0L else sourceTextData.[scanStartLine].Value.LexStateAtEndOfLine + let scanStartLine = max scanStartLine 0 + for i = scanStartLine to endLine do + cancellationToken.ThrowIfCancellationRequested() + let textLine = lines.[i] - // Rescan the lines if necessary and report the information - let result = ResizeArray() - let mutable lexState = if scanStartLine = -1 then 0L else sourceTextData.[scanStartLine].Value.LexStateAtEndOfLine - let scanStartLine = max scanStartLine 0 - for i = scanStartLine to endLine do - cancellationToken.ThrowIfCancellationRequested() - let textLine = lines.[i] - - let lineData = - // We can reuse the old data when - // 1. the line starts at the same overall position - // 2. the hash codes match - // 3. 
the start-of-line lex states are the same - match sourceTextData.[i] with - | Some data when data.IsValid(textLine) && data.LexStateAtStartOfLine = lexState -> - data - | _ -> - // Otherwise, we recompute - let newData = scanSourceLine(sourceTokenizer, textLine, lexState) - sourceTextData.[i] <- Some newData - newData - - lexState <- lineData.LexStateAtEndOfLine - - if startLine <= i then - result.Add(lineData) - - // If necessary, invalidate all subsequent lines after endLine - if endLine < lines.Count - 1 then - match sourceTextData.[endLine+1] with - | Some data -> - if data.LexStateAtStartOfLine <> lexState then - sourceTextData.ClearFrom (endLine+1) - | None -> () - - result + let lineData = + // We can reuse the old data when + // 1. the line starts at the same overall position + // 2. the hash codes match + // 3. the start-of-line lex states are the same + let oldData = sourceTextData.[i] + match oldData with + | Some data when data.IsValid(textLine) && data.LexStateAtStartOfLine = lexState -> + data + | _ -> + // Otherwise, we recompute + let newData = scanSourceLine(sourceTokenizer, textLine, lexState) + sourceTextData.[i] <- Some newData + newData + + lexState <- lineData.LexStateAtEndOfLine + + if startLine <= i then + result.Add(lineData) + + // If necessary, invalidate all subsequent lines after endLine + if endLine < lines.Count - 1 then + match sourceTextData.[endLine+1] with + | Some data -> + if data.LexStateAtStartOfLine <> lexState then + sourceTextData.ClearFrom (endLine+1) + | None -> () + + result member this.GetColorizationData(documentKey: DocumentId, sourceText: SourceText, textSpan: TextSpan, fileName: string option, defines: string list, cancellationToken: CancellationToken) : List = diff --git a/vsintegration/src/FSharp.Editor/Diagnostics/TrailingSemicolonAnalyzer.fs b/vsintegration/src/FSharp.Editor/Diagnostics/TrailingSemicolonAnalyzer.fs index 8b01ca463b0..604395e9351 100644 --- a/vsintegration/src/FSharp.Editor/Diagnostics/TrailingSemicolonAnalyzer.fs +++ b/vsintegration/src/FSharp.Editor/Diagnostics/TrailingSemicolonAnalyzer.fs @@ -14,13 +14,13 @@ open Microsoft.CodeAnalysis.Text open Microsoft.CodeAnalysis.Diagnostics open Microsoft.FSharp.Compiler.SourceCodeServices -type private LineCheckSum = ImmutableArray +type private LineHash = int [] type internal TrailingSemicolonDiagnosticAnalyzer() = inherit DocumentDiagnosticAnalyzer() - let cacheByDocumentId = ConditionalWeakTable>() + let cacheByDocumentId = ConditionalWeakTable>() let getLexer(document: Document) = document.Project.Solution.Workspace.Services.GetService().Lexer @@ -70,7 +70,7 @@ type internal TrailingSemicolonDiagnosticAnalyzer() = lineDatas |> Seq.mapi (fun lineNumber lineData -> match cache.[lineNumber] with - | Some (oldCheckSum, oldLocation) when oldCheckSum = lineData.CheckSum -> oldLocation + | Some (oldHashCode, oldLocation) when oldHashCode = lineData.HashCode -> oldLocation | _ -> let location = match getTrailingSemicolonIndex (lines.[lineNumber].ToString()) with @@ -83,7 +83,7 @@ type internal TrailingSemicolonDiagnosticAnalyzer() = let textSpan = sourceText.Lines.GetTextSpan(linePositionSpan) let location = Location.Create(document.FilePath, textSpan, linePositionSpan) Some location - cache.[lineNumber] <- Some (lineData.CheckSum, location) + cache.[lineNumber] <- Some (lineData.HashCode, location) location) |> Seq.choose id From 85979934f2d3c2162b5df2b185cf3a703d220ad0 Mon Sep 17 00:00:00 2001 From: Vasily Kirichenko Date: Tue, 27 Dec 2016 13:35:10 +0300 Subject: [PATCH 12/16] 
Lexer optimizations --- .../src/FSharp.Editor/Common/Lexer.fs | 32 ++++++------------- 1 file changed, 10 insertions(+), 22 deletions(-) diff --git a/vsintegration/src/FSharp.Editor/Common/Lexer.fs b/vsintegration/src/FSharp.Editor/Common/Lexer.fs index 046b5e64b2a..d3232a53ed6 100644 --- a/vsintegration/src/FSharp.Editor/Common/Lexer.fs +++ b/vsintegration/src/FSharp.Editor/Common/Lexer.fs @@ -51,7 +51,7 @@ type private DraftToken = { Kind = kind; Token = token; RightColumn = token.LeftColumn + token.FullMatchedLength - 1 } type internal SourceLineData(lineStart: int, lexStateAtStartOfLine: FSharpTokenizerLexState, lexStateAtEndOfLine: FSharpTokenizerLexState, - hashCode: int, classifiedSpans: IReadOnlyList, tokens: ImmutableArray) = + hashCode: int, classifiedSpans: ImmutableArray, tokens: ImmutableArray) = member val LineStart = lineStart member val LexStateAtStartOfLine = lexStateAtStartOfLine member val LexStateAtEndOfLine = lexStateAtEndOfLine @@ -60,10 +60,7 @@ type internal SourceLineData(lineStart: int, lexStateAtStartOfLine: FSharpTokeni member val Tokens = tokens member data.IsValid(textLine: TextLine) = - let lineStartsMatch = data.LineStart = textLine.Start - let newHashCode = textLine.ToString().GetHashCode() - let hashCodeMatch = data.HashCode = newHashCode - lineStartsMatch && hashCodeMatch + data.LineStart = textLine.Start && data.HashCode = textLine.ToString().GetHashCode() override __.ToString() = sprintf "SourceLineData(line: %d, startLexState: %d, endLexState: %d, hashCode: %d, token count: %d)" lineStart lexStateAtStartOfLine lexStateAtEndOfLine hashCode tokens.Length @@ -87,19 +84,10 @@ type internal SourceTextData(approxLines: int) = /// Go backwards to find the last cached scanned line that is valid. member x.GetLastValidCachedLine (startLine: int, sourceLines: TextLineCollection) : int = - let rec loop (i: int) = - if i < 0 then i - else - let data = x.[i] - let found = - match data with - | Some data -> - let sourceLine = sourceLines.[i] - let isValid = data.IsValid(sourceLine) - isValid - | None -> false - if found then i else loop (i - 1) - loop startLine + let mutable i = startLine + while i >= 0 && (match x.[i] with Some data -> not (data.IsValid(sourceLines.[i])) | None -> true) do + i <- i - 1 + i [); System.Composition.Shared>] type internal Lexer() = @@ -118,7 +106,7 @@ type internal Lexer() = if tokenInfoOption.IsSome then let classificationType = CommonHelpers.compilerTokenToRoslynToken(tokenInfoOption.Value.ColorClass) for i = tokenInfoOption.Value.LeftColumn to tokenInfoOption.Value.RightColumn do - Array.set colorMap i classificationType + colorMap.[i] <- classificationType tokens.Add tokenInfoOption.Value tokenInfoOption @@ -129,7 +117,7 @@ type internal Lexer() = let mutable startPosition = 0 let mutable endPosition = startPosition - let classifiedSpans = new List() + let classifiedSpans = ImmutableArray.CreateBuilder() while startPosition < colorMap.Length do let classificationType = colorMap.[startPosition] @@ -137,10 +125,10 @@ type internal Lexer() = while endPosition < colorMap.Length && classificationType = colorMap.[endPosition] do endPosition <- endPosition + 1 let textSpan = new TextSpan(textLine.Start + startPosition, endPosition - startPosition) - classifiedSpans.Add(new ClassifiedSpan(classificationType, textSpan)) + classifiedSpans.Add(ClassifiedSpan(classificationType, textSpan)) startPosition <- endPosition - SourceLineData(textLine.Start, lexState, previousLexState.Value, lineContents.GetHashCode(), classifiedSpans, 
tokens.ToImmutable()) + SourceLineData(textLine.Start, lexState, previousLexState.Value, lineContents.GetHashCode(), classifiedSpans.ToImmutable(), tokens.ToImmutable()) /// Returns symbol at a given position. let getSymbolFromTokens (fileName: string, tokens: ImmutableArray, linePos: LinePosition, lineStr: string, lookupKind: SymbolLookupKind) : LexerSymbol option = From cd11da4b5a4d9a1d2436b36df7aafaca23fcdbdd Mon Sep 17 00:00:00 2001 From: Vasily Kirichenko Date: Tue, 27 Dec 2016 17:22:17 +0300 Subject: [PATCH 13/16] fixed: "Add Open" code fix suggests wrong stuff --- VisualFSharp.sln | 2 +- src/fsharp/vs/ServiceAssemblyContent.fs | 42 ++++++------ .../src/FSharp.Editor/CodeFixes/AddOpen.fs | 68 +++++++++++-------- 3 files changed, 60 insertions(+), 52 deletions(-) diff --git a/VisualFSharp.sln b/VisualFSharp.sln index 0834eca9cac..430af4b9eec 100644 --- a/VisualFSharp.sln +++ b/VisualFSharp.sln @@ -1,7 +1,7 @@  Microsoft Visual Studio Solution File, Format Version 12.00 # Visual Studio 15 -VisualStudioVersion = 15.0.26009.0 +VisualStudioVersion = 15.0.26014.0 MinimumVisualStudioVersion = 10.0.40219.1 Project("{F2A71F9B-5D33-465A-A702-920D77279786}") = "FSharp.Compiler", "src\fsharp\FSharp.Compiler\FSharp.Compiler.fsproj", "{2E4D67B4-522D-4CF7-97E4-BA940F0B18F3}" EndProject diff --git a/src/fsharp/vs/ServiceAssemblyContent.fs b/src/fsharp/vs/ServiceAssemblyContent.fs index e4ad54d0aa3..b38f84023ae 100644 --- a/src/fsharp/vs/ServiceAssemblyContent.fs +++ b/src/fsharp/vs/ServiceAssemblyContent.fs @@ -15,6 +15,8 @@ open Microsoft.FSharp.Compiler.Range type internal ShortIdent = string type Idents = ShortIdent[] +type MaybeUnresolvedIdent = { Ident: ShortIdent; Resolved: bool } +type MaybeUnresolvedIdents = MaybeUnresolvedIdent[] type IsAutoOpen = bool type ModuleKind = { IsAutoOpen: bool; HasModuleSuffix: bool } @@ -467,15 +469,18 @@ module internal Entity = | _ -> candidateNs.Length candidateNs.[0..nsCount - 1] - let tryCreate (targetNamespace: Idents option, targetScope: Idents, partiallyQualifiedName: Idents, - requiresQualifiedAccessParent: Idents option, autoOpenParent: Idents option, - candidateNamespace: Idents option, candidate: Idents) = + let tryCreate (targetNamespace: Idents option, targetScope: Idents, partiallyQualifiedName: MaybeUnresolvedIdents, + requiresQualifiedAccessParent: Idents option, autoOpenParent: Idents option, candidateNamespace: Idents option, candidate: Idents) = match candidate with | [||] -> [||] | _ -> partiallyQualifiedName |> Array.heads - |> Array.choose (fun parts -> + // the last part must be unresolved, otherwise we show false positive suggestions like + // "open System" for `let _ = System.DateTime.Naaaw`. Here only "Naaw" is unresolved. + |> Array.filter (fun x -> not (x.[x.Length - 1].Resolved)) + |> Array.choose (fun parts -> + let parts = parts |> Array.map (fun x -> x.Ident) if not (candidate |> Array.endsWith parts) then None else let identCount = parts.Length @@ -556,29 +561,25 @@ module internal ParsedInput = | SynConstructorArgs.Pats ps -> ps | SynConstructorArgs.NamePatPairs(xs, _) -> List.map snd xs - let internal longIdentToArray (longIdent: LongIdent): Idents = - longIdent |> Seq.map string |> Seq.toArray - - /// Returns all Idents and LongIdents found in an untyped AST. - let internal getLongIdents (input: ParsedInput option) : IDictionary = - let identsByEndPos = Dictionary() + /// Returns all `Ident`s and `LongIdent`s found in an untyped AST. 
+ let internal getLongIdents (input: ParsedInput option) : IDictionary = + let identsByEndPos = Dictionary() let addLongIdent (longIdent: LongIdent) = - let idents = longIdentToArray longIdent for ident in longIdent do - identsByEndPos.[ident.idRange.End] <- idents + identsByEndPos.[ident.idRange.End] <- longIdent let addLongIdentWithDots (LongIdentWithDots (longIdent, lids) as value) = - match longIdentToArray longIdent with - | [||] -> () - | [|_|] as idents -> identsByEndPos.[value.Range.End] <- idents + match longIdent with + | [] -> () + | [_] as idents -> identsByEndPos.[value.Range.End] <- idents | idents -> for dotRange in lids do identsByEndPos.[Range.mkPos dotRange.EndLine (dotRange.EndColumn - 1)] <- idents identsByEndPos.[value.Range.End] <- idents let addIdent (ident: Ident) = - identsByEndPos.[ident.idRange.End] <- [|ident.idText|] + identsByEndPos.[ident.idRange.End] <- [ident] let rec walkImplFileInput (ParsedImplFileInput(_, _, _, _, _, moduleOrNamespaceList, _)) = List.iter walkSynModuleOrNamespace moduleOrNamespaceList @@ -895,7 +896,7 @@ module internal ParsedInput = walkImplFileInput input | _ -> () //debug "%A" idents - identsByEndPos :> _ + upcast identsByEndPos let getLongIdentAt ast pos = let idents = getLongIdents (Some ast) @@ -1012,13 +1013,12 @@ module internal ParsedInput = |> Seq.sortBy (fun (m, _, _) -> -m.Length) |> Seq.toList - fun (partiallyQualifiedName: Idents) (requiresQualifiedAccessParent: Idents option, autoOpenParent: Idents option, - entityNamespace: Idents option, entity: Idents) -> + fun (partiallyQualifiedName: MaybeUnresolvedIdents) + (requiresQualifiedAccessParent: Idents option, autoOpenParent: Idents option, entityNamespace: Idents option, entity: Idents) -> match res with | None -> [||] | Some (scope, ns, pos) -> - Entity.tryCreate(ns, scope.Idents, partiallyQualifiedName, requiresQualifiedAccessParent, - autoOpenParent, entityNamespace, entity) + Entity.tryCreate(ns, scope.Idents, partiallyQualifiedName, requiresQualifiedAccessParent, autoOpenParent, entityNamespace, entity) |> Array.map (fun e -> e, match modules |> List.filter (fun (m, _, _) -> entity |> Array.startsWith m ) with diff --git a/vsintegration/src/FSharp.Editor/CodeFixes/AddOpen.fs b/vsintegration/src/FSharp.Editor/CodeFixes/AddOpen.fs index 1f42bbf1745..f730aefcf6a 100644 --- a/vsintegration/src/FSharp.Editor/CodeFixes/AddOpen.fs +++ b/vsintegration/src/FSharp.Editor/CodeFixes/AddOpen.fs @@ -105,8 +105,7 @@ type internal FSharpAddOpenCodeFixProvider ( checkerProvider: FSharpCheckerProvider, projectInfoManager: ProjectInfoManager, - assemblyContentProvider: AssemblyContentProvider, - lexer: Lexer + assemblyContentProvider: AssemblyContentProvider ) = inherit CodeFixProvider() let fixableDiagnosticIds = ["FS0039"] @@ -179,34 +178,43 @@ type internal FSharpAddOpenCodeFixProvider | None, _ | _, FSharpCheckFileAnswer.Aborted -> () | Some parsedInput, FSharpCheckFileAnswer.Succeeded checkFileResults -> - let textLinePos = sourceText.Lines.GetLinePosition context.Span.Start - let defines = CompilerEnvironment.GetCompilationDefinesForEditing(context.Document.FilePath, options.OtherOptions |> Seq.toList) - let symbol = lexer.GetSymbolAtPosition(context.Document.Id, sourceText, context.Span.Start, context.Document.FilePath, defines, SymbolLookupKind.Fuzzy) - match symbol with - | Some symbol -> - let pos = Pos.fromZ textLinePos.Line textLinePos.Character - let isAttribute = ParsedInput.getEntityKind parsedInput pos = Some EntityKind.Attribute - let entities = - 
assemblyContentProvider.GetAllEntitiesInProjectAndReferencedAssemblies checkFileResults - |> List.map (fun e -> - [ yield e.TopRequireQualifiedAccessParent, e.AutoOpenParent, e.Namespace, e.CleanedIdents - if isAttribute then - let lastIdent = e.CleanedIdents.[e.CleanedIdents.Length - 1] - if lastIdent.EndsWith "Attribute" && e.Kind LookupType.Precise = EntityKind.Attribute then - yield - e.TopRequireQualifiedAccessParent, - e.AutoOpenParent, - e.Namespace, - e.CleanedIdents - |> Array.replace (e.CleanedIdents.Length - 1) (lastIdent.Substring(0, lastIdent.Length - 9)) ]) - |> List.concat - - let idents = ParsedInput.getLongIdentAt parsedInput (Range.mkPos pos.Line symbol.RightColumn) - match idents with - | Some idents -> - let createEntity = ParsedInput.tryFindInsertionContext pos.Line parsedInput idents - return entities |> Seq.map createEntity |> Seq.concat |> Seq.toList |> getSuggestions context - | None -> () + let unresolvedIdentRange = + let startLinePos = sourceText.Lines.GetLinePosition context.Span.Start + let startPos = Pos.fromZ startLinePos.Line startLinePos.Character + let endLinePos = sourceText.Lines.GetLinePosition context.Span.End + let endPos = Pos.fromZ endLinePos.Line endLinePos.Character + Range.mkRange context.Document.FilePath startPos endPos + + let isAttribute = ParsedInput.getEntityKind parsedInput unresolvedIdentRange.Start = Some EntityKind.Attribute + let entities = + assemblyContentProvider.GetAllEntitiesInProjectAndReferencedAssemblies checkFileResults + |> List.collect (fun e -> + [ yield e.TopRequireQualifiedAccessParent, e.AutoOpenParent, e.Namespace, e.CleanedIdents + if isAttribute then + let lastIdent = e.CleanedIdents.[e.CleanedIdents.Length - 1] + if lastIdent.EndsWith "Attribute" && e.Kind LookupType.Precise = EntityKind.Attribute then + yield + e.TopRequireQualifiedAccessParent, + e.AutoOpenParent, + e.Namespace, + e.CleanedIdents + |> Array.replace (e.CleanedIdents.Length - 1) (lastIdent.Substring(0, lastIdent.Length - 9)) ]) + + let longIdent = ParsedInput.getLongIdentAt parsedInput unresolvedIdentRange.End + + let maybeUnresolvedIdents = + longIdent + |> Option.map (fun longIdent -> + longIdent + |> List.map (fun ident -> + { Ident = ident.idText + Resolved = not (ident.idRange = unresolvedIdentRange) }) + |> List.toArray) + + match maybeUnresolvedIdents with + | Some maybeUnresolvedIdents -> + let createEntity = ParsedInput.tryFindInsertionContext unresolvedIdentRange.StartLine parsedInput maybeUnresolvedIdents + return entities |> Seq.map createEntity |> Seq.concat |> Seq.toList |> getSuggestions context | None -> () | None -> () } |> CommonRoslynHelpers.StartAsyncUnitAsTask(context.CancellationToken) From aebf38ce2f454628c49450ec0066ebc7d34d5b0b Mon Sep 17 00:00:00 2001 From: Vasily Kirichenko Date: Wed, 28 Dec 2016 14:00:20 +0300 Subject: [PATCH 14/16] fix tests compilation --- .../tests/unittests/ColorizationServiceTests.fs | 2 +- .../tests/unittests/CompletionProviderTests.fs | 10 +++++----- .../tests/unittests/DocumentHighlightsServiceTests.fs | 2 +- .../tests/unittests/GoToDefinitionServiceTests.fs | 2 +- .../tests/unittests/HelpContextServiceTests.fs | 2 +- .../tests/unittests/LanguageDebugInfoServiceTests.fs | 2 +- .../tests/unittests/QuickInfoProviderTests.fs | 2 +- 7 files changed, 11 insertions(+), 11 deletions(-) diff --git a/vsintegration/tests/unittests/ColorizationServiceTests.fs b/vsintegration/tests/unittests/ColorizationServiceTests.fs index 6213835e3f8..8bc9159e0ca 100644 --- 
a/vsintegration/tests/unittests/ColorizationServiceTests.fs +++ b/vsintegration/tests/unittests/ColorizationServiceTests.fs @@ -18,7 +18,7 @@ type ColorizationServiceTests() = let textSpan = TextSpan(0, fileContents.Length) let fileName = if isScriptFile.IsSome && isScriptFile.Value then "test.fsx" else "test.fs" let documentId = DocumentId.CreateNewId(ProjectId.CreateNewId()) - let tokens = CommonHelpers.getColorizationData(documentId, SourceText.From(fileContents), textSpan, Some(fileName), defines, CancellationToken.None) + let tokens = Lexer().GetColorizationData(documentId, SourceText.From(fileContents), textSpan, Some(fileName), defines, CancellationToken.None) let markerPosition = fileContents.IndexOf(marker) Assert.IsTrue(markerPosition >= 0, "Cannot find marker '{0}' in file contents", marker) (tokens, markerPosition) diff --git a/vsintegration/tests/unittests/CompletionProviderTests.fs b/vsintegration/tests/unittests/CompletionProviderTests.fs index 361bd25de61..0a18965d501 100644 --- a/vsintegration/tests/unittests/CompletionProviderTests.fs +++ b/vsintegration/tests/unittests/CompletionProviderTests.fs @@ -87,7 +87,7 @@ System.Console.WriteLine(x + y) let caretPosition = fileContents.IndexOf(marker) + marker.Length let documentId = DocumentId.CreateNewId(ProjectId.CreateNewId()) let getInfo() = documentId, filePath, [] - let triggered = FSharpCompletionProvider.ShouldTriggerCompletionAux(SourceText.From(fileContents), caretPosition, CompletionTriggerKind.Insertion, getInfo) + let triggered = FSharpCompletionProvider.ShouldTriggerCompletionAux(Lexer(), SourceText.From(fileContents), caretPosition, CompletionTriggerKind.Insertion, getInfo) Assert.AreEqual(shouldBeTriggered, triggered, "FSharpCompletionProvider.ShouldTriggerCompletionAux() should compute the correct result") [] @@ -97,7 +97,7 @@ let ShouldNotTriggerCompletionAfterAnyTriggerOtherThanInsertion() = let caretPosition = fileContents.IndexOf("System.") let documentId = DocumentId.CreateNewId(ProjectId.CreateNewId()) let getInfo() = documentId, filePath, [] - let triggered = FSharpCompletionProvider.ShouldTriggerCompletionAux(SourceText.From(fileContents), caretPosition, triggerKind, getInfo) + let triggered = FSharpCompletionProvider.ShouldTriggerCompletionAux(Lexer(), SourceText.From(fileContents), caretPosition, triggerKind, getInfo) Assert.IsFalse(triggered, "FSharpCompletionProvider.ShouldTriggerCompletionAux() should not trigger") [] @@ -106,7 +106,7 @@ let ShouldNotTriggerCompletionInStringLiterals() = let caretPosition = fileContents.IndexOf("System.") let documentId = DocumentId.CreateNewId(ProjectId.CreateNewId()) let getInfo() = documentId, filePath, [] - let triggered = FSharpCompletionProvider.ShouldTriggerCompletionAux(SourceText.From(fileContents), caretPosition, CompletionTriggerKind.Insertion, getInfo) + let triggered = FSharpCompletionProvider.ShouldTriggerCompletionAux(Lexer(), SourceText.From(fileContents), caretPosition, CompletionTriggerKind.Insertion, getInfo) Assert.IsFalse(triggered, "FSharpCompletionProvider.ShouldTriggerCompletionAux() should not trigger") [] @@ -120,7 +120,7 @@ System.Console.WriteLine() let caretPosition = fileContents.IndexOf("System.") let documentId = DocumentId.CreateNewId(ProjectId.CreateNewId()) let getInfo() = documentId, filePath, [] - let triggered = FSharpCompletionProvider.ShouldTriggerCompletionAux(SourceText.From(fileContents), caretPosition, CompletionTriggerKind.Insertion, getInfo) + let triggered = FSharpCompletionProvider.ShouldTriggerCompletionAux(Lexer(), 
SourceText.From(fileContents), caretPosition, CompletionTriggerKind.Insertion, getInfo) Assert.IsFalse(triggered, "FSharpCompletionProvider.ShouldTriggerCompletionAux() should not trigger") [] @@ -133,7 +133,7 @@ System.Console.WriteLine() let caretPosition = fileContents.IndexOf("System.") let documentId = DocumentId.CreateNewId(ProjectId.CreateNewId()) let getInfo() = documentId, filePath, [] - let triggered = FSharpCompletionProvider.ShouldTriggerCompletionAux(SourceText.From(fileContents), caretPosition, CompletionTriggerKind.Insertion, getInfo) + let triggered = FSharpCompletionProvider.ShouldTriggerCompletionAux(Lexer(), SourceText.From(fileContents), caretPosition, CompletionTriggerKind.Insertion, getInfo) Assert.IsFalse(triggered, "FSharpCompletionProvider.ShouldTriggerCompletionAux() should not trigger") [] diff --git a/vsintegration/tests/unittests/DocumentHighlightsServiceTests.fs b/vsintegration/tests/unittests/DocumentHighlightsServiceTests.fs index 35e6b5f2464..9a6c6119cde 100644 --- a/vsintegration/tests/unittests/DocumentHighlightsServiceTests.fs +++ b/vsintegration/tests/unittests/DocumentHighlightsServiceTests.fs @@ -49,7 +49,7 @@ let internal options = { let private getSpans (sourceText: SourceText) (caretPosition: int) = let documentId = DocumentId.CreateNewId(ProjectId.CreateNewId()) - FSharpDocumentHighlightsService.GetDocumentHighlights(FSharpChecker.Instance, documentId, sourceText, filePath, caretPosition, [], options, 0) + FSharpDocumentHighlightsService.GetDocumentHighlights(Lexer(), FSharpChecker.Instance, documentId, sourceText, filePath, caretPosition, [], options, 0) |> Async.RunSynchronously let private span sourceText isDefinition (startLine, startCol) (endLine, endCol) = diff --git a/vsintegration/tests/unittests/GoToDefinitionServiceTests.fs b/vsintegration/tests/unittests/GoToDefinitionServiceTests.fs index 2510be923ea..b8c9fd7f03b 100644 --- a/vsintegration/tests/unittests/GoToDefinitionServiceTests.fs +++ b/vsintegration/tests/unittests/GoToDefinitionServiceTests.fs @@ -99,7 +99,7 @@ let _ = Module1.foo 1 let caretPosition = fileContents.IndexOf(caretMarker) + caretMarker.Length - 1 // inside the marker let documentId = DocumentId.CreateNewId(ProjectId.CreateNewId()) let actual = - FSharpGoToDefinitionService.FindDefinition(FSharpChecker.Instance, documentId, SourceText.From(fileContents), filePath, caretPosition, [], options, 0) + FSharpGoToDefinitionService.FindDefinition(Lexer(), FSharpChecker.Instance, documentId, SourceText.From(fileContents), filePath, caretPosition, [], options, 0) |> Async.RunSynchronously |> Option.map (fun range -> (range.StartLine, range.EndLine, range.StartColumn, range.EndColumn)) diff --git a/vsintegration/tests/unittests/HelpContextServiceTests.fs b/vsintegration/tests/unittests/HelpContextServiceTests.fs index 6752fc0617d..5ac52241cc9 100644 --- a/vsintegration/tests/unittests/HelpContextServiceTests.fs +++ b/vsintegration/tests/unittests/HelpContextServiceTests.fs @@ -57,7 +57,7 @@ type HelpContextServiceTests() = let span = TextSpan(marker, 0) let textLine = sourceText.Lines.GetLineFromPosition(marker) let documentId = DocumentId.CreateNewId(ProjectId.CreateNewId()) - let tokens = CommonHelpers.getColorizationData(documentId, sourceText, textLine.Span, Some "test.fs", [], CancellationToken.None) + let tokens = Lexer().GetColorizationData(documentId, sourceText, textLine.Span, Some "test.fs", [], CancellationToken.None) yield FSharpHelpContextService.GetHelpTerm(FSharpChecker.Instance, sourceText, fileName, newOptions, 
span, tokens, version) |> Async.RunSynchronously diff --git a/vsintegration/tests/unittests/LanguageDebugInfoServiceTests.fs b/vsintegration/tests/unittests/LanguageDebugInfoServiceTests.fs index b904a120fbb..18748a156bf 100644 --- a/vsintegration/tests/unittests/LanguageDebugInfoServiceTests.fs +++ b/vsintegration/tests/unittests/LanguageDebugInfoServiceTests.fs @@ -54,7 +54,7 @@ let main argv = let sourceText = SourceText.From(code) let documentId = DocumentId.CreateNewId(ProjectId.CreateNewId()) - let tokens = CommonHelpers.getColorizationData(documentId, sourceText, TextSpan.FromBounds(0, sourceText.Length), Some(fileName), defines, CancellationToken.None) + let tokens = Lexer().GetColorizationData(documentId, sourceText, TextSpan.FromBounds(0, sourceText.Length), Some(fileName), defines, CancellationToken.None) let actualDataTipSpanOption = FSharpLanguageDebugInfoService.GetDataTipInformation(sourceText, searchPosition, tokens) match actualDataTipSpanOption with diff --git a/vsintegration/tests/unittests/QuickInfoProviderTests.fs b/vsintegration/tests/unittests/QuickInfoProviderTests.fs index 677dfc0ce13..308b479c398 100644 --- a/vsintegration/tests/unittests/QuickInfoProviderTests.fs +++ b/vsintegration/tests/unittests/QuickInfoProviderTests.fs @@ -97,7 +97,7 @@ Full name: System.Console" let getInfo() = documentId, filePath, [] let quickInfo = - FSharpQuickInfoProvider.ProvideQuickInfo(FSharpChecker.Instance, documentId, SourceText.From(fileContents), filePath, caretPosition, options, 0) + FSharpQuickInfoProvider.ProvideQuickInfo(Lexer(), FSharpChecker.Instance, documentId, SourceText.From(fileContents), filePath, caretPosition, options, 0) |> Async.RunSynchronously let actual = quickInfo |> Option.map (fun (text, _) -> getQuickInfoText text) From d45d524faeca318a89e75cab3e3bdf2f6af396a5 Mon Sep 17 00:00:00 2001 From: Vasily Kirichenko Date: Thu, 29 Dec 2016 21:18:10 +0300 Subject: [PATCH 15/16] fix after merge --- .../CodeFixes/ImplementInterfaceCodeFixProvider.fs | 1 + .../{CodeFix => CodeFixes}/ProposeUppercaseLabel.fs | 6 ++++-- vsintegration/src/FSharp.Editor/Common/SymbolHelpers.fs | 4 ++-- vsintegration/src/FSharp.Editor/FSharp.Editor.fsproj | 9 ++------- 4 files changed, 9 insertions(+), 11 deletions(-) rename vsintegration/src/FSharp.Editor/{CodeFix => CodeFixes}/ProposeUppercaseLabel.fs (83%) diff --git a/vsintegration/src/FSharp.Editor/CodeFixes/ImplementInterfaceCodeFixProvider.fs b/vsintegration/src/FSharp.Editor/CodeFixes/ImplementInterfaceCodeFixProvider.fs index 0bbf483bfdb..4e55466a18a 100644 --- a/vsintegration/src/FSharp.Editor/CodeFixes/ImplementInterfaceCodeFixProvider.fs +++ b/vsintegration/src/FSharp.Editor/CodeFixes/ImplementInterfaceCodeFixProvider.fs @@ -181,6 +181,7 @@ type internal FSharpImplementInterfaceCodeFixProvider | _ -> Some context.Span.End let! 
interfaceState = queryInterfaceState appendBracketAt interfacePos tokens parsedInput + let symbol = lexer.GetSymbolAtPosition(context.Document.Id, sourceText, fixupPosition, context.Document.FilePath, defines, SymbolLookupKind.Fuzzy) match interfaceState, symbol with | Some state, Some symbol -> let fcsTextLineNumber = textLine.LineNumber + 1 diff --git a/vsintegration/src/FSharp.Editor/CodeFix/ProposeUppercaseLabel.fs b/vsintegration/src/FSharp.Editor/CodeFixes/ProposeUppercaseLabel.fs similarity index 83% rename from vsintegration/src/FSharp.Editor/CodeFix/ProposeUppercaseLabel.fs rename to vsintegration/src/FSharp.Editor/CodeFixes/ProposeUppercaseLabel.fs index 1483dd8e0f1..118e53a071b 100644 --- a/vsintegration/src/FSharp.Editor/CodeFix/ProposeUppercaseLabel.fs +++ b/vsintegration/src/FSharp.Editor/CodeFixes/ProposeUppercaseLabel.fs @@ -13,7 +13,8 @@ type internal FSharpProposeUpperCaseLabelCodeFixProvider [] ( checkerProvider: FSharpCheckerProvider, - projectInfoManager: ProjectInfoManager + projectInfoManager: ProjectInfoManager, + lexer: Lexer ) = inherit CodeFixProvider() let fixableDiagnosticIds = ["FS0053"] @@ -23,7 +24,8 @@ type internal FSharpProposeUpperCaseLabelCodeFixProvider override __.RegisterCodeFixesAsync context : Task = asyncMaybe { let textChanger (originalText: string) = originalText.[0].ToString().ToUpper() + originalText.Substring(1) - let! solutionChanger, originalText = SymbolHelpers.changeAllSymbolReferences(context.Document, context.Span, textChanger, projectInfoManager, checkerProvider.Checker) + let! solutionChanger, originalText = + SymbolHelpers.changeAllSymbolReferences(context.Document, context.Span, textChanger, projectInfoManager, checkerProvider.Checker, lexer) let title = FSComp.SR.replaceWithSuggestion (textChanger originalText) context.RegisterCodeFix( CodeAction.Create(title, solutionChanger, title), diff --git a/vsintegration/src/FSharp.Editor/Common/SymbolHelpers.fs b/vsintegration/src/FSharp.Editor/Common/SymbolHelpers.fs index c166a32201b..b21398df539 100644 --- a/vsintegration/src/FSharp.Editor/Common/SymbolHelpers.fs +++ b/vsintegration/src/FSharp.Editor/Common/SymbolHelpers.fs @@ -60,7 +60,7 @@ module internal SymbolHelpers = type OriginalText = string - let changeAllSymbolReferences (document: Document, symbolSpan: TextSpan, textChanger: string -> string, projectInfoManager: ProjectInfoManager, checker: FSharpChecker) + let changeAllSymbolReferences (document: Document, symbolSpan: TextSpan, textChanger: string -> string, projectInfoManager: ProjectInfoManager, checker: FSharpChecker, lexer: Lexer) : Async<(Func> * OriginalText) option> = asyncMaybe { do! Option.guard (symbolSpan.Length > 0) @@ -70,7 +70,7 @@ module internal SymbolHelpers = do! Option.guard (originalText.Length > 0) let! options = projectInfoManager.TryGetOptionsForEditingDocumentOrProject document let defines = CompilerEnvironment.GetCompilationDefinesForEditing(document.Name, options.OtherOptions |> Seq.toList) - let! symbol = CommonHelpers.getSymbolAtPosition(document.Id, sourceText, symbolSpan.Start, document.FilePath, defines, SymbolLookupKind.Fuzzy) + let! symbol = lexer.GetSymbolAtPosition(document.Id, sourceText, symbolSpan.Start, document.FilePath, defines, SymbolLookupKind.Fuzzy) let! 
_, checkFileResults = checker.ParseAndCheckDocument(document, options) let textLine = sourceText.Lines.GetLineFromPosition(symbolSpan.Start) let textLinePos = sourceText.Lines.GetLinePosition(symbolSpan.Start) diff --git a/vsintegration/src/FSharp.Editor/FSharp.Editor.fsproj b/vsintegration/src/FSharp.Editor/FSharp.Editor.fsproj index b562be43f2c..f05859c3e7c 100644 --- a/vsintegration/src/FSharp.Editor/FSharp.Editor.fsproj +++ b/vsintegration/src/FSharp.Editor/FSharp.Editor.fsproj @@ -63,19 +63,14 @@ + - - - - - - - + From 18a16677fcd03481a332e59f40b3055bc4642743 Mon Sep 17 00:00:00 2001 From: Vasily Kirichenko Date: Fri, 30 Dec 2016 11:09:19 +0300 Subject: [PATCH 16/16] refactoring --- src/fsharp/vs/ServiceAssemblyContent.fs | 371 +----------------- src/fsharp/vs/ServiceUntypedParse.fs | 360 ++++++++++++++++- src/fsharp/vs/ServiceUntypedParse.fsi | 2 + .../src/FSharp.Editor/CodeFixes/AddOpen.fs | 12 +- .../Common/CommonRoslynHelpers.fs | 7 + 5 files changed, 376 insertions(+), 376 deletions(-) diff --git a/src/fsharp/vs/ServiceAssemblyContent.fs b/src/fsharp/vs/ServiceAssemblyContent.fs index 41f6f09a3c1..c6c791f0721 100644 --- a/src/fsharp/vs/ServiceAssemblyContent.fs +++ b/src/fsharp/vs/ServiceAssemblyContent.fs @@ -431,13 +431,11 @@ type internal EntityCache() = member __.Clear() = dic.Clear() member x.Locking f = lock dic <| fun _ -> f (x :> IAssemblyContentCache) -type internal LongIdent = string - type internal Entity = - { FullRelativeName: LongIdent - Qualifier: LongIdent - Namespace: LongIdent option - Name: LongIdent } + { FullRelativeName: string + Qualifier: string + Namespace: string option + Name: string } override x.ToString() = sprintf "%A" x [] @@ -535,367 +533,8 @@ type internal InsertContext = Pos: Point } module internal ParsedInput = - open Microsoft.FSharp.Compiler - open Microsoft.FSharp.Compiler.Ast - - type private EndLine = int - - /// An recursive pattern that collect all sequential expressions to avoid StackOverflowException - let rec (|Sequentials|_|) = function - | SynExpr.Sequential(_, _, e, Sequentials es, _) -> - Some(e::es) - | SynExpr.Sequential(_, _, e1, e2, _) -> - Some [e1; e2] - | _ -> None - - let (|ConstructorPats|) = function - | SynConstructorArgs.Pats ps -> ps - | SynConstructorArgs.NamePatPairs(xs, _) -> List.map snd xs - - /// Returns all `Ident`s and `LongIdent`s found in an untyped AST. 
- let internal getLongIdents (input: ParsedInput option) : IDictionary = - let identsByEndPos = Dictionary() - - let addLongIdent (longIdent: LongIdent) = - for ident in longIdent do - identsByEndPos.[ident.idRange.End] <- longIdent - - let addLongIdentWithDots (LongIdentWithDots (longIdent, lids) as value) = - match longIdent with - | [] -> () - | [_] as idents -> identsByEndPos.[value.Range.End] <- idents - | idents -> - for dotRange in lids do - identsByEndPos.[Range.mkPos dotRange.EndLine (dotRange.EndColumn - 1)] <- idents - identsByEndPos.[value.Range.End] <- idents - - let addIdent (ident: Ident) = - identsByEndPos.[ident.idRange.End] <- [ident] - - let rec walkImplFileInput (ParsedImplFileInput(_, _, _, _, _, moduleOrNamespaceList, _)) = - List.iter walkSynModuleOrNamespace moduleOrNamespaceList - - and walkSynModuleOrNamespace (SynModuleOrNamespace(_, _, _, decls, _, attrs, _, _)) = - List.iter walkAttribute attrs - List.iter walkSynModuleDecl decls - - and walkAttribute (attr: SynAttribute) = - addLongIdentWithDots attr.TypeName - walkExpr attr.ArgExpr - - and walkTyparDecl (SynTyparDecl.TyparDecl (attrs, typar)) = - List.iter walkAttribute attrs - walkTypar typar - - and walkTypeConstraint = function - | SynTypeConstraint.WhereTyparIsValueType (t, _) - | SynTypeConstraint.WhereTyparIsReferenceType (t, _) - | SynTypeConstraint.WhereTyparIsUnmanaged (t, _) - | SynTypeConstraint.WhereTyparSupportsNull (t, _) - | SynTypeConstraint.WhereTyparIsComparable (t, _) - | SynTypeConstraint.WhereTyparIsEquatable (t, _) -> walkTypar t - | SynTypeConstraint.WhereTyparDefaultsToType (t, ty, _) - | SynTypeConstraint.WhereTyparSubtypeOfType (t, ty, _) -> walkTypar t; walkType ty - | SynTypeConstraint.WhereTyparIsEnum (t, ts, _) - | SynTypeConstraint.WhereTyparIsDelegate (t, ts, _) -> walkTypar t; List.iter walkType ts - | SynTypeConstraint.WhereTyparSupportsMember (ts, sign, _) -> List.iter walkType ts; walkMemberSig sign - - and walkPat = function - | SynPat.Tuple (pats, _) - | SynPat.ArrayOrList (_, pats, _) - | SynPat.Ands (pats, _) -> List.iter walkPat pats - | SynPat.Named (pat, ident, _, _, _) -> - walkPat pat - addIdent ident - | SynPat.Typed (pat, t, _) -> - walkPat pat - walkType t - | SynPat.Attrib (pat, attrs, _) -> - walkPat pat - List.iter walkAttribute attrs - | SynPat.Or (pat1, pat2, _) -> List.iter walkPat [pat1; pat2] - | SynPat.LongIdent (ident, _, typars, ConstructorPats pats, _, _) -> - addLongIdentWithDots ident - typars - |> Option.iter (fun (SynValTyparDecls (typars, _, constraints)) -> - List.iter walkTyparDecl typars - List.iter walkTypeConstraint constraints) - List.iter walkPat pats - | SynPat.Paren (pat, _) -> walkPat pat - | SynPat.IsInst (t, _) -> walkType t - | SynPat.QuoteExpr(e, _) -> walkExpr e - | _ -> () - - and walkTypar (Typar (_, _, _)) = () - - and walkBinding (SynBinding.Binding (_, _, _, _, attrs, _, _, pat, returnInfo, e, _, _)) = - List.iter walkAttribute attrs - walkPat pat - walkExpr e - returnInfo |> Option.iter (fun (SynBindingReturnInfo (t, _, _)) -> walkType t) - - and walkInterfaceImpl (InterfaceImpl(_, bindings, _)) = List.iter walkBinding bindings - - and walkIndexerArg = function - | SynIndexerArg.One e -> walkExpr e - | SynIndexerArg.Two (e1, e2) -> List.iter walkExpr [e1; e2] - - and walkType = function - | SynType.Array (_, t, _) - | SynType.HashConstraint (t, _) - | SynType.MeasurePower (t, _, _) -> walkType t - | SynType.Fun (t1, t2, _) - | SynType.MeasureDivide (t1, t2, _) -> walkType t1; walkType t2 - | SynType.LongIdent ident -> 
addLongIdentWithDots ident - | SynType.App (ty, _, types, _, _, _, _) -> walkType ty; List.iter walkType types - | SynType.LongIdentApp (_, _, _, types, _, _, _) -> List.iter walkType types - | SynType.Tuple (ts, _) -> ts |> List.iter (fun (_, t) -> walkType t) - | SynType.WithGlobalConstraints (t, typeConstraints, _) -> - walkType t; List.iter walkTypeConstraint typeConstraints - | _ -> () - - and walkClause (Clause (pat, e1, e2, _, _)) = - walkPat pat - walkExpr e2 - e1 |> Option.iter walkExpr - - and walkSimplePats = function - | SynSimplePats.SimplePats (pats, _) -> List.iter walkSimplePat pats - | SynSimplePats.Typed (pats, ty, _) -> - walkSimplePats pats - walkType ty - - and walkExpr = function - | SynExpr.Paren (e, _, _, _) - | SynExpr.Quote (_, _, e, _, _) - | SynExpr.Typed (e, _, _) - | SynExpr.InferredUpcast (e, _) - | SynExpr.InferredDowncast (e, _) - | SynExpr.AddressOf (_, e, _, _) - | SynExpr.DoBang (e, _) - | SynExpr.YieldOrReturn (_, e, _) - | SynExpr.ArrayOrListOfSeqExpr (_, e, _) - | SynExpr.CompExpr (_, _, e, _) - | SynExpr.Do (e, _) - | SynExpr.Assert (e, _) - | SynExpr.Lazy (e, _) - | SynExpr.YieldOrReturnFrom (_, e, _) -> walkExpr e - | SynExpr.Lambda (_, _, pats, e, _) -> - walkSimplePats pats - walkExpr e - | SynExpr.New (_, t, e, _) - | SynExpr.TypeTest (e, t, _) - | SynExpr.Upcast (e, t, _) - | SynExpr.Downcast (e, t, _) -> walkExpr e; walkType t - | SynExpr.Tuple (es, _, _) - | Sequentials es - | SynExpr.ArrayOrList (_, es, _) -> List.iter walkExpr es - | SynExpr.App (_, _, e1, e2, _) - | SynExpr.TryFinally (e1, e2, _, _, _) - | SynExpr.While (_, e1, e2, _) -> List.iter walkExpr [e1; e2] - | SynExpr.Record (_, _, fields, _) -> - fields |> List.iter (fun ((ident, _), e, _) -> - addLongIdentWithDots ident - e |> Option.iter walkExpr) - | SynExpr.Ident ident -> addIdent ident - | SynExpr.ObjExpr(ty, argOpt, bindings, ifaces, _, _) -> - argOpt |> Option.iter (fun (e, ident) -> - walkExpr e - ident |> Option.iter addIdent) - walkType ty - List.iter walkBinding bindings - List.iter walkInterfaceImpl ifaces - | SynExpr.LongIdent (_, ident, _, _) -> addLongIdentWithDots ident - | SynExpr.For (_, ident, e1, _, e2, e3, _) -> - addIdent ident - List.iter walkExpr [e1; e2; e3] - | SynExpr.ForEach (_, _, _, pat, e1, e2, _) -> - walkPat pat - List.iter walkExpr [e1; e2] - | SynExpr.MatchLambda (_, _, synMatchClauseList, _, _) -> - List.iter walkClause synMatchClauseList - | SynExpr.Match (_, e, synMatchClauseList, _, _) -> - walkExpr e - List.iter walkClause synMatchClauseList - | SynExpr.TypeApp (e, _, tys, _, _, _, _) -> - List.iter walkType tys; walkExpr e - | SynExpr.LetOrUse (_, _, bindings, e, _) -> - List.iter walkBinding bindings; walkExpr e - | SynExpr.TryWith (e, _, clauses, _, _, _, _) -> - List.iter walkClause clauses; walkExpr e - | SynExpr.IfThenElse (e1, e2, e3, _, _, _, _) -> - List.iter walkExpr [e1; e2] - e3 |> Option.iter walkExpr - | SynExpr.LongIdentSet (ident, e, _) - | SynExpr.DotGet (e, _, ident, _) -> - addLongIdentWithDots ident - walkExpr e - | SynExpr.DotSet (e1, idents, e2, _) -> - walkExpr e1 - addLongIdentWithDots idents - walkExpr e2 - | SynExpr.DotIndexedGet (e, args, _, _) -> - walkExpr e - List.iter walkIndexerArg args - | SynExpr.DotIndexedSet (e1, args, e2, _, _, _) -> - walkExpr e1 - List.iter walkIndexerArg args - walkExpr e2 - | SynExpr.NamedIndexedPropertySet (ident, e1, e2, _) -> - addLongIdentWithDots ident - List.iter walkExpr [e1; e2] - | SynExpr.DotNamedIndexedPropertySet (e1, ident, e2, e3, _) -> - addLongIdentWithDots ident - 
List.iter walkExpr [e1; e2; e3] - | SynExpr.JoinIn (e1, _, e2, _) -> List.iter walkExpr [e1; e2] - | SynExpr.LetOrUseBang (_, _, _, pat, e1, e2, _) -> - walkPat pat - List.iter walkExpr [e1; e2] - | SynExpr.TraitCall (ts, sign, e, _) -> - List.iter walkTypar ts - walkMemberSig sign - walkExpr e - | SynExpr.Const (SynConst.Measure(_, m), _) -> walkMeasure m - | _ -> () - - and walkMeasure = function - | SynMeasure.Product (m1, m2, _) - | SynMeasure.Divide (m1, m2, _) -> walkMeasure m1; walkMeasure m2 - | SynMeasure.Named (longIdent, _) -> addLongIdent longIdent - | SynMeasure.Seq (ms, _) -> List.iter walkMeasure ms - | SynMeasure.Power (m, _, _) -> walkMeasure m - | SynMeasure.Var (ty, _) -> walkTypar ty - | SynMeasure.One - | SynMeasure.Anon _ -> () - - and walkSimplePat = function - | SynSimplePat.Attrib (pat, attrs, _) -> - walkSimplePat pat - List.iter walkAttribute attrs - | SynSimplePat.Typed(pat, t, _) -> - walkSimplePat pat - walkType t - | _ -> () - - and walkField (SynField.Field(attrs, _, _, t, _, _, _, _)) = - List.iter walkAttribute attrs - walkType t - - and walkValSig (SynValSig.ValSpfn(attrs, _, _, t, SynValInfo(argInfos, argInfo), _, _, _, _, _, _)) = - List.iter walkAttribute attrs - walkType t - argInfo :: (argInfos |> List.concat) - |> List.map (fun (SynArgInfo(attrs, _, _)) -> attrs) - |> List.concat - |> List.iter walkAttribute - - and walkMemberSig = function - | SynMemberSig.Inherit (t, _) - | SynMemberSig.Interface(t, _) -> walkType t - | SynMemberSig.Member(vs, _, _) -> walkValSig vs - | SynMemberSig.ValField(f, _) -> walkField f - | SynMemberSig.NestedType(SynTypeDefnSig.TypeDefnSig (info, repr, memberSigs, _), _) -> - let isTypeExtensionOrAlias = - match repr with - | SynTypeDefnSigRepr.Simple(SynTypeDefnSimpleRepr.TypeAbbrev _, _) - | SynTypeDefnSigRepr.ObjectModel(SynTypeDefnKind.TyconAbbrev, _, _) - | SynTypeDefnSigRepr.ObjectModel(SynTypeDefnKind.TyconAugmentation, _, _) -> true - | _ -> false - walkComponentInfo isTypeExtensionOrAlias info - walkTypeDefnSigRepr repr - List.iter walkMemberSig memberSigs - - and walkMember = function - | SynMemberDefn.AbstractSlot (valSig, _, _) -> walkValSig valSig - | SynMemberDefn.Member (binding, _) -> walkBinding binding - | SynMemberDefn.ImplicitCtor (_, attrs, pats, _, _) -> - List.iter walkAttribute attrs - List.iter walkSimplePat pats - | SynMemberDefn.ImplicitInherit (t, e, _, _) -> walkType t; walkExpr e - | SynMemberDefn.LetBindings (bindings, _, _, _) -> List.iter walkBinding bindings - | SynMemberDefn.Interface (t, members, _) -> - walkType t - members |> Option.iter (List.iter walkMember) - | SynMemberDefn.Inherit (t, _, _) -> walkType t - | SynMemberDefn.ValField (field, _) -> walkField field - | SynMemberDefn.NestedType (tdef, _, _) -> walkTypeDefn tdef - | SynMemberDefn.AutoProperty (attrs, _, _, t, _, _, _, _, e, _, _) -> - List.iter walkAttribute attrs - Option.iter walkType t - walkExpr e - | _ -> () - - and walkEnumCase (EnumCase(attrs, _, _, _, _)) = List.iter walkAttribute attrs - - and walkUnionCaseType = function - | SynUnionCaseType.UnionCaseFields fields -> List.iter walkField fields - | SynUnionCaseType.UnionCaseFullType (t, _) -> walkType t - - and walkUnionCase (SynUnionCase.UnionCase (attrs, _, t, _, _, _)) = - List.iter walkAttribute attrs - walkUnionCaseType t - - and walkTypeDefnSimple = function - | SynTypeDefnSimpleRepr.Enum (cases, _) -> List.iter walkEnumCase cases - | SynTypeDefnSimpleRepr.Union (_, cases, _) -> List.iter walkUnionCase cases - | SynTypeDefnSimpleRepr.Record (_, fields, 
_) -> List.iter walkField fields - | SynTypeDefnSimpleRepr.TypeAbbrev (_, t, _) -> walkType t - | _ -> () - - and walkComponentInfo isTypeExtensionOrAlias (ComponentInfo(attrs, typars, constraints, longIdent, _, _, _, _)) = - List.iter walkAttribute attrs - List.iter walkTyparDecl typars - List.iter walkTypeConstraint constraints - if isTypeExtensionOrAlias then - addLongIdent longIdent - - and walkTypeDefnRepr = function - | SynTypeDefnRepr.ObjectModel (_, defns, _) -> List.iter walkMember defns - | SynTypeDefnRepr.Simple(defn, _) -> walkTypeDefnSimple defn - | SynTypeDefnRepr.Exception _ -> () - - and walkTypeDefnSigRepr = function - | SynTypeDefnSigRepr.ObjectModel (_, defns, _) -> List.iter walkMemberSig defns - | SynTypeDefnSigRepr.Simple(defn, _) -> walkTypeDefnSimple defn - | SynTypeDefnSigRepr.Exception _ -> () - - and walkTypeDefn (TypeDefn (info, repr, members, _)) = - let isTypeExtensionOrAlias = - match repr with - | SynTypeDefnRepr.ObjectModel (SynTypeDefnKind.TyconAugmentation, _, _) - | SynTypeDefnRepr.ObjectModel (SynTypeDefnKind.TyconAbbrev, _, _) - | SynTypeDefnRepr.Simple (SynTypeDefnSimpleRepr.TypeAbbrev _, _) -> true - | _ -> false - walkComponentInfo isTypeExtensionOrAlias info - walkTypeDefnRepr repr - List.iter walkMember members - - and walkSynModuleDecl (decl: SynModuleDecl) = - match decl with - | SynModuleDecl.NamespaceFragment fragment -> walkSynModuleOrNamespace fragment - | SynModuleDecl.NestedModule (info, _, modules, _, _) -> - walkComponentInfo false info - List.iter walkSynModuleDecl modules - | SynModuleDecl.Let (_, bindings, _) -> List.iter walkBinding bindings - | SynModuleDecl.DoExpr (_, expr, _) -> walkExpr expr - | SynModuleDecl.Types (types, _) -> List.iter walkTypeDefn types - | SynModuleDecl.Attributes (attrs, _) -> List.iter walkAttribute attrs - | _ -> () - - match input with - | Some (ParsedInput.ImplFile input) -> - walkImplFileInput input - | _ -> () - //debug "%A" idents - upcast identsByEndPos - - let getLongIdentAt ast pos = - let idents = getLongIdents (Some ast) - match idents.TryGetValue pos with - | true, idents -> Some idents - | _ -> None - type Col = int + type private EndLine = int type Scope = { Idents: Idents diff --git a/src/fsharp/vs/ServiceUntypedParse.fs b/src/fsharp/vs/ServiceUntypedParse.fs index 07b14db3b0b..5f68d88f287 100755 --- a/src/fsharp/vs/ServiceUntypedParse.fs +++ b/src/fsharp/vs/ServiceUntypedParse.fs @@ -1157,4 +1157,362 @@ module UntypedParseImpl = | _ -> None member this.VisitBinding(defaultTraverse, synBinding) = defaultTraverse synBinding } - AstTraversal.Traverse(pos, pt, walker) \ No newline at end of file + AstTraversal.Traverse(pos, pt, walker) + + open Microsoft.FSharp.Compiler + open Microsoft.FSharp.Compiler.Ast + + /// An recursive pattern that collect all sequential expressions to avoid StackOverflowException + let rec (|Sequentials|_|) = function + | SynExpr.Sequential(_, _, e, Sequentials es, _) -> + Some(e::es) + | SynExpr.Sequential(_, _, e1, e2, _) -> + Some [e1; e2] + | _ -> None + + let (|ConstructorPats|) = function + | SynConstructorArgs.Pats ps -> ps + | SynConstructorArgs.NamePatPairs(xs, _) -> List.map snd xs + + /// Returns all `Ident`s and `LongIdent`s found in an untyped AST. 
+ let GetLongIdents (input: ParsedInput option) : IDictionary = + let identsByEndPos = Dictionary() + + let addLongIdent (longIdent: LongIdent) = + for ident in longIdent do + identsByEndPos.[ident.idRange.End] <- longIdent + + let addLongIdentWithDots (LongIdentWithDots (longIdent, lids) as value) = + match longIdent with + | [] -> () + | [_] as idents -> identsByEndPos.[value.Range.End] <- idents + | idents -> + for dotRange in lids do + identsByEndPos.[Range.mkPos dotRange.EndLine (dotRange.EndColumn - 1)] <- idents + identsByEndPos.[value.Range.End] <- idents + + let addIdent (ident: Ident) = + identsByEndPos.[ident.idRange.End] <- [ident] + + let rec walkImplFileInput (ParsedImplFileInput(_, _, _, _, _, moduleOrNamespaceList, _)) = + List.iter walkSynModuleOrNamespace moduleOrNamespaceList + + and walkSynModuleOrNamespace (SynModuleOrNamespace(_, _, _, decls, _, attrs, _, _)) = + List.iter walkAttribute attrs + List.iter walkSynModuleDecl decls + + and walkAttribute (attr: SynAttribute) = + addLongIdentWithDots attr.TypeName + walkExpr attr.ArgExpr + + and walkTyparDecl (SynTyparDecl.TyparDecl (attrs, typar)) = + List.iter walkAttribute attrs + walkTypar typar + + and walkTypeConstraint = function + | SynTypeConstraint.WhereTyparIsValueType (t, _) + | SynTypeConstraint.WhereTyparIsReferenceType (t, _) + | SynTypeConstraint.WhereTyparIsUnmanaged (t, _) + | SynTypeConstraint.WhereTyparSupportsNull (t, _) + | SynTypeConstraint.WhereTyparIsComparable (t, _) + | SynTypeConstraint.WhereTyparIsEquatable (t, _) -> walkTypar t + | SynTypeConstraint.WhereTyparDefaultsToType (t, ty, _) + | SynTypeConstraint.WhereTyparSubtypeOfType (t, ty, _) -> walkTypar t; walkType ty + | SynTypeConstraint.WhereTyparIsEnum (t, ts, _) + | SynTypeConstraint.WhereTyparIsDelegate (t, ts, _) -> walkTypar t; List.iter walkType ts + | SynTypeConstraint.WhereTyparSupportsMember (ts, sign, _) -> List.iter walkType ts; walkMemberSig sign + + and walkPat = function + | SynPat.Tuple (pats, _) + | SynPat.ArrayOrList (_, pats, _) + | SynPat.Ands (pats, _) -> List.iter walkPat pats + | SynPat.Named (pat, ident, _, _, _) -> + walkPat pat + addIdent ident + | SynPat.Typed (pat, t, _) -> + walkPat pat + walkType t + | SynPat.Attrib (pat, attrs, _) -> + walkPat pat + List.iter walkAttribute attrs + | SynPat.Or (pat1, pat2, _) -> List.iter walkPat [pat1; pat2] + | SynPat.LongIdent (ident, _, typars, ConstructorPats pats, _, _) -> + addLongIdentWithDots ident + typars + |> Option.iter (fun (SynValTyparDecls (typars, _, constraints)) -> + List.iter walkTyparDecl typars + List.iter walkTypeConstraint constraints) + List.iter walkPat pats + | SynPat.Paren (pat, _) -> walkPat pat + | SynPat.IsInst (t, _) -> walkType t + | SynPat.QuoteExpr(e, _) -> walkExpr e + | _ -> () + + and walkTypar (Typar (_, _, _)) = () + + and walkBinding (SynBinding.Binding (_, _, _, _, attrs, _, _, pat, returnInfo, e, _, _)) = + List.iter walkAttribute attrs + walkPat pat + walkExpr e + returnInfo |> Option.iter (fun (SynBindingReturnInfo (t, _, _)) -> walkType t) + + and walkInterfaceImpl (InterfaceImpl(_, bindings, _)) = List.iter walkBinding bindings + + and walkIndexerArg = function + | SynIndexerArg.One e -> walkExpr e + | SynIndexerArg.Two (e1, e2) -> List.iter walkExpr [e1; e2] + + and walkType = function + | SynType.Array (_, t, _) + | SynType.HashConstraint (t, _) + | SynType.MeasurePower (t, _, _) -> walkType t + | SynType.Fun (t1, t2, _) + | SynType.MeasureDivide (t1, t2, _) -> walkType t1; walkType t2 + | SynType.LongIdent ident -> 
addLongIdentWithDots ident + | SynType.App (ty, _, types, _, _, _, _) -> walkType ty; List.iter walkType types + | SynType.LongIdentApp (_, _, _, types, _, _, _) -> List.iter walkType types + | SynType.Tuple (ts, _) -> ts |> List.iter (fun (_, t) -> walkType t) + | SynType.WithGlobalConstraints (t, typeConstraints, _) -> + walkType t; List.iter walkTypeConstraint typeConstraints + | _ -> () + + and walkClause (Clause (pat, e1, e2, _, _)) = + walkPat pat + walkExpr e2 + e1 |> Option.iter walkExpr + + and walkSimplePats = function + | SynSimplePats.SimplePats (pats, _) -> List.iter walkSimplePat pats + | SynSimplePats.Typed (pats, ty, _) -> + walkSimplePats pats + walkType ty + + and walkExpr = function + | SynExpr.Paren (e, _, _, _) + | SynExpr.Quote (_, _, e, _, _) + | SynExpr.Typed (e, _, _) + | SynExpr.InferredUpcast (e, _) + | SynExpr.InferredDowncast (e, _) + | SynExpr.AddressOf (_, e, _, _) + | SynExpr.DoBang (e, _) + | SynExpr.YieldOrReturn (_, e, _) + | SynExpr.ArrayOrListOfSeqExpr (_, e, _) + | SynExpr.CompExpr (_, _, e, _) + | SynExpr.Do (e, _) + | SynExpr.Assert (e, _) + | SynExpr.Lazy (e, _) + | SynExpr.YieldOrReturnFrom (_, e, _) -> walkExpr e + | SynExpr.Lambda (_, _, pats, e, _) -> + walkSimplePats pats + walkExpr e + | SynExpr.New (_, t, e, _) + | SynExpr.TypeTest (e, t, _) + | SynExpr.Upcast (e, t, _) + | SynExpr.Downcast (e, t, _) -> walkExpr e; walkType t + | SynExpr.Tuple (es, _, _) + | Sequentials es + | SynExpr.ArrayOrList (_, es, _) -> List.iter walkExpr es + | SynExpr.App (_, _, e1, e2, _) + | SynExpr.TryFinally (e1, e2, _, _, _) + | SynExpr.While (_, e1, e2, _) -> List.iter walkExpr [e1; e2] + | SynExpr.Record (_, _, fields, _) -> + fields |> List.iter (fun ((ident, _), e, _) -> + addLongIdentWithDots ident + e |> Option.iter walkExpr) + | SynExpr.Ident ident -> addIdent ident + | SynExpr.ObjExpr(ty, argOpt, bindings, ifaces, _, _) -> + argOpt |> Option.iter (fun (e, ident) -> + walkExpr e + ident |> Option.iter addIdent) + walkType ty + List.iter walkBinding bindings + List.iter walkInterfaceImpl ifaces + | SynExpr.LongIdent (_, ident, _, _) -> addLongIdentWithDots ident + | SynExpr.For (_, ident, e1, _, e2, e3, _) -> + addIdent ident + List.iter walkExpr [e1; e2; e3] + | SynExpr.ForEach (_, _, _, pat, e1, e2, _) -> + walkPat pat + List.iter walkExpr [e1; e2] + | SynExpr.MatchLambda (_, _, synMatchClauseList, _, _) -> + List.iter walkClause synMatchClauseList + | SynExpr.Match (_, e, synMatchClauseList, _, _) -> + walkExpr e + List.iter walkClause synMatchClauseList + | SynExpr.TypeApp (e, _, tys, _, _, _, _) -> + List.iter walkType tys; walkExpr e + | SynExpr.LetOrUse (_, _, bindings, e, _) -> + List.iter walkBinding bindings; walkExpr e + | SynExpr.TryWith (e, _, clauses, _, _, _, _) -> + List.iter walkClause clauses; walkExpr e + | SynExpr.IfThenElse (e1, e2, e3, _, _, _, _) -> + List.iter walkExpr [e1; e2] + e3 |> Option.iter walkExpr + | SynExpr.LongIdentSet (ident, e, _) + | SynExpr.DotGet (e, _, ident, _) -> + addLongIdentWithDots ident + walkExpr e + | SynExpr.DotSet (e1, idents, e2, _) -> + walkExpr e1 + addLongIdentWithDots idents + walkExpr e2 + | SynExpr.DotIndexedGet (e, args, _, _) -> + walkExpr e + List.iter walkIndexerArg args + | SynExpr.DotIndexedSet (e1, args, e2, _, _, _) -> + walkExpr e1 + List.iter walkIndexerArg args + walkExpr e2 + | SynExpr.NamedIndexedPropertySet (ident, e1, e2, _) -> + addLongIdentWithDots ident + List.iter walkExpr [e1; e2] + | SynExpr.DotNamedIndexedPropertySet (e1, ident, e2, e3, _) -> + addLongIdentWithDots ident + 
List.iter walkExpr [e1; e2; e3] + | SynExpr.JoinIn (e1, _, e2, _) -> List.iter walkExpr [e1; e2] + | SynExpr.LetOrUseBang (_, _, _, pat, e1, e2, _) -> + walkPat pat + List.iter walkExpr [e1; e2] + | SynExpr.TraitCall (ts, sign, e, _) -> + List.iter walkTypar ts + walkMemberSig sign + walkExpr e + | SynExpr.Const (SynConst.Measure(_, m), _) -> walkMeasure m + | _ -> () + + and walkMeasure = function + | SynMeasure.Product (m1, m2, _) + | SynMeasure.Divide (m1, m2, _) -> walkMeasure m1; walkMeasure m2 + | SynMeasure.Named (longIdent, _) -> addLongIdent longIdent + | SynMeasure.Seq (ms, _) -> List.iter walkMeasure ms + | SynMeasure.Power (m, _, _) -> walkMeasure m + | SynMeasure.Var (ty, _) -> walkTypar ty + | SynMeasure.One + | SynMeasure.Anon _ -> () + + and walkSimplePat = function + | SynSimplePat.Attrib (pat, attrs, _) -> + walkSimplePat pat + List.iter walkAttribute attrs + | SynSimplePat.Typed(pat, t, _) -> + walkSimplePat pat + walkType t + | _ -> () + + and walkField (SynField.Field(attrs, _, _, t, _, _, _, _)) = + List.iter walkAttribute attrs + walkType t + + and walkValSig (SynValSig.ValSpfn(attrs, _, _, t, SynValInfo(argInfos, argInfo), _, _, _, _, _, _)) = + List.iter walkAttribute attrs + walkType t + argInfo :: (argInfos |> List.concat) + |> List.map (fun (SynArgInfo(attrs, _, _)) -> attrs) + |> List.concat + |> List.iter walkAttribute + + and walkMemberSig = function + | SynMemberSig.Inherit (t, _) + | SynMemberSig.Interface(t, _) -> walkType t + | SynMemberSig.Member(vs, _, _) -> walkValSig vs + | SynMemberSig.ValField(f, _) -> walkField f + | SynMemberSig.NestedType(SynTypeDefnSig.TypeDefnSig (info, repr, memberSigs, _), _) -> + let isTypeExtensionOrAlias = + match repr with + | SynTypeDefnSigRepr.Simple(SynTypeDefnSimpleRepr.TypeAbbrev _, _) + | SynTypeDefnSigRepr.ObjectModel(SynTypeDefnKind.TyconAbbrev, _, _) + | SynTypeDefnSigRepr.ObjectModel(SynTypeDefnKind.TyconAugmentation, _, _) -> true + | _ -> false + walkComponentInfo isTypeExtensionOrAlias info + walkTypeDefnSigRepr repr + List.iter walkMemberSig memberSigs + + and walkMember = function + | SynMemberDefn.AbstractSlot (valSig, _, _) -> walkValSig valSig + | SynMemberDefn.Member (binding, _) -> walkBinding binding + | SynMemberDefn.ImplicitCtor (_, attrs, pats, _, _) -> + List.iter walkAttribute attrs + List.iter walkSimplePat pats + | SynMemberDefn.ImplicitInherit (t, e, _, _) -> walkType t; walkExpr e + | SynMemberDefn.LetBindings (bindings, _, _, _) -> List.iter walkBinding bindings + | SynMemberDefn.Interface (t, members, _) -> + walkType t + members |> Option.iter (List.iter walkMember) + | SynMemberDefn.Inherit (t, _, _) -> walkType t + | SynMemberDefn.ValField (field, _) -> walkField field + | SynMemberDefn.NestedType (tdef, _, _) -> walkTypeDefn tdef + | SynMemberDefn.AutoProperty (attrs, _, _, t, _, _, _, _, e, _, _) -> + List.iter walkAttribute attrs + Option.iter walkType t + walkExpr e + | _ -> () + + and walkEnumCase (EnumCase(attrs, _, _, _, _)) = List.iter walkAttribute attrs + + and walkUnionCaseType = function + | SynUnionCaseType.UnionCaseFields fields -> List.iter walkField fields + | SynUnionCaseType.UnionCaseFullType (t, _) -> walkType t + + and walkUnionCase (SynUnionCase.UnionCase (attrs, _, t, _, _, _)) = + List.iter walkAttribute attrs + walkUnionCaseType t + + and walkTypeDefnSimple = function + | SynTypeDefnSimpleRepr.Enum (cases, _) -> List.iter walkEnumCase cases + | SynTypeDefnSimpleRepr.Union (_, cases, _) -> List.iter walkUnionCase cases + | SynTypeDefnSimpleRepr.Record (_, fields, 
_) -> List.iter walkField fields + | SynTypeDefnSimpleRepr.TypeAbbrev (_, t, _) -> walkType t + | _ -> () + + and walkComponentInfo isTypeExtensionOrAlias (ComponentInfo(attrs, typars, constraints, longIdent, _, _, _, _)) = + List.iter walkAttribute attrs + List.iter walkTyparDecl typars + List.iter walkTypeConstraint constraints + if isTypeExtensionOrAlias then + addLongIdent longIdent + + and walkTypeDefnRepr = function + | SynTypeDefnRepr.ObjectModel (_, defns, _) -> List.iter walkMember defns + | SynTypeDefnRepr.Simple(defn, _) -> walkTypeDefnSimple defn + | SynTypeDefnRepr.Exception _ -> () + + and walkTypeDefnSigRepr = function + | SynTypeDefnSigRepr.ObjectModel (_, defns, _) -> List.iter walkMemberSig defns + | SynTypeDefnSigRepr.Simple(defn, _) -> walkTypeDefnSimple defn + | SynTypeDefnSigRepr.Exception _ -> () + + and walkTypeDefn (TypeDefn (info, repr, members, _)) = + let isTypeExtensionOrAlias = + match repr with + | SynTypeDefnRepr.ObjectModel (SynTypeDefnKind.TyconAugmentation, _, _) + | SynTypeDefnRepr.ObjectModel (SynTypeDefnKind.TyconAbbrev, _, _) + | SynTypeDefnRepr.Simple (SynTypeDefnSimpleRepr.TypeAbbrev _, _) -> true + | _ -> false + walkComponentInfo isTypeExtensionOrAlias info + walkTypeDefnRepr repr + List.iter walkMember members + + and walkSynModuleDecl (decl: SynModuleDecl) = + match decl with + | SynModuleDecl.NamespaceFragment fragment -> walkSynModuleOrNamespace fragment + | SynModuleDecl.NestedModule (info, _, modules, _, _) -> + walkComponentInfo false info + List.iter walkSynModuleDecl modules + | SynModuleDecl.Let (_, bindings, _) -> List.iter walkBinding bindings + | SynModuleDecl.DoExpr (_, expr, _) -> walkExpr expr + | SynModuleDecl.Types (types, _) -> List.iter walkTypeDefn types + | SynModuleDecl.Attributes (attrs, _) -> List.iter walkAttribute attrs + | _ -> () + + match input with + | Some (ParsedInput.ImplFile input) -> + walkImplFileInput input + | _ -> () + //debug "%A" idents + upcast identsByEndPos + + let GetLongIdentAt (parsedInput: ParsedInput, pos: pos) = + let idents = GetLongIdents (Some parsedInput) + match idents.TryGetValue pos with + | true, idents -> Some idents + | _ -> None \ No newline at end of file diff --git a/src/fsharp/vs/ServiceUntypedParse.fsi b/src/fsharp/vs/ServiceUntypedParse.fsi index 4be910f598c..380153df64f 100755 --- a/src/fsharp/vs/ServiceUntypedParse.fsi +++ b/src/fsharp/vs/ServiceUntypedParse.fsi @@ -92,6 +92,8 @@ module internal UntypedParseImpl = val TryFindExpressionIslandInPosition : pos * ParsedInput option -> string option val TryGetCompletionContext : pos * FSharpParseFileResults option -> CompletionContext option val GetEntityKind: pos * ParsedInput -> EntityKind option + val GetLongIdents: ParsedInput option -> IDictionary + val GetLongIdentAt: ParsedInput * pos -> LongIdent option // implementation details used by other code in the compiler module internal SourceFileImpl = diff --git a/vsintegration/src/FSharp.Editor/CodeFixes/AddOpen.fs b/vsintegration/src/FSharp.Editor/CodeFixes/AddOpen.fs index 04a7cad419a..16ca9d5df9d 100644 --- a/vsintegration/src/FSharp.Editor/CodeFixes/AddOpen.fs +++ b/vsintegration/src/FSharp.Editor/CodeFixes/AddOpen.fs @@ -178,14 +178,8 @@ type internal FSharpAddOpenCodeFixProvider | None, _ | _, FSharpCheckFileAnswer.Aborted -> () | Some parsedInput, FSharpCheckFileAnswer.Succeeded checkFileResults -> - let unresolvedIdentRange = - let startLinePos = sourceText.Lines.GetLinePosition context.Span.Start - let startPos = Pos.fromZ startLinePos.Line startLinePos.Character - let 
endLinePos = sourceText.Lines.GetLinePosition context.Span.End - let endPos = Pos.fromZ endLinePos.Line endLinePos.Character - Range.mkRange context.Document.FilePath startPos endPos - let isAttribute = UntypedParseImpl.GetEntityKind(pos, parsedInput) = Some EntityKind.Attribute - let isAttribute = ParsedInput.getEntityKind parsedInput unresolvedIdentRange.Start = Some EntityKind.Attribute + let unresolvedIdentRange = CommonRoslynHelpers.TextSpanToFSharpRange(context.Document.FilePath, sourceText, context.Span) + let isAttribute = UntypedParseImpl.GetEntityKind(unresolvedIdentRange.Start, parsedInput) = Some EntityKind.Attribute let entities = assemblyContentProvider.GetAllEntitiesInProjectAndReferencedAssemblies checkFileResults |> List.collect (fun e -> @@ -200,7 +194,7 @@ type internal FSharpAddOpenCodeFixProvider e.CleanedIdents |> Array.replace (e.CleanedIdents.Length - 1) (lastIdent.Substring(0, lastIdent.Length - 9)) ]) - let longIdent = ParsedInput.getLongIdentAt parsedInput unresolvedIdentRange.End + let longIdent = UntypedParseImpl.GetLongIdentAt(parsedInput, unresolvedIdentRange.End) let maybeUnresolvedIdents = longIdent diff --git a/vsintegration/src/FSharp.Editor/Common/CommonRoslynHelpers.fs b/vsintegration/src/FSharp.Editor/Common/CommonRoslynHelpers.fs index 367ac018ccf..d26c48515dd 100644 --- a/vsintegration/src/FSharp.Editor/Common/CommonRoslynHelpers.fs +++ b/vsintegration/src/FSharp.Editor/Common/CommonRoslynHelpers.fs @@ -27,6 +27,13 @@ module internal CommonRoslynHelpers = //Assert.Exception(e) None + let TextSpanToFSharpRange(fileName: string, sourceText: SourceText, span: TextSpan) : range = + let startLinePos = sourceText.Lines.GetLinePosition span.Start + let startPos = Pos.fromZ startLinePos.Line startLinePos.Character + let endLinePos = sourceText.Lines.GetLinePosition span.End + let endPos = Pos.fromZ endLinePos.Line endLinePos.Character + Range.mkRange fileName startPos endPos + let GetCompletedTaskResult(task: Task<'TResult>) = if task.Status = TaskStatus.RanToCompletion then task.Result