Skip to content

Commit

Permalink
Perform progression search in two phases. (#58785)
Browse files Browse the repository at this point in the history
* Break progression search into two phases

* Update tests

* Add asserts

* Reorder

* Make public

* Update src/Features/Core/Portable/NavigateTo/NavigateToSearcher.cs

Co-authored-by: David Barbet <dibarbet@gmail.com>

* Remove parallelism.  We don't need the contention

* Fix counter logic

* Reorder

* Fix comment

* Lint

Co-authored-by: David Barbet <dibarbet@gmail.com>
  • Loading branch information
CyrusNajmabadi and dibarbet authored Jan 14, 2022
1 parent 03adb3a commit 9a69692
Show file tree
Hide file tree
Showing 5 changed files with 132 additions and 75 deletions.
135 changes: 90 additions & 45 deletions src/Features/Core/Portable/NavigateTo/NavigateToSearcher.cs
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
Expand All @@ -19,6 +20,14 @@

namespace Microsoft.CodeAnalysis.NavigateTo
{
/// <summary>
/// Controls which sets of documents a navigate-to search will examine.
/// Combinable as flags; <see cref="AllDocuments"/> covers both kinds.
/// </summary>
[Flags]
internal enum NavigateToSearchScope
{
    /// <summary>Search ordinary (non-generated) documents.</summary>
    RegularDocuments = 0b01,
    /// <summary>Search documents produced by source generators.</summary>
    GeneratedDocuments = 0b10,
    /// <summary>Search both regular and source-generated documents.</summary>
    AllDocuments = RegularDocuments | GeneratedDocuments
}

internal class NavigateToSearcher
{
private readonly INavigateToSearcherHost _host;
Expand All @@ -27,11 +36,13 @@ internal class NavigateToSearcher
private readonly INavigateToSearchCallback _callback;
private readonly string _searchPattern;
private readonly IImmutableSet<string> _kinds;
private readonly IStreamingProgressTracker _progress;
private readonly IStreamingProgressTracker _progress_doNotAccessDirectly;

private readonly Document? _activeDocument;
private readonly ImmutableArray<Document> _visibleDocuments;

private int _remainingProgressItems;

private NavigateToSearcher(
INavigateToSearcherHost host,
Solution solution,
Expand All @@ -46,7 +57,7 @@ private NavigateToSearcher(
_callback = callback;
_searchPattern = searchPattern;
_kinds = kinds;
_progress = new StreamingProgressTracker((current, maximum, ct) =>
_progress_doNotAccessDirectly = new StreamingProgressTracker((current, maximum, ct) =>
{
callback.ReportProgress(current, maximum);
return new ValueTask();
Expand Down Expand Up @@ -75,7 +86,28 @@ public static NavigateToSearcher Create(
return new NavigateToSearcher(host, solution, asyncListener, callback, searchPattern, kinds);
}

internal async Task SearchAsync(bool searchCurrentDocument, CancellationToken cancellationToken)
/// <summary>
/// Records <paramref name="count"/> new units of pending work, both in our local
/// counter and in the underlying progress tracker.
/// </summary>
private async Task AddProgressItemsAsync(int count, CancellationToken cancellationToken)
{
    Debug.Assert(count >= 0);
    Debug.Assert(_remainingProgressItems >= 0);

    // Update our own counter first so callers that later drain the remaining items
    // (e.g. the finally-block in SearchAsync) see an accurate outstanding count.
    Interlocked.Add(ref _remainingProgressItems, count);
    await _progress_doNotAccessDirectly.AddItemsAsync(count, cancellationToken).ConfigureAwait(false);
}

/// <summary>
/// Marks <paramref name="count"/> units of work as finished, decrementing our local
/// counter and notifying the underlying progress tracker.
/// </summary>
private async Task ProgressItemsCompletedAsync(int count, CancellationToken cancellationToken)
{
    // Atomically decrement; we should never complete more items than were added.
    var remaining = Interlocked.Add(ref _remainingProgressItems, -count);
    Debug.Assert(remaining >= 0);

    await _progress_doNotAccessDirectly.ItemsCompletedAsync(count, cancellationToken).ConfigureAwait(false);
}

/// <summary>
/// Convenience overload that searches every document kind (regular and source-generated).
/// </summary>
public Task SearchAsync(bool searchCurrentDocument, CancellationToken cancellationToken)
{
    return SearchAsync(searchCurrentDocument, NavigateToSearchScope.AllDocuments, cancellationToken);
}

public async Task SearchAsync(
bool searchCurrentDocument,
NavigateToSearchScope scope,
CancellationToken cancellationToken)
{
var isFullyLoaded = true;

Expand All @@ -94,16 +126,19 @@ internal async Task SearchAsync(bool searchCurrentDocument, CancellationToken ca
// totally hydrated the oop side. Until that happens, we'll attempt to return cached data from languages
// that support that.
isFullyLoaded = await _host.IsFullyLoadedAsync(cancellationToken).ConfigureAwait(false);
await SearchAllProjectsAsync(isFullyLoaded, cancellationToken).ConfigureAwait(false);
await SearchAllProjectsAsync(isFullyLoaded, scope, cancellationToken).ConfigureAwait(false);
}
}
catch (OperationCanceledException)
{
}
finally
{
// providing this extra information will make UI to show indication to users
// that result might not contain full data
// Ensure that we actually complete all our remaining progress items so that the progress bar completes.
await ProgressItemsCompletedAsync(_remainingProgressItems, cancellationToken).ConfigureAwait(false);
Debug.Assert(_remainingProgressItems == 0);

// Pass along isFullyLoaded so that the UI can show indication to users that results may be incomplete.
_callback.Done(isFullyLoaded);
}
}
Expand All @@ -118,57 +153,67 @@ private async Task SearchCurrentDocumentAsync(CancellationToken cancellationToke
if (service == null)
return;

await _progress.AddItemsAsync(1, cancellationToken).ConfigureAwait(false);
try
{
await service.SearchDocumentAsync(
_activeDocument, _searchPattern, _kinds,
r => _callback.AddItemAsync(project, r, cancellationToken),
cancellationToken).ConfigureAwait(false);
}
finally
{
await _progress.ItemCompletedAsync(cancellationToken).ConfigureAwait(false);
}
await AddProgressItemsAsync(1, cancellationToken).ConfigureAwait(false);
await service.SearchDocumentAsync(
_activeDocument, _searchPattern, _kinds,
r => _callback.AddItemAsync(project, r, cancellationToken),
cancellationToken).ConfigureAwait(false);
}

private async Task SearchAllProjectsAsync(bool isFullyLoaded, CancellationToken cancellationToken)
private async Task SearchAllProjectsAsync(
bool isFullyLoaded,
NavigateToSearchScope scope,
CancellationToken cancellationToken)
{
var seenItems = new HashSet<INavigateToSearchResult>(NavigateToSearchResultComparer.Instance);
var orderedProjects = GetOrderedProjectsToProcess();
var searchGeneratedDocuments = isFullyLoaded;

var projectCount = orderedProjects.Sum(g => g.Length);
var searchRegularDocuments = scope.HasFlag(NavigateToSearchScope.RegularDocuments);
var searchGeneratedDocuments = scope.HasFlag(NavigateToSearchScope.GeneratedDocuments);
Debug.Assert(searchRegularDocuments || searchGeneratedDocuments);

// We do at least two passes. One for loaded docs. One for source generated docs.
await _progress.AddItemsAsync(projectCount * 2, cancellationToken).ConfigureAwait(false);
var projectCount = orderedProjects.Sum(g => g.Length);

if (!isFullyLoaded)
if (isFullyLoaded)
{
// We need an additional pass to look through cached docs.
await _progress.AddItemsAsync(projectCount, cancellationToken).ConfigureAwait(false);
// We may do up to two passes. One for loaded docs. One for source generated docs.
await AddProgressItemsAsync(
projectCount * ((searchRegularDocuments ? 1 : 0) + (searchGeneratedDocuments ? 1 : 0)),
cancellationToken).ConfigureAwait(false);

await SearchCachedDocumentsAsync(orderedProjects, seenItems, cancellationToken).ConfigureAwait(false);
if (searchRegularDocuments)
await SearchFullyLoadedProjectsAsync(orderedProjects, seenItems, cancellationToken).ConfigureAwait(false);

// If searching cached data returned any results, then we're done. We've at least shown some results
// to the user. That will hopefully serve them well enough until the solution fully loads.
if (seenItems.Count > 0)
{
// Ensure that we actually complete all our workitems so that the progress bar completes.
await _progress.ItemsCompletedAsync(projectCount * 2, cancellationToken).ConfigureAwait(false);
return;
}
if (searchGeneratedDocuments)
await SearchGeneratedDocumentsAsync(seenItems, cancellationToken).ConfigureAwait(false);
}
else
{
// If we're not fully loaded, we only search regular documents. Generated documents must wait until
// we're fully loaded (and thus have all the information necessary to properly run generators).
if (searchRegularDocuments)
{
// We do at least two passes. One for cached docs. One for normal docs.
await AddProgressItemsAsync(
projectCount * 2,
cancellationToken).ConfigureAwait(false);

await SearchCachedDocumentsAsync(orderedProjects, seenItems, cancellationToken).ConfigureAwait(false);

await SearchFullyLoadedProjectsAsync(orderedProjects, seenItems, cancellationToken).ConfigureAwait(false);
await SearchGeneratedDocumentsAsync(seenItems, cancellationToken).ConfigureAwait(false);
// If searching cached data returned any results, then we're done. We've at least shown some results
// to the user. That will hopefully serve them well enough until the solution fully loads.
if (seenItems.Count > 0)
return;

await SearchFullyLoadedProjectsAsync(orderedProjects, seenItems, cancellationToken).ConfigureAwait(false);

// Report a telemetry even to track if we found uncached items after failing to find cached items.
// In practice if we see that we are always finding uncached items, then it's likely something
// has broken in the caching system since we would expect to normally find values there. Specifically
// we expect: foundFullItems <<< not foundFullItems.
if (!isFullyLoaded)
Logger.Log(FunctionId.NavigateTo_CacheItemsMiss, KeyValueLogMessage.Create(m => m["FoundFullItems"] = seenItems.Count > 0));
// Report a telemetry event to track if we found uncached items after failing to find cached items.
// In practice if we see that we are always finding uncached items, then it's likely something
// has broken in the caching system since we would expect to normally find values there. Specifically
// we expect: foundFullItems <<< not foundFullItems.
Logger.Log(FunctionId.NavigateTo_CacheItemsMiss, KeyValueLogMessage.Create(m => m["FoundFullItems"] = seenItems.Count > 0));
}
}
}

/// <summary>
Expand Down Expand Up @@ -293,7 +338,7 @@ await processProjectAsync(service, project, result =>
finally
{
// after each project is searched, increment our progress.
await _progress.ItemCompletedAsync(cancellationToken).ConfigureAwait(false);
await ProgressItemsCompletedAsync(count: 1, cancellationToken).ConfigureAwait(false);
}
}
}
Expand All @@ -320,7 +365,7 @@ private Task SearchCachedDocumentsAsync(
HashSet<INavigateToSearchResult> seenItems,
CancellationToken cancellationToken)
{
// We searched cached information in parallel. This is because there's no syncing step when searching cached
// We search cached information in parallel. This is because there's no syncing step when searching cached
// docs. As such, we can just send a request for all projects in parallel to our OOP host and have it read
// and search the local DB easily. The DB can easily scale to feed all the threads trying to read from it
// and we can get high throughput just processing everything in parallel.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.Editing;
using Microsoft.CodeAnalysis.Editor.Shared.Utilities;
using Microsoft.CodeAnalysis.NavigateTo;
using Microsoft.CodeAnalysis.Shared.TestHooks;
using Microsoft.VisualStudio.GraphModel;
using Microsoft.VisualStudio.GraphModel.CodeSchema;
Expand Down Expand Up @@ -136,11 +137,15 @@ internal static List<IGraphQuery> GetGraphQueries(

if (searchParameters != null)
{
// WARNING: searchParameters.SearchQuery returns an IVsSearchQuery object, which
// is a COM type. Therefore, it's probably best to grab the values we want now
// rather than get surprised by COM marshalling later.
graphQueries.Add(new SearchGraphQuery(
searchParameters.SearchQuery.SearchString, threadingContext, asyncListener));
// WARNING: searchParameters.SearchQuery returns an IVsSearchQuery object, which is a COM type.
// Therefore, it's probably best to grab the values we want now rather than get surprised by COM
// marshalling later.
//
// Create two queries. One to find results in normal docs, and one to find results in generated
// docs. That way if the generated docs take a long time we can still report the regular doc
// results immediately.
graphQueries.Add(new SearchGraphQuery(searchParameters.SearchQuery.SearchString, NavigateToSearchScope.RegularDocuments, threadingContext, asyncListener));
graphQueries.Add(new SearchGraphQuery(searchParameters.SearchQuery.SearchString, NavigateToSearchScope.GeneratedDocuments, threadingContext, asyncListener));
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,18 +14,21 @@ namespace Microsoft.VisualStudio.LanguageServices.Implementation.Progression
{
internal sealed partial class SearchGraphQuery : IGraphQuery
{
private readonly string _searchPattern;
private readonly NavigateToSearchScope _searchScope;
private readonly IThreadingContext _threadingContext;
private readonly IAsynchronousOperationListener _asyncListener;
private readonly string _searchPattern;

/// <summary>
/// Creates a graph query that runs a navigate-to search for
/// <paramref name="searchPattern"/> over the documents selected by
/// <paramref name="searchScope"/>.
/// </summary>
public SearchGraphQuery(
    string searchPattern,
    NavigateToSearchScope searchScope,
    IThreadingContext threadingContext,
    IAsynchronousOperationListener asyncListener)
{
    _searchPattern = searchPattern;
    _searchScope = searchScope;
    _threadingContext = threadingContext;
    _asyncListener = asyncListener;
}

public async Task<GraphBuilder> GetGraphAsync(Solution solution, IGraphContext context, CancellationToken cancellationToken)
Expand All @@ -40,7 +43,7 @@ public async Task<GraphBuilder> GetGraphAsync(Solution solution, IGraphContext c
NavigateToUtilities.GetKindsProvided(solution),
_threadingContext.DisposalToken);

await searcher.SearchAsync(searchCurrentDocument: false, cancellationToken).ConfigureAwait(false);
await searcher.SearchAsync(searchCurrentDocument: false, _searchScope, cancellationToken).ConfigureAwait(false);

return graphBuilder;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -156,25 +156,28 @@ private static async Task PopulateContextGraphAsync(Solution solution, List<IGra
try
{
var cancellationToken = context.CancelToken;
var graphBuilderTasks = graphQueries.Select(q => q.GetGraphAsync(solution, context, cancellationToken)).ToArray();
var graphBuilders = await Task.WhenAll(graphBuilderTasks).ConfigureAwait(false);

// Perform the actual graph transaction
using var transaction = new GraphTransactionScope();

// Remove any links that may have been added by a previous population. We don't
// remove nodes to maintain node identity, matching the behavior of the old
// providers.
context.Graph.Links.Clear();
using (var transaction1 = new GraphTransactionScope())
{
// Remove any links that may have been added by a previous population. We don't
// remove nodes to maintain node identity, matching the behavior of the old
// providers.
context.Graph.Links.Clear();
transaction1.Complete();
}

foreach (var graphBuilder in graphBuilders)
foreach (var query in graphQueries)
{
graphBuilder.ApplyToGraph(context.Graph, cancellationToken);
var graphBuilder = await query.GetGraphAsync(solution, context, cancellationToken).ConfigureAwait(false);

using var transaction2 = new GraphTransactionScope();

graphBuilder.ApplyToGraph(context.Graph, cancellationToken);
context.OutputNodes.AddAll(graphBuilder.GetCreatedNodes(cancellationToken));
}

transaction.Complete();
transaction2.Complete();
}
}
catch (Exception ex) when (FatalError.ReportAndPropagateUnlessCanceled(ex, ErrorSeverity.Diagnostic))
{
Expand Down
Loading

0 comments on commit 9a69692

Please sign in to comment.