Perform progression search in two phases. #58785
File: NavigateToSearcher.cs
@@ -5,6 +5,7 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;

@@ -19,6 +20,14 @@

namespace Microsoft.CodeAnalysis.NavigateTo
{
[Flags]
internal enum NavigateToSearchScope
{
RegularDocuments = 0b01,
GeneratedDocuments = 0b10,
AllDocuments = RegularDocuments | GeneratedDocuments
}

internal class NavigateToSearcher
{
private readonly INavigateToSearcherHost _host;

@@ -27,11 +36,13 @@ internal class NavigateToSearcher
private readonly INavigateToSearchCallback _callback;
private readonly string _searchPattern;
private readonly IImmutableSet<string> _kinds;
private readonly IStreamingProgressTracker _progress;
private readonly IStreamingProgressTracker _progress_doNotAccessDirectly;

private readonly Document? _activeDocument;
private readonly ImmutableArray<Document> _visibleDocuments;

private int _remainingProgressItems;

private NavigateToSearcher(
INavigateToSearcherHost host,
Solution solution,

@@ -46,7 +57,7 @@ private NavigateToSearcher(
_callback = callback;
_searchPattern = searchPattern;
_kinds = kinds;
_progress = new StreamingProgressTracker((current, maximum, ct) =>
_progress_doNotAccessDirectly = new StreamingProgressTracker((current, maximum, ct) =>
{
callback.ReportProgress(current, maximum);
return new ValueTask();

@@ -75,7 +86,28 @@ public static NavigateToSearcher Create(
return new NavigateToSearcher(host, solution, asyncListener, callback, searchPattern, kinds);
}

internal async Task SearchAsync(bool searchCurrentDocument, CancellationToken cancellationToken)
private async Task AddProgressItemsAsync(int count, CancellationToken cancellationToken)
{
Debug.Assert(count >= 0);
Debug.Assert(_remainingProgressItems >= 0);
Interlocked.Increment(ref _remainingProgressItems);
await _progress_doNotAccessDirectly.AddItemsAsync(count, cancellationToken).ConfigureAwait(false);
}

private async Task ProgressItemsCompletedAsync(int count, CancellationToken cancellationToken)
{
var newValue = Interlocked.Add(ref _remainingProgressItems, -count);
Debug.Assert(newValue >= 0);
await _progress_doNotAccessDirectly.ItemsCompletedAsync(count, cancellationToken).ConfigureAwait(false);
}

internal Task SearchAsync(bool searchCurrentDocument, CancellationToken cancellationToken)
=> SearchAsync(searchCurrentDocument, NavigateToSearchScope.AllDocuments, cancellationToken);

internal async Task SearchAsync(
bool searchCurrentDocument,
NavigateToSearchScope scope,
CancellationToken cancellationToken)
{
var isFullyLoaded = true;

@@ -94,16 +126,19 @@ internal async Task SearchAsync(bool searchCurrentDocument, CancellationToken ca
// totally hydrated the oop side. Until that happens, we'll attempt to return cached data from languages
// that support that.
isFullyLoaded = await _host.IsFullyLoadedAsync(cancellationToken).ConfigureAwait(false);
await SearchAllProjectsAsync(isFullyLoaded, cancellationToken).ConfigureAwait(false);
await SearchAllProjectsAsync(isFullyLoaded, scope, cancellationToken).ConfigureAwait(false);
}
}
catch (OperationCanceledException)
{
}
finally
{
// providing this extra information will make UI to show indication to users
// that result might not contain full data
// Ensure that we actually complete all our remaining progress items so that the progress bar completes.
await ProgressItemsCompletedAsync(_remainingProgressItems, cancellationToken).ConfigureAwait(false);
Review comment: So is the only case where _remainingProgressItems would not be zero when an exception is thrown and we aren't able to complete the search?

Review comment: Not sure how possible this is, but could this race? For example, we have 5 searches remaining, one document throws and triggers this catch (we subtract 5 from the remaining), but then one of the previously in-progress document searches also completes, subtracting another 1, and we get -1.

Reply: No. A totally normal thing that causes this is when we short-circuit the remaining work. I can add a comment, but this happens if we search cached documents and find results (during solution load); in that case, we skip the other searches.

Reply: It should not race. This is async-serial, meaning this should never run concurrently with any finding work.
Debug.Assert(_remainingProgressItems == 0);
Reply: It was becoming a PITA for all the control flow to keep track of this and set progress to the 'finished' state at the end. So now we just keep track ourselves across all codepaths and jump to the end unilaterally.
// Pass along isFullyLoaded so that the UI can show indication to users that results may be incomplete.
_callback.Done(isFullyLoaded);
}
}
@@ -118,57 +153,65 @@ private async Task SearchCurrentDocumentAsync(CancellationToken cancellationToke
if (service == null)
return;

await _progress.AddItemsAsync(1, cancellationToken).ConfigureAwait(false);
try
{
await service.SearchDocumentAsync(
_activeDocument, _searchPattern, _kinds,
r => _callback.AddItemAsync(project, r, cancellationToken),
cancellationToken).ConfigureAwait(false);
}
finally
{
await _progress.ItemCompletedAsync(cancellationToken).ConfigureAwait(false);
}
await AddProgressItemsAsync(1, cancellationToken).ConfigureAwait(false);
await service.SearchDocumentAsync(
_activeDocument, _searchPattern, _kinds,
r => _callback.AddItemAsync(project, r, cancellationToken),
cancellationToken).ConfigureAwait(false);
}

private async Task SearchAllProjectsAsync(bool isFullyLoaded, CancellationToken cancellationToken)
private async Task SearchAllProjectsAsync(
bool isFullyLoaded,
NavigateToSearchScope scope,
CancellationToken cancellationToken)
{
var seenItems = new HashSet<INavigateToSearchResult>(NavigateToSearchResultComparer.Instance);
var orderedProjects = GetOrderedProjectsToProcess();
var searchGeneratedDocuments = isFullyLoaded;

var searchRegularDocuments = scope.HasFlag(NavigateToSearchScope.RegularDocuments);
var searchGeneratedDocuments = scope.HasFlag(NavigateToSearchScope.GeneratedDocuments);
Debug.Assert(searchRegularDocuments || searchGeneratedDocuments);

var projectCount = orderedProjects.Sum(g => g.Length);

// We do at least two passes. One for loaded docs. One for source generated docs.
await _progress.AddItemsAsync(projectCount * 2, cancellationToken).ConfigureAwait(false);
await AddProgressItemsAsync(
projectCount * ((searchRegularDocuments ? 1 : 0) + (searchGeneratedDocuments ? 1 : 0)),
cancellationToken).ConfigureAwait(false);

if (searchRegularDocuments)
await SearchRegularDocumentsAsync(isFullyLoaded, orderedProjects, seenItems, cancellationToken).ConfigureAwait(false);

if (searchGeneratedDocuments)
await SearchGeneratedDocumentsAsync(seenItems, cancellationToken).ConfigureAwait(false);
// Report a telemetry event to track if we found uncached items after failing to find cached items.
// In practice if we see that we are always finding uncached items, then it's likely something
// has broken in the caching system since we would expect to normally find values there. Specifically
// we expect: foundFullItems <<< not foundFullItems.
if (!isFullyLoaded)
Logger.Log(FunctionId.NavigateTo_CacheItemsMiss, KeyValueLogMessage.Create(m => m["FoundFullItems"] = seenItems.Count > 0));
}

private async Task SearchRegularDocumentsAsync(
bool isFullyLoaded,
ImmutableArray<ImmutableArray<Project>> orderedProjects,
HashSet<INavigateToSearchResult> seenItems,
CancellationToken cancellationToken)
{
if (!isFullyLoaded)
{
// We need an additional pass to look through cached docs.
await _progress.AddItemsAsync(projectCount, cancellationToken).ConfigureAwait(false);

await AddProgressItemsAsync(orderedProjects.Sum(g => g.Length), cancellationToken).ConfigureAwait(false);
await SearchCachedDocumentsAsync(orderedProjects, seenItems, cancellationToken).ConfigureAwait(false);

// If searching cached data returned any results, then we're done. We've at least shown some results
// to the user. That will hopefully serve them well enough until the solution fully loads.
if (seenItems.Count > 0)
{
// Ensure that we actually complete all our workitems so that the progress bar completes.
await _progress.ItemsCompletedAsync(projectCount * 2, cancellationToken).ConfigureAwait(false);
return;
}
}

await SearchFullyLoadedProjectsAsync(orderedProjects, seenItems, cancellationToken).ConfigureAwait(false);
await SearchGeneratedDocumentsAsync(seenItems, cancellationToken).ConfigureAwait(false);
Review comment: Since this is streaming, wouldn't we have reported the non-source-generated results first already?

Reply: I'm not sure what you mean :)
// Report a telemetry event to track if we found uncached items after failing to find cached items.
// In practice if we see that we are always finding uncached items, then it's likely something
// has broken in the caching system since we would expect to normally find values there. Specifically
// we expect: foundFullItems <<< not foundFullItems.
if (!isFullyLoaded)
Logger.Log(FunctionId.NavigateTo_CacheItemsMiss, KeyValueLogMessage.Create(m => m["FoundFullItems"] = seenItems.Count > 0));
}

/// <summary>
@@ -293,7 +336,7 @@ await processProjectAsync(service, project, result =>
finally
{
// after each project is searched, increment our progress.
await _progress.ItemCompletedAsync(cancellationToken).ConfigureAwait(false);
await ProgressItemsCompletedAsync(count: 1, cancellationToken).ConfigureAwait(false);
}
}
}
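An aside on the progress-tracking change above: it reduces to a small pattern, a single Interlocked counter of remaining work items wrapped around the progress callback, plus a finally block that drains whatever is left so the progress bar always reaches completion even when the search short-circuits or is cancelled. The following is a minimal self-contained sketch of that pattern; ProgressCounter and SearchDriver are simplified stand-ins invented for illustration, not the real Roslyn types (IStreamingProgressTracker, NavigateToSearcher).

using System;
using System.Diagnostics;
using System.Threading;
using System.Threading.Tasks;

// Simplified stand-in for the _remainingProgressItems bookkeeping in the PR:
// incremented when work is scheduled, decremented as each unit completes,
// drained to zero at the very end.
internal sealed class ProgressCounter
{
    private readonly Action<int> _onRemainingChanged; // e.g. hooked to a progress bar
    private int _remaining;

    public ProgressCounter(Action<int> onRemainingChanged)
        => _onRemainingChanged = onRemainingChanged;

    public void Add(int count)
    {
        Debug.Assert(count >= 0);
        var newValue = Interlocked.Add(ref _remaining, count);
        _onRemainingChanged(newValue);
    }

    public void Complete(int count)
    {
        var newValue = Interlocked.Add(ref _remaining, -count);
        Debug.Assert(newValue >= 0);
        _onRemainingChanged(newValue);
    }

    public int Remaining => Volatile.Read(ref _remaining);
}

internal static class SearchDriver
{
    // Mirrors the try/catch/finally shape of SearchAsync: even if we short-circuit
    // (e.g. cached results were good enough) or get cancelled, the finally block
    // drains the counter so the progress bar completes.
    public static async Task RunAsync(
        ProgressCounter progress, Func<Task>[] projectSearches, CancellationToken cancellationToken)
    {
        try
        {
            progress.Add(projectSearches.Length);
            foreach (var search in projectSearches)
            {
                cancellationToken.ThrowIfCancellationRequested();
                await search().ConfigureAwait(false);
                progress.Complete(1); // one progress item per project, as in the PR
            }
        }
        catch (OperationCanceledException)
        {
        }
        finally
        {
            progress.Complete(progress.Remaining);
        }
    }
}

The useful property is that Complete is safe on every code path, so the finally block can unconditionally drain the counter instead of each branch having to balance its own Add/Complete calls.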
File: Progression graph provider (GetGraphQueries)
@@ -10,6 +10,7 @@
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.Editing;
using Microsoft.CodeAnalysis.Editor.Shared.Utilities;
using Microsoft.CodeAnalysis.NavigateTo;
using Microsoft.CodeAnalysis.Shared.TestHooks;
using Microsoft.VisualStudio.GraphModel;
using Microsoft.VisualStudio.GraphModel.CodeSchema;

@@ -136,11 +137,15 @@ internal static List<IGraphQuery> GetGraphQueries(

if (searchParameters != null)
{
// WARNING: searchParameters.SearchQuery returns an IVsSearchQuery object, which
// is a COM type. Therefore, it's probably best to grab the values we want now
// rather than get surprised by COM marshalling later.
graphQueries.Add(new SearchGraphQuery(
searchParameters.SearchQuery.SearchString, threadingContext, asyncListener));
// WARNING: searchParameters.SearchQuery returns an IVsSearchQuery object, which is a COM type.
// Therefore, it's probably best to grab the values we want now rather than get surprised by COM
// marshalling later.
//
// Create two queries. One to find results in normal docs, and one to find results in generated
// docs. That way if the generated docs take a long time we can still report the regular doc
// results immediately.
graphQueries.Add(new SearchGraphQuery(searchParameters.SearchQuery.SearchString, NavigateToSearchScope.RegularDocuments, threadingContext, asyncListener));
Review comment: Similar question as above: the code was already searching non-source-generated documents first, and since we're streaming results back, wouldn't they have been reported first? Why do we need to explicitly separate the search?

Reply: Ah, I see the question. Progression is not streaming. We have to grab the produced sub-graph (what we make) and apply it at once to the full graph. So this allows us to break that into two chunks: the produced sub-graph for the normal docs, and the produced sub-graph for the generated docs. We can apply each to the larger graph when each finishes.
graphQueries.Add(new SearchGraphQuery(searchParameters.SearchQuery.SearchString, NavigateToSearchScope.GeneratedDocuments, threadingContext, asyncListener));
}
}
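To picture the reply above: each progression query produces its whole sub-graph and applies it to the larger graph as one batch, so running two narrower queries lets the regular-document batch land without waiting on the generated-document one. A rough sketch of that shape follows; runScopedQueryAsync and applySubGraph are hypothetical placeholders for the progression machinery, and only the enum is copied from the diff.

using System;
using System.Threading;
using System.Threading.Tasks;

// The scope enum added by this PR (copied from the diff above).
[Flags]
internal enum NavigateToSearchScope
{
    RegularDocuments = 0b01,
    GeneratedDocuments = 0b10,
    AllDocuments = RegularDocuments | GeneratedDocuments,
}

internal static class TwoPhaseGraphSearch
{
    // TSubGraph stands in for whatever a progression query produces (a batch of graph
    // nodes); runScopedQueryAsync and applySubGraph are hypothetical placeholders for
    // the real machinery, which applies each query's results to the graph in one batch.
    public static async Task SearchAsync<TSubGraph>(
        string searchString,
        Func<string, NavigateToSearchScope, CancellationToken, Task<TSubGraph>> runScopedQueryAsync,
        Action<TSubGraph> applySubGraph,
        CancellationToken cancellationToken)
    {
        // Regular documents first: their sub-graph is applied as soon as it is ready,
        // instead of waiting on the (potentially much slower) generated documents.
        applySubGraph(await runScopedQueryAsync(
            searchString, NavigateToSearchScope.RegularDocuments, cancellationToken).ConfigureAwait(false));

        applySubGraph(await runScopedQueryAsync(
            searchString, NavigateToSearchScope.GeneratedDocuments, cancellationToken).ConfigureAwait(false));
    }
}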
Review comment: Where do the enum values come from?

Reply: Not sure what you mean. Can you clarify?

Review comment: Ah, I meant: why the value 0b01?

Reply: Ah. It's a flags enum, so I prefer using binary to show the values and how they can be combined.
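To spell the answer out: with binary literals each member occupies its own bit, so a combined value is just the bitwise OR of its parts and membership checks are simple bit tests. A tiny standalone example, not part of the PR:

using System;

[Flags]
internal enum NavigateToSearchScope
{
    RegularDocuments   = 0b01, // bit 0
    GeneratedDocuments = 0b10, // bit 1
    AllDocuments = RegularDocuments | GeneratedDocuments, // 0b11
}

internal static class Program
{
    private static void Main()
    {
        var scope = NavigateToSearchScope.AllDocuments;

        // Both bits are set, so both membership checks pass.
        Console.WriteLine(scope.HasFlag(NavigateToSearchScope.RegularDocuments));   // True
        Console.WriteLine(scope.HasFlag(NavigateToSearchScope.GeneratedDocuments)); // True
        Console.WriteLine((int)scope); // 3, i.e. 0b11
    }
}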