
Files array results matching #1234

Merged
merged 5 commits on Jan 28, 2019
4 changes: 0 additions & 4 deletions src/Sarif.Multitool/Sarif.Multitool.csproj
@@ -13,10 +13,6 @@

<Import Project="$([MSBuild]::GetDirectoryNameOfFileAbove($(MSBuildThisFileDirectory), build.props))\build.props" />

<ItemGroup>
<Compile Remove="ResultMatchingCommand.cs" />
</ItemGroup>

<ItemGroup>
<PackageReference Include="CommandLineParser" Version="2.2.1" />
<PackageReference Include="Microsoft.Json.Pointer" Version="0.57.0" />
@@ -309,72 +309,54 @@ public void SarifLogResultMatcher_MultipleLogsDuplicateData_WorksAsExpected()
new Run()
{
Tool = new Tool { Name = "TestTool" },
Files = new Dictionary<string, FileData>()
Files = new List<FileData>
{
{ "testfile", new FileData() { Contents = new FileContent() { Text = "TestFileContents" } } }
new FileData() { Contents = new FileContent() { Text = "TestFileContents" } }
},
Results = new Result[0]
}
}
};
SarifLog current2 = new SarifLog()
{
Runs = new Run[]
{
new Run()
{
Tool = new Tool { Name = "TestTool" },
Files = new Dictionary<string, FileData>()
Results = new List<Result>
{
{ "testfile", new FileData() { Contents = new FileContent() { Text = "TestFileContents" } } }
},
Results = new Result[0],
new Result
{
Message = new Message { Text = "Some testing occurred."},
Locations = new List<Location>
{
new Location
{
PhysicalLocation = new PhysicalLocation
{
FileLocation = new FileLocation
{
FileIndex = 0
}
}
}
}
}
}
}
}
};

SarifLog current2 = current1.DeepClone();
SarifLog result = s_preserveOldestPropertyBagMatcher.Match(new SarifLog[0], new SarifLog[] { current1, current2 }).First();

// In the first match operation, the file data objects are identical; they should be collapsed into a single array entry
result.Runs[0].Files.Should().HaveCount(1);

}

[Fact]
public void SarifLogResultMatcher_MultipleLogsInvalidData_ThrowsInvalidOperationException()
{
SarifLog current1 = new SarifLog()
{
Runs = new Run[]
{
new Run()
{
Tool = new Tool { Name = "TestTool" },
Files = new Dictionary<string, FileData>()
{
{ "testfile", new FileData() { Contents = new FileContent() { Text = "TestFileContents" } } }
},
Results = new Result[0]
}
}
};
SarifLog current2 = new SarifLog()
{
Runs = new Run[]
{
new Run()
{
Tool = new Tool { Name = "TestTool" },
Files = new Dictionary<string, FileData>()
{
{ "testfile", new FileData() { Contents = new FileContent() { Text = "DifferentTestFileContents" } } }
},
Results = new Result[0],
}
}
};
Assert.Throws<InvalidOperationException>(() => s_preserveOldestPropertyBagMatcher.Match(new SarifLog[0], new SarifLog[] { current1, current2 }));
current2 = current1.DeepClone();

}
// Now we differentiate a files object
current2.Runs[0].Files[0].Contents.Text = Guid.NewGuid().ToString();
result = s_preserveOldestPropertyBagMatcher.Match(new SarifLog[0], new SarifLog[] { current1, current2 }).First();

// Both file data objects should be present
result.Runs[0].Files.Should().HaveCount(2);

// Merged results should each refer to differentiated file data object
result.Runs[0].Results[0].Locations[0].PhysicalLocation.FileLocation.FileIndex.Should().Be(0);
result.Runs[0].Results[1].Locations[0].PhysicalLocation.FileLocation.FileIndex.Should().Be(1);
}

[Fact]
public void SarifLogResultMatcher_PreservesPropertiesProperly ()
{
@@ -388,15 +370,14 @@ public void SarifLogResultMatcher_PreservesPropertiesProperly ()
SetPropertyOnAllFileAndResultObjects(baselineLog, "Key", baselinePropertyValue);
SetPropertyOnAllFileAndResultObjects(currentLog, "Key", currentPropertyValue);


SarifLog matchedLog = s_preserveOldestPropertyBagMatcher.Match(baselineLog.DeepClone(), currentLog.DeepClone());
matchedLog.Runs[0].Results.Where((r) => { return r.GetProperty("Key") == baselinePropertyValue; }).Count().Should().Be(matchedLog.Runs[0].Results.Count);
matchedLog.Runs[0].Files.Values.Where((r) => { return r.GetProperty("Key") == baselinePropertyValue; }).Count().Should().Be(matchedLog.Runs[0].Files.Count);
matchedLog.Runs[0].Files.Where((r) => { return r.GetProperty("Key") == baselinePropertyValue; }).Count().Should().Be(matchedLog.Runs[0].Files.Count);

// Retain property bag values from most current run
matchedLog = s_preserveMostRecentPropertyBagMatcher.Match(baselineLog.DeepClone(), currentLog.DeepClone());
matchedLog.Runs[0].Results.Where((r) => { return r.GetProperty("Key") == currentPropertyValue; }).Count().Should().Be(matchedLog.Runs[0].Results.Count);
matchedLog.Runs[0].Files.Values.Where((r) => { return r.GetProperty("Key") == currentPropertyValue; }).Count().Should().Be(matchedLog.Runs[0].Files.Count);
matchedLog.Runs[0].Files.Where((r) => { return r.GetProperty("Key") == currentPropertyValue; }).Count().Should().Be(matchedLog.Runs[0].Files.Count);
}

private void SetPropertyOnAllFileAndResultObjects(SarifLog sarifLog, string propertyKey, string propertyValue)
@@ -410,7 +391,7 @@ private void SetPropertyOnAllFileAndResultObjects(SarifLog sarifLog, string prop

if (run.Files != null)
{
foreach (FileData file in run.Files.Values)
foreach (FileData file in run.Files)
{
file.SetProperty(propertyKey, propertyValue);
}
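The test changes above capture the core shift in this change: Run.Files is now a flat list of FileData rather than a Dictionary<string, FileData> keyed by URI, and results refer to file data by position through FileLocation.FileIndex. A minimal sketch of that addressing, assuming only the object model shapes the tests themselves exercise (Run, Result, Location, PhysicalLocation, FileLocation, FileData):

// Sketch only: resolve the FileData a result points at under the new
// list-based Run.Files property. Assumes Run.Files is an IList<FileData>
// and FileLocation.FileIndex is an offset into it, as in the tests above.
using System;
using Microsoft.CodeAnalysis.Sarif;

internal static class FileIndexAddressingSketch
{
    public static FileData ResolveFile(Run run, Result result)
    {
        FileLocation fileLocation = result.Locations[0].PhysicalLocation.FileLocation;

        // Old model: run.Files["testfile"] looked file data up by key.
        // New model: FileIndex addresses the run-level files array directly.
        int fileIndex = fileLocation.FileIndex;
        if (run.Files == null || fileIndex < 0 || fileIndex >= run.Files.Count)
        {
            throw new InvalidOperationException("Result refers to a file index not present in run.Files.");
        }

        return run.Files[fileIndex];
    }
}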
113 changes: 113 additions & 0 deletions src/Sarif.UnitTests/OrderSensitiveValueComparisonListTests.cs
@@ -0,0 +1,113 @@
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System;
using System.Collections.Generic;
using FluentAssertions;
using Xunit;

namespace Microsoft.CodeAnalysis.Sarif.UnitTests
{
/// <summary>
/// A comparer, analogous to the internal .NET ObjectEqualityComparer, that
/// provides comparison semantics roughly equivalent to the default .NET
/// behavior provided by Object.Equals and friends (this functionality is
/// what's invoked when calling List.Contains). Our comparer only exists
/// for testing, because ObjectEqualityComparer is an internal type. We
/// don't cover all possible cases to make the types equivalent; the
/// implementation covers enough for core validation.
/// </summary>
/// <typeparam name="T"></typeparam>
internal class DefaultObjectComparer<T> : IEqualityComparer<T>
{
public bool Equals(T x, T y)
{
return Object.Equals(x, y);
}

public int GetHashCode(T obj)
{
if (obj == null) { return 0; }
return obj.GetHashCode();
}
}

public class OrderSensitiveValueComparisonListTests
{
[Fact]
public void OrderSensitiveValueComparisonListTests_DefaultObjectComparer()
{
var equalityComparer = new DefaultObjectComparer<FileChange>();

var listOne = CreateTestList(equalityComparer);

// Populate the second list with references from the first.
var listTwo = new OrderSensitiveValueComparisonList<FileChange>(equalityComparer);
for (int i = 0; i < listOne.Count; i++)
{
listTwo.Add(listOne[i]);
}

// Every list s/be equal to itself.
listOne.Equals(listOne).Should().Be(true);

// Two lists with shared objects, by reference, in the same
// order should be regarded as equivalent.
listOne.Equals(listTwo).Should().Be(true);

FileChange toSwap = listTwo[0];
listTwo[0] = listTwo[1];
listTwo[1] = toSwap;

// We have reordered two objects that are themselves identical.
// The comparison should fail as the order of references changed.
listOne.Equals(listTwo).Should().Be(false);
}

[Fact]
public void OrderSensitiveValueComparisonListTests_ValueComparer()
{
// Two identical lists with elements that are
// distinct objects, by reference.
var listOne = CreateTestList(FileChange.ValueComparer);
var listTwo = CreateTestList(FileChange.ValueComparer);

// Every list s/be equal to itself
listOne.Equals(listOne).Should().Be(true);

// As initialized, these objects are different, due
// to a unique GUID property on each list
listOne.Equals(listTwo).Should().Be(false);

// Make the two lists equivalent, by value
listTwo[2].SetProperty(DIFFERENTIATING_PROPERTY_NAME, listOne[2].GetProperty<Guid>(DIFFERENTIATING_PROPERTY_NAME));
listOne.Equals(listTwo).Should().Be(true);

FileChange toSwap = listTwo[0];
listTwo[0] = listTwo[1];
listTwo[1] = toSwap;

// We have reordered two objects that are themselves identical.
// by value. The comparison should still succeed.
listOne.Equals(listTwo).Should().Be(true);
}

private const string DIFFERENTIATING_PROPERTY_NAME = nameof(DIFFERENTIATING_PROPERTY_NAME);

private OrderSensitiveValueComparisonList<FileChange> CreateTestList(IEqualityComparer<FileChange> equalityComparer)
{
// Test list. First two elements are identical. The third element is unique.
var fileChangeOne = new FileChange();
var fileChangeTwo = new FileChange();

var fileChangeThree = new FileChange();
Guid differentiatingProperty = Guid.NewGuid();
fileChangeThree.SetProperty(DIFFERENTIATING_PROPERTY_NAME, differentiatingProperty);

var list = new OrderSensitiveValueComparisonList<FileChange>(equalityComparer);
list.AddRange(new[] { fileChangeOne, fileChangeTwo, fileChangeThree });

return list;
}
}
}
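The new tests above pin down the semantics OrderSensitiveValueComparisonList is expected to have: equality is element-wise, order-sensitive, and delegated to an injected IEqualityComparer<T>. A minimal sketch of an Equals with those semantics, written only to illustrate what the tests assert (it is not the SDK's implementation):

// Illustrative only: order-sensitive, element-wise equality driven by an
// injected comparer, matching the behavior the tests above assert.
using System.Collections.Generic;

internal class OrderSensitiveComparisonSketch<T> : List<T>
{
    private readonly IEqualityComparer<T> _comparer;

    public OrderSensitiveComparisonSketch(IEqualityComparer<T> comparer)
    {
        _comparer = comparer ?? EqualityComparer<T>.Default;
    }

    public override bool Equals(object obj)
    {
        if (object.ReferenceEquals(this, obj)) { return true; }

        // Equal only if the other list has the same length and every element
        // matches, by the injected comparer, at the same position.
        if (!(obj is IList<T> other) || other.Count != Count) { return false; }

        for (int i = 0; i < Count; i++)
        {
            if (!_comparer.Equals(this[i], other[i])) { return false; }
        }

        return true;
    }

    public override int GetHashCode()
    {
        int hash = 17;
        foreach (T item in this)
        {
            hash = unchecked((hash * 31) + (item == null ? 0 : _comparer.GetHashCode(item)));
        }
        return hash;
    }
}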
4 changes: 3 additions & 1 deletion src/Sarif.UnitTests/RandomSarifLogGenerator.cs
@@ -92,6 +92,7 @@ public static IEnumerable<string> GenerateFakeFiles(string baseAddress, int coun
public static IList<Result> GenerateFakeResults(Random random, List<string> ruleIds, List<Uri> filePaths, int resultCount)
{
List<Result> results = new List<Result>();
int fileIndex = random.Next(filePaths.Count);
for (int i = 0; i < resultCount; i++)
{
results.Add(new Result()
@@ -105,7 +106,8 @@ public static IList<Result> GenerateFakeResults(Random random, List<string> rule
{
FileLocation = new FileLocation
{
Uri = filePaths[random.Next(filePaths.Count)]
Uri = filePaths[fileIndex],
FileIndex = fileIndex
Member Author

FileIndex = fileIndex

The results matcher now only persists files that are observed via results. I will fix this later; the results matcher should retain all files from the current run. The implementation as it stands exists to ensure we flow property bags associated with baseline files, when appropriate. It's easier for me to just fix the gap than to explain much more. :)

#1235

},
}
}
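The author's review comment above notes a gap: as implemented, the matcher only carries forward files that are referenced by matched results, while the intent is to retain every file from the current run and still flow baseline property bags onto matched files. A minimal sketch of that intended behavior, using only the SetProperty/GetProperty calls the tests above rely on; the matchedBaselineFiles map and propertyKey parameter are illustrative assumptions, not part of the matcher's API:

// Sketch only, not the matcher's implementation: keep every FileData from the
// current run and, where a baseline counterpart is known, let the baseline's
// property value win (the "preserve oldest" policy exercised in the tests).
using System.Collections.Generic;
using Microsoft.CodeAnalysis.Sarif;

internal static class FileRetentionSketch
{
    public static IList<FileData> MergeFiles(
        Run currentRun,
        IDictionary<FileData, FileData> matchedBaselineFiles,
        string propertyKey)
    {
        var merged = new List<FileData>();
        if (currentRun?.Files == null) { return merged; }

        foreach (FileData currentFile in currentRun.Files)
        {
            // Retain the file even if no result refers to it.
            merged.Add(currentFile);

            // Flow the baseline file's property bag value forward when this
            // file was matched to a baseline file carrying the property.
            if (matchedBaselineFiles != null &&
                matchedBaselineFiles.TryGetValue(currentFile, out FileData baselineFile))
            {
                currentFile.SetProperty(propertyKey, baselineFile.GetProperty(propertyKey));
            }
        }

        return merged;
    }
}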
6 changes: 0 additions & 6 deletions src/Sarif.UnitTests/Sarif.UnitTests.csproj
@@ -14,12 +14,6 @@

<Import Project="$([MSBuild]::GetDirectoryNameOfFileAbove($(MSBuildThisFileDirectory), build.props))\build.props" />

<ItemGroup>
<Compile Remove="Baseline\**" />
<EmbeddedResource Remove="Baseline\**" />
<None Remove="Baseline\**" />
</ItemGroup>

<ItemGroup>
<PackageReference Include="FluentAssertions" Version="5.4.1" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="15.5.0" />