Skip to content

Commit

Permalink
Fix merging race conditions
Browse files Browse the repository at this point in the history
Ensure we cache and synchronize appropriately so we don't merge more than
once or do unsafe reads of the resultset. This has the added benefit of
reducing the runtime of at_exit hooks by over 66%, since we now do less
than 1/3 the work.

See https://gist.github.com/jenseng/62465f674f8c02de09ef776f23d4dca4
for simple repro script.

Basically there are 3 main problems when using merging:

1. `SimpleCov::ResultMerger.stored_data` doesn't synchronize its read,
   so it can see an empty file if another process is writing it.
2. `SimpleCov::ResultMerger.resultset` calls `.stored_data` twice and
   never caches the result.
3. `SimpleCov.result` doesn't cache the `@result`, so the default `at_exit`
   behavior causes it to store and merge 3 times. Due to 1. and 2.,
   this is extra bad.

This can cause the formatter to miss out on coverage data and/or get the
wrong values for covered percentages.

Furthermore, if you use Oj, `JSON.parse("") -> nil`, which means
`.resultset` can be nil, so this race condition causes exceptions as
seen in #406. In addition to fixing the race condition, also add ` || {}`
to make `.stored_data` a bit more robust and protect against an empty
.resultset.json.
  • Loading branch information
jenseng committed Jul 17, 2017
1 parent 607986f commit 6f12dd6
Show file tree
Hide file tree
Showing 4 changed files with 252 additions and 26 deletions.
21 changes: 13 additions & 8 deletions lib/simplecov.rb
Original file line number Diff line number Diff line change
Expand Up @@ -75,23 +75,21 @@ def add_not_loaded_files(result)
# from cache using SimpleCov::ResultMerger if use_merging is activated (default)
#
def result
# Ensure the variable is defined to avoid ruby warnings
@result = nil unless defined?(@result)
return @result if result?

# Collect our coverage result
if running && !result?
if running
@result = SimpleCov::Result.new add_not_loaded_files(Coverage.result)
end

# If we're using merging of results, store the current result
# first, then merge the results and return those
# first (if there is one), then merge the results and return those
if use_merging
SimpleCov::ResultMerger.store_result(@result) if result?

SimpleCov::ResultMerger.merged_result
else
@result
@result = SimpleCov::ResultMerger.merged_result
end

@result
ensure
self.running = false
end
Expand Down Expand Up @@ -158,6 +156,13 @@ def usable?
false
end
end

#
# Clear out the previously cached .result. Primarily useful in testing
#
def clear_result
@result = nil
end
end
end

Expand Down
55 changes: 42 additions & 13 deletions lib/simplecov/result_merger.rb
Original file line number Diff line number Diff line change
Expand Up @@ -17,25 +17,31 @@ def resultset_writelock
File.join(SimpleCov.coverage_path, ".resultset.json.lock")
end

# Loads the cached resultset from JSON and returns it as a Hash
# Loads the cached resultset from JSON and returns it as a Hash,
# caching it for subsequent accesses.
def resultset
if stored_data
begin
JSON.parse(stored_data)
rescue
@resultset ||= begin
data = stored_data
if data
begin
JSON.parse(data) || {}
rescue
{}
end
else
{}
end
else
{}
end
end

# Returns the contents of the resultset cache as a string or if the file is missing or empty nil
def stored_data
return unless File.exist?(resultset_path)
data = File.read(resultset_path)
return if data.nil? || data.length < 2
data
synchronize_resultset do
return unless File.exist?(resultset_path)
data = File.read(resultset_path)
return if data.nil? || data.length < 2
data
end
end

# Gets the resultset hash and re-creates all included instances
Expand Down Expand Up @@ -76,8 +82,9 @@ def merged_result

# Saves the given SimpleCov::Result in the resultset cache
def store_result(result)
File.open(resultset_writelock, "w+") do |f|
f.flock(File::LOCK_EX)
synchronize_resultset do
# Ensure we have the latest, in case it was already cached
clear_resultset
new_set = resultset
command_name, data = result.to_hash.first
new_set[command_name] = data
Expand All @@ -87,6 +94,28 @@ def store_result(result)
end
true
end

# Ensure only one process is reading or writing the resultset at any
# given time
def synchronize_resultset
# make it reentrant
return yield if defined?(@resultset_locked) && @resultset_locked

begin
@resultset_locked = true
File.open(resultset_writelock, "w+") do |f|
f.flock(File::LOCK_EX)
yield
end
ensure
@resultset_locked = false
end
end

# Clear out the previously cached .resultset
def clear_resultset
@resultset = nil
end
end
end
end
93 changes: 88 additions & 5 deletions spec/result_merger_spec.rb
Original file line number Diff line number Diff line change
@@ -1,10 +1,16 @@
require "helper"
require "tempfile"
require "timeout"

if SimpleCov.usable?
describe SimpleCov::ResultMerger do
before do
SimpleCov::ResultMerger.clear_resultset
File.delete(SimpleCov::ResultMerger.resultset_path) if File.exist?(SimpleCov::ResultMerger.resultset_path)
end

describe "with two faked coverage resultsets" do
before do
SimpleCov.use_merging true
@resultset1 = {
source_fixture("sample.rb") => [nil, 1, 1, 1, nil, nil, 1, 1, nil, nil],
source_fixture("app/models/user.rb") => [nil, 1, 1, 1, nil, nil, 1, 0, nil, nil],
Expand Down Expand Up @@ -73,15 +79,92 @@
expect(SimpleCov::ResultMerger.results.length).to eq(1)
end
end
end
end
end

describe ".store_result" do
it "refreshes the resultset" do
set = SimpleCov::ResultMerger.resultset
SimpleCov::ResultMerger.store_result({})
new_set = SimpleCov::ResultMerger.resultset
expect(new_set).not_to be(set)
end

context "with merging disabled" do
before { SimpleCov.use_merging false }
it "persists to disk" do
SimpleCov::ResultMerger.store_result("a" => [1])
SimpleCov::ResultMerger.clear_resultset
new_set = SimpleCov::ResultMerger.resultset
expect(new_set).to eq("a" => [1])
end

it "synchronizes writes" do
expect(SimpleCov::ResultMerger).to receive(:synchronize_resultset)
SimpleCov::ResultMerger.store_result({})
end
end

it "returns nil for SimpleCov.result" do
expect(SimpleCov.result).to be_nil
describe ".resultset" do
it "caches" do
set = SimpleCov::ResultMerger.resultset
new_set = SimpleCov::ResultMerger.resultset
expect(new_set).to be(set)
end

it "synchronizes reads" do
expect(SimpleCov::ResultMerger).to receive(:synchronize_resultset)
SimpleCov::ResultMerger.resultset
end
end

describe ".synchronize_resultset" do
it "is reentrant (i.e. doesn't block its own process)" do
# without @resultset_locked, this spec would fail and
# `.store_result` wouldn't work
expect do
Timeout.timeout(1) do
SimpleCov::ResultMerger.synchronize_resultset do
SimpleCov::ResultMerger.synchronize_resultset do
end
end
end
end.not_to raise_error
end

it "blocks other processes" do
file = Tempfile.new("foo")

other_process = open("|ruby -e " + Shellwords.escape(<<-CODE) + " 2>/dev/null")
require "simplecov"
SimpleCov.coverage_dir(#{SimpleCov.coverage_dir.inspect})
# ensure the parent process has enough time to get a
# lock before we do
sleep 0.5
$stdout.sync = true
puts "running" # see `sleep`s in parent process
SimpleCov::ResultMerger.synchronize_resultset do
File.open(#{file.path.inspect}, "a") { |f| f.write("process 2\n") }
end
CODE

SimpleCov::ResultMerger.synchronize_resultset do
# wait until the child process is going, and then wait some more
# so we can be sure it blocks on the lock we already have.
sleep 0.1 until other_process.gets == "running\n"
sleep 1

# despite the sleeps, this will be written first since we got
# the first lock
File.open(file.path, "a") { |f| f.write("process 1\n") }
end

# wait for it to finish
other_process.gets

expect(file.read).to eq("process 1\nprocess 2\n")
end
end
end
Expand Down
109 changes: 109 additions & 0 deletions spec/simplecov_spec.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,109 @@
require "helper"

if SimpleCov.usable?
describe SimpleCov do
describe ".result" do
before do
SimpleCov.clear_result
allow(Coverage).to receive(:result).once.and_return({})
end

context "with merging disabled" do
before do
allow(SimpleCov).to receive(:use_merging).once.and_return(false)
end

context "when not running" do
before do
allow(SimpleCov).to receive(:running).and_return(false)
end

it "returns nil" do
expect(SimpleCov.result).to be_nil
end
end

context "when running" do
before do
allow(SimpleCov).to receive(:running).and_return(true, false)
end

it "uses the result from Coverage" do
expect(Coverage).to receive(:result).once.and_return(__FILE__ => [0, 1])
expect(SimpleCov.result.filenames).to eq [__FILE__]
end

it "adds not-loaded-files" do
expect(SimpleCov).to receive(:add_not_loaded_files).once.and_return({})
SimpleCov.result
end

it "doesn't store the current coverage" do
expect(SimpleCov::ResultMerger).not_to receive(:store_result)
SimpleCov.result
end

it "doesn't merge the result" do
expect(SimpleCov::ResultMerger).not_to receive(:merged_result)
SimpleCov.result
end

it "caches its result" do
result = SimpleCov.result
expect(SimpleCov.result).to be(result)
end
end
end

context "with merging enabled" do
let(:the_merged_result) { double }

before do
allow(SimpleCov).to receive(:use_merging).once.and_return(true)
allow(SimpleCov::ResultMerger).to receive(:store_result).once
allow(SimpleCov::ResultMerger).to receive(:merged_result).once.and_return(the_merged_result)
end

context "when not running" do
before do
allow(SimpleCov).to receive(:running).and_return(false)
end

it "merges the result" do
expect(SimpleCov.result).to be(the_merged_result)
end
end

context "when running" do
before do
allow(SimpleCov).to receive(:running).and_return(true, false)
end

it "uses the result from Coverage" do
expect(Coverage).to receive(:result).once.and_return({})
SimpleCov.result
end

it "adds not-loaded-files" do
expect(SimpleCov).to receive(:add_not_loaded_files).once.and_return({})
SimpleCov.result
end

it "stores the current coverage" do
expect(SimpleCov::ResultMerger).to receive(:store_result).once
SimpleCov.result
end

it "merges the result" do
expect(SimpleCov.result).to be(the_merged_result)
end

it "caches its result" do
result = SimpleCov.result
expect(SimpleCov.result).to be(result)
end
end
end
end
end
end

0 comments on commit 6f12dd6

Please sign in to comment.