module SimpleCov::ResultMerger
# Reset the memoized resultset so the next call to #resultset re-reads
# the cached data from disk.
def clear_resultset
  @resultset = nil
end
# Merge the given results and, when merging produced a result, persist
# it to the resultset cache.
#
# Returns the merged SimpleCov::Result, or nil when there was nothing
# to merge.
def merge_and_store(*results)
  result = merge_results(*results)
  store_result(result) if result
  result
end
# Merge two or more SimpleCov::Results into a new one with merged
# coverage data and the command_name for the result consisting of a join
# on all source results' names.
def merge_results(*results)
  # Deep-copy the coverage hashes via a JSON round-trip so combining
  # cannot mutate the callers' original results.
  parsed_results = JSON.parse(JSON.dump(results.map(&:original_result)))
  combined_result = SimpleCov::Combine::ResultsCombiner.combine(*parsed_results)
  result = SimpleCov::Result.new(combined_result)

  # Specify the command name
  result.command_name = results.map(&:command_name).sort.join(", ")
  result
end
# Gets all SimpleCov::Results from cache, merges them and produces a new
# SimpleCov::Result with merged coverage data and the command_name
# for the result consisting of a join on all source result's names
def merged_result
  merge_results(*results)
end
# Gets the resultset hash and re-creates all included instances
# of SimpleCov::Result from that.
# All results that are above the SimpleCov.merge_timeout will be
# dropped.
def results
  results = []

  resultset.each do |command_name, data|
    result = SimpleCov::Result.from_hash(command_name => data)
    # Only add result if the timeout is above the configured threshold
    results << result if (Time.now - result.created_at) < SimpleCov.merge_timeout
  end

  results
end
# Loads the cached resultset from JSON and returns it as a Hash,
# memoizing it for subsequent accesses. Returns an empty Hash when the
# cache is missing or contains invalid JSON.
def resultset
  @resultset ||= begin
    data = stored_data
    if data
      begin
        # JSON.parse can legitimately yield nil-ish input; fall back to {}
        JSON.parse(data) || {}
      rescue StandardError
        # A corrupt cache file is treated as an empty resultset rather
        # than raised to the caller.
        {}
      end
    else
      {}
    end
  end
end
# Absolute path of the cached resultset file inside the configured
# coverage directory.
def resultset_path
  File.join(SimpleCov.coverage_path, ".resultset.json")
end
# Absolute path of the lock file used to serialize access to the
# cached resultset file.
def resultset_writelock
  File.join(SimpleCov.coverage_path, ".resultset.json.lock")
end
# Saves the given SimpleCov::Result in the resultset cache, merging its
# entry into whatever is already stored on disk. The whole read-modify-
# write cycle runs under the file lock. Returns true.
def store_result(result)
  synchronize_resultset do
    # Ensure we have the latest, in case it was already cached
    clear_resultset
    new_set = resultset
    command_name, data = result.to_hash.first
    new_set[command_name] = data
    File.open(resultset_path, "w+") do |f_|
      f_.puts JSON.pretty_generate(new_set)
    end
  end
  true
end
# Returns the raw contents of the resultset cache file, or nil when the
# file is missing or too short to contain meaningful JSON (fewer than
# 2 characters). The read happens under the resultset lock.
def stored_data
  synchronize_resultset do
    return unless File.exist?(resultset_path)

    data = File.read(resultset_path)
    return if data.nil? || data.length < 2

    data
  end
end
# Ensure only one process is reading or writing the resultset at any
# given time, using an exclusive flock on the writelock file. Yields the
# given block while the lock is held and returns its value.
def synchronize_resultset
  # make it reentrant
  return yield if defined?(@resultset_locked) && @resultset_locked

  begin
    @resultset_locked = true
    File.open(resultset_writelock, "w+") do |f|
      f.flock(File::LOCK_EX)
      yield
    end
  ensure
    # Always release the reentrancy guard, even if the block raises.
    @resultset_locked = false
  end
end