# frozen_string_literal: true

require "json"

module SimpleCov
  #
  # Singleton that is responsible for caching, loading and merging
  # SimpleCov::Results into a single result for coverage analysis based
  # upon multiple test suites.
  #
  module ResultMerger
    class << self
      # The path to the .resultset.json cache file
      def resultset_path
        File.join(SimpleCov.coverage_path, ".resultset.json")
      end

      # The path to the lock file guarding concurrent resultset access
      def resultset_writelock
        File.join(SimpleCov.coverage_path, ".resultset.json.lock")
      end

      # Merges the given resultset files and persists the merged result.
      # Returns the merged SimpleCov::Result (or nil if nothing to merge).
      def merge_and_store(*file_paths, ignore_timeout: false)
        result = merge_results(*file_paths, ignore_timeout: ignore_timeout)
        store_result(result) if result
        result
      end

      # Merges multiple resultset files into one SimpleCov::Result.
      def merge_results(*file_paths, ignore_timeout: false)
        # It is intentional here that files are only read in and parsed one at a time.
        #
        # In big CI setups you might deal with 100s of CI jobs and each one producing Megabytes
        # of data. Reading them all in easily produces Gigabytes of memory consumption which
        # we want to avoid.
        #
        # For similar reasons a SimpleCov::Result is only created in the end as that'd create
        # even more data especially when it also reads in all source files.
        initial_memo = valid_results(file_paths.shift, ignore_timeout: ignore_timeout)

        command_names, coverage = file_paths.reduce(initial_memo) do |memo, file_path|
          merge_coverage(memo, valid_results(file_path, ignore_timeout: ignore_timeout))
        end

        create_result(command_names, coverage)
      end

      # Parses one resultset file and merges the (possibly multiple) runs it contains.
      def valid_results(file_path, ignore_timeout: false)
        results = parse_file(file_path)
        merge_valid_results(results, ignore_timeout: ignore_timeout)
      end

      # Reads and JSON-parses the file at +path+; tolerant of missing/empty files.
      def parse_file(path)
        data = read_file(path)
        parse_json(data)
      end

      # Returns the file contents, or nil when the file is missing or too
      # short to be meaningful JSON (fewer than 2 characters, i.e. not even "{}").
      def read_file(path)
        return unless File.exist?(path)

        data = File.read(path)
        return if data.nil? || data.length < 2

        data
      end

      # Parses JSON, returning {} for nil content or on parse failure
      # (with a warning) so callers always get a hash back.
      def parse_json(content)
        return {} unless content

        JSON.parse(content) || {}
      rescue StandardError
        warn "[SimpleCov]: Warning! Parsing JSON content of resultset file failed"
        {}
      end

      # Filters out stale runs (unless ignore_timeout), adapts their coverage
      # format and merges them into a single [command_names, coverage] pair.
      def merge_valid_results(results, ignore_timeout: false)
        results = results.select { |_command_name, data| within_merge_timeout?(data) } unless ignore_timeout

        command_plus_coverage = results.map do |command_name, data|
          [[command_name], adapt_result(data.fetch("coverage"))]
        end

        # one file itself _might_ include multiple test runs
        merge_coverage(*command_plus_coverage)
      end

      # True when the result was created recently enough to be merged.
      def within_merge_timeout?(data)
        time_since_result_creation(data) < SimpleCov.merge_timeout
      end

      # Seconds elapsed since the result's "timestamp" field.
      def time_since_result_creation(data)
        Time.now - Time.at(data.fetch("timestamp"))
      end

      # Builds a SimpleCov::Result from merged coverage; nil when there is no coverage.
      def create_result(command_names, coverage)
        return nil unless coverage

        command_name = command_names.reject(&:empty?).sort.join(", ")
        SimpleCov::Result.new(coverage, command_name: command_name)
      end

      # Merges any number of [command_names, coverage] pairs into one pair.
      def merge_coverage(*results)
        return [[""], nil] if results.empty?
        return results.first if results.size == 1

        results.reduce do |(memo_command, memo_coverage), (command, coverage)|
          # timestamp is dropped here, which is intentional (we merge it, it gets a new time stamp as of now)
          merged_coverage = Combine.combine(Combine::ResultsCombiner, memo_coverage, coverage)
          merged_command = memo_command + command

          [merged_command, merged_coverage]
        end
      end

      #
      # Gets all SimpleCov::Results stored in resultset, merges them and produces a new
      # SimpleCov::Result with merged coverage data and the command_name
      # for the result consisting of a join on all source result's names
      def merged_result
        # conceptually this is just doing `merge_results(resultset_path)`
        # it's more involved to make sure `synchronize_resultset` is only used around reading
        resultset_hash = read_resultset
        command_names, coverage = merge_valid_results(resultset_hash)

        create_result(command_names, coverage)
      end

      # Reads the resultset cache (under the file lock) and parses it.
      def read_resultset
        resultset_content =
          synchronize_resultset do
            read_file(resultset_path)
          end

        parse_json(resultset_content)
      end

      # Saves the given SimpleCov::Result in the resultset cache
      def store_result(result)
        synchronize_resultset do
          # Ensure we have the latest, in case it was already cached
          new_resultset = read_resultset
          # A single result only ever has one command_name, see `SimpleCov::Result#to_hash`
          command_name, data = result.to_hash.first
          new_resultset[command_name] = data
          File.open(resultset_path, "w+") do |f_|
            f_.puts JSON.pretty_generate(new_resultset)
          end
        end
        true
      end

      # Ensure only one process is reading or writing the resultset at any
      # given time
      def synchronize_resultset
        # make it reentrant
        return yield if defined?(@resultset_locked) && @resultset_locked

        begin
          @resultset_locked = true
          File.open(resultset_writelock, "w+") do |f|
            f.flock(File::LOCK_EX)
            yield
          end
        ensure
          @resultset_locked = false
        end
      end

      # We changed the format of the raw result data in simplecov, as people are likely
      # to have "old" resultsets lying around (but not too old so that they're still
      # considered) we can adapt them.
      # See https://github.com/simplecov-ruby/simplecov/pull/824#issuecomment-576049747
      def adapt_result(result)
        if pre_simplecov_0_18_result?(result)
          adapt_pre_simplecov_0_18_result(result)
        else
          result
        end
      end

      # pre 0.18 coverage data pointed from file directly to an array of line coverage
      def pre_simplecov_0_18_result?(result)
        _key, data = result.first

        data.is_a?(Array)
      end

      # Wraps old-style per-file line arrays in the modern {"lines" => [...]} shape.
      def adapt_pre_simplecov_0_18_result(result)
        result.transform_values do |line_coverage_data|
          {"lines" => line_coverage_data}
        end
      end
    end
  end
end