require 'fileutils'
require 'rails/version'
require 'active_support/concern'
require 'active_support/core_ext/class/delegating_attributes'
require 'active_support/core_ext/string/inflections'
require 'action_view/helpers/number_helper'

module ActiveSupport
  module Testing
    # Mixin that turns a test case into a performance test: each test method is
    # warmed up, then run under a set of metrics (time, memory, ...) either in
    # benchmark mode (timed runs appended to a CSV) or profile mode.
    # Interpreter-specific implementations (MRI, Rubinius, JRuby) supply the
    # concrete metrics and profiling hooks; see the requires at the bottom.
    module Performance
      extend ActiveSupport::Concern

      included do
        # Per-class, inheritable options hash merged over DEFAULTS.
        superclass_delegating_accessor :profile_options
        self.profile_options = {}

        # Pick the runner integration matching the active test framework.
        if defined?(MiniTest::Assertions) && TestCase < MiniTest::Assertions
          include ForMiniTest
        else
          include ForClassicTestUnit
        end
      end

      # each implementation should define metrics and freeze the defaults
      DEFAULTS =
        if ENV["BENCHMARK_TESTS"]
          { :runs => 4,
            :output => 'tmp/performance',
            :benchmark => true }
        else
          { :runs => 1,
            :output => 'tmp/performance',
            :benchmark => false }
        end

      # Effective options: per-class profile_options layered over DEFAULTS.
      def full_profile_options
        DEFAULTS.merge(profile_options)
      end

      # e.g. "PostsControllerTest#test_index" — used in reports and filenames.
      def full_test_name
        "#{self.class.name}##{method_name}"
      end

      # Runner integration for MiniTest-based test frameworks.
      module ForMiniTest
        def run(runner)
          @runner = runner

          run_warmup
          if full_profile_options && metrics = full_profile_options[:metrics]
            metrics.each do |metric_name|
              if klass = Metrics[metric_name.to_sym]
                run_profile(klass.new)
              end
            end
          end

          return
        end

        # Runs the test method once under +metric+ in the given +mode+
        # (:benchmark or :profile), reporting failures through the runner.
        def run_test(metric, mode)
          result = '.'
          begin
            run_callbacks :setup
            setup
            metric.send(mode) { __send__ method_name }
          rescue Exception => e
            result = @runner.puke(self.class, method_name, e)
          ensure
            # Teardown must run even on failure; its own errors are reported too.
            begin
              teardown
              run_callbacks :teardown, :enumerator => :reverse_each
            rescue Exception => e
              result = @runner.puke(self.class, method_name, e)
            end
          end
          result
        end
      end

      # Runner integration for classic Test::Unit.
      module ForClassicTestUnit
        def run(result)
          return if method_name =~ /^default_test$/

          yield(self.class::STARTED, name)
          @_result = result

          run_warmup
          if full_profile_options && metrics = full_profile_options[:metrics]
            metrics.each do |metric_name|
              if klass = Metrics[metric_name.to_sym]
                run_profile(klass.new)
                result.add_run
              else
                puts '%20s: unsupported' % metric_name
              end
            end
          end

          yield(self.class::FINISHED, name)
        end

        # Runs the test method once under +metric+ in the given +mode+,
        # recording assertion failures and errors on the Test::Unit result.
        def run_test(metric, mode)
          run_callbacks :setup
          setup
          metric.send(mode) { __send__ @method_name }
        rescue ::Test::Unit::AssertionFailedError => e
          add_failure(e.message, e.backtrace)
        rescue StandardError, ScriptError => e
          add_error(e)
        ensure
          begin
            teardown
            run_callbacks :teardown, :enumerator => :reverse_each
          rescue ::Test::Unit::AssertionFailedError => e
            add_failure(e.message, e.backtrace)
          rescue StandardError, ScriptError => e
            add_error(e)
          end
        end
      end

      protected
        # overridden by each implementation
        def run_gc; end

        # One timed throwaway run (bracketed by GC) to prime caches before
        # the measured runs.
        def run_warmup
          run_gc

          time = Metrics::Time.new
          run_test(time, :benchmark)
          puts "%s (%s warmup)" % [full_test_name, time.format(time.total)]

          run_gc
        end

        # Runs the test under +metric+ with the mode-appropriate performer,
        # printing its report and recording its output.
        def run_profile(metric)
          klass = full_profile_options[:benchmark] ? Benchmarker : Profiler
          performer = klass.new(self, metric)

          performer.run
          puts performer.report
          performer.record
        end

      # Base class for Benchmarker/Profiler: pairs a test harness with a metric.
      class Performer
        delegate :run_test, :full_profile_options, :full_test_name, :to => :@harness

        def initialize(harness, metric)
          @harness, @metric, @supported = harness, metric, false
        end

        # One report line: average per-run rate, or "unsupported" when the
        # metric isn't available on this interpreter.
        def report
          if @supported
            rate = @total / full_profile_options[:runs]
            '%20s: %s' % [@metric.name, @metric.format(rate)]
          else
            '%20s: unsupported' % @metric.name
          end
        end

        protected
          def output_filename
            "#{full_profile_options[:output]}/#{full_test_name}_#{@metric.name}"
          end
      end

      # overridden by each implementation
      class Profiler < Performer
        # Yields the block and returns its wall-clock duration in seconds.
        def time_with_block
          before = Time.now
          yield
          Time.now - before
        end

        def run; end
        def record; end
      end

      # Benchmark-mode performer: runs the test N times and appends the
      # average measurement to a per-test CSV file.
      class Benchmarker < Performer
        def initialize(*args)
          super
          @supported = @metric.respond_to?('measure')
        end

        def run
          return unless @supported

          full_profile_options[:runs].to_i.times { run_test(@metric, :benchmark) }
          @total = @metric.total
        end

        # Appends "average,timestamp,environment" to the CSV output file.
        def record
          avg = @metric.total / full_profile_options[:runs].to_i
          now = Time.now.utc.xmlschema
          with_output_file do |file|
            file.puts "#{avg},#{now},#{environment}"
          end
        end

        # Memoized "app,rails,ruby,platform" fingerprint, with git branch/sha
        # appended to the app and vendored-rails versions when available.
        def environment
          unless defined? @env
            app = "#{$1}.#{$2}" if File.directory?('.git') && `git branch -v` =~ /^\* (\S+)\s+(\S+)/

            rails = Rails::VERSION::STRING
            if File.directory?('vendor/rails/.git')
              Dir.chdir('vendor/rails') do
                rails += ".#{$1}.#{$2}" if `git branch -v` =~ /^\* (\S+)\s+(\S+)/
              end
            end

            ruby = defined?(RUBY_ENGINE) ? RUBY_ENGINE : 'ruby'
            ruby += "-#{RUBY_VERSION}.#{RUBY_PATCHLEVEL}"

            @env = [app, rails, ruby, RUBY_PLATFORM] * ','
          end

          @env
        end

        protected
          HEADER = 'measurement,created_at,app,rails,ruby,platform'

          # Opens the CSV for append, creating directories and writing the
          # header on first use, then yields the file handle.
          def with_output_file
            fname = output_filename

            if new = !File.exist?(fname)
              FileUtils.mkdir_p(File.dirname(fname))
            end

            File.open(fname, 'ab') do |file|
              file.puts(HEADER) if new
              yield file
            end
          end

          def output_filename
            "#{super}.csv"
          end
      end

      # Registry of metric classes, looked up by symbolic name.
      module Metrics
        # Metrics[:process_time] => Metrics::ProcessTime, or nil when the
        # interpreter implementation doesn't define it.
        def self.[](name)
          const_get(name.to_s.camelize)
        rescue NameError
          nil
        end

        class Base
          include ActionView::Helpers::NumberHelper

          attr_reader :total

          def initialize
            @total = 0
          end

          # Metric name derived from the class name, e.g. "process_time".
          def name
            @name ||= self.class.name.demodulize.underscore
          end

          # Measures the block and accumulates the delta into @total.
          def benchmark
            with_gc_stats do
              before = measure
              yield
              @total += (measure - before)
            end
          end

          # overridden by each implementation
          def profile; end

          protected
            # overridden by each implementation
            def with_gc_stats; end
        end

        # Wall-clock time in seconds.
        class Time < Base
          def measure
            ::Time.now.to_f
          end

          def format(measurement)
            if measurement < 1
              '%d ms' % (measurement * 1000)
            else
              '%.2f sec' % measurement
            end
          end
        end

        # Unitless counts (e.g. GC runs).
        class Amount < Base
          def format(measurement)
            number_with_delimiter(measurement.floor)
          end
        end

        # Byte quantities (e.g. memory), humanized.
        class DigitalInformationUnit < Base
          def format(measurement)
            number_to_human_size(measurement, :precision => 2)
          end
        end

        # each implementation provides its own metrics like ProcessTime, Memory or GcRuns
      end
    end
  end
end

RUBY_ENGINE = 'ruby' unless defined?(RUBY_ENGINE) # mri 1.8
case RUBY_ENGINE
when 'ruby'  then require 'active_support/testing/performance/ruby'
when 'rbx'   then require 'active_support/testing/performance/rubinius'
when 'jruby' then require 'active_support/testing/performance/jruby'
else
  $stderr.puts 'Your ruby interpreter is not supported for benchmarking.'
  exit
end