# frozen_string_literal: true

# MySQL-specific support for activerecord-import: splits bulk INSERTs so each
# statement fits within the server's +max_allowed_packet+, and builds the
# MySQL +ON DUPLICATE KEY UPDATE+ / +INSERT IGNORE+ clauses.
module ActiveRecord::Import::MysqlAdapter
  include ActiveRecord::Import::ImportSupport
  include ActiveRecord::Import::OnDuplicateKeyUpdateSupport

  # Sentinel meaning "the server reported no packet limit" — skip splitting.
  NO_MAX_PACKET = 0
  QUERY_OVERHEAD = 8 # This was shown to be true for MySQL, but it's not clear where the overhead is from.

  # Executes one or more INSERT statements for +values+, splitting them into
  # multiple statements when the combined SQL would exceed the server's
  # max_allowed_packet.
  #
  # +sql+ can be a single string or an array. If it is an array all
  # elements that are in position >= 1 will be appended to the final SQL.
  # +values+ is an array of pre-rendered "(...)" value tuples (strings).
  # +options[:force_single_insert]+ forces a single statement regardless of size.
  #
  # Returns an ActiveRecord::Import::Result carrying the insert count.
  def insert_many(sql, values, options = {}, *args) # :nodoc:
    # the number of inserts default
    number_of_inserts = 0

    # Split the SQL into the leading INSERT clause and any trailing SQL.
    # NOTE: deliberately non-destructive — the original used sql.shift, which
    # mutated the caller's array.
    base_sql, post_sql = case sql
                         when String
                           [sql, '']
                         when Array
                           [sql.first, sql.drop(1).join(' ')]
                         end

    sql_size = QUERY_OVERHEAD + base_sql.bytesize + post_sql.bytesize

    # the number of bytes the requested insert statement values will take up
    values_in_bytes = values.sum(&:bytesize)

    # the number of bytes (commas) it will take to comma separate our values
    # (clamped so an empty values array does not yield -1)
    comma_separated_bytes = [values.size - 1, 0].max

    # the total number of bytes required if this statement is one statement
    total_bytes = sql_size + values_in_bytes + comma_separated_bytes

    max = max_allowed_packet

    # if we can insert it all as one statement
    if max == NO_MAX_PACKET || total_bytes <= max || options[:force_single_insert]
      number_of_inserts += 1
      sql2insert = base_sql + values.join(',') + post_sql
      insert(sql2insert, *args)
    else
      # Otherwise partition the value tuples into packet-sized chunks and run
      # them inside a single (nested) transaction so the import is atomic.
      value_sets = ::ActiveRecord::Import::ValueSetsBytesParser.parse(values,
                                                                      reserved_bytes: sql_size,
                                                                      max_bytes: max)

      transaction(requires_new: true) do
        value_sets.each do |value_set|
          number_of_inserts += 1
          sql2insert = base_sql + value_set.join(',') + post_sql
          insert(sql2insert, *args)
        end
      end
    end

    ActiveRecord::Import::Result.new([], number_of_inserts, [], [])
  end

  # Returns the maximum number of bytes that the server will allow
  # in a single packet. Memoized per connection adapter instance.
  def max_allowed_packet # :nodoc:
    @max_allowed_packet ||= begin
      result = execute("SELECT @@max_allowed_packet")
      # original Mysql gem responds to #fetch_row while Mysql2 responds to #first
      val = result.respond_to?(:fetch_row) ? result.fetch_row[0] : result.first[0]
      val.to_i
    end
  end

  # Prepends "IGNORE" to the INSERT when the caller asked to skip
  # duplicate-key errors, then delegates to the shared implementation.
  def pre_sql_statements(options)
    sql = []
    sql << "IGNORE" if options[:ignore] || options[:on_duplicate_key_ignore]
    sql + super
  end

  # Add a column to be updated on duplicate key update
  def add_column_for_on_duplicate_key_update(column, options = {}) # :nodoc:
    if (columns = options[:on_duplicate_key_update])
      case columns
      when Array then columns << column.to_sym unless columns.include?(column.to_sym)
      when Hash then columns[column.to_sym] = column.to_sym
      end
    end
  end

  # Returns a generated ON DUPLICATE KEY UPDATE statement given the passed
  # in +args+ (an Array of columns, a Hash of column mappings, or a raw
  # String clause). Raises ArgumentError for anything else.
  def sql_for_on_duplicate_key_update(table_name, *args) # :nodoc:
    # .dup because the file is frozen_string_literal and we append below.
    sql = ' ON DUPLICATE KEY UPDATE '.dup
    arg, model, _primary_key, locking_column = args
    case arg
    when Array
      sql << sql_for_on_duplicate_key_update_as_array(table_name, model, locking_column, arg)
    when Hash
      sql << sql_for_on_duplicate_key_update_as_hash(table_name, model, locking_column, arg)
    when String
      sql << arg
    else
      raise ArgumentError, "Expected Array or Hash"
    end
    sql
  end

  # Builds "table.col=VALUES(col)" fragments for each column in +arr+,
  # resolving ActiveRecord attribute aliases to real column names.
  def sql_for_on_duplicate_key_update_as_array(table_name, model, locking_column, arr) # :nodoc:
    results = arr.map do |column|
      original_column_name = model.attribute_alias?(column) ? model.attribute_alias(column) : column
      qc = quote_column_name(original_column_name)
      "#{table_name}.#{qc}=VALUES(#{qc})"
    end
    increment_locking_column!(table_name, results, locking_column)
    results.join(',')
  end

  # Builds "table.col1=VALUES( col2 )" fragments for each mapping in +hsh+,
  # resolving attribute aliases on both sides of the assignment.
  def sql_for_on_duplicate_key_update_as_hash(table_name, model, locking_column, hsh) # :nodoc:
    results = hsh.map do |column1, column2|
      original_column1_name = model.attribute_alias?(column1) ? model.attribute_alias(column1) : column1
      qc1 = quote_column_name(original_column1_name)

      original_column2_name = model.attribute_alias?(column2) ? model.attribute_alias(column2) : column2
      qc2 = quote_column_name(original_column2_name)

      "#{table_name}.#{qc1}=VALUES( #{qc2} )"
    end
    increment_locking_column!(table_name, results, locking_column)
    results.join(',')
  end

  # Return true if the statement is a duplicate key record error
  def duplicate_key_update_error?(exception) # :nodoc:
    exception.is_a?(ActiveRecord::StatementInvalid) && exception.to_s.include?('Duplicate entry')
  end

  # Appends a "locking_column = table.locking_column + 1" fragment so
  # optimistic-locking versions are bumped on upsert. No-op when the model
  # has no locking column.
  def increment_locking_column!(table_name, results, locking_column)
    if locking_column.present?
      results << "`#{locking_column}`=#{table_name}.`#{locking_column}`+1"
    end
  end
end