Commit 4ed6167e authored by Jon Leighton

Merge branch 'master' of github.com:rails/rails

......@@ -27,13 +27,13 @@ def setup_subscriptions
path = payload[:virtual_path]
next unless path
partial = path =~ /^.*\/_[^\/]*$/
if partial
@partials[path] += 1
@partials[path.split("/").last] += 1
@templates[path] += 1
else
@templates[path] += 1
end
@templates[path] += 1
end
end
......
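The hunk above counts a rendered partial under both its full virtual path and its basename. A minimal standalone sketch (not from the commit) of the same detection logic:

partial_re = /^.*\/_[^\/]*$/
["posts/_post", "posts/index", "shared/_form"].each do |path|
  if path =~ partial_re
    # a partial: counted as "posts/_post" and as "_post"
    puts "#{path} -> partial (#{path.split('/').last})"
  else
    puts "#{path} -> template"
  end
end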
......@@ -233,7 +233,7 @@ def number_with_delimiter(number, options = {})
parts = number.to_s.to_str.split('.')
parts[0].gsub!(/(\d)(?=(\d\d\d)+(?!\d))/, "\\1#{options[:delimiter]}")
parts.join(options[:separator]).html_safe
safe_join(parts, options[:separator])
end
# Formats a +number+ with the specified level of <tt>:precision</tt> (e.g., 112.32 has a precision
......
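Replacing html_safe with safe_join keeps user-supplied :delimiter and :separator values escaped. A hedged sketch of the difference, assuming ActionView is available:

require 'action_view'
include ActionView::Helpers::OutputSafetyHelper

parts = ["1", "234"]
# html_safe trusts the whole joined string, so an HTML separator passes through unescaped:
parts.join("<b>").html_safe   # => "1<b>234"
# safe_join escapes each part and the separator unless they are already marked safe:
safe_join(parts, "<b>")       # => "1&lt;b&gt;234"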
......@@ -94,10 +94,10 @@ def content_tag_for(tag_name, single_or_multiple_records, prefix = nil, options
# for each record.
def content_tag_for_single_record(tag_name, record, prefix, options, &block)
options = options ? options.dup : {}
options.merge!(:class => "#{dom_class(record, prefix)} #{options[:class]}".rstrip, :id => dom_id(record, prefix))
options[:class] = "#{dom_class(record, prefix)} #{options[:class]}".rstrip
options[:id] = dom_id(record, prefix)
content = block.arity == 0 ? capture(&block) : capture(record, &block)
content_tag(tag_name, content, options)
content_tag(tag_name, capture(record, &block), options)
end
end
end
......
......@@ -16,7 +16,7 @@ def render_with_scope
end
def render_with_has_many_through_association
@developer = Developer.find(:first)
@developer = Developer.first
render :partial => @developer.topics
end
......@@ -31,7 +31,7 @@ def render_with_belongs_to_association
end
def render_with_record
@developer = Developer.find(:first)
@developer = Developer.first
render :partial => @developer
end
......
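Developer.find(:first) is the legacy dynamic-finder style these test hunks retire in favor of the Relation API. Equivalent calls, as a sketch assuming a Developer model with a name column:

Developer.find(:first)        # legacy finder style, being phased out
Developer.first               # Relation-based equivalent
Developer.order(:name).first  # with an explicit order when determinism matters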
require 'active_record/connection_adapters/sqlite_adapter'
require 'active_record/connection_adapters/abstract_adapter'
require 'active_record/connection_adapters/statement_pool'
require 'arel/visitors/bind_visitor'
gem 'sqlite3', '~> 1.3.5'
gem 'sqlite3', '~> 1.3.6'
require 'sqlite3'
module ActiveRecord
......@@ -35,14 +37,169 @@ def sqlite3_connection(config) # :nodoc:
end
module ConnectionAdapters #:nodoc:
class SQLite3Adapter < SQLiteAdapter # :nodoc:
def quote(value, column = nil)
if value.kind_of?(String) && column && column.type == :binary && column.class.respond_to?(:string_to_binary)
s = column.class.string_to_binary(value).unpack("H*")[0]
"x'#{s}'"
else
class SQLite3Column < Column #:nodoc:
class << self
def binary_to_string(value)
if value.encoding != Encoding::ASCII_8BIT
value = value.force_encoding(Encoding::ASCII_8BIT)
end
value
end
end
end
# The SQLite3 adapter works with SQLite 3.6.16 or newer
# with the sqlite3-ruby drivers (available as gem from https://rubygems.org/gems/sqlite3).
#
# Options:
#
# * <tt>:database</tt> - Path to the database file.
class SQLite3Adapter < AbstractAdapter
class Version
include Comparable
def initialize(version_string)
@version = version_string.split('.').map { |v| v.to_i }
end
def <=>(version_string)
@version <=> version_string.split('.').map { |v| v.to_i }
end
end
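# Sketch (not part of the adapter): including Comparable with the <=> above
# compares version components numerically rather than lexically.
v = Version.new('3.7.11')
v >= '3.6.8'    # => true
v <  '3.10.0'   # => true ("10" compares as the integer 10, not the string "10")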
class StatementPool < ConnectionAdapters::StatementPool
def initialize(connection, max)
super
@cache = Hash.new { |h,pid| h[pid] = {} }
end
def each(&block); cache.each(&block); end
def key?(key); cache.key?(key); end
def [](key); cache[key]; end
def length; cache.length; end
def []=(sql, key)
while @max <= cache.size
dealloc(cache.shift.last[:stmt])
end
cache[sql] = key
end
def clear
cache.values.each do |hash|
dealloc hash[:stmt]
end
cache.clear
end
private
def cache
@cache[$$]
end
def dealloc(stmt)
stmt.close unless stmt.closed?
end
end
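# Sketch (standalone, Unix only): the Hash.new block above keys the statement
# cache by process id, so a forked child starts with an empty cache instead of
# reusing -- and later closing -- the parent's prepared statements.
cache = Hash.new { |h, pid| h[pid] = {} }
cache[$$]["SELECT 1"] = :parent_stmt
if child = fork
  Process.wait(child)
else
  puts cache[$$].inspect   # => {} -- the child's pid sees a fresh sub-hash
  exit!
end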
class BindSubstitution < Arel::Visitors::SQLite # :nodoc:
include Arel::Visitors::BindVisitor
end
def initialize(connection, logger, config)
super(connection, logger)
@statements = StatementPool.new(@connection,
config.fetch(:statement_limit) { 1000 })
@config = config
if config.fetch(:prepared_statements) { true }
@visitor = Arel::Visitors::SQLite.new self
else
@visitor = BindSubstitution.new self
end
end
def adapter_name #:nodoc:
'SQLite'
end
# Returns true
def supports_ddl_transactions?
true
end
# Returns true if SQLite version is '3.6.8' or greater, false otherwise.
def supports_savepoints?
sqlite_version >= '3.6.8'
end
# Returns true, since this connection adapter supports prepared statement
# caching.
def supports_statement_cache?
true
end
# Returns true, since this connection adapter supports migrations.
def supports_migrations? #:nodoc:
true
end
# Returns true.
def supports_primary_key? #:nodoc:
true
end
def requires_reloading?
true
end
# Returns true
def supports_add_column?
true
end
# Disconnects from the database if already connected. Otherwise, this
# method does nothing.
def disconnect!
super
clear_cache!
@connection.close rescue nil
end
# Clears the prepared statements cache.
def clear_cache!
@statements.clear
end
# Returns true
def supports_count_distinct? #:nodoc:
true
end
# Returns true
def supports_autoincrement? #:nodoc:
true
end
def supports_index_sort_order?
true
end
def native_database_types #:nodoc:
{
:primary_key => default_primary_key_type,
:string => { :name => "varchar", :limit => 255 },
:text => { :name => "text" },
:integer => { :name => "integer" },
:float => { :name => "float" },
:decimal => { :name => "decimal" },
:datetime => { :name => "datetime" },
:timestamp => { :name => "datetime" },
:time => { :name => "time" },
:date => { :name => "date" },
:binary => { :name => "blob" },
:boolean => { :name => "boolean" }
}
end
# Returns the current database encoding format as a string, e.g. 'UTF-8'
......@@ -55,6 +212,50 @@ def supports_explain?
true
end
# QUOTING ==================================================
def quote(value, column = nil)
if value.kind_of?(String) && column && column.type == :binary && column.class.respond_to?(:string_to_binary)
s = column.class.string_to_binary(value).unpack("H*")[0]
"x'#{s}'"
else
super
end
end
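# Sketch: how the hex blob literal above is built from a binary string.
"\x00\xFFhi".unpack("H*")[0]   # => "00ff6869"
# quoted for SQLite as: x'00ff6869'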
def quote_string(s) #:nodoc:
@connection.class.quote(s)
end
def quote_column_name(name) #:nodoc:
%Q("#{name.to_s.gsub('"', '""')}")
end
# Quote date/time values for use in SQL input. Includes microseconds
# if the value is a Time responding to usec.
def quoted_date(value) #:nodoc:
if value.respond_to?(:usec)
"#{super}.#{sprintf("%06d", value.usec)}"
else
super
end
end
def type_cast(value, column) # :nodoc:
return value.to_f if BigDecimal === value
return super unless String === value
return super unless column && value
value = super
if column.type == :string && value.encoding == Encoding::ASCII_8BIT
logger.error "Binary data inserted for `string` type on column `#{column.name}`" if logger
value.encode! 'utf-8'
end
value
end
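# Sketch (standalone): the encoding repair applied above for :string columns.
v = "hello".force_encoding(Encoding::ASCII_8BIT)
v.encoding          # => #<Encoding:ASCII-8BIT>
v.encode!('utf-8')  # fine for 7-bit data; bytes >= 0x80 would raise here
v.encoding          # => #<Encoding:UTF-8>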
# DATABASE STATEMENTS ======================================
def explain(arel, binds = [])
......@@ -75,6 +276,336 @@ def pp(result) # :nodoc:
end.join("\n") + "\n"
end
end
def exec_query(sql, name = nil, binds = [])
log(sql, name, binds) do
# Don't cache statements without bind values
if binds.empty?
stmt = @connection.prepare(sql)
cols = stmt.columns
records = stmt.to_a
stmt.close
stmt = records
else
cache = @statements[sql] ||= {
:stmt => @connection.prepare(sql)
}
stmt = cache[:stmt]
cols = cache[:cols] ||= stmt.columns
stmt.reset!
stmt.bind_params binds.map { |col, val|
type_cast(val, col)
}
end
ActiveRecord::Result.new(cols, stmt.to_a)
end
end
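# Sketch (assumes an established SQLite3 connection): only queries passed with
# bind values go through the prepared-statement cache above; others are
# prepared, run, and closed immediately.
conn = ActiveRecord::Base.connection
result = conn.exec_query("SELECT 1 AS one")
result.columns   # => ["one"]
result.rows      # => [[1]]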
def exec_delete(sql, name = 'SQL', binds = [])
exec_query(sql, name, binds)
@connection.changes
end
alias :exec_update :exec_delete
def last_inserted_id(result)
@connection.last_insert_row_id
end
def execute(sql, name = nil) #:nodoc:
log(sql, name) { @connection.execute(sql) }
end
def update_sql(sql, name = nil) #:nodoc:
super
@connection.changes
end
def delete_sql(sql, name = nil) #:nodoc:
sql += " WHERE 1=1" unless sql =~ /WHERE/i
super sql, name
end
def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil) #:nodoc:
super
id_value || @connection.last_insert_row_id
end
alias :create :insert_sql
def select_rows(sql, name = nil)
exec_query(sql, name).rows
end
def create_savepoint
execute("SAVEPOINT #{current_savepoint_name}")
end
def rollback_to_savepoint
execute("ROLLBACK TO SAVEPOINT #{current_savepoint_name}")
end
def release_savepoint
execute("RELEASE SAVEPOINT #{current_savepoint_name}")
end
def begin_db_transaction #:nodoc:
log('begin transaction',nil) { @connection.transaction }
end
def commit_db_transaction #:nodoc:
log('commit transaction',nil) { @connection.commit }
end
def rollback_db_transaction #:nodoc:
log('rollback transaction',nil) { @connection.rollback }
end
# SCHEMA STATEMENTS ========================================
def tables(name = 'SCHEMA', table_name = nil) #:nodoc:
sql = <<-SQL
SELECT name
FROM sqlite_master
WHERE type = 'table' AND NOT name = 'sqlite_sequence'
SQL
sql << " AND name = #{quote_table_name(table_name)}" if table_name
exec_query(sql, name).map do |row|
row['name']
end
end
def table_exists?(name)
name && tables('SCHEMA', name).any?
end
# Returns an array of +SQLite3Column+ objects for the table specified by +table_name+.
def columns(table_name) #:nodoc:
table_structure(table_name).map do |field|
case field["dflt_value"]
when /^null$/i
field["dflt_value"] = nil
when /^'(.*)'$/
field["dflt_value"] = $1.gsub("''", "'")
when /^"(.*)"$/
field["dflt_value"] = $1.gsub('""', '"')
end
SQLite3Column.new(field['name'], field['dflt_value'], field['type'], field['notnull'].to_i == 0)
end
end
# Returns an array of indexes for the given table.
def indexes(table_name, name = nil) #:nodoc:
exec_query("PRAGMA index_list(#{quote_table_name(table_name)})", name).map do |row|
IndexDefinition.new(
table_name,
row['name'],
row['unique'] != 0,
exec_query("PRAGMA index_info('#{row['name']}')").map { |col|
col['name']
})
end
end
def primary_key(table_name) #:nodoc:
column = table_structure(table_name).find { |field|
field['pk'] == 1
}
column && column['name']
end
def remove_index!(table_name, index_name) #:nodoc:
exec_query "DROP INDEX #{quote_column_name(index_name)}"
end
# Renames a table.
#
# Example:
# rename_table('octopuses', 'octopi')
def rename_table(name, new_name)
exec_query "ALTER TABLE #{quote_table_name(name)} RENAME TO #{quote_table_name(new_name)}"
end
# See: http://www.sqlite.org/lang_altertable.html
# SQLite has an additional restriction on the ALTER TABLE statement
def valid_alter_table_options( type, options)
type.to_sym != :primary_key
end
def add_column(table_name, column_name, type, options = {}) #:nodoc:
if supports_add_column? && valid_alter_table_options( type, options )
super(table_name, column_name, type, options)
else
alter_table(table_name) do |definition|
definition.column(column_name, type, options)
end
end
end
def remove_column(table_name, *column_names) #:nodoc:
raise ArgumentError.new("You must specify at least one column name. Example: remove_column(:people, :first_name)") if column_names.empty?
column_names.flatten.each do |column_name|
alter_table(table_name) do |definition|
definition.columns.delete(definition[column_name])
end
end
end
alias :remove_columns :remove_column
def change_column_default(table_name, column_name, default) #:nodoc:
alter_table(table_name) do |definition|
definition[column_name].default = default
end
end
def change_column_null(table_name, column_name, null, default = nil)
unless null || default.nil?
exec_query("UPDATE #{quote_table_name(table_name)} SET #{quote_column_name(column_name)}=#{quote(default)} WHERE #{quote_column_name(column_name)} IS NULL")
end
alter_table(table_name) do |definition|
definition[column_name].null = null
end
end
def change_column(table_name, column_name, type, options = {}) #:nodoc:
alter_table(table_name) do |definition|
include_default = options_include_default?(options)
definition[column_name].instance_eval do
self.type = type
self.limit = options[:limit] if options.include?(:limit)
self.default = options[:default] if include_default
self.null = options[:null] if options.include?(:null)
self.precision = options[:precision] if options.include?(:precision)
self.scale = options[:scale] if options.include?(:scale)
end
end
end
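# Sketch: a migration that exercises the copy-table path above -- SQLite's
# ALTER TABLE cannot modify an existing column, so the table is rebuilt.
# (Hypothetical posts table and title column.)
class WidenPostTitles < ActiveRecord::Migration
  def up
    change_column :posts, :title, :string, :limit => 500
  end

  def down
    change_column :posts, :title, :string, :limit => 255
  end
end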
def rename_column(table_name, column_name, new_column_name) #:nodoc:
unless columns(table_name).detect{|c| c.name == column_name.to_s }
raise ActiveRecord::ActiveRecordError, "Missing column #{table_name}.#{column_name}"
end
alter_table(table_name, :rename => {column_name.to_s => new_column_name.to_s})
end
def empty_insert_statement_value
"VALUES(NULL)"
end
protected
def select(sql, name = nil, binds = []) #:nodoc:
exec_query(sql, name, binds)
end
def table_structure(table_name)
structure = exec_query("PRAGMA table_info(#{quote_table_name(table_name)})", 'SCHEMA').to_hash
raise(ActiveRecord::StatementInvalid, "Could not find table '#{table_name}'") if structure.empty?
structure
end
def alter_table(table_name, options = {}) #:nodoc:
altered_table_name = "altered_#{table_name}"
caller = lambda {|definition| yield definition if block_given?}
transaction do
move_table(table_name, altered_table_name,
options.merge(:temporary => true))
move_table(altered_table_name, table_name, &caller)
end
end
def move_table(from, to, options = {}, &block) #:nodoc:
copy_table(from, to, options, &block)
drop_table(from)
end
def copy_table(from, to, options = {}) #:nodoc:
options = options.merge(:id => (!columns(from).detect{|c| c.name == 'id'}.nil? && 'id' == primary_key(from).to_s))
create_table(to, options) do |definition|
@definition = definition
columns(from).each do |column|
column_name = options[:rename] ?
(options[:rename][column.name] ||
options[:rename][column.name.to_sym] ||
column.name) : column.name
@definition.column(column_name, column.type,
:limit => column.limit, :default => column.default,
:precision => column.precision, :scale => column.scale,
:null => column.null)
end
@definition.primary_key(primary_key(from)) if primary_key(from)
yield @definition if block_given?
end
copy_table_indexes(from, to, options[:rename] || {})
copy_table_contents(from, to,
@definition.columns.map {|column| column.name},
options[:rename] || {})
end
def copy_table_indexes(from, to, rename = {}) #:nodoc:
indexes(from).each do |index|
name = index.name
if to == "altered_#{from}"
name = "temp_#{name}"
elsif from == "altered_#{to}"
name = name[5..-1]
end
to_column_names = columns(to).map { |c| c.name }
columns = index.columns.map {|c| rename[c] || c }.select do |column|
to_column_names.include?(column)
end
unless columns.empty?
# index name can't be the same
opts = { :name => name.gsub(/_(#{from})_/, "_#{to}_") }
opts[:unique] = true if index.unique
add_index(to, columns, opts)
end
end
end
def copy_table_contents(from, to, columns, rename = {}) #:nodoc:
column_mappings = Hash[columns.map {|name| [name, name]}]
rename.each { |a| column_mappings[a.last] = a.first }
from_columns = columns(from).collect {|col| col.name}
columns = columns.find_all{|col| from_columns.include?(column_mappings[col])}
quoted_columns = columns.map { |col| quote_column_name(col) } * ','
quoted_to = quote_table_name(to)
exec_query("SELECT * FROM #{quote_table_name(from)}").each do |row|
sql = "INSERT INTO #{quoted_to} (#{quoted_columns}) VALUES ("
sql << columns.map {|col| quote row[column_mappings[col]]} * ', '
sql << ')'
exec_query sql
end
end
def sqlite_version
@sqlite_version ||= SQLite3Adapter::Version.new(select_value('select sqlite_version(*)'))
end
def default_primary_key_type
if supports_autoincrement?
'INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL'
else
'INTEGER PRIMARY KEY NOT NULL'
end
end
def translate_exception(exception, message)
case exception.message
when /column(s)? .* (is|are) not unique/
RecordNotUnique.new(message, exception)
else
super
end
end
end
end
end
require 'active_record/connection_adapters/abstract_adapter'
require 'active_record/connection_adapters/statement_pool'
require 'arel/visitors/bind_visitor'
module ActiveRecord
module ConnectionAdapters #:nodoc:
class SQLiteColumn < Column #:nodoc:
class << self
def binary_to_string(value)
if value.encoding != Encoding::ASCII_8BIT
value = value.force_encoding(Encoding::ASCII_8BIT)
end
value
end
end
end
# The SQLite adapter works with both the 2.x and 3.x series of SQLite with the sqlite-ruby
# drivers (available both as gems and from http://rubyforge.org/projects/sqlite-ruby/).
#
# Options:
#
# * <tt>:database</tt> - Path to the database file.
class SQLiteAdapter < AbstractAdapter
class Version
include Comparable
def initialize(version_string)
@version = version_string.split('.').map { |v| v.to_i }
end
def <=>(version_string)
@version <=> version_string.split('.').map { |v| v.to_i }
end
end
class StatementPool < ConnectionAdapters::StatementPool
def initialize(connection, max)
super
@cache = Hash.new { |h,pid| h[pid] = {} }
end
def each(&block); cache.each(&block); end
def key?(key); cache.key?(key); end
def [](key); cache[key]; end
def length; cache.length; end
def []=(sql, key)
while @max <= cache.size
dealloc(cache.shift.last[:stmt])
end
cache[sql] = key
end
def clear
cache.values.each do |hash|
dealloc hash[:stmt]
end
cache.clear
end
private
def cache
@cache[$$]
end
def dealloc(stmt)
stmt.close unless stmt.closed?
end
end
class BindSubstitution < Arel::Visitors::SQLite # :nodoc:
include Arel::Visitors::BindVisitor
end
def initialize(connection, logger, config)
super(connection, logger)
@statements = StatementPool.new(@connection,
config.fetch(:statement_limit) { 1000 })
@config = config
if config.fetch(:prepared_statements) { true }
@visitor = Arel::Visitors::SQLite.new self
else
@visitor = BindSubstitution.new self
end
end
def adapter_name #:nodoc:
'SQLite'
end
# Returns true if SQLite version is '2.0.0' or greater, false otherwise.
def supports_ddl_transactions?
sqlite_version >= '2.0.0'
end
# Returns true if SQLite version is '3.6.8' or greater, false otherwise.
def supports_savepoints?
sqlite_version >= '3.6.8'
end
# Returns true, since this connection adapter supports prepared statement
# caching.
def supports_statement_cache?
true
end
# Returns true, since this connection adapter supports migrations.
def supports_migrations? #:nodoc:
true
end
# Returns true.
def supports_primary_key? #:nodoc:
true
end
def requires_reloading?
true
end
# Returns true if SQLite version is '3.1.6' or greater, false otherwise.
def supports_add_column?
sqlite_version >= '3.1.6'
end
# Disconnects from the database if already connected. Otherwise, this
# method does nothing.
def disconnect!
super
clear_cache!
@connection.close rescue nil
end
# Clears the prepared statements cache.
def clear_cache!
@statements.clear
end
# Returns true if SQLite version is '3.2.6' or greater, false otherwise.
def supports_count_distinct? #:nodoc:
sqlite_version >= '3.2.6'
end
# Returns true if SQLite version is '3.1.0' or greater, false otherwise.
def supports_autoincrement? #:nodoc:
sqlite_version >= '3.1.0'
end
def supports_index_sort_order?
sqlite_version >= '3.3.0'
end
def native_database_types #:nodoc:
{
:primary_key => default_primary_key_type,
:string => { :name => "varchar", :limit => 255 },
:text => { :name => "text" },
:integer => { :name => "integer" },
:float => { :name => "float" },
:decimal => { :name => "decimal" },
:datetime => { :name => "datetime" },
:timestamp => { :name => "datetime" },
:time => { :name => "time" },
:date => { :name => "date" },
:binary => { :name => "blob" },
:boolean => { :name => "boolean" }
}
end
# QUOTING ==================================================
def quote_string(s) #:nodoc:
@connection.class.quote(s)
end
def quote_column_name(name) #:nodoc:
%Q("#{name.to_s.gsub('"', '""')}")
end
# Quote date/time values for use in SQL input. Includes microseconds
# if the value is a Time responding to usec.
def quoted_date(value) #:nodoc:
if value.respond_to?(:usec)
"#{super}.#{sprintf("%06d", value.usec)}"
else
super
end
end
def type_cast(value, column) # :nodoc:
return value.to_f if BigDecimal === value
return super unless String === value
return super unless column && value
value = super
if column.type == :string && value.encoding == Encoding::ASCII_8BIT
logger.error "Binary data inserted for `string` type on column `#{column.name}`" if logger
value.encode! 'utf-8'
end
value
end
# DATABASE STATEMENTS ======================================
def exec_query(sql, name = nil, binds = [])
log(sql, name, binds) do
# Don't cache statements without bind values
if binds.empty?
stmt = @connection.prepare(sql)
cols = stmt.columns
records = stmt.to_a
stmt.close
stmt = records
else
cache = @statements[sql] ||= {
:stmt => @connection.prepare(sql)
}
stmt = cache[:stmt]
cols = cache[:cols] ||= stmt.columns
stmt.reset!
stmt.bind_params binds.map { |col, val|
type_cast(val, col)
}
end
ActiveRecord::Result.new(cols, stmt.to_a)
end
end
def exec_delete(sql, name = 'SQL', binds = [])
exec_query(sql, name, binds)
@connection.changes
end
alias :exec_update :exec_delete
def last_inserted_id(result)
@connection.last_insert_row_id
end
def execute(sql, name = nil) #:nodoc:
log(sql, name) { @connection.execute(sql) }
end
def update_sql(sql, name = nil) #:nodoc:
super
@connection.changes
end
def delete_sql(sql, name = nil) #:nodoc:
sql += " WHERE 1=1" unless sql =~ /WHERE/i
super sql, name
end
def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil) #:nodoc:
super
id_value || @connection.last_insert_row_id
end
alias :create :insert_sql
def select_rows(sql, name = nil)
exec_query(sql, name).rows
end
def create_savepoint
execute("SAVEPOINT #{current_savepoint_name}")
end
def rollback_to_savepoint
execute("ROLLBACK TO SAVEPOINT #{current_savepoint_name}")
end
def release_savepoint
execute("RELEASE SAVEPOINT #{current_savepoint_name}")
end
def begin_db_transaction #:nodoc:
log('begin transaction',nil) { @connection.transaction }
end
def commit_db_transaction #:nodoc:
log('commit transaction',nil) { @connection.commit }
end
def rollback_db_transaction #:nodoc:
log('rollback transaction',nil) { @connection.rollback }
end
# SCHEMA STATEMENTS ========================================
def tables(name = 'SCHEMA', table_name = nil) #:nodoc:
sql = <<-SQL
SELECT name
FROM sqlite_master
WHERE type = 'table' AND NOT name = 'sqlite_sequence'
SQL
sql << " AND name = #{quote_table_name(table_name)}" if table_name
exec_query(sql, name).map do |row|
row['name']
end
end
def table_exists?(name)
name && tables('SCHEMA', name).any?
end
# Returns an array of +SQLiteColumn+ objects for the table specified by +table_name+.
def columns(table_name) #:nodoc:
table_structure(table_name).map do |field|
case field["dflt_value"]
when /^null$/i
field["dflt_value"] = nil
when /^'(.*)'$/
field["dflt_value"] = $1.gsub("''", "'")
when /^"(.*)"$/
field["dflt_value"] = $1.gsub('""', '"')
end
SQLiteColumn.new(field['name'], field['dflt_value'], field['type'], field['notnull'].to_i == 0)
end
end
# Returns an array of indexes for the given table.
def indexes(table_name, name = nil) #:nodoc:
exec_query("PRAGMA index_list(#{quote_table_name(table_name)})", name).map do |row|
IndexDefinition.new(
table_name,
row['name'],
row['unique'] != 0,
exec_query("PRAGMA index_info('#{row['name']}')").map { |col|
col['name']
})
end
end
def primary_key(table_name) #:nodoc:
column = table_structure(table_name).find { |field|
field['pk'] == 1
}
column && column['name']
end
def remove_index!(table_name, index_name) #:nodoc:
exec_query "DROP INDEX #{quote_column_name(index_name)}"
end
# Renames a table.
#
# Example:
# rename_table('octopuses', 'octopi')
def rename_table(name, new_name)
exec_query "ALTER TABLE #{quote_table_name(name)} RENAME TO #{quote_table_name(new_name)}"
end
# See: http://www.sqlite.org/lang_altertable.html
# SQLite has an additional restriction on the ALTER TABLE statement
def valid_alter_table_options( type, options)
type.to_sym != :primary_key
end
def add_column(table_name, column_name, type, options = {}) #:nodoc:
if supports_add_column? && valid_alter_table_options( type, options )
super(table_name, column_name, type, options)
else
alter_table(table_name) do |definition|
definition.column(column_name, type, options)
end
end
end
def remove_column(table_name, *column_names) #:nodoc:
raise ArgumentError.new("You must specify at least one column name. Example: remove_column(:people, :first_name)") if column_names.empty?
column_names.flatten.each do |column_name|
alter_table(table_name) do |definition|
definition.columns.delete(definition[column_name])
end
end
end
alias :remove_columns :remove_column
def change_column_default(table_name, column_name, default) #:nodoc:
alter_table(table_name) do |definition|
definition[column_name].default = default
end
end
def change_column_null(table_name, column_name, null, default = nil)
unless null || default.nil?
exec_query("UPDATE #{quote_table_name(table_name)} SET #{quote_column_name(column_name)}=#{quote(default)} WHERE #{quote_column_name(column_name)} IS NULL")
end
alter_table(table_name) do |definition|
definition[column_name].null = null
end
end
def change_column(table_name, column_name, type, options = {}) #:nodoc:
alter_table(table_name) do |definition|
include_default = options_include_default?(options)
definition[column_name].instance_eval do
self.type = type
self.limit = options[:limit] if options.include?(:limit)
self.default = options[:default] if include_default
self.null = options[:null] if options.include?(:null)
self.precision = options[:precision] if options.include?(:precision)
self.scale = options[:scale] if options.include?(:scale)
end
end
end
def rename_column(table_name, column_name, new_column_name) #:nodoc:
unless columns(table_name).detect{|c| c.name == column_name.to_s }
raise ActiveRecord::ActiveRecordError, "Missing column #{table_name}.#{column_name}"
end
alter_table(table_name, :rename => {column_name.to_s => new_column_name.to_s})
end
def empty_insert_statement_value
"VALUES(NULL)"
end
protected
def select(sql, name = nil, binds = []) #:nodoc:
exec_query(sql, name, binds)
end
def table_structure(table_name)
structure = exec_query("PRAGMA table_info(#{quote_table_name(table_name)})", 'SCHEMA').to_hash
raise(ActiveRecord::StatementInvalid, "Could not find table '#{table_name}'") if structure.empty?
structure
end
def alter_table(table_name, options = {}) #:nodoc:
altered_table_name = "altered_#{table_name}"
caller = lambda {|definition| yield definition if block_given?}
transaction do
move_table(table_name, altered_table_name,
options.merge(:temporary => true))
move_table(altered_table_name, table_name, &caller)
end
end
def move_table(from, to, options = {}, &block) #:nodoc:
copy_table(from, to, options, &block)
drop_table(from)
end
def copy_table(from, to, options = {}) #:nodoc:
options = options.merge(:id => (!columns(from).detect{|c| c.name == 'id'}.nil? && 'id' == primary_key(from).to_s))
create_table(to, options) do |definition|
@definition = definition
columns(from).each do |column|
column_name = options[:rename] ?
(options[:rename][column.name] ||
options[:rename][column.name.to_sym] ||
column.name) : column.name
@definition.column(column_name, column.type,
:limit => column.limit, :default => column.default,
:precision => column.precision, :scale => column.scale,
:null => column.null)
end
@definition.primary_key(primary_key(from)) if primary_key(from)
yield @definition if block_given?
end
copy_table_indexes(from, to, options[:rename] || {})
copy_table_contents(from, to,
@definition.columns.map {|column| column.name},
options[:rename] || {})
end
def copy_table_indexes(from, to, rename = {}) #:nodoc:
indexes(from).each do |index|
name = index.name
if to == "altered_#{from}"
name = "temp_#{name}"
elsif from == "altered_#{to}"
name = name[5..-1]
end
to_column_names = columns(to).map { |c| c.name }
columns = index.columns.map {|c| rename[c] || c }.select do |column|
to_column_names.include?(column)
end
unless columns.empty?
# index name can't be the same
opts = { :name => name.gsub(/_(#{from})_/, "_#{to}_") }
opts[:unique] = true if index.unique
add_index(to, columns, opts)
end
end
end
def copy_table_contents(from, to, columns, rename = {}) #:nodoc:
column_mappings = Hash[columns.map {|name| [name, name]}]
rename.each { |a| column_mappings[a.last] = a.first }
from_columns = columns(from).collect {|col| col.name}
columns = columns.find_all{|col| from_columns.include?(column_mappings[col])}
quoted_columns = columns.map { |col| quote_column_name(col) } * ','
quoted_to = quote_table_name(to)
exec_query("SELECT * FROM #{quote_table_name(from)}").each do |row|
sql = "INSERT INTO #{quoted_to} (#{quoted_columns}) VALUES ("
sql << columns.map {|col| quote row[column_mappings[col]]} * ', '
sql << ')'
exec_query sql
end
end
def sqlite_version
@sqlite_version ||= SQLiteAdapter::Version.new(select_value('select sqlite_version(*)'))
end
def default_primary_key_type
if supports_autoincrement?
'INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL'
else
'INTEGER PRIMARY KEY NOT NULL'
end
end
def translate_exception(exception, message)
case exception.message
when /column(s)? .* (is|are) not unique/
RecordNotUnique.new(message, exception)
else
super
end
end
end
end
end
......@@ -196,7 +196,6 @@ module ClassMethods
# The following bundled adapters throw the ActiveRecord::RecordNotUnique exception:
# * ActiveRecord::ConnectionAdapters::MysqlAdapter
# * ActiveRecord::ConnectionAdapters::Mysql2Adapter
# * ActiveRecord::ConnectionAdapters::SQLiteAdapter
# * ActiveRecord::ConnectionAdapters::SQLite3Adapter
# * ActiveRecord::ConnectionAdapters::PostgreSQLAdapter
#
......
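A hedged sketch of handling the duplicate-key error these adapters translate (hypothetical User model with a unique index on email):

begin
  User.create!(:email => "taken@example.com")
rescue ActiveRecord::RecordNotUnique
  # retry with a different value, or surface a validation error instead
end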
......@@ -5,7 +5,7 @@
module ActiveRecord
module ConnectionAdapters
class SQLiteAdapter
class SQLite3Adapter
class QuotingTest < ActiveRecord::TestCase
def setup
@conn = Base.sqlite3_connection :database => ':memory:',
......
require 'cases/helper'
module ActiveRecord::ConnectionAdapters
class SQLiteAdapter
class SQLite3Adapter
class StatementPoolTest < ActiveRecord::TestCase
def test_cache_is_per_pid
return skip('must support fork') unless Process.respond_to?(:fork)
......
......@@ -228,7 +228,7 @@ def test_preserving_time_objects
)
# For adapters which support microsecond resolution.
if current_adapter?(:PostgreSQLAdapter) || current_adapter?(:SQLiteAdapter)
if current_adapter?(:PostgreSQLAdapter) || current_adapter?(:SQLite3Adapter)
assert_equal 11, Topic.find(1).written_on.sec
assert_equal 223300, Topic.find(1).written_on.usec
assert_equal 9900, Topic.find(2).written_on.usec
......
......@@ -36,7 +36,7 @@ def current_adapter?(*types)
end
def in_memory_db?
current_adapter?(:SQLiteAdapter) &&
current_adapter?(:SQLite3Adapter) &&
ActiveRecord::Base.connection_pool.spec.config[:database] == ":memory:"
end
......
......@@ -22,6 +22,7 @@
module Rails
autoload :Info, 'rails/info'
autoload :InfoController, 'rails/info_controller'
autoload :Queueing, 'rails/queueing'
class << self
def application
......@@ -37,6 +38,25 @@ def configuration
application.config
end
# Rails.queue is the application's queue. You can push a job onto
# the queue by:
#
# Rails.queue.push job
#
# A job is an object that responds to +run+. Queue consumers will
# pop jobs off of the queue and invoke the job's +run+ method.
#
# Note that depending on your queue implementation, jobs may not
# be executed in the same process as they were created in, and
# are never executed in the same thread as they were created in.
#
# If necessary, a queue implementation may need to serialize your
# job for distribution to another process. The documentation of
# your queue will specify the requirements for that serialization.
def queue
application.queue
end
def initialize!
application.initialize!
end
......
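A minimal sketch of a job for the API documented above: any object that responds to +run+ can be pushed (WelcomeEmailJob is hypothetical):

class WelcomeEmailJob
  def initialize(user_id)
    @user_id = user_id
  end

  def run
    # deliver mail here; may execute on another thread or in another process,
    # so keep the payload serializable
  end
end

Rails.queue.push WelcomeEmailJob.new(42)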
......@@ -66,7 +66,7 @@ def inherited(base)
end
end
attr_accessor :assets, :sandbox
attr_accessor :assets, :sandbox, :queue
alias_method :sandbox?, :sandbox
attr_reader :reloaders
......@@ -199,6 +199,10 @@ def config #:nodoc:
@config ||= Application::Configuration.new(find_root_with_flag("config.ru", Dir.pwd))
end
def queue #:nodoc:
@queue ||= config.queue.new
end
def to_app
self
end
......
......@@ -11,7 +11,7 @@ class Configuration < ::Rails::Engine::Configuration
:force_ssl, :helpers_paths, :logger, :log_formatter, :log_tags, :preload_frameworks,
:railties_order, :relative_url_root, :secret_token,
:serve_static_assets, :ssl_options, :static_cache_control, :session_options,
:time_zone, :reload_classes_only_on_change, :use_schema_cache_dump
:time_zone, :reload_classes_only_on_change, :use_schema_cache_dump, :queue
attr_writer :log_level
attr_reader :encoding
......@@ -43,6 +43,7 @@ def initialize(*)
@autoflush_log = true
@log_formatter = ActiveSupport::Logger::SimpleFormatter.new
@use_schema_cache_dump = true
@queue = Queue
@assets = ActiveSupport::OrderedOptions.new
@assets.enabled = false
......
......@@ -93,6 +93,13 @@ module Finisher
ActiveSupport::Dependencies.unhook!
end
end
initializer :activate_queue_consumer do |app|
if config.queue == Queue
consumer = Rails::Queueing::ThreadedConsumer.start(app.queue)
at_exit { consumer.shutdown }
end
end
end
end
end
......@@ -35,4 +35,7 @@
# Expands the lines which load the assets.
config.assets.debug = true
<%- end -%>
# In development, use an in-memory queue for queueing
config.queue = Queue
end
......@@ -76,4 +76,8 @@
# Use default logging formatter so that PID and timestamp are not suppressed
config.log_formatter = ::Logger::Formatter.new
# Default the production mode queue to an in-memory queue. You will probably
# want to replace this with an out-of-process queueing solution
config.queue = Queue
end
......@@ -33,4 +33,7 @@
# Print deprecation notices to the stderr.
config.active_support.deprecation = :stderr
# Use the testing queue
config.queue = Rails::Queueing::TestQueue
end
module Rails
module Queueing
# In test mode, the Rails queue is backed by an Array so that assertions
# can be made about its contents. The test queue provides a +contents+
# method to make assertions about the queue's contents and a +drain+
# method to drain the queue and run the jobs.
#
# Jobs are run in a separate thread to catch mistakes where code
# assumes that the job is run in the same thread.
class TestQueue
attr_reader :contents
def initialize
@contents = []
end
def drain
# run the jobs in a separate thread so assumptions of synchronous
# jobs are caught in test mode.
t = Thread.new do
while job = @contents.pop
job.run
end
end
t.join
end
# implement the Queue API
def push(object)
@contents << object
end
end
# The threaded consumer will run jobs in a background thread in
# development mode or in a VM where running jobs on a thread in
# production mode makes sense.
#
# When the process exits, the consumer pushes a nil onto the
# queue and joins the thread, which will ensure that all jobs
# are executed before the process finally dies.
class ThreadedConsumer
def self.start(queue)
new(queue).start
end
def initialize(queue)
@queue = queue
end
def start
@thread = Thread.new do
while job = @queue.pop
job.run
end
end
self
end
def shutdown
@queue.push nil
@thread.join
end
end
end
end
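A hedged sketch of driving the consumer directly with Ruby's built-in Queue, mirroring what the :activate_queue_consumer initializer wires up (assumes railties is on the load path):

require 'rails/queueing'

queue = Queue.new
consumer = Rails::Queueing::ThreadedConsumer.start(queue)

job = Struct.new(:name) do
  def run
    puts "ran #{name}"
  end
end.new("job-1")

queue.push job
consumer.shutdown   # pushes nil and joins the thread, draining remaining jobs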
require 'isolation/abstract_unit'
require 'rack/test'
module ApplicationTests
class GeneratorsTest < ActiveSupport::TestCase
include ActiveSupport::Testing::Isolation
def setup
build_app
boot_rails
end
def teardown
teardown_app
end
def app_const
@app_const ||= Class.new(Rails::Application)
end
test "the queue is a TestQueue in test mode" do
app("test")
assert_kind_of Rails::Queueing::TestQueue, Rails.application.queue
assert_kind_of Rails::Queueing::TestQueue, Rails.queue
end
test "the queue is a Queue in development mode" do
app("development")
assert_kind_of Queue, Rails.application.queue
assert_kind_of Queue, Rails.queue
end
test "in development mode, an enqueued job will be processed in a separate thread" do
app("development")
current = Thread.current
job = Struct.new(:origin, :target).new(Thread.current)
def job.run
self.target = Thread.current
end
Rails.queue.push job
sleep 0.1
assert job.target, "The job was run"
assert_not_equal job.origin, job.target
end
test "in test mode, explicitly draining the queue will process it in a separate thread" do
app("test")
current = Thread.current
job = Struct.new(:origin, :target).new(Thread.current)
def job.run
self.target = Thread.current
end
Rails.queue.push job
Rails.queue.drain
assert job.target, "The job was run"
assert_not_equal job.origin, job.target
end
test "in test mode, the queue can be observed" do
app("test")
job = Class.new(Struct.new(:id)) do
def run
end
end
jobs = (1..10).map do |id|
job.new(id)
end
jobs.each do |job|
Rails.queue.push job
end
assert_equal jobs, Rails.queue.contents
end
test "a custom queue implementation can be provided" do
add_to_env_config "production", <<-RUBY
require "my_queue"
config.queue = MyQueue
RUBY
app_file "lib/my_queue.rb", <<-RUBY
class MyQueue
def push(job)
job.run
end
end
RUBY
app("production")
assert_kind_of MyQueue, Rails.queue
job = Class.new(Struct.new(:id, :ran)) do
def run
self.ran = true
end
end
job1 = job.new(1)
Rails.queue.push job1
assert_equal true, job1.ran
end
end
end
......@@ -8,7 +8,7 @@
# Rails booted up.
require 'fileutils'
require 'rubygems'
require 'bundler/setup'
require 'minitest/autorun'
require 'active_support/test_case'
......
require 'abstract_unit'
require 'rails/queueing'
class TestQueueTest < ActiveSupport::TestCase
class Job
attr_reader :id
def initialize(id, &block)
@id = id
@block = block
end
def run
@block.call if @block
end
end
def setup
@queue = Rails::Queueing::TestQueue.new
end
def test_contents
assert_equal [], @queue.contents
job = Job.new(1)
@queue.push job
assert_equal [job], @queue.contents
end
def test_drain
t = nil
ran = false
job = Job.new(1) do
ran = true
t = Thread.current
end
@queue.push job
@queue.drain
assert_equal [], @queue.contents
assert ran, "The job runs synchronously when the queue is drained"
assert_not_equal t, Thread.current
end
end
require 'abstract_unit'
require 'rails/queueing'
class TestThreadConsumer < ActiveSupport::TestCase
class Job
attr_reader :id
def initialize(id, &block)
@id = id
@block = block
end
def run
@block.call if @block
end
end
def setup
@queue = Queue.new
@consumer = Rails::Queueing::ThreadedConsumer.start(@queue)
end
def teardown
@queue.push nil
end
test "the jobs are executed" do
ran = false
job = Job.new(1) do
ran = true
end
@queue.push job
sleep 0.1
assert_equal true, ran
end
test "the jobs are not executed synchronously" do
ran = false
job = Job.new(1) do
sleep 0.1
ran = true
end
@queue.push job
assert_equal false, ran
end
test "shutting down the queue synchronously drains the jobs" do
ran = false
job = Job.new(1) do
sleep 0.1
ran = true
end
@queue.push job
assert_equal false, ran
@consumer.shutdown
assert_equal true, ran
end
end