Oops! Fixed upgrade of Rails.

Jacques Distler 2007-02-09 17:12:31 -06:00
parent 5536e6e79e
commit bba0cf6b10
177 changed files with 13221 additions and 0 deletions

View file

@@ -0,0 +1,75 @@
module ActiveRecord
module AttributeMethods #:nodoc:
DEFAULT_SUFFIXES = %w(= ? _before_type_cast)
def self.included(base)
base.extend ClassMethods
base.attribute_method_suffix *DEFAULT_SUFFIXES
end
# Declare and check for suffixed attribute methods.
module ClassMethods
# Declare a method available for all attributes with the given suffix.
# Uses method_missing and respond_to? to rewrite the method
# #{attr}#{suffix}(*args, &block)
# to
# attribute#{suffix}(#{attr}, *args, &block)
#
# An attribute#{suffix} instance method must exist and accept at least
# the attr argument.
#
# For example:
# class Person < ActiveRecord::Base
# attribute_method_suffix '_changed?'
#
# private
# def attribute_changed?(attr)
# ...
# end
# end
#
# person = Person.find(1)
# person.name_changed? # => false
# person.name = 'Hubert'
# person.name_changed? # => true
def attribute_method_suffix(*suffixes)
attribute_method_suffixes.concat suffixes
rebuild_attribute_method_regexp
end
# Returns MatchData if method_name is an attribute method.
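# For example (illustrative, with the default suffixes):
#   Person.match_attribute_method?('name=')   # => MatchData whose capture is "="
#   Person.match_attribute_method?('name')    # => nil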
def match_attribute_method?(method_name)
rebuild_attribute_method_regexp unless defined?(@@attribute_method_regexp) && @@attribute_method_regexp
@@attribute_method_regexp.match(method_name)
end
private
# Suffixes a, ?, c become regexp /(a|\?|c)$/
def rebuild_attribute_method_regexp
suffixes = attribute_method_suffixes.map { |s| Regexp.escape(s) }
@@attribute_method_regexp = /(#{suffixes.join('|')})$/.freeze
end
# Default to =, ?, _before_type_cast
def attribute_method_suffixes
@@attribute_method_suffixes ||= []
end
end
private
# Handle *? for method_missing.
def attribute?(attribute_name)
query_attribute(attribute_name)
end
# Handle *= for method_missing.
def attribute=(attribute_name, value)
write_attribute(attribute_name, value)
end
# Handle *_before_type_cast for method_missing.
def attribute_before_type_cast(attribute_name)
read_attribute_before_type_cast(attribute_name)
end
end
end

View file

@@ -0,0 +1,861 @@
# Requires FrontBase Ruby bindings (gem install ruby-frontbase)
require 'active_record/connection_adapters/abstract_adapter'
FB_TRACE = false
module ActiveRecord
class Base
class << self
# Establishes a connection to the database that's used by all Active Record objects.
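#
# A typical database.yml entry for this adapter might look like this
# (illustrative; the keys correspond to the config values read below):
#
#   development:
#     adapter:  frontbase
#     host:     localhost
#     database: blog_development
#     username: blog
#     password: secret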
def frontbase_connection(config) # :nodoc:
# FrontBase only supports one unnamed sequence per table
define_attr_method(:set_sequence_name, :sequence_name, &Proc.new {|*args| nil})
config = config.symbolize_keys
database = config[:database]
port = config[:port]
host = config[:host]
username = config[:username]
password = config[:password]
dbpassword = config[:dbpassword]
session_name = config[:session_name]
dbpassword = '' if dbpassword.nil?
# Turn off colorization since it makes tail/less output difficult to read
self.colorize_logging = false
require_library_or_gem 'frontbase' unless self.class.const_defined? :FBSQL_Connect
# Check bindings version
version = "0.0.0"
version = FBSQL_Connect::FB_BINDINGS_VERSION if defined? FBSQL_Connect::FB_BINDINGS_VERSION
if ActiveRecord::ConnectionAdapters::FrontBaseAdapter.compare_versions(version,"1.0.0") == -1
raise AdapterNotFound,
'The FrontBase adapter requires ruby-frontbase version 1.0.0 or greater; you appear ' <<
"to be running an older version (#{version}) -- please update ruby-frontbase (gem install ruby-frontbase)."
end
connection = FBSQL_Connect.connect(host, port, database, username, password, dbpassword, session_name)
ConnectionAdapters::FrontBaseAdapter.new(connection, logger, [host, port, database, username, password, dbpassword, session_name], config)
end
end
end
module ConnectionAdapters
# From EOF Documentation....
# buffer should have space for EOUniqueBinaryKeyLength (12) bytes.
# Assigns a world-wide unique ID made up of:
# < Sequence [2], ProcessID [2] , Time [4], IP Addr [4] >
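#
# For example (illustrative):
#   key = TwelveByteKey.new
#   key.size      # => 12 raw bytes
#   key.inspect   # => the 24-character uppercase hex form, e.g. "00C3..."
#   TwelveByteKey.new(key.inspect)   # round-trips from the hex form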
class TwelveByteKey < String #:nodoc:
@@mutex = Mutex.new
@@sequence_number = rand(65536)
@@key_cached_pid_component = nil
@@key_cached_ip_component = nil
def initialize(string = nil)
# Generate a unique key
if string.nil?
new_key = replace('_' * 12)
new_key[0..1] = self.class.key_sequence_component
new_key[2..3] = self.class.key_pid_component
new_key[4..7] = self.class.key_time_component
new_key[8..11] = self.class.key_ip_component
new_key
else
if string.size == 24
string.gsub!(/[[:xdigit:]]{2}/) { |x| x.hex.chr }
end
raise "string is not 12 bytes long" unless string.size == 12
super(string)
end
end
def inspect
unpack("H*").first.upcase
end
alias_method :to_s, :inspect
private
class << self
def key_sequence_component
seq = nil
@@mutex.synchronize do
seq = @@sequence_number
@@sequence_number = (@@sequence_number + 1) % 65536
end
sequence_component = "__"
sequence_component[0] = seq >> 8
sequence_component[1] = seq
sequence_component
end
def key_pid_component
if @@key_cached_pid_component.nil?
@@mutex.synchronize do
pid = $$
pid_component = "__"
pid_component[0] = pid >> 8
pid_component[1] = pid
@@key_cached_pid_component = pid_component
end
end
@@key_cached_pid_component
end
def key_time_component
time = Time.new.to_i
time_component = "____"
time_component[0] = (time & 0xFF000000) >> 24
time_component[1] = (time & 0x00FF0000) >> 16
time_component[2] = (time & 0x0000FF00) >> 8
time_component[3] = (time & 0x000000FF)
time_component
end
def key_ip_component
if @@key_cached_ip_component.nil?
@@mutex.synchronize do
old_lookup_flag = BasicSocket.do_not_reverse_lookup
BasicSocket.do_not_reverse_lookup = true
udpsocket = UDPSocket.new
udpsocket.connect("17.112.152.32",1)
ip_string = udpsocket.addr[3]
BasicSocket.do_not_reverse_lookup = old_lookup_flag
packed = Socket.pack_sockaddr_in(0,ip_string)
addr_subset = packed[4..7]
ip = addr_subset[0] << 24 | addr_subset[1] << 16 | addr_subset[2] << 8 | addr_subset[3]
ip_component = "____"
ip_component[0] = (ip & 0xFF000000) >> 24
ip_component[1] = (ip & 0x00FF0000) >> 16
ip_component[2] = (ip & 0x0000FF00) >> 8
ip_component[3] = (ip & 0x000000FF)
@@key_cached_ip_component = ip_component
end
end
@@key_cached_ip_component
end
end
end
class FrontBaseColumn < Column #:nodoc:
attr_reader :fb_autogen
def initialize(base, name, type, typename, limit, precision, scale, default, nullable)
@base = base
@name = name
@type = simplified_type(type,typename,limit)
@limit = limit
@precision = precision
@scale = scale
@default = default
@null = nullable == "YES"
@text = [:string, :text].include? @type
@number = [:float, :integer, :decimal].include? @type
@fb_autogen = false
if @default
@default.gsub!(/^'(.*)'$/,'\1') if @text
@fb_autogen = @default.include?("SELECT UNIQUE FROM")
case @type
when :boolean
@default = @default == "TRUE"
when :binary
if @default != "X''"
buffer = ""
@default.scan(/../) { |h| buffer << h.hex.chr }
@default = buffer
else
@default = ""
end
else
@default = type_cast(@default)
end
end
end
# Casts value (which is a String) to an appropriate instance.
def type_cast(value)
if type == :twelvebytekey
ActiveRecord::ConnectionAdapters::TwelveByteKey.new(value)
else
super(value)
end
end
def type_cast_code(var_name)
if type == :twelvebytekey
"ActiveRecord::ConnectionAdapters::TwelveByteKey.new(#{var_name})"
else
super(var_name)
end
end
private
def simplified_type(field_type, type_name,limit)
ret_type = :string
puts "typecode: [#{field_type}] [#{type_name}]" if FB_TRACE
# 12 byte primary keys are a special case that Apple's EOF
# used heavily. Optimize for this case
if field_type == 11 && limit == 96
ret_type = :twelvebytekey # BIT(96)
else
ret_type = case field_type
when 1 then :boolean # BOOLEAN
when 2 then :integer # INTEGER
when 4 then :float # FLOAT
when 10 then :string # CHARACTER VARYING
when 11 then :bitfield # BIT
when 13 then :date # DATE
when 14 then :time # TIME
when 16 then :timestamp # TIMESTAMP
when 20 then :text # CLOB
when 21 then :binary # BLOB
when 22 then :integer # TINYINT
else
puts "ERROR: Unknown typecode: [#{field_type}] [#{type_name}]"
end
end
puts "ret_type: #{ret_type.inspect}" if FB_TRACE
ret_type
end
end
class FrontBaseAdapter < AbstractAdapter
class << self
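# Compares two dotted version strings segment by segment and returns
# -1, 0, or 1. For example (illustrative):
#   compare_versions("0.9.9", "1.0.0")   # => -1
#   compare_versions("1.0.0", "1.0.0")   # => 0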
def compare_versions(v1, v2)
v1_seg = v1.split(".")
v2_seg = v2.split(".")
0.upto([v1_seg.length,v2_seg.length].min) do |i|
step = (v1_seg[i].to_i <=> v2_seg[i].to_i)
return step unless step == 0
end
return v1_seg.length <=> v2_seg.length
end
end
def initialize(connection, logger, connection_options, config)
super(connection, logger)
@connection_options, @config = connection_options, config
@transaction_mode = :pessimistic
# Start out in auto-commit mode
self.rollback_db_transaction
# threaded_connections_test.rb will fail unless we set the session
# to optimistic locking mode
# set_pessimistic_transactions
# execute "SET TRANSACTION ISOLATION LEVEL REPEATABLE READ, READ WRITE, LOCKING OPTIMISTIC"
end
# Returns the human-readable name of the adapter. Use mixed case - one
# can always use downcase if needed.
def adapter_name #:nodoc:
'FrontBase'
end
# Does this adapter support migrations? Backend specific, as the
# abstract adapter always returns +false+.
def supports_migrations? #:nodoc:
true
end
def native_database_types #:nodoc:
{
:primary_key => "INTEGER DEFAULT UNIQUE PRIMARY KEY",
:string => { :name => "VARCHAR", :limit => 255 },
:text => { :name => "CLOB" },
:integer => { :name => "INTEGER" },
:float => { :name => "FLOAT" },
:decimal => { :name => "DECIMAL" },
:datetime => { :name => "TIMESTAMP" },
:timestamp => { :name => "TIMESTAMP" },
:time => { :name => "TIME" },
:date => { :name => "DATE" },
:binary => { :name => "BLOB" },
:boolean => { :name => "BOOLEAN" },
:twelvebytekey => { :name => "BYTE", :limit => 12}
}
end
# QUOTING ==================================================
# Quotes the column value to help prevent
# {SQL injection attacks}[http://en.wikipedia.org/wiki/SQL_injection].
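#
# A few illustrative results (string_column, boolean_column and
# date_column stand in for column objects of the matching type):
#   quote("O'Brien", string_column)           # => "'O''Brien'"
#   quote(true, boolean_column)               # => "true"
#   quote(Date.new(2007, 2, 9), date_column)  # => "DATE '2007-02-09'"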
def quote(value, column = nil)
return value.quoted_id if value.respond_to?(:quoted_id)
retvalue = "<INVALID>"
puts "quote(#{value.inspect}(#{value.class}),#{column.type.inspect})" if FB_TRACE
# If a column was passed in, use column type information
unless value.nil?
if column
retvalue = case column.type
when :string
if value.kind_of?(String)
"'#{quote_string(value.to_s)}'" # ' (for ruby-mode)
else
"'#{quote_string(value.to_yaml)}'"
end
when :integer
if value.kind_of?(TrueClass)
'1'
elsif value.kind_of?(FalseClass)
'0'
else
value.to_i.to_s
end
when :float
value.to_f.to_s
when :decimal
value.to_d.to_s("F")
when :datetime, :timestamp
"TIMESTAMP '#{value.strftime("%Y-%m-%d %H:%M:%S")}'"
when :time
"TIME '#{value.strftime("%H:%M:%S")}'"
when :date
"DATE '#{value.strftime("%Y-%m-%d")}'"
when :twelvebytekey
value = value.to_s.unpack("H*").first unless value.kind_of?(TwelveByteKey)
"X'#{value.to_s}'"
when :boolean
value = quoted_true if value.kind_of?(TrueClass)
value = quoted_false if value.kind_of?(FalseClass)
value
when :binary
blob_handle = @connection.create_blob(value.to_s)
puts "SQL -> Insert #{value.to_s.length} byte blob as #{retvalue}" if FB_TRACE
blob_handle.handle
when :text
if value.kind_of?(String)
clobdata = value.to_s # ' (for ruby-mode)
else
clobdata = value.to_yaml
end
clob_handle = @connection.create_clob(clobdata)
puts "SQL -> Insert #{value.to_s.length} byte clob as #{retvalue}" if FB_TRACE
clob_handle.handle
else
raise "*** UNKNOWN TYPE: #{column.type.inspect}"
end # case
# Since we don't have column type info, make a best guess based
# on the Ruby class of the value
else
retvalue = case value
when ActiveRecord::ConnectionAdapters::TwelveByteKey
s = value.unpack("H*").first
"X'#{s}'"
when String
if column && column.type == :binary
s = value.unpack("H*").first
"X'#{s}'"
elsif column && [:integer, :float, :decimal].include?(column.type)
value.to_s
else
"'#{quote_string(value)}'" # ' (for ruby-mode)
end
when NilClass
"NULL"
when TrueClass
(column && column.type == :integer ? '1' : quoted_true)
when FalseClass
(column && column.type == :integer ? '0' : quoted_false)
when Float, Fixnum, Bignum, BigDecimal
value.to_s
when Time, Date, DateTime
if column
case column.type
when :date
"DATE '#{value.strftime("%Y-%m-%d")}'"
when :time
"TIME '#{value.strftime("%H:%M:%S")}'"
when :timestamp
"TIMESTAMP '#{value.strftime("%Y-%m-%d %H:%M:%S")}'"
else
raise NotImplementedError, "Unknown column type!"
end # case
else # Column wasn't passed in, so try to guess the right type
if value.kind_of? Date
"DATE '#{value.strftime("%Y-%m-%d")}'"
else
if [:hour, :min, :sec].all? {|part| value.send(part).zero? }
"TIME '#{value.strftime("%H:%M:%S")}'"
else
"TIMESTAMP '#{quoted_date(value)}'"
end
end
end #if column
else
"'#{quote_string(value.to_yaml)}'"
end #case
end
else
retvalue = "NULL"
end
retvalue
end # def
# Quotes a string, escaping any ' (single quote) characters.
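# For example (illustrative):
#   quote_string("O'Brien")   # => "O''Brien"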
def quote_string(s)
s.gsub(/'/, "''") # ' (for ruby-mode)
end
def quote_column_name(name) #:nodoc:
%( "#{name}" )
end
def quoted_true
"true"
end
def quoted_false
"false"
end
# CONNECTION MANAGEMENT ====================================
def active?
true if @connection.status == 1
rescue => e
false
end
def reconnect!
@connection.close rescue nil
@connection = FBSQL_Connect.connect(*@connection_options.first(7))
end
# Close this connection
def disconnect!
@connection.close rescue nil
@active = false
end
# DATABASE STATEMENTS ======================================
# Returns an array of record hashes with the column names as keys and
# column values as values.
def select_all(sql, name = nil) #:nodoc:
fbsql = cleanup_fb_sql(sql)
return_value = []
fbresult = execute(sql, name)
puts "select_all SQL -> #{fbsql}" if FB_TRACE
columns = fbresult.columns
fbresult.each do |row|
puts "SQL <- #{row.inspect}" if FB_TRACE
hashed_row = {}
colnum = 0
row.each do |col|
hashed_row[columns[colnum]] = col
if col.kind_of?(FBSQL_LOB)
hashed_row[columns[colnum]] = col.read
end
colnum += 1
end
puts "raw row: #{hashed_row.inspect}" if FB_TRACE
return_value << hashed_row
end
return_value
end
def select_one(sql, name = nil) #:nodoc:
fbsql = cleanup_fb_sql(sql)
return_value = []
fbresult = execute(fbsql, name)
puts "SQL -> #{fbsql}" if FB_TRACE
columns = fbresult.columns
fbresult.each do |row|
puts "SQL <- #{row.inspect}" if FB_TRACE
hashed_row = {}
colnum = 0
row.each do |col|
hashed_row[columns[colnum]] = col
if col.kind_of?(FBSQL_LOB)
hashed_row[columns[colnum]] = col.read
end
colnum += 1
end
return_value << hashed_row
break
end
fbresult.clear
return_value.first
end
def query(sql, name = nil) #:nodoc:
fbsql = cleanup_fb_sql(sql)
puts "SQL(query) -> #{fbsql}" if FB_TRACE
log(fbsql, name) { @connection.query(fbsql) }
rescue => e
puts "FB Exception: #{e.inspect}" if FB_TRACE
raise e
end
def execute(sql, name = nil) #:nodoc:
fbsql = cleanup_fb_sql(sql)
puts "SQL(execute) -> #{fbsql}" if FB_TRACE
log(fbsql, name) { @connection.query(fbsql) }
rescue ActiveRecord::StatementInvalid => e
if e.message.scan(/Table name - \w* - exists/).empty?
puts "FB Exception: #{e.inspect}" if FB_TRACE
raise e
end
end
# Returns the last auto-generated ID from the affected table.
def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil) #:nodoc:
puts "SQL -> #{sql.inspect}" if FB_TRACE
execute(sql, name)
id_value || pk
end
# Executes the update statement and returns the number of rows affected.
def update(sql, name = nil) #:nodoc:
puts "SQL -> #{sql.inspect}" if FB_TRACE
execute(sql, name).num_rows
end
alias_method :delete, :update #:nodoc:
def set_pessimistic_transactions
if @transaction_mode == :optimistic
execute "SET TRANSACTION ISOLATION LEVEL SERIALIZABLE, LOCKING PESSIMISTIC, READ WRITE"
@transaction_mode = :pessimistic
end
end
def set_optimistic_transactions
if @transaction_mode == :pessimistic
execute "SET TRANSACTION ISOLATION LEVEL REPEATABLE READ, READ WRITE, LOCKING OPTIMISTIC"
@transaction_mode = :optimistic
end
end
def begin_db_transaction #:nodoc:
execute "SET COMMIT FALSE" rescue nil
end
def commit_db_transaction #:nodoc:
execute "COMMIT"
ensure
execute "SET COMMIT TRUE"
end
def rollback_db_transaction #:nodoc:
execute "ROLLBACK"
ensure
execute "SET COMMIT TRUE"
end
def add_limit_offset!(sql, options) #:nodoc:
if limit = options[:limit]
offset = options[:offset] || 0
# Here is the full syntax FrontBase supports:
# (from gclem@frontbase.com)
#
# TOP <limit - unsigned integer>
# TOP ( <offset expr>, <limit expr>)
# "TOP 0" is not allowed, so we have
# to use a cheap trick.
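# For example (illustrative), the SQL is rewritten in place:
#   "SELECT * FROM people" with :limit => 10                 becomes "SELECT TOP 10 * FROM people"
#   "SELECT * FROM people" with :limit => 10, :offset => 20  becomes "SELECT TOP(20,10) * FROM people"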
if limit.zero?
case sql
when /WHERE/i
sql.sub!(/WHERE/i, 'WHERE 0 = 1 AND ')
when /ORDER\s+BY/i
sql.sub!(/ORDER\s+BY/i, 'WHERE 0 = 1 ORDER BY')
else
sql << ' WHERE 0 = 1'
end
else
if offset.zero?
sql.replace sql.gsub("SELECT ","SELECT TOP #{limit} ")
else
sql.replace sql.gsub("SELECT ","SELECT TOP(#{offset},#{limit}) ")
end
end
end
end
def prefetch_primary_key?(table_name = nil)
true
end
# Returns the next sequence value from a sequence generator. Not generally
# called directly; used by ActiveRecord to get the next primary key value
# when inserting a new database record (see #prefetch_primary_key?).
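# For example (illustrative, the default sequence name being the table name):
#   next_sequence_value("people")   # issues: SELECT UNIQUE FROM people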
def next_sequence_value(sequence_name)
unique = select_value("SELECT UNIQUE FROM #{sequence_name}","Next Sequence Value")
# The test cases cannot handle a zero primary key
unique.zero? ? select_value("SELECT UNIQUE FROM #{sequence_name}","Next Sequence Value") : unique
end
def default_sequence_name(table, column)
table
end
# Set the sequence to the max value of the table's column.
def reset_sequence!(table, column, sequence = nil)
klasses = classes_for_table_name(table)
klass = klasses.nil? ? nil : klasses.first
pk = klass.primary_key unless klass.nil?
if pk && klass.columns_hash[pk].type == :integer
execute("SET UNIQUE FOR #{klass.table_name}(#{pk})")
end
end
def classes_for_table_name(table)
ActiveRecord::Base.send(:subclasses).select {|klass| klass.table_name == table}
end
def reset_pk_sequence!(table, pk = nil, sequence = nil)
klasses = classes_for_table_name(table)
klass = klasses.nil? ? nil : klasses.first
pk = klass.primary_key unless klass.nil?
if pk && klass.columns_hash[pk].type == :integer
mpk = select_value("SELECT MAX(#{pk}) FROM #{table}")
execute("SET UNIQUE FOR #{klass.table_name}(#{pk})")
end
end
# SCHEMA STATEMENTS ========================================
def structure_dump #:nodoc:
select_all("SHOW TABLES").inject('') do |structure, table|
structure << select_one("SHOW CREATE TABLE #{table.to_a.first.last}")["Create Table"] << ";\n\n"
end
end
def recreate_database(name) #:nodoc:
drop_database(name)
create_database(name)
end
def create_database(name) #:nodoc:
execute "CREATE DATABASE #{name}"
end
def drop_database(name) #:nodoc:
execute "DROP DATABASE #{name}"
end
def current_database
select_value('SELECT "CATALOG_NAME" FROM INFORMATION_SCHEMA.CATALOGS').downcase
end
def tables(name = nil) #:nodoc:
select_values(<<-SQL, nil)
SELECT "TABLE_NAME"
FROM INFORMATION_SCHEMA.TABLES AS T0,
INFORMATION_SCHEMA.SCHEMATA AS T1
WHERE T0.SCHEMA_PK = T1.SCHEMA_PK
AND "SCHEMA_NAME" = CURRENT_SCHEMA
SQL
end
def indexes(table_name, name = nil)#:nodoc:
indexes = []
current_index = nil
sql = <<-SQL
SELECT INDEX_NAME, T2.ORDINAL_POSITION, INDEX_COLUMN_COUNT, INDEX_TYPE,
"COLUMN_NAME", IS_NULLABLE
FROM INFORMATION_SCHEMA.TABLES AS T0,
INFORMATION_SCHEMA.INDEXES AS T1,
INFORMATION_SCHEMA.INDEX_COLUMN_USAGE AS T2,
INFORMATION_SCHEMA.COLUMNS AS T3
WHERE T0."TABLE_NAME" = '#{table_name}'
AND INDEX_TYPE <> 0
AND T0.TABLE_PK = T1.TABLE_PK
AND T0.TABLE_PK = T2.TABLE_PK
AND T0.TABLE_PK = T3.TABLE_PK
AND T1.INDEXES_PK = T2.INDEX_PK
AND T2.COLUMN_PK = T3.COLUMN_PK
ORDER BY INDEX_NAME, T2.ORDINAL_POSITION
SQL
columns = []
query(sql).each do |row|
index_name = row[0]
ord_position = row[1]
ndx_colcount = row[2]
index_type = row[3]
column_name = row[4]
is_unique = index_type == 1
columns << column_name
if ord_position == ndx_colcount
indexes << IndexDefinition.new(table_name, index_name, is_unique , columns)
columns = []
end
end
indexes
end
def columns(table_name, name = nil)#:nodoc:
sql = <<-SQL
SELECT "TABLE_NAME", "COLUMN_NAME", ORDINAL_POSITION, IS_NULLABLE, COLUMN_DEFAULT,
DATA_TYPE, DATA_TYPE_CODE, CHARACTER_MAXIMUM_LENGTH, NUMERIC_PRECISION,
NUMERIC_PRECISION_RADIX, NUMERIC_SCALE, DATETIME_PRECISION, DATETIME_PRECISION_LEADING
FROM INFORMATION_SCHEMA.TABLES T0,
INFORMATION_SCHEMA.COLUMNS T1,
INFORMATION_SCHEMA.DATA_TYPE_DESCRIPTOR T3
WHERE "TABLE_NAME" = '#{table_name}'
AND T0.TABLE_PK = T1.TABLE_PK
AND T0.TABLE_PK = T3.TABLE_OR_DOMAIN_PK
AND T1.COLUMN_PK = T3.COLUMN_NAME_PK
ORDER BY T1.ORDINAL_POSITION
SQL
rawresults = query(sql,name)
columns = []
rawresults.each do |field|
args = [base = field[0],
name = field[1],
typecode = field[6],
typestring = field[5],
limit = field[7],
precision = field[8],
scale = field[9],
default = field[4],
nullable = field[3]]
columns << FrontBaseColumn.new(*args)
end
columns
end
def create_table(name, options = {})
table_definition = TableDefinition.new(self)
table_definition.primary_key(options[:primary_key] || "id") unless options[:id] == false
yield table_definition
if options[:force]
drop_table(name) rescue nil
end
create_sql = "CREATE#{' TEMPORARY' if options[:temporary]} TABLE "
create_sql << "#{name} ("
create_sql << table_definition.to_sql
create_sql << ") #{options[:options]}"
begin_db_transaction
execute create_sql
commit_db_transaction
rescue ActiveRecord::StatementInvalid => e
raise e unless e.message.match(/Table name - \w* - exists/)
end
def rename_table(name, new_name)
columns = columns(name)
pkcol = columns.find {|c| c.fb_autogen}
execute "ALTER TABLE NAME #{name} TO #{new_name}"
if pkcol
change_column_default(new_name,pkcol.name,"UNIQUE")
begin_db_transaction
mpk = select_value("SELECT MAX(#{pkcol.name}) FROM #{new_name}")
mpk = 0 if mpk.nil?
execute "SET UNIQUE=#{mpk} FOR #{new_name}"
commit_db_transaction
end
end
# Drops a table from the database.
def drop_table(name, options = {})
execute "DROP TABLE #{name} RESTRICT"
rescue ActiveRecord::StatementInvalid => e
raise e unless e.message.match(/Referenced TABLE - \w* - does not exist/)
end
# Adds a new column to the named table.
# See TableDefinition#column for details of the options you can use.
def add_column(table_name, column_name, type, options = {})
add_column_sql = "ALTER TABLE #{table_name} ADD #{column_name} #{type_to_sql(type, options[:limit])}"
options[:type] = type
add_column_options!(add_column_sql, options)
execute(add_column_sql)
end
def add_column_options!(sql, options) #:nodoc:
default_value = quote(options[:default], options[:column])
if options_include_default?(options)
if options[:type] == :boolean
default_value = options[:default] == 0 ? quoted_false : quoted_true
end
sql << " DEFAULT #{default_value}"
end
sql << " NOT NULL" if options[:null] == false
end
# Removes the column from the table definition.
# ===== Examples
# remove_column(:suppliers, :qualification)
def remove_column(table_name, column_name)
execute "ALTER TABLE #{table_name} DROP #{column_name} RESTRICT"
end
def remove_index(table_name, options = {}) #:nodoc:
if options[:unique]
execute "ALTER TABLE #{table_name} DROP CONSTRAINT #{quote_column_name(index_name(table_name, options))} RESTRICT"
else
execute "DROP INDEX #{quote_column_name(index_name(table_name, options))}"
end
end
def change_column_default(table_name, column_name, default) #:nodoc:
execute "ALTER TABLE #{table_name} ALTER COLUMN #{column_name} SET DEFAULT #{default}" if default != "NULL"
end
def change_column(table_name, column_name, type, options = {}) #:nodoc:
change_column_sql = %( ALTER COLUMN "#{table_name}"."#{column_name}" TO #{type_to_sql(type, options[:limit])} )
execute(change_column_sql)
change_column_sql = %( ALTER TABLE "#{table_name}" ALTER COLUMN "#{column_name}" )
if options_include_default?(options)
default_value = quote(options[:default], options[:column])
if type == :boolean
default_value = options[:default] == 0 ? quoted_false : quoted_true
end
change_column_sql << " SET DEFAULT #{default_value}"
end
execute(change_column_sql)
# change_column_sql = "ALTER TABLE #{table_name} CHANGE #{column_name} #{column_name} #{type_to_sql(type, options[:limit])}"
# add_column_options!(change_column_sql, options)
# execute(change_column_sql)
end
def rename_column(table_name, column_name, new_column_name) #:nodoc:
execute %( ALTER COLUMN NAME "#{table_name}"."#{column_name}" TO "#{new_column_name}" )
end
private
# Clean up sql to make it something FrontBase can digest
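# For example (illustrative):
#   cleanup_fb_sql("SELECT * FROM people\n-- a comment\nWHERE id != 1")
#   # => "SELECT * FROM people\nWHERE id <> 1"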
def cleanup_fb_sql(sql) #:nodoc:
# Turn non-standard != into standard <>
cleansql = sql.gsub("!=", "<>")
# Strip blank lines and comments
cleansql.split("\n").reject { |line| line.match(/^(?:\s*|--.*)$/) } * "\n"
end
end
end
end

View file

@@ -0,0 +1,106 @@
module ActiveRecord
module Locking
# Active Record supports optimistic locking if the field <tt>lock_version</tt> is present. Each update to the
# record increments the lock_version column, and the locking facilities ensure that when a record has been
# instantiated twice, the copy saved second raises a StaleObjectError if the first copy was updated in the meantime. Example:
#
# p1 = Person.find(1)
# p2 = Person.find(1)
#
# p1.first_name = "Michael"
# p1.save
#
# p2.first_name = "should fail"
# p2.save # Raises an ActiveRecord::StaleObjectError
#
# You're then responsible for dealing with the conflict by rescuing the exception and either rolling back, merging,
# or otherwise applying the business logic needed to resolve the conflict.
#
# You must ensure that your database schema defaults the lock_version column to 0.
#
# This behavior can be turned off by setting <tt>ActiveRecord::Base.lock_optimistically = false</tt>.
# To override the name of the lock_version column, invoke the <tt>set_locking_column</tt> method.
# This method uses the same syntax as <tt>set_table_name</tt>.
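#
# For example (illustrative):
#
#   class Person < ActiveRecord::Base
#     set_locking_column 'lock_person'
#   end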
module Optimistic
def self.included(base) #:nodoc:
super
base.extend ClassMethods
base.cattr_accessor :lock_optimistically, :instance_writer => false
base.lock_optimistically = true
base.alias_method_chain :update, :lock
base.alias_method_chain :attributes_from_column_definition, :lock
class << base
alias_method :locking_column=, :set_locking_column
end
end
def locking_enabled? #:nodoc:
lock_optimistically && respond_to?(self.class.locking_column)
end
def attributes_from_column_definition_with_lock
result = attributes_from_column_definition_without_lock
# If the locking column has no default value set,
# start the lock version at zero. Note we can't use
# locking_enabled? at this point as @attributes may
# not have been initialized yet
if lock_optimistically && result.include?(self.class.locking_column)
result[self.class.locking_column] ||= 0
end
return result
end
def update_with_lock #:nodoc:
return update_without_lock unless locking_enabled?
lock_col = self.class.locking_column
previous_value = send(lock_col)
send(lock_col + '=', previous_value + 1)
affected_rows = connection.update(<<-end_sql, "#{self.class.name} Update with optimistic locking")
UPDATE #{self.class.table_name}
SET #{quoted_comma_pair_list(connection, attributes_with_quotes(false))}
WHERE #{self.class.primary_key} = #{quote_value(id)}
AND #{self.class.quoted_locking_column} = #{quote_value(previous_value)}
end_sql
unless affected_rows == 1
raise ActiveRecord::StaleObjectError, "Attempted to update a stale object"
end
return true
end
module ClassMethods
DEFAULT_LOCKING_COLUMN = 'lock_version'
# Set the column to use for optimistic locking. Defaults to lock_version.
def set_locking_column(value = nil, &block)
define_attr_method :locking_column, value, &block
value
end
# The version column used for optimistic locking. Defaults to lock_version.
def locking_column
reset_locking_column
end
# Quote the column name used for optimistic locking.
def quoted_locking_column
connection.quote_column_name(locking_column)
end
# Reset the column used for optimistic locking back to the lock_version default.
def reset_locking_column
set_locking_column DEFAULT_LOCKING_COLUMN
end
end
end
end
end

View file

@@ -0,0 +1,77 @@
# Copyright (c) 2006 Shugo Maeda <shugo@ruby-lang.org>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject
# to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
# ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
# CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
module ActiveRecord
module Locking
# Locking::Pessimistic provides support for row-level locking using
# SELECT ... FOR UPDATE and other lock types.
#
# Pass :lock => true to ActiveRecord::Base.find to obtain an exclusive
# lock on the selected rows:
# # select * from accounts where id=1 for update
# Account.find(1, :lock => true)
#
# Pass :lock => 'some locking clause' to give a database-specific locking clause
# of your own such as 'LOCK IN SHARE MODE' or 'FOR UPDATE NOWAIT'.
#
# Example:
# Account.transaction do
# # select * from accounts where name = 'shugo' limit 1 for update
# shugo = Account.find(:first, :conditions => "name = 'shugo'", :lock => true)
# yuko = Account.find(:first, :conditions => "name = 'yuko'", :lock => true)
# shugo.balance -= 100
# shugo.save!
# yuko.balance += 100
# yuko.save!
# end
#
# You can also use ActiveRecord::Base#lock! method to lock one record by id.
# This may be better if you don't need to lock every row. Example:
# Account.transaction do
# # select * from accounts where ...
# accounts = Account.find(:all, :conditions => ...)
# account1 = accounts.detect { |account| ... }
# account2 = accounts.detect { |account| ... }
# # select * from accounts where id=? for update
# account1.lock!
# account2.lock!
# account1.balance -= 100
# account1.save!
# account2.balance += 100
# account2.save!
# end
#
# Database-specific information on row locking:
# MySQL: http://dev.mysql.com/doc/refman/5.1/en/innodb-locking-reads.html
# PostgreSQL: http://www.postgresql.org/docs/8.1/interactive/sql-select.html#SQL-FOR-UPDATE-SHARE
module Pessimistic
# Obtain a row lock on this record. Reloads the record to obtain the requested
# lock. Pass an SQL locking clause to append to the end of the SELECT statement
# or pass true for "FOR UPDATE" (the default, an exclusive row lock). Returns
# the locked record.
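#
# For example (illustrative):
#   account = Account.find(1)
#   account.lock!                          # reloads with: select ... for update
#   account.lock!("LOCK IN SHARE MODE")    # appends a custom locking clause instead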
def lock!(lock = true)
reload(:lock => lock) unless new_record?
self
end
end
end
end

View file

@@ -0,0 +1,308 @@
module ActiveRecord #:nodoc:
module XmlSerialization
# Builds an XML document to represent the model. Some configuration is
# available through +options+; however, more complicated cases should
# override ActiveRecord's to_xml.
#
# By default the generated XML document will include the processing
# instruction and all the object's attributes. For example:
#
# <?xml version="1.0" encoding="UTF-8"?>
# <topic>
# <title>The First Topic</title>
# <author-name>David</author-name>
# <id type="integer">1</id>
# <approved type="boolean">false</approved>
# <replies-count type="integer">0</replies-count>
# <bonus-time type="datetime">2000-01-01T08:28:00+12:00</bonus-time>
# <written-on type="datetime">2003-07-16T09:28:00+1200</written-on>
# <content>Have a nice day</content>
# <author-email-address>david@loudthinking.com</author-email-address>
# <parent-id></parent-id>
# <last-read type="date">2004-04-15</last-read>
# </topic>
#
# This behavior can be controlled with :only, :except,
# :skip_instruct, :skip_types and :dasherize. The :only and
# :except options are the same as for the #attributes method.
# The default is to dasherize all column names; to disable this,
# set :dasherize to false. To omit the column type attribute from
# the XML output, set :skip_types to true.
#
# For instance:
#
# topic.to_xml(:skip_instruct => true, :except => [ :id, :bonus_time, :written_on, :replies_count ])
#
# <topic>
# <title>The First Topic</title>
# <author-name>David</author-name>
# <approved type="boolean">false</approved>
# <content>Have a nice day</content>
# <author-email-address>david@loudthinking.com</author-email-address>
# <parent-id></parent-id>
# <last-read type="date">2004-04-15</last-read>
# </topic>
#
# To include first level associations use :include
#
# firm.to_xml :include => [ :account, :clients ]
#
# <?xml version="1.0" encoding="UTF-8"?>
# <firm>
# <id type="integer">1</id>
# <rating type="integer">1</rating>
# <name>37signals</name>
# <clients>
# <client>
# <rating type="integer">1</rating>
# <name>Summit</name>
# </client>
# <client>
# <rating type="integer">1</rating>
# <name>Microsoft</name>
# </client>
# </clients>
# <account>
# <id type="integer">1</id>
# <credit-limit type="integer">50</credit-limit>
# </account>
# </firm>
#
# To include the results of any methods called on the object(s), use :methods
#
# firm.to_xml :methods => [ :calculated_earnings, :real_earnings ]
#
# <firm>
# # ... normal attributes as shown above ...
# <calculated-earnings>100000000000000000</calculated-earnings>
# <real-earnings>5</real-earnings>
# </firm>
#
# To call any Procs on the object(s), use :procs. The Procs
# are passed a modified version of the options hash that was
# given to #to_xml.
#
# proc = Proc.new { |options| options[:builder].tag!('abc', 'def') }
# firm.to_xml :procs => [ proc ]
#
# <firm>
# # ... normal attributes as shown above ...
# <abc>def</abc>
# </firm>
#
# You may override the to_xml method in your ActiveRecord::Base
# subclasses if you need to. The general form of doing this is
#
# class IHaveMyOwnXML < ActiveRecord::Base
# def to_xml(options = {})
# options[:indent] ||= 2
# xml = options[:builder] ||= Builder::XmlMarkup.new(:indent => options[:indent])
# xml.instruct! unless options[:skip_instruct]
# xml.level_one do
# xml.tag!(:second_level, 'content')
# end
# end
# end
def to_xml(options = {})
XmlSerializer.new(self, options).to_s
end
end
class XmlSerializer #:nodoc:
attr_reader :options
def initialize(record, options = {})
@record, @options = record, options.dup
end
def builder
@builder ||= begin
options[:indent] ||= 2
builder = options[:builder] ||= Builder::XmlMarkup.new(:indent => options[:indent])
unless options[:skip_instruct]
builder.instruct!
options[:skip_instruct] = true
end
builder
end
end
def root
root = (options[:root] || @record.class.to_s.underscore).to_s
dasherize? ? root.dasherize : root
end
def dasherize?
!options.has_key?(:dasherize) || options[:dasherize]
end
# To replicate the behavior in ActiveRecord#attributes,
# :except takes precedence over :only. If :only is not set
# for an N level model but is set for the N+1 level models,
# then because :except is set to a default value, the second
# level model can have both :except and :only set. So if
# :only is set, always delete :except.
def serializable_attributes
attribute_names = @record.attribute_names
if options[:only]
options.delete(:except)
attribute_names = attribute_names & Array(options[:only]).collect { |n| n.to_s }
else
options[:except] = Array(options[:except]) | Array(@record.class.inheritance_column)
attribute_names = attribute_names - options[:except].collect { |n| n.to_s }
end
attribute_names.collect { |name| Attribute.new(name, @record) }
end
def serializable_method_attributes
Array(options[:methods]).collect { |name| MethodAttribute.new(name.to_s, @record) }
end
def add_attributes
(serializable_attributes + serializable_method_attributes).each do |attribute|
add_tag(attribute)
end
end
def add_includes
if include_associations = options.delete(:include)
root_only_or_except = { :except => options[:except],
:only => options[:only] }
include_has_options = include_associations.is_a?(Hash)
for association in include_has_options ? include_associations.keys : Array(include_associations)
association_options = include_has_options ? include_associations[association] : root_only_or_except
opts = options.merge(association_options)
case @record.class.reflect_on_association(association).macro
when :has_many, :has_and_belongs_to_many
records = @record.send(association).to_a
unless records.empty?
tag = records.first.class.to_s.underscore.pluralize
tag = tag.dasherize if dasherize?
builder.tag!(tag) do
records.each { |r| r.to_xml(opts.merge(:root => association.to_s.singularize)) }
end
end
when :has_one, :belongs_to
if record = @record.send(association)
record.to_xml(opts.merge(:root => association))
end
end
end
options[:include] = include_associations
end
end
def add_procs
if procs = options.delete(:procs)
[ *procs ].each do |proc|
proc.call(options)
end
end
end
def add_tag(attribute)
builder.tag!(
dasherize? ? attribute.name.dasherize : attribute.name,
attribute.value.to_s,
attribute.decorations(!options[:skip_types])
)
end
def serialize
args = [root]
if options[:namespace]
args << {:xmlns=>options[:namespace]}
end
builder.tag!(*args) do
add_attributes
add_includes
add_procs
end
end
alias_method :to_s, :serialize
class Attribute #:nodoc:
attr_reader :name, :value, :type
def initialize(name, record)
@name, @record = name, record
@type = compute_type
@value = compute_value
end
# There is a significant speed improvement if the value
# does not need to be escaped, as #tag! escapes all values
# to ensure that valid XML is generated. For known binary
# values, it is at least an order of magnitude faster to
# Base64 encode binary values and directly put them in the
# output XML than to pass the original value or the Base64
# encoded value to the #tag! method. It definitely makes
# no sense to Base64 encode the value and then give it to
# #tag!, since that just adds additional overhead.
def needs_encoding?
![ :binary, :date, :datetime, :boolean, :float, :integer ].include?(type)
end
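# For example (illustrative), a binary column named +avatar+ yields
# decorations of { :encoding => 'base64', :type => :binary }, so the
# tag is emitted as <avatar encoding="base64" type="binary">...</avatar>.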
def decorations(include_types = true)
decorations = {}
if type == :binary
decorations[:encoding] = 'base64'
end
if include_types && type != :string
decorations[:type] = type
end
decorations
end
protected
def compute_type
type = @record.class.columns_hash[name].type
case type
when :text
:string
when :time
:datetime
else
type
end
end
def compute_value
value = @record.send(name)
if formatter = Hash::XML_FORMATTING[type.to_s]
value ? formatter.call(value) : nil
else
value
end
end
end
class MethodAttribute < Attribute #:nodoc:
protected
def compute_type
Hash::XML_TYPE_NAMES[@record.send(name).class.name] || :string
end
end
end
end