2008-03-19 22:33:41 +01:00
|
|
|
require 'cgi'
|
2008-06-07 17:32:51 +02:00
|
|
|
require "base64"
|
2008-03-19 22:33:41 +01:00
|
|
|
|
2008-09-12 06:14:34 +02:00
|
|
|
module CouchRest
|
2008-03-18 19:37:10 +01:00
|
|
|
class Database
|
2009-01-29 02:36:36 +01:00
|
|
|
attr_reader :server, :host, :name, :root, :uri
|
2008-12-15 06:17:35 +01:00
|
|
|
attr_accessor :bulk_save_cache_limit
|
2008-09-08 00:28:20 +02:00
|
|
|
|
2008-10-14 10:07:48 +02:00
|
|
|
# Create a CouchRest::Database adapter for the supplied CouchRest::Server
|
|
|
|
# and database name.
|
|
|
|
#
|
2008-09-30 08:39:57 +02:00
|
|
|
# ==== Parameters
|
|
|
|
# server<CouchRest::Server>:: database host
|
|
|
|
# name<String>:: database name
|
|
|
|
#
|
2009-01-29 02:36:36 +01:00
|
|
|
# Build a Database adapter bound to +server+ for database +name+.
# Slashes in the name are URL-escaped so "a/b" style db names work.
def initialize(server, name)
  @name   = name
  @server = server
  @host   = server.uri
  @uri    = "/" + name.gsub('/', '%2F')
  @root   = host + uri
  @streamer = Streamer.new(self)
  # Documents queued by save_doc(doc, true) until the next flush.
  @bulk_save_cache = []
  @bulk_save_cache_limit = 500 # must be smaller than the uuid count
end
|
|
|
|
|
2008-09-30 08:39:57 +02:00
|
|
|
# returns the database's uri
|
2008-08-03 21:51:17 +02:00
|
|
|
def to_s
|
2009-06-08 03:51:31 +02:00
|
|
|
@root
|
2008-08-03 21:51:17 +02:00
|
|
|
end
|
|
|
|
|
2008-09-30 08:26:34 +02:00
|
|
|
# GET the database info from CouchDB
|
2008-09-07 21:43:13 +02:00
|
|
|
# GET the database info document from CouchDB.
def info
  CouchRest.get(@root)
end
|
|
|
|
|
2008-09-30 08:26:34 +02:00
|
|
|
# Query the <tt>_all_docs</tt> view. Accepts all the same arguments as view.
|
2009-01-29 02:36:36 +01:00
|
|
|
# Query the <tt>_all_docs</tt> view. Accepts all the same arguments as #view.
# When a :keys array is supplied the request is POSTed so the key list
# is carried in the body rather than the query string.
def documents(params = {})
  keys = params.delete(:keys)
  url  = CouchRest.paramify_url("#{@root}/_all_docs", params)
  keys ? CouchRest.post(url, :keys => keys) : CouchRest.get(url)
end
|
|
|
|
|
2009-05-09 04:00:39 +02:00
|
|
|
# load a set of documents by passing an array of ids
|
|
|
|
# Load a set of documents by passing an array of ids.
def get_bulk(ids)
  documents({:keys => ids, :include_docs => true})
end
alias :bulk_load :get_bulk
|
2009-05-09 04:00:39 +02:00
|
|
|
|
2008-10-14 10:07:48 +02:00
|
|
|
# POST a temporary view function to CouchDB for querying. This is not
|
|
|
|
# recommended, as you don't get any performance benefit from CouchDB's
|
|
|
|
# materialized views. Can be quite slow on large databases.
|
2009-01-29 02:36:36 +01:00
|
|
|
# POST a temporary view function to CouchDB for querying. This is not
# recommended, as you don't get any performance benefit from CouchDB's
# materialized views. Can be quite slow on large databases.
def slow_view(funcs, params = {})
  keys = params.delete(:keys)
  funcs = funcs.merge(:keys => keys) if keys
  url = CouchRest.paramify_url("#{@root}/_temp_view", params)
  response = HttpAbstraction.post(url, funcs.to_json, "Content-Type" => 'application/json')
  JSON.parse(response)
end

# backwards compatibility is a plus
alias :temp_view :slow_view
|
2008-03-20 02:10:16 +01:00
|
|
|
|
2008-10-14 10:07:48 +02:00
|
|
|
# Query a CouchDB view as defined by a <tt>_design</tt> document. Accepts
|
|
|
|
# paramaters as described in http://wiki.apache.org/couchdb/HttpViewApi
|
2009-01-29 02:36:36 +01:00
|
|
|
# Query a CouchDB view as defined by a <tt>_design</tt> document. Accepts
# parameters as described in http://wiki.apache.org/couchdb/HttpViewApi
#
# With a :keys param the query is POSTed; with a block the result rows
# are streamed through it instead of being returned all at once.
def view(name, params = {}, &block)
  keys = params.delete(:keys)
  # Name is usually "design/view", but the view part may itself contain
  # slashes, so rejoin everything after the first segment.
  dname, *rest = name.split('/')
  vname = rest.join('/')
  url = CouchRest.paramify_url("#{@root}/_design/#{dname}/_view/#{vname}", params)
  if keys
    CouchRest.post(url, :keys => keys)
  elsif block_given?
    @streamer.view("_design/#{dname}/_view/#{vname}", params, &block)
  else
    CouchRest.get(url)
  end
end
|
2008-07-05 01:56:09 +02:00
|
|
|
|
2008-09-30 08:26:34 +02:00
|
|
|
# GET a document from CouchDB, by id. Returns a Ruby Hash.
|
2009-03-15 02:42:34 +01:00
|
|
|
# GET a document from CouchDB, by id. Returns a Ruby Hash wrapped as a
# Design (for "_design/..." ids) or Document, with its database set;
# non-hash responses are passed straight through.
def get(id, params = {})
  url = CouchRest.paramify_url("#{@root}/#{escape_docid(id)}", params)
  result = CouchRest.get(url)
  return result unless result.is_a?(Hash)
  klass = result["_id"] =~ /^_design/ ? Design : Document
  doc = klass.new(result)
  doc.database = self
  doc
end
|
2008-03-19 16:57:20 +01:00
|
|
|
|
2008-09-30 08:26:34 +02:00
|
|
|
# GET an attachment directly from CouchDB
|
2009-02-03 00:24:31 +01:00
|
|
|
# GET an attachment directly from CouchDB. Returns the raw body.
def fetch_attachment(doc, name)
  HttpAbstraction.get(url_for_attachment(doc, name))
end
|
|
|
|
|
2008-10-01 02:22:54 +02:00
|
|
|
# PUT an attachment directly to CouchDB
|
2009-01-29 02:36:36 +01:00
|
|
|
# PUT an attachment directly to CouchDB. +file+ is the raw attachment
# body; +options+ are passed through to the HTTP layer.
#
# NOTE(review): the id and attachment name are escaped inside
# url_for_attachment/uri_for_attachment, so they must not be escaped
# here as well — the previous CGI.escape(name) double-encoded names
# containing spaces or special characters, and the escaped docid local
# was computed but never used.
def put_attachment(doc, name, file, options = {})
  uri = url_for_attachment(doc, name)
  JSON.parse(HttpAbstraction.put(uri, file, options))
end
|
2008-10-01 01:21:28 +02:00
|
|
|
|
2009-02-02 09:11:38 +01:00
|
|
|
# DELETE an attachment directly from CouchDB
|
|
|
|
# DELETE an attachment directly from CouchDB.
# The doc must carry a '_rev': uri_for_attachment only appends
# ?rev=... when one is present, and CouchDB requires it for DELETE.
def delete_attachment(doc, name)
  uri = url_for_attachment(doc, name)
  JSON.parse(HttpAbstraction.delete(uri))
end
|
|
|
|
|
2008-10-14 10:07:48 +02:00
|
|
|
# Save a document to CouchDB. This will use the <tt>_id</tt> field from
|
|
|
|
# the document as the id for PUT, or request a new UUID from CouchDB, if
|
|
|
|
# no <tt>_id</tt> is present on the document. IDs are attached to
|
|
|
|
# documents on the client side because POST has the curious property of
|
|
|
|
# being automatically retried by proxies in the event of network
|
|
|
|
# segmentation and lost responses.
|
2008-12-15 06:17:35 +01:00
|
|
|
#
|
|
|
|
# If <tt>bulk</tt> is true (false by default) the document is cached for bulk-saving later.
|
|
|
|
# Bulk saving happens automatically when #bulk_save_cache limit is exceded, or on the next non bulk save.
|
2009-01-29 02:36:36 +01:00
|
|
|
# Save a document to CouchDB. This will use the <tt>_id</tt> field from
# the document as the id for PUT, or request a new UUID from CouchDB, if
# no <tt>_id</tt> is present on the document. IDs are attached to
# documents on the client side because POST has the curious property of
# being automatically retried by proxies in the event of network
# segmentation and lost responses.
#
# If <tt>bulk</tt> is true (false by default) the document is cached for
# bulk-saving later. Bulk saving happens automatically when the
# #bulk_save_cache limit is exceeded, or on the next non-bulk save.
def save_doc(doc, bulk = false)
  if doc['_attachments']
    doc['_attachments'] = encode_attachments(doc['_attachments'])
  end
  if bulk
    @bulk_save_cache << doc
    return bulk_save if @bulk_save_cache.length >= @bulk_save_cache_limit
    return {"ok" => true} # Compatibility with Document#save
  elsif @bulk_save_cache.length > 0
    # A non-bulk save flushes any pending bulk cache first, so ordering
    # relative to earlier bulk saves is preserved.
    # (The previous `!bulk &&` guard here was redundant: the `if bulk`
    # branch always returns.)
    bulk_save
  end
  result = if doc['_id']
    slug = escape_docid(doc['_id'])
    begin
      CouchRest.put "#{@root}/#{slug}", doc
    rescue HttpAbstraction::ResourceNotFound
      p "resource not found when saving even tho an id was passed"
      # Fall back to a fresh server-generated uuid.
      slug = doc['_id'] = @server.next_uuid
      CouchRest.put "#{@root}/#{slug}", doc
    end
  else
    begin
      slug = doc['_id'] = @server.next_uuid
      CouchRest.put "#{@root}/#{slug}", doc
    rescue # old version of couchdb
      CouchRest.post @root, doc
    end
  end
  if result['ok']
    # Reflect the server-assigned id/rev back onto the document.
    doc['_id'] = result['id']
    doc['_rev'] = result['rev']
    doc.database = self if doc.respond_to?(:database=)
  end
  result
end
|
|
|
|
|
2009-01-29 02:36:36 +01:00
|
|
|
### DEPRECATION NOTICE
|
|
|
|
### DEPRECATION NOTICE
def save(doc, bulk = false)
  puts "CouchRest::Database's save method is being deprecated, please use save_doc instead"
  save_doc(doc, bulk)
end
|
|
|
|
|
|
|
|
|
2008-10-14 10:07:48 +02:00
|
|
|
# POST an array of documents to CouchDB. If any of the documents are
|
|
|
|
# missing ids, supply one from the uuid cache.
|
2008-12-15 06:17:35 +01:00
|
|
|
#
|
|
|
|
# If called with no arguments, bulk saves the cache of documents to be bulk saved.
|
2009-01-24 08:25:24 +01:00
|
|
|
# POST an array of documents to CouchDB. If any of the documents are
# missing ids, supply one from the uuid cache.
#
# If called with no arguments, bulk saves the cache of documents to be
# bulk saved.
def bulk_save(docs = nil, use_uuids = true)
  if docs.nil?
    # Flush (and reset) the cache accumulated by save_doc(doc, true).
    docs = @bulk_save_cache
    @bulk_save_cache = []
  end
  if use_uuids
    # Only the docs without ids need uuids; the partition's first half
    # was previously bound to an unused local, now discarded.
    _with_ids, noids = docs.partition { |d| d['_id'] }
    uuid_count = [noids.length, @server.uuid_batch_count].max
    noids.each do |doc|
      # Best-effort: if the uuid fetch fails, leave the id off and let
      # CouchDB assign one on POST.
      nextid = @server.next_uuid(uuid_count) rescue nil
      doc['_id'] = nextid if nextid
    end
  end
  CouchRest.post "#{@root}/_bulk_docs", {:docs => docs}
end
alias :bulk_delete :bulk_save
|
2008-03-20 00:38:07 +01:00
|
|
|
|
2008-10-14 10:07:48 +02:00
|
|
|
# DELETE the document from CouchDB that has the given <tt>_id</tt> and
|
|
|
|
# <tt>_rev</tt>.
|
2009-01-09 10:59:08 +01:00
|
|
|
#
|
|
|
|
# If <tt>bulk</tt> is true (false by default) the deletion is recorded for bulk-saving (bulk-deletion :) later.
|
|
|
|
# Bulk saving happens automatically when #bulk_save_cache limit is exceded, or on the next non bulk save.
|
2009-01-29 02:36:36 +01:00
|
|
|
# DELETE the document from CouchDB that has the given <tt>_id</tt> and
# <tt>_rev</tt>.
#
# If <tt>bulk</tt> is true (false by default) the deletion is recorded for
# bulk-saving (bulk-deletion :) later. Bulk saving happens automatically
# when the #bulk_save_cache limit is exceeded, or on the next non-bulk save.
def delete_doc(doc, bulk = false)
  raise ArgumentError, "_id and _rev required for deleting" unless doc['_id'] && doc['_rev']
  if bulk
    @bulk_save_cache << { '_id' => doc['_id'], '_rev' => doc['_rev'], '_deleted' => true }
    return bulk_save if @bulk_save_cache.length >= @bulk_save_cache_limit
    return { "ok" => true } # Mimic the non-deferred version
  end
  CouchRest.delete "#{@root}/#{escape_docid(doc['_id'])}?rev=#{doc['_rev']}"
end
|
|
|
|
|
|
|
|
### DEPRECATION NOTICE
|
|
|
|
### DEPRECATION NOTICE
def delete(doc, bulk = false)
  puts "CouchRest::Database's delete method is being deprecated, please use delete_doc instead"
  delete_doc(doc, bulk)
end
|
2009-01-05 09:44:12 +01:00
|
|
|
|
|
|
|
# COPY an existing document to a new id. If the destination id currently exists, a rev must be provided.
|
|
|
|
# <tt>dest</tt> can take one of two forms if overwriting: "id_to_overwrite?rev=revision" or the actual doc
|
|
|
|
# hash with a '_rev' key
|
2009-01-29 02:36:36 +01:00
|
|
|
# COPY an existing document to a new id. If the destination id currently
# exists, a rev must be provided. <tt>dest</tt> can take one of two forms
# if overwriting: "id_to_overwrite?rev=revision" or the actual doc hash
# with a '_rev' key.
def copy_doc(doc, dest)
  raise ArgumentError, "_id is required for copying" unless doc['_id']
  slug = escape_docid(doc['_id'])
  destination =
    if dest.respond_to?(:has_key?) && dest['_id'] && dest['_rev']
      "#{dest['_id']}?rev=#{dest['_rev']}"
    else
      dest
    end
  CouchRest.copy "#{@root}/#{slug}", destination
end
|
|
|
|
|
|
|
|
### DEPRECATION NOTICE
|
|
|
|
### DEPRECATION NOTICE
def copy(doc, dest)
  puts "CouchRest::Database's copy method is being deprecated, please use copy_doc instead"
  copy_doc(doc, dest)
end
|
|
|
|
|
2008-12-15 00:29:15 +01:00
|
|
|
# Compact the database, removing old document revisions and optimizing space use.
|
|
|
|
# Compact the database, removing old document revisions and optimizing
# space use.
def compact!
  CouchRest.post("#{@root}/_compact")
end
|
|
|
|
|
|
|
|
# Create the database
|
|
|
|
# Create the database on the server. Returns a truthy value on success;
# creation errors are swallowed and reported as false.
def create!
  created = server.create_db(@name) rescue false
  created && true
end
|
|
|
|
|
|
|
|
# Delete and re create the database
|
|
|
|
# Delete and re create the database
#
# NOTE(review): when delete! succeeds, create! runs twice (once in the
# body and once in ensure); create! swallows its own errors, so the
# second call is a harmless no-op-ish retry. The rescue keeps a missing
# database (nothing to delete) from aborting the recreate — TODO confirm
# whether the double create! is intentional.
def recreate!
  delete!
  create!
rescue HttpAbstraction::ResourceNotFound
ensure
  create!
end
|
2009-01-31 19:38:44 +01:00
|
|
|
|
|
|
|
# Replicates via "pulling" from another database to this database. Makes no attempt to deal with conflicts.
|
|
|
|
# Replicates via "pulling" from another database to this database.
# Makes no attempt to deal with conflicts.
def replicate_from(other_db)
  # Fixed error-message typo: "CouchReset" -> "CouchRest".
  raise ArgumentError, "must provide a CouchRest::Database" unless other_db.kind_of?(CouchRest::Database)
  CouchRest.post "#{@host}/_replicate", :source => other_db.root, :target => name
end
|
|
|
|
|
|
|
|
# Replicates via "pushing" to another database. Makes no attempt to deal with conflicts.
|
|
|
|
# Replicates via "pushing" to another database.
# Makes no attempt to deal with conflicts.
def replicate_to(other_db)
  # Fixed error-message typo: "CouchReset" -> "CouchRest".
  raise ArgumentError, "must provide a CouchRest::Database" unless other_db.kind_of?(CouchRest::Database)
  CouchRest.post "#{@host}/_replicate", :target => other_db.root, :source => name
end
|
|
|
|
|
2008-10-14 10:07:48 +02:00
|
|
|
# DELETE the database itself. This is not undoable and could be rather
|
|
|
|
# catastrophic. Use with care!
|
2008-03-18 19:37:10 +01:00
|
|
|
# DELETE the database itself. This is not undoable and could be rather
# catastrophic. Use with care!
def delete!
  # Stale design-doc freshness flags would otherwise survive the wipe.
  clear_extended_doc_fresh_cache
  CouchRest.delete(@root)
end
|
2008-09-07 22:51:26 +02:00
|
|
|
|
2008-06-07 17:32:51 +02:00
|
|
|
private
|
2009-02-02 09:11:38 +01:00
|
|
|
|
2009-05-28 03:16:50 +02:00
|
|
|
# Mark every ExtendedDocument subclass's design doc as stale so it gets
# re-pushed after the database is deleted or recreated.
def clear_extended_doc_fresh_cache
  ::CouchRest::ExtendedDocument.subclasses.each do |klass|
    klass.design_doc_fresh = false if klass.respond_to?(:design_doc_fresh=)
  end
end
|
2009-06-08 03:51:31 +02:00
|
|
|
|
2009-02-25 07:51:13 +01:00
|
|
|
# Build the relative URI ("/docid/name?rev=...") for an attachment.
# Accepts either a doc hash (with '_id'/'_rev') or — deprecated — a
# plain docid String, in which case no rev is appended.
def uri_for_attachment(doc, name)
  if doc.is_a?(String)
    puts "CouchRest::Database#fetch_attachment will eventually require a doc as the first argument, not a doc.id"
    docid = doc
    rev = nil
  else
    docid = doc['_id']
    rev = doc['_rev']
  end
  docid = escape_docid(docid)
  name = CGI.escape(name)
  # Use the local rev rather than re-indexing doc: indexing would raise
  # on the deprecated string-id path.
  rev = "?rev=#{rev}" if rev
  "/#{docid}/#{name}#{rev}"
end
|
|
|
|
|
|
|
|
# Absolute URL for an attachment: database root plus the relative
# attachment URI built by uri_for_attachment.
def url_for_attachment(doc, name)
  @root + uri_for_attachment(doc, name)
end
|
|
|
|
|
2009-01-13 04:54:02 +01:00
|
|
|
# URL-escape a document id. Design-doc ids keep their "_design/" prefix
# intact and only the remainder is escaped; everything else is escaped
# whole.
def escape_docid(id)
  match = /^_design\/(.*)/.match(id)
  match ? "_design/#{CGI.escape(match[1])}" : CGI.escape(id)
end
|
|
|
|
|
2009-01-29 02:36:36 +01:00
|
|
|
# Base64-encode the 'data' field of every non-stub attachment in place,
# as required for CouchDB inline attachments. Returns the (mutated)
# attachments hash.
def encode_attachments(attachments)
  attachments.each do |_name, attachment|
    next if attachment['stub']
    attachment['data'] = base64(attachment['data'])
  end
  attachments
end
|
2008-09-07 22:51:26 +02:00
|
|
|
|
2009-01-29 02:36:36 +01:00
|
|
|
# Base64-encode +data+ as a single line. Base64.encode64 inserts
# newlines every 60 characters; CouchDB wants the value unwrapped, and
# newlines are the only whitespace encode64 emits.
def base64(data)
  Base64.encode64(data).delete("\n")
end
|
2008-03-18 19:37:10 +01:00
|
|
|
end
|
2008-06-20 23:26:26 +02:00
|
|
|
end
|